plugin.go

package main

import (
	"crypto/md5"
	"fmt"
	"io"
	"log"
	"path/filepath"
	"strings"
	"time"

	"github.com/Sirupsen/logrus"
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/bsm/drone-s3-cache/cache"
)

// Plugin for caching directories to an S3-compatible server.
type Plugin struct {
	Rebuild bool
	Restore bool
	Mount   []string

	Endpoint string
	Key      string
	Secret   string
	Bucket   string
	Region   string

	// If not "", enable server-side encryption.
	// Valid values are:
	//   AES256
	//   aws:kms
	Encryption string

	// Indicates the files ACL, which should be one
	// of the following:
	//   private
	//   public-read
	//   public-read-write
	//   authenticated-read
	//   bucket-owner-read
	//   bucket-owner-full-control
	ACL string

	// Use path style instead of domain style.
	//
	// Should be true for minio and false for AWS.
	PathStyle bool

	Repo    string
	Branch  string
	Default string // default master branch
}
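
// Exec configures the S3 client and runs the requested cache steps. Errors
// from the rebuild and restore operations are logged rather than returned,
// so a cache failure does not abort the step.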
func (p *Plugin) Exec() error {
	conf := &aws.Config{
		Region:           aws.String(p.Region),
		Endpoint:         &p.Endpoint,
		DisableSSL:       aws.Bool(strings.HasPrefix(p.Endpoint, "http://")),
		S3ForcePathStyle: aws.Bool(p.PathStyle),
	}

	// Use static credentials when a key and secret are provided;
	// otherwise fall back to the instance role.
	if p.Key != "" && p.Secret != "" {
		conf.Credentials = credentials.NewStaticCredentials(p.Key, p.Secret, "")
	}

	cc := cache.New(p.Bucket, p.ACL, p.Encryption, conf)

	if p.Rebuild {
		now := time.Now()
		if err := p.ProcessRebuild(cc); err != nil {
			logrus.Println(err)
		} else {
			logrus.Printf("cache built in %v", time.Since(now))
		}
	}

	if p.Restore {
		now := time.Now()
		if err := p.ProcessRestore(cc); err != nil {
			logrus.Println(err)
		} else {
			logrus.Printf("cache restored in %v", time.Since(now))
		}
	}

	return nil
}
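
// Cache entries are keyed as "<repo>/<md5(mount + branch)>", so each mount
// point gets its own cache object per repository and branch.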

// ProcessRebuild rebuilds the remote cache from the local environment.
func (p Plugin) ProcessRebuild(c cache.Cache) error {
	for _, mount := range p.Mount {
		hash := hasher(mount, p.Branch)
		path := filepath.Join(p.Repo, hash)

		log.Printf("archiving directory <%s> to remote cache <%s>", mount, path)
		err := cache.RebuildCmd(c, mount, path)
		if err != nil {
			return err
		}
	}
	return nil
}

// ProcessRestore restores the local environment from the remote cache.
func (p Plugin) ProcessRestore(c cache.Cache) error {
	for _, mount := range p.Mount {
		hash := hasher(mount, p.Branch)
		path := filepath.Join(p.Repo, hash)

		log.Printf("restoring directory <%s> from remote cache <%s>", mount, path)
		err := cache.RestoreCmd(c, path, mount)
		if err != nil {
			return err
		}
	}
	return nil
}

// hasher returns an MD5 hex digest of the mount path and branch, used to
// build the remote cache key.
func hasher(mount, branch string) string {
	parts := []string{mount, branch}

	// calculate the hash using the mount path and branch
	h := md5.New()
	for _, part := range parts {
		io.WriteString(h, part)
	}
	return fmt.Sprintf("%x", h.Sum(nil))
}