/
s3-upload.go
225 lines (184 loc) · 4.95 KB
/
s3-upload.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
package cmd
import (
"errors"
"fmt"
"io/ioutil"
"path"
"path/filepath"
"strings"
"sync"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
ctlaws "github.com/liangrog/cfctl/pkg/aws"
"github.com/liangrog/cfctl/pkg/utils"
"github.com/liangrog/cfctl/pkg/utils/i18n"
"github.com/liangrog/cfctl/pkg/utils/templates"
"github.com/spf13/cobra"
)
// Help text for the "s3 upload" command. All strings pass through i18n for
// localization; Long/Example text is normalized by the templates helpers.
var (
// One-line summary shown in the parent command's sub-command list.
s3UploadShort = i18n.T("Upload files to S3.")
// Full description shown by `cfctl s3 upload --help`.
s3UploadLong = templates.LongDesc(i18n.T(`
Upload files to S3 bucket. Overwrite files if exist.`))
// Worked examples shown under EXAMPLES in the help output.
s3UploadExample = templates.Examples(i18n.T(`
# Upload one file
$ cfctl s3 upload file-1 --bucket my-bucket
# Upload multiple files
$ cfctl s3 upload file-1 file-2 --bucket my-bucket
# Upload everything in a folder recursively
$ cfctl s3 upload template/web --bucket my-bucket -r
# Upload everything in a folder recursively except fileA
$ cfctl s3 upload template/web --bucket my-bucket -r --exclude-files fileA
# Upload files and folder
$ cfctl s3 upload file-1 template/web --bucket my-bucket -r`))
)
// init wires the "upload" sub-command (with its flags) onto the s3 command tree.
func init() {
	uploadCmd := getCmdS3Upload()
	addFlagsS3Upload(uploadCmd)
	CmdS3.AddCommand(uploadCmd)
}
// addFlagsS3Upload registers the flags accepted by the "s3 upload" command:
// the target bucket, an optional key prefix, recursive directory traversal,
// and a comma-separated file-name exclusion list.
func addFlagsS3Upload(cmd *cobra.Command) {
	cmd.Flags().String(CMD_S3_UPLOAD_BUCKET, "", "S3 bucket name")
	cmd.Flags().String(CMD_S3_UPLOAD_PREFIX, "", "the path prefix for S3 bucket that the objects will be uploaded to")
	cmd.Flags().BoolP(CMD_S3_UPLOAD_RECURSIVE, "r", false, "recursively travel the given directory for all objects")
	// Fixed user-facing typo: "seperate by comma" -> "separated by comma".
	cmd.Flags().String(CMD_S3_UPLOAD_EXCLUDE_FILES, "", "exclude files with matching file names from upload. Multiple file names separated by comma")
}
// cmd: upload
//
// getCmdS3Upload builds the "s3 upload" cobra command. Every positional
// argument is a local file or directory to upload; flags select the target
// bucket, key prefix, recursion and exclusion list. Uploads stop at the
// first failing argument and the error is returned.
func getCmdS3Upload() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "upload",
		Short: s3UploadShort,
		Long:  s3UploadLong,
		// Was fmt.Sprintf(s3UploadExample): a non-constant format string
		// trips `go vet` and would mangle any literal '%' in the examples.
		Example: s3UploadExample,
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) < 1 {
				// Error strings are lowercase per Go convention.
				return errors.New("missing local objects path")
			}
			return nil
		},
		RunE: func(cmd *cobra.Command, args []string) error {
			// Read flags once, up front, via the typed getters instead of
			// Lookup(...).Value.String() on every iteration.
			recursive, _ := cmd.Flags().GetBool(CMD_S3_UPLOAD_RECURSIVE)
			bucket, _ := cmd.Flags().GetString(CMD_S3_UPLOAD_BUCKET)
			prefix, _ := cmd.Flags().GetString(CMD_S3_UPLOAD_PREFIX)
			exclude, _ := cmd.Flags().GetString(CMD_S3_UPLOAD_EXCLUDE_FILES)

			for _, arg := range args {
				err := s3Upload(arg, bucket, prefix, recursive, exclude)

				// Upload failures are runtime errors, not usage errors:
				// suppress the usage dump before propagating.
				silenceUsageOnError(cmd, err)
				if err != nil {
					return err
				}
			}

			return nil
		},
	}

	return cmd
}
// Upload
//
// s3Upload uploads a single file, or the files inside a directory, to the
// given S3 bucket under the given key prefix.
//
//	objPath   local file or directory to upload
//	bucket    destination S3 bucket name
//	prefix    key prefix prepended to each uploaded object's key
//	recursive when objPath is a directory, walk it fully instead of only
//	          its top level
//	exf       comma-separated file names to exclude from the upload
//
// Returns the first read/upload/scan error encountered, or nil.
func s3Upload(objPath, bucket, prefix string, recursive bool, exf string) error {
	// Default only current dir
	// NOTE(review): level appears to be a depth limit for utils.ScanFiles,
	// with 0 meaning "unlimited" — confirm against utils.ScanFiles.
	level := 1
	if recursive {
		level = 0
	}

	var exfiles []string
	if len(exf) > 0 {
		exfiles = strings.Split(exf, ",")
	}

	// Closed on return (via defer) to tell the scanner and the upload
	// workers to stop, including on early error returns below.
	done := make(chan bool)
	defer close(done)

	isDir, err := utils.IsDir(objPath)
	if err != nil {
		return err
	}

	// Blank line before the upload report lines on the console.
	fmt.Println("")

	// If it's only one file
	if !isDir {
		content, err := ioutil.ReadFile(objPath)
		if err != nil {
			return err
		}

		// Upload all nested template to s3
		cfs3 := ctlaws.NewS3(s3.New(ctlaws.AWSSess))
		// The object key is prefix + the path as given on the command line.
		out, err := cfs3.Upload(bucket, path.Join(prefix, objPath), content)
		if err != nil {
			return err
		}

		s3UploadPrintToConsole(objPath, out.Location)
	} else {
		// Directory: stream discovered file paths from the scanner and
		// fan them out to a fixed pool of upload workers.
		paths, errc := utils.ScanFiles(objPath, done, level)

		// Start 10 workers
		var wg sync.WaitGroup
		numProc := 10
		wg.Add(numProc)

		// Object keys are rewritten relative to the base name of the
		// starting directory (filepath.Abs error deliberately ignored;
		// worst case startPath falls back to a relative base).
		startPath, _ := filepath.Abs(objPath)
		startPath = path.Base(startPath)

		result := make(chan *uploadResult)
		for i := 0; i < numProc; i++ {
			go func() {
				uploadWorker(bucket, prefix, startPath, paths, result, done, exfiles)
				wg.Done()
			}()
		}

		// Close result when all workers
		// have finished, so the range below terminates.
		go func() {
			wg.Wait()
			close(result)
		}()

		for r := range result {
			if r.err != nil {
				// Early return: the deferred close(done) above is what
				// unblocks the remaining workers (see uploadWorker).
				return r.err
			}

			s3UploadPrintToConsole(r.path, r.output.Location)
		}

		// Check whether the file scan failed.
		if err := <-errc; err != nil {
			return err
		}
	}

	return nil
}
// Output from upload worker
//
// uploadResult carries either a successful upload (path + output) or an
// error back from a worker to s3Upload's collector loop.
type uploadResult struct {
	path   string                  // local file path that was uploaded
	output *s3manager.UploadOutput // S3 response; nil when err is set
	err    error                   // read or upload failure, if any
}
// Worker to upload object to s3 bucket
//
// uploadWorker drains file paths from paths, skips names in exfiles, uploads
// each remaining file to bucket under prefix (keys rewritten relative to
// startPath), and reports every outcome on result. It exits when paths is
// closed or when done is closed by the consumer.
func uploadWorker(bucket, prefix, startPath string, paths <-chan string, result chan<- *uploadResult, done <-chan bool, exfiles []string) {
	cfs3 := ctlaws.NewS3(s3.New(ctlaws.AWSSess))

	// send delivers r unless the consumer has gone away (done closed).
	// Returns false when the worker should stop. Previously the two error
	// sends below were plain `result <- ...` with no done case: once
	// s3Upload returned early on the first error, any worker blocked on
	// such a send leaked forever (and result was never closed).
	send := func(r *uploadResult) bool {
		select {
		case result <- r:
			return true
		case <-done:
			return false
		}
	}

	for p := range paths {
		// Skip file that's in exclusion list
		if utils.InSlice(exfiles, filepath.Base(p)) {
			continue
		}

		content, err := ioutil.ReadFile(p)
		if err != nil {
			if !send(&uploadResult{err: err}) {
				return
			}
			continue
		}

		// Upload all nested template to s3
		out, err := cfs3.Upload(bucket, path.Join(prefix, utils.RewritePath(p, startPath)), content)
		if err != nil {
			if !send(&uploadResult{err: err}) {
				return
			}
			continue
		}

		if !send(&uploadResult{path: p, output: out}) {
			return
		}
	}
}
// Print to console
//
// s3UploadPrintToConsole writes one "local path -> S3 URL" record for a
// completed upload via the project's info printer.
func s3UploadPrintToConsole(localPath, s3url string) {
	line := fmt.Sprintf("[ s3 | upload ] %s -> %s", localPath, s3url)
	utils.InfoPrint(line)
}