-
-
Notifications
You must be signed in to change notification settings - Fork 249
/
file.go
346 lines (293 loc) · 9.2 KB
/
file.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
package processor
import (
"fmt"
"github.com/karrick/godirwalk"
"github.com/monochromegane/go-gitignore"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"runtime/debug"
"strings"
"sync"
"sync/atomic"
)
// extensionCache memoizes getExtension results keyed by the lowercased
// file name. It must be a sync.Map because the parallel directory walk
// calls getExtension from many goroutines at once.
var extensionCache sync.Map

// getExtension is a custom replacement for filepath.Ext that keeps up to
// two trailing extensions (so "file.d.ts" yields "d.ts"), treats dotless
// names and dotfiles as their own extension, and caches results
// case-insensitively to avoid repeating the work.
func getExtension(name string) string {
	lowered := strings.ToLower(name)

	if cached, hit := extensionCache.Load(lowered); hit {
		return cached.(string)
	}

	var result string
	last := filepath.Ext(lowered)

	if last == "" || strings.LastIndex(lowered, ".") == 0 {
		// Either no dot at all, or the only dot is the leading one
		// (dotfiles such as ".gitignore"): the whole name is the extension.
		result = lowered
	} else {
		// Strip the final extension and probe once more so compound
		// extensions like "tar.gz" or "d.ts" are preserved in full;
		// penult is empty when there is only a single extension.
		penult := filepath.Ext(strings.TrimSuffix(lowered, last))
		result = strings.TrimPrefix(penult+last, ".")
	}

	extensionCache.Store(lowered, result)

	return result
}
// Iterate over the supplied directory in parallel and each file that is not
// excluded by the .gitignore and we know the extension of add to the supplied
// channel. This attempts to span out in parallel based on the number of directories
// in the supplied directory. Tests using a single process showed no lack of performance
// even when hitting older spinning platter disks for this way
func walkDirectoryParallel(root string, output chan *FileJob) {
	startTime := makeTimestampMilli()

	extensionLookup := ExtensionToLanguage

	// If input has a supplied white list of extensions then loop through them
	// and modify the lookup we use to cut down on extra checks
	if len(WhiteListExtensions) != 0 {
		wlExtensionLookup := map[string][]string{}

		for _, white := range WhiteListExtensions {
			language, ok := extensionLookup[white]
			if ok {
				wlExtensionLookup[white] = language
			}
		}

		extensionLookup = wlExtensionLookup
	}

	var totalCount int64 = 0
	var wg sync.WaitGroup

	// resetGc guards the one-time GC restore further down; the previous
	// plain bool flag was read and written by many goroutines at once,
	// which was a data race flagged by the race detector.
	var resetGc sync.Once

	isSoloFile := false
	var all []os.FileInfo

	// clean path including trailing slashes
	root = filepath.Clean(root)

	target, err := os.Lstat(root)
	if err != nil {
		// This error is non-recoverable due to user input so hard crash
		printError(err.Error())
		os.Exit(1)
	}

	if !target.IsDir() {
		// create an array with a single FileInfo so the loop below treats a
		// single file the same way as a directory listing
		all = append(all, target)
		isSoloFile = true
	} else {
		// NOTE(review): listing errors are deliberately ignored here; an
		// unreadable directory simply contributes no work
		all, _ = ioutil.ReadDir(root)
	}

	ignores := []gitignore.IgnoreMatcher{}
	if !GitIgnore {
		ignores = loadIgnoreFile(root, ".gitignore", ignores)
	}
	if !Ignore {
		ignores = loadIgnoreFile(root, ".ignore", ignores)
	}

	// Compile user supplied exclude patterns once up front rather than per entry
	var excludes []*regexp.Regexp
	for _, exclude := range Exclude {
		excludes = append(excludes, regexp.MustCompile(exclude))
	}

	var fpath string

	for _, f := range all {
		// Godirwalk despite being faster than the default walk is still too slow to feed the
		// CPU's and so we need to walk in parallel to keep up as much as possible
		if f.IsDir() {
			// Need to check if the directory is in the blacklist and if so don't bother adding a goroutine to process it
			shouldSkip := false
			for _, black := range PathBlacklist {
				if strings.HasPrefix(filepath.Join(root, f.Name()), filepath.Join(root, black)) {
					shouldSkip = true
					if Verbose {
						printWarn(fmt.Sprintf("skipping directory due to being in blacklist: %s", filepath.Join(root, f.Name())))
					}
					break
				}
			}

			for _, exclude := range excludes {
				if exclude.Match([]byte(f.Name())) {
					if Verbose {
						printWarn("skipping directory due to match exclude: " + f.Name())
					}
					shouldSkip = true
					break
				}
			}

			for _, i := range ignores {
				if i.Match(filepath.Join(root, f.Name()), true) {
					if Verbose {
						printWarn("skipping directory due to ignore: " + filepath.Join(root, f.Name()))
					}
					shouldSkip = true
					break
				}
			}

			if !shouldSkip {
				wg.Add(1)
				go func(toWalk string) {
					filejobs := walkDirectory(toWalk, PathBlacklist, extensionLookup, ignores)
					for i := 0; i < len(filejobs); i++ {
						for _, lan := range filejobs[i].PossibleLanguages {
							LoadLanguageFeature(lan)
						}
						output <- &filejobs[i]
					}

					// Turn GC back to what it was before if we have parsed enough files;
					// sync.Once makes the restore race-free and exactly-once
					if atomic.AddInt64(&totalCount, int64(len(filejobs))) >= int64(GcFileCount) {
						resetGc.Do(func() {
							debug.SetGCPercent(gcPercent)
						})
					}

					wg.Done()
				}(filepath.Join(root, f.Name()))
			}
		} else { // File processing starts here
			if isSoloFile {
				fpath = root
			} else {
				fpath = filepath.Join(root, f.Name())
			}

			shouldSkip := false
			for _, i := range ignores {
				if i.Match(filepath.Join(root, f.Name()), false) {
					if Verbose {
						printWarn("skipping file due to ignore: " + filepath.Join(root, f.Name()))
					}
					shouldSkip = true
					break
				}
			}

			for _, exclude := range excludes {
				if exclude.Match([]byte(f.Name())) {
					if Verbose {
						printWarn("skipping file due to match exclude: " + f.Name())
					}
					shouldSkip = true
					break
				}
			}

			if !shouldSkip {
				extension := ""

				// Lookup in case the full name matches
				language, ok := extensionLookup[strings.ToLower(f.Name())]

				// If no match check if we have a matching extension
				if !ok {
					extension = getExtension(f.Name())
					language, ok = extensionLookup[extension]
				}

				// Convert from d.ts to ts and check that in case of multiple extensions
				if !ok {
					language, ok = extensionLookup[getExtension(extension)]
				}

				if ok {
					atomic.AddInt64(&totalCount, 1)

					for _, l := range language {
						LoadLanguageFeature(l)
					}

					output <- &FileJob{Location: fpath, Filename: f.Name(), Extension: extension, PossibleLanguages: language}
				} else if Verbose {
					printWarn(fmt.Sprintf("skipping file unknown extension: %s", f.Name()))
				}
			}
		}
	}

	wg.Wait()

	if Debug {
		printDebug(fmt.Sprintf("milliseconds to walk directory: %d", makeTimestampMilli()-startTime))
	}
}
// loadIgnoreFile attempts to parse root/filename as a gitignore-style file.
// On success the resulting matcher is appended to ignores; on failure the
// slice is returned untouched. In verbose mode the outcome is reported
// either way.
func loadIgnoreFile(root string, filename string, ignores []gitignore.IgnoreMatcher) []gitignore.IgnoreMatcher {
	location := filepath.Join(root, filename)

	matcher, err := gitignore.NewGitIgnore(location)
	if err == nil {
		ignores = append(ignores, matcher)
	}

	if Verbose {
		if err != nil {
			printWarn(fmt.Sprintf("no ignore found: %s", location))
		} else {
			printWarn(fmt.Sprintf("found and loaded ignore file: %s", location))
		}
	}

	return ignores
}
// walkDirectory walks toWalk (without spawning goroutines of its own),
// collecting a FileJob for every file whose full name or extension maps to
// a known language, while honouring exclude regexes, the directory
// blacklist and any loaded ignore files. Errors during the walk are
// reported in verbose mode and the offending node is skipped.
func walkDirectory(toWalk string, blackList []string, extensionLookup map[string][]string, ignores []gitignore.IgnoreMatcher) []FileJob {
	var filejobs []FileJob

	// Compile the user supplied exclude patterns once for this walk
	var excludes []*regexp.Regexp
	for _, exclude := range Exclude {
		excludes = append(excludes, regexp.MustCompile(exclude))
	}

	_ = godirwalk.Walk(toWalk, &godirwalk.Options{
		// Unsorted is meant to make the walk faster and we need to sort after processing anyway
		Unsorted: true,
		Callback: func(root string, info *godirwalk.Dirent) error {
			// NOTE: root here is the full path of the current entry itself,
			// not the directory containing it (see Location below)
			for _, exclude := range excludes {
				if exclude.Match([]byte(info.Name())) {
					if Verbose {
						if info.IsDir() {
							printWarn("skipping directory due to match exclude: " + root)
						} else {
							printWarn("skipping file due to match exclude: " + root)
						}
					}

					if info.IsDir() {
						return filepath.SkipDir
					}

					return nil
				}
			}

			if info.IsDir() {
				for _, black := range blackList {
					if strings.HasPrefix(root, filepath.Join(toWalk, black)) {
						if Verbose {
							printWarn(fmt.Sprintf("skipping directory due to being in blacklist: %s", root))
						}
						return filepath.SkipDir
					}
				}

				for _, i := range ignores {
					if i.Match(root, true) {
						if Verbose {
							printWarn("skipping directory due to ignore: " + root)
						}
						return filepath.SkipDir
					}
				}
			} else {
				for _, i := range ignores {
					// root is already the file's full path; the previous
					// filepath.Join(root, info.Name()) doubled the file name
					// ("dir/a.go/a.go") so path-qualified ignore patterns
					// could never match correctly
					if i.Match(root, false) {
						if Verbose {
							printWarn("skipping file due to ignore: " + root)
						}
						return nil
					}
				}

				// extension must be reset per file; it was previously
				// declared at function scope so a file matched by its full
				// name inherited the previous file's extension
				extension := ""

				// Lookup in case the full name matches
				language, ok := extensionLookup[strings.ToLower(info.Name())]

				// If no match check if we have a matching extension
				if !ok {
					extension = getExtension(info.Name())
					language, ok = extensionLookup[extension]
				}

				// Convert from d.ts to ts and check that in case of multiple extensions
				if !ok {
					language, ok = extensionLookup[getExtension(extension)]
				}

				if ok {
					filejobs = append(filejobs, FileJob{Location: root, Filename: info.Name(), Extension: extension, PossibleLanguages: language})
				} else if Verbose {
					printWarn(fmt.Sprintf("skipping file unknown extension: %s", info.Name()))
				}
			}

			return nil
		},
		ErrorCallback: func(osPathname string, err error) godirwalk.ErrorAction {
			if Verbose {
				printWarn(fmt.Sprintf("error walking: %s %s", osPathname, err))
			}
			return godirwalk.SkipNode
		},
	})

	return filejobs
}