package leaves

import (
	"fmt"
	"io"
	"sort"
	"strings"

	"github.com/gogo/protobuf/proto"
	"gopkg.in/src-d/go-git.v4"
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
	"gopkg.in/src-d/hercules.v10/internal/core"
	"gopkg.in/src-d/hercules.v10/internal/pb"
	items "gopkg.in/src-d/hercules.v10/internal/plumbing"
	"gopkg.in/src-d/hercules.v10/internal/plumbing/identity"
)

// FileHistoryAnalysis contains the intermediate state which is mutated by Consume(). It should implement
// LeafPipelineItem.
type FileHistoryAnalysis struct {
	core.NoopMerger
	core.OneShotMergeProcessor
	// files maps file paths to the accumulated per-file statistics.
	files map[string]*FileHistory
	// lastCommit is the most recent commit passed to Consume(); Finalize() uses it
	// to report only the files which still exist at the head of the history.
	lastCommit *object.Commit
	// l is the logger used to report errors.
	l core.Logger
}

// FileHistoryResult is returned by Finalize() and represents the analysis result.
type FileHistoryResult struct {
	Files map[string]FileHistory
}

// FileHistory is the gathered stats about a particular file.
type FileHistory struct {
	// Hashes is the list of commit hashes which changed this file.
	Hashes []plumbing.Hash
	// People is the mapping from developers to the number of lines they altered.
	People map[int]items.LineStats
}

// Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
func (history *FileHistoryAnalysis) Name() string {
	return "FileHistoryAnalysis"
}

// Provides returns the list of names of entities which are produced by this PipelineItem.
// Each produced entity will be inserted into `deps` of dependent Consume()-s according
// to this list. Also used by core.Registry to build the global map of providers.
func (history *FileHistoryAnalysis) Provides() []string {
	return []string{}
}

// Requires returns the list of names of entities which are needed by this PipelineItem.
// Each requested entity will be inserted into `deps` of Consume(). In turn, those
// entities must be provided by the Provides() of some upstream PipelineItem.
func (history *FileHistoryAnalysis) Requires() []string {
	return []string{items.DependencyTreeChanges, items.DependencyLineStats, identity.DependencyAuthor}
}

// ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
func (history *FileHistoryAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
	return []core.ConfigurationOption{}
}

// Flag for the command line switch which enables this analysis.
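// Invoking it from the hercules CLI presumably looks like
// `hercules --file-history <repository>` (the repository argument is up to the caller).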
func (history *FileHistoryAnalysis) Flag() string {
	return "file-history"
}

// Description returns the text which explains what the analysis is doing.
func (history *FileHistoryAnalysis) Description() string {
	return "Each file path is mapped to the list of commits which touch that file and the mapping " +
		"from involved developers to the corresponding line statistics: how many lines were added, " +
		"removed and changed throughout the whole history."
}

// Configure sets the properties previously published by ListConfigurationOptions().
func (history *FileHistoryAnalysis) Configure(facts map[string]interface{}) error {
	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
		history.l = l
	}
	return nil
}

// Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
// calls. The repository which is going to be analysed is supplied as an argument.
func (history *FileHistoryAnalysis) Initialize(repository *git.Repository) error {
	history.l = core.NewLogger()
	history.files = map[string]*FileHistory{}
	history.OneShotMergeProcessor.Initialize()
	return nil
}

// Consume runs this PipelineItem on the next commit data.
// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
// Additionally, DependencyCommit is always present there and represents the analysed *object.Commit.
// This function returns the mapping with analysis results. The keys must be the same as
// in Provides(). If there was an error, nil is returned.
func (history *FileHistoryAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
	if deps[core.DependencyIsMerge].(bool) {
		// we ignore merge commits
		// TODO(vmarkovtsev): handle them better
		return nil, nil
	}
	history.lastCommit = deps[core.DependencyCommit].(*object.Commit)
	commit := history.lastCommit.Hash
	changes := deps[items.DependencyTreeChanges].(object.Changes)
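	// Record this commit's hash in the history of every file touched by the tree diff.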
	for _, change := range changes {
		action, _ := change.Action()
		var fh *FileHistory
		if action != merkletrie.Delete {
			fh = history.files[change.To.Name]
		} else {
			fh = history.files[change.From.Name]
		}
		if fh == nil {
			fh = &FileHistory{}
			if action != merkletrie.Delete {
				history.files[change.To.Name] = fh
			} else {
				// a previously unseen deleted file - register it under its old name,
				// not under the empty To.Name
				history.files[change.From.Name] = fh
			}
		}
		switch action {
		case merkletrie.Insert:
			fh.Hashes = []plumbing.Hash{commit}
		case merkletrie.Delete:
			fh.Hashes = append(fh.Hashes, commit)
		case merkletrie.Modify:
			hashes := fh.Hashes
			if change.From.Name != change.To.Name {
				// the file was renamed - transfer the history from the old name
				if prev := history.files[change.From.Name]; prev != nil {
					hashes = prev.Hashes
					delete(history.files, change.From.Name)
				}
			}
			hashes = append(hashes, commit)
			fh.Hashes = hashes
		}
	}
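	// Accumulate per-author line statistics for every file changed in this commit.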
	lineStats := deps[items.DependencyLineStats].(map[object.ChangeEntry]items.LineStats)
	author := deps[identity.DependencyAuthor].(int)
	for changeEntry, stats := range lineStats {
		file := history.files[changeEntry.Name]
		if file == nil {
			file = &FileHistory{}
			history.files[changeEntry.Name] = file
		}
		people := file.People
		if people == nil {
			people = map[int]items.LineStats{}
			file.People = people
		}
		oldStats := people[author]
		people[author] = items.LineStats{
			Added:   oldStats.Added + stats.Added,
			Removed: oldStats.Removed + stats.Removed,
			Changed: oldStats.Changed + stats.Changed,
		}
	}
	return nil, nil
}

// Finalize returns the result of the analysis. Further Consume() calls are not expected.
func (history *FileHistoryAnalysis) Finalize() interface{} {
	files := map[string]FileHistory{}
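	// Report only the files which exist in the last processed commit;
	// the accumulated histories of files deleted earlier are dropped here.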
	fileIter, err := history.lastCommit.Files()
	if err != nil {
		history.l.Errorf("Failed to iterate files of %s", history.lastCommit.Hash.String())
		return err
	}
	err = fileIter.ForEach(func(file *object.File) error {
		if fh := history.files[file.Name]; fh != nil {
			files[file.Name] = *fh
		}
		return nil
	})
	if err != nil {
		history.l.Errorf("Failed to iterate files of %s", history.lastCommit.Hash.String())
		return err
	}
	return FileHistoryResult{Files: files}
}

// Fork clones this PipelineItem.
func (history *FileHistoryAnalysis) Fork(n int) []core.PipelineItem {
	return core.ForkSamePipelineItem(history, n)
}

// Serialize converts the analysis result as returned by Finalize() to text or bytes.
// The text format is YAML and the bytes format is Protocol Buffers.
func (history *FileHistoryAnalysis) Serialize(result interface{}, binary bool, writer io.Writer) error {
	historyResult := result.(FileHistoryResult)
	if binary {
		return history.serializeBinary(&historyResult, writer)
	}
	history.serializeText(&historyResult, writer)
	return nil
}

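// serializeText writes the result in YAML format. A sketch of the output for one
// file (the hashes and the developer indexes below are made up for illustration):
//
//	  - cmd/main.go:
//	    commits: ["0567ee...","8a21f0..."]
//	    people: {0:[12,3,1],2:[5,0,0]}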
func (history *FileHistoryAnalysis) serializeText(result *FileHistoryResult, writer io.Writer) {
	keys := make([]string, len(result.Files))
	i := 0
	for key := range result.Files {
		keys[i] = key
		i++
	}
	sort.Strings(keys)
	for _, key := range keys {
		fmt.Fprintf(writer, "  - %s:\n", key)
		file := result.Files[key]
		hashes := file.Hashes
		strhashes := make([]string, len(hashes))
		for i, hash := range hashes {
			strhashes[i] = "\"" + hash.String() + "\""
		}
		sort.Strings(strhashes)
		fmt.Fprintf(writer, "    commits: [%s]\n", strings.Join(strhashes, ","))
		strpeople := make([]string, 0, len(file.People))
		for key, val := range file.People {
			strpeople = append(strpeople, fmt.Sprintf("%d:[%d,%d,%d]", key, val.Added, val.Removed, val.Changed))
		}
		sort.Strings(strpeople)
		fmt.Fprintf(writer, "    people: {%s}\n", strings.Join(strpeople, ","))
	}
}

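// serializeBinary writes the result as a Protocol Buffers FileHistoryResultMessage.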
func (history *FileHistoryAnalysis) serializeBinary(result *FileHistoryResult, writer io.Writer) error {
	message := pb.FileHistoryResultMessage{
		Files: map[string]*pb.FileHistory{},
	}
	for key, vals := range result.Files {
		fh := &pb.FileHistory{
			Commits:            make([]string, len(vals.Hashes)),
			ChangesByDeveloper: map[int32]*pb.LineStats{},
		}
		for i, hash := range vals.Hashes {
			fh.Commits[i] = hash.String()
		}
		for key, val := range vals.People {
			fh.ChangesByDeveloper[int32(key)] = &pb.LineStats{
				Added:   int32(val.Added),
				Removed: int32(val.Removed),
				Changed: int32(val.Changed),
			}
		}
		message.Files[key] = fh
	}
	serialized, err := proto.Marshal(&message)
	if err != nil {
		return err
	}
	_, err = writer.Write(serialized)
	return err
}

func init() {
	core.Registry.Register(&FileHistoryAnalysis{})
}
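
// A minimal sketch of reading back the bytes produced by serializeBinary above,
// assuming they were written verbatim to a file (the file name is hypothetical):
//
//	data, err := ioutil.ReadFile("file_history.pb")
//	if err != nil {
//		panic(err)
//	}
//	var message pb.FileHistoryResultMessage
//	if err = proto.Unmarshal(data, &message); err != nil {
//		panic(err)
//	}
//	for path, fh := range message.Files {
//		fmt.Println(path, len(fh.Commits), "commits")
//	}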