crawler.go (forked from elastic/beats)
package crawler

import (
	"fmt"
	"sync"

	"github.com/elastic/beats/filebeat/input/file"
	"github.com/elastic/beats/filebeat/prospector"
	"github.com/elastic/beats/filebeat/registrar"
	"github.com/elastic/beats/libbeat/cfgfile"
	"github.com/elastic/beats/libbeat/common"
	"github.com/elastic/beats/libbeat/logp"
)

// Crawler starts and stops the prospectors that read input files and
// forward events through the configured outlet.
type Crawler struct {
	prospectors       map[uint64]*prospector.Prospector
	prospectorConfigs []*common.Config
	out               prospector.Outlet
	wg                sync.WaitGroup
	reloader          *cfgfile.Reloader
	once              bool
	beatDone          chan struct{}
}

// New creates a Crawler for the given prospector configurations.
// Nothing is started until Start is called.
func New(out prospector.Outlet, prospectorConfigs []*common.Config, beatDone chan struct{}, once bool) (*Crawler, error) {
	return &Crawler{
		out:               out,
		prospectors:       map[uint64]*prospector.Prospector{},
		prospectorConfigs: prospectorConfigs,
		once:              once,
		beatDone:          beatDone,
	}, nil
}

// Start starts one prospector per configuration and, if dynamic config
// reloading is enabled, runs the reloader in the background.
func (c *Crawler) Start(r *registrar.Registrar, reloaderConfig *common.Config) error {
	logp.Info("Loading Prospectors: %v", len(c.prospectorConfigs))

	// Prospect the globs/paths given on the command line and launch harvesters
	for _, prospectorConfig := range c.prospectorConfigs {
		err := c.startProspector(prospectorConfig, r.GetStates())
		if err != nil {
			return err
		}
	}

	if reloaderConfig.Enabled() {
		logp.Warn("BETA feature dynamic configuration reloading is enabled.")

		c.reloader = cfgfile.NewReloader(reloaderConfig)
		factory := prospector.NewFactory(c.out, r, c.beatDone)
		go func() {
			c.reloader.Run(factory)
		}()
	}

	logp.Info("Loading and starting Prospectors completed. Enabled prospectors: %v", len(c.prospectors))

	return nil
}

// startProspector creates a prospector from the given config, loads the
// registry states for it, and starts it. Disabled configs are skipped.
func (c *Crawler) startProspector(config *common.Config, states []file.State) error {
	if !config.Enabled() {
		return nil
	}
	p, err := prospector.NewProspector(config, c.out, c.beatDone)
	if err != nil {
		return fmt.Errorf("error initializing prospector: %s", err)
	}
	p.Once = c.once

	if _, ok := c.prospectors[p.ID()]; ok {
		return fmt.Errorf("prospector with same ID already exists: %v", p.ID())
	}

	err = p.LoadStates(states)
	if err != nil {
		return fmt.Errorf("error loading states for prospector %v: %v", p.ID(), err)
	}

	c.prospectors[p.ID()] = p
	p.Start()

	return nil
}

// Stop stops all prospectors (in parallel) and the reloader, then blocks
// until everything has shut down.
func (c *Crawler) Stop() {
	logp.Info("Stopping Crawler")

	asyncWaitStop := func(stop func()) {
		c.wg.Add(1)
		go func() {
			defer c.wg.Done()
			stop()
		}()
	}

	logp.Info("Stopping %v prospectors", len(c.prospectors))
	for _, p := range c.prospectors {
		// Stop prospectors in parallel
		asyncWaitStop(p.Stop)
	}

	if c.reloader != nil {
		asyncWaitStop(c.reloader.Stop)
	}

	c.WaitForCompletion()
	logp.Info("Crawler stopped")
}

// WaitForCompletion blocks until all asynchronously stopped components
// (prospectors and the reloader) have finished.
func (c *Crawler) WaitForCompletion() {
	c.wg.Wait()
}
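
// Usage sketch (illustrative only, not part of the original file): a caller
// such as the filebeat beater would wire the crawler roughly as below. The
// names `outlet`, `configs`, `reg`, `reloaderCfg`, and `beatDone` stand in
// for values the caller is assumed to have set up elsewhere.
//
//	crawler, err := New(outlet, configs, beatDone, false)
//	if err != nil {
//		return err
//	}
//	if err := crawler.Start(reg, reloaderCfg); err != nil {
//		return err
//	}
//	defer crawler.Stop()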