This repository has been archived by the owner on Jan 28, 2021. It is now read-only.
/
analyzer.go
193 lines (165 loc) · 4.91 KB
/
analyzer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
package analyzer // import "github.com/src-d/go-mysql-server/sql/analyzer"
import (
"os"
opentracing "github.com/opentracing/opentracing-go"
"github.com/sirupsen/logrus"
"github.com/src-d/go-mysql-server/sql"
"gopkg.in/src-d/go-errors.v1"
)
// Analyzer configuration constants.
const (
	// debugAnalyzerKey is the environment variable that, when present,
	// forces the built Analyzer into debug mode.
	debugAnalyzerKey = "DEBUG_ANALYZER"
	// maxAnalysisIterations bounds how many times an iterative batch of
	// rules may run before analysis gives up on that batch.
	maxAnalysisIterations = 1000
)
// ErrMaxAnalysisIters is returned when a batch runs maxAnalysisIterations
// times without its rules converging. Analyze treats it as non-fatal: the
// error is logged and analysis continues with the next batch.
var ErrMaxAnalysisIters = errors.NewKind("exceeded max analysis iterations (%d)")
// Builder provides an easy way to generate Analyzer with custom rules and options.
type Builder struct {
	// Custom rules inserted around the standard analyzer rules.
	preAnalyzeRules  []Rule
	postAnalyzeRules []Rule
	// Custom rules inserted around the standard validation rules.
	preValidationRules  []Rule
	postValidationRules []Rule
	// catalog is handed to the built Analyzer unchanged.
	catalog *sql.Catalog
	// debug forces debug mode on the built Analyzer (the DEBUG_ANALYZER
	// environment variable can also enable it at Build time).
	debug bool
	// parallelism level copied onto the built Analyzer.
	parallelism int
}
// NewBuilder creates a new Builder bound to the given catalog. Custom rules
// and options can then be configured before calling Build.
func NewBuilder(c *sql.Catalog) *Builder {
	b := Builder{catalog: c}
	return &b
}
// WithDebug activates debug on the Analyzer.
// It returns the receiver so calls can be chained.
func (ab *Builder) WithDebug() *Builder {
	ab.debug = true
	return ab
}
// WithParallelism sets the parallelism level on the analyzer.
// It returns the receiver so calls can be chained.
func (ab *Builder) WithParallelism(parallelism int) *Builder {
	ab.parallelism = parallelism
	return ab
}
// AddPreAnalyzeRule registers a custom rule that will run before the
// standard analyzer rules. It returns the receiver for chaining.
func (ab *Builder) AddPreAnalyzeRule(name string, fn RuleFunc) *Builder {
	rule := Rule{name, fn}
	ab.preAnalyzeRules = append(ab.preAnalyzeRules, rule)
	return ab
}
// AddPostAnalyzeRule registers a custom rule that will run after the
// standard analyzer rules. It returns the receiver for chaining.
func (ab *Builder) AddPostAnalyzeRule(name string, fn RuleFunc) *Builder {
	rule := Rule{name, fn}
	ab.postAnalyzeRules = append(ab.postAnalyzeRules, rule)
	return ab
}
// AddPreValidationRule registers a custom rule that will run before the
// standard validation rules. It returns the receiver for chaining.
func (ab *Builder) AddPreValidationRule(name string, fn RuleFunc) *Builder {
	rule := Rule{name, fn}
	ab.preValidationRules = append(ab.preValidationRules, rule)
	return ab
}
// AddPostValidationRule registers a custom rule that will run after the
// standard validation rules. It returns the receiver for chaining.
func (ab *Builder) AddPostValidationRule(name string, fn RuleFunc) *Builder {
	rule := Rule{name, fn}
	ab.postValidationRules = append(ab.postValidationRules, rule)
	return ab
}
// Build creates a new Analyzer from everything configured on the Builder.
// Batches run in a fixed order: pre-analyze rules, the once-before rules,
// the default analyzer rules, the once-after rules, post-analyze rules,
// then the three validation phases, and finally the after-all rules.
func (ab *Builder) Build() *Analyzer {
	// The DEBUG_ANALYZER environment variable enables debug mode even when
	// WithDebug was not called.
	_, debug := os.LookupEnv(debugAnalyzerKey)

	batches := []*Batch{
		{
			Desc:       "pre-analyzer rules",
			Iterations: maxAnalysisIterations,
			Rules:      ab.preAnalyzeRules,
		},
		{
			Desc:       "once execution rule before default",
			Iterations: 1,
			Rules:      OnceBeforeDefault,
		},
		{
			Desc:       "analyzer rules",
			Iterations: maxAnalysisIterations,
			Rules:      DefaultRules,
		},
		{
			Desc:       "once execution rules after default",
			Iterations: 1,
			Rules:      OnceAfterDefault,
		},
		{
			Desc:       "post-analyzer rules",
			Iterations: maxAnalysisIterations,
			Rules:      ab.postAnalyzeRules,
		},
		{
			Desc:       "pre-validation rules",
			Iterations: 1,
			Rules:      ab.preValidationRules,
		},
		{
			Desc:       "validation rules",
			Iterations: 1,
			Rules:      DefaultValidationRules,
		},
		{
			Desc:       "post-validation rules",
			Iterations: 1,
			Rules:      ab.postValidationRules,
		},
		{
			Desc:       "after-all rules",
			Iterations: 1,
			Rules:      OnceAfterAll,
		},
	}

	return &Analyzer{
		Debug:       debug || ab.debug,
		Batches:     batches,
		Catalog:     ab.catalog,
		Parallelism: ab.parallelism,
	}
}
// Analyzer analyzes nodes of the execution plan and applies rules and validations
// to them.
type Analyzer struct {
	// Debug enables INFO logging through Log.
	Debug bool
	// Parallelism level configured via Builder.WithParallelism.
	Parallelism int
	// Batches of Rules to apply.
	Batches []*Batch
	// Catalog of databases and registered functions.
	Catalog *sql.Catalog
}
// NewDefault creates a default Analyzer instance with all default Rules and
// configuration. To add custom rules, use a Builder (NewBuilder) instead.
func NewDefault(c *sql.Catalog) *Analyzer {
	b := NewBuilder(c)
	return b.Build()
}
// Log prints an INFO message with the given format and args, but only when
// the analyzer is non-nil and in debug mode; otherwise it is a no-op.
func (a *Analyzer) Log(msg string, args ...interface{}) {
	if a == nil || !a.Debug {
		return
	}
	logrus.Infof(msg, args...)
}
// Analyze applies every batch of rules to the node and all its children,
// returning the transformed (ideally fully resolved) node.
//
// A batch that exceeds maxAnalysisIterations is logged and skipped rather
// than aborting the analysis; any other error stops analysis immediately.
func (a *Analyzer) Analyze(ctx *sql.Context, n sql.Node) (sql.Node, error) {
	span, ctx := ctx.Span("analyze", opentracing.Tags{
		"plan": n.String(),
	})

	prev := n
	var err error

	// Register the cleanup before running any batch so the span is finished
	// on every exit path. The previous version registered this defer after
	// the loop, so an error return from a batch leaked the span (it was
	// never finished and IsResolved was never tagged).
	defer func() {
		if prev != nil {
			span.SetTag("IsResolved", prev.Resolved())
		}
		span.Finish()
	}()

	a.Log("starting analysis of node of type: %T", n)
	for _, batch := range a.Batches {
		prev, err = batch.Eval(ctx, a, prev)
		if ErrMaxAnalysisIters.Is(err) {
			// Non-fatal: log and move on to the next batch.
			a.Log(err.Error())
			continue
		}
		if err != nil {
			return nil, err
		}
	}

	return prev, err
}
// equaler is implemented by nodes that can report whether they are
// equal to another node.
type equaler interface {
	Equal(sql.Node) bool
}