This repository has been archived by the owner on May 8, 2024. It is now read-only.
/
query-manager.go
182 lines (156 loc) · 5.38 KB
/
query-manager.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
package eth
import (
"fmt"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
batch "github.com/rocket-pool/batch-query"
"golang.org/x/sync/errgroup"
)
// Manages multicall-capable queries to the Execution layer.
// Queries are aggregated into single multicall contract invocations, optionally
// split into batches that run in parallel (see BatchQuery / FlexBatchQuery).
type QueryManager struct {
// The client to use when querying the chain.
client IExecutionClient
// Address of the multicall contract to use.
multicallAddress common.Address
// The maximum number of batches to query in parallel.
// Negative values mean no limit (passed straight to errgroup.Group.SetLimit).
concurrentCallLimit int
}
// Creates a new query manager.
// concurrentCallLimit should be the maximum number of batches to query in parallel for batch calls. Negative values mean no limit.
func NewQueryManager(client IExecutionClient, multicallAddress common.Address, concurrentCallLimit int) *QueryManager {
return &QueryManager{
client:           client,
multicallAddress: multicallAddress,
// BUG FIX: this field was previously never assigned, leaving it at the zero
// value. errgroup.SetLimit(0) forbids starting any goroutine, which made
// BatchQuery and FlexBatchQuery block forever on the first wg.Go call.
concurrentCallLimit: concurrentCallLimit,
}
}
// Run a multicall query that doesn't perform any return type allocation.
// The 'query' function is an optional general-purpose function you can use to add whatever you want to the multicall
// before running it. The 'queryables' can be used to simply list a collection of IQueryable objects, each of which will
// run 'AddToQuery()' on the multicall for convenience.
func (q *QueryManager) Query(query func(*batch.MultiCaller) error, opts *bind.CallOpts, queryables ...IQueryable) error {
// Build a fresh multicaller for this query
mc, err := batch.NewMultiCaller(q.client, q.multicallAddress)
if err != nil {
return fmt.Errorf("error creating multicaller: %w", err)
}

// Let the caller register arbitrary calls first, if provided
if query != nil {
if err := query(mc); err != nil {
return fmt.Errorf("error running multicall query: %w", err)
}
}

// Register each queryable's calls
AddQueryablesToMulticall(mc, queryables...)

// Execute everything in one call, requiring every sub-call to succeed
if _, err := mc.FlexibleCall(true, opts); err != nil {
return fmt.Errorf("error executing multicall: %w", err)
}
return nil
}
// Run a multicall query that doesn't perform any return type allocation
// Use this if one of the calls is allowed to fail without interrupting the others; the returned result array provides information about the success of each call.
// The 'query' function is an optional general-purpose function you can use to add whatever you want to the multicall
// before running it. The 'queryables' can be used to simply list a collection of IQueryable objects, each of which will
// run 'AddToQuery()' on the multicall for convenience.
func (q *QueryManager) FlexQuery(query func(*batch.MultiCaller) error, opts *bind.CallOpts, queryables ...IQueryable) ([]bool, error) {
// Build a fresh multicaller for this query
mc, err := batch.NewMultiCaller(q.client, q.multicallAddress)
if err != nil {
return nil, fmt.Errorf("error creating multicaller: %w", err)
}

// Let the caller register arbitrary calls first, if provided
if query != nil {
if err := query(mc); err != nil {
return nil, fmt.Errorf("error running multicall query: %w", err)
}
}

// Register each queryable's calls
AddQueryablesToMulticall(mc, queryables...)

// Execute in lenient mode: individual call failures are reported per-slot
// in the returned bool slice instead of aborting the whole multicall
return mc.FlexibleCall(false, opts)
}
// Create and execute a multicall query that is too big for one call and must be run in batches
func (q *QueryManager) BatchQuery(count int, batchSize int, query func(*batch.MultiCaller, int) error, opts *bind.CallOpts) error {
// Bounded parallelism across batches
var group errgroup.Group
group.SetLimit(q.concurrentCallLimit)

// Launch one goroutine per batch of indices [start, end)
for start := 0; start < count; start += batchSize {
start := start // capture per-iteration value for the closure (pre-Go 1.22 semantics)
end := start + batchSize
if end > count {
end = count
}

group.Go(func() error {
mc, err := batch.NewMultiCaller(q.client, q.multicallAddress)
if err != nil {
return err
}
// Let the caller add each index's calls to this batch's multicaller
for index := start; index < end; index++ {
if err := query(mc, index); err != nil {
return fmt.Errorf("error running query adder: %w", err)
}
}
// Strict mode: any sub-call failure fails the whole batch
if _, err := mc.FlexibleCall(true, opts); err != nil {
return fmt.Errorf("error executing multicall: %w", err)
}
return nil
})
}

// Block until every batch has finished
if err := group.Wait(); err != nil {
return fmt.Errorf("error during multicall query: %w", err)
}
return nil
}
// Create and execute a multicall query that is too big for one call and must be run in batches.
// Use this if one of the calls is allowed to fail without interrupting the others; the returned result array provides information about the success of each call.
func (q *QueryManager) FlexBatchQuery(count int, batchSize int, query func(*batch.MultiCaller, int) error, handleResult func(bool, int) error, opts *bind.CallOpts) error {
// Bounded parallelism across batches
var group errgroup.Group
group.SetLimit(q.concurrentCallLimit)

// Launch one goroutine per batch of indices [start, end)
for start := 0; start < count; start += batchSize {
start := start // capture per-iteration value for the closure (pre-Go 1.22 semantics)
end := start + batchSize
if end > count {
end = count
}

group.Go(func() error {
mc, err := batch.NewMultiCaller(q.client, q.multicallAddress)
if err != nil {
return err
}
// Let the caller add each index's calls to this batch's multicaller
for index := start; index < end; index++ {
if err := query(mc, index); err != nil {
return fmt.Errorf("error running query adder: %w", err)
}
}
// Lenient mode: per-call success flags come back in results
results, err := mc.FlexibleCall(false, opts)
if err != nil {
return fmt.Errorf("error executing multicall: %w", err)
}
// Report each call's outcome to the caller, translating the batch-local
// position back into the global index
for offset, succeeded := range results {
if err := handleResult(succeeded, offset+start); err != nil {
return fmt.Errorf("error running query result handler: %w", err)
}
}
return nil
})
}

// Block until every batch has finished
if err := group.Wait(); err != nil {
return fmt.Errorf("error during multicall query: %w", err)
}

// Return
return nil
}