-
Notifications
You must be signed in to change notification settings - Fork 2.6k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
BatchMaxQueries #1659
BatchMaxQueries #1659
Changes from 2 commits
9eb223e
4aa4795
0f6d8be
99a2881
b2b02b3
2189f13
485882d
52e18d2
3c906b9
d587866
0c9392b
fcc2da7
6bfbd24
ffdacaf
f30b41a
74239ad
a8ecb34
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -24,20 +24,24 @@ export class QueryBatcher { | |
// Queue on which the QueryBatcher will operate on a per-tick basis. | ||
public queuedRequests: QueryFetchRequest[] = []; | ||
|
||
private batchInterval: Number; | ||
private batchInterval: number; | ||
private batchMax: number; | ||
|
||
//This function is called to send the queries in the queue to the server. | ||
private batchFetchFunction: (request: Request[]) => Promise<ExecutionResult[]>; | ||
|
||
constructor({ | ||
batchInterval, | ||
batchMax, | ||
batchFetchFunction, | ||
}: { | ||
batchInterval: number, | ||
batchMax: number, | ||
batchFetchFunction: (request: Request[]) => Promise<ExecutionResult[]>, | ||
}) { | ||
this.queuedRequests = []; | ||
this.batchInterval = batchInterval; | ||
this.batchMax = batchMax; | ||
this.batchFetchFunction = batchFetchFunction; | ||
} | ||
|
||
|
@@ -62,20 +66,20 @@ export class QueryBatcher { | |
// Consumes the queue. | ||
// Returns a list of promises (one for each query). | ||
public consumeQueue(): (Promise<ExecutionResult> | undefined)[] | undefined { | ||
const requests: Request[] = this.queuedRequests.map( | ||
const queueSlice = this.queuedRequests.splice(0, this.batchMax); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Instead of slicing up when the queue is consumed, it would be better to immediately consume the queue once it reaches a length of `batchMax`. |
||
const requests: Request[] = queueSlice.map( | ||
(queuedRequest) => queuedRequest.request, | ||
); | ||
|
||
const promises: (Promise<ExecutionResult> | undefined)[] = []; | ||
const resolvers: any[] = []; | ||
const rejecters: any[] = []; | ||
this.queuedRequests.forEach((fetchRequest, index) => { | ||
queueSlice.forEach((fetchRequest, index) => { | ||
promises.push(fetchRequest.promise); | ||
resolvers.push(fetchRequest.resolve); | ||
rejecters.push(fetchRequest.reject); | ||
}); | ||
|
||
this.queuedRequests = []; | ||
const batchedPromise = this.batchFetchFunction(requests); | ||
|
||
batchedPromise.then((results) => { | ||
|
@@ -87,6 +91,11 @@ export class QueryBatcher { | |
rejecters[index](error); | ||
}); | ||
}); | ||
|
||
if (this.queuedRequests.length) { | ||
promises.concat(this.consumeQueue()); | ||
} | ||
|
||
return promises; | ||
} | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -16,6 +16,7 @@ describe('QueryBatcher', () => { | |
assert.doesNotThrow(() => { | ||
const querySched = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. If we make it optional, we won't have to add it to every test. This will also ensure that the change is not breaking for current users. |
||
batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), | ||
}); | ||
querySched.consumeQueue(); | ||
|
@@ -25,6 +26,7 @@ describe('QueryBatcher', () => { | |
it('should not do anything when faced with an empty queue', () => { | ||
const batcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), | ||
}); | ||
|
||
|
@@ -36,6 +38,7 @@ describe('QueryBatcher', () => { | |
it('should be able to add to the queue', () => { | ||
const batcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), | ||
}); | ||
|
||
|
@@ -84,6 +87,7 @@ describe('QueryBatcher', () => { | |
); | ||
const batcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: myNetworkInterface.batchQuery.bind(myNetworkInterface), | ||
}); | ||
const request: Request = { | ||
|
@@ -93,6 +97,7 @@ describe('QueryBatcher', () => { | |
it('should be able to consume from a queue containing a single query', (done) => { | ||
const myBatcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: myNetworkInterface.batchQuery.bind(myNetworkInterface), | ||
}); | ||
|
||
|
@@ -123,6 +128,7 @@ describe('QueryBatcher', () => { | |
|
||
const myBatcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: NI.batchQuery.bind(NI), | ||
}); | ||
myBatcher.enqueueRequest(request); | ||
|
@@ -148,6 +154,7 @@ describe('QueryBatcher', () => { | |
); | ||
const myBatcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: NI.batchQuery.bind(NI), | ||
}); | ||
const promise = myBatcher.enqueueRequest(request); | ||
|
@@ -162,6 +169,7 @@ describe('QueryBatcher', () => { | |
it('should work when single query', (done) => { | ||
const batcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), | ||
}); | ||
const query = gql` | ||
|
@@ -185,6 +193,7 @@ describe('QueryBatcher', () => { | |
it('should correctly batch multiple queries', (done) => { | ||
const batcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), | ||
}); | ||
const query = gql` | ||
|
@@ -233,6 +242,7 @@ describe('QueryBatcher', () => { | |
); | ||
const batcher = new QueryBatcher({ | ||
batchInterval: 10, | ||
batchMax: 10, | ||
batchFetchFunction: myNetworkInterface.batchQuery.bind(myNetworkInterface), | ||
}); | ||
const promise = batcher.enqueueRequest(request); | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -2025,6 +2025,7 @@ describe('client', () => { | |
const networkInterface = createBatchingNetworkInterface({ | ||
uri: 'http://not-a-real-url.com', | ||
batchInterval: 5, | ||
batchMax: 5, | ||
opts: {}, | ||
}); | ||
Promise.all([ | ||
|
@@ -2086,6 +2087,7 @@ describe('client', () => { | |
const networkInterface = createBatchingNetworkInterface({ | ||
uri: 'http://not-a-real-url.com', | ||
batchInterval: 5, | ||
batchMax: 5, | ||
opts: {}, | ||
}); | ||
Promise.all([ | ||
|
@@ -2166,6 +2168,7 @@ describe('client', () => { | |
const networkInterface = createBatchingNetworkInterface({ | ||
uri: 'http://not-a-real-url.com', | ||
batchInterval: 5, | ||
batchMax: 5, | ||
opts: {}, | ||
}); | ||
Promise.all([ | ||
|
@@ -2181,6 +2184,101 @@ describe('client', () => { | |
}); | ||
}); | ||
|
||
it('should limit the amount of queries in a batch according to the batchMax value', (done) => { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This test looks good! After making |
||
const firstQuery = gql` | ||
query { | ||
author { | ||
firstName | ||
lastName | ||
} | ||
}`; | ||
const firstResult = { | ||
data: { | ||
author: { | ||
firstName: 'John', | ||
lastName: 'Smith', | ||
}, | ||
}, | ||
loading: false, | ||
}; | ||
const secondQuery = gql` | ||
query { | ||
person { | ||
name | ||
} | ||
}`; | ||
const secondResult = { | ||
data: { | ||
person: { | ||
name: 'Jane Smith', | ||
}, | ||
}, | ||
}; | ||
const url = 'http://not-a-real-url.com'; | ||
|
||
const networkInterface = createBatchingNetworkInterface({ | ||
uri: 'http://fake-url.com', | ||
batchInterval: 5, | ||
batchMax: 1, | ||
opts: {}, | ||
}); | ||
|
||
const oldFetch = fetch; | ||
fetch = createMockFetch({ | ||
url, | ||
opts: { | ||
body: JSON.stringify([ | ||
{ | ||
query: print(firstQuery), | ||
}, | ||
]), | ||
headers: { | ||
Accept: '*/*', | ||
'Content-Type': 'application/json', | ||
}, | ||
method: 'POST', | ||
}, | ||
result: createMockedIResponse([firstResult]), | ||
}); | ||
|
||
const firstFetch = networkInterface.query({ query: firstQuery }); | ||
firstFetch.then((results) => { | ||
console.log('first results', results); | ||
assert.deepEqual(results, [firstResult]); | ||
fetch = oldFetch; | ||
}).catch( e => { | ||
console.error(e); | ||
}); | ||
|
||
fetch = createMockFetch({ | ||
url, | ||
opts: { | ||
body: JSON.stringify([ | ||
{ | ||
query: print(secondQuery), | ||
}, | ||
]), | ||
headers: { | ||
Accept: '*/*', | ||
'Content-Type': 'application/json', | ||
}, | ||
method: 'POST', | ||
}, | ||
result: createMockedIResponse([secondResult]), | ||
}); | ||
|
||
const secondFetch = networkInterface.query({ query: secondQuery }); | ||
secondFetch.then((results) => { | ||
console.log('second results', results); | ||
assert.deepEqual(results, [secondResult]); | ||
fetch = oldFetch; | ||
}).catch( e => { | ||
console.error(e); | ||
}); | ||
|
||
done(); | ||
}); | ||
|
||
it('should enable dev tools logging', () => { | ||
const query = gql` | ||
query people { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Let's make `batchMax` optional, and set it to 0 by default, in which case it should behave the same way it did before this PR.