Support batchDebounce option for BatchLink and BatchHttpLink (#8024)
dannycochran committed Apr 30, 2021
1 parent 03a4503 commit 7f37666
Showing 5 changed files with 72 additions and 23 deletions.
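For context, a minimal usage sketch of the new option follows; it is not part of the commit, and the endpoint URL and option values are illustrative:

```ts
import { ApolloClient, InMemoryCache } from '@apollo/client';
import { BatchHttpLink } from '@apollo/client/link/batch-http';

// With batchDebounce enabled, each newly enqueued operation restarts the
// batchInterval timer, so a batch is dispatched only after 50ms of quiet
// rather than on a fixed 50ms cadence.
const link = new BatchHttpLink({
  uri: 'https://example.com/graphql', // illustrative endpoint
  batchDebounce: true,
  batchInterval: 50,
  batchMax: 10,
});

const client = new ApolloClient({ link, cache: new InMemoryCache() });
```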
6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -1,12 +1,16 @@
## Apollo Client 3.3.16 (to be released)

### Bug fixes
- Prevent `undefined` mutation result in useMutation <br/>

- Prevent `undefined` mutation result in `useMutation`. <br/>
[@jcreighton](https://github.com/jcreighton) in [#8018](https://github.com/apollographql/apollo-client/pull/8018)

- Fix `useReactiveVar` not rerendering for successive synchronous calls. <br/>
[@brainkim](https://github.com/brainkim) in [#8022](https://github.com/apollographql/apollo-client/pull/8022)

- Support `batchDebounce` option for `BatchLink` and `BatchHttpLink`. <br/>
[@dannycochran](https://github.com/dannycochran) in [#8024](https://github.com/apollographql/apollo-client/pull/8024)

## Apollo Client 3.3.15

### Bug fixes
28 changes: 8 additions & 20 deletions src/link/batch-http/batchHttpLink.ts
@@ -14,33 +14,18 @@ import {
import { BatchLink } from '../batch';

export namespace BatchHttpLink {
export interface Options extends HttpOptions {
/**
* The maximum number of operations to include in one fetch.
*
* Defaults to 10.
*/
batchMax?: number;

/**
* The interval at which to batch, in milliseconds.
*
* Defaults to 10.
*/
batchInterval?: number;

/**
* Sets the key for an Operation, which specifies the batch an operation is included in
*/
batchKey?: (operation: Operation) => string;
}
export type Options = Pick<
BatchLink.Options,
'batchMax' | 'batchDebounce' | 'batchInterval' | 'batchKey'
> & HttpOptions;
}

/**
* Transforms an Operation into an HTTP fetch result.
* The context can include the headers property, which will be passed to the fetch function.
*/
export class BatchHttpLink extends ApolloLink {
private batchDebounce?: boolean;
private batchInterval: number;
private batchMax: number;
private batcher: ApolloLink;
@@ -54,6 +39,7 @@ export class BatchHttpLink extends ApolloLink {
fetch: fetcher,
includeExtensions,
batchInterval,
batchDebounce,
batchMax,
batchKey,
...requestOptions
@@ -76,6 +62,7 @@ export class BatchHttpLink extends ApolloLink {
headers: requestOptions.headers,
};

this.batchDebounce = batchDebounce;
this.batchInterval = batchInterval || 10;
this.batchMax = batchMax || 10;

@@ -219,6 +206,7 @@ export class BatchHttpLink extends ApolloLink {
});

this.batcher = new BatchLink({
batchDebounce: this.batchDebounce,
batchInterval: this.batchInterval,
batchMax: this.batchMax,
batchKey,
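In other words, the batching-related options are now declared once on `BatchLink.Options` and reused here. Expanded by hand (doc comments omitted), the new `BatchHttpLink.Options` is roughly equivalent to the following sketch:

```ts
import { Operation } from '@apollo/client';
import { HttpOptions } from '@apollo/client/link/http';

// Approximate expansion of Pick<BatchLink.Options, ...> & HttpOptions.
// Defaults applied in the constructor above: batchInterval 10ms, batchMax 10.
type BatchHttpLinkOptions = {
  batchMax?: number;
  batchDebounce?: boolean;
  batchInterval?: number;
  batchKey?: (operation: Operation) => string;
} & HttpOptions;
```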
39 changes: 39 additions & 0 deletions src/link/batch/__tests__/batchLink.ts
@@ -132,6 +132,8 @@ function createMockBatchHandler(...mockedResponses: MockedResponse[]) {
}

describe('OperationBatcher', () => {
afterEach(() => jest.useRealTimers());

it('should construct', () => {
expect(() => {
const querySched = new OperationBatcher({
@@ -325,6 +327,43 @@ describe('OperationBatcher', () => {
);
myBatcher.consumeQueue();
});

it('should be able to debounce requests', done => {
jest.useFakeTimers();
const batchInterval = 10;
const myBatcher = new OperationBatcher({
batchDebounce: true,
batchInterval,
batchHandler,
});

// 1. Queue up 3 requests
myBatcher.enqueueRequest({ operation }).subscribe({});
myBatcher.enqueueRequest({ operation }).subscribe({});
myBatcher.enqueueRequest({ operation }).subscribe({});
expect(myBatcher.queuedRequests.get('')!.length).toEqual(3);

// 2. Run the timer halfway.
jest.runTimersToTime(batchInterval / 2);
expect(myBatcher.queuedRequests.get('')!.length).toEqual(3);

// 3. Queue a 4th request, causing the timer to reset.
myBatcher.enqueueRequest({ operation }).subscribe({});
expect(myBatcher.queuedRequests.get('')!.length).toEqual(4);

// 4. Run the timer to batchInterval + 1. At this point, if debounce were
// not set, the original 3 requests would have fired; instead, we expect
// the queries to fire at (batchInterval + batchInterval / 2).
jest.runTimersToTime(batchInterval / 2 + 1);
expect(myBatcher.queuedRequests.get('')!.length).toEqual(4);

// 5. Finally, run the timer to (batchInterval + batchInterval / 2) + 1,
// and expect the queue to be empty.
jest.runTimersToTime(batchInterval / 2);
expect(myBatcher.queuedRequests.size).toEqual(0);
done();
});
});

it('should work when single query', done => {
10 changes: 10 additions & 0 deletions src/link/batch/batchLink.ts
@@ -3,6 +3,7 @@ import { Observable } from '../../utilities';
import { OperationBatcher, BatchHandler } from './batching';
export { OperationBatcher, BatchableRequest, BatchHandler } from './batching';


export namespace BatchLink {
export interface Options {
/**
@@ -12,6 +13,13 @@ export namespace BatchLink {
*/
batchInterval?: number;

/**
* "batchInterval" is a throttling behavior by default, if you instead wish
* to debounce outbound requests, set "batchDebounce" to true. More useful
* for mutations than queries.
*/
batchDebounce?: boolean;

/**
* The maximum number of operations to include in one fetch.
*
@@ -38,13 +46,15 @@ export class BatchLink extends ApolloLink {
super();

const {
batchDebounce,
batchInterval = 10,
batchMax = 0,
batchHandler = () => null,
batchKey = () => '',
} = fetchParams || {};

this.batcher = new OperationBatcher({
batchDebounce,
batchInterval,
batchMax,
batchHandler,
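To make the throttle-versus-debounce distinction concrete, here is a small sketch using `BatchLink` directly; it is not part of the commit, and the no-op `batchHandler` is purely illustrative (a real handler would forward the batch to a terminating link):

```ts
import { Observable, FetchResult } from '@apollo/client';
import { BatchLink } from '@apollo/client/link/batch';

// Throttle (default): operations queued during each 20ms window are flushed
// together every 20ms while traffic keeps arriving.
// Debounce (batchDebounce: true): every enqueue restarts the 20ms timer, so
// the batch is flushed only once no new operations arrive for 20ms.
const batchLink = new BatchLink({
  batchInterval: 20,
  batchDebounce: true,
  batchHandler: (operations) =>
    new Observable<FetchResult[]>((observer) => {
      // No-op handler: resolve each operation with an empty result.
      observer.next(operations.map(() => ({ data: {} })));
      observer.complete();
    }),
});
```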
12 changes: 10 additions & 2 deletions src/link/batch/batching.ts
@@ -27,6 +27,8 @@ export class OperationBatcher {
// Public only for testing
public queuedRequests: Map<string, BatchableRequest[]>;

private scheduledBatchTimer: ReturnType<typeof setTimeout>;
private batchDebounce?: boolean;
private batchInterval?: number;
private batchMax: number;

@@ -35,17 +37,20 @@
private batchKey: (operation: Operation) => string;

constructor({
batchDebounce,
batchInterval,
batchMax,
batchHandler,
batchKey,
}: {
batchDebounce?: boolean;
batchInterval?: number;
batchMax?: number;
batchHandler: BatchHandler;
batchKey?: (operation: Operation) => string;
}) {
this.queuedRequests = new Map();
this.batchDebounce = batchDebounce;
this.batchInterval = batchInterval;
this.batchMax = batchMax || 0;
this.batchHandler = batchHandler;
@@ -86,6 +91,9 @@ export class OperationBatcher {
// The first enqueued request triggers the queue consumption after `batchInterval` milliseconds.
if (this.queuedRequests.get(key)!.length === 1) {
this.scheduleQueueConsumption(key);
} else if (this.batchDebounce) {
clearTimeout(this.scheduledBatchTimer);
this.scheduleQueueConsumption(key);
}

// When the number of requests reaches `batchMax`, trigger the queue consumption without waiting on the `batchInterval`.
@@ -183,13 +191,13 @@ export class OperationBatcher {

private scheduleQueueConsumption(key?: string): void {
const requestKey = key || '';
setTimeout(() => {
this.scheduledBatchTimer = (setTimeout(() => {
if (
this.queuedRequests.get(requestKey) &&
this.queuedRequests.get(requestKey)!.length
) {
this.consumeQueue(requestKey);
}
}, this.batchInterval);
}, this.batchInterval));
}
}
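The heart of the change is the clear-and-reschedule pattern in `enqueueRequest`/`scheduleQueueConsumption` above. As a standalone illustration (hypothetical names, not part of the commit), the same debounce idea looks like this:

```ts
// Minimal debounced batcher mirroring the clearTimeout/reschedule pattern
// used by OperationBatcher when batchDebounce is set.
class DebouncedBatcher<T> {
  private queue: T[] = [];
  private timer?: ReturnType<typeof setTimeout>;

  constructor(
    private intervalMs: number,
    private flush: (items: T[]) => void,
  ) {}

  enqueue(item: T): void {
    this.queue.push(item);
    // Debounce: each new item cancels the pending flush and restarts the timer.
    if (this.timer !== undefined) clearTimeout(this.timer);
    this.timer = setTimeout(() => {
      const items = this.queue;
      this.queue = [];
      this.timer = undefined;
      this.flush(items);
    }, this.intervalMs);
  }
}

// Usage sketch: with three rapid enqueues, flush runs once with all three items.
const batcher = new DebouncedBatcher<string>(10, (items) => console.log(items));
batcher.enqueue('a');
batcher.enqueue('b');
batcher.enqueue('c');
```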
