Skip to content

Commit

Permalink
Temporarily revert "Merge pull request #9248 from PowerKiKi/batch-cancel"
Browse files Browse the repository at this point in the history

This reverts commit cefd24c, reversing
changes made to d98f1de.

I plan to publish this version in the next v3.7.0 beta release, then
immediately reinstate most of this functionality, with a narrower
attempt to solve issue #9773 in a follow-up beta release, to allow folks
to compare the behavior of those two versions.
  • Loading branch information
benjamn committed Jun 7, 2022
1 parent b4b2eef commit 0e68db1
Show file tree
Hide file tree
Showing 3 changed files with 96 additions and 278 deletions.
2 changes: 1 addition & 1 deletion src/link/batch-http/__tests__/batchHttpLink.ts
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ describe('BatchHttpLink', () => {
},
batchInterval: 1,
//if batchKey does not work, then the batch size would be 3
batchMax: 2,
batchMax: 3,
batchKey,
}),
]);
Expand Down
217 changes: 27 additions & 190 deletions src/link/batch/__tests__/batchLink.ts
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
import gql from 'graphql-tag';
import { print } from 'graphql';

import { ApolloLink, execute } from '../../core';
import { ApolloLink } from '../../core/ApolloLink';
import { execute } from '../../core/execute';
import { Operation, FetchResult, GraphQLRequest } from '../../core/types';
import { Observable } from '../../../utilities';
import { itAsync } from '../../../testing';
import { Observable } from '../../../utilities/observables/Observable';
import {
BatchLink,
OperationBatcher,
BatchHandler,
BatchableRequest,
} from '../batchLink';
import { itAsync } from '../../../testing';

interface MockedResponse {
request: GraphQLRequest;
Expand All @@ -26,7 +27,7 @@ function getKey(operation: GraphQLRequest) {
return JSON.stringify([operationName, query, variables]);
}

function createOperation(
export function createOperation(
starting: any,
operation: GraphQLRequest,
): Operation {
Expand Down Expand Up @@ -157,11 +158,11 @@ describe('OperationBatcher', () => {
batchKey: () => 'yo',
});

expect(batcher["batchesByKey"].get('')).toBeUndefined();
expect(batcher["batchesByKey"].get('yo')).toBeUndefined();
expect(batcher.queuedRequests.get('')).toBeUndefined();
expect(batcher.queuedRequests.get('yo')).toBeUndefined();
batcher.consumeQueue();
expect(batcher["batchesByKey"].get('')).toBeUndefined();
expect(batcher["batchesByKey"].get('yo')).toBeUndefined();
expect(batcher.queuedRequests.get('')).toBeUndefined();
expect(batcher.queuedRequests.get('yo')).toBeUndefined();
});

it('should be able to add to the queue', () => {
Expand All @@ -185,11 +186,11 @@ describe('OperationBatcher', () => {
operation: createOperation({}, { query }),
};

expect(batcher["batchesByKey"].get('')).toBeUndefined();
expect(batcher.queuedRequests.get('')).toBeUndefined();
batcher.enqueueRequest(request).subscribe({});
expect(batcher["batchesByKey"].get('')!.size).toBe(1);
expect(batcher.queuedRequests.get('')!.length).toBe(1);
batcher.enqueueRequest(request).subscribe({});
expect(batcher["batchesByKey"].get('')!.size).toBe(2);
expect(batcher.queuedRequests.get('')!.length).toBe(2);
});

describe('request queue', () => {
Expand Down Expand Up @@ -232,7 +233,7 @@ describe('OperationBatcher', () => {

myBatcher.enqueueRequest({ operation }).subscribe(
terminatingCheck(resolve, reject, (resultObj: any) => {
expect(myBatcher["batchesByKey"].get('')).toBeUndefined();
expect(myBatcher.queuedRequests.get('')).toBeUndefined();
expect(resultObj).toEqual({ data });
}),
);
Expand Down Expand Up @@ -303,11 +304,11 @@ describe('OperationBatcher', () => {
});

try {
expect(myBatcher["batchesByKey"].get('')!.size).toBe(2);
expect(myBatcher.queuedRequests.get('')!.length).toBe(2);
const observables: (
| Observable<FetchResult>
| undefined)[] = myBatcher.consumeQueue()!;
expect(myBatcher["batchesByKey"].get('')).toBeUndefined();
expect(myBatcher.queuedRequests.get('')).toBeUndefined();
expect(observables.length).toBe(2);
} catch (e) {
reject(e);
Expand Down Expand Up @@ -345,27 +346,27 @@ describe('OperationBatcher', () => {
myBatcher.enqueueRequest({ operation }).subscribe({});
myBatcher.enqueueRequest({ operation }).subscribe({});
myBatcher.enqueueRequest({ operation }).subscribe({});
expect(myBatcher["batchesByKey"].get('')!.size).toEqual(3);
expect(myBatcher.queuedRequests.get('')!.length).toEqual(3);

// 2. Run the timer halfway.
jest.advanceTimersByTime(batchInterval / 2);
expect(myBatcher["batchesByKey"].get('')!.size).toEqual(3);
expect(myBatcher.queuedRequests.get('')!.length).toEqual(3);

// 3. Queue a 4th request, causing the timer to reset.
myBatcher.enqueueRequest({ operation }).subscribe({});
expect(myBatcher["batchesByKey"].get('')!.size).toEqual(4);
expect(myBatcher.queuedRequests.get('')!.length).toEqual(4);

// 4. Run the timer to batchInterval + 1, at this point, if debounce were
// not set, the original 3 requests would have fired, but we expect
// instead that the queries will instead fire at
// (batchInterval + batchInterval / 2).
jest.advanceTimersByTime(batchInterval / 2 + 1);
expect(myBatcher["batchesByKey"].get('')!.size).toEqual(4);
expect(myBatcher.queuedRequests.get('')!.length).toEqual(4);

// 5. Finally, run the timer to (batchInterval + batchInterval / 2) +1,
// and expect the queue to be empty.
jest.advanceTimersByTime(batchInterval / 2);
expect(myBatcher["batchesByKey"].size).toEqual(0);
expect(myBatcher.queuedRequests.size).toEqual(0);
resolve();
});
});
Expand Down Expand Up @@ -395,130 +396,19 @@ describe('OperationBatcher', () => {

batcher.enqueueRequest({ operation }).subscribe({});
try {
expect(batcher["batchesByKey"].get('')!.size).toBe(1);
expect(batcher.queuedRequests.get('')!.length).toBe(1);
} catch (e) {
reject(e);
}

setTimeout(
terminatingCheck(resolve, reject, () => {
expect(batcher["batchesByKey"].get('')).toBeUndefined();
expect(batcher.queuedRequests.get('')).toBeUndefined();
}),
20,
);
});

itAsync('should cancel single query in queue when unsubscribing', (resolve, reject) => {
const data = {
lastName: 'Ever',
firstName: 'Greatest',
};

const batcher = new OperationBatcher({
batchInterval: 10,
batchHandler: () =>
new Observable(observer => {
observer.next([{data}]);
setTimeout(observer.complete.bind(observer));
}),
});

const query = gql`
query {
author {
firstName
lastName
}
}
`;

batcher.enqueueRequest({
operation: createOperation({}, { query }),
}).subscribe(() => reject('next should never be called')).unsubscribe();

expect(batcher["batchesByKey"].get('')).toBeUndefined();
resolve();
});

itAsync('should cancel single query in queue with multiple subscriptions', (resolve, reject) => {
const data = {
lastName: 'Ever',
firstName: 'Greatest',
};
const batcher = new OperationBatcher({
batchInterval: 10,
batchHandler: () =>
new Observable(observer => {
observer.next([{data}]);
setTimeout(observer.complete.bind(observer));
}),
});
const query = gql`
query {
author {
firstName
lastName
}
}
`;
const operation: Operation = createOperation({}, {query});

const observable = batcher.enqueueRequest({operation});

const checkQueuedRequests = (
expectedSubscriberCount: number,
) => {
const batch = batcher["batchesByKey"].get('');
expect(batch).not.toBeUndefined();
expect(batch!.size).toBe(1);
batch!.forEach(request => {
expect(request.subscribers.size).toBe(expectedSubscriberCount);
});
};

const sub1 = observable.subscribe(() => reject('next should never be called'));
checkQueuedRequests(1);

const sub2 = observable.subscribe(() => reject('next should never be called'));
checkQueuedRequests(2);

sub1.unsubscribe();
checkQueuedRequests(1);

sub2.unsubscribe();
expect(batcher["batchesByKey"].get('')).toBeUndefined();
resolve();
});

itAsync('should cancel single query in flight when unsubscribing', (resolve, reject) => {
const batcher = new OperationBatcher({
batchInterval: 10,
batchHandler: () =>
new Observable(() => {
// Instead of typically starting an XHR, we trigger the unsubscription from outside
setTimeout(() => subscription?.unsubscribe(), 5);

return () => {
expect(batcher["batchesByKey"].get('')).toBeUndefined();
resolve();
};
}),
});

const query = gql`
query {
author {
firstName
lastName
}
}
`;

const subscription = batcher.enqueueRequest({
operation: createOperation({}, { query }),
}).subscribe(() => reject('next should never be called'));
});

itAsync('should correctly batch multiple queries', (resolve, reject) => {
const data = {
lastName: 'Ever',
Expand Down Expand Up @@ -551,7 +441,7 @@ describe('OperationBatcher', () => {
batcher.enqueueRequest({ operation }).subscribe({});
batcher.enqueueRequest({ operation: operation2 }).subscribe({});
try {
expect(batcher["batchesByKey"].get('')!.size).toBe(2);
expect(batcher.queuedRequests.get('')!.length).toBe(2);
} catch (e) {
reject(e);
}
Expand All @@ -560,7 +450,7 @@ describe('OperationBatcher', () => {
// The batch shouldn't be fired yet, so we can add one more request.
batcher.enqueueRequest({ operation: operation3 }).subscribe({});
try {
expect(batcher["batchesByKey"].get('')!.size).toBe(3);
expect(batcher.queuedRequests.get('')!.length).toBe(3);
} catch (e) {
reject(e);
}
Expand All @@ -569,65 +459,12 @@ describe('OperationBatcher', () => {
setTimeout(
terminatingCheck(resolve, reject, () => {
// The batch should've been fired by now.
expect(batcher["batchesByKey"].get('')).toBeUndefined();
expect(batcher.queuedRequests.get('')).toBeUndefined();
}),
20,
);
});

itAsync('should cancel multiple queries in queue when unsubscribing and let pass still subscribed one', (resolve, reject) => {
const data2 = {
lastName: 'Hauser',
firstName: 'Evans',
};

const batcher = new OperationBatcher({
batchInterval: 10,
batchHandler: () =>
new Observable(observer => {
observer.next([{ data: data2 }]);
setTimeout(observer.complete.bind(observer));
}),
});

const query = gql`
query {
author {
firstName
lastName
}
}
`;

const operation: Operation = createOperation({}, {query});
const operation2: Operation = createOperation({}, {query});
const operation3: Operation = createOperation({}, {query});

const sub1 = batcher.enqueueRequest({operation}).subscribe(() => reject('next should never be called'));
batcher.enqueueRequest({operation: operation2}).subscribe(result => {
expect(result.data).toBe(data2);

// The batch should've been fired by now.
expect(batcher["batchesByKey"].get('')).toBeUndefined();

resolve();
});

expect(batcher["batchesByKey"].get('')!.size).toBe(2);

sub1.unsubscribe();
expect(batcher["batchesByKey"].get('')!.size).toBe(1);

setTimeout(() => {
// The batch shouldn't be fired yet, so we can add one more request.
const sub3 = batcher.enqueueRequest({operation: operation3}).subscribe(() => reject('next should never be called'));
expect(batcher["batchesByKey"].get('')!.size).toBe(2);

sub3.unsubscribe();
expect(batcher["batchesByKey"].get('')!.size).toBe(1);
}, 5);
});

itAsync('should reject the promise if there is a network error', (resolve, reject) => {
const query = gql`
query {
Expand Down Expand Up @@ -928,14 +765,14 @@ describe('BatchLink', () => {
new BatchLink({
batchInterval: 1,
//if batchKey does not work, then the batch size would be 3
batchMax: 2,
batchMax: 3,
batchHandler,
batchKey,
}),
]);

let count = 0;
[1, 2, 3, 4].forEach(() => {
[1, 2, 3, 4].forEach(x => {
execute(link, {
query,
}).subscribe({
Expand Down
Loading

0 comments on commit 0e68db1

Please sign in to comment.