fix(preview): chunk document availability ids on id length
The previous approach tried to avoid requesting too many IDs at once by chunking
the IDs into groups of 300. For shorter IDs this was acceptable, but with long
document IDs the limit enforced by the backend can be reached much sooner.

This commit changes the algorithm to account for the length of the IDs, aiming to
stay within ~11 kB per chunk. This is the same limit @sanity/client uses for its
query string, so it should work with older browsers and with the backend while
leaving headroom for a potentially large set of request headers.
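To make the size problem concrete: a draft ID such as drafts. plus a UUID is around 43 bytes, so 300 of them joined with commas is roughly 13 kB, already past the ~11 kB budget. Below is a minimal sketch (hypothetical IDs, assuming the chunkDocumentIds helper added in the diff further down) of how the new length-based chunking behaves:

// Illustrative only - the IDs and counts here are made up for the example
const longIds = Array.from(
  {length: 900},
  (_, i) => `drafts.01234567-89ab-cdef-0123-${String(i).padStart(12, '0')}`
)

const chunks = chunkDocumentIds(longIds)

for (const ids of chunks) {
  // Each chunk joins to fewer than 11164 bytes (roughly 253 of these 43-byte IDs per chunk)
  console.log(ids.length, ids.join(',').length)
}

Under the old chunk(uniqueIds, 300) approach the same 900 IDs would have produced three groups of about 13 kB each, every one of them over the new budget.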
rexxars committed Aug 14, 2022
1 parent 4dac479 commit d64aefd
Showing 1 changed file with 39 additions and 2 deletions.
41 changes: 39 additions & 2 deletions packages/sanity/src/preview/availability.ts
@@ -4,7 +4,7 @@ import type {SanityClient} from '@sanity/client'
import {combineLatest, defer, from, Observable, of} from 'rxjs'
import {distinctUntilChanged, map, mergeMap, switchMap} from 'rxjs/operators'
import shallowEquals from 'shallow-equals'
-import {chunk, flatten, keyBy} from 'lodash'
+import {flatten, keyBy} from 'lodash'
import {isRecord} from '../util/isRecord'
import {getDraftId, getPublishedId} from '../util/draftUtils'
import type {
@@ -21,6 +21,43 @@ import {
} from './constants'
import {ObservePathsFn} from './types'

const MAX_DOCUMENT_ID_CHUNK_SIZE = 11164

/**
 * Takes an array of document IDs and puts them into individual chunks.
 * Because document IDs can vary greatly in size, we want to chunk by the length of the
 * combined comma-separated ID set. We try to stay within 11164 bytes - this is about the
 * same length the Sanity client uses for max query size, and accounts for rather large
 * headers to be present - so this _should_ be safe.
 *
 * @param documentIds - Unique document IDs to chunk
 * @returns Array of document ID chunks
 */
function chunkDocumentIds(documentIds: string[]): string[][] {
  let chunk: string[] = []
  let chunkSize = 0

  const chunks: string[][] = []

  for (const documentId of documentIds) {
    // Reached the max length? Start a new chunk
    if (chunkSize + documentId.length + 1 >= MAX_DOCUMENT_ID_CHUNK_SIZE) {
      chunks.push(chunk)
      chunk = []
      chunkSize = 0
    }

    chunkSize += documentId.length + 1 // +1 is to account for a comma between IDs
    chunk.push(documentId)
  }

  // Push the final, partially filled chunk unless it has already been added
  if (!chunks.includes(chunk)) {
    chunks.push(chunk)
  }

  return chunks
}

export function create_preview_availability(
  versionedClient: SanityClient,
  observePaths: ObservePathsFn
@@ -76,7 +113,7 @@ export function create_preview_availability(
  args: string[][]
): Observable<DocumentAvailability[]> {
  const uniqueIds = [...new Set(flatten(args))]
-  return from(chunk(uniqueIds, 300)).pipe(
+  return from(chunkDocumentIds(uniqueIds)).pipe(
    mergeMap(fetchDocumentReadabilityChunked, 10),
    map((res) => args.map(([id]) => res[uniqueIds.indexOf(id)]))
  )
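A side note on the call site above: mergeMap(fetchDocumentReadabilityChunked, 10) caps the number of chunked requests in flight at ten. Here is a rough sketch of that fan-out pattern, using a stubbed fetcher since fetchDocumentReadabilityChunked's implementation lies outside this diff:

import {from, of, lastValueFrom} from 'rxjs'
import {delay, mergeMap, toArray} from 'rxjs/operators'

// Hypothetical stand-in for fetchDocumentReadabilityChunked (not shown in this diff);
// the delay simulates a network round trip
function fetchChunkStub(ids: string[]) {
  return of(ids.map((id) => ({id, available: true}))).pipe(delay(100))
}

async function checkAllChunks(chunks: string[][]) {
  // At most 10 chunk requests run concurrently; each emission is one chunk's result
  const perChunk = await lastValueFrom(
    from(chunks).pipe(
      mergeMap((ids) => fetchChunkStub(ids), 10),
      toArray()
    )
  )
  // Flatten the per-chunk results back into a single list
  return perChunk.flat()
}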
