diff --git a/packages/nx/src/command-line/format.ts b/packages/nx/src/command-line/format.ts
index bf43ba7675044..c4a4ae8799f5d 100644
--- a/packages/nx/src/command-line/format.ts
+++ b/packages/nx/src/command-line/format.ts
@@ -20,6 +20,7 @@ import { createProjectGraphAsync } from '../project-graph/project-graph';
 import { filterAffected } from '../project-graph/affected/affected-project-graph';
 import { readNxJson } from '../config/configuration';
 import { ProjectGraph } from '../config/project-graph';
+import { chunkify } from '../utils/chunkify';
 
 const PRETTIER_PATH = require.resolve('prettier/bin-prettier');
 
@@ -40,7 +41,7 @@ export async function format(
   );
 
   // Chunkify the patterns array to prevent crashing the windows terminal
-  const chunkList: string[][] = chunkify(patterns, 50);
+  const chunkList: string[][] = chunkify(patterns);
 
   switch (command) {
     case 'write':
@@ -143,14 +144,6 @@ function getPatternsFromProjects(
   return getProjectRoots(projects, projectGraph);
 }
 
-function chunkify(target: string[], size: number): string[][] {
-  return target.reduce((current: string[][], value: string, index: number) => {
-    if (index % size === 0) current.push([]);
-    current[current.length - 1].push(value);
-    return current;
-  }, []);
-}
-
 function write(patterns: string[]) {
   if (patterns.length > 0) {
     const [swcrcPatterns, regularPatterns] = patterns.reduce(
diff --git a/packages/nx/src/hasher/git-hasher.ts b/packages/nx/src/hasher/git-hasher.ts
index 29b47bce1568e..1b4a5c8d80095 100644
--- a/packages/nx/src/hasher/git-hasher.ts
+++ b/packages/nx/src/hasher/git-hasher.ts
@@ -1,4 +1,5 @@
 import { spawn } from 'child_process';
+import { chunkify } from '../utils/chunkify';
 import { fileExists } from '../utils/fileutils';
 import { joinPathFragments } from '../utils/path';
 
@@ -12,26 +13,9 @@ export async function getGitHashForFiles(
   );
 
   const res: Map<string, string> = new Map();
-  const promises: Promise<Map<string, string>>[] = [];
-  if (filesToHash.length) {
-    // On windows the max length is limited by the length of
-    // the overall comand, rather than the number of individual
-    // arguments. Since file paths are large and rather variable,
-    // we use a smaller batchSize.
-    const batchSize = process.platform === 'win32' ? 250 : 4000;
-    for (
-      let startIndex = 0;
-      startIndex < filesToHash.length;
-      startIndex += batchSize
-    ) {
-      promises.push(
-        getGitHashForBatch(
-          filesToHash.slice(startIndex, startIndex + batchSize),
-          path
-        )
-      );
-    }
-  }
+  const promises: Promise<Map<string, string>>[] = chunkify(filesToHash).map(
+    (files) => getGitHashForBatch(files, path)
+  );
   // Merge batch results into final result set
   const batchResults = await Promise.all(promises);
   for (const batch of batchResults) {
diff --git a/packages/nx/src/utils/chunkify.spec.ts b/packages/nx/src/utils/chunkify.spec.ts
new file mode 100644
index 0000000000000..20afb9a2273ab
--- /dev/null
+++ b/packages/nx/src/utils/chunkify.spec.ts
@@ -0,0 +1,15 @@
+import { chunkify } from './chunkify';
+
+describe('chunkify', () => {
+  it('should wrap chunks at passed in size', () => {
+    const files = ['aa', 'bb', 'cc', 'dd', 'ee'];
+    expect(chunkify(files, 4)).toHaveLength(5);
+    expect(chunkify(files, 7)).toHaveLength(3);
+    expect(chunkify(files, 16)).toHaveLength(1);
+  });
+
+  it('should contain all items from target', () => {
+    const files = ['aa', 'bb', 'cc', 'dd', 'ee'];
+    expect(chunkify(files, 7).flat()).toHaveLength(5);
+  });
+});
diff --git a/packages/nx/src/utils/chunkify.ts b/packages/nx/src/utils/chunkify.ts
new file mode 100644
index 0000000000000..fdb4cdda8b6e1
--- /dev/null
+++ b/packages/nx/src/utils/chunkify.ts
@@ -0,0 +1,41 @@
+import { execSync } from 'child_process';
+
+const TERMINAL_SIZE =
+  process.platform === 'win32' ? 8192 : getUnixTerminalSize();
+
+export function chunkify(
+  target: string[],
+  maxChunkLength: number = TERMINAL_SIZE - 500
+): string[][] {
+  const chunks = [];
+  let currentChunk = [];
+  let currentChunkLength = 0;
+  for (const file of target) {
+    if (
+      // Prevent empty chunk if first file path is longer than maxChunkLength
+      currentChunk.length &&
+      // +1 accounts for the space between file names
+      currentChunkLength + file.length + 1 >= maxChunkLength
+    ) {
+      chunks.push(currentChunk);
+      currentChunk = [];
+      currentChunkLength = 0;
+    }
+    currentChunk.push(file);
+    currentChunkLength += file.length + 1;
+  }
+  chunks.push(currentChunk);
+  return chunks;
+}
+
+function getUnixTerminalSize() {
+  try {
+    const argMax = execSync('getconf ARG_MAX').toString().trim();
+    return Number.parseInt(argMax);
+  } catch {
+    // This number varies by system, but 100k seems like a safe
+    // number from some research...
+    // https://stackoverflow.com/questions/19354870/bash-command-line-and-input-limit
+    return 100000;
+  }
+}
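
A minimal usage sketch of the new `chunkify` helper, illustrative rather than part of the patch: the default chunk length is derived from TERMINAL_SIZE (8192 on win32, otherwise `getconf ARG_MAX`) minus a 500-character safety margin, so each batch of paths can be handed to a single child process without hitting the platform's command-line length limit. The file list, loop body, and relative import path below are made up for demonstration and assume the example sits next to `packages/nx/src/utils/chunkify.ts`.

```ts
import { chunkify } from './chunkify';

// Hypothetical file list; real callers pass project roots (format.ts) or
// paths to hash (git-hasher.ts).
const files = Array.from(
  { length: 10_000 },
  (_, i) => `apps/demo/src/file-${i}.ts`
);

for (const batch of chunkify(files)) {
  // Each joined batch stays below the default maxChunkLength (unless a
  // single path on its own exceeds it), so spawning one command per batch
  // avoids "command line too long" / E2BIG errors.
  console.log(batch.length, batch.join(' ').length);
}

// An explicit chunk length can also be passed, as the spec file does:
chunkify(['aa', 'bb', 'cc', 'dd', 'ee'], 7);
// => [['aa', 'bb'], ['cc', 'dd'], ['ee']]
```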