From 28069799d359ecb6fa376d08710747b4ae7979b4 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Wed, 26 Jun 2024 16:50:35 +0700
Subject: [PATCH] feat: batch hash using as-sha256

---
 packages/as-sha256/src/index.ts               |  29 +++
 packages/as-sha256/test/unit/simd.test.ts     |  19 ++
 packages/persistent-merkle-tree/package.json  |   6 -
 .../src/hasher/as-sha256.ts                   |  36 +++-
 .../src/hasher/hashtree.ts                    | 179 ------------------
 .../src/hasher/index.ts                       |   8 +-
 .../test/unit/hasher.test.ts                  |  25 ++-
 setHasher.mjs                                 |   2 +-
 yarn.lock                                     |  43 +++--
 9 files changed, 130 insertions(+), 217 deletions(-)
 delete mode 100644 packages/persistent-merkle-tree/src/hasher/hashtree.ts

diff --git a/packages/as-sha256/src/index.ts b/packages/as-sha256/src/index.ts
index 867aec60..d7c022bc 100644
--- a/packages/as-sha256/src/index.ts
+++ b/packages/as-sha256/src/index.ts
@@ -235,6 +235,35 @@ export function batchHash4HashObjectInputs(inputs: HashObject[]): HashObject[] {
   return [output0, output1, output2, output3];
 }
 
+/**
+ * Hash an input into a preallocated output using batch hashing if possible.
+ */
+export function hashInto(input: Uint8Array, output: Uint8Array): void {
+  if (input.length % 64 !== 0) {
+    throw new Error(`Invalid input length ${input.length}`);
+  }
+  if (input.length !== output.length * 2) {
+    throw new Error(`Invalid output length ${output.length}`);
+  }
+  // for every 64 x 4 = 256 bytes, do the batch hash
+  const endBatch = Math.floor(input.length / 256);
+  for (let i = 0; i < endBatch; i++) {
+    inputUint8Array.set(input.subarray(i * 256, (i + 1) * 256), 0);
+    ctx.batchHash4UintArray64s(wasmOutputValue);
+    output.set(outputUint8Array.subarray(0, 128), i * 128);
+  }
+
+  const numHashed = endBatch * 4;
+  const remainingHash = Math.floor((input.length % 256) / 64);
+  const inputOffset = numHashed * 64;
+  const outputOffset = numHashed * 32;
+  for (let i = 0; i < remainingHash; i++) {
+    inputUint8Array.set(input.subarray(inputOffset + i * 64, inputOffset + (i + 1) * 64), 0);
+    ctx.digest64(wasmInputValue, wasmOutputValue);
+    output.set(outputUint8Array.subarray(0, 32), outputOffset + i * 32);
+  }
+}
+
 function update(data: Uint8Array): void {
   const INPUT_LENGTH = ctx.INPUT_LENGTH;
   if (data.length > INPUT_LENGTH) {
diff --git a/packages/as-sha256/test/unit/simd.test.ts b/packages/as-sha256/test/unit/simd.test.ts
index 10e76b83..4f035668 100644
--- a/packages/as-sha256/test/unit/simd.test.ts
+++ b/packages/as-sha256/test/unit/simd.test.ts
@@ -44,4 +44,23 @@ describe("Test SIMD implementation of as-sha256", () => {
       expect(output).to.be.deep.equal(expectedOutput, "incorrect batchHash4UintArray64s result " + i);
     }
   });
+
+  const numHashes = [4, 5, 6, 7];
+  for (const numHash of numHashes) {
+    it(`hashInto ${numHash} hashes`, () => {
+      const inputs = Array.from({length: numHash}, () => crypto.randomBytes(64));
+      const input = new Uint8Array(numHash * 64);
+      for (let i = 0; i < numHash; i++) {
+        input.set(inputs[i], i * 64);
+      }
+      const output = new Uint8Array(numHash * 32);
+
+      sha256.hashInto(input, output);
+
+      const expectedOutputs = Array.from({length: numHash}, (_, i) => sha256.digest64(inputs[i]));
+      for (let i = 0; i < numHash; i++) {
+        expect(output.subarray(i * 32, (i + 1) * 32)).to.be.deep.equal(expectedOutputs[i]);
+      }
+    });
+  }
 });
diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json
index f47fb9ab..735f2642 100644
--- a/packages/persistent-merkle-tree/package.json
+++ b/packages/persistent-merkle-tree/package.json
@@ -46,12 +46,6 @@
   "homepage": "https://github.com/ChainSafe/persistent-merkle-tree#readme",
"https://github.com/ChainSafe/persistent-merkle-tree#readme", "dependencies": { "@chainsafe/as-sha256": "0.4.2", - "@chainsafe/hashtree": "1.0.0", "@noble/hashes": "^1.3.0" - }, - "peerDependencies": { - "@chainsafe/hashtree-linux-x64-gnu": "1.0.0", - "@chainsafe/hashtree-linux-arm64-gnu": "1.0.0", - "@chainsafe/hashtree-darwin-arm64": "1.0.0" } } diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 670f096b..bb589b86 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,13 +1,45 @@ -import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs} from "@chainsafe/as-sha256"; +import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs, hashInto} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; +// each validator needs to digest 8 chunks of 32 bytes = 4 hashes +// support up to 4 validators +const MAX_HASH = 16; +const MAX_INPUT_SIZE = MAX_HASH * 64; +const buffer = new Uint8Array(MAX_INPUT_SIZE); + export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects, + // given nLevel = 3 + // digest multiple of 8 chunks = 256 bytes + // the result is multiple of 1 chunk = 32 bytes + // this is the same to hashTreeRoot() of multiple validators digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { - throw new Error("Not implemented"); + let inputLength = data.length; + const bytesInBatch = Math.pow(2, nLevel) * 32; + if (nLevel < 1) { + throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`); + } + if (inputLength % bytesInBatch !== 0) { + throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`); + } + if (inputLength > MAX_INPUT_SIZE) { + throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); + } + + buffer.set(data, 0); + for (let i = nLevel; i > 0; i--) { + const outputLength = Math.floor(inputLength / 2); + const hashInput = buffer.subarray(0, inputLength); + const hashOutput = buffer.subarray(0, outputLength); + hashInto(hashInput, hashOutput); + inputLength = outputLength + } + + // the result is unsafe as it will be modified later, consumer should save the result if needed + return buffer.subarray(0, inputLength); }, batchHashObjects: (inputs: HashObject[]) => { // as-sha256 uses SIMD for batch hash diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts deleted file mode 100644 index 39b9dcd6..00000000 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ /dev/null @@ -1,179 +0,0 @@ -import {hash, hashInto} from "@chainsafe/hashtree"; -import {Hasher, HashObject} from "./types"; -import {HashComputation, Node} from "../node"; -import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; - -/** - * Best SIMD implementation is in 512 bits = 64 bytes - * If not, hashtree will make a loop inside - * Given sha256 operates on a block of 4 bytes, we can hash 16 inputs at once - * Each input is 64 bytes - */ -const PARALLEL_FACTOR = 16; -const MAX_INPUT_SIZE = PARALLEL_FACTOR * 64; -const uint8Input = new Uint8Array(MAX_INPUT_SIZE); -const uint32Input = new Uint32Array(uint8Input.buffer); -const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32); 
-const uint32Output = new Uint32Array(uint8Output.buffer);
-
-
-export const hasher: Hasher = {
-  name: "hashtree",
-  digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array {
-    if (obj1.length !== 32 || obj2.length !== 32) {
-      throw new Error("Invalid input length");
-    }
-    uint8Input.set(obj1, 0);
-    uint8Input.set(obj2, 32);
-    const hashInput = uint8Input.subarray(0, 64);
-    const hashOutput = uint8Output.subarray(0, 32);
-    hashInto(hashInput, hashOutput);
-    return hashOutput.slice();
-  },
-  digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject {
-    hashObjectToUint32Array(obj1, uint32Input, 0);
-    hashObjectToUint32Array(obj2, uint32Input, 8);
-    const hashInput = uint8Input.subarray(0, 64);
-    const hashOutput = uint8Output.subarray(0, 32);
-    hashInto(hashInput, hashOutput);
-    return uint32ArrayToHashObject(uint32Output, 0);
-  },
-  // given nLevel = 3
-  // digest multiple of 8 chunks = 256 bytes
-  // the result is multiple of 1 chunk = 32 bytes
-  // this is the same to hashTreeRoot() of multiple validators
-  // TODO - batch: data, offset, length to avoid subarray call
-  digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array {
-    let inputLength = data.length;
-    const bytesInBatch = Math.pow(2, nLevel) * 32;
-    if (nLevel < 1) {
-      throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`);
-    }
-    if (inputLength % bytesInBatch !== 0) {
-      throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`);
-    }
-    if (inputLength > MAX_INPUT_SIZE) {
-      throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`);
-    }
-
-    let outputLength = Math.floor(inputLength / 2);
-    let hashOutput: Uint8Array | null = null;
-    for (let i = nLevel; i > 0; i--) {
-      uint8Input.set(hashOutput ?? data, 0);
-      const hashInput = uint8Input.subarray(0, inputLength);
-      hashOutput = uint8Output.subarray(0, outputLength);
-      hashInto(hashInput, hashOutput);
-      inputLength = outputLength;
-      outputLength = Math.floor(inputLength / 2);
-    }
-
-    if (hashOutput === null) {
-      throw new Error("hashOutput is null");
-    }
-    // the result is unsafe as it will be modified later, consumer should save the result if needed
-    return hashOutput;
-  },
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  batchHashObjects(inputs: HashObject[]): HashObject[] {
-    if (inputs.length === 0) {
-      return [];
-    }
-    if (inputs.length % 2 !== 0) {
-      throw new Error("inputs length must be even");
-    }
-
-    const batch = PARALLEL_FACTOR * 2;
-    const outHashObjects: HashObject[] = [];
-    for (const [i, hashInput] of inputs.entries()) {
-      const indexInBatch = i % batch;
-      hashObjectToUint32Array(hashInput, uint32Input, indexInBatch * 8);
-      if (indexInBatch === batch - 1) {
-        hashInto(uint8Input, uint8Output);
-        for (let j = 0; j < batch / 2; j++) {
-          outHashObjects.push(uint32ArrayToHashObject(uint32Output, j * 8));
-        }
-      }
-    }
-
-    // hash remaining
-    const remaining = inputs.length % batch;
-    if (remaining > 0) {
-      const remainingInput = uint8Input.subarray(0, remaining * 32);
-      const remainingOutput = uint8Output.subarray(0, remaining * 16);
-      hashInto(remainingInput, remainingOutput);
-      for (let i = 0; i < remaining / 2; i++) {
-        outHashObjects.push(uint32ArrayToHashObject(uint32Output, i * 8));
-      }
-    }
-
-    return outHashObjects;
-  },
-  executeHashComputations(hashComputations: Array<HashComputation[]>): void {
-    for (let level = hashComputations.length - 1; level >= 0; level--) {
-      const hcArr = hashComputations[level];
-      if (!hcArr) {
-        // should not happen
-        throw Error(`no hash computations for level ${level}`);
-      }
-
-      // size input array to 2 HashObject per computation * 32 bytes per object
-      // const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32));
-      let destNodes: Node[] = [];
-
-      // hash every 16 inputs at once to avoid memory allocation
-      for (const [i, {src0, src1, dest}] of hcArr.entries()) {
-        const indexInBatch = i % PARALLEL_FACTOR;
-        const offset = indexInBatch * 16;
-
-        hashObjectToUint32Array(src0, uint32Input, offset);
-        hashObjectToUint32Array(src1, uint32Input, offset + 8);
-        destNodes.push(dest);
-        if (indexInBatch === PARALLEL_FACTOR - 1) {
-          hashInto(uint8Input, uint8Output);
-          for (const [j, destNode] of destNodes.entries()) {
-            const outputOffset = j * 8;
-            destNode.applyHash(uint32ArrayToHashObject(uint32Output, outputOffset));
-          }
-          destNodes = [];
-        }
-      }
-
-      const remaining = hcArr.length % PARALLEL_FACTOR;
-      // we prepared data in input, now hash the remaining
-      if (remaining > 0) {
-        const remainingInput = uint8Input.subarray(0, remaining * 64);
-        const remainingOutput = uint8Output.subarray(0, remaining * 32);
-        hashInto(remainingInput, remainingOutput);
-        // destNodes was prepared above
-        for (const [i, destNode] of destNodes.entries()) {
-          const offset = i * 8;
-          destNode.applyHash(uint32ArrayToHashObject(uint32Output, offset));
-        }
-      }
-    }
-  },
-};
-
-function hashObjectToUint32Array(obj: HashObject, arr: Uint32Array, offset: number): void {
-  arr[offset] = obj.h0;
-  arr[offset + 1] = obj.h1;
-  arr[offset + 2] = obj.h2;
-  arr[offset + 3] = obj.h3;
-  arr[offset + 4] = obj.h4;
-  arr[offset + 5] = obj.h5;
-  arr[offset + 6] = obj.h6;
-  arr[offset + 7] = obj.h7;
-}
-
-function uint32ArrayToHashObject(arr: Uint32Array, offset: number): HashObject {
-  return {
-    h0: arr[offset],
-    h1: arr[offset + 1],
-    h2: arr[offset + 2],
-    h3: arr[offset + 3],
-    h4: arr[offset + 4],
-    h5: arr[offset + 5],
-    h6: arr[offset + 6],
-    h7: arr[offset + 7],
-  };
-}
diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts
index 13fb6a7c..1fcf3cad 100644
--- a/packages/persistent-merkle-tree/src/hasher/index.ts
+++ b/packages/persistent-merkle-tree/src/hasher/index.ts
@@ -1,7 +1,6 @@
 import {Hasher} from "./types";
 // import {hasher as nobleHasher} from "./noble";
-// import {hasher as csHasher} from "./as-sha256";
-import {hasher as hashtreeHasher} from "./hashtree";
+import {hasher as csHasher} from "./as-sha256";
 
 export * from "./types";
 export * from "./util";
@@ -13,10 +12,7 @@ export * from "./util";
  */
 // export let hasher: Hasher = nobleHasher;
 // For testing purposes, we use the as-sha256 hasher
-// export let hasher: Hasher = csHasher;
-
-// For testing purposes, we use the hashtree hasher
-export let hasher: Hasher = hashtreeHasher;
+export let hasher: Hasher = csHasher;
 
 /**
  * Set the hasher to be used across the SSZ codebase
diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts
index a2edf1d2..02e41a85 100644
--- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts
@@ -2,11 +2,11 @@ import {expectEqualHex} from "../utils/expectHex";
 import {uint8ArrayToHashObject, hashObjectToUint8Array} from "../../src/hasher/util";
 import {hasher as nobleHasher} from "../../src/hasher/noble";
 import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256";
-import {hasher as hashtreeHasher} from "../../src/hasher/hashtree";
 import {linspace} from "../utils/misc";
 import {buildComparisonTrees} from "../utils/tree";
+import {LeafNode, subtreeFillToContents} from "../../src";
 
-const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher];
+const hashers = [asSha256Hasher, nobleHasher];
 
 describe("hashers", function () {
   describe("digest64 vs digest64HashObjects methods should be the same", () => {
@@ -30,9 +30,7 @@ describe("hashers", function () {
     const root2 = Buffer.alloc(32, 0xff);
     const hash1 = nobleHasher.digest64(root1, root2);
     const hash2 = asSha256Hasher.digest64(root1, root2);
-    const hash3 = hashtreeHasher.digest64(root1, root2);
     expectEqualHex(hash1, hash2);
-    expectEqualHex(hash1, hash3);
   });
 
   it("all hashers should return the same values from digest64HashObjects", () => {
@@ -42,9 +40,7 @@
     const hashObject2 = uint8ArrayToHashObject(root2);
     const hash1 = hashObjectToUint8Array(nobleHasher.digest64HashObjects(hashObject1, hashObject2));
     const hash2 = hashObjectToUint8Array(asSha256Hasher.digest64HashObjects(hashObject1, hashObject2));
-    const hash3 = hashObjectToUint8Array(hashtreeHasher.digest64HashObjects(hashObject1, hashObject2));
     expectEqualHex(hash1, hash2);
-    expectEqualHex(hash1, hash3);
   });
 
   it("all hashers should return the same values from batchHashObjects", () => {
@@ -53,10 +49,8 @@
       .map(uint8ArrayToHashObject);
     const results1 = nobleHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array);
     const results2 = asSha256Hasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array);
-    const results3 = hashtreeHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array);
     Object.values(results1).forEach((result1, i) => {
       expectEqualHex(result1, results2[i]);
-      expectEqualHex(result1, results3[i]);
     });
   });
 
@@ -72,4 +66,19 @@
     });
   });
 });
+describe("as-sha256 hasher", function () {
+  const numValidators = [1, 2, 3, 4];
+  for (const numValidator of numValidators) {
+    it(`digestNLevelUnsafe ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => {
+      const nodes = Array.from({length: 8 * numValidator}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i + numValidator)));
+      const hashInput = Buffer.concat(nodes.map((node) => node.root));
+      const hashOutput = asSha256Hasher.digestNLevelUnsafe(hashInput, 3);
+      for (let i = 0; i < numValidator; i++) {
+        const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root;
+        expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root);
+      }
+    });
+  }
+});
+
 // TODO - batch: test more methods
diff --git a/setHasher.mjs b/setHasher.mjs
index c210cdba..821986ea 100644
--- a/setHasher.mjs
+++ b/setHasher.mjs
@@ -1,5 +1,5 @@
 // Set the hasher to hashtree
 // Used to run benchmarks with with visibility into hashtree performance, useful for Lodestar
 import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js";
-import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js";
+import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js";
 setHasher(hasher);
diff --git a/yarn.lock b/yarn.lock
index 1c14c626..b74bf846 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1252,18 +1252,6 @@
     core-js "2.6.10"
     require-resolve "0.0.2"
 
-"@chainsafe/hashtree-linux-arm64-gnu@1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-arm64-gnu/-/hashtree-linux-arm64-gnu-1.0.0.tgz#168db259636261d9f3612354cad9f730a4be7110"
-  integrity sha512-XdYEV6z503Oxa7+mPtUEq9KoKfBAs0BcxGaRiDttCbZK2/J7CcTlobBGd7KMxJ/dQ4IUonaXsob0BnXBcrlwuw==
-
-"@chainsafe/hashtree@1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@chainsafe/hashtree/-/hashtree-1.0.0.tgz#529439fb07299758ca5bbe69a00d1dc4ad83a949"
-  integrity sha512-qft0MZiLl5jbe8omZaSp1vQ2YCO9qCb262+5qD1vsgN6l1ga3ZFKLyNI6xvwbhC7ZnzZd46vr+p+KvdUIgruOw==
-  optionalDependencies:
-    "@chainsafe/hashtree-linux-arm64-gnu" "1.0.0"
-
 "@chainsafe/ssz@^0.15.1":
   version "0.15.1"
   resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0"
@@ -11303,7 +11291,7 @@ streamroller@^3.1.5:
     debug "^4.3.4"
     fs-extra "^8.1.0"
 
-"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
+"string-width-cjs@npm:string-width@^4.2.0":
   version "4.2.3"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
   integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -11320,6 +11308,15 @@ streamroller@^3.1.5:
     is-fullwidth-code-point "^2.0.0"
     strip-ansi "^4.0.0"
 
+"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
+  version "4.2.3"
+  resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+  integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+  dependencies:
+    emoji-regex "^8.0.0"
+    is-fullwidth-code-point "^3.0.0"
+    strip-ansi "^6.0.1"
+
 string-width@^3.0.0, string-width@^3.1.0:
   version "3.1.0"
"https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -11398,7 +11395,7 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -11426,6 +11423,13 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -12620,7 +12624,7 @@ workerpool@6.1.0: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.1.0.tgz#a8e038b4c94569596852de7a8ea4228eefdeb37b" integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -12647,6 +12651,15 @@ wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"