diff --git a/barretenberg/ts/src/types/fields.ts b/barretenberg/ts/src/types/fields.ts
index a54cbe5a328..9305f0d6142 100644
--- a/barretenberg/ts/src/types/fields.ts
+++ b/barretenberg/ts/src/types/fields.ts
@@ -2,6 +2,7 @@
 import { randomBytes } from '../random/index.js';
 import { toBigIntBE, toBufferBE } from '../bigint-array/index.js';
 import { BufferReader, uint8ArrayToHexString } from '../serialize/index.js';
+// TODO(#4189): Replace with implementation in yarn-project/foundation/src/fields/fields.ts
 export class Fr {
   static ZERO = new Fr(0n);
   static MODULUS = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001n;
@@ -10,14 +11,14 @@ export class Fr {
   value: Uint8Array;

   constructor(value: Uint8Array | bigint) {
-    if (typeof value === 'bigint') {
-      if (value > Fr.MAX_VALUE) {
-        throw new Error(`Fr out of range ${value}.`);
-      }
-      this.value = toBufferBE(value);
-    } else {
-      this.value = value;
+    // We convert buffer value to bigint to be able to check it fits within modulus
+    const valueBigInt = typeof value === 'bigint' ? value : toBigIntBE(value);
+
+    if (valueBigInt > Fr.MAX_VALUE) {
+      throw new Error(`Fr out of range: ${valueBigInt}`);
     }
+
+    this.value = typeof value === 'bigint' ? toBufferBE(value) : value;
   }

   static random() {
diff --git a/cspell.json b/cspell.json
index 9180d507c56..b66546c31b1 100644
--- a/cspell.json
+++ b/cspell.json
@@ -79,7 +79,7 @@
     "erc",
     "falsey",
     "fargate",
-    "Fieldeable",
+    "Fieldable",
     "filestat",
     "finalise",
     "finalised",
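The net effect of the `Fr` change is that out-of-range *buffer* inputs are now rejected too, where previously only bigint inputs were range-checked. A minimal standalone sketch of the new check, assuming `MAX_VALUE = MODULUS - 1n` as in the existing class (the modulus constant is copied from the diff; `toBigIntBE` here is a simplified stand-in for the helper in `bigint-array`):

```typescript
// Standalone sketch of the widened range check: both bigint and Uint8Array
// inputs are now validated against the BN254 modulus.
const MODULUS = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001n;
const MAX_VALUE = MODULUS - 1n; // assumed, matching the existing Fr.MAX_VALUE

// Simplified big-endian bytes-to-bigint conversion (stand-in for toBigIntBE).
function toBigIntBE(bytes: Uint8Array): bigint {
  return bytes.reduce((acc, b) => (acc << 8n) | BigInt(b), 0n);
}

function checkFrValue(value: Uint8Array | bigint): void {
  // Convert buffer input to bigint so the same modulus check covers both paths.
  const valueBigInt = typeof value === 'bigint' ? value : toBigIntBE(value);
  if (valueBigInt > MAX_VALUE) {
    throw new Error(`Fr out of range: ${valueBigInt}`);
  }
}

checkFrValue(123n); // ok
checkFrValue(new Uint8Array(32).fill(0xff)); // throws: 2^256 - 1 exceeds the modulus
```

This stricter check is what motivates the test changes further down, where random leaves switch from raw `randomBytes(32)` to `Fr.random().toBuffer()`.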
diff --git a/noir-projects/aztec-nr/aztec/src/note/utils.nr b/noir-projects/aztec-nr/aztec/src/note/utils.nr
index 8732ca6be61..49311d794af 100644
--- a/noir-projects/aztec-nr/aztec/src/note/utils.nr
+++ b/noir-projects/aztec-nr/aztec/src/note/utils.nr
@@ -71,8 +71,8 @@ pub fn compute_note_hash_for_consumption(note: Note) -> Field where Not
     } else {
         // When nonce is nonzero, that means we are reading a settled note (from tree) created in a
         // previous TX. So we need the unique_siloed_note_hash which has already been hashed with
-        // contract address and then nonce. This hash will match the existing leaf in the private
-        // data tree, so the kernel can just perform a membership check directly on this hash/leaf.
+        // contract address and then nonce. This hash will match the existing leaf in the note hash
+        // tree, so the kernel can just perform a membership check directly on this hash/leaf.
         compute_unique_siloed_note_hash(note)
         // IMPORTANT NOTE ON REDUNDANT SILOING BY CONTRACT ADDRESS: The note hash computed above is
         // "siloed" by contract address. When a note hash is computed solely for the purpose of
@@ -81,7 +81,7 @@ pub fn compute_note_hash_for_consumption(note: Note) -> Field where Not
        // be computed from a siloed note hash. After all, persistable note hashes and nullifiers are
        // siloed by the kernel circuit. That being said, the siloed note hash computed above CAN be
        // used for nullifier computation, and this achieves the (arguably unnecessary) property that
-       // nullifiers are computed from a note hash's fully-computed private data tree leaf.
+       // nullifiers are computed from a note hash's fully-computed note hash tree leaf.
     }
 }
diff --git a/yarn-project/circuits.js/src/contract/private_function.ts b/yarn-project/circuits.js/src/contract/private_function.ts
index ea244b9d94a..b0395155e1c 100644
--- a/yarn-project/circuits.js/src/contract/private_function.ts
+++ b/yarn-project/circuits.js/src/contract/private_function.ts
@@ -32,7 +32,7 @@ export function computePrivateFunctionLeaf(fn: PrivateFunction): Buffer {

 function getPrivateFunctionTreeCalculator(): MerkleTreeCalculator {
   if (!privateFunctionTreeCalculator) {
-    const functionTreeZeroLeaf = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(Buffer.alloc(32))).toBuffer();
+    const functionTreeZeroLeaf = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(0)).toBuffer();
     privateFunctionTreeCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf);
   }
   return privateFunctionTreeCalculator;
diff --git a/yarn-project/circuits.js/src/hash/hash.ts b/yarn-project/circuits.js/src/hash/hash.ts
index b5f4f1c0a63..b5496f75713 100644
--- a/yarn-project/circuits.js/src/hash/hash.ts
+++ b/yarn-project/circuits.js/src/hash/hash.ts
@@ -61,7 +61,7 @@ export function hashVK(vkBuf: Buffer) {
  * @returns A commitment nonce.
  */
 export function computeCommitmentNonce(nullifierZero: Fr, commitmentIndex: number): Fr {
-  return pedersenHash([nullifierZero, numToUInt32BE(commitmentIndex, 32)], GeneratorIndex.NOTE_HASH_NONCE);
+  return pedersenHash([nullifierZero, commitmentIndex], GeneratorIndex.NOTE_HASH_NONCE);
 }

 /**
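Both TypeScript changes above lean on the numeric arm of the new `Fieldable` union: `fill(0)` and a raw `commitmentIndex` now become `new Fr(n)` inside `serializeToFields`. The old code passed `numToUInt32BE(commitmentIndex, 32)`, i.e. the index left-padded to 32 bytes; a field element's canonical 32-byte big-endian encoding is the same byte string, which is why the hash input is unchanged. A quick standalone sanity check (both helpers here are simplified stand-ins, not the foundation implementations):

```typescript
// Why passing the raw number is equivalent to the old numToUInt32BE(..., 32) call:
// serializeToFields turns a `number` into new Fr(n), whose canonical encoding is
// a 32-byte big-endian buffer, exactly what the padded buffer used to be.
function numToUInt32BE(n: number, bufferSize = 4): Buffer {
  const buf = Buffer.alloc(bufferSize);
  buf.writeUInt32BE(n, bufferSize - 4); // write the u32 into the last 4 bytes
  return buf;
}

// Stand-in for Fr(n).toBuffer(): 32-byte big-endian encoding (small values only).
function frToBuffer(n: bigint): Buffer {
  const buf = Buffer.alloc(32);
  buf.writeBigUInt64BE(n, 24);
  return buf;
}

const index = 42;
console.log(numToUInt32BE(index, 32).equals(frToBuffer(BigInt(index)))); // true
```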
diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
index 29fc82dcac3..bb07994bb95 100644
--- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
+++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
@@ -1,7 +1,7 @@
 import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';

 import { Fr } from '../../fields/fields.js';
-import { type Bufferable, serializeToBufferArray } from '../../serialize/serialize.js';
+import { type Fieldable, serializeToFields } from '../../serialize/serialize.js';

 /**
  * Create a pedersen commitment (point) from an array of input fields.
@@ -20,19 +20,17 @@ export function pedersenCommit(input: Buffer[]) {

 /**
  * Create a pedersen hash (field) from an array of input fields.
- * Left pads any inputs less than 32 bytes.
+ * @param input - The input fieldables to hash.
+ * @param index - The separator index to use for the hash.
+ * @returns The pedersen hash.
  */
-export function pedersenHash(input: Bufferable[], index = 0): Fr {
-  let bufferredInput = serializeToBufferArray(input);
-  if (!bufferredInput.every(i => i.length <= 32)) {
-    throw new Error('All Pedersen Hash input buffers must be <= 32 bytes.');
-  }
-  bufferredInput = bufferredInput.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i));
+export function pedersenHash(input: Fieldable[], index = 0): Fr {
+  const inputFields = serializeToFields(input);
   return Fr.fromBuffer(
     Buffer.from(
       BarretenbergSync.getSingleton()
         .pedersenHash(
-          bufferredInput.map(i => new FrBarretenberg(i)),
+          inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
           index,
         )
         .toBuffer(),
diff --git a/yarn-project/foundation/src/crypto/poseidon/index.ts b/yarn-project/foundation/src/crypto/poseidon/index.ts
index 8f77b580211..b02b09e16ed 100644
--- a/yarn-project/foundation/src/crypto/poseidon/index.ts
+++ b/yarn-project/foundation/src/crypto/poseidon/index.ts
@@ -1,16 +1,24 @@
 import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';

 import { Fr } from '../../fields/fields.js';
+import { type Fieldable, serializeToFields } from '../../serialize/serialize.js';

 /**
  * Create a poseidon hash (field) from an array of input fields.
- * Left pads any inputs less than 32 bytes.
+ * @param input - The input fields to hash.
+ * @param index - The separator index to use for the hash.
+ * @returns The poseidon hash.
+ * TODO(#5714): enable index once barretenberg API supports it
  */
-export function poseidonHash(input: Buffer[]): Fr {
+export function poseidonHash(input: Fieldable[], _index = 0): Fr {
+  const inputFields = serializeToFields(input);
   return Fr.fromBuffer(
     Buffer.from(
       BarretenbergSync.getSingleton()
-        .poseidonHash(input.map(i => new FrBarretenberg(i)))
+        .poseidonHash(
+          inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
+          // index, // TODO: enable once the barretenberg API supports it
+        )
         .toBuffer(),
     ),
   );
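With both hash entry points widened from `Buffer[]`/`Bufferable[]` to `Fieldable[]`, callers can mix field elements, numbers, bigints, booleans, and buffers freely. A usage sketch (import paths taken from the test changes later in this diff):

```typescript
import { pedersenHash, poseidonHash } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';

// Mixed Fieldable inputs: everything is normalized by serializeToFields first.
const inputs = [new Fr(1n), 2, 3n, true, Buffer.alloc(32)];

const h1 = pedersenHash(inputs); // separator index defaults to 0
const h2 = pedersenHash(inputs, 20); // explicit separator index
const h3 = poseidonHash(inputs); // index accepted but ignored until #5714 lands

console.log(h1.toString(), h2.toString(), h3.toString());
```

Note that this also moves the "inputs must fit in a field" check out of `pedersenHash` itself: oversized values are now rejected when they are converted to `Fr`, rather than by the old explicit 32-byte buffer check.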
diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts
index c610b5fea64..eaa81c27c7f 100644
--- a/yarn-project/foundation/src/serialize/serialize.ts
+++ b/yarn-project/foundation/src/serialize/serialize.ts
@@ -121,11 +121,19 @@ export type Bufferable =
   | Bufferable[];

 /** A type that can be converted to a Field or a Field array. */
-export type Fieldeable =
+export type Fieldable =
   | Fr
   | boolean
   | number
   | bigint
+  | Buffer
+  | {
+      /**
+       * Serialize to a field.
+       * @dev Duplicate of `toField` but left as is since it is used in the AVM codebase.
+       */
+      toFr: () => Fr;
+    }
   | {
       /** Serialize to a field. */
       toField: () => Fr;
@@ -134,7 +142,7 @@ export type Fieldeable =
       /** Serialize to an array of fields. */
       toFields: () => Fr[];
     }
-  | Fieldeable[];
+  | Fieldable[];

 /**
  * Serializes a list of objects contiguously.
  * @returns A buffer list with the concatenation of all fields.
  */
 export function serializeToBufferArray(...objs: Bufferable[]): Buffer[] {
-  let ret: Buffer[] = [];
+  const ret: Buffer[] = [];
   for (const obj of objs) {
     if (Array.isArray(obj)) {
-      ret = [...ret, ...serializeToBufferArray(...obj)];
+      ret.push(...serializeToBufferArray(...obj));
     } else if (Buffer.isBuffer(obj)) {
       ret.push(obj);
     } else if (typeof obj === 'boolean') {
@@ -176,19 +184,25 @@ export function serializeToBufferArray(...objs: Bufferable[]): Buffer[] {
  * @param objs - Objects to serialize.
  * @returns An array of fields with the concatenation of all fields.
  */
-export function serializeToFields(...objs: Fieldeable[]): Fr[] {
-  let ret: Fr[] = [];
+export function serializeToFields(...objs: Fieldable[]): Fr[] {
+  const ret: Fr[] = [];
   for (const obj of objs) {
     if (Array.isArray(obj)) {
-      ret = [...ret, ...serializeToFields(...obj)];
+      ret.push(...serializeToFields(...obj));
     } else if (obj instanceof Fr) {
       ret.push(obj);
     } else if (typeof obj === 'boolean' || typeof obj === 'number' || typeof obj === 'bigint') {
       ret.push(new Fr(obj));
     } else if ('toFields' in obj) {
-      ret = [...ret, ...obj.toFields()];
-    } else {
+      ret.push(...obj.toFields());
+    } else if ('toFr' in obj) {
+      ret.push(obj.toFr());
+    } else if ('toField' in obj) {
       ret.push(obj.toField());
+    } else if (Buffer.isBuffer(obj)) {
+      ret.push(Fr.fromBuffer(obj));
+    } else {
+      throw new Error(`Cannot serialize input to field: ${typeof obj} ${(obj as any).constructor?.name}`);
     }
   }
   return ret;
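The dispatch order in the new `serializeToFields` is load-bearing: `toFields` is tested before `toFr` and `toField`, buffers are only consumed after the interface checks, and anything unrecognized now fails loudly instead of hitting a missing `toField` at runtime. A sketch of what each arm accepts, assuming `serializeToFields` is exported from `@aztec/foundation/serialize` alongside `Fieldable` (the object literals are hypothetical stand-ins for types like `Point`, the AVM `Field`, and `AztecAddress`):

```typescript
import { Fr } from '@aztec/foundation/fields';
import { serializeToFields } from '@aztec/foundation/serialize';

const pointLike = { toFields: () => [new Fr(1n), new Fr(2n)] }; // multi-field object
const avmFieldLike = { toFr: () => new Fr(3n) }; // AVM-style wrapper
const addressLike = { toField: () => new Fr(4n) }; // single-field object

// Arrays recurse, primitives become Fr, buffers go through Fr.fromBuffer.
const fields = serializeToFields([pointLike, avmFieldLike, addressLike, 5, true, Buffer.alloc(32)]);
console.log(fields.length); // 7 fields: 2 + 1 + 1 + 1 + 1 + 1
```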
diff --git a/yarn-project/merkle-tree/src/pedersen.ts b/yarn-project/merkle-tree/src/pedersen.ts
index b49c34c527e..8ddfa636549 100644
--- a/yarn-project/merkle-tree/src/pedersen.ts
+++ b/yarn-project/merkle-tree/src/pedersen.ts
@@ -1,4 +1,5 @@
 import { pedersenHash } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { type Hasher } from '@aztec/types/interfaces';

 /**
@@ -12,7 +13,7 @@ export class Pedersen implements Hasher {
    * purposes.
    */
   public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer {
-    return pedersenHash([Buffer.from(lhs), Buffer.from(rhs)]).toBuffer();
+    return pedersenHash([Fr.fromBuffer(Buffer.from(lhs)), Fr.fromBuffer(Buffer.from(rhs))]).toBuffer();
   }

   /*
@@ -20,6 +21,7 @@ export class Pedersen implements Hasher {
    * purposes.
    */
   public hashInputs(inputs: Buffer[]): Buffer {
-    return pedersenHash(inputs).toBuffer();
+    const inputFields = inputs.map(i => Fr.fromBuffer(i));
+    return pedersenHash(inputFields).toBuffer();
   }
 }
diff --git a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts
index 111f6de0439..e68bde0ee71 100644
--- a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts
+++ b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts
@@ -1,4 +1,4 @@
-import { randomBytes } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { type FromBuffer } from '@aztec/foundation/serialize';
 import { type AztecKVStore } from '@aztec/kv-store';
 import { openTmpStore } from '@aztec/kv-store/utils';
@@ -24,7 +24,7 @@ describe('AppendOnlySnapshot', () => {
     () => tree,
     () => snapshotBuilder,
     async tree => {
-      const newLeaves = Array.from({ length: 2 }).map(() => randomBytes(32));
+      const newLeaves = Array.from({ length: 2 }).map(() => Fr.random().toBuffer());
       await tree.appendLeaves(newLeaves);
     },
   );
diff --git a/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts
index 486f633262a..be8a8572f6c 100644
--- a/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts
+++ b/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts
@@ -1,4 +1,4 @@
-import { randomBytes } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { type FromBuffer } from '@aztec/foundation/serialize';
 import { type AztecKVStore } from '@aztec/kv-store';
 import { openTmpStore } from '@aztec/kv-store/utils';
@@ -23,7 +23,7 @@ describe('FullSnapshotBuilder', () => {
     () => tree,
     () => snapshotBuilder,
     async () => {
-      const newLeaves = Array.from({ length: 2 }).map(() => randomBytes(32));
+      const newLeaves = Array.from({ length: 2 }).map(() => Fr.random().toBuffer());
       await tree.appendLeaves(newLeaves);
     },
   );
diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts
index e949192e900..56cbaee0a58 100644
--- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts
+++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts
@@ -1,5 +1,6 @@
 import { SiblingPath } from '@aztec/circuit-types';
-import { randomBigInt, randomBytes } from '@aztec/foundation/crypto';
+import { randomBigInt } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore } from '@aztec/kv-store';
 import { openTmpStore } from '@aztec/kv-store/utils';
@@ -71,11 +72,11 @@ describe('SparseTreeSpecific', () => {
     expect(tree.getNumLeaves(false)).toEqual(0n);

     // Insert a leaf
-    await tree.updateLeaf(randomBytes(32), randomIndex);
+    await tree.updateLeaf(Fr.random().toBuffer(), randomIndex);
     expect(tree.getNumLeaves(true)).toEqual(1n);

     // Update a leaf
-    await tree.updateLeaf(randomBytes(32), randomIndex);
+    await tree.updateLeaf(Fr.random().toBuffer(), randomIndex);
     expect(tree.getNumLeaves(true)).toEqual(1n);
   });

@@ -90,7 +91,7 @@ describe('SparseTreeSpecific', () => {
     expect(tree.getNumLeaves(false)).toEqual(0n);

     // Insert a leaf
-    await tree.updateLeaf(randomBytes(32), randomIndex);
+    await tree.updateLeaf(Fr.random().toBuffer(), randomIndex);
     expect(tree.getNumLeaves(true)).toEqual(1n);

     // Delete a leaf
@@ -110,7 +111,7 @@ describe('SparseTreeSpecific', () => {

     // Insert leaf at index 3
     let level1LeftHash: Buffer;
-    const leafAtIndex3 = randomBytes(32);
+    const leafAtIndex3 = Fr.random().toBuffer();
     {
       await tree.updateLeaf(leafAtIndex3, 3n);
       expect(tree.getNumLeaves(true)).toEqual(1n);
@@ -126,7 +127,7 @@ describe('SparseTreeSpecific', () => {
     // Insert leaf at index 6
     let level1RightHash: Buffer;
     {
-      const leafAtIndex6 = randomBytes(32);
+      const leafAtIndex6 = Fr.random().toBuffer();
       await tree.updateLeaf(leafAtIndex6, 6n);
       expect(tree.getNumLeaves(true)).toEqual(2n);
       const level2Hash = pedersen.hash(leafAtIndex6, INITIAL_LEAF);
@@ -139,7 +140,7 @@ describe('SparseTreeSpecific', () => {
     }

     // Insert leaf at index 2
-    const leafAtIndex2 = randomBytes(32);
+    const leafAtIndex2 = Fr.random().toBuffer();
     {
       await tree.updateLeaf(leafAtIndex2, 2n);
       expect(tree.getNumLeaves(true)).toEqual(3n);
@@ -154,7 +155,7 @@ describe('SparseTreeSpecific', () => {

     // Updating leaf at index 3
     {
-      const updatedLeafAtIndex3 = randomBytes(32);
+      const updatedLeafAtIndex3 = Fr.random().toBuffer();
       await tree.updateLeaf(updatedLeafAtIndex3, 3n);
       expect(tree.getNumLeaves(true)).toEqual(3n);
       const level2Hash = pedersen.hash(leafAtIndex2, updatedLeafAtIndex3);
@@ -175,7 +176,7 @@ describe('SparseTreeSpecific', () => {
     const db = openTmpStore();
     const tree = await createDb(db, pedersen, 'test', depth);

-    const leaves = Array.from({ length: 1000 }).map(() => randomBytes(32));
+    const leaves = Array.from({ length: 1000 }).map(() => Fr.random().toBuffer());
     const indices = Array.from({ length: 1000 }).map(() => randomBigInt(BigInt(maxIndex)));

     const start = Date.now();
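These test changes are not cosmetic: a uniformly random 32-byte buffer exceeds the BN254 modulus about four times out of five, and with the `Pedersen` hasher now routing every leaf through `Fr.fromBuffer`, such leaves would throw under the stricter range check shown at the top of this diff. `Fr.random()` samples a valid field element by construction. A sketch of the failure mode the switch avoids (assuming the foundation `Fr` rejects out-of-range buffers, as the constructor change suggests):

```typescript
import { randomBytes } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';

// Old style: raw random bytes, above the modulus roughly 80% of the time.
const fromRandomBytes = () => Fr.fromBuffer(randomBytes(32)); // usually throws now

// New style: sample a field element, then serialize; always a canonical leaf.
const fromFrRandom = () => Fr.random().toBuffer();

console.log(fromFrRandom().length); // 32
try {
  fromRandomBytes();
} catch (err) {
  console.log('out-of-range leaf rejected:', (err as Error).message);
}
```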
diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts
index cd946ac771a..b01409eb14c 100644
--- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts
+++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts
@@ -1,4 +1,4 @@
-import { randomBytes } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { type FromBuffer } from '@aztec/foundation/serialize';
 import { type AztecKVStore } from '@aztec/kv-store';
 import { openTmpStore } from '@aztec/kv-store/utils';
@@ -41,7 +41,7 @@ describe('StandardTree_batchAppend', () => {
   it('correctly computes root when batch appending and calls hash function expected num times', async () => {
     const db = openTmpStore();
     const tree = await createDb(db, pedersen, 'test', 3);
-    const leaves = Array.from({ length: 5 }, _ => randomBytes(32));
+    const leaves = Array.from({ length: 5 }, _ => Fr.random().toBuffer());

     pedersen.resetCounter();
     await tree.appendLeaves(leaves);
diff --git a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts
index a2533ad9e6d..6f8fc6d0987 100644
--- a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts
+++ b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts
@@ -1,5 +1,5 @@
 import { SiblingPath } from '@aztec/circuit-types';
-import { randomBytes } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { type AztecKVStore } from '@aztec/kv-store';
 import { openTmpStore } from '@aztec/kv-store/utils';
 import { type Hasher } from '@aztec/types/interfaces';
@@ -44,7 +44,7 @@ export const standardBasedTreeTestSuite = (
     it('should throw when appending beyond max index', async () => {
       const db = openTmpStore();
       const tree = await createDb(db, pedersen, 'test', 2);
-      const leaves = Array.from({ length: 5 }, _ => randomBytes(32));
+      const leaves = Array.from({ length: 5 }, _ => Fr.random().toBuffer());

       await expect(appendLeaves(tree, leaves)).rejects.toThrow();
     });
diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts
index 78d0aa01011..3e932af1294 100644
--- a/yarn-project/simulator/src/avm/avm_simulator.test.ts
+++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts
@@ -5,6 +5,7 @@ import { AztecAddress } from '@aztec/foundation/aztec-address';
 import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';
 import { EthAddress } from '@aztec/foundation/eth-address';
 import { Fr } from '@aztec/foundation/fields';
+import { type Fieldable } from '@aztec/foundation/serialize';
 import { AvmNestedCallsTestContractArtifact, AvmTestContractArtifact } from '@aztec/noir-contracts.js';

 import { jest } from '@jest/globals';
@@ -140,11 +141,11 @@ describe('AVM simulator: transpiled Noir contracts', () => {
   describe.each([
     ['poseidon_hash', poseidonHash],
     ['pedersen_hash', pedersenHash],
-    ['pedersen_hash_with_index', (m: Buffer[]) => pedersenHash(m, 20)],
-  ])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Buffer[]) => Fr) => {
+    ['pedersen_hash_with_index', (m: Fieldable[]) => pedersenHash(m, 20)],
+  ])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Fieldable[]) => Fr) => {
     it(`Should execute contract function that performs ${name} hash`, async () => {
       const calldata = [new Fr(1), new Fr(2), new Fr(3)];
-      const hash = hashFunction(calldata.map(f => f.toBuffer()));
+      const hash = hashFunction(calldata);

       const context = initContext({ env: initExecutionEnvironment({ calldata }) });
       const bytecode = getAvmTestContractBytecode(name);
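The same widening simplifies the AVM test harness: `Fr[]` is already a valid `Fieldable[]`, so calldata can be hashed as-is. A condensed sketch of the parametrized hash helper after the change (hypothetical standalone snippet, not the test file itself):

```typescript
// Fr[] is assignable to Fieldable[], so test calldata can be hashed directly,
// without the old .map(f => f.toBuffer()) round-trip.
import { pedersenHash } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { type Fieldable } from '@aztec/foundation/serialize';

const hashWithIndex = (m: Fieldable[]) => pedersenHash(m, 20);
const calldata = [new Fr(1), new Fr(2), new Fr(3)];
console.log(hashWithIndex(calldata).toString()); // no buffer conversion needed
```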
diff --git a/yarn-project/simulator/src/avm/opcodes/hashing.test.ts b/yarn-project/simulator/src/avm/opcodes/hashing.test.ts
index 42e6d690e48..3986bd73b2b 100644
--- a/yarn-project/simulator/src/avm/opcodes/hashing.test.ts
+++ b/yarn-project/simulator/src/avm/opcodes/hashing.test.ts
@@ -41,7 +41,7 @@ describe('Hashing Opcodes', () => {

       const dstOffset = 3;

-      const expectedHash = poseidonHash(args.map(field => field.toBuffer()));
+      const expectedHash = poseidonHash(args);
       await new Poseidon2(indirect, dstOffset, messageOffset, args.length).execute(context);

       const result = context.machineState.memory.get(dstOffset);
@@ -62,7 +62,7 @@ describe('Hashing Opcodes', () => {

       const dstOffset = 3;

-      const expectedHash = poseidonHash(args.map(field => field.toBuffer()));
+      const expectedHash = poseidonHash(args);
       await new Poseidon2(indirect, dstOffset, messageOffset, args.length).execute(context);

       const result = context.machineState.memory.get(dstOffset);
diff --git a/yarn-project/simulator/src/avm/opcodes/hashing.ts b/yarn-project/simulator/src/avm/opcodes/hashing.ts
index 31c5f7ac54c..500bbf46cbd 100644
--- a/yarn-project/simulator/src/avm/opcodes/hashing.ts
+++ b/yarn-project/simulator/src/avm/opcodes/hashing.ts
@@ -41,7 +41,7 @@ export class Poseidon2 extends Instruction {
     );

     // Memory pointer will be indirect
-    const hashData = memory.getSlice(messageOffset, this.messageSize).map(word => word.toBuffer());
+    const hashData = memory.getSlice(messageOffset, this.messageSize);
     const hash = poseidonHash(hashData);
     memory.set(dstOffset, new Field(hash));
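The `.map(word => word.toBuffer())` in the opcode disappears because the AVM's tagged `Field` memory type satisfies `Fieldable` through `toFr()`, the duplicate arm called out in the `@dev` comment in `serialize.ts` above. A reduced sketch with a hypothetical `Field` stand-in:

```typescript
// Why the buffer conversion is gone: any object with toFr(): Fr is Fieldable,
// so a slice of AVM memory words can be passed to poseidonHash directly.
import { poseidonHash } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';

// Hypothetical stand-in for the AVM's tagged Field memory type.
class Field {
  constructor(private readonly rep: Fr) {}
  toFr(): Fr {
    return this.rep;
  }
}

const hashData: Field[] = [new Field(new Fr(1n)), new Field(new Fr(2n))];
const hash = poseidonHash(hashData); // serializeToFields calls toFr() on each word
console.log(hash.toString());
```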