refactor: TS hash wrappers cleanup (#5691)
benesjan committed Apr 12, 2024
1 parent 10d9ad9 commit 7f8b09f
Showing 17 changed files with 84 additions and 59 deletions.
15 changes: 8 additions & 7 deletions barretenberg/ts/src/types/fields.ts
@@ -2,6 +2,7 @@ import { randomBytes } from '../random/index.js';
import { toBigIntBE, toBufferBE } from '../bigint-array/index.js';
import { BufferReader, uint8ArrayToHexString } from '../serialize/index.js';

// TODO(#4189): Replace with implementation in yarn-project/foundation/src/fields/fields.ts
export class Fr {
static ZERO = new Fr(0n);
static MODULUS = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001n;
@@ -10,14 +11,14 @@ export class Fr {
value: Uint8Array;

constructor(value: Uint8Array | bigint) {
if (typeof value === 'bigint') {
if (value > Fr.MAX_VALUE) {
throw new Error(`Fr out of range ${value}.`);
}
this.value = toBufferBE(value);
} else {
this.value = value;
// We convert buffer value to bigint to be able to check it fits within modulus
const valueBigInt = typeof value === 'bigint' ? value : toBigIntBE(value);

if (valueBigInt > Fr.MAX_VALUE) {
throw new Error(`Fr out of range: ${valueBigInt}`);
}

this.value = typeof value === 'bigint' ? toBufferBE(value) : value;
}

static random() {
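A minimal usage sketch of the refactored constructor (illustrative only; it assumes Fr.MAX_VALUE is MODULUS - 1, defined in the collapsed lines above):

const a = new Fr(123n);                // bigint input, checked against MAX_VALUE as before
const b = new Fr(new Uint8Array(32));  // 32-byte buffer input, now range-checked too
// new Fr(Fr.MODULUS) throws `Fr out of range: ...`, and a buffer encoding a value above
// MAX_VALUE is now rejected the same way; previously buffer inputs were stored unchecked.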
2 changes: 1 addition & 1 deletion cspell.json
@@ -79,7 +79,7 @@
"erc",
"falsey",
"fargate",
"Fieldeable",
"Fieldable",
"filestat",
"finalise",
"finalised",
6 changes: 3 additions & 3 deletions noir-projects/aztec-nr/aztec/src/note/utils.nr
@@ -71,8 +71,8 @@ pub fn compute_note_hash_for_consumption<Note, N>(note: Note) -> Field where Not
} else {
// When nonce is nonzero, that means we are reading a settled note (from tree) created in a
// previous TX. So we need the unique_siloed_note_hash which has already been hashed with
// contract address and then nonce. This hash will match the existing leaf in the private
// data tree, so the kernel can just perform a membership check directly on this hash/leaf.
// contract address and then nonce. This hash will match the existing leaf in the note hash
// tree, so the kernel can just perform a membership check directly on this hash/leaf.
compute_unique_siloed_note_hash(note)
// IMPORTANT NOTE ON REDUNDANT SILOING BY CONTRACT ADDRESS: The note hash computed above is
// "siloed" by contract address. When a note hash is computed solely for the purpose of
@@ -81,7 +81,7 @@ pub fn compute_note_hash_for_consumption<Note, N>(note: Note) -> Field where Not
// be computed from a siloed note hash. After all, persistable note hashes and nullifiers are
// siloed by the kernel circuit. That being said, the siloed note hash computed above CAN be
// used for nullifier computation, and this achieves the (arguably unnecessary) property that
// nullifiers are computed from a note hash's fully-computed private data tree leaf.
// nullifiers are computed from a note hash's fully-computed note hash tree leaf.
}
}

2 changes: 1 addition & 1 deletion yarn-project/circuits.js/src/contract/private_function.ts
@@ -32,7 +32,7 @@ export function computePrivateFunctionLeaf(fn: PrivateFunction): Buffer {

function getPrivateFunctionTreeCalculator(): MerkleTreeCalculator {
if (!privateFunctionTreeCalculator) {
const functionTreeZeroLeaf = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(Buffer.alloc(32))).toBuffer();
const functionTreeZeroLeaf = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(0)).toBuffer();
privateFunctionTreeCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf);
}
return privateFunctionTreeCalculator;
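Because pedersenHash now takes Fieldable inputs, a plain 0 is enough for the zero leaf; the serializer turns each 0 into a zero field element. A hedged sketch of the equivalence:

// Both calls hash a preimage of PRIVATE_FUNCTION_SIZE zero fields and should agree:
const before = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(Buffer.alloc(32))); // old style, still Fieldable
const after = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(0));                 // new style
// Fr.fromBuffer(Buffer.alloc(32)) and new Fr(0) denote the same field element, so the leaves match.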
2 changes: 1 addition & 1 deletion yarn-project/circuits.js/src/hash/hash.ts
@@ -61,7 +61,7 @@ export function hashVK(vkBuf: Buffer) {
* @returns A commitment nonce.
*/
export function computeCommitmentNonce(nullifierZero: Fr, commitmentIndex: number): Fr {
return pedersenHash([nullifierZero, numToUInt32BE(commitmentIndex, 32)], GeneratorIndex.NOTE_HASH_NONCE);
return pedersenHash([nullifierZero, commitmentIndex], GeneratorIndex.NOTE_HASH_NONCE);
}

/**
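With the Fieldable-based wrapper, the index no longer needs a manual numToUInt32BE conversion; serializeToFields maps the plain number straight to a field. Illustrative call (Fr.ZERO here is just a stand-in first nullifier):

const nonce = computeCommitmentNonce(Fr.ZERO, 3);
// internally: pedersenHash([new Fr(0), new Fr(3)], GeneratorIndex.NOTE_HASH_NONCE)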
16 changes: 7 additions & 9 deletions yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
@@ -1,7 +1,7 @@
import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';

import { Fr } from '../../fields/fields.js';
import { type Bufferable, serializeToBufferArray } from '../../serialize/serialize.js';
import { type Fieldable, serializeToFields } from '../../serialize/serialize.js';

/**
* Create a pedersen commitment (point) from an array of input fields.
@@ -20,19 +20,17 @@ export function pedersenCommit(input: Buffer[]) {

/**
* Create a pedersen hash (field) from an array of input fields.
* Left pads any inputs less than 32 bytes.
* @param input - The input fieldables to hash.
* @param index - The separator index to use for the hash.
* @returns The pedersen hash.
*/
export function pedersenHash(input: Bufferable[], index = 0): Fr {
let bufferredInput = serializeToBufferArray(input);
if (!bufferredInput.every(i => i.length <= 32)) {
throw new Error('All Pedersen Hash input buffers must be <= 32 bytes.');
}
bufferredInput = bufferredInput.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i));
export function pedersenHash(input: Fieldable[], index = 0): Fr {
const inputFields = serializeToFields(input);
return Fr.fromBuffer(
Buffer.from(
BarretenbergSync.getSingleton()
.pedersenHash(
bufferredInput.map(i => new FrBarretenberg(i)),
inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
index,
)
.toBuffer(),
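A short usage sketch of the new wrapper signature (numbers, bigints, Fr values and buffers are all Fieldable, so callers no longer pad inputs to 32 bytes by hand):

import { pedersenHash } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';

// Mixed Fieldable inputs; the optional second argument is the generator/separator index.
const digest: Fr = pedersenHash([new Fr(1), 2, 3n], 0);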
14 changes: 11 additions & 3 deletions yarn-project/foundation/src/crypto/poseidon/index.ts
@@ -1,16 +1,24 @@
import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';

import { Fr } from '../../fields/fields.js';
import { type Fieldable, serializeToFields } from '../../serialize/serialize.js';

/**
* Create a poseidon hash (field) from an array of input fields.
* Left pads any inputs less than 32 bytes.
* @param input - The input fields to hash.
* @param index - The separator index to use for the hash.
* @returns The poseidon hash.
* TODO(#5714): enable index once barretenberg API supports it
*/
export function poseidonHash(input: Buffer[]): Fr {
export function poseidonHash(input: Fieldable[], _index = 0): Fr {
const inputFields = serializeToFields(input);
return Fr.fromBuffer(
Buffer.from(
BarretenbergSync.getSingleton()
.poseidonHash(input.map(i => new FrBarretenberg(i)))
.poseidonHash(
inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
// index, // TODO: enable once the barretenberg API supports it
)
.toBuffer(),
),
);
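An analogous sketch for the Poseidon wrapper; the separator index is accepted for API symmetry but ignored until the barretenberg API supports it (#5714):

import { poseidonHash } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';

const digest: Fr = poseidonHash([new Fr(1), new Fr(2)]); // _index defaults to 0 and is currently unused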
32 changes: 23 additions & 9 deletions yarn-project/foundation/src/serialize/serialize.ts
@@ -121,11 +121,19 @@ export type Bufferable =
| Bufferable[];

/** A type that can be converted to a Field or a Field array. */
export type Fieldeable =
export type Fieldable =
| Fr
| boolean
| number
| bigint
| Buffer
| {
/**
* Serialize to a field.
* @dev Duplicate to `toField` but left as is as it is used in AVM codebase.
*/
toFr: () => Fr;
}
| {
/** Serialize to a field. */
toField: () => Fr;
@@ -134,18 +142,18 @@ export type Fieldeable =
/** Serialize to an array of fields. */
toFields: () => Fr[];
}
| Fieldeable[];
| Fieldable[];

/**
* Serializes a list of objects contiguously.
* @param objs - Objects to serialize.
* @returns A buffer list with the concatenation of all fields.
*/
export function serializeToBufferArray(...objs: Bufferable[]): Buffer[] {
let ret: Buffer[] = [];
const ret: Buffer[] = [];
for (const obj of objs) {
if (Array.isArray(obj)) {
ret = [...ret, ...serializeToBufferArray(...obj)];
ret.push(...serializeToBufferArray(...obj));
} else if (Buffer.isBuffer(obj)) {
ret.push(obj);
} else if (typeof obj === 'boolean') {
@@ -176,19 +184,25 @@ export function serializeToBufferArray(...objs: Bufferable[]): Buffer[] {
* @param objs - Objects to serialize.
* @returns An array of fields with the concatenation of all fields.
*/
export function serializeToFields(...objs: Fieldeable[]): Fr[] {
let ret: Fr[] = [];
export function serializeToFields(...objs: Fieldable[]): Fr[] {
const ret: Fr[] = [];
for (const obj of objs) {
if (Array.isArray(obj)) {
ret = [...ret, ...serializeToFields(...obj)];
ret.push(...serializeToFields(...obj));
} else if (obj instanceof Fr) {
ret.push(obj);
} else if (typeof obj === 'boolean' || typeof obj === 'number' || typeof obj === 'bigint') {
ret.push(new Fr(obj));
} else if ('toFields' in obj) {
ret = [...ret, ...obj.toFields()];
} else {
ret.push(...obj.toFields());
} else if ('toFr' in obj) {
ret.push(obj.toFr());
} else if ('toField' in obj) {
ret.push(obj.toField());
} else if (Buffer.isBuffer(obj)) {
ret.push(Fr.fromBuffer(obj));
} else {
throw new Error(`Cannot serialize input to field: ${typeof obj} ${(obj as any).constructor?.name}`);
}
}
return ret;
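A rough sketch of how the different Fieldable shapes flow through serializeToFields (the two object literals below are hypothetical stand-ins for types exposing toField/toFields; it also assumes serializeToFields is exported from the same entry point as Fieldable):

import { Fr } from '@aztec/foundation/fields';
import { serializeToFields } from '@aztec/foundation/serialize';

const addressLike = { toField: () => new Fr(42n) };              // e.g. an AztecAddress-style type
const pointLike = { toFields: () => [new Fr(1n), new Fr(2n)] };  // e.g. a Point-style type

// booleans/numbers/bigints become Fr, buffers go through Fr.fromBuffer,
// toFr/toField/toFields objects are expanded, nested arrays are flattened.
const fields = serializeToFields([true, 7, 8n, addressLike, pointLike]);
// -> [Fr(1), Fr(7), Fr(8), Fr(42), Fr(1), Fr(2)]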
6 changes: 4 additions & 2 deletions yarn-project/merkle-tree/src/pedersen.ts
@@ -1,4 +1,5 @@
import { pedersenHash } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { type Hasher } from '@aztec/types/interfaces';

/**
@@ -12,14 +13,15 @@ export class Pedersen implements Hasher {
* purposes.
*/
public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer {
return pedersenHash([Buffer.from(lhs), Buffer.from(rhs)]).toBuffer();
return pedersenHash([Fr.fromBuffer(Buffer.from(lhs)), Fr.fromBuffer(Buffer.from(rhs))]).toBuffer();
}

/*
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public hashInputs(inputs: Buffer[]): Buffer {
return pedersenHash(inputs).toBuffer();
const inputFields = inputs.map(i => Fr.fromBuffer(i));
return pedersenHash(inputFields).toBuffer();
}
}
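The Hasher interface still exchanges raw 32-byte buffers; the wrapper now just round-trips them through Fr, which means inputs must be canonical field encodings. Hedged sketch:

const pedersen = new Pedersen();
// Fr.fromBuffer enforces the modulus check, so callers must pass reduced field elements.
const parent = pedersen.hash(Fr.random().toBuffer(), Fr.random().toBuffer());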
@@ -1,4 +1,4 @@
import { randomBytes } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { type FromBuffer } from '@aztec/foundation/serialize';
import { type AztecKVStore } from '@aztec/kv-store';
import { openTmpStore } from '@aztec/kv-store/utils';
@@ -24,7 +24,7 @@ describe('AppendOnlySnapshot', () => {
() => tree,
() => snapshotBuilder,
async tree => {
const newLeaves = Array.from({ length: 2 }).map(() => randomBytes(32));
const newLeaves = Array.from({ length: 2 }).map(() => Fr.random().toBuffer());
await tree.appendLeaves(newLeaves);
},
);
4 changes: 2 additions & 2 deletions yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts
@@ -1,4 +1,4 @@
import { randomBytes } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { type FromBuffer } from '@aztec/foundation/serialize';
import { type AztecKVStore } from '@aztec/kv-store';
import { openTmpStore } from '@aztec/kv-store/utils';
@@ -23,7 +23,7 @@ describe('FullSnapshotBuilder', () => {
() => tree,
() => snapshotBuilder,
async () => {
const newLeaves = Array.from({ length: 2 }).map(() => randomBytes(32));
const newLeaves = Array.from({ length: 2 }).map(() => Fr.random().toBuffer());
await tree.appendLeaves(newLeaves);
},
);
19 changes: 10 additions & 9 deletions yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts
@@ -1,5 +1,6 @@
import { SiblingPath } from '@aztec/circuit-types';
import { randomBigInt, randomBytes } from '@aztec/foundation/crypto';
import { randomBigInt } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { createDebugLogger } from '@aztec/foundation/log';
import { type AztecKVStore } from '@aztec/kv-store';
import { openTmpStore } from '@aztec/kv-store/utils';
@@ -71,11 +72,11 @@ describe('SparseTreeSpecific', () => {
expect(tree.getNumLeaves(false)).toEqual(0n);

// Insert a leaf
await tree.updateLeaf(randomBytes(32), randomIndex);
await tree.updateLeaf(Fr.random().toBuffer(), randomIndex);
expect(tree.getNumLeaves(true)).toEqual(1n);

// Update a leaf
await tree.updateLeaf(randomBytes(32), randomIndex);
await tree.updateLeaf(Fr.random().toBuffer(), randomIndex);
expect(tree.getNumLeaves(true)).toEqual(1n);
});

@@ -90,7 +91,7 @@ describe('SparseTreeSpecific', () => {
expect(tree.getNumLeaves(false)).toEqual(0n);

// Insert a leaf
await tree.updateLeaf(randomBytes(32), randomIndex);
await tree.updateLeaf(Fr.random().toBuffer(), randomIndex);
expect(tree.getNumLeaves(true)).toEqual(1n);

// Delete a leaf
@@ -110,7 +111,7 @@

// Insert leaf at index 3
let level1LeftHash: Buffer;
const leafAtIndex3 = randomBytes(32);
const leafAtIndex3 = Fr.random().toBuffer();
{
await tree.updateLeaf(leafAtIndex3, 3n);
expect(tree.getNumLeaves(true)).toEqual(1n);
@@ -126,7 +127,7 @@
// Insert leaf at index 6
let level1RightHash: Buffer;
{
const leafAtIndex6 = randomBytes(32);
const leafAtIndex6 = Fr.random().toBuffer();
await tree.updateLeaf(leafAtIndex6, 6n);
expect(tree.getNumLeaves(true)).toEqual(2n);
const level2Hash = pedersen.hash(leafAtIndex6, INITIAL_LEAF);
@@ -139,7 +140,7 @@
}

// Insert leaf at index 2
const leafAtIndex2 = randomBytes(32);
const leafAtIndex2 = Fr.random().toBuffer();
{
await tree.updateLeaf(leafAtIndex2, 2n);
expect(tree.getNumLeaves(true)).toEqual(3n);
@@ -154,7 +155,7 @@

// Updating leaf at index 3
{
const updatedLeafAtIndex3 = randomBytes(32);
const updatedLeafAtIndex3 = Fr.random().toBuffer();
await tree.updateLeaf(updatedLeafAtIndex3, 3n);
expect(tree.getNumLeaves(true)).toEqual(3n);
const level2Hash = pedersen.hash(leafAtIndex2, updatedLeafAtIndex3);
@@ -175,7 +176,7 @@
const db = openTmpStore();
const tree = await createDb(db, pedersen, 'test', depth);

const leaves = Array.from({ length: 1000 }).map(() => randomBytes(32));
const leaves = Array.from({ length: 1000 }).map(() => Fr.random().toBuffer());
const indices = Array.from({ length: 1000 }).map(() => randomBigInt(BigInt(maxIndex)));

const start = Date.now();
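The tests swap randomBytes(32) for Fr.random().toBuffer() because an arbitrary 32-byte string can encode a value at or above the field modulus, which the stricter Fr conversion inside the Pedersen wrapper would now reject. A small sketch of the difference:

import { randomBytes } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';

const maybeInvalid = randomBytes(32);        // may encode a value >= Fr.MODULUS
const alwaysValid = Fr.random().toBuffer();  // always a canonical, in-range field element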
@@ -1,4 +1,4 @@
import { randomBytes } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { type FromBuffer } from '@aztec/foundation/serialize';
import { type AztecKVStore } from '@aztec/kv-store';
import { openTmpStore } from '@aztec/kv-store/utils';
@@ -41,7 +41,7 @@ describe('StandardTree_batchAppend', () => {
it('correctly computes root when batch appending and calls hash function expected num times', async () => {
const db = openTmpStore();
const tree = await createDb(db, pedersen, 'test', 3);
const leaves = Array.from({ length: 5 }, _ => randomBytes(32));
const leaves = Array.from({ length: 5 }, _ => Fr.random().toBuffer());

pedersen.resetCounter();
await tree.appendLeaves(leaves);
@@ -1,5 +1,5 @@
import { SiblingPath } from '@aztec/circuit-types';
import { randomBytes } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { type AztecKVStore } from '@aztec/kv-store';
import { openTmpStore } from '@aztec/kv-store/utils';
import { type Hasher } from '@aztec/types/interfaces';
@@ -44,7 +44,7 @@ export const standardBasedTreeTestSuite = (
it('should throw when appending beyond max index', async () => {
const db = openTmpStore();
const tree = await createDb(db, pedersen, 'test', 2);
const leaves = Array.from({ length: 5 }, _ => randomBytes(32));
const leaves = Array.from({ length: 5 }, _ => Fr.random().toBuffer());
await expect(appendLeaves(tree, leaves)).rejects.toThrow();
});

7 changes: 4 additions & 3 deletions yarn-project/simulator/src/avm/avm_simulator.test.ts
@@ -5,6 +5,7 @@ import { AztecAddress } from '@aztec/foundation/aztec-address';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';
import { EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { type Fieldable } from '@aztec/foundation/serialize';
import { AvmNestedCallsTestContractArtifact, AvmTestContractArtifact } from '@aztec/noir-contracts.js';

import { jest } from '@jest/globals';
@@ -140,11 +141,11 @@ describe('AVM simulator: transpiled Noir contracts', () => {
describe.each([
['poseidon_hash', poseidonHash],
['pedersen_hash', pedersenHash],
['pedersen_hash_with_index', (m: Buffer[]) => pedersenHash(m, 20)],
])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Buffer[]) => Fr) => {
['pedersen_hash_with_index', (m: Fieldable[]) => pedersenHash(m, 20)],
])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Fieldable[]) => Fr) => {
it(`Should execute contract function that performs ${name} hash`, async () => {
const calldata = [new Fr(1), new Fr(2), new Fr(3)];
const hash = hashFunction(calldata.map(f => f.toBuffer()));
const hash = hashFunction(calldata);

const context = initContext({ env: initExecutionEnvironment({ calldata }) });
const bytecode = getAvmTestContractBytecode(name);
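With the Fieldable signature, the test passes its Fr calldata straight to the hash wrappers; a rough sketch of the simplification:

const calldata = [new Fr(1), new Fr(2), new Fr(3)];
// before: hashFunction(calldata.map(f => f.toBuffer()))
// after: Fr values are already Fieldable, so no buffer conversion is needed
const expectedHash = pedersenHash(calldata, 20); // index 20 matches pedersen_hash_with_index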
