diff --git a/cspell.json b/cspell.json index b53724fe7ca..6c2ce17a407 100644 --- a/cspell.json +++ b/cspell.json @@ -20,6 +20,7 @@ "bbmalloc", "benesjan", "Bincode", + "bincoded", "bleurgh", "bodyparser", "bootnode", @@ -98,6 +99,7 @@ "gitrepo", "grumpkin", "gtest", + "gzipped", "hackmd", "hardfork", "hardlinks", @@ -181,6 +183,7 @@ "productionify", "protobuf", "protogalaxy", + "proverless", "proxied", "proxified", "proxify", @@ -287,4 +290,4 @@ "flagWords": [ "anonymous" ] -} \ No newline at end of file +} diff --git a/yarn-project/end-to-end/src/benchmarks/utils.ts b/yarn-project/end-to-end/src/benchmarks/utils.ts index 7463c24fd7a..0dbbe2d6162 100644 --- a/yarn-project/end-to-end/src/benchmarks/utils.ts +++ b/yarn-project/end-to-end/src/benchmarks/utils.ts @@ -110,7 +110,10 @@ export async function waitNewPXESynced( contract: BenchmarkingContract, startingBlock: number = INITIAL_L2_BLOCK_NUM, ): Promise { - const pxe = await createPXEService(node, { l2BlockPollingIntervalMS: 100, l2StartingBlock: startingBlock }); + const pxe = await createPXEService(node, { + l2BlockPollingIntervalMS: 100, + l2StartingBlock: startingBlock, + }); await pxe.registerContract(contract); await retryUntil(() => pxe.isGlobalStateSynchronized(), 'pxe-global-sync'); return pxe; diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts new file mode 100644 index 00000000000..ec6c32d11a6 --- /dev/null +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts @@ -0,0 +1,78 @@ +import { type Tx } from '@aztec/aztec.js'; +import { type ClientProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; +import { type BBNativeProofCreator } from '@aztec/pxe'; + +import { ClientProverTest } from './client_prover_test.js'; + +const TIMEOUT = 300_000; + +async function verifyProof(_1: ClientProtocolArtifact, _2: Tx, _3: BBNativeProofCreator) { + // TODO(@PhilWindle): Will verify proof once the circuits are fixed + await Promise.resolve(); + //const result = await proofCreator.verifyProof(circuitType, tx.proof); + expect(true).toBeTruthy(); +} + +describe('client_prover_integration', () => { + const t = new ClientProverTest('transfer_private'); + let { provenAsset, accounts, tokenSim, logger, proofCreator } = t; + + beforeAll(async () => { + await t.applyBaseSnapshots(); + await t.applyMintSnapshot(); + await t.setup(); + ({ provenAsset, accounts, tokenSim, logger, proofCreator } = t); + }); + + afterAll(async () => { + await t.teardown(); + }); + + afterEach(async () => { + await t.tokenSim.check(); + }); + + it( + 'private transfer less than balance', + async () => { + logger.info( + `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_private.selector}`, + ); + const balance0 = await provenAsset.methods.balance_of_private(accounts[0].address).simulate(); + const amount = balance0 / 2n; + expect(amount).toBeGreaterThan(0n); + const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); + const provenTx = await interaction.prove(); + + // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! 
+ logger.info(`Verifying kernel tail proof`); + await verifyProof('PrivateKernelTailArtifact', provenTx, proofCreator!); + + await interaction.send().wait(); + tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); + }, + TIMEOUT, + ); + + it( + 'public transfer less than balance', + async () => { + logger.info( + `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_public.selector}`, + ); + const balance0 = await provenAsset.methods.balance_of_public(accounts[0].address).simulate(); + const amount = balance0 / 2n; + expect(amount).toBeGreaterThan(0n); + const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); + const provenTx = await interaction.prove(); + + // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! + logger.info(`Verifying kernel tail to public proof`); + await verifyProof('PrivateKernelTailToPublicArtifact', provenTx, proofCreator!); + + await interaction.send().wait(); + tokenSim.transferPublic(accounts[0].address, accounts[1].address, amount); + }, + TIMEOUT, + ); +}); diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts new file mode 100644 index 00000000000..d511f5ce716 --- /dev/null +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts @@ -0,0 +1,248 @@ +import { SchnorrAccountContractArtifact, getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { + type AccountWalletWithSecretKey, + type AztecNode, + type CompleteAddress, + type DebugLogger, + ExtendedNote, + type Fq, + Fr, + Note, + type TxHash, + computeSecretHash, + createDebugLogger, +} from '@aztec/aztec.js'; +import { TokenContract } from '@aztec/noir-contracts.js'; +import { BBNativeProofCreator, type PXEService } from '@aztec/pxe'; + +import * as fs from 'fs/promises'; + +import { waitRegisteredAccountSynced } from '../benchmarks/utils.js'; +import { + SnapshotManager, + type SubsystemsContext, + addAccounts, + publicDeployAccounts, +} from '../fixtures/snapshot_manager.js'; +import { getBBConfig, setupPXEService } from '../fixtures/utils.js'; +import { TokenSimulator } from '../simulators/token_simulator.js'; + +const { E2E_DATA_PATH: dataPath } = process.env; + +const SALT = 1; + +/** + * Largely taken from the e2e_token_contract test file. We deploy 2 accounts and a token contract. + * However, we then setup a second PXE with a full prover instance. + * We configure this instance with all of the accounts and contracts. + * We then prove and verify transactions created via this full prover PXE. 
+ */ + +export class ClientProverTest { + static TOKEN_NAME = 'Aztec Token'; + static TOKEN_SYMBOL = 'AZT'; + static TOKEN_DECIMALS = 18n; + private snapshotManager: SnapshotManager; + logger: DebugLogger; + keys: Array<[Fr, Fq]> = []; + wallets: AccountWalletWithSecretKey[] = []; + accounts: CompleteAddress[] = []; + asset!: TokenContract; + tokenSim!: TokenSimulator; + aztecNode!: AztecNode; + pxe!: PXEService; + fullProverPXE!: PXEService; + provenAsset!: TokenContract; + provenPXETeardown?: () => Promise; + private directoryToCleanup?: string; + proofCreator?: BBNativeProofCreator; + + constructor(testName: string) { + this.logger = createDebugLogger(`aztec:client_prover_test:${testName}`); + this.snapshotManager = new SnapshotManager(`client_prover_integration/${testName}`, dataPath); + } + + /** + * Adds two state shifts to snapshot manager. + * 1. Add 2 accounts. + * 2. Publicly deploy accounts, deploy token contract + */ + async applyBaseSnapshots() { + await this.snapshotManager.snapshot('2_accounts', addAccounts(2, this.logger), async ({ accountKeys }, { pxe }) => { + this.keys = accountKeys; + const accountManagers = accountKeys.map(ak => getSchnorrAccount(pxe, ak[0], ak[1], SALT)); + this.wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + this.accounts = await pxe.getRegisteredAccounts(); + this.wallets.forEach((w, i) => this.logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); + }); + + await this.snapshotManager.snapshot( + 'client_prover_integration', + async () => { + // Create the token contract state. + // Move this account thing to addAccounts above? + this.logger.verbose(`Public deploy accounts...`); + await publicDeployAccounts(this.wallets[0], this.accounts.slice(0, 2)); + + this.logger.verbose(`Deploying TokenContract...`); + const asset = await TokenContract.deploy( + this.wallets[0], + this.accounts[0], + ClientProverTest.TOKEN_NAME, + ClientProverTest.TOKEN_SYMBOL, + ClientProverTest.TOKEN_DECIMALS, + ) + .send() + .deployed(); + this.logger.verbose(`Token deployed to ${asset.address}`); + + return { tokenContractAddress: asset.address }; + }, + async ({ tokenContractAddress }) => { + // Restore the token contract state. 
+ this.asset = await TokenContract.at(tokenContractAddress, this.wallets[0]); + this.logger.verbose(`Token contract address: ${this.asset.address}`); + + this.tokenSim = new TokenSimulator( + this.asset, + this.logger, + this.accounts.map(a => a.address), + ); + + expect(await this.asset.methods.admin().simulate()).toBe(this.accounts[0].address.toBigInt()); + }, + ); + } + + async setup() { + const context = await this.snapshotManager.setup(); + ({ pxe: this.pxe, aztecNode: this.aztecNode } = context); + + // Configure a full prover PXE + const bbConfig = await getBBConfig(this.logger); + this.directoryToCleanup = bbConfig?.directoryToCleanup; + + if (!bbConfig?.bbWorkingDirectory || !bbConfig?.expectedBBPath) { + throw new Error(`Test must be run with BB native configuration`); + } + + this.proofCreator = new BBNativeProofCreator(bbConfig?.expectedBBPath, bbConfig?.bbWorkingDirectory); + + this.logger.debug(`Main setup completed, initializing full prover PXE...`); + ({ pxe: this.fullProverPXE, teardown: this.provenPXETeardown } = await setupPXEService( + 0, + this.aztecNode, + { + proverEnabled: false, + bbBinaryPath: bbConfig?.expectedBBPath, + bbWorkingDirectory: bbConfig?.bbWorkingDirectory, + }, + undefined, + true, + this.proofCreator, + )); + this.logger.debug(`Contract address ${this.asset.address}`); + await this.fullProverPXE.registerContract(this.asset); + + for (let i = 0; i < 2; i++) { + await waitRegisteredAccountSynced( + this.fullProverPXE, + this.keys[i][0], + this.wallets[i].getCompleteAddress().partialAddress, + ); + + await waitRegisteredAccountSynced(this.pxe, this.keys[i][0], this.wallets[i].getCompleteAddress().partialAddress); + } + + const account = getSchnorrAccount(this.fullProverPXE, this.keys[0][0], this.keys[0][1], SALT); + + await this.fullProverPXE.registerContract({ + instance: account.getInstance(), + artifact: SchnorrAccountContractArtifact, + }); + + const provenWallet = await account.getWallet(); + this.provenAsset = await TokenContract.at(this.asset.address, provenWallet); + this.logger.debug(`Full prover PXE started!!`); + return this; + } + + snapshot = ( + name: string, + apply: (context: SubsystemsContext) => Promise, + restore: (snapshotData: T, context: SubsystemsContext) => Promise = () => Promise.resolve(), + ): Promise => this.snapshotManager.snapshot(name, apply, restore); + + async teardown() { + await this.snapshotManager.teardown(); + + // Cleanup related to the second 'full prover' PXE + await this.provenPXETeardown?.(); + + if (this.directoryToCleanup) { + await fs.rm(this.directoryToCleanup, { recursive: true, force: true }); + } + } + + async addPendingShieldNoteToPXE(accountIndex: number, amount: bigint, secretHash: Fr, txHash: TxHash) { + const note = new Note([new Fr(amount), secretHash]); + const extendedNote = new ExtendedNote( + note, + this.accounts[accountIndex].address, + this.asset.address, + TokenContract.storage.pending_shields.slot, + TokenContract.notes.TransparentNote.id, + txHash, + ); + await this.wallets[accountIndex].addNote(extendedNote); + } + + async applyMintSnapshot() { + await this.snapshotManager.snapshot( + 'mint', + async () => { + const { asset, accounts } = this; + const amount = 10000n; + + this.logger.verbose(`Minting ${amount} publicly...`); + await asset.methods.mint_public(accounts[0].address, amount).send().wait(); + + this.logger.verbose(`Minting ${amount} privately...`); + const secret = Fr.random(); + const secretHash = computeSecretHash(secret); + const receipt = await 
asset.methods.mint_private(amount, secretHash).send().wait(); + + await this.addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); + const txClaim = asset.methods.redeem_shield(accounts[0].address, amount, secret).send(); + await txClaim.wait({ debug: true }); + this.logger.verbose(`Minting complete.`); + + return { amount }; + }, + async ({ amount }) => { + const { + asset, + accounts: [{ address }], + tokenSim, + } = this; + tokenSim.mintPublic(address, amount); + + const publicBalance = await asset.methods.balance_of_public(address).simulate(); + this.logger.verbose(`Public balance of wallet 0: ${publicBalance}`); + expect(publicBalance).toEqual(this.tokenSim.balanceOfPublic(address)); + + tokenSim.mintPrivate(amount); + tokenSim.redeemShield(address, amount); + const privateBalance = await asset.methods.balance_of_private(address).simulate(); + this.logger.verbose(`Private balance of wallet 0: ${privateBalance}`); + expect(privateBalance).toEqual(tokenSim.balanceOfPrivate(address)); + + const totalSupply = await asset.methods.total_supply().simulate(); + this.logger.verbose(`Total supply: ${totalSupply}`); + expect(totalSupply).toEqual(tokenSim.totalSupply); + + return Promise.resolve(); + }, + ); + } +} diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index df58258d1f6..9dca4d82eb1 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -55,7 +55,13 @@ import { KeyRegistryContract } from '@aztec/noir-contracts.js'; import { GasTokenContract } from '@aztec/noir-contracts.js/GasToken'; import { getCanonicalGasToken, getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; -import { PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; +import { + type BBNativeProofCreator, + PXEService, + type PXEServiceConfig, + createPXEService, + getPXEServiceConfig, +} from '@aztec/pxe'; import { type SequencerClient } from '@aztec/sequencer-client'; import { type Anvil, createAnvil } from '@viem/anvil'; @@ -87,6 +93,9 @@ const { TEMP_DIR = '/tmp', ACVM_BINARY_PATH = '', ACVM_WORKING_DIRECTORY = '', + BB_BINARY_PATH = '', + BB_WORKING_DIRECTORY = '', + BB_RELEASE_DIR = 'cpp/build/bin', } = process.env; const getAztecUrl = () => { @@ -115,6 +124,28 @@ const getACVMConfig = async (logger: DebugLogger) => { } }; +// Determines if we have access to the bb binary and a tmp folder for temp files +export const getBBConfig = async (logger: DebugLogger) => { + try { + const expectedBBPath = BB_BINARY_PATH + ? BB_BINARY_PATH + : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../barretenberg/', BB_RELEASE_DIR)}/bb`; + await fs.access(expectedBBPath, fs.constants.R_OK); + const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; + const bbWorkingDirectory = BB_WORKING_DIRECTORY ? BB_WORKING_DIRECTORY : `${tempWorkingDirectory}/bb`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + logger.info(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); + return { + bbWorkingDirectory, + expectedBBPath, + directoryToCleanup: BB_WORKING_DIRECTORY ? 
undefined : tempWorkingDirectory, + }; + } catch (err) { + logger.error(`Native BB not available, error: ${err}`); + return undefined; + } +}; + export const setupL1Contracts = async ( l1RpcUrl: string, account: HDAccount | PrivateKeyAccount, @@ -182,6 +213,7 @@ async function initGasBridge({ walletClient, l1ContractAddresses }: DeployL1Cont * @param firstPrivKey - The private key of the first account to be created. * @param logger - The logger to be used. * @param useLogSuffix - Whether to add a randomly generated suffix to the PXE debug logs. + * @param proofCreator - An optional proof creator to use * @returns Private eXecution Environment (PXE), accounts, wallets and logger. */ export async function setupPXEService( @@ -190,11 +222,12 @@ export async function setupPXEService( opts: Partial = {}, logger = getLogger(), useLogSuffix = false, + proofCreator?: BBNativeProofCreator, ): Promise<{ /** * The PXE instance. */ - pxe: PXE; + pxe: PXEService; /** * The wallets to be used. */ @@ -209,7 +242,7 @@ export async function setupPXEService( teardown: () => Promise; }> { const pxeServiceConfig = { ...getPXEServiceConfig(), ...opts }; - const pxe = await createPXEService(aztecNode, pxeServiceConfig, useLogSuffix); + const pxe = await createPXEService(aztecNode, pxeServiceConfig, useLogSuffix, proofCreator); const wallets = await createAccounts(pxe, numberOfAccounts); diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index cc31ec2dde7..6e48dddc7a3 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -115,6 +115,14 @@ export const PrivateKernelTailArtifact = PrivateKernelTailJson as NoirCompiledCi export const PrivateKernelTailToPublicArtifact = PrivateKernelTailToPublicJson as NoirCompiledCircuit; +export const SimulatedPublicKernelSetupArtifact = PublicKernelSetupSimulatedJson as NoirCompiledCircuit; + +export const SimulatedPublicKernelAppLogicArtifact = PublicKernelAppLogicSimulatedJson as NoirCompiledCircuit; + +export const SimulatedPublicKernelTeardownArtifact = PublicKernelTeardownSimulatedJson as NoirCompiledCircuit; + +export const SimulatedPublicKernelTailArtifact = PublicKernelTailSimulatedJson as NoirCompiledCircuit; + export const PublicKernelSetupArtifact = PublicKernelSetupSimulatedJson as NoirCompiledCircuit; export const PublicKernelAppLogicArtifact = PublicKernelAppLogicSimulatedJson as NoirCompiledCircuit; @@ -149,7 +157,8 @@ export type ServerProtocolArtifact = export type ClientProtocolArtifact = | 'PrivateKernelInitArtifact' | 'PrivateKernelInnerArtifact' - | 'PrivateKernelTailArtifact'; + | 'PrivateKernelTailArtifact' + | 'PrivateKernelTailToPublicArtifact'; export type ProtocolArtifact = ServerProtocolArtifact | ClientProtocolArtifact; @@ -169,12 +178,14 @@ export const ClientCircuitArtifacts: Record = { PrivateKernelInitArtifact: PrivateKernelInitArtifact, PrivateKernelInnerArtifact: PrivateKernelInnerArtifact, PrivateKernelTailArtifact: PrivateKernelTailArtifact, + PrivateKernelTailToPublicArtifact: PrivateKernelTailToPublicArtifact, PublicKernelSetupArtifact: PublicKernelSetupArtifact, PublicKernelAppLogicArtifact: PublicKernelAppLogicArtifact, PublicKernelTeardownArtifact: PublicKernelTeardownArtifact, @@ -243,7 +254,6 @@ export async function executeTail( const params: TailInputType = { input: mapPrivateKernelTailCircuitPrivateInputsToNoir(privateInputs), }; - const returnType = await 
executePrivateKernelTailWithACVM(params); return mapPrivateKernelTailCircuitPublicInputsForRollupFromNoir(returnType); @@ -266,6 +276,122 @@ export async function executeTailForPublic( return mapPrivateKernelTailCircuitPublicInputsForPublicFromNoir(returnType); } +/** + * Converts the inputs of the private kernel init circuit into a witness map + * @param inputs - The private kernel inputs. + * @returns The witness map + */ +export function convertPrivateKernelInitInputsToWitnessMap( + privateKernelInitCircuitPrivateInputs: PrivateKernelInitCircuitPrivateInputs, +): WitnessMap { + const mapped = mapPrivateKernelInitCircuitPrivateInputsToNoir(privateKernelInitCircuitPrivateInputs); + const initialWitnessMap = abiEncode(PrivateKernelInitArtifact.abi as Abi, { input: mapped as any }); + return initialWitnessMap; +} + +/** + * Converts the inputs of the private kernel inner circuit into a witness map + * @param inputs - The private kernel inputs. + * @returns The witness map + */ +export function convertPrivateKernelInnerInputsToWitnessMap( + privateKernelInnerCircuitPrivateInputs: PrivateKernelInnerCircuitPrivateInputs, +): WitnessMap { + const mapped = mapPrivateKernelInnerCircuitPrivateInputsToNoir(privateKernelInnerCircuitPrivateInputs); + const initialWitnessMap = abiEncode(PrivateKernelInnerArtifact.abi as Abi, { input: mapped as any }); + return initialWitnessMap; +} + +/** + * Converts the inputs of the private kernel tail circuit into a witness map + * @param inputs - The private kernel inputs. + * @returns The witness map + */ +export function convertPrivateKernelTailInputsToWitnessMap( + privateKernelTailCircuitPrivateInputs: PrivateKernelTailCircuitPrivateInputs, +): WitnessMap { + const mapped = mapPrivateKernelTailCircuitPrivateInputsToNoir(privateKernelTailCircuitPrivateInputs); + const initialWitnessMap = abiEncode(PrivateKernelTailArtifact.abi as Abi, { input: mapped as any }); + return initialWitnessMap; +} + +/** + * Converts the inputs of the private kernel tail to public circuit into a witness map + * @param inputs - The private kernel inputs. + * @returns The witness map + */ +export function convertPrivateKernelTailToPublicInputsToWitnessMap( + privateKernelTailToPublicCircuitPrivateInputs: PrivateKernelTailCircuitPrivateInputs, +): WitnessMap { + const mapped = mapPrivateKernelTailToPublicCircuitPrivateInputsToNoir(privateKernelTailToPublicCircuitPrivateInputs); + const initialWitnessMap = abiEncode(PrivateKernelTailToPublicArtifact.abi as Abi, { input: mapped as any }); + return initialWitnessMap; +} + +/** + * Converts the outputs of the private kernel init circuit from a witness map. + * @param outputs - The private kernel outputs as a witness map. + * @returns The public inputs. + */ +export function convertPrivateKernelInitOutputsFromWitnessMap(outputs: WitnessMap): PrivateKernelCircuitPublicInputs { + // Decode the witness map into two fields, the return values and the inputs + const decodedInputs: DecodedInputs = abiDecode(PrivateKernelInitArtifact.abi as Abi, outputs); + + // Cast the inputs as the return type + const returnType = decodedInputs.return_value as InitReturnType; + + return mapPrivateKernelCircuitPublicInputsFromNoir(returnType); +} + +/** + * Converts the outputs of the private kernel inner circuit from a witness map. + * @param outputs - The private kernel outputs as a witness map. + * @returns The public inputs. 
+ */ +export function convertPrivateKernelInnerOutputsFromWitnessMap(outputs: WitnessMap): PrivateKernelCircuitPublicInputs { + // Decode the witness map into two fields, the return values and the inputs + const decodedInputs: DecodedInputs = abiDecode(PrivateKernelInnerArtifact.abi as Abi, outputs); + + // Cast the inputs as the return type + const returnType = decodedInputs.return_value as InnerReturnType; + + return mapPrivateKernelCircuitPublicInputsFromNoir(returnType); +} + +/** + * Converts the outputs of the private kernel tail circuit from a witness map. + * @param outputs - The private kernel outputs as a witness map. + * @returns The public inputs. + */ +export function convertPrivateKernelTailOutputsFromWitnessMap( + outputs: WitnessMap, +): PrivateKernelTailCircuitPublicInputs { + // Decode the witness map into two fields, the return values and the inputs + const decodedInputs: DecodedInputs = abiDecode(PrivateKernelTailArtifact.abi as Abi, outputs); + + // Cast the inputs as the return type + const returnType = decodedInputs.return_value as TailReturnType; + + return mapPrivateKernelTailCircuitPublicInputsForRollupFromNoir(returnType); +} + +/** + * Converts the outputs of the private kernel tail for public circuit from a witness map. + * @param outputs - The private kernel outputs as a witness map. + * @returns The public inputs. + */ +export function convertPrivateKernelTailForPublicOutputsFromWitnessMap( + outputs: WitnessMap, +): PrivateKernelTailCircuitPublicInputs { + // Decode the witness map into two fields, the return values and the inputs + const decodedInputs: DecodedInputs = abiDecode(PrivateKernelTailToPublicArtifact.abi as Abi, outputs); + + // Cast the inputs as the return type + const returnType = decodedInputs.return_value as PublicPublicPreviousReturnType; + + return mapPrivateKernelTailCircuitPublicInputsForPublicFromNoir(returnType); +} + /** * Converts the inputs of the base parity circuit into a witness map. * @param inputs - The base parity inputs. diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index a7c36b728b7..6d81c8f3f7e 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -48,6 +48,7 @@ ] }, "dependencies": { + "@aztec/bb.js": "portal:../../barretenberg/ts", "@aztec/builder": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", @@ -59,6 +60,7 @@ "@aztec/protocol-contracts": "workspace:^", "@aztec/simulator": "workspace:^", "@aztec/types": "workspace:^", + "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi", "koa": "^2.14.2", "koa-router": "^12.0.0", "lodash.omit": "^4.5.0", diff --git a/yarn-project/pxe/src/config/index.ts b/yarn-project/pxe/src/config/index.ts index 6bfe020a16e..acd6cdae938 100644 --- a/yarn-project/pxe/src/config/index.ts +++ b/yarn-project/pxe/src/config/index.ts @@ -5,9 +5,24 @@ import { dirname, resolve } from 'path'; import { fileURLToPath } from 'url'; /** - * Configuration settings for the PXE Service. + * Temporary configuration until WASM can be used instead of native */ -export interface PXEServiceConfig { +export interface BBProverConfig { + bbWorkingDirectory?: string; + bbBinaryPath?: string; +} + +/** + * Configuration settings for the prover factory + */ +export interface KernelProverConfig { + /** Whether we are running with real proofs */ + proverEnabled?: boolean; +} +/** + * Configuration settings for the PXE. + */ +export interface PXEConfig { /** The interval to wait between polling for new blocks. 
*/ l2BlockPollingIntervalMS: number; /** L2 block to start scanning from for new accounts */ @@ -16,6 +31,8 @@ export interface PXEServiceConfig { dataDirectory?: string; } +export type PXEServiceConfig = PXEConfig & KernelProverConfig & BBProverConfig; + /** * Creates an instance of PXEServiceConfig out of environment variables using sensible defaults for integration testing if not set. */ diff --git a/yarn-project/pxe/src/index.ts b/yarn-project/pxe/src/index.ts index d7cf6d57253..df1d9447e6f 100644 --- a/yarn-project/pxe/src/index.ts +++ b/yarn-project/pxe/src/index.ts @@ -9,3 +9,6 @@ export * from '@aztec/foundation/fields'; export * from '@aztec/foundation/eth-address'; export * from '@aztec/foundation/aztec-address'; export * from '@aztec/key-store'; + +// Temporarily used in e2e client prover integration test +export { BBNativeProofCreator } from './kernel_prover/bb_prover/bb_native_proof_creator.js'; diff --git a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts new file mode 100644 index 00000000000..7f4fba37064 --- /dev/null +++ b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts @@ -0,0 +1,713 @@ +import { + Fr, + type PrivateCircuitPublicInputs, + type PrivateKernelCircuitPublicInputs, + type PrivateKernelInitCircuitPrivateInputs, + type PrivateKernelInnerCircuitPrivateInputs, + type PrivateKernelTailCircuitPrivateInputs, + type PrivateKernelTailCircuitPublicInputs, + Proof, + type VERIFICATION_KEY_LENGTH_IN_FIELDS, + makeEmptyProof, +} from '@aztec/circuits.js'; +import { siloNoteHash } from '@aztec/circuits.js/hash'; +import { randomBytes, sha256 } from '@aztec/foundation/crypto'; +import { type LogFn, createDebugLogger } from '@aztec/foundation/log'; +import { type Tuple } from '@aztec/foundation/serialize'; +import { Timer } from '@aztec/foundation/timer'; +import { + ClientCircuitArtifacts, + type ClientProtocolArtifact, + convertPrivateKernelInitInputsToWitnessMap, + convertPrivateKernelInitOutputsFromWitnessMap, + convertPrivateKernelInnerInputsToWitnessMap, + convertPrivateKernelInnerOutputsFromWitnessMap, + convertPrivateKernelTailForPublicOutputsFromWitnessMap, + convertPrivateKernelTailOutputsFromWitnessMap, + executeTail, + executeTailForPublic, +} from '@aztec/noir-protocol-circuits-types'; +import { type ACVMField, WASMSimulator } from '@aztec/simulator'; +import { type NoirCompiledCircuit } from '@aztec/types/noir'; + +import { type WitnessMap } from '@noir-lang/acvm_js'; +import { serializeWitness } from '@noir-lang/noirc_abi'; +import * as proc from 'child_process'; +import * as fs from 'fs/promises'; + +import { type ProofCreator, type ProofOutput } from '../interface/proof_creator.js'; + +/** + * Temporary implementation of ProofCreator using the native bb binary. 
+ * Will be replaced by the WASM equivalent once ready + */ + +const VK_FILENAME = 'vk'; +const VK_FIELDS_FILENAME = 'vk_fields.json'; +const PROOF_FILENAME = 'proof'; +//const PROOF_FIELDS_FILENAME = 'proof_fields.json'; + +//const AGGREGATION_OBJECT_SIZE = 16; +const CIRCUIT_SIZE_INDEX = 3; +const CIRCUIT_PUBLIC_INPUTS_INDEX = 4; +const CIRCUIT_RECURSIVE_INDEX = 5; + +enum BB_RESULT { + SUCCESS, + FAILURE, + ALREADY_PRESENT, +} + +type BBSuccess = { + status: BB_RESULT.SUCCESS | BB_RESULT.ALREADY_PRESENT; + duration: number; + pkPath?: string; + vkPath?: string; + proofPath?: string; +}; + +type BBFailure = { + status: BB_RESULT.FAILURE; + reason: string; +}; + +/** + * Invokes the Barretenberg binary with the provided command and args + * @param pathToBB - The path to the BB binary + * @param command - The command to execute + * @param args - The arguments to pass + * @param logger - A log function + * @param resultParser - An optional handler for detecting success or failure + * @returns The completed partial witness outputted from the circuit + */ +function executeBB( + pathToBB: string, + command: string, + args: string[], + logger: LogFn, + resultParser = (code: number) => code === 0, +) { + return new Promise((resolve, reject) => { + // spawn the bb process + const bb = proc.spawn(pathToBB, [command, ...args]); + bb.stdout.on('data', data => { + const message = data.toString('utf-8').replace(/\n$/, ''); + logger(message); + }); + bb.stderr.on('data', data => { + const message = data.toString('utf-8').replace(/\n$/, ''); + logger(message); + }); + bb.on('close', (code: number) => { + if (resultParser(code)) { + resolve(BB_RESULT.SUCCESS); + } else { + reject(); + } + }); + }).catch(_ => BB_RESULT.FAILURE); +} + +/** + * Used for generating proofs of noir circuits. + * It is assumed that the working directory is a temporary and/or random directory used solely for generating this proof. + * @param pathToBB - The full path to the bb binary + * @param workingDirectory - A working directory for use by bb + * @param circuitName - An identifier for the circuit + * @param bytecode - The compiled circuit bytecode + * @param inputWitnessFile - The circuit input witness + * @param log - A logging function + * @returns An object containing a result indication, the location of the proof and the duration taken + */ +export async function generateProof( + pathToBB: string, + workingDirectory: string, + circuitName: string, + bytecode: Buffer, + inputWitnessFile: string, + log: LogFn, +): Promise { + // Check that the working directory exists + try { + await fs.access(workingDirectory); + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `Working directory ${workingDirectory} does not exist` }; + } + + // The bytecode is written to e.g. /workingDirectory/BaseParityArtifact-bytecode + const bytecodePath = `${workingDirectory}/${circuitName}-bytecode`; + + // The proof is written to e.g. 
/workingDirectory/proof + const outputPath = `${workingDirectory}`; + + const binaryPresent = await fs + .access(pathToBB, fs.constants.R_OK) + .then(_ => true) + .catch(_ => false); + if (!binaryPresent) { + return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; + } + + try { + // Write the bytecode to the working directory + await fs.writeFile(bytecodePath, bytecode); + const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile, '-v']; + const timer = new Timer(); + const logFunction = (message: string) => { + log(`${circuitName} BB out - ${message}`); + }; + const result = await executeBB(pathToBB, 'prove_output_all', args, logFunction); + const duration = timer.ms(); + // cleanup the bytecode + await fs.rm(bytecodePath, { force: true }); + if (result == BB_RESULT.SUCCESS) { + return { + status: BB_RESULT.SUCCESS, + duration, + proofPath: `${outputPath}`, + pkPath: undefined, + vkPath: `${outputPath}`, + }; + } + // Not a great error message here but it is difficult to decipher what comes from bb + return { status: BB_RESULT.FAILURE, reason: `Failed to generate proof` }; + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; + } +} + +/** + * Used for verifying proofs of noir circuits + * @param pathToBB - The full path to the bb binary + * @param proofFullPath - The full path to the proof to be verified + * @param verificationKeyPath - The full path to the circuit verification key + * @param log - A logging function + * @returns An object containing a result indication and duration taken + */ +async function verifyProof( + pathToBB: string, + proofFullPath: string, + verificationKeyPath: string, + log: LogFn, +): Promise { + const binaryPresent = await fs + .access(pathToBB, fs.constants.R_OK) + .then(_ => true) + .catch(_ => false); + if (!binaryPresent) { + return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; + } + + try { + const args = ['-p', proofFullPath, '-k', verificationKeyPath]; + const timer = new Timer(); + const result = await executeBB(pathToBB, 'verify', args, log); + const duration = timer.ms(); + if (result == BB_RESULT.SUCCESS) { + return { status: BB_RESULT.SUCCESS, duration }; + } + // Not a great error message here but it is difficult to decipher what comes from bb + return { status: BB_RESULT.FAILURE, reason: `Failed to verify proof` }; + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; + } +} + +const bytecodeHashFilename = 'bytecode_hash'; +const bytecodeFilename = 'bytecode'; + +/** + * Used for generating either a proving or verification key, will exit early if the key already exists + * It assumes the provided working directory is one where the caller wishes to maintain a permanent set of keys + * It is not considered a temporary directory + * @param pathToBB - The full path to the bb binary + * @param workingDirectory - The directory into which the key should be created + * @param circuitName - An identifier for the circuit + * @param compiledCircuit - The compiled circuit + * @param key - The type of key, either 'pk' or 'vk' + * @param log - A logging function + * @param force - Force the key to be regenerated even if it already exists + * @returns An instance of BBResult + */ +export async function generateKeyForNoirCircuit( + pathToBB: string, + workingDirectory: string, + circuitName: string, + compiledCircuit: NoirCompiledCircuit, + key: 'vk' | 'pk', + log: LogFn, + force = false, +): Promise { + const bytecode = 
Buffer.from(compiledCircuit.bytecode, 'base64'); + + // The key generation is written to e.g. /workingDirectory/pk/BaseParityArtifact/pk + // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash + // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact/bytecode + // The bytecode is removed after the key is generated, leaving just the hash file + const circuitOutputDirectory = `${workingDirectory}/${key}/${circuitName}`; + const bytecodeHashPath = `${circuitOutputDirectory}/${bytecodeHashFilename}`; + const bytecodePath = `${circuitOutputDirectory}/${bytecodeFilename}`; + const bytecodeHash = sha256(bytecode); + + const outputPath = `${circuitOutputDirectory}`; + + // ensure the directory exists + await fs.mkdir(circuitOutputDirectory, { recursive: true }); + + // Generate the key if we have been told to, or there is no bytecode hash + let mustRegenerate = + force || + (await fs + .access(bytecodeHashPath, fs.constants.R_OK) + .then(_ => false) + .catch(_ => true)); + + if (!mustRegenerate) { + // Check to see if the bytecode hash has changed from the stored value + const data: Buffer = await fs.readFile(bytecodeHashPath).catch(_ => Buffer.alloc(0)); + mustRegenerate = data.length == 0 || !data.equals(bytecodeHash); + } + + if (!mustRegenerate) { + // No need to generate, early out + return { + status: BB_RESULT.ALREADY_PRESENT, + duration: 0, + pkPath: key === 'pk' ? outputPath : undefined, + vkPath: key === 'vk' ? outputPath : undefined, + proofPath: undefined, + }; + } + + // Check we have access to bb + const binaryPresent = await fs + .access(pathToBB, fs.constants.R_OK) + .then(_ => true) + .catch(_ => false); + if (!binaryPresent) { + return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; + } + + // We are now going to generate the key + try { + // Write the bytecode to the working directory + await fs.writeFile(bytecodePath, bytecode); + + // args are the output path and the input bytecode path + const args = ['-o', outputPath, '-b', bytecodePath]; + const timer = new Timer(); + let result = await executeBB(pathToBB, `write_${key}`, args, log); + // If we succeeded and the type of key if verification, have bb write the 'fields' version too + if (result == BB_RESULT.SUCCESS && key === 'vk') { + const asFieldsArgs = ['-k', `${outputPath}/${VK_FILENAME}`, '-o', `${outputPath}/${VK_FIELDS_FILENAME}`, '-v']; + result = await executeBB(pathToBB, `vk_as_fields`, asFieldsArgs, log); + } + const duration = timer.ms(); + // Cleanup the bytecode file + await fs.rm(bytecodePath, { force: true }); + if (result == BB_RESULT.SUCCESS) { + // Store the bytecode hash so we don't need to regenerate at a later time + await fs.writeFile(bytecodeHashPath, bytecodeHash); + return { + status: BB_RESULT.SUCCESS, + duration, + pkPath: key === 'pk' ? outputPath : undefined, + vkPath: key === 'vk' ? 
outputPath : undefined, + proofPath: undefined, + }; + } + // Not a great error message here but it is difficult to decipher what comes from bb + return { status: BB_RESULT.FAILURE, reason: `Failed to generate key` }; + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; + } +} + +/** + * Used for verifying proofs of noir circuits + * @param pathToBB - The full path to the bb binary + * @param verificationKeyPath - The directory containing the binary verification key + * @param verificationKeyFilename - The filename of the verification key + * @param log - A logging function + * @returns An object containing a result indication and duration taken + */ +// async function writeVkAsFields( +// pathToBB: string, +// verificationKeyPath: string, +// verificationKeyFilename: string, +// log: LogFn, +// ): Promise { +// const binaryPresent = await fs +// .access(pathToBB, fs.constants.R_OK) +// .then(_ => true) +// .catch(_ => false); +// if (!binaryPresent) { +// return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; +// } + +// try { +// const args = ['-k', `${verificationKeyPath}/${verificationKeyFilename}`, '-v']; +// const timer = new Timer(); +// const result = await executeBB(pathToBB, 'vk_as_fields', args, log); +// const duration = timer.ms(); +// if (result == BB_RESULT.SUCCESS) { +// return { status: BB_RESULT.SUCCESS, duration, vkPath: verificationKeyPath }; +// } +// // Not a great error message here but it is difficult to decipher what comes from bb +// return { status: BB_RESULT.FAILURE, reason: `Failed to create vk as fields` }; +// } catch (error) { +// return { status: BB_RESULT.FAILURE, reason: `${error}` }; +// } +// } + +/** + * Used for verifying proofs of noir circuits + * @param pathToBB - The full path to the bb binary + * @param proofPath - The directory containing the binary proof + * @param proofFileName - The filename of the proof + * @param log - A logging function + * @returns An object containing a result indication and duration taken + */ +// async function writeProofAsFields( +// pathToBB: string, +// proofPath: string, +// proofFileName: string, +// log: LogFn, +// ): Promise { +// const binaryPresent = await fs +// .access(pathToBB, fs.constants.R_OK) +// .then(_ => true) +// .catch(_ => false); +// if (!binaryPresent) { +// return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; +// } + +// try { +// const args = ['-p', `${proofPath}/${proofFileName}`, '-v']; +// const timer = new Timer(); +// const result = await executeBB(pathToBB, 'proof_as_fields', args, log); +// const duration = timer.ms(); +// if (result == BB_RESULT.SUCCESS) { +// return { status: BB_RESULT.SUCCESS, duration, proofPath: proofPath }; +// } +// // Not a great error message here but it is difficult to decipher what comes from bb +// return { status: BB_RESULT.FAILURE, reason: `Failed to create proof as fields` }; +// } catch (error) { +// return { status: BB_RESULT.FAILURE, reason: `${error}` }; +// } +// } + +type PrivateKernelProvingOps = { + convertOutputs: (outputs: WitnessMap) => PrivateKernelCircuitPublicInputs | PrivateKernelTailCircuitPublicInputs; +}; + +const KernelArtifactMapping: Record = { + PrivateKernelInitArtifact: { + convertOutputs: convertPrivateKernelInitOutputsFromWitnessMap, + }, + PrivateKernelInnerArtifact: { + convertOutputs: convertPrivateKernelInnerOutputsFromWitnessMap, + }, + PrivateKernelTailArtifact: { + convertOutputs: convertPrivateKernelTailOutputsFromWitnessMap, 
+ }, + PrivateKernelTailToPublicArtifact: { + convertOutputs: convertPrivateKernelTailForPublicOutputsFromWitnessMap, + }, +}; + +type VerificationKeyData = { + hash: Fr; + keyAsFields: Tuple; + keyAsBytes: Buffer; + numPublicInputs: number; + circuitSize: number; + isRecursive: boolean; +}; + +/** + * This proof creator implementation uses the native bb binary. + * This is a temporary implementation until we make the WASM version work. + */ +export class BBNativeProofCreator implements ProofCreator { + private simulator = new WASMSimulator(); + + private verificationKeys: Map> = new Map< + ClientProtocolArtifact, + Promise + >(); + + constructor( + private bbBinaryPath: string, + private bbWorkingDirectory: string, + private log = createDebugLogger('aztec:bb-native-prover'), + ) {} + + public getSiloedCommitments(publicInputs: PrivateCircuitPublicInputs) { + const contractAddress = publicInputs.callContext.storageContractAddress; + + return Promise.resolve( + publicInputs.newNoteHashes.map(commitment => siloNoteHash(contractAddress, commitment.value)), + ); + } + + public async createProofInit( + inputs: PrivateKernelInitCircuitPrivateInputs, + ): Promise> { + const witnessMap = convertPrivateKernelInitInputsToWitnessMap(inputs); + return await this.createSafeProof(witnessMap, 'PrivateKernelInitArtifact'); + } + + public async createProofInner( + inputs: PrivateKernelInnerCircuitPrivateInputs, + ): Promise> { + const witnessMap = convertPrivateKernelInnerInputsToWitnessMap(inputs); + return await this.createSafeProof(witnessMap, 'PrivateKernelInnerArtifact'); + } + + public async createProofTail( + inputs: PrivateKernelTailCircuitPrivateInputs, + ): Promise> { + // if (!inputs.isForPublic()) { + // const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs); + // return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact'); + // } + + if (!inputs.isForPublic()) { + const result = await executeTail(inputs); + return { + publicInputs: result, + proof: makeEmptyProof(), + }; + } + // const witnessMap = convertPrivateKernelTailToPublicInputsToWitnessMap(inputs); + // return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact'); + const result = await executeTailForPublic(inputs); + return { + publicInputs: result, + proof: makeEmptyProof(), + }; + } + + public async createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise { + const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(directory, { recursive: true }); + this.log.debug(`Created directory: ${directory}`); + try { + this.log.debug(`Proving app circuit`); + const proof = await this.createProof(directory, partialWitness, bytecode, 'App'); + return new Proof(proof); + } finally { + await fs.rm(directory, { recursive: true, force: true }); + this.log.debug(`Deleted directory: ${directory}`); + } + } + + /** + * Verifies a proof, will generate the verification key if one is not cached internally + * @param circuitType - The type of circuit whose proof is to be verified + * @param proof - The proof to be verified + */ + public async verifyProof(circuitType: ClientProtocolArtifact, proof: Proof) { + // Create random directory to be used for temp files + const bbWorkingDirectory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + const proofFileName = `${bbWorkingDirectory}/proof`; + const verificationKeyPath = `${bbWorkingDirectory}/vk`; + const verificationKey = 
await this.getVerificationKeyDataForCircuit(circuitType); + + this.log.debug(`Verifying with key: ${verificationKey.hash.toString()}`); + + await fs.writeFile(proofFileName, proof.buffer); + await fs.writeFile(verificationKeyPath, verificationKey.keyAsBytes); + + const logFunction = (message: string) => { + this.log.debug(`${circuitType} BB out - ${message}`); + }; + + const result = await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); + + await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + + if (result.status === BB_RESULT.FAILURE) { + const errorMessage = `Failed to verify ${circuitType} proof!`; + throw new Error(errorMessage); + } + + this.log.info(`Successfully verified ${circuitType} proof in ${result.duration} ms`); + } + + /** + * Returns the verification key data for a circuit, will generate and cache it if not cached internally + * @param circuitType - The type of circuit for which the verification key is required + * @returns The verification key data + */ + private async getVerificationKeyDataForCircuit(circuitType: ClientProtocolArtifact): Promise { + let promise = this.verificationKeys.get(circuitType); + if (!promise) { + promise = generateKeyForNoirCircuit( + this.bbBinaryPath, + this.bbWorkingDirectory, + circuitType, + ClientCircuitArtifacts[circuitType], + 'vk', + this.log.debug, + ).then(result => { + if (result.status === BB_RESULT.FAILURE) { + throw new Error(`Failed to generate verification key for ${circuitType}, ${result.reason}`); + } + return this.convertVk(result.vkPath!); + }); + this.verificationKeys.set(circuitType, promise); + } + return await promise; + } + + /** + * Reads the verification key data stored at the specified location and parses into a VerificationKeyData + * @param filePath - The directory containing the verification key data files + * @returns The verification key data + */ + private async convertVk(filePath: string): Promise { + const [rawFields, rawBinary] = await Promise.all([ + fs.readFile(`${filePath}/${VK_FIELDS_FILENAME}`, { encoding: 'utf-8' }), + fs.readFile(`${filePath}/${VK_FILENAME}`), + ]); + const fieldsJson = JSON.parse(rawFields); + const fields = fieldsJson.map(Fr.fromString); + // The first item is the hash, this is not part of the actual VK + const vkHash = fields[0]; + const actualVk = fields.slice(1); + const vk: VerificationKeyData = { + hash: vkHash, + keyAsFields: actualVk as Tuple, + keyAsBytes: rawBinary, + numPublicInputs: Number(actualVk[CIRCUIT_PUBLIC_INPUTS_INDEX]), + circuitSize: Number(actualVk[CIRCUIT_SIZE_INDEX]), + isRecursive: actualVk[CIRCUIT_RECURSIVE_INDEX] == Fr.ONE, + }; + return vk; + } + + /** + * Ensures our verification key cache includes the key data located at the specified directory + * @param filePath - The directory containing the verification key data files + * @param circuitType - The type of circuit to which the verification key corresponds + */ + private async updateVerificationKeyAfterProof(filePath: string, circuitType: ClientProtocolArtifact) { + let promise = this.verificationKeys.get(circuitType); + if (!promise) { + promise = this.convertVk(filePath); + this.log.debug(`Updated verification key for circuit: ${circuitType}`); + this.verificationKeys.set(circuitType, promise); + } + await promise; + } + + private async createSafeProof(inputs: WitnessMap, circuitType: ClientProtocolArtifact): Promise> { + const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(directory, { recursive: true }); + 
this.log.debug(`Created directory: ${directory}`); + try { + return await this.generateWitnessAndCreateProof(inputs, circuitType, directory); + } finally { + await fs.rm(directory, { recursive: true, force: true }); + this.log.debug(`Deleted directory: ${directory}`); + } + } + + private async generateWitnessAndCreateProof( + inputs: WitnessMap, + circuitType: ClientProtocolArtifact, + directory: string, + ): Promise> { + this.log.debug(`Generating witness for ${circuitType}`); + const compiledCircuit: NoirCompiledCircuit = ClientCircuitArtifacts[circuitType]; + + const outputWitness = await this.simulator.simulateCircuit(inputs, compiledCircuit); + + this.log.debug(`Generated witness for ${circuitType}`); + + const publicInputs = KernelArtifactMapping[circuitType].convertOutputs(outputWitness) as T; + + const proofBuffer = await this.createProof( + directory, + outputWitness, + Buffer.from(compiledCircuit.bytecode, 'base64'), + circuitType, + ); + + const proofOutput: ProofOutput = { + publicInputs, + proof: new Proof(proofBuffer), + }; + return proofOutput; + } + + private async createProof( + directory: string, + partialWitness: WitnessMap, + bytecode: Buffer, + circuitType: ClientProtocolArtifact | 'App', + ) { + const compressedBincodedWitness = serializeWitness(partialWitness); + + const inputsWitnessFile = `${directory}/witness.gz`; + + await fs.writeFile(inputsWitnessFile, compressedBincodedWitness); + + this.log.debug(`Written ${inputsWitnessFile}`); + + const provingResult = await generateProof( + this.bbBinaryPath, + directory, + circuitType, + bytecode, + inputsWitnessFile, + this.log.debug, + ); + + if (provingResult.status === BB_RESULT.FAILURE) { + this.log.error(`Failed to generate proof for ${circuitType}: ${provingResult.reason}`); + throw new Error(provingResult.reason); + } + + if (circuitType !== 'App') { + await this.updateVerificationKeyAfterProof(directory, circuitType); + } + const proofFile = `${directory}/${PROOF_FILENAME}`; + return await fs.readFile(proofFile); + } + + /** + * Parses and returns the proof data stored at the specified directory + * @param filePath - The directory containing the proof data + * @param circuitType - The type of circuit proven + * @returns The proof + */ + // private async readProofAsFields( + // filePath: string, + // circuitType: ClientProtocolArtifact, + // ): Promise> { + // const [binaryProof, proofString] = await Promise.all([ + // fs.readFile(`${filePath}/${PROOF_FILENAME}`), + // fs.readFile(`${filePath}/${PROOF_FIELDS_FILENAME}`, { encoding: 'utf-8' }), + // ]); + // const json = JSON.parse(proofString); + // const fields = json.map(Fr.fromString); + // const vkData = await this.verificationKeys.get(circuitType); + // if (!vkData) { + // throw new Error(`Invalid verification key for ${circuitType}`); + // } + // const numPublicInputs = CIRCUITS_WITHOUT_AGGREGATION.has(circuitType) + // ? 
vkData.numPublicInputs + // : vkData.numPublicInputs - AGGREGATION_OBJECT_SIZE; + // const fieldsWithoutPublicInputs = fields.slice(numPublicInputs); + // logger.debug( + // `Circuit type: ${circuitType}, complete proof length: ${fields.length}, without public inputs: ${fieldsWithoutPublicInputs.length}, num public inputs: ${numPublicInputs}, circuit size: ${vkData.circuitSize}, is recursive: ${vkData.isRecursive}, raw length: ${binaryProof.length}`, + // ); + // const proof = new RecursiveProof(fieldsWithoutPublicInputs, new Proof(binaryProof)); + // return proof; + // } +} diff --git a/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts b/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts new file mode 100644 index 00000000000..5e085ae3b34 --- /dev/null +++ b/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts @@ -0,0 +1,79 @@ +import { + type PrivateCircuitPublicInputs, + type PrivateKernelCircuitPublicInputs, + type PrivateKernelInitCircuitPrivateInputs, + type PrivateKernelInnerCircuitPrivateInputs, + type PrivateKernelTailCircuitPrivateInputs, + type PrivateKernelTailCircuitPublicInputs, + type Proof, +} from '@aztec/circuits.js'; +import { type Fr } from '@aztec/foundation/fields'; +import { type ACVMField } from '@aztec/simulator'; + +/** + * Represents the output of the proof creation process for init and inner private kernel circuit. + * Contains the public inputs required for the init and inner private kernel circuit and the generated proof. + */ +export type ProofOutput = { + /** + * The public inputs required for the proof generation process. + */ + publicInputs: PublicInputsType; + /** + * The zk-SNARK proof for the kernel execution. + */ + proof: Proof; +}; + +/** + * ProofCreator provides functionality to create and validate proofs, and retrieve + * siloed commitments necessary for maintaining transaction privacy and security on the network. + */ +export interface ProofCreator { + /** + * Computes the siloed commitments for a given set of public inputs. + * + * @param publicInputs - The public inputs containing the contract address and new note hashes to be used in generating siloed note hashes. + * @returns An array of Fr (finite field) elements representing the siloed commitments. + */ + getSiloedCommitments(publicInputs: PrivateCircuitPublicInputs): Promise; + + /** + * Creates a proof output for a given signed transaction request and private call data for the first iteration. + * + * @param privateKernelInputsInit - The private data structure for the initial iteration. + * @returns A Promise resolving to a ProofOutput object containing public inputs and the kernel proof. + */ + createProofInit( + privateKernelInputsInit: PrivateKernelInitCircuitPrivateInputs, + ): Promise>; + + /** + * Creates a proof output for a given previous kernel data and private call data for an inner iteration. + * + * @param privateKernelInputsInner - The private input data structure for the inner iteration. + * @returns A Promise resolving to a ProofOutput object containing public inputs and the kernel proof. + */ + createProofInner( + privateKernelInputsInner: PrivateKernelInnerCircuitPrivateInputs, + ): Promise>; + + /** + * Creates a proof output based on the last inner kernel iteration kernel data for the final ordering iteration. + * + * @param privateKernelInputsTail - The private input data structure for the final ordering iteration. + * @returns A Promise resolving to a ProofOutput object containing public inputs and the kernel proof. 
+ */ + createProofTail( + privateKernelInputsTail: PrivateKernelTailCircuitPrivateInputs, + ): Promise>; + + /** + * Creates a proof for an app circuit. + * + * @param partialWitness - The witness produced via circuit simulation + * @param bytecode - The circuit bytecode in gzipped bincode format + * @returns A Promise resolving to a Proof object + */ + createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise; +} diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index 0f41bace642..6c9bb7f2065 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -24,8 +24,8 @@ import { type ExecutionResult, type NoteAndSlot } from '@aztec/simulator'; import { mock } from 'jest-mock-extended'; +import { type ProofCreator } from './interface/proof_creator.js'; import { KernelProver } from './kernel_prover.js'; -import { type ProofCreator } from './proof_creator.js'; import { type ProvingDataOracle } from './proving_data_oracle.js'; describe('Kernel Prover', () => { diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts index 9bcd0020b00..73612e9e751 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts @@ -9,6 +9,8 @@ import { PrivateKernelInitCircuitPrivateInputs, PrivateKernelInnerCircuitPrivateInputs, PrivateKernelTailCircuitPrivateInputs, + type PrivateKernelTailCircuitPublicInputs, + type Proof, type TxRequest, VK_TREE_HEIGHT, VerificationKey, @@ -20,12 +22,12 @@ import { assertLength } from '@aztec/foundation/serialize'; import { pushTestData } from '@aztec/foundation/testing'; import { type ExecutionResult, collectNoteHashLeafIndexMap, collectNullifiedNoteHashCounters } from '@aztec/simulator'; +import { type ProofCreator, type ProofOutput } from './interface/proof_creator.js'; import { buildPrivateKernelInnerHints, buildPrivateKernelTailHints, buildPrivateKernelTailOutputs, } from './private_inputs_builders/index.js'; -import { KernelProofCreator, type ProofCreator, type ProofOutput, type ProofOutputFinal } from './proof_creator.js'; import { type ProvingDataOracle } from './proving_data_oracle.js'; /** @@ -36,7 +38,8 @@ import { type ProvingDataOracle } from './proving_data_oracle.js'; */ export class KernelProver { private log = createDebugLogger('aztec:kernel-prover'); - constructor(private oracle: ProvingDataOracle, private proofCreator: ProofCreator = new KernelProofCreator()) {} + + constructor(private oracle: ProvingDataOracle, private proofCreator: ProofCreator) {} /** * Generate a proof for a given transaction request and execution result. @@ -48,12 +51,15 @@ export class KernelProver { * @param executionResult - The execution result object containing nested executions and preimages. * @returns A Promise that resolves to a KernelProverOutput object containing proof, public inputs, and output notes. 
    */
-  async prove(txRequest: TxRequest, executionResult: ExecutionResult): Promise<ProofOutputFinal> {
+  async prove(
+    txRequest: TxRequest,
+    executionResult: ExecutionResult,
+  ): Promise<ProofOutput<PrivateKernelTailCircuitPublicInputs>> {
     const executionStack = [executionResult];
     let firstIteration = true;
     let previousVerificationKey = VerificationKey.makeFake();
 
-    let output: ProofOutput = {
+    let output: ProofOutput<PrivateKernelCircuitPublicInputs> = {
       publicInputs: PrivateKernelCircuitPublicInputs.empty(),
       proof: makeEmptyProof(),
     };
@@ -70,10 +76,16 @@ export class KernelProver {
       );
       const publicCallRequests = currentExecution.enqueuedPublicFunctionCalls.map(result => result.toCallRequest());
 
+      const proof = await this.proofCreator.createAppCircuitProof(
+        currentExecution.partialWitness,
+        currentExecution.acir,
+      );
+
       const privateCallData = await this.createPrivateCallData(
         currentExecution,
         privateCallRequests,
         publicCallRequests,
+        proof,
       );
 
       const hints = buildPrivateKernelInnerHints(
@@ -129,6 +141,7 @@ export class KernelProver {
     { callStackItem, vk }: ExecutionResult,
     privateCallRequests: CallRequest[],
     publicCallRequests: CallRequest[],
+    proof: Proof,
   ) {
     const { contractAddress, functionData } = callStackItem;
 
@@ -154,9 +167,6 @@ export class KernelProver {
     // const acirHash = keccak256(Buffer.from(bytecode, 'hex'));
     const acirHash = Fr.fromBuffer(Buffer.alloc(32, 0));
 
-    // TODO
-    const proof = makeEmptyProof();
-
     return PrivateCallData.from({
       callStackItem,
       privateCallStack,
diff --git a/yarn-project/pxe/src/kernel_prover/proof_creator.ts b/yarn-project/pxe/src/kernel_prover/proof_creator.ts
deleted file mode 100644
index 4f48bf15a92..00000000000
--- a/yarn-project/pxe/src/kernel_prover/proof_creator.ts
+++ /dev/null
@@ -1,157 +0,0 @@
-import { type CircuitSimulationStats } from '@aztec/circuit-types/stats';
-import {
-  type PrivateCircuitPublicInputs,
-  type PrivateKernelCircuitPublicInputs,
-  type PrivateKernelInitCircuitPrivateInputs,
-  type PrivateKernelInnerCircuitPrivateInputs,
-  type PrivateKernelTailCircuitPrivateInputs,
-  type PrivateKernelTailCircuitPublicInputs,
-  type Proof,
-  makeEmptyProof,
-} from '@aztec/circuits.js';
-import { siloNoteHash } from '@aztec/circuits.js/hash';
-import { type Fr } from '@aztec/foundation/fields';
-import { createDebugLogger } from '@aztec/foundation/log';
-import { elapsed } from '@aztec/foundation/timer';
-import { executeInit, executeInner, executeTail, executeTailForPublic } from '@aztec/noir-protocol-circuits-types';
-
-/**
- * Represents the output of the proof creation process for init and inner private kernel circuit.
- * Contains the public inputs required for the init and inner private kernel circuit and the generated proof.
- */
-export interface ProofOutput {
-  /**
-   * The public inputs required for the proof generation process.
-   */
-  publicInputs: PrivateKernelCircuitPublicInputs;
-  /**
-   * The zk-SNARK proof for the kernel execution.
-   */
-  proof: Proof;
-}
-
-/**
- * Represents the output of the proof creation process for final ordering private kernel circuit.
- * Contains the public inputs required for the final ordering private kernel circuit and the generated proof.
- */
-export interface ProofOutputFinal {
-  /**
-   * The public inputs required for the proof generation process.
-   */
-  publicInputs: PrivateKernelTailCircuitPublicInputs;
-  /**
-   * The zk-SNARK proof for the kernel execution.
-   */
-  proof: Proof;
-}
-
-/**
- * ProofCreator provides functionality to create and validate proofs, and retrieve
- * siloed commitments necessary for maintaining transaction privacy and security on the network.
- */
-export interface ProofCreator {
-  /**
-   * Computes the siloed commitments for a given set of public inputs.
-   *
-   * @param publicInputs - The public inputs containing the contract address and new note hashes to be used in generating siloed note hashes.
-   * @returns An array of Fr (finite field) elements representing the siloed commitments.
-   */
-  getSiloedCommitments(publicInputs: PrivateCircuitPublicInputs): Promise<Fr[]>;
-
-  /**
-   * Creates a proof output for a given signed transaction request and private call data for the first iteration.
-   *
-   * @param privateKernelInputsInit - The private data structure for the initial iteration.
-   * @returns A Promise resolving to a ProofOutput object containing public inputs and the kernel proof.
-   */
-  createProofInit(privateKernelInputsInit: PrivateKernelInitCircuitPrivateInputs): Promise<ProofOutput>;
-
-  /**
-   * Creates a proof output for a given previous kernel data and private call data for an inner iteration.
-   *
-   * @param privateKernelInputsInner - The private input data structure for the inner iteration.
-   * @returns A Promise resolving to a ProofOutput object containing public inputs and the kernel proof.
-   */
-  createProofInner(privateKernelInputsInner: PrivateKernelInnerCircuitPrivateInputs): Promise<ProofOutput>;
-
-  /**
-   * Creates a proof output based on the last inner kernel iteration kernel data for the final ordering iteration.
-   *
-   * @param privateKernelInputsTail - The private input data structure for the final ordering iteration.
-   * @returns A Promise resolving to a ProofOutput object containing public inputs and the kernel proof.
-   */
-  createProofTail(privateKernelInputsTail: PrivateKernelTailCircuitPrivateInputs): Promise<ProofOutputFinal>;
-}
-
-/**
- * The KernelProofCreator class is responsible for generating siloed commitments and zero-knowledge proofs
- * for private kernel circuit. It leverages Barretenberg to perform cryptographic operations and proof creation.
- * The class provides methods to compute commitments based on the given public inputs and to generate proofs based on
- * signed transaction requests, previous kernel data, private call data, and a flag indicating whether it's the first
- * iteration or not.
- */
-export class KernelProofCreator implements ProofCreator {
-  constructor(private log = createDebugLogger('aztec:kernel_proof_creator')) {}
-
-  public getSiloedCommitments(publicInputs: PrivateCircuitPublicInputs) {
-    const contractAddress = publicInputs.callContext.storageContractAddress;
-
-    return Promise.resolve(
-      publicInputs.newNoteHashes.map(commitment => siloNoteHash(contractAddress, commitment.value)),
-    );
-  }
-
-  public async createProofInit(privateInputs: PrivateKernelInitCircuitPrivateInputs): Promise<ProofOutput> {
-    const [duration, result] = await elapsed(() => executeInit(privateInputs));
-    this.log.debug(`Simulated private kernel init`, {
-      eventName: 'circuit-simulation',
-      circuitName: 'private-kernel-init',
-      duration,
-      inputSize: privateInputs.toBuffer().length,
-      outputSize: result.toBuffer().length,
-    } satisfies CircuitSimulationStats);
-    const proof = makeEmptyProof();
-
-    return {
-      publicInputs: result,
-      proof: proof,
-    };
-  }
-
-  public async createProofInner(privateInputs: PrivateKernelInnerCircuitPrivateInputs): Promise<ProofOutput> {
-    const [duration, result] = await elapsed(() => executeInner(privateInputs));
-    this.log.debug(`Simulated private kernel inner`, {
-      eventName: 'circuit-simulation',
-      circuitName: 'private-kernel-inner',
-      duration,
-      inputSize: privateInputs.toBuffer().length,
-      outputSize: result.toBuffer().length,
-    } satisfies CircuitSimulationStats);
-    const proof = makeEmptyProof();
-
-    return {
-      publicInputs: result,
-      proof: proof,
-    };
-  }
-
-  public async createProofTail(privateInputs: PrivateKernelTailCircuitPrivateInputs): Promise<ProofOutputFinal> {
-    const isForPublic = privateInputs.isForPublic();
-    const [duration, result] = await elapsed(() =>
-      isForPublic ? executeTailForPublic(privateInputs) : executeTail(privateInputs),
-    );
-    this.log.debug(`Simulated private kernel ordering`, {
-      eventName: 'circuit-simulation',
-      circuitName: 'private-kernel-ordering',
-      duration,
-      inputSize: privateInputs.toBuffer().length,
-      outputSize: result.toBuffer().length,
-    } satisfies CircuitSimulationStats);
-    const proof = makeEmptyProof();
-
-    return {
-      publicInputs: result,
-      proof: proof,
-    };
-  }
-}
diff --git a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts
new file mode 100644
index 00000000000..4880e937eeb
--- /dev/null
+++ b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts
@@ -0,0 +1,96 @@
+import { type CircuitSimulationStats } from '@aztec/circuit-types/stats';
+import {
+  type PrivateCircuitPublicInputs,
+  type PrivateKernelCircuitPublicInputs,
+  type PrivateKernelInitCircuitPrivateInputs,
+  type PrivateKernelInnerCircuitPrivateInputs,
+  type PrivateKernelTailCircuitPrivateInputs,
+  type PrivateKernelTailCircuitPublicInputs,
+  Proof,
+  makeEmptyProof,
+} from '@aztec/circuits.js';
+import { siloNoteHash } from '@aztec/circuits.js/hash';
+import { createDebugLogger } from '@aztec/foundation/log';
+import { elapsed } from '@aztec/foundation/timer';
+import { executeInit, executeInner, executeTail, executeTailForPublic } from '@aztec/noir-protocol-circuits-types';
+
+import { type ProofCreator, type ProofOutput } from '../interface/proof_creator.js';
+
+/**
+ * Test Proof Creator executes circuit simulations and provides fake proofs.
+ */
+export class TestProofCreator implements ProofCreator {
+  constructor(private log = createDebugLogger('aztec:test_proof_creator')) {}
+
+  public getSiloedCommitments(publicInputs: PrivateCircuitPublicInputs) {
+    const contractAddress = publicInputs.callContext.storageContractAddress;
+
+    return Promise.resolve(
+      publicInputs.newNoteHashes.map(commitment => siloNoteHash(contractAddress, commitment.value)),
+    );
+  }
+
+  public async createProofInit(
+    privateInputs: PrivateKernelInitCircuitPrivateInputs,
+  ): Promise<ProofOutput<PrivateKernelCircuitPublicInputs>> {
+    const [duration, result] = await elapsed(() => executeInit(privateInputs));
+    this.log.debug(`Simulated private kernel init`, {
+      eventName: 'circuit-simulation',
+      circuitName: 'private-kernel-init',
+      duration,
+      inputSize: privateInputs.toBuffer().length,
+      outputSize: result.toBuffer().length,
+    } satisfies CircuitSimulationStats);
+    const proof = makeEmptyProof();
+
+    return {
+      publicInputs: result,
+      proof: proof,
+    };
+  }
+
+  public async createProofInner(
+    privateInputs: PrivateKernelInnerCircuitPrivateInputs,
+  ): Promise<ProofOutput<PrivateKernelCircuitPublicInputs>> {
+    const [duration, result] = await elapsed(() => executeInner(privateInputs));
+    this.log.debug(`Simulated private kernel inner`, {
+      eventName: 'circuit-simulation',
+      circuitName: 'private-kernel-inner',
+      duration,
+      inputSize: privateInputs.toBuffer().length,
+      outputSize: result.toBuffer().length,
+    } satisfies CircuitSimulationStats);
+    const proof = makeEmptyProof();
+
+    return {
+      publicInputs: result,
+      proof: proof,
+    };
+  }
+
+  public async createProofTail(
+    privateInputs: PrivateKernelTailCircuitPrivateInputs,
+  ): Promise<ProofOutput<PrivateKernelTailCircuitPublicInputs>> {
+    const isForPublic = privateInputs.isForPublic();
+    const [duration, result] = await elapsed(() =>
+      isForPublic ? executeTailForPublic(privateInputs) : executeTail(privateInputs),
+    );
+    this.log.debug(`Simulated private kernel ordering`, {
+      eventName: 'circuit-simulation',
+      circuitName: 'private-kernel-ordering',
+      duration,
+      inputSize: privateInputs.toBuffer().length,
+      outputSize: result.toBuffer().length,
+    } satisfies CircuitSimulationStats);
+    const proof = makeEmptyProof();
+
+    return {
+      publicInputs: result,
+      proof: proof,
+    };
+  }
+
+  createAppCircuitProof(_1: Map<number, ACVMField>, _2: Buffer): Promise<Proof> {
+    return Promise.resolve(new Proof(Buffer.alloc(0)));
+  }
+}
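A short illustrative sketch (not part of the patch) of how the constructor-injected ProofCreator is exercised, assuming an oracle implementing ProvingDataOracle, a TxExecutionRequest named txExecutionRequest, and a simulated executionResult; only the class and method names come from this change.

    // The caller now supplies the ProofCreator; TestProofCreator simulates the kernel circuits
    // and returns empty proofs, which keeps unit tests fast without a real prover.
    const kernelProver = new KernelProver(oracle, new TestProofCreator());
    const { publicInputs, proof } = await kernelProver.prove(txExecutionRequest.toTxRequest(), executionResult);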
diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts
index 5566ccdc7f5..e2895e3d2c4 100644
--- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts
+++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts
@@ -13,6 +13,9 @@ import { join } from 'path';
 
 import { type PXEServiceConfig } from '../config/index.js';
 import { KVPxeDatabase } from '../database/kv_pxe_database.js';
+import { BBNativeProofCreator } from '../kernel_prover/bb_prover/bb_native_proof_creator.js';
+import { type ProofCreator } from '../kernel_prover/interface/proof_creator.js';
+import { TestProofCreator } from '../kernel_prover/test/test_circuit_prover.js';
 import { PXEService } from './pxe_service.js';
 
 /**
@@ -23,12 +26,14 @@ import { PXEService } from './pxe_service.js';
  * @param aztecNode - The AztecNode instance to be used by the server.
  * @param config - The PXE Service Config to use
  * @param options - (Optional) Optional information for creating an PXEService.
+ * @param proofCreator - An optional proof creator to use in place of any other configuration
  * @returns A Promise that resolves to the started PXEService instance.
  */
 export async function createPXEService(
   aztecNode: AztecNode,
   config: PXEServiceConfig,
   useLogSuffix: string | boolean | undefined = undefined,
+  proofCreator?: ProofCreator,
 ) {
   const logSuffix =
     typeof useLogSuffix === 'boolean' ? (useLogSuffix ? randomBytes(3).toString('hex') : undefined) : useLogSuffix;
@@ -42,7 +47,18 @@ export async function createPXEService(
   );
   const db = new KVPxeDatabase(await initStoreForRollup(AztecLmdbStore.open(pxeDbPath), l1Contracts.rollupAddress));
 
-  const server = new PXEService(keyStore, aztecNode, db, config, logSuffix);
+  // (@PhilWindle) Temporary validation until WASM is implemented
+  let prover: ProofCreator | undefined = proofCreator;
+  if (!prover) {
+    if (config.proverEnabled && (!config.bbBinaryPath || !config.bbWorkingDirectory)) {
+      throw new Error(`Prover must be configured with binary path and working directory`);
+    }
+    prover = !config.proverEnabled
+      ? new TestProofCreator()
+      : new BBNativeProofCreator(config.bbBinaryPath!, config.bbWorkingDirectory!);
+  }
+
+  const server = new PXEService(keyStore, aztecNode, db, prover, config, logSuffix);
   for (const contract of [
     getCanonicalClassRegisterer(),
     getCanonicalInstanceDeployer(),
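A configuration sketch for the branch above (illustrative, not from the patch): it assumes an AztecNode instance named node, an existing PXEServiceConfig named config, and made-up filesystem paths.

    // With proverEnabled, the PXE builds a BBNativeProofCreator from the bb binary settings;
    // with it disabled, it falls back to TestProofCreator, which returns empty proofs.
    const provingPxe = await createPXEService(node, {
      ...config,
      proverEnabled: true,
      bbBinaryPath: '/usr/local/bin/bb', // illustrative path
      bbWorkingDirectory: '/tmp/bb-prover', // illustrative path
    });
    // Alternatively, inject a ProofCreator directly and skip the config-based selection entirely.
    const testPxe = await createPXEService(node, config, undefined, new TestProofCreator());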
diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts
index dc75deb293b..259d0db6255 100644
--- a/yarn-project/pxe/src/pxe_service/pxe_service.ts
+++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts
@@ -57,6 +57,7 @@ import { ContractDataOracle } from '../contract_data_oracle/index.js';
 import { type PxeDatabase } from '../database/index.js';
 import { NoteDao } from '../database/note_dao.js';
 import { KernelOracle } from '../kernel_oracle/index.js';
+import { type ProofCreator } from '../kernel_prover/interface/proof_creator.js';
 import { KernelProver } from '../kernel_prover/kernel_prover.js';
 import { getAcirSimulator } from '../simulator/index.js';
 import { Synchronizer } from '../synchronizer/index.js';
@@ -78,6 +79,7 @@ export class PXEService implements PXE {
     private keyStore: KeyStore,
     private node: AztecNode,
     private db: PxeDatabase,
+    private proofCreator: ProofCreator,
     private config: PXEServiceConfig,
     logSuffix?: string,
   ) {
@@ -661,7 +663,7 @@ export class PXEService implements PXE {
     const executionResult = await this.#simulate(txExecutionRequest, msgSender);
 
     const kernelOracle = new KernelOracle(this.contractDataOracle, this.keyStore, this.node);
-    const kernelProver = new KernelProver(kernelOracle);
+    const kernelProver = new KernelProver(kernelOracle, this.proofCreator);
     this.log.debug(`Executing kernel prover...`);
     const { proof, publicInputs } = await kernelProver.prove(txExecutionRequest.toTxRequest(), executionResult);
 
diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts
index 6bff5763494..ba00fbefa1f 100644
--- a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts
+++ b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts
@@ -10,6 +10,7 @@ import { type MockProxy, mock } from 'jest-mock-extended';
 
 import { KVPxeDatabase } from '../../database/kv_pxe_database.js';
 import { type PxeDatabase } from '../../database/pxe_database.js';
 import { type PXEServiceConfig } from '../../index.js';
+import { TestProofCreator } from '../../kernel_prover/test/test_circuit_prover.js';
 import { PXEService } from '../pxe_service.js';
 import { pxeTestSuite } from './pxe_test_suite.js';
@@ -35,7 +36,7 @@ function createPXEService(): Promise<PXEService> {
   };
   node.getL1ContractAddresses.mockResolvedValue(mockedContracts);
 
-  return Promise.resolve(new PXEService(keyStore, node, db, config));
+  return Promise.resolve(new PXEService(keyStore, node, db, new TestProofCreator(), config));
 }
 
 pxeTestSuite('PXEService', createPXEService);
@@ -51,7 +52,7 @@ describe('PXEService', () => {
     keyStore = new TestKeyStore(kvStore);
     node = mock<AztecNode>();
     db = new KVPxeDatabase(kvStore);
-    config = { l2BlockPollingIntervalMS: 100, l2StartingBlock: INITIAL_L2_BLOCK_NUM };
+    config = { l2BlockPollingIntervalMS: 100, l2StartingBlock: INITIAL_L2_BLOCK_NUM, proverEnabled: false };
   });
 
   it('throws when submitting a tx with a nullifier of already settled tx', async () => {
@@ -60,7 +61,7 @@ describe('PXEService', () => {
 
     node.getTxEffect.mockResolvedValue(settledTx);
 
-    const pxe = new PXEService(keyStore, node, db, config);
+    const pxe = new PXEService(keyStore, node, db, new TestProofCreator(), config);
     await expect(pxe.sendTx(duplicateTx)).rejects.toThrow(/A settled tx with equal hash/);
   });
 });
diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock
index 9b79cfcc6a7..f2539b2eb46 100644
--- a/yarn-project/yarn.lock
+++ b/yarn-project/yarn.lock
@@ -710,6 +710,7 @@ __metadata:
   version: 0.0.0-use.local
   resolution: "@aztec/pxe@workspace:pxe"
   dependencies:
+    "@aztec/bb.js": "portal:../../barretenberg/ts"
     "@aztec/builder": "workspace:^"
     "@aztec/circuit-types": "workspace:^"
     "@aztec/circuits.js": "workspace:^"
@@ -723,6 +724,7 @@
     "@aztec/simulator": "workspace:^"
     "@aztec/types": "workspace:^"
     "@jest/globals": ^29.5.0
+    "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi"
     "@types/jest": ^29.5.0
     "@types/lodash.omit": ^4.5.7
     "@types/node": ^18.7.23