From 29e9b5e5d0f7a00c806639e900f2f8209675ee0e Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:54:28 +0100 Subject: [PATCH 01/17] chore: do not panic when dividing by zero (#4424) # Description ## Problem\* Resolves #2480 ## Summary\* In BrilligVM, we have now the same behaviour regarding division by 0. Whether it is a field or integer division, we return 0 when dividing by zero. Since we have a constraint which checks that the inverse times itself is 1, this constraint will always fail if we divide by zero (instead of panic or returning an error). ## Additional Context ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- acvm-repo/brillig_vm/src/arithmetic.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/acvm-repo/brillig_vm/src/arithmetic.rs b/acvm-repo/brillig_vm/src/arithmetic.rs index 263a733e3c..9d7b6fe8f0 100644 --- a/acvm-repo/brillig_vm/src/arithmetic.rs +++ b/acvm-repo/brillig_vm/src/arithmetic.rs @@ -36,18 +36,20 @@ pub(crate) fn evaluate_binary_bigint_op( BinaryIntOp::UnsignedDiv => { let b_mod = b % bit_modulo; if b_mod.is_zero() { - return Err("Division by zero".to_owned()); + BigUint::zero() + } else { + (a % bit_modulo) / b_mod } - (a % bit_modulo) / b_mod } // Perform signed division by first converting a and b to signed integers and then back to unsigned after the operation. 
BinaryIntOp::SignedDiv => { let b_signed = to_big_signed(b, bit_size); if b_signed.is_zero() { - return Err("Division by zero".to_owned()); + BigUint::zero() + } else { + let signed_div = to_big_signed(a, bit_size) / b_signed; + to_big_unsigned(signed_div, bit_size) } - let signed_div = to_big_signed(a, bit_size) / b_signed; - to_big_unsigned(signed_div, bit_size) } // Perform a == operation, returning 0 or 1 BinaryIntOp::Equals => { From 7cd5fdb3d2a53475b7c8681231d517cab30f9f9b Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 13:56:14 +0000 Subject: [PATCH 02/17] feat: expose separate functions to compile programs vs contracts in `noir_wasm` (#4413) # Description ## Problem\* Resolves ## Summary\* This PR exposes separate functions to compile contracts vs programs in the wasm compiler. This allows us to simplify various code paths as we don't need to deal with the potential for the two artifact types as this just leads to us asserting types and breaking type safety. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/wasm/src/compile.rs | 212 ++++++++++-------- compiler/wasm/src/compile_new.rs | 75 +++++-- compiler/wasm/src/index.cts | 77 ++++++- compiler/wasm/src/index.mts | 77 ++++++- compiler/wasm/src/lib.rs | 4 +- compiler/wasm/src/noir/noir-wasm-compiler.ts | 61 ++++- compiler/wasm/src/types/noir_artifact.ts | 19 -- .../test/compiler/browser/compile.test.ts | 8 +- .../wasm/test/compiler/node/compile.test.ts | 8 +- .../wasm/test/compiler/shared/compile.test.ts | 10 +- compiler/wasm/test/wasm/browser/index.test.ts | 10 +- compiler/wasm/test/wasm/node/index.test.ts | 10 +- 12 files changed, 377 insertions(+), 194 deletions(-) diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index c8b1680bc0..ca6c8efedb 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -6,8 +6,7 @@ use nargo::artifacts::{ program::ProgramArtifact, }; use noirc_driver::{ - add_dep, compile_contract, compile_main, file_manager_with_stdlib, prepare_crate, - prepare_dependency, CompileOptions, CompiledContract, CompiledProgram, + add_dep, file_manager_with_stdlib, prepare_crate, prepare_dependency, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use noirc_evaluator::errors::SsaReport; @@ -60,51 +59,64 @@ extern "C" { #[derive(Clone, Debug, PartialEq, Eq)] pub type JsDependencyGraph; - #[wasm_bindgen(extends = Object, js_name = "CompileResult", typescript_type = "CompileResult")] + #[wasm_bindgen(extends = Object, js_name = "ProgramCompileResult", typescript_type = "ProgramCompileResult")] #[derive(Clone, Debug, PartialEq, Eq)] - pub type JsCompileResult; + pub type JsCompileProgramResult; #[wasm_bindgen(constructor, js_class = "Object")] - fn constructor() -> JsCompileResult; + fn constructor() -> JsCompileProgramResult; + + #[wasm_bindgen(extends = Object, js_name = "ContractCompileResult", typescript_type = "ContractCompileResult")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsCompileContractResult; + + #[wasm_bindgen(constructor, 
js_class = "Object")] + fn constructor() -> JsCompileContractResult; } -impl JsCompileResult { - const CONTRACT_PROP: &'static str = "contract"; +impl JsCompileProgramResult { const PROGRAM_PROP: &'static str = "program"; const WARNINGS_PROP: &'static str = "warnings"; - pub fn new(resp: CompileResult) -> JsCompileResult { - let obj = JsCompileResult::constructor(); - match resp { - CompileResult::Contract { contract, warnings } => { - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::CONTRACT_PROP), - &::from_serde(&contract).unwrap(), - ) - .unwrap(); - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::WARNINGS_PROP), - &::from_serde(&warnings).unwrap(), - ) - .unwrap(); - } - CompileResult::Program { program, warnings } => { - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::PROGRAM_PROP), - &::from_serde(&program).unwrap(), - ) - .unwrap(); - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::WARNINGS_PROP), - &::from_serde(&warnings).unwrap(), - ) - .unwrap(); - } - }; + pub fn new(program: ProgramArtifact, warnings: Vec) -> JsCompileProgramResult { + let obj = JsCompileProgramResult::constructor(); + + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileProgramResult::PROGRAM_PROP), + &::from_serde(&program).unwrap(), + ) + .unwrap(); + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileProgramResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), + ) + .unwrap(); + + obj + } +} + +impl JsCompileContractResult { + const CONTRACT_PROP: &'static str = "contract"; + const WARNINGS_PROP: &'static str = "warnings"; + + pub fn new(contract: ContractArtifact, warnings: Vec) -> JsCompileContractResult { + let obj = JsCompileContractResult::constructor(); + + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileContractResult::CONTRACT_PROP), + &::from_serde(&contract).unwrap(), + ) + .unwrap(); + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileContractResult::WARNINGS_PROP), + 
&::from_serde(&warnings).unwrap(), + ) + .unwrap(); obj } @@ -144,73 +156,98 @@ pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles { fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() } -pub enum CompileResult { - Contract { contract: ContractArtifact, warnings: Vec }, - Program { program: ProgramArtifact, warnings: Vec }, -} - #[wasm_bindgen] -pub fn compile( +pub fn compile_program( entry_point: String, - contracts: Option, dependency_graph: Option, file_source_map: PathToFileSourceMap, -) -> Result { +) -> Result { console_error_panic_hook::set_once(); - - let dependency_graph: DependencyGraph = if let Some(dependency_graph) = dependency_graph { - ::into_serde(&JsValue::from(dependency_graph)) - .map_err(|err| err.to_string())? - } else { - DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } - }; - - let fm = file_manager_with_source_map(file_source_map); - let parsed_files = parse_all(&fm); - let mut context = Context::new(fm, parsed_files); - - let path = Path::new(&entry_point); - let crate_id = prepare_crate(&mut context, path); - - process_dependency_graph(&mut context, dependency_graph); + let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; - if contracts.unwrap_or_default() { - let compiled_contract = compile_contract(&mut context, crate_id, &compile_options) + let compiled_program = + noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) .map_err(|errs| { CompileError::with_file_diagnostics( - "Failed to compile contract", + "Failed to compile program", errs, &context.file_manager, ) })? 
.0; - let optimized_contract = - nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let warnings = optimized_program.warnings.clone(); - let compile_output = generate_contract_artifact(optimized_contract); - Ok(JsCompileResult::new(compile_output)) - } else { - let compiled_program = compile_main(&mut context, crate_id, &compile_options, None) + Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) +} + +#[wasm_bindgen] +pub fn compile_contract( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + console_error_panic_hook::set_once(); + let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; + + let compile_options = CompileOptions::default(); + // For now we default to a bounded width of 3, though we can add it as a parameter + let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + + let compiled_contract = + noirc_driver::compile_contract(&mut context, crate_id, &compile_options) .map_err(|errs| { CompileError::with_file_diagnostics( - "Failed to compile program", + "Failed to compile contract", errs, &context.file_manager, ) })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); - let compile_output = generate_program_artifact(optimized_program); - Ok(JsCompileResult::new(compile_output)) - } + let functions = + optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { + noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), + name: optimized_contract.name, + functions, + events: optimized_contract.events, + file_map: optimized_contract.file_map, + }; + + Ok(JsCompileContractResult::new(contract_artifact, optimized_contract.warnings)) +} + +fn prepare_context( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result<(CrateId, Context<'static, 'static>), JsCompileError> { + let dependency_graph: DependencyGraph = if let Some(dependency_graph) = dependency_graph { + ::into_serde(&JsValue::from(dependency_graph)) + .map_err(|err| err.to_string())? 
+ } else { + DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } + }; + + let fm = file_manager_with_source_map(file_source_map); + let parsed_files = parse_all(&fm); + let mut context = Context::new(fm, parsed_files); + + let path = Path::new(&entry_point); + let crate_id = prepare_crate(&mut context, path); + + process_dependency_graph(&mut context, dependency_graph); + + Ok((crate_id, context)) } // Create a new FileManager with the given source map @@ -270,25 +307,6 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { prepare_dependency(context, &path_to_lib) } -pub(crate) fn generate_program_artifact(program: CompiledProgram) -> CompileResult { - let warnings = program.warnings.clone(); - CompileResult::Program { program: program.into(), warnings } -} - -pub(crate) fn generate_contract_artifact(contract: CompiledContract) -> CompileResult { - let functions = contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); - - let contract_artifact = ContractArtifact { - noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), - name: contract.name, - functions, - events: contract.events, - file_map: contract.file_map, - }; - - CompileResult::Contract { contract: contract_artifact, warnings: contract.warnings } -} - #[cfg(test)] mod test { use noirc_driver::prepare_crate; diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs index f8fbed4f47..2a5f7ab654 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -1,10 +1,12 @@ use crate::compile::{ - file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, parse_all, - JsCompileResult, PathToFileSourceMap, + file_manager_with_source_map, parse_all, JsCompileContractResult, JsCompileProgramResult, + PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use 
noirc_driver::{ add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -92,7 +94,7 @@ impl CompilerContext { pub fn compile_program( mut self, program_width: usize, - ) -> Result { + ) -> Result { let compile_options = CompileOptions::default(); let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; @@ -110,15 +112,15 @@ impl CompilerContext { .0; let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let warnings = optimized_program.warnings.clone(); - let compile_output = generate_program_artifact(optimized_program); - Ok(JsCompileResult::new(compile_output)) + Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) } pub fn compile_contract( mut self, program_width: usize, - ) -> Result { + ) -> Result { let compile_options = CompileOptions::default(); let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; let root_crate_id = *self.context.root_crate_id(); @@ -136,24 +138,64 @@ impl CompilerContext { let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); - let compile_output = generate_contract_artifact(optimized_contract); - Ok(JsCompileResult::new(compile_output)) + let functions = + optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { + noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), + name: optimized_contract.name, + functions, + events: optimized_contract.events, + file_map: optimized_contract.file_map, + }; + + Ok(JsCompileContractResult::new(contract_artifact, optimized_contract.warnings)) } } /// This is a method that exposes the same API as `compile` /// But uses the Context based APi internally #[wasm_bindgen] -pub fn compile_( +pub fn compile_program_( entry_point: String, - contracts: 
Option, dependency_graph: Option, file_source_map: PathToFileSourceMap, -) -> Result { - use std::collections::HashMap; +) -> Result { + console_error_panic_hook::set_once(); + + let compiler_context = + prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; + let program_width = 3; + compiler_context.compile_program(program_width) +} + +/// This is a method that exposes the same API as `compile` +/// But uses the Context based APi internally +#[wasm_bindgen] +pub fn compile_contract_( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { console_error_panic_hook::set_once(); + let compiler_context = + prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; + let program_width = 3; + + compiler_context.compile_contract(program_width) +} + +/// This is a method that exposes the same API as `prepare_context` +/// But uses the Context based API internally +fn prepare_compiler_context( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + use std::collections::HashMap; + let dependency_graph: crate::compile::DependencyGraph = if let Some(dependency_graph) = dependency_graph { ::into_serde( @@ -218,14 +260,7 @@ pub fn compile_( } } - let is_contract = contracts.unwrap_or(false); - let program_width = 3; - - if is_contract { - compiler_context.compile_contract(program_width) - } else { - compiler_context.compile_program(program_width) - } + Ok(compiler_context) } #[cfg(test)] diff --git a/compiler/wasm/src/index.cts b/compiler/wasm/src/index.cts index 7c707e662d..234bfa7280 100644 --- a/compiler/wasm/src/index.cts +++ b/compiler/wasm/src/index.cts @@ -2,7 +2,7 @@ import { FileManager } from './noir/file-manager/file-manager'; import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; import { LogData, LogFn } from './utils'; -import { 
CompilationResult } from './types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; /** @@ -17,36 +17,86 @@ import { inflateDebugSymbols } from './noir/debug'; * ```typescript * // Node.js * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager(myProjectPath); - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` * * ```typescript * // Browser * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager('/'); * for (const path of files) { * await fm.writeFile(path, await getFileAsStream(path)); * } - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` */ -async function compile( +async function compile_program( fileManager: FileManager, projectPath?: string, logFn?: LogFn, debugLogFn?: LogFn, -): Promise { +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_program(); +} + +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. 
If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile_contract(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile_contract(fm); + * ``` + */ +async function compile_contract( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_contract(); +} + +async function setup_compiler( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { if (logFn && !debugLogFn) { debugLogFn = logFn; } const cjs = await require('../build/cjs'); - const compiler = await NoirWasmCompiler.new( + return await NoirWasmCompiler.new( fileManager, projectPath ?? 
fileManager.getDataDir(), cjs, @@ -72,9 +122,16 @@ async function compile( }, }, ); - return await compiler.compile(); } const createFileManager = createNodejsFileManager; -export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; +export { + compile_program as compile, + compile_program, + compile_contract, + createFileManager, + inflateDebugSymbols, + ProgramCompilationArtifacts, + ContractCompilationArtifacts, +}; diff --git a/compiler/wasm/src/index.mts b/compiler/wasm/src/index.mts index d4ed0beccf..326a733711 100644 --- a/compiler/wasm/src/index.mts +++ b/compiler/wasm/src/index.mts @@ -2,7 +2,7 @@ import { FileManager } from './noir/file-manager/file-manager'; import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; import { LogData, LogFn } from './utils'; -import { CompilationResult } from './types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; /** @@ -17,30 +17,80 @@ import { inflateDebugSymbols } from './noir/debug'; * ```typescript * // Node.js * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager(myProjectPath); - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` * * ```typescript * // Browser * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager('/'); * for (const path of files) { * await fm.writeFile(path, await getFileAsStream(path)); * } - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` */ -async function compile( +async function compile_program( 
fileManager: FileManager, projectPath?: string, logFn?: LogFn, debugLogFn?: LogFn, -): Promise { +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_program(); +} + +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile_contract(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile_contract(fm); + * ``` + */ +async function compile_contract( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_contract(); +} + +async function setup_compiler( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { if (logFn && !debugLogFn) { debugLogFn = logFn; } @@ -48,7 +98,7 @@ async function compile( const esm = await import(/* webpackMode: "eager" */ '../build/esm'); await esm.default(); - const compiler = await NoirWasmCompiler.new( + return await NoirWasmCompiler.new( fileManager, projectPath ?? 
fileManager.getDataDir(), esm, @@ -74,9 +124,16 @@ async function compile( }, }, ); - return await compiler.compile(); } const createFileManager = createNodejsFileManager; -export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; +export { + compile_program as compile, + compile_program, + compile_contract, + createFileManager, + inflateDebugSymbols, + ProgramCompilationArtifacts, + ContractCompilationArtifacts, +}; diff --git a/compiler/wasm/src/lib.rs b/compiler/wasm/src/lib.rs index 174d9b9ce9..6753faf200 100644 --- a/compiler/wasm/src/lib.rs +++ b/compiler/wasm/src/lib.rs @@ -18,10 +18,10 @@ mod compile; mod compile_new; mod errors; -pub use compile::compile; +pub use compile::{compile_contract, compile_program}; // Expose the new Context-Centric API -pub use compile_new::{compile_, CompilerContext, CrateIDWrapper}; +pub use compile_new::{compile_contract_, compile_program_, CompilerContext, CrateIDWrapper}; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; #[derive(Serialize, Deserialize)] diff --git a/compiler/wasm/src/noir/noir-wasm-compiler.ts b/compiler/wasm/src/noir/noir-wasm-compiler.ts index 2a0af5d8fe..1ec3af1fd6 100644 --- a/compiler/wasm/src/noir/noir-wasm-compiler.ts +++ b/compiler/wasm/src/noir/noir-wasm-compiler.ts @@ -6,7 +6,7 @@ import { LocalDependencyResolver } from './dependencies/local-dependency-resolve import { FileManager } from './file-manager/file-manager'; import { Package } from './package'; import { LogFn } from '../utils'; -import { CompilationResult } from '../types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from '../types/noir_artifact'; /** Compilation options */ export type NoirWasmCompileOptions = { @@ -84,21 +84,64 @@ export class NoirWasmCompiler { /** * Compile EntryPoint */ + public async compile_program(): Promise { + console.log(`Compiling at ${this.#package.getEntryPointPath()}`); + + if (this.#package.getType() !== 'bin') { + throw new Error(`Expected to 
find package type "bin" but found ${this.#package.getType()}`); + } + await this.#dependencyManager.resolveDependencies(); + this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); + + try { + const entrypoint = this.#package.getEntryPointPath(); + const deps = { + /* eslint-disable camelcase */ + root_dependencies: this.#dependencyManager.getEntrypointDependencies(), + library_dependencies: this.#dependencyManager.getLibraryDependencies(), + /* eslint-enable camelcase */ + }; + const packageSources = await this.#package.getSources(this.#fm); + const librarySources = ( + await Promise.all( + this.#dependencyManager + .getLibraries() + .map(async ([alias, library]) => await library.package.getSources(this.#fm, alias)), + ) + ).flat(); + [...packageSources, ...librarySources].forEach((sourceFile) => { + this.#debugLog(`Adding source ${sourceFile.path}`); + this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); + }); + const result = this.#wasmCompiler.compile_program(entrypoint, deps, this.#sourceMap); + + return result; + } catch (err) { + if (err instanceof Error && err.name === 'CompileError') { + const logs = await this.#processCompileError(err); + for (const log of logs) { + this.#log(log); + } + throw new Error(logs.join('\n')); + } + + throw err; + } + } + /** * Compile EntryPoint */ - public async compile(): Promise { + public async compile_contract(): Promise { console.log(`Compiling at ${this.#package.getEntryPointPath()}`); - if (!(this.#package.getType() === 'contract' || this.#package.getType() === 'bin')) { - throw new Error(`Only supports compiling "contract" and "bin" package types (${this.#package.getType()})`); + if (this.#package.getType() !== 'contract') { + throw new Error(`Expected to find package type "contract" but found ${this.#package.getType()}`); } await this.#dependencyManager.resolveDependencies(); this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); try { 
- const isContract: boolean = this.#package.getType() === 'contract'; - const entrypoint = this.#package.getEntryPointPath(); const deps = { /* eslint-disable camelcase */ @@ -118,11 +161,7 @@ export class NoirWasmCompiler { this.#debugLog(`Adding source ${sourceFile.path}`); this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); }); - const result = this.#wasmCompiler.compile(entrypoint, isContract, deps, this.#sourceMap); - - if ((isContract && !('contract' in result)) || (!isContract && !('program' in result))) { - throw new Error('Invalid compilation result'); - } + const result = this.#wasmCompiler.compile_contract(entrypoint, deps, this.#sourceMap); return result; } catch (err) { diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts index e636212a48..832a6ed9bf 100644 --- a/compiler/wasm/src/types/noir_artifact.ts +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -180,22 +180,3 @@ export interface ProgramCompilationArtifacts { /** Compilation warnings. 
*/ warnings: Warning[]; } - -/** - * output of Noir Wasm compilation, can be for a contract or lib/binary - */ -export type CompilationResult = ContractCompilationArtifacts | ProgramCompilationArtifacts; - -/** - * Check if it has Contract unique property - */ -export function isContractCompilationArtifacts(artifact: CompilationResult): artifact is ContractCompilationArtifacts { - return (artifact as ContractCompilationArtifacts).contract !== undefined; -} - -/** - * Check if it has Contract unique property - */ -export function isProgramCompilationArtifacts(artifact: CompilationResult): artifact is ProgramCompilationArtifacts { - return (artifact as ProgramCompilationArtifacts).program !== undefined; -} diff --git a/compiler/wasm/test/compiler/browser/compile.test.ts b/compiler/wasm/test/compiler/browser/compile.test.ts index b7e6c27427..7d4b3da55a 100644 --- a/compiler/wasm/test/compiler/browser/compile.test.ts +++ b/compiler/wasm/test/compiler/browser/compile.test.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/ban-ts-comment */ import { getPaths } from '../../shared'; import { expect } from '@esm-bundle/chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm'; import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; @@ -33,7 +33,7 @@ describe('noir-compiler/browser', () => { await fm.writeFile(path, (await getFile(path)).body as ReadableStream); } const nargoArtifact = (await getPrecompiledSource(simpleScriptExpectedArtifact)) as ProgramArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/simple'); + const noirWasmArtifact = await compile_program(fm, '/fixtures/simple'); return { nargoArtifact, noirWasmArtifact }; }, @@ -51,7 +51,7 @@ describe('noir-compiler/browser', () => { await 
fm.writeFile(path, (await getFile(path)).body as ReadableStream); } const nargoArtifact = (await getPrecompiledSource(depsScriptExpectedArtifact)) as ProgramArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/with-deps'); + const noirWasmArtifact = await compile_program(fm, '/fixtures/with-deps'); return { nargoArtifact, noirWasmArtifact }; }, @@ -69,7 +69,7 @@ describe('noir-compiler/browser', () => { await fm.writeFile(path, (await getFile(path)).body as ReadableStream); } const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); + const noirWasmArtifact = await compile_contract(fm, '/fixtures/noir-contract'); return { nargoArtifact, noirWasmArtifact }; }, diff --git a/compiler/wasm/test/compiler/node/compile.test.ts b/compiler/wasm/test/compiler/node/compile.test.ts index 9af9819582..811dc95ce1 100644 --- a/compiler/wasm/test/compiler/node/compile.test.ts +++ b/compiler/wasm/test/compiler/node/compile.test.ts @@ -2,7 +2,7 @@ import { join, resolve } from 'path'; import { getPaths } from '../../shared'; import { expect } from 'chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm'; import { readFile } from 'fs/promises'; import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; @@ -15,7 +15,7 @@ describe('noir-compiler/node', () => { const fm = createFileManager(simpleScriptProjectPath); const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; - const noirWasmArtifact = await compile(fm); + const noirWasmArtifact = await compile_program(fm); return { nargoArtifact, noirWasmArtifact }; }, expect); @@ -24,7 +24,7 @@ 
describe('noir-compiler/node', () => { const fm = createFileManager(depsScriptProjectPath); const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; - const noirWasmArtifact = await compile(fm); + const noirWasmArtifact = await compile_program(fm); return { nargoArtifact, noirWasmArtifact }; }, expect); @@ -33,7 +33,7 @@ describe('noir-compiler/node', () => { const fm = createFileManager(contractProjectPath); const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; - const noirWasmArtifact = await compile(fm); + const noirWasmArtifact = await compile_contract(fm); return { nargoArtifact, noirWasmArtifact }; }, expect); }); diff --git a/compiler/wasm/test/compiler/shared/compile.test.ts b/compiler/wasm/test/compiler/shared/compile.test.ts index 88e8e8c8e5..52cef14968 100644 --- a/compiler/wasm/test/compiler/shared/compile.test.ts +++ b/compiler/wasm/test/compiler/shared/compile.test.ts @@ -1,4 +1,4 @@ -import { CompilationResult, inflateDebugSymbols } from '@noir-lang/noir_wasm'; +import { inflateDebugSymbols } from '@noir-lang/noir_wasm'; import { type expect as Expect } from 'chai'; import { ContractArtifact, @@ -11,7 +11,7 @@ import { } from '../../../src/types/noir_artifact'; export function shouldCompileProgramIdentically( - compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: CompilationResult }>, + compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: ProgramCompilationArtifacts }>, expect: typeof Expect, timeout = 5000, ) { @@ -24,7 +24,7 @@ export function shouldCompileProgramIdentically( normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmProgram = (noirWasmArtifact as unknown as ProgramCompilationArtifacts).program; + const noirWasmProgram = noirWasmArtifact.program; expect(noirWasmProgram).not.to.be.undefined; const [_noirWasmDebugInfos, norWasmFileMap] = 
deleteProgramDebugMetadata(noirWasmProgram); normalizeVersion(noirWasmProgram); @@ -47,7 +47,7 @@ export function shouldCompileProgramIdentically( } export function shouldCompileContractIdentically( - compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: CompilationResult }>, + compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: ContractCompilationArtifacts }>, expect: typeof Expect, timeout = 5000, ) { @@ -60,7 +60,7 @@ export function shouldCompileContractIdentically( normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmContract = (noirWasmArtifact as unknown as ContractCompilationArtifacts).contract; + const noirWasmContract = noirWasmArtifact.contract; expect(noirWasmContract).not.to.be.undefined; const [noirWasmDebugInfos, norWasmFileMap] = deleteContractDebugMetadata(noirWasmContract); normalizeVersion(noirWasmContract); diff --git a/compiler/wasm/test/wasm/browser/index.test.ts b/compiler/wasm/test/wasm/browser/index.test.ts index 3122fa5794..b59b4ae417 100644 --- a/compiler/wasm/test/wasm/browser/index.test.ts +++ b/compiler/wasm/test/wasm/browser/index.test.ts @@ -2,7 +2,7 @@ import { getPaths } from '../../shared'; import { expect } from '@esm-bundle/chai'; -import init, { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/esm'; +import init, { compile_program, PathToFileSourceMap, compile_program_, CompilerContext } from '../../../build/esm'; // @ts-ignore await init(); @@ -35,7 +35,7 @@ describe('noir wasm compilation', () => { it('matching nargos compilation', async () => { const sourceMap = new PathToFileSourceMap(); sourceMap.add_source_code('script/main.nr', await getFileAsString(simpleScriptSourcePath)); - const wasmCircuit = compile('script/main.nr', undefined, undefined, sourceMap); + const wasmCircuit = compile_program('script/main.nr', undefined, sourceMap); const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); if 
(!('program' in wasmCircuit)) { @@ -58,9 +58,8 @@ describe('noir wasm compilation', () => { }); it('matching nargos compilation', async () => { - const wasmCircuit = compile( + const wasmCircuit = compile_program( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { @@ -132,9 +131,8 @@ describe('noir wasm compilation', () => { }).timeout(60 * 20e3); it('matching nargos compilation - context-implementation-compile-api', async () => { - const wasmCircuit = await compile_( + const wasmCircuit = await compile_program_( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { diff --git a/compiler/wasm/test/wasm/node/index.test.ts b/compiler/wasm/test/wasm/node/index.test.ts index c73ce7477e..23c87cc059 100644 --- a/compiler/wasm/test/wasm/node/index.test.ts +++ b/compiler/wasm/test/wasm/node/index.test.ts @@ -3,7 +3,7 @@ import { readFileSync } from 'fs'; import { join, resolve } from 'path'; import { expect } from 'chai'; -import { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/cjs'; +import { compile_program, PathToFileSourceMap, compile_program_, CompilerContext } from '../../../build/cjs'; const basePath = resolve(join(__dirname, '../../')); const { @@ -26,7 +26,7 @@ describe('noir wasm compilation', () => { it('matching nargos compilation', async () => { const sourceMap = new PathToFileSourceMap(); sourceMap.add_source_code(simpleScriptSourcePath, readFileSync(simpleScriptSourcePath, 'utf-8')); - const wasmCircuit = compile(simpleScriptSourcePath, undefined, undefined, sourceMap); + const wasmCircuit = compile_program(simpleScriptSourcePath, undefined, sourceMap); const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); if (!('program' in wasmCircuit)) { @@ -49,9 +49,8 @@ describe('noir wasm compilation', () => { }); it('matching nargos compilation', async () => { - const wasmCircuit = compile( + const wasmCircuit = compile_program( 'script/main.nr', - 
false, { root_dependencies: ['lib_a'], library_dependencies: { @@ -123,9 +122,8 @@ describe('noir wasm compilation', () => { }).timeout(60 * 20e3); it('matching nargos compilation - context-implementation-compile-api', async () => { - const wasmCircuit = await compile_( + const wasmCircuit = await compile_program_( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { From 176fab42970ff0a9797b7f8c7ce53817e7d85b90 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:05:41 +0000 Subject: [PATCH 03/17] chore(ci): prevent msrv checks from blocking PRs (#4414) # Description ## Problem\* Resolves ## Summary\* Current the MSRV check CI runs on every PR so if one of our dependencies breaks us, all merges halt until we fix this. This is unnecessary as we only need to stop publishing releases and normal development work can continue. This PR switches this workflow to instead run only on master and on a nightly schedule. If the workflow fails then an issue will be raised. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
Co-authored-by: kevaundray --- .github/ACVM_NOT_PUBLISHABLE.md | 13 +++++++ .../workflows/test-rust-workspace-msrv.yml | 37 +++++++++++++------ .github/workflows/test-rust-workspace.yml | 20 +++++----- cspell.json | 1 + 4 files changed, 49 insertions(+), 22 deletions(-) create mode 100644 .github/ACVM_NOT_PUBLISHABLE.md diff --git a/.github/ACVM_NOT_PUBLISHABLE.md b/.github/ACVM_NOT_PUBLISHABLE.md new file mode 100644 index 0000000000..e7eacb3b52 --- /dev/null +++ b/.github/ACVM_NOT_PUBLISHABLE.md @@ -0,0 +1,13 @@ +--- +title: "ACVM crates are not publishable" +assignees: TomAFrench kevaundray savio-sou +--- + + +The ACVM crates are currently unpublishable, making a release will NOT push our crates to crates.io. + +This is likely due to a crate we depend on bumping its MSRV above our own. Our lockfile is not taken into account when publishing to crates.io (as people downloading our crate don't use it) so we need to be able to use the most up to date versions of our dependencies (including transient dependencies) specified. + +Check the [MSRV check]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml index 061fc65ca8..0b2855fa83 100644 --- a/.github/workflows/test-rust-workspace-msrv.yml +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -6,8 +6,9 @@ name: Test (MSRV check) # We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. 
on: - pull_request: - merge_group: + schedule: + # Run a nightly check at 2 AM UTC + - cron: "0 2 * * *" push: branches: - master @@ -100,13 +101,25 @@ jobs: - run-tests steps: - - name: Report overall success - run: | - if [[ $FAIL == true ]]; then - exit 1 - else - exit 0 - fi - env: - # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. - FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + + # Raise an issue if the tests failed + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/ACVM_NOT_PUBLISHABLE.md \ No newline at end of file diff --git a/.github/workflows/test-rust-workspace.yml b/.github/workflows/test-rust-workspace.yml index c12dcaba0b..22684de304 100644 --- a/.github/workflows/test-rust-workspace.yml +++ b/.github/workflows/test-rust-workspace.yml @@ -88,13 +88,13 @@ jobs: - run-tests steps: - - name: Report overall success - run: | - if [[ $FAIL == true ]]; then - exit 1 - else - exit 0 - fi - env: - # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. 
- FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/cspell.json b/cspell.json index be6b7c5c7e..23659b39c6 100644 --- a/cspell.json +++ b/cspell.json @@ -118,6 +118,7 @@ "monomorphizes", "monomorphizing", "montcurve", + "MSRV", "nand", "nargo", "neovim", From ab25b5ed3cd17e3f53c5cc873571fe4c08bad35d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:40:31 +0000 Subject: [PATCH 04/17] chore: remove duplicate `parse_all` function in wasm compiler (#4411) # Description ## Problem\* Resolves ## Summary\* This removes a function which exists in `nargo` from being defined again in `noir_wasm` ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/wasm/src/compile.rs | 20 +++++++++----------- compiler/wasm/src/compile_new.rs | 6 ++++-- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index ca6c8efedb..9e6fca1126 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -1,9 +1,12 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; -use nargo::artifacts::{ - contract::{ContractArtifact, ContractFunctionArtifact}, - program::ProgramArtifact, +use nargo::{ + artifacts::{ + contract::{ContractArtifact, ContractFunctionArtifact}, + program::ProgramArtifact, + }, + parse_all, }; use noirc_driver::{ add_dep, file_manager_with_stdlib, prepare_crate, prepare_dependency, CompileOptions, @@ -12,7 +15,7 @@ use noirc_driver::{ use noirc_evaluator::errors::SsaReport; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::{def_map::parse_file, Context, ParsedFiles}, + hir::Context, }; use serde::Deserialize; use std::{collections::HashMap, path::Path}; @@ -152,10 +155,6 @@ impl PathToFileSourceMap { } } -pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles { - fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() -} - #[wasm_bindgen] pub fn compile_program( entry_point: String, @@ -309,14 +308,13 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { #[cfg(test)] mod test { + use nargo::parse_all; use noirc_driver::prepare_crate; use noirc_frontend::{graph::CrateName, hir::Context}; use crate::compile::PathToFileSourceMap; - use super::{ - file_manager_with_source_map, parse_all, process_dependency_graph, DependencyGraph, - }; + use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; use std::{collections::HashMap, path::Path}; fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static, 'static> { diff --git a/compiler/wasm/src/compile_new.rs 
b/compiler/wasm/src/compile_new.rs index 2a5f7ab654..d6b382f669 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -1,9 +1,10 @@ use crate::compile::{ - file_manager_with_source_map, parse_all, JsCompileContractResult, JsCompileProgramResult, + file_manager_with_source_map, JsCompileContractResult, JsCompileProgramResult, PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; +use nargo::parse_all; use noirc_driver::{ add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, @@ -265,10 +266,11 @@ fn prepare_compiler_context( #[cfg(test)] mod test { + use nargo::parse_all; use noirc_driver::prepare_crate; use noirc_frontend::hir::Context; - use crate::compile::{file_manager_with_source_map, parse_all, PathToFileSourceMap}; + use crate::compile::{file_manager_with_source_map, PathToFileSourceMap}; use std::path::Path; From cd796dea4937dd1a261f154e5f2e599bbc649165 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 15:22:01 +0000 Subject: [PATCH 05/17] fix: correct formatting for databus visibility types (#4423) # Description ## Problem\* Resolves ## Summary\* This PR fixes an issue uncovered by #4422 where we're not properly formatting databus visibility modifiers. I've fixed this and added a new test case for regressions. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- tooling/nargo_fmt/src/utils.rs | 14 +++++++++++++- tooling/nargo_fmt/src/visitor/item.rs | 14 ++++++++++---- tooling/nargo_fmt/tests/expected/databus.nr | 2 ++ tooling/nargo_fmt/tests/input/databus.nr | 2 ++ 4 files changed, 27 insertions(+), 5 deletions(-) create mode 100644 tooling/nargo_fmt/tests/expected/databus.nr create mode 100644 tooling/nargo_fmt/tests/input/databus.nr diff --git a/tooling/nargo_fmt/src/utils.rs b/tooling/nargo_fmt/src/utils.rs index 5874ebdebb..94969d45e8 100644 --- a/tooling/nargo_fmt/src/utils.rs +++ b/tooling/nargo_fmt/src/utils.rs @@ -1,3 +1,5 @@ +use std::borrow::Cow; + use crate::items::HasItem; use crate::rewrite; use crate::visitor::{FmtVisitor, Shape}; @@ -143,7 +145,7 @@ impl HasItem for Param { fn format(self, visitor: &FmtVisitor, shape: Shape) -> String { let pattern = visitor.slice(self.pattern.span()); let visibility = match self.visibility { - Visibility::Public => "pub ", + Visibility::Public => "pub", Visibility::Private => "", Visibility::DataBus => "call_data", }; @@ -152,6 +154,7 @@ impl HasItem for Param { pattern.to_string() } else { let ty = rewrite::typ(visitor, shape, self.typ); + let visibility = append_space_if_nonempty(visibility.into()); format!("{pattern}: {visibility}{ty}") } } @@ -183,6 +186,15 @@ pub(crate) fn last_line_contains_single_line_comment(s: &str) -> bool { s.lines().last().map_or(false, |line| line.contains("//")) } +pub(crate) fn append_space_if_nonempty(mut string: Cow) -> Cow { + if !string.is_empty() { + let inner = string.to_mut(); + inner.push(' '); + } + + string +} + pub(crate) fn last_line_used_width(s: &str, offset: usize) -> usize { if s.contains('\n') { last_line_width(s) diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index 1825a6e05b..28aad3c551 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -7,7 +7,10 @@ use noirc_frontend::{ use crate::{ rewrite::{self, UseTree}, - 
utils::{last_line_contains_single_line_comment, last_line_used_width, FindToken}, + utils::{ + append_space_if_nonempty, last_line_contains_single_line_comment, last_line_used_width, + FindToken, + }, visitor::expr::{format_seq, NewlineMode}, }; @@ -119,9 +122,12 @@ impl super::FmtVisitor<'_> { result.push_str("distinct "); } - if let Visibility::Public = func.def.return_visibility { - result.push_str("pub "); - } + let visibility = match func.def.return_visibility { + Visibility::Public => "pub", + Visibility::DataBus => "return_data", + Visibility::Private => "", + }; + result.push_str(&append_space_if_nonempty(visibility.into())); let typ = rewrite::typ(self, self.shape(), func.return_type()); result.push_str(&typ); diff --git a/tooling/nargo_fmt/tests/expected/databus.nr b/tooling/nargo_fmt/tests/expected/databus.nr new file mode 100644 index 0000000000..60934b60b2 --- /dev/null +++ b/tooling/nargo_fmt/tests/expected/databus.nr @@ -0,0 +1,2 @@ +fn main(x: pub u8, y: call_data u8) -> return_data u32 {} + diff --git a/tooling/nargo_fmt/tests/input/databus.nr b/tooling/nargo_fmt/tests/input/databus.nr new file mode 100644 index 0000000000..60934b60b2 --- /dev/null +++ b/tooling/nargo_fmt/tests/input/databus.nr @@ -0,0 +1,2 @@ +fn main(x: pub u8, y: call_data u8) -> return_data u32 {} + From 15c5618c6d15af527287d21ac74eb07cd2b98c14 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 17:06:54 +0000 Subject: [PATCH 06/17] chore(ci): enforce formatting of noir code in CI (#4422) # Description ## Problem\* Resolves ## Summary\* We currently format everything in the repository except our noir source code. If we enforce this internally then we'll uncover issues in the formatter earlier and provide a good example of what Noir source should look like. 
We then now run `nargo fmt --check` on the stdlib and `test_programs/execution_success` ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/workflows/formatting.yml | 65 +++++++++ noir_stdlib/src/array.nr | 23 ++-- noir_stdlib/src/bigint.nr | 16 +-- noir_stdlib/src/collections/bounded_vec.nr | 2 +- noir_stdlib/src/collections/vec.nr | 8 +- noir_stdlib/src/ec/montcurve.nr | 89 ++++++------ noir_stdlib/src/ec/swcurve.nr | 127 +++++++++--------- noir_stdlib/src/ec/tecurve.nr | 122 ++++++++--------- noir_stdlib/src/ecdsa_secp256k1.nr | 2 +- noir_stdlib/src/ecdsa_secp256r1.nr | 2 +- noir_stdlib/src/field.nr | 11 +- noir_stdlib/src/option.nr | 24 +--- noir_stdlib/src/scalar_mul.nr | 2 +- noir_stdlib/src/schnorr.nr | 2 +- noir_stdlib/src/slice.nr | 12 +- noir_stdlib/src/string.nr | 2 +- noir_stdlib/src/test.nr | 4 +- noir_stdlib/src/uint128.nr | 65 ++++----- test_programs/.gitignore | 3 +- .../closure_explicit_types/src/main.nr | 4 +- .../conditional_regression_579/src/main.nr | 4 +- .../reexports/src/main.nr | 4 +- .../specialization/src/main.nr | 8 +- .../1327_concrete_in_generic/src/main.nr | 28 ++-- .../array_dynamic/src/main.nr | 4 +- .../array_dynamic_blackbox_input/src/main.nr | 2 +- .../array_dynamic_main_output/src/main.nr | 2 +- .../assert_statement_recursive/src/main.nr | 2 +- .../execution_success/bigint/src/main.nr | 10 +- .../execution_success/brillig_cow/src/main.nr | 27 ++-- .../brillig_cow_regression/src/main.nr | 74 +++++----- .../brillig_fns_as_values/src/main.nr | 2 +- .../conditional_regression_661/src/main.nr | 4 +- .../execution_success/databus/src/main.nr | 8 +- .../execution_success/debug_logs/src/main.nr | 6 +- 
.../distinct_keyword/src/main.nr | 2 +- .../ecdsa_secp256k1/src/main.nr | 10 +- .../ecdsa_secp256r1/src/main.nr | 2 +- .../main_bool_arg/src/main.nr | 2 +- .../operator_overloading/src/main.nr | 6 +- .../regression_3394/src/main.nr | 2 +- .../regression_3607/src/main.nr | 2 +- .../regression_3889/src/main.nr | 1 - .../side_effects_constrain_array/src/main.nr | 4 +- .../execution_success/struct/src/main.nr | 4 +- test_programs/format.sh | 47 +++++++ 46 files changed, 455 insertions(+), 397 deletions(-) create mode 100755 test_programs/format.sh diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 43fd6daa91..279e90f5f6 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -63,3 +63,68 @@ jobs: - name: Run `yarn lint` run: yarn lint + + build-nargo: + runs-on: ubuntu-22.04 + timeout-minutes: 30 + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.73.0 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + nargo_fmt: + needs: [build-nargo] + name: Nargo fmt + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname 
$nargo_binary)" + nargo -V + + - name: Format stdlib + working-directory: ./noir_stdlib + run: nargo fmt --check + + - name: Format test suite + working-directory: ./test_programs + run: ./format.sh check diff --git a/noir_stdlib/src/array.nr b/noir_stdlib/src/array.nr index 7871b1a6f9..3da4b64917 100644 --- a/noir_stdlib/src/array.nr +++ b/noir_stdlib/src/array.nr @@ -17,14 +17,14 @@ impl [T; N] { for i in 0..N { let pos = find_index(sorted_index, i); assert(sorted_index[pos] == i); - } + } // Sort the array using the indexes - for i in 0..N { + for i in 0..N { result[i] = self[sorted_index[i]]; - } + } // Ensure the array is sorted - for i in 0..N-1 { - assert(ordering(result[i], result[i+1])); + for i in 0..N - 1 { + assert(ordering(result[i], result[i + 1])); } result @@ -32,12 +32,12 @@ impl [T; N] { /// Returns the index of the elements in the array that would sort it, using the provided custom sorting function. unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [u64; N] { - let mut result = [0;N]; + let mut result = [0; N]; let mut a = self; for i in 0..N { result[i] = i; } - for i in 1 .. N { + for i in 1..N { for j in 0..i { if ordering(a[i], a[j]) { let old_a_j = a[j]; @@ -45,14 +45,13 @@ impl [T; N] { a[i] = old_a_j; let old_j = result[j]; result[j] = result[i]; - result[i] = old_j; + result[i] = old_j; } } } result } - // Converts an array into a slice. pub fn as_slice(self) -> [T] { let mut slice = []; @@ -68,7 +67,7 @@ impl [T; N] { let first_elem = f(self[0]); let mut ret = [first_elem; N]; - for i in 1 .. self.len() { + for i in 1..self.len() { ret[i] = f(self[i]); } @@ -90,7 +89,7 @@ impl [T; N] { // element of the given array as its starting accumulator value. pub fn reduce(self, f: fn[Env](T, T) -> T) -> T { let mut accumulator = self[0]; - for i in 1 .. 
self.len() { + for i in 1..self.len() { accumulator = f(accumulator, self[i]); } accumulator @@ -122,7 +121,7 @@ unconstrained fn find_index(a: [u64; N], find: u64) -> u64 { for i in 0..a.len() { if a[i] == find { result = i; - } + } } result } diff --git a/noir_stdlib/src/bigint.nr b/noir_stdlib/src/bigint.nr index 1102665120..66e81f0581 100644 --- a/noir_stdlib/src/bigint.nr +++ b/noir_stdlib/src/bigint.nr @@ -1,5 +1,4 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem,}; - +use crate::ops::{Add, Sub, Mul, Div, Rem}; global bn254_fq = [0x47, 0xFD, 0x7C, 0xD8, 0x16, 0x8C, 0x20, 0x3C, 0x8d, 0xca, 0x71, 0x68, 0x91, 0x6a, 0x81, 0x97, 0x5d, 0x58, 0x81, 0x81, 0xb6, 0x45, 0x50, 0xb8, 0x29, 0xa0, 0x31, 0xe1, 0x72, 0x4e, 0x64, 0x30]; @@ -13,7 +12,6 @@ global secpr1_fq = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF]; global secpr1_fr = [0x51, 0x25, 0x63, 0xFC, 0xC2, 0xCA, 0xB9, 0xF3, 0x84, 0x9E, 0x17, 0xA7, 0xAD, 0xFA, 0xE6, 0xBC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,0xFF, 0xFF, 0xFF, 0xFF]; - struct BigInt { pointer: u32, @@ -22,17 +20,13 @@ struct BigInt { impl BigInt { #[builtin(bigint_add)] - fn bigint_add(self, other: BigInt) -> BigInt { - } + fn bigint_add(self, other: BigInt) -> BigInt {} #[builtin(bigint_sub)] - fn bigint_sub(self, other: BigInt) -> BigInt { - } + fn bigint_sub(self, other: BigInt) -> BigInt {} #[builtin(bigint_mul)] - fn bigint_mul(self, other: BigInt) -> BigInt { - } + fn bigint_mul(self, other: BigInt) -> BigInt {} #[builtin(bigint_div)] - fn bigint_div(self, other: BigInt) -> BigInt { - } + fn bigint_div(self, other: BigInt) -> BigInt {} #[builtin(bigint_from_le_bytes)] fn from_le_bytes(bytes: [u8], modulus: [u8]) -> BigInt {} #[builtin(bigint_to_le_bytes)] diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index a4aa4823f3..f78d86de77 100644 --- 
a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -29,7 +29,7 @@ impl BoundedVec { self.len } - pub fn max_len(_self: BoundedVec) -> u64{ + pub fn max_len(_self: BoundedVec) -> u64 { MaxLen } diff --git a/noir_stdlib/src/collections/vec.nr b/noir_stdlib/src/collections/vec.nr index 2e7945be82..deec98185f 100644 --- a/noir_stdlib/src/collections/vec.nr +++ b/noir_stdlib/src/collections/vec.nr @@ -19,12 +19,12 @@ impl Vec { /// points beyond the end of the vector. pub fn get(self, index: u64) -> T { self.slice[index] - } + } /// Push a new element to the end of the vector, returning a /// new vector with a length one greater than the /// original unmodified vector. - pub fn push(&mut self, elem: T) { + pub fn push(&mut self, elem: T) { self.slice = self.slice.push_back(elem); } @@ -32,7 +32,7 @@ impl Vec { /// a new vector with a length of one less than the given vector, /// as well as the popped element. /// Panics if the given vector's length is zero. 
- pub fn pop(&mut self) -> T { + pub fn pop(&mut self) -> T { let (popped_slice, last_elem) = self.slice.pop_back(); self.slice = popped_slice; last_elem @@ -42,7 +42,7 @@ impl Vec { /// after it to the right pub fn insert(&mut self, index: u64, elem: T) { self.slice = self.slice.insert(index, elem); - } + } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the removed element diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index 83a17bae32..7dc756781c 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -31,7 +31,7 @@ mod affine { impl Point { // Point constructor pub fn new(x: Field, y: Field) -> Self { - Self {x, y, infty: false} + Self { x, y, infty: false } } // Check if zero @@ -45,30 +45,30 @@ mod affine { curvegroup::Point::zero() } else { let (x,y) = (self.x, self.y); - curvegroup::Point::new(x,y,1) + curvegroup::Point::new(x, y, 1) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, infty: true} + Self { x: 0, y: 0, infty: true } } // Negation fn negate(self) -> Self { let Self {x, y, infty} = self; - Self {x, y: 0-y, infty} + Self { x, y: 0 - y, infty } } // Map into equivalent Twisted Edwards curve fn into_tecurve(self) -> TEPoint { let Self {x, y, infty} = self; - - if infty | (y*(x+1) == 0) { + + if infty | (y * (x + 1) == 0) { TEPoint::zero() } else { - TEPoint::new(x/y, (x-1)/(x+1)) + TEPoint::new(x / y, (x - 1) / (x + 1)) } } } @@ -84,9 +84,9 @@ mod affine { pub fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients assert(k != 0); - assert(j*j != 4); + assert(j * j != 4); - let curve = Self {j, k, gen}; + let curve = Self { j, k, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -103,8 +103,8 @@ mod affine { pub fn contains(self, p: Point) -> bool { let Self {j, k, gen: _gen} = self; let Point {x, y, infty: infty} = p; - - infty | (k*y*y == x*(x*x + j*x + 1)) + + infty | 
(k * y * y == x * (x * x + j * x + 1)) } // Point addition @@ -122,7 +122,7 @@ mod affine { fn mul(self, n: Field, p: Point) -> Point { self.into_tecurve().mul(n, p.into_tecurve()).into_montcurve() } - + // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); @@ -142,15 +142,15 @@ mod affine { // Conversion to equivalent Twisted Edwards curve fn into_tecurve(self) -> TECurve { let Self {j, k, gen} = self; - TECurve::new((j+2)/k, (j-2)/k, gen.into_tecurve()) + TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } // Conversion to equivalent Short Weierstraß curve pub fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; - let a0 = (3-j*j)/(3*k*k); - let b0 = (2*j*j*j - 9*j)/(27*k*k*k); + let a0 = (3 - j * j) / (3 * k * k); + let b0 = (2 * j * j * j - 9 * j) / (27 * k * k * k); SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } @@ -160,8 +160,7 @@ mod affine { if p.is_zero() { SWPoint::zero() } else { - SWPoint::new((3*p.x + self.j)/(3*self.k), - p.y/self.k) + SWPoint::new((3 * p.x + self.j) / (3 * self.k), p.y / self.k) } } @@ -170,8 +169,8 @@ mod affine { let SWPoint {x, y, infty} = p; let j = self.j; let k = self.k; - - Point {x: (3*k*x - j)/3, y: y*k, infty} + + Point { x: (3 * k * x - j) / 3, y: y * k, infty } } // Elligator 2 map-to-curve method; see . 
@@ -179,18 +178,18 @@ mod affine { let j = self.j; let k = self.k; let z = ZETA; // Non-square Field element required for map - + // Check whether curve is admissible assert(j != 0); - let l = (j*j - 4)/(k*k); + let l = (j * j - 4) / (k * k); assert(l != 0); assert(is_square(l) == false); - let x1 = safe_inverse(1+z*u*u)*(0 - (j/k)); - - let gx1 = x1*x1*x1 + (j/k)*x1*x1 + x1/(k*k); - let x2 = 0 - x1 - (j/k); - let gx2 = x2*x2*x2 + (j/k)*x2*x2 + x2/(k*k); + let x1 = safe_inverse(1 + z * u * u) * (0 - (j / k)); + + let gx1 = x1 * x1 * x1 + (j / k) * x1 * x1 + x1 / (k * k); + let x2 = 0 - x1 - (j / k); + let gx2 = x2 * x2 * x2 + (j / k) * x2 * x2 + x2 / (k * k); let x = if is_square(gx1) { x1 } else { x2 }; @@ -202,13 +201,12 @@ mod affine { if y0.sgn0() == 0 { y0 } else { 0 - y0 } }; - Point::new(x*k, y*k) - + Point::new(x * k, y * k) } // SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.map_from_swcurve(self.into_swcurve().swu_map(z,u)) + self.map_from_swcurve(self.into_swcurve().swu_map(z, u)) } } } @@ -240,7 +238,7 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, z: Field) -> Self { - Self {x, y, z} + Self { x, y, z } } // Check if zero @@ -254,20 +252,20 @@ mod curvegroup { affine::Point::zero() } else { let (x,y,z) = (self.x, self.y, self.z); - affine::Point::new(x/z, y/z) + affine::Point::new(x / z, y / z) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 1,z: 0} + Self { x: 0, y: 1, z: 0 } } // Negation fn negate(self) -> Self { let Self {x, y, z} = self; - Point::new(x, 0-y, z) + Point::new(x, 0 - y, z) } // Map into equivalent Twisted Edwards curve @@ -287,9 +285,9 @@ mod curvegroup { pub fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients assert(k != 0); - assert(j*j != 4); + assert(j * j != 4); - let curve = Self {j, k, gen}; + let curve = Self { j, k, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -306,8 
+304,8 @@ mod curvegroup { pub fn contains(self, p: Point) -> bool { let Self {j, k, gen: _gen} = self; let Point {x, y, z} = p; - - k*y*y*z == x*(x*x + j*x*z + z*z) + + k * y * y * z == x * (x * x + j * x * z + z * z) } // Point addition @@ -320,12 +318,12 @@ mod curvegroup { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, p.into_tecurve()).into_montcurve() } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { self.into_tecurve().mul(n, p.into_tecurve()).into_montcurve() } - + // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); @@ -345,18 +343,17 @@ mod curvegroup { // Conversion to equivalent Twisted Edwards curve fn into_tecurve(self) -> TECurve { let Self {j, k, gen} = self; - TECurve::new((j+2)/k, (j-2)/k, gen.into_tecurve()) + TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } // Conversion to equivalent Short Weierstraß curve fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; - let a0 = (3-j*j)/(3*k*k); - let b0 = (2*j*j*j - 9*j)/(27*k*k*k); + let a0 = (3 - j * j) / (3 * k * k); + let b0 = (2 * j * j * j - 9 * j) / (27 * k * k * k); - SWCurve::new(a0, b0, - self.map_into_swcurve(self.gen)) + SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } // Point mapping into equivalent Short Weierstraß curve @@ -373,10 +370,10 @@ mod curvegroup { fn elligator2_map(self, u: Field) -> Point { self.into_affine().elligator2_map(u).into_group() } - + // SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_affine().swu_map(z,u).into_group() + self.into_affine().swu_map(z, u).into_group() } } } diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index e64f5a7be0..9dd324f308 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -27,14 
+27,14 @@ mod affine { impl Point { // Point constructor pub fn new(x: Field, y: Field) -> Self { - Self {x, y, infty: false} + Self { x, y, infty: false } } // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) } - + // Conversion to CurveGroup coordinates fn into_group(self) -> curvegroup::Point { let Self {x, y, infty} = self; @@ -45,16 +45,16 @@ mod affine { curvegroup::Point::new(x, y, 1) } } - + // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, infty: true} + Self { x: 0, y: 0, infty: true } } - + // Negation fn negate(self) -> Self { let Self {x, y, infty} = self; - Self {x, y: 0-y, infty} + Self { x, y: 0 - y, infty } } } @@ -72,8 +72,8 @@ mod affine { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - assert(4*a*a*a + 27*b*b != 0); - + assert(4 * a * a * a + 27 * b * b != 0); + let curve = Curve { a, b, gen }; // gen should be on the curve @@ -85,16 +85,16 @@ mod affine { // Conversion to CurveGroup coordinates fn into_group(self) -> curvegroup::Curve { let Curve{a, b, gen} = self; - - curvegroup::Curve {a, b, gen: gen.into_group()} + + curvegroup::Curve { a, b, gen: gen.into_group() } } // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y, infty} = p; - infty | (y*y == x*x*x + self.a*x + self.b) + infty | (y * y == x * x * x + self.a * x + self.b) } - + // Point addition, implemented in terms of mixed addition for reasons of efficiency pub fn add(self, p1: Point, p2: Point) -> Point { self.mixed_add(p1, p2.into_group()).into_affine() @@ -109,9 +109,9 @@ mod affine { } else { let Point {x: x1, y: y1, infty: _inf} = p1; let curvegroup::Point {x: x2, y: y2, z: z2} = p2; - let you1 = x1*z2*z2; + let you1 = x1 * z2 * z2; let you2 = x2; - let s1 = y1*z2*z2*z2; + let s1 = y1 * z2 * z2 * z2; let s2 = y2; if you1 == you2 { @@ -120,15 +120,14 @@ mod affine { } else { self.into_group().double(p2) } - } else - { + } else { let h = you2 - you1; let r = s2 - 
s1; - let x3 = r*r - h*h*h - 2*you1*h*h; - let y3 = r*(you1*h*h - x3) - s1*h*h*h; - let z3 = h*z2; + let x3 = r * r - h * h * h - 2 * you1 * h * h; + let y3 = r * (you1 * h * h - x3) - s1 * h * h * h; + let z3 = h * z2; - curvegroup::Point::new(x3,y3,z3) + curvegroup::Point::new(x3, y3, z3) } } } @@ -138,7 +137,7 @@ mod affine { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { self.into_group().mul(n, p.into_group()).into_affine() @@ -165,17 +164,25 @@ mod affine { // where g(x) = x^3 + a*x + b. swu_map(c,z,.) then maps a Field element to a point on curve c. fn swu_map(self, z: Field, u: Field) -> Point { // Check whether curve is admissible - assert(self.a*self.b != 0); - + assert(self.a * self.b != 0); + let Curve {a, b, gen: _gen} = self; - - let tv1 = safe_inverse(z*z*u*u*u*u + u*u*z); - let x1 = if tv1 == 0 {b/(z*a)} else {(0-b/a)*(1 + tv1)}; - let gx1 = x1*x1*x1 + a*x1 + b; - let x2 = z*u*u*x1; - let gx2 = x2*x2*x2 + a*x2 + b; - let (x,y) = if is_square(gx1) {(x1, sqrt(gx1))} else {(x2, sqrt(gx2))}; - Point::new(x, if u.sgn0() != y.sgn0() {0-y} else {y}) + + let tv1 = safe_inverse(z * z * u * u * u * u + u * u * z); + let x1 = if tv1 == 0 { + b / (z * a) + } else { + (0 - b / a) * (1 + tv1) + }; + let gx1 = x1 * x1 * x1 + a * x1 + b; + let x2 = z * u * u * x1; + let gx2 = x2 * x2 * x2 + a * x2 + b; + let (x,y) = if is_square(gx1) { + (x1, sqrt(gx1)) + } else { + (x2, sqrt(gx2)) + }; + Point::new(x, if u.sgn0() != y.sgn0() { 0 - y } else { y }) } } } @@ -205,14 +212,14 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, z: Field) -> Self { - Self {x, y, z} + Self { x, y, z } } // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) } - + // Conversion to affine coordinates pub fn into_affine(self) -> affine::Point { let Self {x, y, z} = self; @@ 
-220,20 +227,19 @@ mod curvegroup { if z == 0 { affine::Point::zero() } else { - affine::Point::new(x/(z*z), y/(z*z*z)) + affine::Point::new(x / (z * z), y / (z * z * z)) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, z: 0} + Self { x: 0, y: 0, z: 0 } } - - + // Negation fn negate(self) -> Self { let Self {x, y, z} = self; - Self {x, y: 0-y, z} + Self { x, y: 0 - y, z } } } @@ -250,8 +256,8 @@ mod curvegroup { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - assert(4*a*a*a + 27*b*b != 0); - + assert(4 * a * a * a + 27 * b * b != 0); + let curve = Curve { a, b, gen }; // gen should be on the curve @@ -264,7 +270,7 @@ mod curvegroup { pub fn into_affine(self) -> affine::Curve { let Curve{a, b, gen} = self; - affine::Curve {a, b, gen: gen.into_affine()} + affine::Curve { a, b, gen: gen.into_affine() } } // Membership check @@ -273,13 +279,12 @@ mod curvegroup { if z == 0 { true } else { - y*y == x*x*x + self.a*x*z*z*z*z + self.b*z*z*z*z*z*z + y * y == x * x * x + self.a * x * z * z * z * z + self.b * z * z * z * z * z * z } } - + // Addition pub fn add(self, p1: Point, p2: Point) -> Point { - if p1.is_zero() { p2 } else if p2.is_zero() { @@ -287,10 +292,10 @@ mod curvegroup { } else { let Point {x: x1, y: y1, z: z1} = p1; let Point {x: x2, y: y2, z: z2} = p2; - let you1 = x1*z2*z2; - let you2 = x2*z1*z1; - let s1 = y1*z2*z2*z2; - let s2 = y2*z1*z1*z1; + let you1 = x1 * z2 * z2; + let you2 = x2 * z1 * z1; + let s1 = y1 * z2 * z2 * z2; + let s2 = y2 * z1 * z1 * z1; if you1 == you2 { if s1 != s2 { @@ -301,11 +306,11 @@ mod curvegroup { } else { let h = you2 - you1; let r = s2 - s1; - let x3 = r*r - h*h*h - 2*you1*h*h; - let y3 = r*(you1*h*h - x3) - s1*h*h*h; - let z3 = h*z1*z2; + let x3 = r * r - h * h * h - 2 * you1 * h * h; + let y3 = r * (you1 * h * h - x3) - s1 * h * h * h; + let z3 = h * z1 * z2; - Point::new(x3,y3,z3) + Point::new(x3, y3, z3) } } } @@ -313,19 +318,19 @@ mod curvegroup { 
// Point doubling pub fn double(self, p: Point) -> Point { let Point {x, y, z} = p; - + if p.is_zero() { p } else if y == 0 { Point::zero() } else { - let s = 4*x*y*y; - let m = 3*x*x + self.a*z*z*z*z; - let x0 = m*m - 2*s; - let y0 = m*(s-x0) - 8*y*y*y*y; - let z0 = 2*y*z; + let s = 4 * x * y * y; + let m = 3 * x * x + self.a * z * z * z * z; + let x0 = m * m - 2 * s; + let y0 = m * (s - x0) - 8 * y * y * y * y; + let z0 = 2 * y * z; - Point::new(x0,y0,z0) + Point::new(x0, y0, z0) } } @@ -351,7 +356,7 @@ mod curvegroup { let mut n_as_bits: [u1; 254] = [0; 254]; let tmp = n.to_le_bits(N_BITS as u32); for i in 0..254 { - n_as_bits[i] = tmp[i]; + n_as_bits[i] = tmp[i]; } self.bit_mul(n_as_bits, p) @@ -375,7 +380,7 @@ mod curvegroup { // Simplified SWU map-to-curve method fn swu_map(self, z: Field, u: Field) -> Point { - self.into_affine().swu_map(z,u).into_group() + self.into_affine().swu_map(z, u).into_group() } } } diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 5333ece4c4..506fe89313 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -40,18 +40,18 @@ mod affine { fn into_group(self) -> curvegroup::Point { let Self {x, y} = self; - curvegroup::Point::new(x, y, x*y, 1) + curvegroup::Point::new(x, y, x * y, 1) } // Additive identity pub fn zero() -> Self { - Point::new(0,1) + Point::new(0, 1) } // Negation fn negate(self) -> Self { let Self {x, y} = self; - Point::new(0-x, y) + Point::new(0 - x, y) } // Map into prime-order subgroup of equivalent Montgomery curve @@ -60,10 +60,10 @@ mod affine { MPoint::zero() } else { let Self {x, y} = self; - let x0 = (1+y)/(1-y); - let y0 = (1+y)/(x*(1-y)); + let x0 = (1 + y) / (1 - y); + let y0 = (1 + y) / (x * (1 - y)); - MPoint::new(x0,y0) + MPoint::new(x0, y0) } } } @@ -81,9 +81,9 @@ mod affine { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - assert(a*d*(a-d) != 0); - - let curve = Curve {a, d, gen}; + 
assert(a * d * (a - d) != 0); + + let curve = Curve { a, d, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -95,15 +95,15 @@ mod affine { fn into_group(self) -> curvegroup::Curve { let Curve{a, d, gen} = self; - curvegroup::Curve {a, d, gen: gen.into_group()} + curvegroup::Curve { a, d, gen: gen.into_group() } } - + // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y} = p; - self.a*x*x + y*y == 1 + self.d*x*x*y*y + self.a * x * x + y * y == 1 + self.d * x * x * y * y } - + // Point addition, implemented in terms of mixed addition for reasons of efficiency pub fn add(self, p1: Point, p2: Point) -> Point { self.mixed_add(p1, p2.into_group()).into_affine() @@ -114,20 +114,20 @@ mod affine { let Point{x: x1, y: y1} = p1; let curvegroup::Point{x: x2, y: y2, t: t2, z: z2} = p2; - let a = x1*x2; - let b = y1*y2; - let c = self.d*x1*y1*t2; - let e = (x1 + y1)*(x2 + y2) - a - b; + let a = x1 * x2; + let b = y1 * y2; + let c = self.d * x1 * y1 * t2; + let e = (x1 + y1) * (x2 + y2) - a - b; let f = z2 - c; let g = z2 + c; - let h = b - self.a*a; + let h = b - self.a * a; - let x = e*f; - let y = g*h; - let t = e*h; - let z = f*g; + let x = e * f; + let y = g * h; + let t = e * h; + let z = f * g; - curvegroup::Point::new(x,y,t,z) + curvegroup::Point::new(x, y, t, z) } // Scalar multiplication with scalar represented by a bit array (little-endian convention). @@ -135,7 +135,7 @@ mod affine { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } - + // Scalar multiplication (p + ... 
+ p n times) fn mul(self, n: Field, p: Point) -> Point { self.into_group().mul(n, p.into_group()).into_affine() @@ -159,10 +159,10 @@ mod affine { // Conversion to equivalent Montgomery curve pub fn into_montcurve(self) -> MCurve { - let j = 2*(self.a + self.d)/(self.a - self.d); - let k = 4/(self.a - self.d); + let j = 2 * (self.a + self.d) / (self.a - self.d); + let k = 4 / (self.a - self.d); let gen_montcurve = self.gen.into_montcurve(); - + MCurve::new(j, k, gen_montcurve) } @@ -188,7 +188,7 @@ mod affine { // Simplified SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_montcurve().swu_map(z,u).into_tecurve() + self.into_montcurve().swu_map(z, u).into_tecurve() } } } @@ -222,7 +222,7 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, t: Field, z: Field) -> Self { - Self {x, y, t, z} + Self { x, y, t, z } } // Check if zero @@ -235,19 +235,19 @@ mod curvegroup { pub fn into_affine(self) -> affine::Point { let Self {x, y, t: _t, z} = self; - affine::Point::new(x/z, y/z) + affine::Point::new(x / z, y / z) } // Additive identity pub fn zero() -> Self { - Point::new(0,1,0,1) + Point::new(0, 1, 0, 1) } // Negation fn negate(self) -> Self { let Self {x, y, t, z} = self; - Point::new(0-x, y, 0-t, z) + Point::new(0 - x, y, 0 - t, z) } // Map into prime-order subgroup of equivalent Montgomery curve @@ -269,8 +269,8 @@ mod curvegroup { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - assert(a*d*(a-d) != 0); - + assert(a * d * (a - d) != 0); + let curve = Curve { a, d, gen }; // gen should be on the curve @@ -283,14 +283,16 @@ mod curvegroup { pub fn into_affine(self) -> affine::Curve { let Curve{a, d, gen} = self; - affine::Curve {a, d, gen: gen.into_affine()} + affine::Curve { a, d, gen: gen.into_affine() } } // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y, t, z} = p; - (z != 0) & (z*t == x*y) & 
(z*z*(self.a*x*x + y*y) == z*z*z*z + self.d*x*x*y*y) + (z != 0) + & (z * t == x * y) + & (z * z * (self.a * x * x + y * y) == z * z * z * z + self.d * x * x * y * y) } // Point addition @@ -298,40 +300,40 @@ mod curvegroup { let Point{x: x1, y: y1, t: t1, z: z1} = p1; let Point{x: x2, y: y2, t: t2, z: z2} = p2; - let a = x1*x2; - let b = y1*y2; - let c = self.d*t1*t2; - let d = z1*z2; - let e = (x1 + y1)*(x2 + y2) - a - b; + let a = x1 * x2; + let b = y1 * y2; + let c = self.d * t1 * t2; + let d = z1 * z2; + let e = (x1 + y1) * (x2 + y2) - a - b; let f = d - c; let g = d + c; - let h = b - self.a*a; + let h = b - self.a * a; - let x = e*f; - let y = g*h; - let t = e*h; - let z = f*g; + let x = e * f; + let y = g * h; + let t = e * h; + let z = f * g; - Point::new(x,y,t,z) + Point::new(x, y, t, z) } // Point doubling, cf. §3.3 pub fn double(self, p: Point) -> Point { let Point{x, y, t: _t, z} = p; - let a = x*x; - let b = y*y; - let c = 2*z*z; - let d = self.a*a; - let e = (x + y)*(x + y) - a - b; + let a = x * x; + let b = y * y; + let c = 2 * z * z; + let d = self.a * a; + let e = (x + y) * (x + y) - a - b; let g = d + b; let f = g - c; let h = d - b; - let x0 = e*f; - let y0 = g*h; - let t0 = e*h; - let z0 = f*g; + let x0 = e * f; + let y0 = g * h; + let t0 = e * h; + let z0 = f * g; Point::new(x0, y0, t0, z0) } @@ -340,7 +342,7 @@ mod curvegroup { // If k is the natural number represented by `bits`, then this computes p + ... + p k times. fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); - + for i in 0..N { out = self.add( self.add(out, out), @@ -349,7 +351,7 @@ mod curvegroup { out } - + // Scalar multiplication (p + ... 
+ p n times) pub fn mul(self, n: Field, p: Point) -> Point { let N_BITS = crate::field::modulus_num_bits(); @@ -358,7 +360,7 @@ mod curvegroup { let mut n_as_bits: [u1; 254] = [0; 254]; let tmp = n.to_le_bits(N_BITS as u32); for i in 0..254 { - n_as_bits[i] = tmp[i]; + n_as_bits[i] = tmp[i]; } self.bit_mul(n_as_bits, p) @@ -407,7 +409,7 @@ mod curvegroup { // Simplified SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_montcurve().swu_map(z,u).into_tecurve() + self.into_montcurve().swu_map(z, u).into_tecurve() } } } diff --git a/noir_stdlib/src/ecdsa_secp256k1.nr b/noir_stdlib/src/ecdsa_secp256k1.nr index e8d9af2230..b72a1acd04 100644 --- a/noir_stdlib/src/ecdsa_secp256k1.nr +++ b/noir_stdlib/src/ecdsa_secp256k1.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message_hash: [u8; N] ) -> bool // docs:end:ecdsa_secp256k1 -{} \ No newline at end of file +{} diff --git a/noir_stdlib/src/ecdsa_secp256r1.nr b/noir_stdlib/src/ecdsa_secp256r1.nr index 9fe932a2f3..ef92bf24ae 100644 --- a/noir_stdlib/src/ecdsa_secp256r1.nr +++ b/noir_stdlib/src/ecdsa_secp256r1.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message_hash: [u8; N] ) -> bool // docs:end:ecdsa_secp256r1 -{} \ No newline at end of file +{} diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index a7278d8599..0f4c2caffd 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -6,7 +6,7 @@ impl Field { crate::assert_constant(bit_size); self.__to_le_bits(bit_size) } - + pub fn to_be_bits(self: Self, bit_size: u32) -> [u1] { crate::assert_constant(bit_size); self.__to_be_bits(bit_size) @@ -14,7 +14,7 @@ impl Field { #[builtin(to_le_bits)] fn __to_le_bits(self, _bit_size: u32) -> [u1] {} - + #[builtin(to_be_bits)] fn __to_be_bits(self, bit_size: u32) -> [u1] {} @@ -35,7 +35,6 @@ impl Field { self.to_be_radix(256, byte_size) } - pub fn to_le_radix(self: Self, radix: u32, result_len: u32) -> [u8] { crate::assert_constant(radix); 
crate::assert_constant(result_len); @@ -48,17 +47,14 @@ impl Field { self.__to_be_radix(radix, result_len) } - - // decompose `_self` into a `_result_len` vector over the `_radix` basis // `_radix` must be less than 256 #[builtin(to_le_radix)] fn __to_le_radix(self, radix: u32, result_len: u32) -> [u8] {} - + #[builtin(to_be_radix)] fn __to_be_radix(self, radix: u32, result_len: u32) -> [u8] {} - // Returns self to the power of the given exponent value. // Caution: we assume the exponent fits into 32 bits // using a bigger bit size impacts negatively the performance and should be done only if the exponent does not fit in 32 bits @@ -85,7 +81,6 @@ impl Field { lt_fallback(self, another) } } - } #[builtin(modulus_num_bits)] diff --git a/noir_stdlib/src/option.nr b/noir_stdlib/src/option.nr index cab95731d0..1c32f758af 100644 --- a/noir_stdlib/src/option.nr +++ b/noir_stdlib/src/option.nr @@ -39,11 +39,7 @@ impl Option { /// Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. pub fn unwrap_or(self, default: T) -> T { - if self._is_some { - self._value - } else { - default - } + if self._is_some { self._value } else { default } } /// Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return @@ -112,31 +108,19 @@ impl Option { /// If self is Some, return self. Otherwise, return `other`. pub fn or(self, other: Self) -> Self { - if self._is_some { - self - } else { - other - } + if self._is_some { self } else { other } } /// If self is Some, return self. Otherwise, return `default()`. pub fn or_else(self, default: fn[Env]() -> Self) -> Self { - if self._is_some { - self - } else { - default() - } + if self._is_some { self } else { default() } } // If only one of the two Options is Some, return that option. // Otherwise, if both options are Some or both are None, None is returned. 
pub fn xor(self, other: Self) -> Self { if self._is_some { - if other._is_some { - Option::none() - } else { - self - } + if other._is_some { Option::none() } else { self } } else if other._is_some { other } else { diff --git a/noir_stdlib/src/scalar_mul.nr b/noir_stdlib/src/scalar_mul.nr index 1a7f1ad707..eee7aac39f 100644 --- a/noir_stdlib/src/scalar_mul.nr +++ b/noir_stdlib/src/scalar_mul.nr @@ -6,7 +6,7 @@ struct EmbeddedCurvePoint { } impl EmbeddedCurvePoint { - fn double(self) -> EmbeddedCurvePoint { + fn double(self) -> EmbeddedCurvePoint { embedded_curve_add(self, self) } } diff --git a/noir_stdlib/src/schnorr.nr b/noir_stdlib/src/schnorr.nr index 3365625455..757963d40d 100644 --- a/noir_stdlib/src/schnorr.nr +++ b/noir_stdlib/src/schnorr.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message: [u8; N] ) -> bool // docs:end:schnorr_verify -{} \ No newline at end of file +{} diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index bb5c43e497..ea8d09d14c 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -3,34 +3,34 @@ impl [T] { /// new slice with a length one greater than the /// original unmodified slice. #[builtin(slice_push_back)] - pub fn push_back(self, elem: T) -> Self { } + pub fn push_back(self, elem: T) -> Self {} /// Push a new element to the front of the slice, returning a /// new slice with a length one greater than the /// original unmodified slice. 
#[builtin(slice_push_front)] - pub fn push_front(self, elem: T) -> Self { } + pub fn push_front(self, elem: T) -> Self {} /// Remove the last element of the slice, returning the /// popped slice and the element in a tuple #[builtin(slice_pop_back)] - pub fn pop_back(self) -> (Self, T) { } + pub fn pop_back(self) -> (Self, T) {} /// Remove the first element of the slice, returning the /// element and the popped slice in a tuple #[builtin(slice_pop_front)] - pub fn pop_front(self) -> (T, Self) { } + pub fn pop_front(self) -> (T, Self) {} /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - pub fn insert(self, index: u64, elem: T) -> Self { } + pub fn insert(self, index: u64, elem: T) -> Self {} /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - pub fn remove(self, index: u64) -> (Self, T) { } + pub fn remove(self, index: u64) -> (Self, T) {} // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. 
diff --git a/noir_stdlib/src/string.nr b/noir_stdlib/src/string.nr index ad6fd19e2d..12b5a1e75e 100644 --- a/noir_stdlib/src/string.nr +++ b/noir_stdlib/src/string.nr @@ -2,7 +2,7 @@ use crate::collections::vec::Vec; impl str { /// Converts the given string into a byte array #[builtin(str_as_bytes)] - pub fn as_bytes(self) -> [u8; N] { } + pub fn as_bytes(self) -> [u8; N] {} /// return a byte vector of the str content pub fn as_bytes_vec(self: Self) -> Vec { diff --git a/noir_stdlib/src/test.nr b/noir_stdlib/src/test.nr index 560cfde741..e1c320215d 100644 --- a/noir_stdlib/src/test.nr +++ b/noir_stdlib/src/test.nr @@ -19,9 +19,7 @@ struct OracleMock { impl OracleMock { unconstrained pub fn mock(name: str) -> Self { - Self { - id: create_mock_oracle(name), - } + Self { id: create_mock_oracle(name) } } unconstrained pub fn with_params

(self, params: P) -> Self { diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index c8c6217de9..d6f0b1e223 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -13,14 +13,11 @@ impl U128 { pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { // in order to handle multiplication, we need to represent the product of two u64 without overflow assert(crate::field::modulus_num_bits() as u32 > 128); - U128 { - lo: lo as Field, - hi: hi as Field, - } + U128 { lo: lo as Field, hi: hi as Field } } pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { - U128::from_u64s_le(lo,hi) + U128::from_u64s_le(lo, hi) } pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { @@ -36,16 +33,13 @@ impl U128 { hi += (bytes[i] as Field)*base; base *= 256; } - U128 { - lo, - hi, - } + U128 { lo, hi } } pub fn to_be_bytes(self: Self) -> [u8; 16] { let lo = self.lo.to_be_bytes(8); let hi = self.hi.to_be_bytes(8); - let mut bytes = [0;16]; + let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = hi[i]; bytes[i+8] = lo[i]; @@ -56,7 +50,7 @@ impl U128 { pub fn to_le_bytes(self: Self) -> [u8; 16] { let lo = self.lo.to_le_bytes(8); let hi = self.hi.to_le_bytes(8); - let mut bytes = [0;16]; + let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = lo[i]; bytes[i+8] = hi[i]; @@ -73,9 +67,9 @@ impl U128 { let mut lo = 0; let mut hi = 0; - let mut base = 1; + let mut base = 1; if N <= 18 { - for i in 0..N-2 { + for i in 0..N - 2 { lo += U128::decode_ascii(bytes[N-i-1])*base; base = base*16; } @@ -85,27 +79,21 @@ impl U128 { base = base*16; } base = 1; - for i in 17..N-1 { + for i in 17..N - 1 { hi += U128::decode_ascii(bytes[N-i])*base; base = base*16; } } - U128 { - lo: lo as Field, - hi: hi as Field, - } + U128 { lo: lo as Field, hi: hi as Field } } fn decode_ascii(ascii: u8) -> Field { if ascii < 58 { ascii - 48 + } else if ascii < 71 { + ascii - 55 } else { - if ascii < 71 { - ascii - 55 - } else { - ascii - 87 - } - + ascii - 87 } as Field } @@ -114,15 +102,14 @@ impl U128 { 
(U128::from_u64s_le(0, 0), self) } else { //TODO check if this can overflow? - let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2,0)); - let q_mul_2 = q * U128::from_u64s_le(2,0); + let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2, 0)); + let q_mul_2 = q * U128::from_u64s_le(2, 0); if r < b { (q_mul_2, r) } else { - (q_mul_2 + U128::from_u64s_le(1,0), r - b) + (q_mul_2 + U128::from_u64s_le(1, 0), r - b) } - - } + } } pub fn from_integer(i: T) -> U128 { @@ -130,31 +117,25 @@ impl U128 { // Reject values which would overflow a u128 f.assert_max_bit_size(128); let lo = f as u64 as Field; - let hi = (f-lo) / pow64; - U128 { - lo, - hi, - } + let hi = (f - lo) / pow64; + U128 { lo, hi } } pub fn to_integer(self) -> T { - crate::from_field(self.lo+self.hi*pow64) + crate::from_field(self.lo + self.hi * pow64) } fn wrapping_mul(self: Self, b: U128) -> U128 { - let low = self.lo*b.lo; + let low = self.lo * b.lo; let lo = low as u64 as Field; let carry = (low - lo) / pow64; let high = if crate::field::modulus_num_bits() as u32 > 196 { - (self.lo+self.hi)*(b.lo+b.hi) - low + carry + (self.lo + self.hi) * (b.lo + b.hi) - low + carry } else { - self.lo*b.hi + self.hi*b.lo + carry + self.lo * b.hi + self.hi * b.lo + carry }; let hi = high as u64 as Field; - U128 { - lo, - hi, - } + U128 { lo, hi } } } diff --git a/test_programs/.gitignore b/test_programs/.gitignore index a229df6197..e98a2fb38b 100644 --- a/test_programs/.gitignore +++ b/test_programs/.gitignore @@ -1,2 +1,3 @@ acir_artifacts -execution_success/**/crs \ No newline at end of file +execution_success/**/crs +Nargo.toml diff --git a/test_programs/compile_success_empty/closure_explicit_types/src/main.nr b/test_programs/compile_success_empty/closure_explicit_types/src/main.nr index eec2b90b5b..b6c8a6b7b3 100644 --- a/test_programs/compile_success_empty/closure_explicit_types/src/main.nr +++ b/test_programs/compile_success_empty/closure_explicit_types/src/main.nr @@ -7,13 +7,13 @@ fn ret_closure1() 
-> fn[(Field,)]() -> Field { || x + 10 } // return lamda that captures two things -fn ret_closure2() -> fn[(Field,Field)]() -> Field { +fn ret_closure2() -> fn[(Field, Field)]() -> Field { let x = 20; let y = 10; || x + y + 10 } // return lamda that captures two things with different types -fn ret_closure3() -> fn[(u32,u64)]() -> u64 { +fn ret_closure3() -> fn[(u32, u64)]() -> u64 { let x: u32 = 20; let y: u64 = 10; || x as u64 + y + 10 diff --git a/test_programs/compile_success_empty/conditional_regression_579/src/main.nr b/test_programs/compile_success_empty/conditional_regression_579/src/main.nr index a479a7a6fb..a517f4fdb7 100644 --- a/test_programs/compile_success_empty/conditional_regression_579/src/main.nr +++ b/test_programs/compile_success_empty/conditional_regression_579/src/main.nr @@ -12,9 +12,7 @@ struct MyStruct579 { impl MyStruct579 { fn new(array_param: [u32; 2]) -> MyStruct579 { - MyStruct579 { - array_param: array_param - } + MyStruct579 { array_param } } } diff --git a/test_programs/compile_success_empty/reexports/src/main.nr b/test_programs/compile_success_empty/reexports/src/main.nr index bb94b21b22..ed469ff77d 100644 --- a/test_programs/compile_success_empty/reexports/src/main.nr +++ b/test_programs/compile_success_empty/reexports/src/main.nr @@ -1,8 +1,6 @@ use dep::reexporting_lib::{FooStruct, MyStruct, lib}; fn main() { - let x: FooStruct = MyStruct { - inner: 0 - }; + let x: FooStruct = MyStruct { inner: 0 }; assert(lib::is_struct_zero(x)); } diff --git a/test_programs/compile_success_empty/specialization/src/main.nr b/test_programs/compile_success_empty/specialization/src/main.nr index 9cd32e0f1e..30116330a8 100644 --- a/test_programs/compile_success_empty/specialization/src/main.nr +++ b/test_programs/compile_success_empty/specialization/src/main.nr @@ -1,11 +1,15 @@ struct Foo {} impl Foo { - fn foo(_self: Self) -> Field { 1 } + fn foo(_self: Self) -> Field { + 1 + } } impl Foo { - fn foo(_self: Self) -> Field { 2 } + fn foo(_self: 
Self) -> Field { + 2 + } } fn main() { diff --git a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr index e1d601b13c..8250b31789 100644 --- a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr +++ b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr @@ -10,15 +10,15 @@ struct B { } impl B { - fn new(new_concrete_t_c_constructor: fn () -> T_C) -> B { - B { new_concrete_t_c_constructor } - } + fn new(new_concrete_t_c_constructor: fn() -> T_C) -> B { + B { new_concrete_t_c_constructor } + } - fn get_t_c(self) -> T_C { - let new_concrete_t_c_constructor = self.new_concrete_t_c_constructor; - new_concrete_t_c_constructor() - } + fn get_t_c(self) -> T_C { + let new_concrete_t_c_constructor = self.new_concrete_t_c_constructor; + new_concrete_t_c_constructor() } +} // --- // Set struct C { @@ -26,15 +26,15 @@ struct C { } impl C { - fn new (t_d_interface: MethodInterface) -> Self { - C { t_d_interface } - } + fn new(t_d_interface: MethodInterface) -> Self { + C { t_d_interface } + } - fn call_method_of_t_d(self, t_d: T_D) -> Field { - let some_method_on_t_d = self.t_d_interface.some_method_on_t_d; - some_method_on_t_d(t_d) - } + fn call_method_of_t_d(self, t_d: T_D) -> Field { + let some_method_on_t_d = self.t_d_interface.some_method_on_t_d; + some_method_on_t_d(t_d) } +} // --- struct MethodInterface { some_method_on_t_d: fn(T_D)->Field, diff --git a/test_programs/execution_success/array_dynamic/src/main.nr b/test_programs/execution_success/array_dynamic/src/main.nr index dde7bacc45..6b51095bd8 100644 --- a/test_programs/execution_success/array_dynamic/src/main.nr +++ b/test_programs/execution_success/array_dynamic/src/main.nr @@ -2,8 +2,8 @@ fn main( x: [u32; 5], mut z: u32, t: u32, - index: [Field;5], - index2: [Field;5], + index: [Field; 5], + index2: [Field; 5], offset: Field, sublen: Field ) { diff --git 
a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr index aabf7fc9d5..4cbf1bd8e6 100644 --- a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr +++ b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr @@ -24,4 +24,4 @@ fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { // Regression for issue #4258 assert(root == current); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/array_dynamic_main_output/src/main.nr b/test_programs/execution_success/array_dynamic_main_output/src/main.nr index ccb7016a19..50feb71f98 100644 --- a/test_programs/execution_success/array_dynamic_main_output/src/main.nr +++ b/test_programs/execution_success/array_dynamic_main_output/src/main.nr @@ -1,4 +1,4 @@ fn main(mut x: [Field; 10], index: u8) -> pub [Field; 10] { x[index] = 0; x -} \ No newline at end of file +} diff --git a/test_programs/execution_success/assert_statement_recursive/src/main.nr b/test_programs/execution_success/assert_statement_recursive/src/main.nr index 687a0d324b..d89ea3d35b 100644 --- a/test_programs/execution_success/assert_statement_recursive/src/main.nr +++ b/test_programs/execution_success/assert_statement_recursive/src/main.nr @@ -8,4 +8,4 @@ fn main(x: Field, y: pub Field) { assert(x == y, "x and y are not equal"); assert_eq(x, y, "x and y are not equal"); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/bigint/src/main.nr b/test_programs/execution_success/bigint/src/main.nr index 74949a5f78..046d7d07d5 100644 --- a/test_programs/execution_success/bigint/src/main.nr +++ b/test_programs/execution_success/bigint/src/main.nr @@ -1,8 +1,8 @@ use dep::std::bigint; -fn main(mut x: [u8;5], y: [u8;5]) { - let a = bigint::BigInt::secpk1_fq_from_le_bytes([x[0],x[1],x[2],x[3],x[4]]); - let b = 
bigint::BigInt::secpk1_fq_from_le_bytes([y[0],y[1],y[2],y[3],y[4]]); +fn main(mut x: [u8; 5], y: [u8; 5]) { + let a = bigint::BigInt::secpk1_fq_from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); + let b = bigint::BigInt::secpk1_fq_from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); let a_bytes = a.to_le_bytes(); let b_bytes = b.to_le_bytes(); @@ -11,11 +11,11 @@ fn main(mut x: [u8;5], y: [u8;5]) { assert(b_bytes[i] == y[i]); } - let d = a*b - b; + let d = a * b - b; let d_bytes = d.to_le_bytes(); let d1 = bigint::BigInt::secpk1_fq_from_le_bytes(597243850900842442924.to_le_bytes(10)); let d1_bytes = d1.to_le_bytes(); for i in 0..32 { - assert(d_bytes[i] == d1_bytes[i]); + assert(d_bytes[i] == d1_bytes[i]); } } diff --git a/test_programs/execution_success/brillig_cow/src/main.nr b/test_programs/execution_success/brillig_cow/src/main.nr index 7d847e085f..52ce8b8be3 100644 --- a/test_programs/execution_success/brillig_cow/src/main.nr +++ b/test_programs/execution_success/brillig_cow/src/main.nr @@ -10,42 +10,37 @@ struct ExecutionResult { impl ExecutionResult { fn is_equal(self, other: ExecutionResult) -> bool { - (self.original == other.original) & - (self.modified_once == other.modified_once) & - (self.modified_twice == other.modified_twice) + (self.original == other.original) + & (self.modified_once == other.modified_once) + & (self.modified_twice == other.modified_twice) } } fn modify_in_inlined_constrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { let mut modified = original; - + modified[index] = 27; let modified_once = modified; modified[index+1] = 27; - ExecutionResult { - original, - modified_once, - modified_twice: modified - } + ExecutionResult { original, modified_once, modified_twice: modified } } -unconstrained fn modify_in_unconstrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { +unconstrained fn modify_in_unconstrained( + original: [Field; ARRAY_SIZE], + index: u64 +) -> ExecutionResult { let mut modified = original; - 
+ modified[index] = 27; let modified_once = modified; modified[index+1] = 27; - ExecutionResult { - original, - modified_once, - modified_twice: modified - } + ExecutionResult { original, modified_once, modified_twice: modified } } unconstrained fn main(original: [Field; ARRAY_SIZE], index: u64, expected_result: ExecutionResult) { diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index 74aeda1826..7f3dd76648 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -47,54 +47,54 @@ struct U256 { } impl U256 { - pub fn from_bytes32(bytes : [u8;32]) -> U256 { + pub fn from_bytes32(bytes: [u8; 32]) -> U256 { // We use addition rather than a bitwise OR as the bitshifts ensure that none of the bytes overlap each other. let high_0 = ((bytes[0] as u64) << 56) - + ((bytes[1] as u64) << 48) - + ((bytes[2] as u64) << 40) - + ((bytes[3] as u64) << 32) - + ((bytes[4] as u64) << 24) - + ((bytes[5] as u64) << 16) - + ((bytes[6] as u64) << 8) - + (bytes[7] as u64); - + + ((bytes[1] as u64) << 48) + + ((bytes[2] as u64) << 40) + + ((bytes[3] as u64) << 32) + + ((bytes[4] as u64) << 24) + + ((bytes[5] as u64) << 16) + + ((bytes[6] as u64) << 8) + + (bytes[7] as u64); + let high_1 = ((bytes[8] as u64) << 56) - + ((bytes[9] as u64) << 48) - + ((bytes[10] as u64) << 40) - + ((bytes[11] as u64) << 32) - + ((bytes[12] as u64) << 24) - + ((bytes[13] as u64) << 16) - + ((bytes[14] as u64) << 8) - + (bytes[15] as u64); - + + ((bytes[9] as u64) << 48) + + ((bytes[10] as u64) << 40) + + ((bytes[11] as u64) << 32) + + ((bytes[12] as u64) << 24) + + ((bytes[13] as u64) << 16) + + ((bytes[14] as u64) << 8) + + (bytes[15] as u64); + let low_0 = ((bytes[16] as u64) << 56) - + ((bytes[17] as u64) << 48) - + ((bytes[18] as u64) << 40) - + ((bytes[19] as u64) << 32) - + ((bytes[20] as u64) << 24) - + 
((bytes[21] as u64) << 16) - + ((bytes[22] as u64) << 8) - + (bytes[23] as u64); - + + ((bytes[17] as u64) << 48) + + ((bytes[18] as u64) << 40) + + ((bytes[19] as u64) << 32) + + ((bytes[20] as u64) << 24) + + ((bytes[21] as u64) << 16) + + ((bytes[22] as u64) << 8) + + (bytes[23] as u64); + let low_1 = ((bytes[24] as u64) << 56) - + ((bytes[25] as u64) << 48) - + ((bytes[26] as u64) << 40) - + ((bytes[27] as u64) << 32) - + ((bytes[28] as u64) << 24) - + ((bytes[29] as u64) << 16) - + ((bytes[30] as u64) << 8) - + (bytes[31] as u64); - - U256{inner : [high_0, high_1, low_0, low_1]} + + ((bytes[25] as u64) << 48) + + ((bytes[26] as u64) << 40) + + ((bytes[27] as u64) << 32) + + ((bytes[28] as u64) << 24) + + ((bytes[29] as u64) << 16) + + ((bytes[30] as u64) << 8) + + (bytes[31] as u64); + + U256 { inner: [high_0, high_1, low_0, low_1] } } - pub fn to_u128_limbs(self) -> [Field;2] { + pub fn to_u128_limbs(self) -> [Field; 2] { let two_pow_64 = 2.pow_32(64); let high = (self.inner[0] as Field) * two_pow_64 + self.inner[1] as Field; let low = (self.inner[2] as Field) * two_pow_64 + self.inner[3] as Field; - - [high,low] + + [high, low] } } diff --git a/test_programs/execution_success/brillig_fns_as_values/src/main.nr b/test_programs/execution_success/brillig_fns_as_values/src/main.nr index 2f5d14583d..ea3148915b 100644 --- a/test_programs/execution_success/brillig_fns_as_values/src/main.nr +++ b/test_programs/execution_success/brillig_fns_as_values/src/main.nr @@ -14,7 +14,7 @@ fn main(x: u32) { assert(increment(x) == x + 1); } -unconstrained fn wrapper(func: fn (u32) -> u32, param: u32) -> u32 { +unconstrained fn wrapper(func: fn(u32) -> u32, param: u32) -> u32 { func(param) } diff --git a/test_programs/execution_success/conditional_regression_661/src/main.nr b/test_programs/execution_success/conditional_regression_661/src/main.nr index 03102eb775..26521a8835 100644 --- a/test_programs/execution_success/conditional_regression_661/src/main.nr +++ 
b/test_programs/execution_success/conditional_regression_661/src/main.nr @@ -16,11 +16,11 @@ fn test5(a: u32) { } } -fn issue_661_foo(array: [u32;4], b: u32) -> [u32;1] { +fn issue_661_foo(array: [u32; 4], b: u32) -> [u32; 1] { [array[0] + b] } -fn issue_661_bar(a: [u32;4]) -> [u32;4] { +fn issue_661_bar(a: [u32; 4]) -> [u32; 4] { let mut b: [u32; 4] = [0; 4]; b[0]=a[0]+1; b diff --git a/test_programs/execution_success/databus/src/main.nr b/test_programs/execution_success/databus/src/main.nr index 61a9637f5f..1cf95be8a2 100644 --- a/test_programs/execution_success/databus/src/main.nr +++ b/test_programs/execution_success/databus/src/main.nr @@ -1,12 +1,12 @@ use dep::std; -fn main(mut x: u32, y: call_data u32, z: call_data [u32;4]) -> return_data u32 { - let a = z[x]; - a+foo(y) +fn main(mut x: u32, y: call_data u32, z: call_data [u32; 4]) -> return_data u32 { + let a = z[x]; + a + foo(y) } // Use an unconstrained function to force the compiler to avoid inlining unconstrained fn foo(x: u32) -> u32 { - x+1 + x + 1 } diff --git a/test_programs/execution_success/debug_logs/src/main.nr b/test_programs/execution_success/debug_logs/src/main.nr index c628a9ae6a..ec24b0cc8e 100644 --- a/test_programs/execution_success/debug_logs/src/main.nr +++ b/test_programs/execution_success/debug_logs/src/main.nr @@ -1,4 +1,4 @@ -fn main(x: Field, y: pub Field) { +fn main(x: Field, y: pub Field) { let string = "i: {i}, j: {j}"; println(string); @@ -102,8 +102,8 @@ fn regression_2903() { let a = v[0]; println(a); // will print `1` - let bytes = [ "aaa", "bbb", "ccc" ]; - println(bytes); + let bytes = ["aaa", "bbb", "ccc"]; + println(bytes); } fn regression_2906() { diff --git a/test_programs/execution_success/distinct_keyword/src/main.nr b/test_programs/execution_success/distinct_keyword/src/main.nr index 0e55a011a4..8e9b5c008e 100644 --- a/test_programs/execution_success/distinct_keyword/src/main.nr +++ b/test_programs/execution_success/distinct_keyword/src/main.nr @@ -1,4 +1,4 @@ // 
Example that uses the distinct keyword -fn main(x: pub Field) -> distinct pub [Field;2] { +fn main(x: pub Field) -> distinct pub [Field; 2] { [x + 1, x] } diff --git a/test_programs/execution_success/ecdsa_secp256k1/src/main.nr b/test_programs/execution_success/ecdsa_secp256k1/src/main.nr index 2f410755f7..ac0359e4bb 100644 --- a/test_programs/execution_success/ecdsa_secp256k1/src/main.nr +++ b/test_programs/execution_success/ecdsa_secp256k1/src/main.nr @@ -1,11 +1,11 @@ use dep::std; fn main( - message: [u8;38], - hashed_message: [u8;32], - pub_key_x: [u8;32], - pub_key_y: [u8;32], - signature: [u8;64] + message: [u8; 38], + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] ) { // Hash the message, since secp256k1 expects a hashed_message let expected = std::hash::sha256(message); diff --git a/test_programs/execution_success/ecdsa_secp256r1/src/main.nr b/test_programs/execution_success/ecdsa_secp256r1/src/main.nr index d23573d13a..c64e390d65 100644 --- a/test_programs/execution_success/ecdsa_secp256r1/src/main.nr +++ b/test_programs/execution_success/ecdsa_secp256r1/src/main.nr @@ -1,6 +1,6 @@ use dep::std; -fn main(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); assert(valid_signature); } diff --git a/test_programs/execution_success/main_bool_arg/src/main.nr b/test_programs/execution_success/main_bool_arg/src/main.nr index 111a23ec0c..2c50d7dee1 100644 --- a/test_programs/execution_success/main_bool_arg/src/main.nr +++ b/test_programs/execution_success/main_bool_arg/src/main.nr @@ -1,4 +1,4 @@ -fn main(x: bool, y: [bool;2]) { +fn main(x: bool, y: [bool; 2]) { if x { assert(1 != 2); } diff --git a/test_programs/execution_success/operator_overloading/src/main.nr 
b/test_programs/execution_success/operator_overloading/src/main.nr index 3867531abc..d61e1da170 100644 --- a/test_programs/execution_success/operator_overloading/src/main.nr +++ b/test_programs/execution_success/operator_overloading/src/main.nr @@ -1,4 +1,4 @@ -use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr }; +use dep::std::ops::{Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr}; use dep::std::cmp::Ordering; // x = 3, y = 9 @@ -126,10 +126,6 @@ impl Ord for Wrapper { } } - - - - struct Pair { x: Wrapper, y: Wrapper, diff --git a/test_programs/execution_success/regression_3394/src/main.nr b/test_programs/execution_success/regression_3394/src/main.nr index cc45487b98..94b6c818ff 100644 --- a/test_programs/execution_success/regression_3394/src/main.nr +++ b/test_programs/execution_success/regression_3394/src/main.nr @@ -3,4 +3,4 @@ use dep::std; fn main() { let x : i8 = -128; std::println(x); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/regression_3607/src/main.nr b/test_programs/execution_success/regression_3607/src/main.nr index c09211c281..9c7ef243f6 100644 --- a/test_programs/execution_success/regression_3607/src/main.nr +++ b/test_programs/execution_success/regression_3607/src/main.nr @@ -5,4 +5,4 @@ fn main(mut x: u32) { x = (x+1) / x; } assert(x != 0); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/regression_3889/src/main.nr b/test_programs/execution_success/regression_3889/src/main.nr index 10b8ecabee..402a69a10d 100644 --- a/test_programs/execution_success/regression_3889/src/main.nr +++ b/test_programs/execution_success/regression_3889/src/main.nr @@ -17,7 +17,6 @@ mod Baz { use crate::Bar::NewType; } - fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { works.a + fails.a + also_fails.a } diff --git a/test_programs/execution_success/side_effects_constrain_array/src/main.nr 
b/test_programs/execution_success/side_effects_constrain_array/src/main.nr index fb3c346a46..c4a62603bc 100644 --- a/test_programs/execution_success/side_effects_constrain_array/src/main.nr +++ b/test_programs/execution_success/side_effects_constrain_array/src/main.nr @@ -7,11 +7,11 @@ fn main(y: pub u32) { // The assert inside the if should be hit if y < 10 { - assert(bar.inner == [100, 101, 102]); + assert(bar.inner == [100, 101, 102]); } // The assert inside the if should not be hit if y > 10 { assert(bar.inner == [0, 1, 2]); } -} \ No newline at end of file +} diff --git a/test_programs/execution_success/struct/src/main.nr b/test_programs/execution_success/struct/src/main.nr index 45c5e347e5..de08f42f79 100644 --- a/test_programs/execution_success/struct/src/main.nr +++ b/test_programs/execution_success/struct/src/main.nr @@ -9,8 +9,8 @@ struct Pair { } impl Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: 0, array: [x,y] } + fn default(x: Field, y: Field) -> Self { + Self { bar: 0, array: [x, y] } } } diff --git a/test_programs/format.sh b/test_programs/format.sh new file mode 100755 index 0000000000..3c679b8689 --- /dev/null +++ b/test_programs/format.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash +set -e + +# These tests are incompatible with gas reporting +excluded_dirs=("workspace" "workspace_default_member" "workspace_reexport_bug") + +# These tests cause failures in CI with a stack overflow for some reason. +ci_excluded_dirs=("eddsa") + +current_dir=$(pwd) + +# We generate a Noir workspace which contains all of the test cases +# This allows us to generate a gates report using `nargo info` for all of them at once. 
+ + +function collect_dirs { + test_dirs=$(ls $current_dir/$1) + + for dir in $test_dirs; do + if [[ " ${excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + if [[ ${CI-false} = "true" ]] && [[ " ${ci_excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + echo " \"$1/$dir\"," >> Nargo.toml +done +} + +echo "[workspace]" > Nargo.toml +echo "members = [" >> Nargo.toml + +collect_dirs compile_success_empty +collect_dirs execution_success + +echo "]" >> Nargo.toml + +if [ "$1" == "check" ]; then + nargo fmt --check +else + nargo fmt +fi + + +rm Nargo.toml From 00ab3db86b06111d144516e862902b8604284611 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 17:28:28 +0000 Subject: [PATCH 07/17] fix: remove panic when generic array length is not resolvable (#4408) # Description ## Problem\* Resolves #4407 ## Summary\* We currently have no way to gracefully error during monomorphization and so must panic if we run into any errors. This PR then adds the `MonomorphizationError` enum with an example error type. We've also added a `CompileError` which unifies `RuntimeError` and `MonomorphizationError` so they can be converted into `FileDiagnostic`s ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- Cargo.lock | 1 + compiler/noirc_driver/Cargo.toml | 1 + compiler/noirc_driver/src/lib.rs | 27 +- compiler/noirc_evaluator/src/errors.rs | 2 +- .../src/monomorphization/debug.rs | 38 ++- .../src/monomorphization/mod.rs | 286 ++++++++++++------ compiler/noirc_frontend/src/tests.rs | 2 +- tooling/nargo/src/ops/test.rs | 10 +- 8 files changed, 246 insertions(+), 121 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4d8b12d537..0f575d9c46 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2947,6 +2947,7 @@ dependencies = [ "noirc_macros", "rust-embed", "serde", + "thiserror", "tracing", ] diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml index d9b240101d..681976735f 100644 --- a/compiler/noirc_driver/Cargo.toml +++ b/compiler/noirc_driver/Cargo.toml @@ -23,6 +23,7 @@ serde.workspace = true fxhash.workspace = true rust-embed.workspace = true tracing.workspace = true +thiserror.workspace = true aztec_macros = { path = "../../aztec_macros" } noirc_macros = { path = "../../noirc_macros" } diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 8b0fc5dc97..11f53cdb74 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -16,9 +16,10 @@ use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; use noirc_frontend::macros_api::MacroProcessor; -use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug}; +use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug, MonomorphizationError}; use noirc_frontend::node_interner::FuncId; use std::path::Path; +use thiserror::Error; use tracing::info; mod abi_gen; @@ -107,6 +108,24 @@ fn parse_expression_width(input: &str) -> Result for FileDiagnostic { + fn from(error: CompileError) -> FileDiagnostic { + match error { + CompileError::RuntimeError(err) => err.into(), + CompileError::MonomorphizationError(err) => err.into(), + } + } 
+} + /// Helper type used to signify where only warnings are expected in file diagnostics pub type Warnings = Vec; @@ -436,11 +455,11 @@ pub fn compile_no_check( main_function: FuncId, cached_program: Option, force_compile: bool, -) -> Result { +) -> Result { let program = if options.instrument_debug { - monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter) + monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter)? } else { - monomorphize(main_function, &mut context.def_interner) + monomorphize(main_function, &mut context.def_interner)? }; let hash = fxhash::hash64(&program); diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index ed94adac28..40f4336e0b 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -158,7 +158,7 @@ impl RuntimeError { RuntimeError::InternalError(cause) => { Diagnostic::simple_error( "Internal Consistency Evaluators Errors: \n - This is likely a bug. Consider Opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), + This is likely a bug. 
Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), cause.to_string(), noirc_errors::Span::inclusive(0, 0) ) diff --git a/compiler/noirc_frontend/src/monomorphization/debug.rs b/compiler/noirc_frontend/src/monomorphization/debug.rs index 5837d67660..a8ff4399f9 100644 --- a/compiler/noirc_frontend/src/monomorphization/debug.rs +++ b/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -8,7 +8,7 @@ use crate::hir_def::expr::*; use crate::node_interner::ExprId; use super::ast::{Expression, Ident}; -use super::Monomorphizer; +use super::{MonomorphizationError, Monomorphizer}; const DEBUG_MEMBER_ASSIGN_PREFIX: &str = "__debug_member_assign_"; const DEBUG_VAR_ID_ARG_SLOT: usize = 0; @@ -39,18 +39,19 @@ impl<'interner> Monomorphizer<'interner> { &mut self, call: &HirCallExpression, arguments: &mut [Expression], - ) { - let original_func = Box::new(self.expr(call.func)); + ) -> Result<(), MonomorphizationError> { + let original_func = Box::new(self.expr(call.func)?); if let Expression::Ident(Ident { name, .. }) = original_func.as_ref() { if name == "__debug_var_assign" { - self.patch_debug_var_assign(call, arguments); + self.patch_debug_var_assign(call, arguments)?; } else if name == "__debug_var_drop" { - self.patch_debug_var_drop(call, arguments); + self.patch_debug_var_drop(call, arguments)?; } else if let Some(arity) = name.strip_prefix(DEBUG_MEMBER_ASSIGN_PREFIX) { let arity = arity.parse::().expect("failed to parse member assign arity"); - self.patch_debug_member_assign(call, arguments, arity); + self.patch_debug_member_assign(call, arguments, arity)?; } } + Ok(()) } /// Update instrumentation code inserted on variable assignment. We need to @@ -59,7 +60,11 @@ impl<'interner> Monomorphizer<'interner> { /// variable are possible if using generic functions, hence the temporary ID /// created when injecting the instrumentation code can map to multiple IDs /// at runtime. 
- fn patch_debug_var_assign(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + fn patch_debug_var_assign( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -73,13 +78,18 @@ impl<'interner> Monomorphizer<'interner> { // then update the ID used for tracking at runtime let var_id = self.debug_type_tracker.insert_var(source_var_id, var_type); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } /// Update instrumentation code for a variable being dropped out of scope. /// Given the source_var_id we search for the last assigned debug var_id and /// replace it instead. 
- fn patch_debug_var_drop(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + fn patch_debug_var_drop( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -92,7 +102,8 @@ impl<'interner> Monomorphizer<'interner> { .get_var_id(source_var_id) .unwrap_or_else(|| unreachable!("failed to find debug variable")); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } /// Update instrumentation code inserted when assigning to a member of an @@ -106,7 +117,7 @@ impl<'interner> Monomorphizer<'interner> { call: &HirCallExpression, arguments: &mut [Expression], arity: usize, - ) { + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -149,7 +160,7 @@ impl<'interner> Monomorphizer<'interner> { call.location.span, call.location.file, ); - arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id); + arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id)?; } else { // array/string element using constant index cursor_type = element_type_at_index(cursor_type, index as usize); @@ -165,7 +176,8 @@ impl<'interner> Monomorphizer<'interner> { .get_var_id(source_var_id) .unwrap_or_else(|| unreachable!("failed to find debug variable")); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + 
arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } fn intern_var_id(&mut self, var_id: DebugVarId, location: &Location) -> ExprId { diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 2e714da21c..ce880401d7 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -9,13 +9,14 @@ //! The entry point to this pass is the `monomorphize` function which, starting from a given //! function, will monomorphize the entire reachable program. use acvm::FieldElement; -use iter_extended::{btree_map, vecmap}; -use noirc_errors::Location; +use iter_extended::{btree_map, try_vecmap, vecmap}; +use noirc_errors::{CustomDiagnostic, FileDiagnostic, Location}; use noirc_printable_type::PrintableType; use std::{ collections::{BTreeMap, HashMap, VecDeque}, unreachable, }; +use thiserror::Error; use crate::{ debug::DebugInstrumenter, @@ -87,6 +88,40 @@ struct Monomorphizer<'interner> { type HirType = crate::Type; +#[derive(Debug, Error)] +pub enum MonomorphizationError { + #[error("Length of generic array could not be determined.")] + UnknownArrayLength { location: Location }, +} + +impl MonomorphizationError { + fn call_stack(&self) -> Vec { + match self { + MonomorphizationError::UnknownArrayLength { location } => vec![*location], + } + } +} + +impl From for FileDiagnostic { + fn from(error: MonomorphizationError) -> FileDiagnostic { + let call_stack = error.call_stack(); + let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); + let diagnostic = error.into_diagnostic(); + diagnostic.in_file(file_id).with_call_stack(call_stack) + } +} + +impl MonomorphizationError { + fn into_diagnostic(self) -> CustomDiagnostic { + CustomDiagnostic::simple_error( + "Internal Consistency Evaluators Errors: \n + This is likely a bug. 
Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), + self.to_string(), + noirc_errors::Span::inclusive(0, 0) + ) + } +} + /// Starting from the given `main` function, monomorphize the entire program, /// replacing all references to type variables and NamedGenerics with concrete /// types, duplicating definitions as necessary to do so. @@ -99,7 +134,10 @@ type HirType = crate::Type; /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. #[tracing::instrument(level = "trace", skip(main, interner))] -pub fn monomorphize(main: node_interner::FuncId, interner: &mut NodeInterner) -> Program { +pub fn monomorphize( + main: node_interner::FuncId, + interner: &mut NodeInterner, +) -> Result { monomorphize_debug(main, interner, &DebugInstrumenter::default()) } @@ -107,10 +145,10 @@ pub fn monomorphize_debug( main: node_interner::FuncId, interner: &mut NodeInterner, debug_instrumenter: &DebugInstrumenter, -) -> Program { +) -> Result { let debug_type_tracker = DebugTypeTracker::build_from_debug_instrumenter(debug_instrumenter); let mut monomorphizer = Monomorphizer::new(interner, debug_type_tracker); - let function_sig = monomorphizer.compile_main(main); + let function_sig = monomorphizer.compile_main(main)?; while !monomorphizer.queue.is_empty() { let (next_fn_id, new_id, bindings, trait_method) = monomorphizer.queue.pop_front().unwrap(); @@ -118,7 +156,7 @@ pub fn monomorphize_debug( perform_instantiation_bindings(&bindings); let impl_bindings = monomorphizer.perform_impl_bindings(trait_method, next_fn_id); - monomorphizer.function(next_fn_id, new_id); + monomorphizer.function(next_fn_id, new_id)?; undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(bindings); } @@ -128,7 +166,7 @@ pub fn monomorphize_debug( monomorphizer.interner.function_meta(&main); let (debug_variables, debug_types) = 
monomorphizer.debug_type_tracker.extract_vars_and_types(); - Program::new( + let program = Program::new( functions, function_sig, *return_distinctness, @@ -137,7 +175,8 @@ pub fn monomorphize_debug( *kind == FunctionKind::Recursive, debug_variables, debug_types, - ) + ); + Ok(program) } impl<'interner> Monomorphizer<'interner> { @@ -233,10 +272,13 @@ impl<'interner> Monomorphizer<'interner> { self.globals.entry(id).or_default().insert(typ, new_id); } - fn compile_main(&mut self, main_id: node_interner::FuncId) -> FunctionSignature { + fn compile_main( + &mut self, + main_id: node_interner::FuncId, + ) -> Result { let new_main_id = self.next_function_id(); assert_eq!(new_main_id, Program::main_id()); - self.function(main_id, new_main_id); + self.function(main_id, new_main_id)?; self.return_location = self.interner.function(&main_id).block(self.interner).statements().last().and_then( |x| match self.interner.statement(x) { @@ -245,10 +287,14 @@ impl<'interner> Monomorphizer<'interner> { }, ); let main_meta = self.interner.function_meta(&main_id); - main_meta.function_signature() + Ok(main_meta.function_signature()) } - fn function(&mut self, f: node_interner::FuncId, id: FuncId) { + fn function( + &mut self, + f: node_interner::FuncId, + id: FuncId, + ) -> Result<(), MonomorphizationError> { if let Some((self_type, trait_id)) = self.interner.get_function_trait(&f) { let the_trait = self.interner.get_trait(trait_id); the_trait.self_type_typevar.force_bind(self_type); @@ -268,10 +314,11 @@ impl<'interner> Monomorphizer<'interner> { || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); let parameters = self.parameters(&meta.parameters); - let body = self.expr(body_expr_id); + let body = self.expr(body_expr_id)?; let function = ast::Function { id, name, parameters, body, return_type, unconstrained }; self.push_function(id, function); + Ok(()) } fn push_function(&mut self, id: FuncId, function: ast::Function) { @@ -331,15 +378,18 @@ 
impl<'interner> Monomorphizer<'interner> { } } - fn expr(&mut self, expr: node_interner::ExprId) -> ast::Expression { + fn expr( + &mut self, + expr: node_interner::ExprId, + ) -> Result { use ast::Expression::Literal; use ast::Literal::*; - match self.interner.expression(&expr) { - HirExpression::Ident(ident) => self.ident(ident, expr), + let expr = match self.interner.expression(&expr) { + HirExpression::Ident(ident) => self.ident(ident, expr)?, HirExpression::Literal(HirLiteral::Str(contents)) => Literal(Str(contents)), HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { - let fields = vecmap(idents, |ident| self.expr(ident)); + let fields = try_vecmap(idents, |ident| self.expr(ident))?; Literal(FmtStr( contents, fields.len() as u64, @@ -367,27 +417,27 @@ impl<'interner> Monomorphizer<'interner> { } } HirExpression::Literal(HirLiteral::Array(array)) => match array { - HirArrayLiteral::Standard(array) => self.standard_array(expr, array), + HirArrayLiteral::Standard(array) => self.standard_array(expr, array)?, HirArrayLiteral::Repeated { repeated_element, length } => { - self.repeated_array(expr, repeated_element, length) + self.repeated_array(expr, repeated_element, length)? 
} }, HirExpression::Literal(HirLiteral::Unit) => ast::Expression::Block(vec![]), - HirExpression::Block(block) => self.block(block.0), + HirExpression::Block(block) => self.block(block.0)?, HirExpression::Prefix(prefix) => { let location = self.interner.expr_location(&expr); ast::Expression::Unary(ast::Unary { operator: prefix.operator, - rhs: Box::new(self.expr(prefix.rhs)), + rhs: Box::new(self.expr(prefix.rhs)?), result_type: self.convert_type(&self.interner.id_type(expr)), location, }) } HirExpression::Infix(infix) => { - let lhs = self.expr(infix.lhs); - let rhs = self.expr(infix.rhs); + let lhs = self.expr(infix.lhs)?; + let rhs = self.expr(infix.rhs)?; let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); if self.interner.get_selected_impl_for_expression(expr).is_some() { @@ -418,26 +468,27 @@ impl<'interner> Monomorphizer<'interner> { } } - HirExpression::Index(index) => self.index(expr, index), + HirExpression::Index(index) => self.index(expr, index)?, HirExpression::MemberAccess(access) => { let field_index = self.interner.get_field_index(expr); - let expr = Box::new(self.expr(access.lhs)); + let expr = Box::new(self.expr(access.lhs)?); ast::Expression::ExtractTupleField(expr, field_index) } - HirExpression::Call(call) => self.function_call(call, expr), + HirExpression::Call(call) => self.function_call(call, expr)?, HirExpression::Cast(cast) => ast::Expression::Cast(ast::Cast { - lhs: Box::new(self.expr(cast.lhs)), + lhs: Box::new(self.expr(cast.lhs)?), r#type: self.convert_type(&cast.r#type), location: self.interner.expr_location(&expr), }), HirExpression::If(if_expr) => { - let cond = self.expr(if_expr.condition); - let then = self.expr(if_expr.consequence); - let else_ = if_expr.alternative.map(|alt| Box::new(self.expr(alt))); + let cond = self.expr(if_expr.condition)?; + let then = self.expr(if_expr.consequence)?; + let else_ = + if_expr.alternative.map(|alt| self.expr(alt)).transpose()?.map(Box::new); 
ast::Expression::If(ast::If { condition: Box::new(cond), consequence: Box::new(then), @@ -447,28 +498,30 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Tuple(fields) => { - let fields = vecmap(fields, |id| self.expr(id)); + let fields = try_vecmap(fields, |id| self.expr(id))?; ast::Expression::Tuple(fields) } - HirExpression::Constructor(constructor) => self.constructor(constructor, expr), + HirExpression::Constructor(constructor) => self.constructor(constructor, expr)?, - HirExpression::Lambda(lambda) => self.lambda(lambda, expr), + HirExpression::Lambda(lambda) => self.lambda(lambda, expr)?, HirExpression::MethodCall(hir_method_call) => { unreachable!("Encountered HirExpression::MethodCall during monomorphization {hir_method_call:?}") } HirExpression::Error => unreachable!("Encountered Error node during monomorphization"), - } + }; + + Ok(expr) } fn standard_array( &mut self, array: node_interner::ExprId, array_elements: Vec, - ) -> ast::Expression { + ) -> Result { let typ = self.convert_type(&self.interner.id_type(array)); - let contents = vecmap(array_elements, |id| self.expr(id)); - ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ })) + let contents = try_vecmap(array_elements, |id| self.expr(id))?; + Ok(ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ }))) } fn repeated_array( @@ -476,48 +529,56 @@ impl<'interner> Monomorphizer<'interner> { array: node_interner::ExprId, repeated_element: node_interner::ExprId, length: HirType, - ) -> ast::Expression { + ) -> Result { let typ = self.convert_type(&self.interner.id_type(array)); - let length = length - .evaluate_to_u64() - .expect("Length of array is unknown when evaluating numeric generic"); + let length = length.evaluate_to_u64().ok_or_else(|| { + let location = self.interner.expr_location(&array); + MonomorphizationError::UnknownArrayLength { location } + })?; - let contents = vecmap(0..length, |_| self.expr(repeated_element)); - 
ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ })) + let contents = try_vecmap(0..length, |_| self.expr(repeated_element))?; + Ok(ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ }))) } - fn index(&mut self, id: node_interner::ExprId, index: HirIndexExpression) -> ast::Expression { + fn index( + &mut self, + id: node_interner::ExprId, + index: HirIndexExpression, + ) -> Result { let element_type = self.convert_type(&self.interner.id_type(id)); - let collection = Box::new(self.expr(index.collection)); - let index = Box::new(self.expr(index.index)); + let collection = Box::new(self.expr(index.collection)?); + let index = Box::new(self.expr(index.index)?); let location = self.interner.expr_location(&id); - ast::Expression::Index(ast::Index { collection, index, element_type, location }) + Ok(ast::Expression::Index(ast::Index { collection, index, element_type, location })) } - fn statement(&mut self, id: StmtId) -> ast::Expression { + fn statement(&mut self, id: StmtId) -> Result { match self.interner.statement(&id) { HirStatement::Let(let_statement) => self.let_statement(let_statement), HirStatement::Constrain(constrain) => { - let expr = self.expr(constrain.0); + let expr = self.expr(constrain.0)?; let location = self.interner.expr_location(&constrain.0); - let assert_message = - constrain.2.map(|assert_msg_expr| Box::new(self.expr(assert_msg_expr))); - ast::Expression::Constrain(Box::new(expr), location, assert_message) + let assert_message = constrain + .2 + .map(|assert_msg_expr| self.expr(assert_msg_expr)) + .transpose()? 
+ .map(Box::new); + Ok(ast::Expression::Constrain(Box::new(expr), location, assert_message)) } HirStatement::Assign(assign) => self.assign(assign), HirStatement::For(for_loop) => { self.is_range_loop = true; - let start = self.expr(for_loop.start_range); - let end = self.expr(for_loop.end_range); + let start = self.expr(for_loop.start_range)?; + let end = self.expr(for_loop.end_range)?; self.is_range_loop = false; let index_variable = self.next_local_id(); self.define_local(for_loop.identifier.id, index_variable); - let block = Box::new(self.expr(for_loop.block)); + let block = Box::new(self.expr(for_loop.block)?); - ast::Expression::For(ast::For { + Ok(ast::Expression::For(ast::For { index_variable, index_name: self.interner.definition_name(for_loop.identifier.id).to_owned(), index_type: self.convert_type(&self.interner.id_type(for_loop.start_range)), @@ -526,25 +587,30 @@ impl<'interner> Monomorphizer<'interner> { start_range_location: self.interner.expr_location(&for_loop.start_range), end_range_location: self.interner.expr_location(&for_loop.end_range), block, - }) + })) } HirStatement::Expression(expr) => self.expr(expr), - HirStatement::Semi(expr) => ast::Expression::Semi(Box::new(self.expr(expr))), + HirStatement::Semi(expr) => { + self.expr(expr).map(|expr| ast::Expression::Semi(Box::new(expr))) + } HirStatement::Error => unreachable!(), } } - fn let_statement(&mut self, let_statement: HirLetStatement) -> ast::Expression { - let expr = self.expr(let_statement.expression); + fn let_statement( + &mut self, + let_statement: HirLetStatement, + ) -> Result { + let expr = self.expr(let_statement.expression)?; let expected_type = self.interner.id_type(let_statement.expression); - self.unpack_pattern(let_statement.pattern, expr, &expected_type) + Ok(self.unpack_pattern(let_statement.pattern, expr, &expected_type)) } fn constructor( &mut self, constructor: HirConstructorExpression, id: node_interner::ExprId, - ) -> ast::Expression { + ) -> Result { let typ = 
self.interner.id_type(id); let field_types = unwrap_struct_type(&typ); @@ -561,7 +627,7 @@ impl<'interner> Monomorphizer<'interner> { let typ = self.convert_type(field_type); field_vars.insert(field_name.0.contents.clone(), (new_id, typ)); - let expression = Box::new(self.expr(expr_id)); + let expression = Box::new(self.expr(expr_id)?); new_exprs.push(ast::Expression::Let(ast::Let { id: new_id, @@ -586,11 +652,15 @@ impl<'interner> Monomorphizer<'interner> { // Finally we can return the created Tuple from the new block new_exprs.push(ast::Expression::Tuple(field_idents)); - ast::Expression::Block(new_exprs) + Ok(ast::Expression::Block(new_exprs)) } - fn block(&mut self, statement_ids: Vec) -> ast::Expression { - ast::Expression::Block(vecmap(statement_ids, |id| self.statement(id))) + fn block( + &mut self, + statement_ids: Vec, + ) -> Result { + let stmts = try_vecmap(statement_ids, |id| self.statement(id)); + stmts.map(ast::Expression::Block) } fn unpack_pattern( @@ -701,15 +771,19 @@ impl<'interner> Monomorphizer<'interner> { Some(ast::Ident { location: Some(ident.location), mutable, definition, name, typ }) } - fn ident(&mut self, ident: HirIdent, expr_id: node_interner::ExprId) -> ast::Expression { + fn ident( + &mut self, + ident: HirIdent, + expr_id: node_interner::ExprId, + ) -> Result { let typ = self.interner.id_type(expr_id); if let ImplKind::TraitMethod(method, _, _) = ident.impl_kind { - return self.resolve_trait_method_reference(expr_id, typ, method); + return Ok(self.resolve_trait_method_reference(expr_id, typ, method)); } let definition = self.interner.definition(ident.id); - match &definition.kind { + let ident = match &definition.kind { DefinitionKind::Function(func_id) => { let mutable = definition.mutable; let location = Some(ident.location); @@ -736,7 +810,7 @@ impl<'interner> Monomorphizer<'interner> { "Globals should have a corresponding let statement by monomorphization" ) }; - self.expr(let_.expression) + self.expr(let_.expression)? 
} DefinitionKind::Local(_) => self.lookup_captured_expr(ident.id).unwrap_or_else(|| { let ident = self.local_ident(&ident).unwrap(); @@ -757,7 +831,9 @@ impl<'interner> Monomorphizer<'interner> { let typ = self.convert_type(&typ); ast::Expression::Literal(ast::Literal::Integer(value, typ, location)) } - } + }; + + Ok(ident) } /// Convert a non-tuple/struct type to a monomorphized type @@ -949,12 +1025,12 @@ impl<'interner> Monomorphizer<'interner> { &mut self, call: HirCallExpression, id: node_interner::ExprId, - ) -> ast::Expression { - let original_func = Box::new(self.expr(call.func)); - let mut arguments = vecmap(&call.arguments, |id| self.expr(*id)); + ) -> Result { + let original_func = Box::new(self.expr(call.func)?); + let mut arguments = try_vecmap(&call.arguments, |id| self.expr(*id))?; let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); - self.patch_debug_instrumentation_call(&call, &mut arguments); + self.patch_debug_instrumentation_call(&call, &mut arguments)?; let return_type = self.interner.id_type(id); let return_type = self.convert_type(&return_type); @@ -969,7 +1045,7 @@ impl<'interner> Monomorphizer<'interner> { // The second argument is expected to always be an ident self.append_printable_type_info(&hir_arguments[1], &mut arguments); } else if name.as_str() == "assert_message" { - // The first argument to the `assert_message` oracle is the expression passed as a mesage to an `assert` or `assert_eq` statement + // The first argument to the `assert_message` oracle is the expression passed as a message to an `assert` or `assert_eq` statement self.append_printable_type_info(&hir_arguments[0], &mut arguments); } } @@ -1017,9 +1093,9 @@ impl<'interner> Monomorphizer<'interner> { if !block_expressions.is_empty() { block_expressions.push(call); - ast::Expression::Block(block_expressions) + Ok(ast::Expression::Block(block_expressions)) } else { - call + Ok(call) } } @@ -1190,47 +1266,59 @@ impl<'interner> 
Monomorphizer<'interner> { .collect() } - fn assign(&mut self, assign: HirAssignStatement) -> ast::Expression { - let expression = Box::new(self.expr(assign.expression)); - let lvalue = self.lvalue(assign.lvalue); - ast::Expression::Assign(ast::Assign { expression, lvalue }) + fn assign( + &mut self, + assign: HirAssignStatement, + ) -> Result { + let expression = Box::new(self.expr(assign.expression)?); + let lvalue = self.lvalue(assign.lvalue)?; + Ok(ast::Expression::Assign(ast::Assign { expression, lvalue })) } - fn lvalue(&mut self, lvalue: HirLValue) -> ast::LValue { - match lvalue { + fn lvalue(&mut self, lvalue: HirLValue) -> Result { + let value = match lvalue { HirLValue::Ident(ident, _) => self .lookup_captured_lvalue(ident.id) .unwrap_or_else(|| ast::LValue::Ident(self.local_ident(&ident).unwrap())), HirLValue::MemberAccess { object, field_index, .. } => { let field_index = field_index.unwrap(); - let object = Box::new(self.lvalue(*object)); + let object = Box::new(self.lvalue(*object)?); ast::LValue::MemberAccess { object, field_index } } HirLValue::Index { array, index, typ } => { let location = self.interner.expr_location(&index); - let array = Box::new(self.lvalue(*array)); - let index = Box::new(self.expr(index)); + let array = Box::new(self.lvalue(*array)?); + let index = Box::new(self.expr(index)?); let element_type = self.convert_type(&typ); ast::LValue::Index { array, index, element_type, location } } HirLValue::Dereference { lvalue, element_type } => { - let reference = Box::new(self.lvalue(*lvalue)); + let reference = Box::new(self.lvalue(*lvalue)?); let element_type = self.convert_type(&element_type); ast::LValue::Dereference { reference, element_type } } - } + }; + + Ok(value) } - fn lambda(&mut self, lambda: HirLambda, expr: node_interner::ExprId) -> ast::Expression { + fn lambda( + &mut self, + lambda: HirLambda, + expr: node_interner::ExprId, + ) -> Result { if lambda.captures.is_empty() { self.lambda_no_capture(lambda) } else { - let 
(setup, closure_variable) = self.lambda_with_setup(lambda, expr); - ast::Expression::Block(vec![setup, closure_variable]) + let (setup, closure_variable) = self.lambda_with_setup(lambda, expr)?; + Ok(ast::Expression::Block(vec![setup, closure_variable])) } } - fn lambda_no_capture(&mut self, lambda: HirLambda) -> ast::Expression { + fn lambda_no_capture( + &mut self, + lambda: HirLambda, + ) -> Result { let ret_type = self.convert_type(&lambda.return_type); let lambda_name = "lambda"; let parameter_types = vecmap(&lambda.parameters, |(_, typ)| self.convert_type(typ)); @@ -1240,7 +1328,7 @@ impl<'interner> Monomorphizer<'interner> { vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into(); let parameters = self.parameters(¶meters); - let body = self.expr(lambda.body); + let body = self.expr(lambda.body)?; let id = self.next_function_id(); let return_type = ret_type.clone(); @@ -1254,20 +1342,20 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::Function(parameter_types, Box::new(ret_type), Box::new(ast::Type::Unit)); let name = lambda_name.to_owned(); - ast::Expression::Ident(ast::Ident { + Ok(ast::Expression::Ident(ast::Ident { definition: Definition::Function(id), mutable: false, location: None, name, typ, - }) + })) } fn lambda_with_setup( &mut self, lambda: HirLambda, expr: node_interner::ExprId, - ) -> (ast::Expression, ast::Expression) { + ) -> Result<(ast::Expression, ast::Expression), MonomorphizationError> { // returns (, ) // which can be used directly in callsites or transformed // directly to a single `Expression` @@ -1343,7 +1431,7 @@ impl<'interner> Monomorphizer<'interner> { self.lambda_envs_stack .push(LambdaContext { env_ident: env_ident.clone(), captures: lambda.captures }); - let body = self.expr(lambda.body); + let body = self.expr(lambda.body)?; self.lambda_envs_stack.pop(); let lambda_fn_typ: ast::Type = @@ -1385,7 +1473,7 @@ impl<'interner> Monomorphizer<'interner> { typ: ast::Type::Tuple(vec![env_typ, 
lambda_fn_typ]), }); - (block_let_stmt, closure_ident) + Ok((block_let_stmt, closure_ident)) } /// Implements std::unsafe::zeroed by returning an appropriate zeroed diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index c18379f1c2..c661cc92ee 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -1130,7 +1130,7 @@ mod test { fn check_rewrite(src: &str, expected: &str) { let (_program, mut context, _errors) = get_program(src); let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &mut context.def_interner); + let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); assert!(format!("{}", program) == expected); } diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs index 0929739a6a..92c09ec889 100644 --- a/tooling/nargo/src/ops/test.rs +++ b/tooling/nargo/src/ops/test.rs @@ -1,5 +1,5 @@ use acvm::{acir::native_types::WitnessMap, BlackBoxFunctionSolver}; -use noirc_driver::{compile_no_check, CompileOptions}; +use noirc_driver::{compile_no_check, CompileError, CompileOptions}; use noirc_errors::{debug_info::DebugInfo, FileDiagnostic}; use noirc_evaluator::errors::RuntimeError; use noirc_frontend::hir::{def_map::TestFunction, Context}; @@ -45,14 +45,18 @@ pub fn run_test( /// that a constraint was never satisfiable. /// An example of this is the program `assert(false)` /// In that case, we check if the test function should fail, and if so, we return `TestStatus::Pass`. -fn test_status_program_compile_fail(err: RuntimeError, test_function: TestFunction) -> TestStatus { +fn test_status_program_compile_fail(err: CompileError, test_function: TestFunction) -> TestStatus { // The test has failed compilation, but it should never fail. Report error. 
if !test_function.should_fail() { return TestStatus::CompileError(err.into()); } // The test has failed compilation, extract the assertion message if present and check if it's expected. - let assert_message = if let RuntimeError::FailedConstraint { assert_message, .. } = &err { + let assert_message = if let CompileError::RuntimeError(RuntimeError::FailedConstraint { + assert_message, + .. + }) = &err + { assert_message.clone() } else { None From 10e82920798380f50046e52db4a20ca205191ab7 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Mon, 26 Feb 2024 18:36:25 +0100 Subject: [PATCH 08/17] feat: add poseidon2 opcode implementation for acvm/brillig, and Noir (#4398) and poseidon2 noir implementation # Description ## Problem\* Resolves #4170 ## Summary\* The PR implements Poseidon2 permutation for ACMV and Brillig, enabling the use of the opcode. Then it also includes a Noir implementation of Poseidon2 using the opcode in the stdlib ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [X] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: kevaundray --- Cargo.lock | 1 + acvm-repo/acvm/src/pwg/blackbox/hash.rs | 36 +- acvm-repo/acvm/src/pwg/blackbox/mod.rs | 8 +- .../src/curve_specific_solver.rs | 12 + acvm-repo/bn254_blackbox_solver/Cargo.toml | 1 + acvm-repo/bn254_blackbox_solver/src/lib.rs | 11 + .../bn254_blackbox_solver/src/poseidon2.rs | 1043 +++++++++++++++++ acvm-repo/brillig_vm/src/black_box.rs | 13 +- acvm-repo/brillig_vm/src/lib.rs | 7 + .../noirc_evaluator/src/brillig/brillig_ir.rs | 8 + .../cryptographic_primitives/hashes.mdx | 13 + noir_stdlib/src/hash.nr | 3 +- noir_stdlib/src/hash/poseidon2.nr | 119 ++ .../poseidon_bn254_hash/Prover.toml | 5 + .../poseidon_bn254_hash/src/main.nr | 6 +- tooling/lsp/src/solver.rs | 8 + 16 files changed, 1288 insertions(+), 6 deletions(-) create mode 100644 acvm-repo/bn254_blackbox_solver/src/poseidon2.rs create mode 100644 noir_stdlib/src/hash/poseidon2.nr diff --git a/Cargo.lock b/Cargo.lock index 0f575d9c46..714b700119 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -589,6 +589,7 @@ dependencies = [ "js-sys", "noir_grumpkin", "num-bigint", + "num-traits", "pkg-config", "reqwest", "rust-embed", diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 06489822c9..1bc26f0618 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -3,7 +3,7 @@ use acir::{ native_types::{Witness, WitnessMap}, BlackBoxFunc, FieldElement, }; -use acvm_blackbox_solver::{sha256compression, BlackBoxResolutionError}; +use acvm_blackbox_solver::{sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError}; use crate::pwg::{insert_value, witness_to_value}; use crate::OpcodeResolutionError; @@ -131,3 +131,37 @@ pub(crate) fn solve_sha_256_permutation_opcode( Ok(()) } + +pub(crate) fn solve_poseidon2_permutation_opcode( + backend: &impl BlackBoxFunctionSolver, + initial_witness: &mut WitnessMap, + inputs: &[FunctionInput], + outputs: &[Witness], + len: u32, +) -> 
Result<(), OpcodeResolutionError> { + if len as usize != inputs.len() { + return Err(OpcodeResolutionError::BlackBoxFunctionFailed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of inputs does not match specified length. {} > {}", + inputs.len(), + len + ), + )); + } + + // Read witness assignments + let mut state = Vec::new(); + for input in inputs.iter() { + let witness_assignment = witness_to_value(initial_witness, input.witness)?; + state.push(*witness_assignment); + } + + let state = backend.poseidon2_permutation(&state, len)?; + + // Write witness assignments + for (output_witness, value) in outputs.iter().zip(state.into_iter()) { + insert_value(output_witness, value, initial_witness)?; + } + Ok(()) +} diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 7ae92fd84f..4309cad1b2 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -5,7 +5,9 @@ use acir::{ }; use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; -use self::{bigint::BigIntSolver, pedersen::pedersen_hash}; +use self::{ + bigint::BigIntSolver, hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, +}; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; @@ -204,7 +206,6 @@ pub(crate) fn solve( BlackBoxFuncCall::BigIntToLeBytes { input, outputs } => { bigint_solver.bigint_to_bytes(*input, outputs, initial_witness) } - BlackBoxFuncCall::Poseidon2Permutation { .. 
} => todo!(), BlackBoxFuncCall::Sha256Compression { inputs, hash_values, outputs } => { solve_sha_256_permutation_opcode( initial_witness, @@ -214,5 +215,8 @@ pub(crate) fn solve( bb_func.get_black_box_func(), ) } + BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len } => { + solve_poseidon2_permutation_opcode(backend, initial_witness, inputs, outputs, *len) + } } } diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 2234710dec..f0ab456122 100644 --- a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -36,6 +36,11 @@ pub trait BlackBoxFunctionSolver { input2_x: &FieldElement, input2_y: &FieldElement, ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result, BlackBoxResolutionError>; } pub struct StubbedBlackBoxSolver; @@ -89,4 +94,11 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Err(Self::fail(BlackBoxFunc::EmbeddedCurveAdd)) } + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result, BlackBoxResolutionError> { + Err(Self::fail(BlackBoxFunc::Poseidon2Permutation)) + } } diff --git a/acvm-repo/bn254_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml index ef80e2c1c0..ea601a6b80 100644 --- a/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -16,6 +16,7 @@ repository.workspace = true acir.workspace = true acvm_blackbox_solver.workspace = true thiserror.workspace = true +num-traits.workspace = true rust-embed = { version = "6.6.0", features = [ "debug-embed", diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs index 13aa956f9e..be0e60ada9 100644 --- 
a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -6,9 +6,11 @@ use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; mod fixed_base_scalar_mul; +mod poseidon2; mod wasm; pub use fixed_base_scalar_mul::{embedded_curve_add, fixed_base_scalar_mul}; +use poseidon2::Poseidon2; use wasm::Barretenberg; use self::wasm::{Pedersen, SchnorrSig}; @@ -97,4 +99,13 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { embedded_curve_add(*input1_x, *input1_y, *input2_x, *input2_y) } + + fn poseidon2_permutation( + &self, + inputs: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + let poseidon = Poseidon2::new(); + poseidon.permutation(inputs, len) + } } diff --git a/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs new file mode 100644 index 0000000000..e0ed5bcd05 --- /dev/null +++ b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs @@ -0,0 +1,1043 @@ +use acir::FieldElement; +use acvm_blackbox_solver::BlackBoxResolutionError; +use num_bigint::BigUint; +use num_traits::Num; + +pub(crate) struct Poseidon2 { + t: u32, + rounds_f: u32, + rounds_p: u32, + internal_matrix_diagonal: [FieldElement; 4], + round_constant: [[FieldElement; 4]; 64], +} + +impl Poseidon2 { + pub(crate) fn new() -> Self { + Poseidon2 { + t: 4, + rounds_f: 8, + rounds_p: 56, + internal_matrix_diagonal: [ + Poseidon2::field_from_hex( + "0x10dc6e9c006ea38b04b1e03b4bd9490c0d03f98929ca1d7fb56821fd19d3b6e7", + ), + Poseidon2::field_from_hex( + "0x0c28145b6a44df3e0149b3d0a30b3bb599df9756d4dd9b84a86b38cfb45a740b", + ), + Poseidon2::field_from_hex( + "0x00544b8338791518b2c7645a50392798b21f75bb60e3596170067d00141cac15", + ), + Poseidon2::field_from_hex( + "0x222c01175718386f2e2e82eb122789e352e105a3b8fa852613bc534433ee428b", + ), + ], + round_constant: [ + [ + 
Poseidon2::field_from_hex( + "0x19b849f69450b06848da1d39bd5e4a4302bb86744edc26238b0878e269ed23e5", + ), + Poseidon2::field_from_hex( + "0x265ddfe127dd51bd7239347b758f0a1320eb2cc7450acc1dad47f80c8dcf34d6", + ), + Poseidon2::field_from_hex( + "0x199750ec472f1809e0f66a545e1e51624108ac845015c2aa3dfc36bab497d8aa", + ), + Poseidon2::field_from_hex( + "0x157ff3fe65ac7208110f06a5f74302b14d743ea25067f0ffd032f787c7f1cdf8", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2e49c43c4569dd9c5fd35ac45fca33f10b15c590692f8beefe18f4896ac94902", + ), + Poseidon2::field_from_hex( + "0x0e35fb89981890520d4aef2b6d6506c3cb2f0b6973c24fa82731345ffa2d1f1e", + ), + Poseidon2::field_from_hex( + "0x251ad47cb15c4f1105f109ae5e944f1ba9d9e7806d667ffec6fe723002e0b996", + ), + Poseidon2::field_from_hex( + "0x13da07dc64d428369873e97160234641f8beb56fdd05e5f3563fa39d9c22df4e", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c009b84e650e6d23dc00c7dccef7483a553939689d350cd46e7b89055fd4738", + ), + Poseidon2::field_from_hex( + "0x011f16b1c63a854f01992e3956f42d8b04eb650c6d535eb0203dec74befdca06", + ), + Poseidon2::field_from_hex( + "0x0ed69e5e383a688f209d9a561daa79612f3f78d0467ad45485df07093f367549", + ), + Poseidon2::field_from_hex( + "0x04dba94a7b0ce9e221acad41472b6bbe3aec507f5eb3d33f463672264c9f789b", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0a3f2637d840f3a16eb094271c9d237b6036757d4bb50bf7ce732ff1d4fa28e8", + ), + Poseidon2::field_from_hex( + "0x259a666f129eea198f8a1c502fdb38fa39b1f075569564b6e54a485d1182323f", + ), + Poseidon2::field_from_hex( + "0x28bf7459c9b2f4c6d8e7d06a4ee3a47f7745d4271038e5157a32fdf7ede0d6a1", + ), + Poseidon2::field_from_hex( + "0x0a1ca941f057037526ea200f489be8d4c37c85bbcce6a2aeec91bd6941432447", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c6f8f958be0e93053d7fd4fc54512855535ed1539f051dcb43a26fd926361cf", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x123106a93cd17578d426e8128ac9d90aa9e8a00708e296e084dd57e69caaf811", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x26e1ba52ad9285d97dd3ab52f8e840085e8fa83ff1e8f1877b074867cd2dee75", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1cb55cad7bd133de18a64c5c47b9c97cbe4d8b7bf9e095864471537e6a4ae2c5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1dcd73e46acd8f8e0e2c7ce04bde7f6d2a53043d5060a41c7143f08e6e9055d0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x011003e32f6d9c66f5852f05474a4def0cda294a0eb4e9b9b12b9bb4512e5574", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2b1e809ac1d10ab29ad5f20d03a57dfebadfe5903f58bafed7c508dd2287ae8c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2539de1785b735999fb4dac35ee17ed0ef995d05ab2fc5faeaa69ae87bcec0a5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c246c5a2ef8ee0126497f222b3e0a0ef4e1c3d41c86d46e43982cb11d77951d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x192089c4974f68e95408148f7c0632edbb09e6a6ad1a1c2f3f0305f5d03b527b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1eae0ad8ab68b2f06a0ee36eeb0d0c058529097d91096b756d8fdc2fb5a60d85", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x179190e5d0e22179e46f8282872abc88db6e2fdc0dee99e69768bd98c5d06bfb", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x29bb9e2c9076732576e9a81c7ac4b83214528f7db00f31bf6cafe794a9b3cd1c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x225d394e42207599403efd0c2464a90d52652645882aac35b10e590e6e691e08", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x064760623c25c8cf753d238055b444532be13557451c087de09efd454b23fd59", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x10ba3a0e01df92e87f301c4b716d8a394d67f4bf42a75c10922910a78f6b5b87", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0e070bf53f8451b24f9c6e96b0c2a801cb511bc0c242eb9d361b77693f21471c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1b94cd61b051b04dd39755ff93821a73ccd6cb11d2491d8aa7f921014de252fb", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1d7cb39bafb8c744e148787a2e70230f9d4e917d5713bb050487b5aa7d74070b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2ec93189bd1ab4f69117d0fe980c80ff8785c2961829f701bb74ac1f303b17db", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2db366bfdd36d277a692bb825b86275beac404a19ae07a9082ea46bd83517926", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x062100eb485db06269655cf186a68532985275428450359adc99cec6960711b8", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0761d33c66614aaa570e7f1e8244ca1120243f92fa59e4f900c567bf41f5a59b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x20fc411a114d13992c2705aa034e3f315d78608a0f7de4ccf7a72e494855ad0d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x25b5c004a4bdfcb5add9ec4e9ab219ba102c67e8b3effb5fc3a30f317250bc5a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x23b1822d278ed632a494e58f6df6f5ed038b186d8474155ad87e7dff62b37f4b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x22734b4c5c3f9493606c4ba9012499bf0f14d13bfcfcccaa16102a29cc2f69e0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x26c0c8fe09eb30b7e27a74dc33492347e5bdff409aa3610254413d3fad795ce5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x070dd0ccb6bd7bbae88eac03fa1fbb26196be3083a809829bbd626df348ccad9", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x12b6595bdb329b6fb043ba78bb28c3bec2c0a6de46d8c5ad6067c4ebfd4250da", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x248d97d7f76283d63bec30e7a5876c11c06fca9b275c671c5e33d95bb7e8d729", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1a306d439d463b0816fc6fd64cc939318b45eb759ddde4aa106d15d9bd9baaaa", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x28a8f8372e3c38daced7c00421cb4621f4f1b54ddc27821b0d62d3d6ec7c56cf", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0094975717f9a8a8bb35152f24d43294071ce320c829f388bc852183e1e2ce7e", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x04d5ee4c3aa78f7d80fde60d716480d3593f74d4f653ae83f4103246db2e8d65", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2a6cf5e9aa03d4336349ad6fb8ed2269c7bef54b8822cc76d08495c12efde187", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2304d31eaab960ba9274da43e19ddeb7f792180808fd6e43baae48d7efcba3f3", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x03fd9ac865a4b2a6d5e7009785817249bff08a7e0726fcb4e1c11d39d199f0b0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x00b7258ded52bbda2248404d55ee5044798afc3a209193073f7954d4d63b0b64", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x159f81ada0771799ec38fca2d4bf65ebb13d3a74f3298db36272c5ca65e92d9a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1ef90e67437fbc8550237a75bc28e3bb9000130ea25f0c5471e144cf4264431f", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x1e65f838515e5ff0196b49aa41a2d2568df739bc176b08ec95a79ed82932e30d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2b1b045def3a166cec6ce768d079ba74b18c844e570e1f826575c1068c94c33f", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0832e5753ceb0ff6402543b1109229c165dc2d73bef715e3f1c6e07c168bb173", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x02f614e9cedfb3dc6b762ae0a37d41bab1b841c2e8b6451bc5a8e3c390b6ad16", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0e2427d38bd46a60dd640b8e362cad967370ebb777bedff40f6a0be27e7ed705", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0493630b7c670b6deb7c84d414e7ce79049f0ec098c3c7c50768bbe29214a53a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x22ead100e8e482674decdab17066c5a26bb1515355d5461a3dc06cc85327cea9", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x25b3e56e655b42cdaae2626ed2554d48583f1ae35626d04de5084e0b6d2a6f16", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1e32752ada8836ef5837a6cde8ff13dbb599c336349e4c584b4fdc0a0cf6f9d0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x2fa2a871c15a387cc50f68f6f3c3455b23c00995f05078f672a9864074d412e5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2f569b8a9a4424c9278e1db7311e889f54ccbf10661bab7fcd18e7c7a7d83505", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x044cb455110a8fdd531ade530234c518a7df93f7332ffd2144165374b246b43d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x227808de93906d5d420246157f2e42b191fe8c90adfe118178ddc723a5319025", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x02fcca2934e046bc623adead873579865d03781ae090ad4a8579d2e7a6800355", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0ef915f0ac120b876abccceb344a1d36bad3f3c5ab91a8ddcbec2e060d8befac", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1797130f4b7a3e1777eb757bc6f287f6ab0fb85f6be63b09f3b16ef2b1405d38", + ), + Poseidon2::field_from_hex( + "0x0a76225dc04170ae3306c85abab59e608c7f497c20156d4d36c668555decc6e5", + ), + Poseidon2::field_from_hex( + "0x1fffb9ec1992d66ba1e77a7b93209af6f8fa76d48acb664796174b5326a31a5c", + ), + Poseidon2::field_from_hex( + "0x25721c4fc15a3f2853b57c338fa538d85f8fbba6c6b9c6090611889b797b9c5f", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c817fd42d5f7a41215e3d07ba197216adb4c3790705da95eb63b982bfcaf75a", + ), + Poseidon2::field_from_hex( + "0x13abe3f5239915d39f7e13c2c24970b6df8cf86ce00a22002bc15866e52b5a96", + ), + Poseidon2::field_from_hex( + "0x2106feea546224ea12ef7f39987a46c85c1bc3dc29bdbd7a92cd60acb4d391ce", + ), + Poseidon2::field_from_hex( + "0x21ca859468a746b6aaa79474a37dab49f1ca5a28c748bc7157e1b3345bb0f959", + ), + ], + [ + Poseidon2::field_from_hex( + "0x05ccd6255c1e6f0c5cf1f0df934194c62911d14d0321662a8f1a48999e34185b", + ), + Poseidon2::field_from_hex( + "0x0f0e34a64b70a626e464d846674c4c8816c4fb267fe44fe6ea28678cb09490a4", + ), + Poseidon2::field_from_hex( + "0x0558531a4e25470c6157794ca36d0e9647dbfcfe350d64838f5b1a8a2de0d4bf", + ), + Poseidon2::field_from_hex( + "0x09d3dca9173ed2faceea125157683d18924cadad3f655a60b72f5864961f1455", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x0328cbd54e8c0913493f866ed03d218bf23f92d68aaec48617d4c722e5bd4335", + ), + Poseidon2::field_from_hex( + "0x2bf07216e2aff0a223a487b1a7094e07e79e7bcc9798c648ee3347dd5329d34b", + ), + Poseidon2::field_from_hex( + "0x1daf345a58006b736499c583cb76c316d6f78ed6a6dffc82111e11a63fe412df", + ), + Poseidon2::field_from_hex( + "0x176563472456aaa746b694c60e1823611ef39039b2edc7ff391e6f2293d2c404", + ), + ], + ], + } + } + fn field_from_hex(hex: &str) -> FieldElement { + let bigint = BigUint::from_str_radix(hex.strip_prefix("0x").unwrap(), 16).unwrap(); + FieldElement::from_be_bytes_reduce(&bigint.to_bytes_be()) + } + + fn single_box(x: FieldElement) -> FieldElement { + let s = x * x; + s * s * x + } + + fn s_box(input: &mut [FieldElement]) { + for i in input { + *i = Self::single_box(*i); + } + } + + fn add_round_constants(&self, state: &mut [FieldElement], round: usize) { + for (state_element, constant_element) in state.iter_mut().zip(self.round_constant[round]) { + *state_element += constant_element; + } + } + + /// Algorithm is taken directly from the Poseidon2 implementation in Barretenberg crypto module. 
+ fn matrix_multiplication_4x4(input: &mut [FieldElement]) { + assert!(input.len() == 4); + let t0 = input[0] + input[1]; // A + B + let t1 = input[2] + input[3]; // C + D + let mut t2 = input[1] + input[1]; // 2B + t2 += t1; // 2B + C + D + let mut t3 = input[3] + input[3]; // 2D + t3 += t0; // 2D + A + B + let mut t4 = t1 + t1; + t4 += t4; + t4 += t3; // A + B + 4C + 6D + let mut t5 = t0 + t0; + t5 += t5; + t5 += t2; // 4A + 6B + C + D + let t6 = t3 + t5; // 5A + 7B + C + 3D + let t7 = t2 + t4; // A + 3B + 5C + 7D + input[0] = t6; + input[1] = t5; + input[2] = t7; + input[3] = t4; + } + + fn internal_m_multiplication(&self, input: &mut [FieldElement]) { + let mut sum = FieldElement::zero(); + for i in input.iter() { + sum += *i; + } + for (index, i) in input.iter_mut().enumerate() { + *i = *i * self.internal_matrix_diagonal[index]; + *i += sum; + } + } + + pub(crate) fn permutation( + &self, + inputs: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + if len as usize != inputs.len() { + return Err(BlackBoxResolutionError::Failed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of inputs does not match specified length. 
{} > {}", + inputs.len(), + len + ), + )); + } + if len != self.t { + return Err(BlackBoxResolutionError::Failed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!("Expected {} values but encountered {}", self.t, len), + )); + } + // Read witness assignments + let mut state = [FieldElement::zero(); 4]; + for (index, input) in inputs.iter().enumerate() { + state[index] = *input; + } + // Apply 1st linear layer + Self::matrix_multiplication_4x4(&mut state); + + // First set of external rounds + let rf_first = self.rounds_f / 2; + for r in 0..rf_first { + self.add_round_constants(&mut state, r as usize); + Self::s_box(&mut state); + Self::matrix_multiplication_4x4(&mut state); + } + // Internal rounds + let p_end = rf_first + self.rounds_p; + for r in rf_first..p_end { + state[0] += self.round_constant[r as usize][0]; + state[0] = Self::single_box(state[0]); + self.internal_m_multiplication(&mut state); + } + + // Remaining external rounds + let num_rounds = self.rounds_f + self.rounds_p; + for i in p_end..num_rounds { + self.add_round_constants(&mut state, i as usize); + Self::s_box(&mut state); + Self::matrix_multiplication_4x4(&mut state); + } + Ok(state.into()) + } +} diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 5b2680465a..73b57b907f 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -184,7 +184,18 @@ pub(crate) fn evaluate_black_box( BlackBoxOp::BigIntDiv { .. } => todo!(), BlackBoxOp::BigIntFromLeBytes { .. } => todo!(), BlackBoxOp::BigIntToLeBytes { .. } => todo!(), - BlackBoxOp::Poseidon2Permutation { .. 
} => todo!(), + BlackBoxOp::Poseidon2Permutation { message, output, len } => { + let input = read_heap_vector(memory, message); + let input: Vec = input.iter().map(|x| x.to_field()).collect(); + let len = memory.read(*len).to_u128() as u32; + let result = solver.poseidon2_permutation(&input, len)?; + let mut values = Vec::new(); + for i in result { + values.push(Value::from(i)); + } + memory.write_slice(memory.read_ref(output.pointer), &values); + Ok(()) + } BlackBoxOp::Sha256Compression { input, hash_values, output } => { let mut message = [0; 16]; let inputs = read_heap_vector(memory, input); diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 13accbeacb..c7bf014f06 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -568,6 +568,13 @@ impl BlackBoxFunctionSolver for DummyBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Ok((5_u128.into(), 6_u128.into())) } + fn poseidon2_permutation( + &self, + _input: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + Ok(vec![0_u128.into(); len as usize]) + } } #[cfg(test)] diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 073b0e6f59..90608974f9 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -1151,6 +1151,14 @@ pub(crate) mod tests { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { panic!("Path not trodden by this test") } + + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result, BlackBoxResolutionError> { + Ok(vec![0_u128.into(), 1_u128.into(), 2_u128.into(), 3_u128.into()]) + } } pub(crate) fn create_context() -> BrilligContext { diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx index 
85706384ee..b9239f822e 100644 --- a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx +++ b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -114,6 +114,19 @@ example: #include_code poseidon test_programs/execution_success/poseidon_bn254_hash/src/main.nr rust +## poseidon 2 + +Given an array of Fields, returns a new Field with the Poseidon2 Hash. Contrary to the Poseidon +function, there is only one hash and you can specify a message_size to hash only the first +`message_size` field elements of the input. + +```rust +// example for hashing the first three elements of the input +Poseidon2::hash(input, 3); +``` + +The above example for Poseidon also includes Poseidon2. + ## mimc_bn254 and mimc `mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index 7a931f7c04..fcf2143619 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -1,5 +1,6 @@ mod poseidon; mod mimc; +mod poseidon2; mod pedersen; use crate::default::Default; @@ -73,7 +74,7 @@ pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] {} #[foreign(poseidon2_permutation)] -pub fn poseidon2_permutation(_input: [u8; N], _state_length: u32) -> [u8; N] {} +pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr new file mode 100644 index 0000000000..8e0fcc6858 --- /dev/null +++ b/noir_stdlib/src/hash/poseidon2.nr @@ -0,0 +1,119 @@ +global rate = 3; + +struct Poseidon2 { + cache: [Field;3], + state: [Field;4], + cache_size: u32, + squeeze_mode: bool, // 0 => absorb, 1 => squeeze +} + +impl Poseidon2 { + + pub fn hash(input: [Field; N], message_size: u32) -> Field { + if message_size == N { + Poseidon2::hash_internal(input, N, false) + } else 
{ + Poseidon2::hash_internal(input, message_size, true) + } + } + + fn new(iv: Field) -> Poseidon2 { + let mut result = Poseidon2 { + cache: [0;3], + state: [0;4], + cache_size: 0, + squeeze_mode: false, + }; + result.state[rate] = iv; + result + } + + fn perform_duplex(&mut self) -> [Field; rate] { + // zero-pad the cache + for i in 0..rate { + if i >= self.cache_size { + self.cache[i] = 0; + } + } + // add the cache into sponge state + for i in 0..rate { + self.state[i] += self.cache[i]; + } + self.state = crate::hash::poseidon2_permutation(self.state, 4); + // return `rate` number of field elements from the sponge state. + let mut result = [0; rate]; + for i in 0..rate { + result[i] = self.state[i]; + } + result + } + + fn absorb(&mut self, input: Field) { + if (!self.squeeze_mode) & (self.cache_size == rate) { + // If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache + let _ = self.perform_duplex(); + self.cache[0] = input; + self.cache_size = 1; + } else if (!self.squeeze_mode) & (self.cache_size != rate) { + // If we're absorbing, and the cache is not full, add the input into the cache + self.cache[self.cache_size] = input; + self.cache_size += 1; + } else if self.squeeze_mode { + // If we're in squeeze mode, switch to absorb mode and add the input into the cache. + // N.B. I don't think this code path can be reached?! + self.cache[0] = input; + self.cache_size = 1; + self.squeeze_mode = false; + } + } + + fn squeeze(&mut self) -> Field + { + if self.squeeze_mode & (self.cache_size == 0) { + // If we're in squeeze mode and the cache is empty, there is nothing left to squeeze out of the sponge! + // Switch to absorb mode. + self.squeeze_mode = false; + self.cache_size = 0; + } + if !self.squeeze_mode { + // If we're in absorb mode, apply sponge permutation to compress the cache, populate cache with compressed + // state and switch to squeeze mode. 
Note: this code block will execute if the previous `if` condition was + // matched + let new_output_elements = self.perform_duplex(); + self.squeeze_mode = true; + for i in 0..rate { + self.cache[i] = new_output_elements[i]; + } + self.cache_size = rate; + } + // By this point, we should have a non-empty cache. Pop one item off the top of the cache and return it. + let result = self.cache[0]; + for i in 1..rate { + if i < self.cache_size { + self.cache[i - 1] = self.cache[i]; + } + } + self.cache_size -= 1; + self.cache[self.cache_size] = 0; + result + } + + fn hash_internal(input:[Field;N], in_len:u32, is_variable_length: bool) -> Field + { + let iv : Field = (in_len as Field)*18446744073709551616; + let mut sponge = Poseidon2::new(iv); + for i in 0..input.len() { + if i as u32 < in_len { + sponge.absorb(input[i]); + } + } + + // In the case where the hash preimage is variable-length, we append `1` to the end of the input, to distinguish + // from fixed-length hashes. (the combination of this additional field element + the hash IV ensures + // fixed-length and variable-length hashes do not collide) + if is_variable_length { + sponge.absorb(1); + } + sponge.squeeze() + } +} diff --git a/test_programs/execution_success/poseidon_bn254_hash/Prover.toml b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml index 8eecf9a3db..fa6fd05b0a 100644 --- a/test_programs/execution_success/poseidon_bn254_hash/Prover.toml +++ b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml @@ -2,3 +2,8 @@ x1 = [1,2] y1 = "0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a" x2 = [1,2,3,4] y2 = "0x299c867db6c1fdd79dcefa40e4510b9837e60ebb1ce0663dbaa525df65250465" +x3 = ["4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094", + 
"4218458030232820015255714794613421442512497197372123294583664908262453897094"] + y3 = "0x2f43a0f83b51a6f5fc839dea0ecec74947637802a579fa9841930a25a0bcec11" diff --git a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr index e742a440d1..939b99595c 100644 --- a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr +++ b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr @@ -1,11 +1,15 @@ // docs:start:poseidon use dep::std::hash::poseidon; +use dep::std::hash::poseidon2; -fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) { let hash1 = poseidon::bn254::hash_2(x1); assert(hash1 == y1); let hash2 = poseidon::bn254::hash_4(x2); assert(hash2 == y2); + + let hash3 = poseidon2::Poseidon2::hash(x3, x3.len() as u32); + assert(hash3 == y3); } // docs:end:poseidon diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index f001cebaa4..d0acbf1aec 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -49,4 +49,12 @@ impl BlackBoxFunctionSolver for WrapperSolver { ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { self.0.ec_add(input1_x, input1_y, input2_x, input2_y) } + + fn poseidon2_permutation( + &self, + inputs: &[acvm::FieldElement], + len: u32, + ) -> Result, acvm::BlackBoxResolutionError> { + self.0.poseidon2_permutation(inputs, len) + } } From 568a7812b6a11cd427ffca38103179c7ec0830db Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 27 Feb 2024 10:19:15 +0000 Subject: [PATCH 09/17] chore: nargo fmt (#4434) # Description ## Problem\* Resolves ## Summary\* This PR fixes the formatting CI ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. 
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- noir_stdlib/src/collections/bounded_vec.nr | 2 +- noir_stdlib/src/hash/poseidon2.nr | 41 +++++++++------------- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index f78d86de77..4a14bd1637 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -77,7 +77,7 @@ impl BoundedVec { let mut exceeded_len = false; for i in 0..MaxLen { exceeded_len |= i == self.len; - if (!exceeded_len) { + if !exceeded_len { ret |= predicate(self.storage[i]); } } diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr index 8e0fcc6858..64c1876b4e 100644 --- a/noir_stdlib/src/hash/poseidon2.nr +++ b/noir_stdlib/src/hash/poseidon2.nr @@ -1,4 +1,4 @@ -global rate = 3; +global RATE = 3; struct Poseidon2 { cache: [Field;3], @@ -18,43 +18,38 @@ impl Poseidon2 { } fn new(iv: Field) -> Poseidon2 { - let mut result = Poseidon2 { - cache: [0;3], - state: [0;4], - cache_size: 0, - squeeze_mode: false, - }; - result.state[rate] = iv; + let mut result = Poseidon2 { cache: [0; 3], state: [0; 4], cache_size: 0, squeeze_mode: false }; + result.state[RATE] = iv; result } - fn perform_duplex(&mut self) -> [Field; rate] { + fn perform_duplex(&mut self) -> [Field; RATE] { // zero-pad the cache - for i in 0..rate { + for i in 0..RATE { if i >= self.cache_size { self.cache[i] = 0; } } // add the cache into sponge state - for i in 0..rate { + for i in 0..RATE { self.state[i] += self.cache[i]; } self.state = crate::hash::poseidon2_permutation(self.state, 4); - // return `rate` number of field elements from the sponge state. 
- let mut result = [0; rate]; - for i in 0..rate { + // return `RATE` number of field elements from the sponge state. + let mut result = [0; RATE]; + for i in 0..RATE { result[i] = self.state[i]; } result } fn absorb(&mut self, input: Field) { - if (!self.squeeze_mode) & (self.cache_size == rate) { + if (!self.squeeze_mode) & (self.cache_size == RATE) { // If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache let _ = self.perform_duplex(); self.cache[0] = input; self.cache_size = 1; - } else if (!self.squeeze_mode) & (self.cache_size != rate) { + } else if (!self.squeeze_mode) & (self.cache_size != RATE) { // If we're absorbing, and the cache is not full, add the input into the cache self.cache[self.cache_size] = input; self.cache_size += 1; @@ -67,8 +62,7 @@ impl Poseidon2 { } } - fn squeeze(&mut self) -> Field - { + fn squeeze(&mut self) -> Field { if self.squeeze_mode & (self.cache_size == 0) { // If we're in squeze mode and the cache is empty, there is nothing left to squeeze out of the sponge! // Switch to absorb mode. @@ -81,14 +75,14 @@ impl Poseidon2 { // matched let new_output_elements = self.perform_duplex(); self.squeeze_mode = true; - for i in 0..rate { + for i in 0..RATE { self.cache[i] = new_output_elements[i]; } - self.cache_size = rate; + self.cache_size = RATE; } // By this point, we should have a non-empty cache. Pop one item off the top of the cache and return it. 
let result = self.cache[0]; - for i in 1..rate { + for i in 1..RATE { if i < self.cache_size { self.cache[i - 1] = self.cache[i]; } @@ -98,9 +92,8 @@ impl Poseidon2 { result } - fn hash_internal(input:[Field;N], in_len:u32, is_variable_length: bool) -> Field - { - let iv : Field = (in_len as Field)*18446744073709551616; + fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { + let iv : Field = (in_len as Field) * 18446744073709551616; let mut sponge = Poseidon2::new(iv); for i in 0..input.len() { if i as u32 < in_len { From b9384fb23abf4ab15e880fb7e03c21509a9fa8a6 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 27 Feb 2024 10:44:23 +0000 Subject: [PATCH 10/17] chore!: Remove empty value from bounded vec (#4431) # Description ## Problem\* ## Summary\* Removes the `empty_value` field from the bounded vec. This muddies the API and shouldn't be needed since we have `crate::unsafe::zeroed()` instead. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [x] **[Exceptional Case]** Documentation to be submitted in a separate PR. - Included in https://github.com/noir-lang/noir/pull/4430 # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: Tom French --- noir_stdlib/src/collections/bounded_vec.nr | 8 ++--- .../noir_test_success/bounded_vec/src/main.nr | 32 +++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index 4a14bd1637..752b96d659 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -1,12 +1,12 @@ struct BoundedVec { storage: [T; MaxLen], len: u64, - empty_value: T, } impl BoundedVec { - pub fn new(initial_value: T) -> Self { - BoundedVec { storage: [initial_value; MaxLen], len: 0, empty_value: initial_value } + pub fn new() -> Self { + let zeroed = crate::unsafe::zeroed(); + BoundedVec { storage: [zeroed; MaxLen], len: 0 } } pub fn get(mut self: Self, index: u64) -> T { @@ -68,7 +68,7 @@ impl BoundedVec { self.len -= 1; let elem = self.storage[self.len]; - self.storage[self.len] = self.empty_value; + self.storage[self.len] = crate::unsafe::zeroed(); elem } diff --git a/test_programs/noir_test_success/bounded_vec/src/main.nr b/test_programs/noir_test_success/bounded_vec/src/main.nr index d51d2cc368..0e2c89c906 100644 --- a/test_programs/noir_test_success/bounded_vec/src/main.nr +++ b/test_programs/noir_test_success/bounded_vec/src/main.nr @@ -1,6 +1,6 @@ #[test] fn test_vec_push_pop() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); assert(vec.len == 0); vec.push(2); assert(vec.len == 1); @@ -17,7 +17,7 @@ fn test_vec_push_pop() { #[test] fn test_vec_extend_from_array() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); assert(vec.len == 2); assert(vec.get(0) == 2); @@ -26,13 +26,13 @@ fn test_vec_extend_from_array() { #[test(should_fail_with="extend_from_array out of bounds")] fn 
test_vec_extend_from_array_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4, 6]); } #[test(should_fail_with="extend_from_array out of bounds")] fn test_vec_extend_from_array_twice_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2]); assert(vec.len == 1); vec.extend_from_array([4, 6]); @@ -40,36 +40,36 @@ fn test_vec_extend_from_array_twice_out_of_bound() { #[test(should_fail)] fn test_vec_get_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); let _x = vec.get(2); } #[test(should_fail)] fn test_vec_get_not_declared() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2]); let _x = vec.get(1); } #[test(should_fail)] fn test_vec_get_uninitialized() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); let _x = vec.get(0); } #[test(should_fail_with="push out of bounds")] fn test_vec_push_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.push(1); vec.push(2); } #[test(should_fail_with="extend_from_bounded_vec out of bounds")] fn test_vec_extend_from_bounded_vec_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); - let mut another_vec: BoundedVec = BoundedVec::new(0); + let mut another_vec: BoundedVec = BoundedVec::new(); another_vec.extend_from_array([1, 2, 3]); vec.extend_from_bounded_vec(another_vec); @@ -77,10 +77,10 @@ fn test_vec_extend_from_bounded_vec_out_of_bound() { #[test(should_fail_with="extend_from_bounded_vec out of bounds")] fn test_vec_extend_from_bounded_vec_twice_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = 
BoundedVec::new(); vec.extend_from_array([1, 2]); - let mut another_vec: BoundedVec = BoundedVec::new(0); + let mut another_vec: BoundedVec = BoundedVec::new(); another_vec.push(3); vec.extend_from_bounded_vec(another_vec); @@ -88,7 +88,7 @@ fn test_vec_extend_from_bounded_vec_twice_out_of_bound() { #[test] fn test_vec_any() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4, 6]); assert(vec.any(|v| v == 2) == true); assert(vec.any(|v| v == 4) == true); @@ -98,8 +98,8 @@ fn test_vec_any() { #[test] fn test_vec_any_not_default() { - let default_value = 1; - let mut vec: BoundedVec = BoundedVec::new(default_value); + let default_value = 0; + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); assert(vec.any(|v| v == default_value) == false); -} \ No newline at end of file +} From 8f935af0813c4f012005c8b3eb70441b44db5714 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 27 Feb 2024 11:43:55 +0000 Subject: [PATCH 11/17] chore(docs): fix external contributor force push workflow (#4437) # Description ## Problem\* Resolves # ## Summary\* This workflow was failing due to the fact that we're storing the message in a file but not checking out the repository. While checking out would be safe in this case, I've just moved the comment into the workflow for paranoia reasons. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md | 5 ----- .github/workflows/pull-request-title.yml | 6 +++++- 2 files changed, 5 insertions(+), 6 deletions(-) delete mode 100644 .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md diff --git a/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md b/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md deleted file mode 100644 index 4031bcdb61..0000000000 --- a/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md +++ /dev/null @@ -1,5 +0,0 @@ -Thank you for your contribution to the Noir language. - -Please **do not force push to this branch** after the Noir team have reviewed this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. - -Thanks for your understanding. \ No newline at end of file diff --git a/.github/workflows/pull-request-title.yml b/.github/workflows/pull-request-title.yml index 8f863160cf..7e9b729da2 100644 --- a/.github/workflows/pull-request-title.yml +++ b/.github/workflows/pull-request-title.yml @@ -39,6 +39,10 @@ jobs: - name: Post comment on force pushes uses: marocchino/sticky-pull-request-comment@v2 with: - path: ./.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md + message: | + Thank you for your contribution to the Noir language. + Please **do not force push to this branch** after the Noir team have started review of this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. + + Thanks for your understanding. \ No newline at end of file From 2498115cf197450f33af0b9c158fa2ee4ce3e222 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 27 Feb 2024 06:35:28 -0500 Subject: [PATCH 12/17] feat: Sync from aztec-packages (#4390) BEGIN_COMMIT_OVERRIDE chore: bootstrap improvements. 
(https://github.com/AztecProtocol/aztec-packages/pull/4711) chore: Add full recursive verification test (https://github.com/AztecProtocol/aztec-packages/pull/4658) chore: add struct for each bigint modulus (https://github.com/AztecProtocol/aztec-packages/pull/4422) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: sirasistant --- Cargo.lock | 1 + Dockerfile | 6 +- aztec_macros/Cargo.toml | 1 + aztec_macros/src/lib.rs | 269 ++++++++++++-- bootstrap.sh | 3 + bootstrap_cache.sh | 4 + .../src/hir/def_collector/dc_crate.rs | 14 + .../noirc_frontend/src/hir/def_map/mod.rs | 5 + compiler/noirc_frontend/src/hir/mod.rs | 4 + compiler/noirc_frontend/src/lib.rs | 11 + compiler/noirc_frontend/src/node_interner.rs | 2 +- noir_stdlib/src/bigint.nr | 327 ++++++++++++++++-- noir_stdlib/src/uint128.nr | 2 +- noirc_macros/src/lib.rs | 12 + scripts/test_native.sh | 4 +- .../1327_concrete_in_generic/src/main.nr | 4 +- .../execution_success/bigint/src/main.nr | 13 +- .../brillig_cow_regression/Prover.toml | 2 +- .../brillig_cow_regression/src/main.nr | 24 +- .../double_verify_nested_proof/Nargo.toml | 7 + .../double_verify_nested_proof/Prover.toml | 5 + .../double_verify_nested_proof/src/main.nr | 28 ++ .../double_verify_proof/src/main.nr | 3 +- .../regression_4124/src/main.nr | 8 +- test_programs/gates_report.sh | 2 +- tooling/debugger/ignored-tests.txt | 1 + tooling/nargo_fmt/tests/expected/contract.nr | 21 +- tooling/nargo_fmt/tests/input/contract.nr | 37 +- 28 files changed, 688 insertions(+), 132 deletions(-) create mode 100644 test_programs/execution_success/double_verify_nested_proof/Nargo.toml create mode 100644 test_programs/execution_success/double_verify_nested_proof/Prover.toml create mode 100644 test_programs/execution_success/double_verify_nested_proof/src/main.nr diff --git a/Cargo.lock b/Cargo.lock index 714b700119..c0438eaf81 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ 
-417,6 +417,7 @@ version = "0.24.0" dependencies = [ "convert_case 0.6.0", "iter-extended", + "noirc_errors", "noirc_frontend", ] diff --git a/Dockerfile b/Dockerfile index 000292e0a4..3a478c3f95 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,12 @@ -FROM rust:bookworm +FROM rust:bullseye WORKDIR /usr/src/noir COPY . . RUN ./scripts/bootstrap_native.sh # When running the container, mount the users home directory to same location. -FROM ubuntu:lunar +FROM ubuntu:focal # Install Tini as nargo doesn't handle signals properly. # Install git as nargo needs it to clone. RUN apt-get update && apt-get install -y git tini && rm -rf /var/lib/apt/lists/* && apt-get clean COPY --from=0 /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo -ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/target/release/nargo"] \ No newline at end of file +ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/target/release/nargo"] diff --git a/aztec_macros/Cargo.toml b/aztec_macros/Cargo.toml index 5e908b2e67..ed9821fabc 100644 --- a/aztec_macros/Cargo.toml +++ b/aztec_macros/Cargo.toml @@ -11,5 +11,6 @@ repository.workspace = true [dependencies] noirc_frontend.workspace = true +noirc_errors.workspace = true iter-extended.workspace = true convert_case = "0.6.0" diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 0b93dbaa63..0ccc421d3b 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -3,6 +3,10 @@ use std::vec; use convert_case::{Case, Casing}; use iter_extended::vecmap; +use noirc_errors::Location; +use noirc_frontend::hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}; +use noirc_frontend::hir::def_map::{LocalModuleId, ModuleId}; +use noirc_frontend::macros_api::parse_program; use noirc_frontend::macros_api::FieldElement; use noirc_frontend::macros_api::{ BlockExpression, CallExpression, CastExpression, Distinctness, Expression, ExpressionKind, @@ -16,9 +20,8 @@ use noirc_frontend::macros_api::{ use 
noirc_frontend::macros_api::{CrateId, FileId}; use noirc_frontend::macros_api::{MacroError, MacroProcessor}; use noirc_frontend::macros_api::{ModuleDefId, NodeInterner, SortedModule, StructId}; -use noirc_frontend::node_interner::{TraitId, TraitImplKind}; +use noirc_frontend::node_interner::{FuncId, TraitId, TraitImplId, TraitImplKind}; use noirc_frontend::Lambda; - pub struct AztecMacro; impl MacroProcessor for AztecMacro { @@ -31,6 +34,25 @@ impl MacroProcessor for AztecMacro { transform(ast, crate_id, context) } + fn process_unresolved_traits_impls( + &self, + crate_id: &CrateId, + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + collected_functions: &mut Vec, + ) -> Result<(), (MacroError, FileId)> { + if has_aztec_dependency(crate_id, context) { + inject_compute_note_hash_and_nullifier( + crate_id, + context, + unresolved_traits_impls, + collected_functions, + ) + } else { + Ok(()) + } + } + fn process_typed_ast( &self, crate_id: &CrateId, @@ -46,7 +68,6 @@ const MAX_CONTRACT_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); #[derive(Debug, Clone)] pub enum AztecMacroError { AztecDepNotFound, - ComputeNoteHashAndNullifierNotFound { span: Span }, ContractHasTooManyFunctions { span: Span }, ContractConstructorMissing { span: Span }, UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, @@ -63,11 +84,6 @@ impl From for MacroError { secondary_message: None, span: None, }, - AztecMacroError::ComputeNoteHashAndNullifierNotFound { span } => MacroError { - primary_message: "compute_note_hash_and_nullifier function not found. Define it in your contract. 
For more information go to https://docs.aztec.network/developers/debugging/aztecnr-errors#compute_note_hash_and_nullifier-function-not-found-define-it-in-your-contract".to_owned(), - secondary_message: None, - span: Some(span), - }, AztecMacroError::ContractHasTooManyFunctions { span } => MacroError { primary_message: format!("Contract can only have a maximum of {} functions", MAX_CONTRACT_FUNCTIONS), secondary_message: None, @@ -313,15 +329,17 @@ fn check_for_aztec_dependency( crate_id: &CrateId, context: &HirContext, ) -> Result<(), (MacroError, FileId)> { - let crate_graph = &context.crate_graph[crate_id]; - let has_aztec_dependency = crate_graph.dependencies.iter().any(|dep| dep.as_name() == "aztec"); - if has_aztec_dependency { + if has_aztec_dependency(crate_id, context) { Ok(()) } else { - Err((AztecMacroError::AztecDepNotFound.into(), crate_graph.root_file_id)) + Err((AztecMacroError::AztecDepNotFound.into(), context.crate_graph[crate_id].root_file_id)) } } +fn has_aztec_dependency(crate_id: &CrateId, context: &HirContext) -> bool { + context.crate_graph[crate_id].dependencies.iter().any(|dep| dep.as_name() == "aztec") +} + // Check to see if the user has defined a storage struct fn check_for_storage_definition(module: &SortedModule) -> bool { module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") @@ -338,27 +356,30 @@ fn check_for_storage_implementation(module: &SortedModule) -> bool { } // Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,Field,[Field; N]) -> [Field; 4]" is defined -fn check_for_compute_note_hash_and_nullifier_definition(module: &SortedModule) -> bool { - module.functions.iter().any(|func| { - func.def.name.0.contents == "compute_note_hash_and_nullifier" - && func.def.parameters.len() == 5 - && match &func.def.parameters[0].typ.typ { +fn check_for_compute_note_hash_and_nullifier_definition( + functions_data: &[(LocalModuleId, FuncId, NoirFunction)], + module_id: LocalModuleId, +) -> bool { + 
functions_data.iter().filter(|func_data| func_data.0 == module_id).any(|func_data| { + func_data.2.def.name.0.contents == "compute_note_hash_and_nullifier" + && func_data.2.def.parameters.len() == 5 + && match &func_data.2.def.parameters[0].typ.typ { UnresolvedTypeData::Named(path, _, _) => path.segments.last().unwrap().0.contents == "AztecAddress", _ => false, } - && func.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement - && func.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement - && func.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement // checks if the 5th parameter is an array and the Box in // Array(Option, Box) contains only fields - && match &func.def.parameters[4].typ.typ { + && match &func_data.2.def.parameters[4].typ.typ { UnresolvedTypeData::Array(_, inner_type) => { matches!(inner_type.typ, UnresolvedTypeData::FieldElement) }, _ => false, } // We check the return type the same way as we did the 5th parameter - && match &func.def.return_type { + && match &func_data.2.def.return_type { FunctionReturnType::Default(_) => false, FunctionReturnType::Ty(unresolved_type) => { match &unresolved_type.typ { @@ -401,13 +422,6 @@ fn transform_module( generate_storage_implementation(module).map_err(|err| (err, crate_graph.root_file_id))?; } - if storage_defined && !check_for_compute_note_hash_and_nullifier_definition(module) { - return Err(( - AztecMacroError::ComputeNoteHashAndNullifierNotFound { span: Span::default() }, - crate_graph.root_file_id, - )); - } - for structure in module.types.iter() { if structure.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { module.impls.push(generate_selector_impl(structure)); @@ -596,7 +610,7 @@ fn 
generate_storage_implementation(module: &mut SortedModule) -> Result<(), Azte /// If it does, it will insert the following things: /// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs /// - Hashes all of the function input variables -/// - This instantiates a helper function +/// - This instantiates a helper function fn transform_function( ty: &str, func: &mut NoirFunction, @@ -826,8 +840,8 @@ fn get_serialized_length( && !interner.lookup_all_trait_implementations(stored_in_state, trait_id).is_empty() }); - // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicState for whatever reason though. - if struct_name == "Map" || (is_note && struct_name != "PublicState") { + // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicMutable for whatever reason though. + if struct_name == "Map" || (is_note && struct_name != "PublicMutable") { return Ok(1); } @@ -1601,3 +1615,194 @@ fn event_signature(event: &StructType) -> String { let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); format!("{}({})", event.name.0.contents, fields.join(",")) } + +fn inject_compute_note_hash_and_nullifier( + crate_id: &CrateId, + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + collected_functions: &mut [UnresolvedFunctions], +) -> Result<(), (MacroError, FileId)> { + // We first fetch modules in this crate which correspond to contracts, along with their file id. + let contract_module_file_ids: Vec<(LocalModuleId, FileId)> = context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .filter(|(_, module)| module.is_contract) + .map(|(idx, module)| (LocalModuleId(idx), module.location.file)) + .collect(); + + // If the current crate does not contain a contract module we simply skip it. 
+ if contract_module_file_ids.is_empty() { + return Ok(()); + } else if contract_module_file_ids.len() != 1 { + panic!("Found multiple contracts in the same crate"); + } + + let (module_id, file_id) = contract_module_file_ids[0]; + + // If compute_note_hash_and_nullifier is already defined by the user, we skip auto-generation in order to provide an + // escape hatch for this mechanism. + // TODO(#4647): improve this diagnosis and error messaging. + if collected_functions.iter().any(|coll_funcs_data| { + check_for_compute_note_hash_and_nullifier_definition(&coll_funcs_data.functions, module_id) + }) { + return Ok(()); + } + + // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the + // contract might use. These are the types that implement the NoteInterface trait, which provides the + // get_note_type_id function. + let note_types = fetch_struct_trait_impls(context, unresolved_traits_impls, "NoteInterface"); + + // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate. + let func = generate_compute_note_hash_and_nullifier(¬e_types); + + // And inject the newly created function into the contract. + + // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply + // pass an empty span. This function should not produce errors anyway so this should not matter. + let location = Location::new(Span::empty(0), file_id); + + // These are the same things the ModCollector does when collecting functions: we push the function to the + // NodeInterner, declare it in the module (which checks for duplicate definitions), and finally add it to the list + // on collected but unresolved functions. 
+ + let func_id = context.def_interner.push_empty_fn(); + context.def_interner.push_function( + func_id, + &func.def, + ModuleId { krate: *crate_id, local_id: module_id }, + location, + ); + + context.def_map_mut(crate_id).unwrap() + .modules_mut()[module_id.0] + .declare_function( + func.name_ident().clone(), func_id + ).expect( + "Failed to declare the autogenerated compute_note_hash_and_nullifier function, likely due to a duplicate definition. See https://github.com/AztecProtocol/aztec-packages/issues/4647." + ); + + collected_functions + .iter_mut() + .find(|fns| fns.file_id == file_id) + .expect("ICE: no functions found in contract file") + .push_fn(module_id, func_id, func.clone()); + + Ok(()) +} + +// Fetches the name of all structs that implement trait_name, both in the current crate and all of its dependencies. +fn fetch_struct_trait_impls( + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + trait_name: &str, +) -> Vec { + let mut struct_typenames: Vec = Vec::new(); + + // These structs can be declared in either external crates or the current one. External crates that contain + // dependencies have already been processed and resolved, but are available here via the NodeInterner. Note that + // crates on which the current crate does not depend on may not have been processed, and will be ignored. + for trait_impl_id in 0..context.def_interner.next_trait_impl_id().0 { + let trait_impl = &context.def_interner.get_trait_implementation(TraitImplId(trait_impl_id)); + + if trait_impl.borrow().ident.0.contents == *trait_name { + if let Type::Struct(s, _) = &trait_impl.borrow().typ { + struct_typenames.push(s.borrow().name.0.contents.clone()); + } else { + panic!("Found impl for {} on non-Struct", trait_name); + } + } + } + + // This crate's traits and impls have not yet been resolved, so we look for impls in unresolved_trait_impls. 
+ struct_typenames.extend( + unresolved_traits_impls + .iter() + .filter(|trait_impl| { + trait_impl + .trait_path + .segments + .last() + .expect("ICE: empty trait_impl path") + .0 + .contents + == *trait_name + }) + .filter_map(|trait_impl| match &trait_impl.object_type.typ { + UnresolvedTypeData::Named(path, _, _) => { + Some(path.segments.last().unwrap().0.contents.clone()) + } + _ => None, + }), + ); + + struct_typenames +} + +fn generate_compute_note_hash_and_nullifier(note_types: &Vec) -> NoirFunction { + let function_source = generate_compute_note_hash_and_nullifier_source(note_types); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors.clone()); + } + assert_eq!(errors.len(), 0, "Failed to parse Noir macro code. This is either a bug in the compiler or the Noir macro code"); + + let mut function_ast = function_ast.into_sorted(); + function_ast.functions.remove(0) +} + +fn generate_compute_note_hash_and_nullifier_source(note_types: &Vec) -> String { + // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. + // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. + + if note_types.is_empty() { + // TODO(#4520): Even if the contract does not include any notes, other parts of the stack expect for this + // function to exist, so we include a dummy version. We likely should error out here instead. + " + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + note_type_id: Field, + serialized_note: [Field; 20] + ) -> pub [Field; 4] { + [0, 0, 0, 0] + }" + .to_string() + } else { + // For contracts that include notes we do a simple if-else chain comparing note_type_id with the different + // get_note_type_id of each of the note types. 
+ + let if_statements: Vec = note_types.iter().map(|note_type| format!( + "if (note_type_id == {0}::get_note_type_id()) {{ + note_utils::compute_note_hash_and_nullifier({0}::deserialize_content, note_header, serialized_note) + }}" + , note_type)).collect(); + + // TODO(#4520): error out on the else instead of returning a zero array + let full_if_statement = if_statements.join(" else ") + + " + else { + [0, 0, 0, 0] + }"; + + format!( + " + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + note_type_id: Field, + serialized_note: [Field; 20] + ) -> pub [Field; 4] {{ + let note_header = NoteHeader::new(contract_address, nonce, storage_slot); + + {} + }}", + full_if_statement + ) + } +} diff --git a/bootstrap.sh b/bootstrap.sh index 5ebe7ade09..1f9506904a 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -15,5 +15,8 @@ if [ -n "$CMD" ]; then fi fi +# Attempt to just pull artefacts from CI and exit on success. +./bootstrap_cache.sh && exit + ./scripts/bootstrap_native.sh ./scripts/bootstrap_packages.sh \ No newline at end of file diff --git a/bootstrap_cache.sh b/bootstrap_cache.sh index 672702416b..d06aa49366 100755 --- a/bootstrap_cache.sh +++ b/bootstrap_cache.sh @@ -1,6 +1,8 @@ #!/usr/bin/env bash set -eu +[ -z "${NO_CACHE:-}" ] && type docker &> /dev/null && [ -f ~/.aws/credentials ] || exit 1 + cd "$(dirname "$0")" source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null @@ -9,3 +11,5 @@ extract_repo noir-packages /usr/src/noir/packages ./ echo -e "\033[1mRetrieving nargo from remote cache...\033[0m" extract_repo noir /usr/src/noir/target/release ./target/ +remove_old_images noir-packages +remove_old_images noir diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 0d1dd1b433..7f36af5b30 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ 
b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -256,6 +256,20 @@ impl DefCollector { // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); + // TODO(#4653): generalize this function + for macro_processor in ¯o_processors { + macro_processor + .process_unresolved_traits_impls( + &crate_id, + context, + &def_collector.collected_traits_impls, + &mut def_collector.collected_functions, + ) + .unwrap_or_else(|(macro_err, file_id)| { + errors.push((macro_err.into(), file_id)); + }); + } + inject_prelude(crate_id, context, crate_root, &mut def_collector.collected_imports); for submodule in submodules { inject_prelude( diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 8e0dacc294..8721bdb6c3 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -135,6 +135,11 @@ impl CrateDefMap { pub fn modules(&self) -> &Arena { &self.modules } + + pub fn modules_mut(&mut self) -> &mut Arena { + &mut self.modules + } + pub fn krate(&self) -> CrateId { self.krate } diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 4d3800f1a5..00bcb0cdeb 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -91,6 +91,10 @@ impl Context<'_, '_> { self.def_maps.get(crate_id) } + pub fn def_map_mut(&mut self, crate_id: &CrateId) -> Option<&mut CrateDefMap> { + self.def_maps.get_mut(crate_id) + } + /// Return the CrateId for each crate that has been compiled /// successfully pub fn crates(&self) -> impl Iterator + '_ { diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index eb00a61adf..be007929fc 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -45,6 +45,7 @@ pub mod macros_api { pub use noirc_errors::Span; pub use crate::graph::CrateId; + use 
crate::hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}; pub use crate::hir::def_collector::errors::MacroError; pub use crate::hir_def::expr::{HirExpression, HirLiteral}; pub use crate::hir_def::stmt::HirStatement; @@ -74,6 +75,16 @@ pub mod macros_api { crate_id: &CrateId, context: &HirContext, ) -> Result; + + // TODO(#4653): generalize this function + fn process_unresolved_traits_impls( + &self, + _crate_id: &CrateId, + _context: &mut HirContext, + _unresolved_traits_impls: &[UnresolvedTraitImpl], + _collected_functions: &mut Vec, + ) -> Result<(), (MacroError, FileId)>; + /// Function to manipulate the AST after type checking has been completed. /// The AST after type checking has been done is called the HIR. fn process_typed_ast( diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 7d533947f6..5de43e5925 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -367,7 +367,7 @@ impl TraitId { } #[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] -pub struct TraitImplId(usize); +pub struct TraitImplId(pub usize); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct TraitMethodId { diff --git a/noir_stdlib/src/bigint.nr b/noir_stdlib/src/bigint.nr index 66e81f0581..98237a5477 100644 --- a/noir_stdlib/src/bigint.nr +++ b/noir_stdlib/src/bigint.nr @@ -1,4 +1,5 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem}; +use crate::ops::{Add, Sub, Mul, Div}; +use crate::cmp::Eq; global bn254_fq = [0x47, 0xFD, 0x7C, 0xD8, 0x16, 0x8C, 0x20, 0x3C, 0x8d, 0xca, 0x71, 0x68, 0x91, 0x6a, 0x81, 0x97, 0x5d, 0x58, 0x81, 0x81, 0xb6, 0x45, 0x50, 0xb8, 0x29, 0xa0, 0x31, 0xe1, 0x72, 0x4e, 0x64, 0x30]; @@ -30,46 +31,320 @@ impl BigInt { #[builtin(bigint_from_le_bytes)] fn from_le_bytes(bytes: [u8], modulus: [u8]) -> BigInt {} #[builtin(bigint_to_le_bytes)] - pub fn to_le_bytes(self) -> [u8] {} + fn to_le_bytes(self) -> [u8] {} - pub fn 
bn254_fr_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, bn254_fr) + fn check_32_bytes(self: Self, other: BigInt) -> bool { + let bytes = self.to_le_bytes(); + let o_bytes = other.to_le_bytes(); + let mut result = true; + for i in 0..32 { + result = result & (bytes[i] == o_bytes[i]); + } + result } - pub fn bn254_fq_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, bn254_fq) +} + +trait BigField { + fn from_le_bytes(bytes: [u8]) -> Self; + fn to_le_bytes(self) -> [u8]; +} + +struct Secpk1Fq { + inner: BigInt, +} + +impl BigField for Secpk1Fq { + fn from_le_bytes(bytes: [u8]) -> Secpk1Fq { + Secpk1Fq { + inner: BigInt::from_le_bytes(bytes, secpk1_fq) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() + } +} + +impl Add for Secpk1Fq { + fn add(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpk1Fq { + fn sub(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpk1Fq { + fn mul(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpk1Fq { + fn div(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpk1Fq { + fn eq(self: Self, other: Secpk1Fq) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Secpk1Fr { + inner: BigInt, +} + +impl BigField for Secpk1Fr { + fn from_le_bytes(bytes: [u8]) -> Secpk1Fr { + Secpk1Fr { + inner: BigInt::from_le_bytes(bytes, secpk1_fr) + } } - pub fn secpk1_fq_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, secpk1_fq) + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() } - pub fn secpk1_fr_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, secpk1_fr) +} + +impl Add for Secpk1Fr { + fn add(self: Self, other: Secpk1Fr) -> 
Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpk1Fr { + fn sub(self: Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpk1Fr { + fn mul(self: Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpk1Fr { + fn div(self: Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpk1Fr { + fn eq(self: Self, other: Secpk1Fr) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Bn254Fr { + inner: BigInt, +} + +impl BigField for Bn254Fr { + fn from_le_bytes(bytes: [u8]) -> Bn254Fr { + Bn254Fr { + inner: BigInt::from_le_bytes(bytes, bn254_fr) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() } } -impl Add for BigInt { - fn add(self: Self, other: BigInt) -> BigInt { - self.bigint_add(other) +impl Add for Bn254Fr { + fn add(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Bn254Fr { + fn sub(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_sub(other.inner) + } } } -impl Sub for BigInt { - fn sub(self: Self, other: BigInt) -> BigInt { - self.bigint_sub(other) +impl Mul for Bn254Fr { + fn mul(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_mul(other.inner) + } + } } -impl Mul for BigInt { - fn mul(self: Self, other: BigInt) -> BigInt { - self.bigint_mul(other) +impl Div for Bn254Fr { + fn div(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_div(other.inner) + } } } -impl Div for BigInt { - fn div(self: Self, other: BigInt) -> BigInt { - self.bigint_div(other) +impl Eq for Bn254Fr { + fn eq(self: Self, other: Bn254Fr) -> bool { + self.inner.check_32_bytes(other.inner) } } -impl Rem for BigInt { - fn rem(self: Self, other: BigInt) -> 
BigInt { - let quotient = self.bigint_div(other); - self.bigint_sub(quotient.bigint_mul(other)) + +struct Bn254Fq { + inner: BigInt, +} + +impl BigField for Bn254Fq { + fn from_le_bytes(bytes: [u8]) -> Bn254Fq { + Bn254Fq { + inner: BigInt::from_le_bytes(bytes, bn254_fq) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() } } +impl Add for Bn254Fq { + fn add(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Bn254Fq { + fn sub(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Bn254Fq { + fn mul(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Bn254Fq { + fn div(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Bn254Fq { + fn eq(self: Self, other: Bn254Fq) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Secpr1Fq { + inner: BigInt, +} + +impl BigField for Secpr1Fq { + fn from_le_bytes(bytes: [u8]) -> Secpr1Fq { + Secpr1Fq { + inner: BigInt::from_le_bytes(bytes, secpr1_fq) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() + } +} + +impl Add for Secpr1Fq { + fn add(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpr1Fq { + fn sub(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpr1Fq { + fn mul(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpr1Fq { + fn div(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpr1Fq { + fn eq(self: Self, other: Secpr1Fq) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct 
Secpr1Fr { + inner: BigInt, +} + +impl BigField for Secpr1Fr { + fn from_le_bytes(bytes: [u8]) -> Secpr1Fr { + Secpr1Fr { + inner: BigInt::from_le_bytes(bytes, secpr1_fr) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() + } +} + +impl Add for Secpr1Fr { + fn add(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpr1Fr { + fn sub(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpr1Fr { + fn mul(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpr1Fr { + fn div(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpr1Fr { + fn eq(self: Self, other: Secpr1Fr) -> bool { + self.inner.check_32_bytes(other.inner) + } +} diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index d6f0b1e223..b91ed5c4cb 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -161,7 +161,7 @@ impl Sub for U128 { let borrow = (low == lo) as Field; let high = self.hi - b.hi - borrow; let hi = high as u64 as Field; - assert(hi == high, "attempt to subtract with overflow"); + assert(hi == high, "attempt to subtract with underflow"); U128 { lo, hi, diff --git a/noirc_macros/src/lib.rs b/noirc_macros/src/lib.rs index 4337214d69..9a91684320 100644 --- a/noirc_macros/src/lib.rs +++ b/noirc_macros/src/lib.rs @@ -1,3 +1,5 @@ +use noirc_frontend::hir::def_collector::dc_crate::UnresolvedFunctions; +use noirc_frontend::hir::def_collector::dc_crate::UnresolvedTraitImpl; use noirc_frontend::macros_api::parse_program; use noirc_frontend::macros_api::HirContext; use noirc_frontend::macros_api::SortedModule; @@ -16,6 +18,16 @@ impl MacroProcessor for AssertMessageMacro { transform(ast, crate_id) } + fn process_unresolved_traits_impls( + &self, + 
_crate_id: &CrateId, + _context: &mut HirContext, + _unresolved_traits_impls: &[UnresolvedTraitImpl], + _collected_functions: &mut Vec, + ) -> Result<(), (MacroError, FileId)> { + Ok(()) + } + // This macro does not need to process any information after name resolution fn process_typed_ast( &self, diff --git a/scripts/test_native.sh b/scripts/test_native.sh index bc1c47ecf1..9b9aa0ce4d 100755 --- a/scripts/test_native.sh +++ b/scripts/test_native.sh @@ -12,4 +12,6 @@ else export GIT_COMMIT=$(git rev-parse --verify HEAD) fi -cargo test --workspace --locked --release \ No newline at end of file +cargo fmt --all --check +cargo clippy --workspace --locked --release +cargo test --workspace --locked --release diff --git a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr index 8250b31789..3e476107c2 100644 --- a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr +++ b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr @@ -20,7 +20,7 @@ impl B { } } // --- -// Set +// PrivateSet struct C { t_d_interface: MethodInterface, } @@ -55,7 +55,7 @@ fn get_d_method_interface() -> MethodInterface { // --- fn main(input: Field) -> pub Field { let b: B> = B::new(new_concrete_c_over_d); - let c: C = b.get_t_c(); // Singleton + let c: C = b.get_t_c(); // PrivateMutable let d: D = D { d: input }; // Note let output = c.call_method_of_t_d(d); diff --git a/test_programs/execution_success/bigint/src/main.nr b/test_programs/execution_success/bigint/src/main.nr index 046d7d07d5..b93fec370e 100644 --- a/test_programs/execution_success/bigint/src/main.nr +++ b/test_programs/execution_success/bigint/src/main.nr @@ -1,9 +1,8 @@ use dep::std::bigint; fn main(mut x: [u8; 5], y: [u8; 5]) { - let a = bigint::BigInt::secpk1_fq_from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); - let b = bigint::BigInt::secpk1_fq_from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); - + let a = 
bigint::Secpk1Fq::from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); + let b = bigint::Secpk1Fq::from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); let a_bytes = a.to_le_bytes(); let b_bytes = b.to_le_bytes(); for i in 0..5 { @@ -12,10 +11,6 @@ fn main(mut x: [u8; 5], y: [u8; 5]) { } let d = a * b - b; - let d_bytes = d.to_le_bytes(); - let d1 = bigint::BigInt::secpk1_fq_from_le_bytes(597243850900842442924.to_le_bytes(10)); - let d1_bytes = d1.to_le_bytes(); - for i in 0..32 { - assert(d_bytes[i] == d1_bytes[i]); - } + let d1 = bigint::Secpk1Fq::from_le_bytes(597243850900842442924.to_le_bytes(10)); + assert(d1 == d); } diff --git a/test_programs/execution_success/brillig_cow_regression/Prover.toml b/test_programs/execution_success/brillig_cow_regression/Prover.toml index f0a4dc2485..4481382344 100644 --- a/test_programs/execution_success/brillig_cow_regression/Prover.toml +++ b/test_programs/execution_success/brillig_cow_regression/Prover.toml @@ -3,7 +3,7 @@ encrypted_logs_hash = [ "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", ] -new_commitments = [ +new_note_hashes = [ "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index 7f3dd76648..ba51548d9d 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -1,12 +1,12 @@ // Tests a performance regression found in aztec-packages with brillig cow optimization -global MAX_NEW_COMMITMENTS_PER_TX = 64; -global MAX_NEW_NULLIFIERS_PER_TX = 64; -global MAX_NEW_L2_TO_L1_MSGS_PER_TX = 2; -global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX = 16; 
-global MAX_NEW_CONTRACTS_PER_TX = 1; -global NUM_ENCRYPTED_LOGS_HASHES_PER_TX = 1; -global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX = 1; +global MAX_NEW_NOTE_HASHES_PER_TX: u64 = 64; +global MAX_NEW_NULLIFIERS_PER_TX: u64 = 64; +global MAX_NEW_L2_TO_L1_MSGS_PER_TX: u64 = 2; +global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 16; +global MAX_NEW_CONTRACTS_PER_TX: u64 = 1; +global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; +global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; global NUM_FIELDS_PER_SHA256 = 2; global CALLDATA_HASH_INPUT_SIZE = 169; global CALL_DATA_HASH_LOG_FIELDS = 4; @@ -30,7 +30,7 @@ impl NewContractData { } struct DataToHash { - new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_TX], + new_note_hashes: [Field; MAX_NEW_NOTE_HASHES_PER_TX], new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_TX], public_data_update_requests: [PublicDataUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_TX], @@ -101,7 +101,7 @@ impl U256 { unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA256] { let mut calldata_hash_inputs = [0; CALLDATA_HASH_INPUT_SIZE]; - let new_commitments = kernel_data.new_commitments; + let new_note_hashes = kernel_data.new_note_hashes; let new_nullifiers = kernel_data.new_nullifiers; let public_data_update_requests = kernel_data.public_data_update_requests; let newL2ToL1msgs = kernel_data.new_l2_to_l1_msgs; @@ -110,10 +110,10 @@ unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA let mut offset = 0; - for j in 0..MAX_NEW_COMMITMENTS_PER_TX { - calldata_hash_inputs[offset + j] = new_commitments[j]; + for j in 0..MAX_NEW_NOTE_HASHES_PER_TX { + calldata_hash_inputs[offset + j] = new_note_hashes[j]; } - offset += MAX_NEW_COMMITMENTS_PER_TX ; + offset += MAX_NEW_NOTE_HASHES_PER_TX ; for j in 0..MAX_NEW_NULLIFIERS_PER_TX { calldata_hash_inputs[offset + j] = new_nullifiers[j]; diff --git 
a/test_programs/execution_success/double_verify_nested_proof/Nargo.toml b/test_programs/execution_success/double_verify_nested_proof/Nargo.toml new file mode 100644 index 0000000000..3ead649c87 --- /dev/null +++ b/test_programs/execution_success/double_verify_nested_proof/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "double_verify_nested_proof" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/double_verify_nested_proof/Prover.toml b/test_programs/execution_success/double_verify_nested_proof/Prover.toml new file mode 100644 index 0000000000..2a2b4b3358 --- /dev/null +++ b/test_programs/execution_success/double_verify_nested_proof/Prover.toml @@ -0,0 +1,5 @@ +key_hash = "0x13fd5b632ce9e9d12c9ac56c150ed09413df3edf40d1b7ab8ced9f262ec61b29" +proof_b = ["0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x0000000000000000000000000000004
bdfb9b586a637ceebd99ff26dcd3af427","0x0000000000000000000000000000000000265c2a5caf8e033e32d192807f5353","0x000000000000000000000000000000c0ab1db6ea40ac087cdc82c4a61ab00c86","0x0000000000000000000000000000000000010800ea8010f4bd3dd432d1cc11ed","0x000000000000000000000000000000eb3db3c41e3e636d686fd2903b1b913a01","0x000000000000000000000000000000000009bbab6b90377114c9e33d2a302226","0x000000000000000000000000000000758726e60ef4b211cf1c965fe08293365b","0x0000000000000000000000000000000000290ec193bc7f4f9006b9cea136bff5","0x0000000000000000000000000000005a2a389cd1702b3aa37f30ed974147d343","0x00000000000000000000000000000000001d83087d6efe0db3f482730b8d5e32","0x000000000000000000000000000000ad015051ed84c11d061e63eddbc3c0417a","0x0000000000000000000000000000000000155011c8b0167ff694740c48d67683","0x00000000000000000000000000000010c638a3b13dba3e77be3f10a3d096927c","0x00000000000000000000000000000000002372b9853214a1f76e5636dc26f146","0x00000000000000000000000000000005d9ca201c07bd4216689677feb9227715","0x000000000000000000000000000000000001dcf09921797dffb8eb21abef187b","0x00000000000000000000000000000070af16c9644b777dcf84d69e820e1ed895","0x00000000000000000000000000000000002d5e8f7eb7a4e20964dd94dc141534","0x0000000000000000000000000000003636871dbe453b366c3351be6e84144683","0x0000000000000000000000000000000000206464e290e4f4764365038ac77edf","0x000000000000000000000000000000175c20da35cc833dd542af57de9b62a2da","0x00000000000000000000000000000000001d2e31de3715e05ff6278f88e5a0db","0x000000000000000000000000000000328610e4eabb48be78d3c75f7c159205c5","0x000000000000000000000000000000000026720634b8076fee0a17b358b04653","0x0000000000000000000000000000000e5f48906892ffbff91e8b58ceabba0949","0x000000000000000000000000000000000013c349df687926ccb712622fc72a36","0x000000000000000000000000000000a4b8c9046c7e7e4cc19bbf9a367668eac7","0x00000000000000000000000000000000002a81128e53672c33bb0dae0ff18f41","0x000000000000000000000000000000edb79df57c4a2303ed1e5c2d7ed1e1bdaf","0x000000000000000000000000000000
000018d3cea4ce204eafd70c0ded024650","0x000000000000000000000000000000e5f82856854fe0a2d587f6a9ae8555f321","0x0000000000000000000000000000000000235480ec2adc05f04261054345e568","0x00000000000000000000000000000083607465f60b70b092f606853f4d9e96eb","0x000000000000000000000000000000000006569e3a3174bcb71efe46f7fb7e0f","0x000000000000000000000000000000cb4d5fc546f20f63e3b7cf60341956f36f","0x00000000000000000000000000000000000e14b1932630bf606a637eabb7c80f","0x000000000000000000000000000000786f31c2e082aa7e398e6323bb48a27472","0x00000000000000000000000000000000002dd72746f5e5a4a438def122ae6bba","0x000000000000000000000000000000d007be60a28b744e49279fab277c8bd623","0x00000000000000000000000000000000000e52e2b940b9cd8d001209cc40f7c8","0x000000000000000000000000000000dd4357e24a1bda0b5a6c5eee657cfe9091","0x0000000000000000000000000000000000047bb24b20feb0b66089a96671c901","0x0000000000000000000000000000003fe7f42f34e3360ef0fa8bd9c17e6190a3","0x0000000000000000000000000000000000161d17a3848118e91b435b553d34e9","0x216fa2905e105e0c767687f9b5e81c2e4ce03abe2993ac8dcd9e8d89e088966f","0x1288ba942d41c7f4b048e125454253bc7d7ffc0875365c0b8f75a2bb3ea90b42","0x1ad706f84cffcc62fa030f1bc57cb478a687aa74c1019beeda9bab4e40d35373","0x03050c8016b8041a557a46840ab4166a9c2531eb7c3985a447996a334e0caf5f","0x2b3c485da75bdaef8cec120bd08bc21e3ff717740114d13d3811006215a1fb24","0x008fc8c76c4d8cbba8653bf0919c047d379941be60c7afc7250bc6bfc5f29ad5","0x1993ae2a0da54e5e643533fdefbf54a0df21115b2ee79a63a7f477c2c9c4a5d5","0x22520fa7fde2d72b9776c07c9b897ef7ce48f8a7937ec0cacb01d3e23f72b78a","0x259b7b9c1dbfe88d613102f0e8548f0c770a1c83876b26a5cb4b6790740cb487","0x043006102e519b8011d089f51811337fbdedc856a73842f7c8197be176b08d38","0x2222bd509df909ce38b67b3172b24c8ce1e0e1dd0d811f4fae6957e3418415ac","0x1b1204474652fa85979f0274145680718bed80466f4c91ad58f37df1b4fe2395","0x08d57251b42c0697535617ae239d7f3ef9d1558c1bb71fa01c68e7b5fd266139","0x04ca7f21f1d0ba50ecf00c615d18bf8f7291bb04f513cbef78fb6d03ed9b0cb2","0x070ae1119c80846863a4cd971e535
ff87fe34473eb5730b14e5b30212b7b78a1","0x1128027ded5032cc265c96ff81d76e2ce06420702fd4e5bc4e24fda695961651","0x1ef7a9e5885b934eee2b44335157309de2f60519e50a8471e5e24495dff2a9fe","0x2d0dad89e5633da796c0c897804575879bc5dc7ad3805b44260943101ac9609e","0x287edcbd60e9d636ba1cd1c9ff3ec2b71b694112c65876525f5e2f8209cd747f","0x24b1157a1cb5bdbd2829de066b8c5573584f9b8638bf9bad476a1fe1172da4b9","0x1f9825731638cd1c43f7cf035b808a1704f122453318cb88fe3b1164f034e170","0x07003a6552f3a6ab1ad3e0717be0af049767b554ff88986c4e48224632523405","0x288002c2ff29874077b2c216a35cb61ecc97d12750a3a86574d50acd42607095","0x0a12fc37918ce7dcbd0d354a05bdbb409a8e4530d86f3d8ce07231240590f65c","0x2ec631b05fc693b07286eecf6b6ac1aef0d073cdced8e050244ec7cf4e8f6e42","0x107bc98da225efe7749d51b9966c3edd6c245f2e5cf183a924ba982817e4525a","0x2ca603f77ea0ca42b6f38cd43bc3cc09442906373a2f197fdc976533066ac343","0x138ace5653809375aa9d95240fa9b6508860a471aed70bcc8b7dd52ae34809f3","0x21e1eb924951881c3d0ce5657d2e26a3e5150ce8f49c9e4d0476c5fdf1e43a54","0x2e2daec93f5e94f6784ce569883cf285da12244b38fb001b94bfb99bb4de060c","0x186a8d30c973bef6286115865a690a2528adbeea8376e5221fffeb6a135d9904","0x1e0d9d90628be31ebc16ef1d85d5f9e6fb8cb57e6a74e576f958cf21db45042e","0x124ceb5e1d9da6d0fe163e961643bb0423c926ef4e0c583eaba9e32d99ec6c7c","0x2db34cc38a50bfea50750830710c13b4d80f4ec0e8df8f186047ee36f338eeeb","0x0b174aa403b42235d5bdde8e9f5bb6c52ae62fec2884334cbe3e53418bd2463d","0x1571ebd9c3854c2f63418f206c6937495450ab9238a238b9c63fbf5867378c5b","0x24f92d1ab27e5810e5b7f4b31254526822f866602922258135c5eb5a2b21ca04","0x20cc7f5ba8df67d9c95642e2662654eb2305c6a280ce1747aec88a581ee50647","0x24112b99f63bbda7487709396dff22aae89ae809263021b65503ff7f809c7e38","0x06805c80f64efd1fa7f08382c9981aad9cecad78808da670477566674141bc48","0x146d4801d6f5898051ee0d7c95375a65ea0e6deeac6ffee1d9b9cf64da72dc3e","0x000000000000000000000000000000425b99a5c96b22ba0286d9ebeecf8e4559","0x0000000000000000000000000000000000110be4b8fe46a96303c205d3a1d61d","0x0000000000000000000000000000
00d9ff7ae757f2f0c91d1f1e71fac1b27b74","0x000000000000000000000000000000000009b0c285f6c221f6eba93b1e330ac4","0x0000000000000000000000000000004055cd5738a25ab1860a1e35555962dc19","0x00000000000000000000000000000000001a8726ccf54e17cf1b005e3e04879a","0x0000000000000000000000000000007be4dc343e9c2e0d4a9156f1ef9769f65a","0x00000000000000000000000000000000002b0e96f68f6509615ca0544dfa3107"] +public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] +verification_key = ["0x2260e724844bca5251829353968e4915305258418357473a5c1d597f613f6cbd","0x0000000000000000000000000000000000000000000000000000000000080000","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000080000","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000003","0x0000000000000000000000000000000000000000000000000000000000000004","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000006","0x0000000000000000000000000000000000000000000000000000000000000007","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000009","0x000000000000000000000000000000000000000000000000000000000000000a","0x000000000000000000000000000000000000000000000000000000000000000b","0x000000000000000000000000000000000000000000000000000000000000000c","0x000000000000000000000000000000000000000000000000000000000000000d","0x000000000000000000000000000000000000000000000000000000000000000e","0x000000000000000000000000000000000000000000000000000000000000000f","0x0000000000000000000000000000000000000000000000000000000
000000010","0x000000000000000000000000000000ba765ed919550454064328e0fd7c51ff4a","0x00000000000000000000000000000000000418b2f4104c289eb20cb95344c850","0x0000000000000000000000000000006defa500aab13c8cf3c00117db573bef2c","0x000000000000000000000000000000000026dea3ea8fb7e77b5bfa8443397dc7","0x0000000000000000000000000000009a5c33c4054817f3402e68aeca4728a405","0x00000000000000000000000000000000002abf5ea67ec384cb2e5998c7a48b3a","0x000000000000000000000000000000ee78817f7d959ea45abb27404e3371c708","0x0000000000000000000000000000000000260a979e8190a83b0bce1351b92d3c","0x000000000000000000000000000000ec447bd83a83883ce4c11573ce24845c74","0x000000000000000000000000000000000005b23c2076f50d10baa061a67b9200","0x00000000000000000000000000000058ffc16cfb64ec06a56a2b1a9047fb8f0c","0x000000000000000000000000000000000011d2f5833d720e1d0a02749471e7ad","0x000000000000000000000000000000416dd6c8c0d1cbb185b3c3197eac767d0b","0x000000000000000000000000000000000023b9c5a4e525926d64247ec92e0baf","0x000000000000000000000000000000a55f5f52ebc8936a58e413a1068d94d376","0x00000000000000000000000000000000000be3f377ccc88a6cb5df6f230da95e","0x00000000000000000000000000000070a162a08d4d4800f450af94888f8f3480","0x0000000000000000000000000000000000085883b02590372a7b36a1c57db4c3","0x00000000000000000000000000000045b0b661ea73930ee3327ccff8a0ca9ce1","0x00000000000000000000000000000000002854cab8629792eb07e9ef81bc46ee","0x00000000000000000000000000000067f365021e0e42117c43a39419d1d9cc73","0x000000000000000000000000000000000022c370b38f0a97eb3d718146f2284b","0x00000000000000000000000000000016de6670aba605233072b8eecfa9069b06","0x000000000000000000000000000000000002c29c49d66457bcbd4fa5bf6096fd","0x000000000000000000000000000000e32e8ce4f18ba30ce53245044d0c60508a","0x00000000000000000000000000000000001170220489121b8eedd58a4b5599df","0x000000000000000000000000000000139ed828b410380d053ec0f056656f5703","0x0000000000000000000000000000000000072aebdce25ba333c86769adec1362","0x000000000000000000000000000000aa352ee565f91fc2b73323fc
824bc14636","0x00000000000000000000000000000000001f3e272a192808ec9283ee3bb4df4b","0x00000000000000000000000000000005c72c8c88be0259ae226ccb0488452b4b","0x00000000000000000000000000000000001c68407d694502b929b77cbbab8374","0x0000000000000000000000000000003716bda8267f29931ed0aa811e4607f1c6","0x000000000000000000000000000000000007d888936af2141bb2f6823a587e81","0x0000000000000000000000000000004cf1a4f39c5363f70ecc9e433d751ea529","0x00000000000000000000000000000000002e8a81232ec84e48032178f1ee6edb","0x000000000000000000000000000000388e8265061fa0c92c96fc85d99bac7891","0x00000000000000000000000000000000002e3c516222565332e6e7362400bc5f","0x0000000000000000000000000000003a68d13661a0906e5828fe8271a336bf64","0x00000000000000000000000000000000001412d3e67497c98e5ec2aaee8779f5","0x000000000000000000000000000000b5d123498733b5279d8bcbade0d8345ef7","0x00000000000000000000000000000000000fa572890537089a5fb36953e7a1ca","0x0000000000000000000000000000004d8ff057fc9936a693035266c80c6ea57d","0x00000000000000000000000000000000001907a614968d777fcc506f639799f6","0x00000000000000000000000000000010769533212d3cafbf6ac378c8055c33a2","0x00000000000000000000000000000000000eac32851272327acdc0890792dfb7","0x000000000000000000000000000000e3e32f343643d319a977beb0c2b0ab9b31","0x00000000000000000000000000000000000c10c4c9dce6ff648ef70f54d45ba6","0x00000000000000000000000000000025721304165b9b313b94cf2c77b61dc1ef","0x000000000000000000000000000000000024b8083b0f323c2703a7255caa7078","0x0000000000000000000000000000002b860372c65049c88f6532cbd360917b11","0x000000000000000000000000000000000011ee2ac2bc36cdfdc107eca47369f3","0x0000000000000000000000000000001c1b0233882acb5a78a977642e4dce91d5","0x000000000000000000000000000000000020922a70853993b3516eeb01d7c8a4","0x0000000000000000000000000000001f90b5fade69a55a2da8d2db3c62b62d7c","0x0000000000000000000000000000000000173312bb89c6722b548ff87a7487a2","0x0000000000000000000000000000009d618ffd933cf58a8a0953dc76f97cf108","0x00000000000000000000000000000000000ddc3b6d8e59cf0996c
a71ad4132ca","0x000000000000000000000000000000ec4c6a253f431d3f3fc06aa0e5b0448b8c","0x0000000000000000000000000000000000153193287060386695f4f2d0d3525d","0x0000000000000000000000000000004bd25585edb9319128045c005d48491b1e","0x00000000000000000000000000000000001170f0ece62f8c572bca96b141d27f","0x0000000000000000000000000000003dd2e37b8edb1f56b785809d7710bf1c88","0x0000000000000000000000000000000000246cd041690f653f88ed0c56ad282a","0x00000000000000000000000000000034bc8a00ce9d452888e5fc2b5a7e14fed7","0x000000000000000000000000000000000026153c937447356a0c6d6be09d85eb","0x000000000000000000000000000000555388ad9364679246b07992f84b4e91b2","0x0000000000000000000000000000000000189da022421fbd8dfd7973084d978e","0x000000000000000000000000000000e8c0f9753e2a5a35acec051fafe2cecce5","0x0000000000000000000000000000000000285311c5e9a4cbb56a3f04f29d5443","0x00000000000000000000000000000092d2d0ac76a1be7f1fad96cbd997175312","0x00000000000000000000000000000000002436400260c9d3180beedd0bf49fec","0x000000000000000000000000000000887d86d95387bbb29616cc5c41ee4a2669","0x0000000000000000000000000000000000063bf32f8addf7a3e1cf6cd223cb71","0x000000000000000000000000000000d841dc7d9da6cc699e8377b2a04723fea0","0x00000000000000000000000000000000002ce091428268c212a2bcfea0edb338","0x00000000000000000000000000000012fe4771092fa47e4d6050701527133f09","0x00000000000000000000000000000000002f36672865c5ae4976486fdaf2d81d","0x0000000000000000000000000000008e6bced56a3d94dfe9d476da3a424b8eff","0x00000000000000000000000000000000002d6303cf28aa721f4e5348a0d83642","0x0000000000000000000000000000008c5807dace05b2079d200f7f71caffdaf7","0x000000000000000000000000000000000008f7beb50cb16f3b6210aff1bdb05d","0x0000000000000000000000000000004f9ee08a49536eb54a238b982c4dfd5446","0x000000000000000000000000000000000014f55e7065eabacf1a7d6cbf1f6765","0x00000000000000000000000000000021150153ec654b02a66d9bea056185877e","0x00000000000000000000000000000000000e7bf50a142b21057bcfd340a5e77c","0x00000000000000000000000000000038110629263a662f10464b
375f988cccda","0x00000000000000000000000000000000001964a0ab814f71282cd159df492710","0x000000000000000000000000000000b9310dd49ea52ba735b9654ebced7bc67b","0x000000000000000000000000000000000019ad72f92554ce44921ca3f420f995","0x000000000000000000000000000000d67d7e81fa6e1cdfae6d84510a8cb7e257","0x00000000000000000000000000000000000a6ec9d85c10a85e8f31eaedb4e459"] +proof = ["0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x00000000000000000000000000000063cb03b1d83ae3942e11ca8ec63055898b","0x00000000000000000000000000000000001edaf70d547a857fbed6a9ff8a38c9","0x000000000000000000000000000000097fb881332193ff4489e213f600e6a007","0x00000000000000000000000000000000001f2903742639c3595d22b96d4d9c21","0x000000000000000000000000000000bca7215bb1bcdde52ed9cf845b7e54072d","0x0000000000000000000000000000000000188bd12b19073eb01e8be5bda41b3e","0x0000000000000000000000000000007d1a114656606c391bfb286ea4e14062a5","0x000000000000000000000000000000000026d8a3
b8821da41b6b1d6b85872260","0x000000000000000000000000000000c49078b857741b82cba39d8a394c1876c1","0x00000000000000000000000000000000002f9b9f76f80a4ff456e60c024f8d03","0x0000000000000000000000000000004bab3e60680935219213ea32be70ec5100","0x00000000000000000000000000000000002c45bda56f0115cfde2678889694ab","0x0000000000000000000000000000006434e56313172088d5a6b10fdd1b94b4ca","0x000000000000000000000000000000000007ad41e7980534fc2f89e8ad7366ad","0x00000000000000000000000000000023d769c68ef65f0b4f06a01e655fb265e7","0x0000000000000000000000000000000000008d3b5d5b201ed6773c369fe20d10","0x0000000000000000000000000000005eacdd2121ba4b1cf0df09632df6991fcf","0x0000000000000000000000000000000000005e98e857c8c1eb16cef913e44f90","0x0000000000000000000000000000003449da35dc7c0b67b0c3e99ced603ea381","0x000000000000000000000000000000000022347c8daec6739b183413a787fd13","0x000000000000000000000000000000df23d8f1ac4ddfced428737db15e63f603","0x000000000000000000000000000000000015e03670ba72d84269d764d8f8e725","0x000000000000000000000000000000457a7f854dbab545c8c94ccdb8e4b9ad45","0x00000000000000000000000000000000000a268fc41b7031912cec59dc0a7078","0x00000000000000000000000000000022fcb55824b67af33225f8f2e614fbbdb4","0x0000000000000000000000000000000000235f698e6aee7bf8ca94f4a44db006","0x000000000000000000000000000000a327da390bd3e01e4a7b639605fdfd9c42","0x0000000000000000000000000000000000210196c4fb53d660a3824867b2b1c5","0x000000000000000000000000000000728fb44750fa2b956221bd441fa61e32d6","0x0000000000000000000000000000000000073db9e2cafdf0fe22b5090855533e","0x0000000000000000000000000000004fe310e93730876891eebab46db9496dbc","0x000000000000000000000000000000000007d3574fe79c87011abdbd51a46670","0x000000000000000000000000000000adc522f42e085c51403fc50c83f35904b9","0x00000000000000000000000000000000000d2d9ef8fc0031b4568842a99b34eb","0x00000000000000000000000000000098586d928c8abc7cc56d571c8eded52168","0x000000000000000000000000000000000024279c001a40e94d3d149ec01a468a","0x00000000000000000000000000000066122aaf4
7d9d5060a2ce1d17cc5201be0","0x00000000000000000000000000000000001c21031d83d52e27867a611229d2ca","0x000000000000000000000000000000838dfc066499f7715682f755b42f3a4869","0x00000000000000000000000000000000001f816d2c5b2e903496f1443cb91de3","0x0000000000000000000000000000007ef917b6df805f430f8a0833942a7c3094","0x00000000000000000000000000000000000a9cefe716f31dbe37485179d60f0e","0x00000000000000000000000000000028adb1040bd0c07448de51d5cac9fd0495","0x00000000000000000000000000000000000c66b25a22c8b3ba82ec09ab4bdef3","0x2cc791d253f03f47cc88f7f0aeae481762f4aa6426712772544aaeca72466cb7","0x14197950f448f679eeff75c4e83dac9f0ebd5aa194709ea3875fb4e4b15bc2f2","0x1a92022c2ed8f8a41e3f392e22f1875f6916543bbb22c3aaf50d703de649c381","0x2ee77a26e78d5e1093dabd3612beee4b515a4f159992138e13ecd3f0afcfba18","0x2c280cba627b147142a2d333ee856000298708f9b5df0cc8d23c26d0936d6869","0x1b2569bb6f6b60b6f743ff892a39a490770d4ad40a961a06149d4968b0487a40","0x2f80351e43621d69b7e620338b2822e15dec9e6a2de16e8d04bb559153cd53a3","0x15a78b8ae9b3be431b609250b69c7cb746c6a689b2122150f258c6f7d67409fc","0x1334c47f273be542576813933e89a9130a342846272b39a2eab3ab7fc022d5fe","0x1031bdcafc5c0dad81c8b6c4931c9b442cd0c8a0bb9a729cc2f6bf0a18dc1b82","0x177f92f0cef76c5c45f55d16fa2be426354cdd4af6ac9aaad479c9b47f88656d","0x0064c0e0ec8984d612189e5287d59eedc1a6de52fc78bf72028f744350c27a0e","0x2c06222cf0d415c976e6904f1706b77cf438636ada3222e1c31c4957d6877dac","0x173da534b7001f44f19bb3e3f8601ac94fbf90b2e39b7d4079d8fac2d65102ea","0x012909bcdbd1167010cf0084028e851f3448f58946f4951b1b8d544d86b138c8","0x2975c3987f110c06bd8ced1d8bb0d398ac72c6f196ea639bdde58fa4b899d4a0","0x05c196fb2f6ccfd92a38ae526af85bccc3695ea0e2561e7a211c60360187602d","0x18a288590dd0cbfe5b7652458c9caddc9eac2f08e5822b64141ed1b4e805bda3","0x0cd08c41605b22a7ae31c3961486f645a32bff0ccaef63b6d661ef356db78560","0x05d5e48184693259f722f84ea48f9b84667d1e9db19e1381b2279fe24b01484b","0x2187a6f6a2398e5f0137880a983ff6b682b5a7c2b62e4bdfff6ff6becd0d53ab","0x1d4764ca9346e8ac48675320521e0daba651f4
80efe932302e8a9673580fc0d8","0x00cfcb920adeb0293acf26e63aeac4489622e4c806b93f1c72f8491cba3d0196","0x1bcd6a556800b8385ba1250afd69999fe2bb5518a6ba2cc461a4afba21ffbedb","0x11a15b3c8ef0e4ac0ff151fba72b922f6c005519151a4f88557352265944aeea","0x063d550a154f2ce80b08fb169d137fa96dcea6a6c489e98e1390aa9a5db18928","0x25da993132041b9f667de044194f5c6b0cdae961cdea5f6dbbda8595f213ac08","0x22fcecc2e3794814bbb84700031cd75ec9817201c8c88df2e86407a14412f902","0x01583d25d2f91d646da02a520d3dbf758b0a0590a533bd1417a717fd0cd18915","0x18ebacffdc81e15547232dfc1a0e31ec2848a1e5b9c8509a92432c2549d93091","0x20a3d15aa70d04a841802fe1d990f56c6b9e6eadc17da2c0dfd2a817e6cf0430","0x0b497cc2e54412ce07c52effdce6c01de2c1a0e1d095a2a37f5351232400c0a1","0x14419bb69d02675b8d58e60ce88a2f4b6a43674461e4015e2e302285a42c5784","0x0c84db03ff77d0729bb68eab2d6d697b7caebd4ea3db781499492a6f0ef67765","0x1a676b1c6b0ab1c85b31af681e05751296c3d0a1a883668f5fe971827ce86fc9","0x08da949bf7603bfe20f3c152abe727051c6306cff322197e8fa56b390f565b5b","0x1fd77e041239f94e907dc3ae3069a70cbff726b9d8b3a368a4910c8a070a9c9a","0x03755d83a4f0fdfbb4fd1b2b465842e1bb707a419c2952a2ca9faba50d4be379","0x0ee90c8166adcb238d85c72a85db2248353610c55390a2ed54e59dd1c35c12d2","0x170bcd78efaa1b19bcfd065c2ec60b48aa1e62465df73e62f3bd291115315144","0x015d60e5cc5c7d67853993261bd9e3c6e56f95dee8724ce79c7601ee10c1a731","0x000000000000000000000000000000f0a8b99d65fc1555bafb688233a6489aea","0x0000000000000000000000000000000000043849f038ec96c8c1c6e242351361","0x0000000000000000000000000000001ad41d3dfebb280623d5b325f0a7aa38f7","0x00000000000000000000000000000000002e5f2119536daa9e6d1f9b82b797dd","0x000000000000000000000000000000e5570c2b6e74d0994e2fc8be1a9dab4160","0x00000000000000000000000000000000002ed426a78ed52d4c13f2c651a6d4ec","0x000000000000000000000000000000aba14637487e4d3ca30dc397416696c85c","0x000000000000000000000000000000000005ae1eb3eee0cdf5e5c7bb0ac9be07"] diff --git a/test_programs/execution_success/double_verify_nested_proof/src/main.nr 
b/test_programs/execution_success/double_verify_nested_proof/src/main.nr new file mode 100644 index 0000000000..0466f2a226 --- /dev/null +++ b/test_programs/execution_success/double_verify_nested_proof/src/main.nr @@ -0,0 +1,28 @@ +use dep::std; + +fn main( + verification_key: [Field; 114], + // This is the proof without public inputs attached. + // + // This means: the size of this does not change with the number of public inputs. + proof: [Field; 109], + public_inputs: pub [Field; 1], + // This is currently not public. It is fine given that the vk is a part of the circuit definition. + // I believe we want to eventually make it public too though. + key_hash: Field, + proof_b: [Field; 109] +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} diff --git a/test_programs/execution_success/double_verify_proof/src/main.nr b/test_programs/execution_success/double_verify_proof/src/main.nr index ce087dc4e6..e4c6926efb 100644 --- a/test_programs/execution_success/double_verify_proof/src/main.nr +++ b/test_programs/execution_success/double_verify_proof/src/main.nr @@ -1,12 +1,13 @@ use dep::std; +#[recursive] fn main( verification_key: [Field; 114], // This is the proof without public inputs attached. // // This means: the size of this does not change with the number of public inputs. proof: [Field; 93], - public_inputs: [Field; 1], + public_inputs: pub [Field; 1], // This is currently not public. It is fine given that the vk is a part of the circuit definition. // I believe we want to eventually make it public too though. 
key_hash: Field, diff --git a/test_programs/execution_success/regression_4124/src/main.nr b/test_programs/execution_success/regression_4124/src/main.nr index b47bf28d46..49ff68ee6a 100644 --- a/test_programs/execution_success/regression_4124/src/main.nr +++ b/test_programs/execution_success/regression_4124/src/main.nr @@ -14,14 +14,14 @@ pub fn storage_read() -> [Field; N] { dep::std::unsafe::zeroed() } -struct PublicState { +struct PublicMutable { storage_slot: Field, } -impl PublicState { +impl PublicMutable { pub fn new(storage_slot: Field) -> Self { assert(storage_slot != 0, "Storage slot 0 not allowed. Storage slots must start from 1."); - PublicState { storage_slot } + PublicMutable { storage_slot } } pub fn read(_self: Self) -> T where T: MyDeserialize { @@ -32,7 +32,7 @@ impl PublicState { } fn main(value: Field) { - let ps: PublicState = PublicState::new(27); + let ps: PublicMutable = PublicMutable::new(27); // error here assert(ps.read() == value); diff --git a/test_programs/gates_report.sh b/test_programs/gates_report.sh index 4192c58137..3b0b4d9e14 100755 --- a/test_programs/gates_report.sh +++ b/test_programs/gates_report.sh @@ -2,7 +2,7 @@ set -e # These tests are incompatible with gas reporting -excluded_dirs=("workspace" "workspace_default_member") +excluded_dirs=("workspace" "workspace_default_member" "double_verify_nested_proof") # These tests cause failures in CI with a stack overflow for some reason. 
ci_excluded_dirs=("eddsa") diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt index 7ac440c335..c472e82873 100644 --- a/tooling/debugger/ignored-tests.txt +++ b/tooling/debugger/ignored-tests.txt @@ -7,6 +7,7 @@ brillig_nested_arrays brillig_references brillig_to_bytes_integration debug_logs +double_verify_nested_proof double_verify_proof modulus nested_array_dynamic diff --git a/tooling/nargo_fmt/tests/expected/contract.nr b/tooling/nargo_fmt/tests/expected/contract.nr index b80efeeb69..a03b877470 100644 --- a/tooling/nargo_fmt/tests/expected/contract.nr +++ b/tooling/nargo_fmt/tests/expected/contract.nr @@ -10,14 +10,14 @@ contract Benchmarking { use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, + log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, types::address::{AztecAddress} }; struct Storage { - notes: Map>, - balances: Map>, + notes: Map>, + balances: Map>, } impl Storage { @@ -26,12 +26,12 @@ contract Benchmarking { notes: Map::new( context, 1, - |context, slot| { Set::new(context, slot, ValueNoteMethods) } + |context, slot| { PrivateSet::new(context, slot, ValueNoteMethods) } ), balances: Map::new( context, 2, - |context, slot| { PublicState::new(context, slot, FieldSerializationMethods) } + |context, slot| { PublicMutable::new(context, slot, FieldSerializationMethods) } ) } } @@ -74,17 +74,6 @@ contract Benchmarking { fn broadcast(owner: Field) { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - - unconstrained fn compute_note_hash_and_nullifier( - contract_address: AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - preimage: [Field; VALUE_NOTE_LEN] - ) -> [Field; 4] { - let 
note_header = NoteHeader::new(contract_address, nonce, storage_slot); - note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) - } } // Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 diff --git a/tooling/nargo_fmt/tests/input/contract.nr b/tooling/nargo_fmt/tests/input/contract.nr index d10bfb745b..a03b877470 100644 --- a/tooling/nargo_fmt/tests/input/contract.nr +++ b/tooling/nargo_fmt/tests/input/contract.nr @@ -5,30 +5,34 @@ contract Benchmarking { use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; - use dep::value_note::{ - utils::{increment, decrement}, - value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, - }; + use dep::value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}}; use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - log::emit_unencrypted_log, - state_vars::{map::Map, public_state::PublicState, set::Set}, + log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, - types::address::{AztecAddress}, + types::address::{AztecAddress} }; struct Storage { - notes: Map>, - balances: Map>, + notes: Map>, + balances: Map>, } impl Storage { fn init(context: Context) -> pub Self { Storage { - notes: Map::new(context, 1, |context, slot| { Set::new(context, slot, ValueNoteMethods) }), - balances: Map::new(context, 2, |context, slot| { PublicState::new(context, slot, FieldSerializationMethods) }), + notes: Map::new( + context, + 1, + |context, slot| { PrivateSet::new(context, slot, ValueNoteMethods) } + ), + balances: Map::new( + context, + 2, + |context, slot| { PublicMutable::new(context, slot, FieldSerializationMethods) } + ) } } } @@ -70,17 +74,6 @@ contract Benchmarking { fn broadcast(owner: 
Field) { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - - unconstrained fn compute_note_hash_and_nullifier( - contract_address: AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - preimage: [Field; VALUE_NOTE_LEN] - ) -> [Field; 4] { - let note_header = NoteHeader::new(contract_address, nonce, storage_slot); - note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) - } } // Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 From e80c5f73a4cdcba3f5cf44576c605ba1e611a2ab Mon Sep 17 00:00:00 2001 From: NaijaCoderGirl <150683513+NaijaCoderGirl@users.noreply.github.com> Date: Tue, 27 Feb 2024 11:59:22 +0000 Subject: [PATCH 13/17] chore(docs): correct 'Edit this page' URL for dev docs (#4433) # Description ## Problem* The "Edit this page" button in the generated documentation for the `noir-lang` project leads to a 404 - Page Not Found error for documents in the development version. This issue is due to the button's link containing `/processed-docs` in the path instead of `/docs`. The problem affects the development version of the documentation, whereas versioned documentation (e.g., v0.23.0) correctly links to editable markdown files on GitHub. | Before | After | | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | | ![Before](https://github.com/noir-lang/noir/assets/150683513/260f8eb5-4ef9-46db-ba39-ae836d2bb7de) | ![After](https://github.com/noir-lang/noir/assets/150683513/087e2c8c-b9dc-4c35-b824-2b95742872f6) | ## Summary* This pull request fixes the incorrect URL generated for the "Edit this page" button in the docusaurus configuration. 
By modifying the `editUrl` function within `docusaurus.config.ts`, the path now correctly replaces `processed-docs` with `docs` for the development version of the documentation. This change guarantees that contributors are directed to the correct GitHub page to edit the documentation, thus eliminating the 404 error previously encountered. ## Additional Context The issue was identified when attempting to edit pages from the development version of the docs. The button's link incorrectly pointed to a non-existent path due to the inclusion of `processed-docs` in the URL. ## Documentation* - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- docs/docusaurus.config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index 1b6c65d513..49566c5c38 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -38,7 +38,7 @@ export default { }, }, editUrl: ({ versionDocsDirPath, docPath }) => - `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath}/${docPath}`, + `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath.replace('processed-docs', 'docs')}/${docPath}`, }, blog: false, theme: { From a25d5da32c1eb868fa400c61245edbd3db72ba3b Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 27 Feb 2024 08:33:39 -0500 Subject: [PATCH 14/17] feat: Sync from aztec-packages (#4438) BEGIN_COMMIT_OVERRIDE chore: bootstrap improvements. 
(https://github.com/AztecProtocol/aztec-packages/pull/4711) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French --- bootstrap.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index 1f9506904a..54129c3d61 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -16,7 +16,7 @@ if [ -n "$CMD" ]; then fi # Attempt to just pull artefacts from CI and exit on success. -./bootstrap_cache.sh && exit +[ -n "${USE_CACHE:-}" ] && ./bootstrap_cache.sh && exit ./scripts/bootstrap_native.sh -./scripts/bootstrap_packages.sh \ No newline at end of file +./scripts/bootstrap_packages.sh From a112b303650e71db7406011edde92942c571654e Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 27 Feb 2024 09:55:34 -0500 Subject: [PATCH 15/17] feat: Sync from aztec-packages (#4439) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit BEGIN_COMMIT_OVERRIDE chore(aztec): Change function limit to private function limit (https://github.com/AztecProtocol/aztec-packages/pull/4785) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French Co-authored-by: Álvaro Rodríguez --- .aztec-sync-commit | 1 + aztec_macros/src/lib.rs | 24 ++++++++++++++----- bootstrap_cache.sh | 2 -- noir_stdlib/src/collections/map.nr | 2 +- noir_stdlib/src/sha256.nr | 2 +- noir_stdlib/src/sha512.nr | 2 +- .../execution_success/regression/src/main.nr | 6 ++--- 7 files changed, 25 insertions(+), 14 deletions(-) create mode 100644 .aztec-sync-commit diff --git a/.aztec-sync-commit b/.aztec-sync-commit new file mode 100644 index 0000000000..9acc65d8b9 --- /dev/null +++ b/.aztec-sync-commit @@ -0,0 +1 @@ +e69b58660ff843350e1e098d8f1a84f4ce3d3c34 diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 0ccc421d3b..156ba1d5b0 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -63,12 +63,12 @@ impl MacroProcessor for AztecMacro { } const FUNCTION_TREE_HEIGHT: u32 = 5; -const 
MAX_CONTRACT_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); +const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); #[derive(Debug, Clone)] pub enum AztecMacroError { AztecDepNotFound, - ContractHasTooManyFunctions { span: Span }, + ContractHasTooManyPrivateFunctions { span: Span }, ContractConstructorMissing { span: Span }, UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, UnsupportedStorageType { span: Option, typ: UnresolvedTypeData }, @@ -84,8 +84,8 @@ impl From for MacroError { secondary_message: None, span: None, }, - AztecMacroError::ContractHasTooManyFunctions { span } => MacroError { - primary_message: format!("Contract can only have a maximum of {} functions", MAX_CONTRACT_FUNCTIONS), + AztecMacroError::ContractHasTooManyPrivateFunctions { span } => MacroError { + primary_message: format!("Contract can only have a maximum of {} private functions", MAX_CONTRACT_PRIVATE_FUNCTIONS), secondary_message: None, span: Some(span), }, @@ -456,10 +456,22 @@ fn transform_module( if has_transformed_module { // We only want to run these checks if the macro processor has found the module to be an Aztec contract. 
- if module.functions.len() > MAX_CONTRACT_FUNCTIONS { + let private_functions_count = module + .functions + .iter() + .filter(|func| { + func.def + .attributes + .secondary + .iter() + .any(|attr| is_custom_attribute(attr, "aztec(private)")) + }) + .count(); + + if private_functions_count > MAX_CONTRACT_PRIVATE_FUNCTIONS { let crate_graph = &context.crate_graph[crate_id]; return Err(( - AztecMacroError::ContractHasTooManyFunctions { span: Span::default() }, + AztecMacroError::ContractHasTooManyPrivateFunctions { span: Span::default() }, crate_graph.root_file_id, )); } diff --git a/bootstrap_cache.sh b/bootstrap_cache.sh index d06aa49366..1cec6c81d8 100755 --- a/bootstrap_cache.sh +++ b/bootstrap_cache.sh @@ -1,8 +1,6 @@ #!/usr/bin/env bash set -eu -[ -z "${NO_CACHE:-}" ] && type docker &> /dev/null && [ -f ~/.aws/credentials ] || exit 1 - cd "$(dirname "$0")" source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null diff --git a/noir_stdlib/src/collections/map.nr b/noir_stdlib/src/collections/map.nr index d9eb83ff5d..056299b423 100644 --- a/noir_stdlib/src/collections/map.nr +++ b/noir_stdlib/src/collections/map.nr @@ -400,7 +400,7 @@ impl HashMap { // n * MAX_LOAD_FACTOR_DEN0MINATOR >= m * MAX_LOAD_FACTOR_NUMERATOR fn assert_load_factor(self) { let lhs = self._len * MAX_LOAD_FACTOR_DEN0MINATOR; - let rhs = self._table.len() as u64 * MAX_LOAD_FACTOR_NUMERATOR; + let rhs = self._table.len() * MAX_LOAD_FACTOR_NUMERATOR; let exceeded = lhs >= rhs; assert(!exceeded, "Load factor is exceeded, consider increasing the capacity."); } diff --git a/noir_stdlib/src/sha256.nr b/noir_stdlib/src/sha256.nr index 6bcc5ea74c..2f686a6416 100644 --- a/noir_stdlib/src/sha256.nr +++ b/noir_stdlib/src/sha256.nr @@ -57,7 +57,7 @@ pub fn digest(msg: [u8; N]) -> [u8; 32] { msg_block[i as Field] = 0; i = i + 1; } else if i < 64 { - let mut len = 8 * msg.len() as u64; + let mut len = 8 * msg.len(); for j in 0..8 { msg_block[63 - j] = len as u8; len >>= 8; diff --git 
a/noir_stdlib/src/sha512.nr b/noir_stdlib/src/sha512.nr index 155ba593bb..4dfe78308e 100644 --- a/noir_stdlib/src/sha512.nr +++ b/noir_stdlib/src/sha512.nr @@ -136,7 +136,7 @@ pub fn digest(msg: [u8; N]) -> [u8; 64] { msg_block[i as Field] = 0; i += 1; } else if i < 128 { - let mut len = 8 * msg.len() as u64; // u128 unsupported + let mut len = 8 * msg.len(); for j in 0..16 { msg_block[127 - j] = len as u8; len >>= 8; diff --git a/test_programs/execution_success/regression/src/main.nr b/test_programs/execution_success/regression/src/main.nr index c70e2e75fa..c56f3ef419 100644 --- a/test_programs/execution_success/regression/src/main.nr +++ b/test_programs/execution_success/regression/src/main.nr @@ -1,4 +1,4 @@ -global NIBBLE_LENGTH: Field = 16; +global NIBBLE_LENGTH: u64 = 16; struct U4 { inner: u8, @@ -21,8 +21,8 @@ impl Eq for U4 { } fn compact_decode(input: [u8; N], length: Field) -> ([U4; NIBBLE_LENGTH], Field) { - assert(2 * input.len() as u64 <= NIBBLE_LENGTH as u64); - assert(length as u64 <= input.len() as u64); + assert(2 * input.len() <= NIBBLE_LENGTH); + assert(length as u64 <= input.len()); let mut nibble = [U4::zero(); NIBBLE_LENGTH]; From 97bcae2725fc59c7d4ce5212990a40fdcd993e53 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Tue, 27 Feb 2024 18:29:30 +0100 Subject: [PATCH 16/17] chore: address code review comments of PR4398 (#4435) # Description ## Problem\* Related to #4170, this PR addresses comments in PR #4398 ## Summary\* Add a comment and a sanity check. ## Additional Context ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- acvm-repo/acvm/src/pwg/blackbox/hash.rs | 12 +++++++++++- noir_stdlib/src/hash/poseidon2.nr | 3 ++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 1bc26f0618..24c835a636 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -143,12 +143,22 @@ pub(crate) fn solve_poseidon2_permutation_opcode( return Err(OpcodeResolutionError::BlackBoxFunctionFailed( acir::BlackBoxFunc::Poseidon2Permutation, format!( - "the number of inputs does not match specified length. {} > {}", + "the number of inputs does not match specified length. {} != {}", inputs.len(), len ), )); } + if len as usize != outputs.len() { + return Err(OpcodeResolutionError::BlackBoxFunctionFailed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of outputs does not match specified length. {} != {}", + outputs.len(), + len + ), + )); + } // Read witness assignments let mut state = Vec::new(); diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr index 64c1876b4e..40eea029e8 100644 --- a/noir_stdlib/src/hash/poseidon2.nr +++ b/noir_stdlib/src/hash/poseidon2.nr @@ -93,7 +93,8 @@ impl Poseidon2 { } fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { - let iv : Field = (in_len as Field) * 18446744073709551616; + let two_pow_64 = 18446744073709551616; + let iv : Field = (in_len as Field) * two_pow_64; let mut sponge = Poseidon2::new(iv); for i in 0..input.len() { if i as u32 < in_len { From 9544813fabbd18a87dd88456e6a5b781bd0cf008 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 27 Feb 2024 22:12:59 +0000 Subject: [PATCH 17/17] chore!: reserve `unchecked` keyword (#4432) # Description ## Problem\* Resolves ## Summary\* There's a decent chance that we're going to add some form of unchecked maths in future so I'd like to reserve this 
keyword in preparation for that. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_frontend/src/lexer/token.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index fe12132e20..5674ae5a39 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -673,6 +673,7 @@ pub enum Keyword { Struct, Trait, Type, + Unchecked, Unconstrained, Use, Where, @@ -715,6 +716,7 @@ impl fmt::Display for Keyword { Keyword::Struct => write!(f, "struct"), Keyword::Trait => write!(f, "trait"), Keyword::Type => write!(f, "type"), + Keyword::Unchecked => write!(f, "unchecked"), Keyword::Unconstrained => write!(f, "unconstrained"), Keyword::Use => write!(f, "use"), Keyword::Where => write!(f, "where"), @@ -760,6 +762,7 @@ impl Keyword { "struct" => Keyword::Struct, "trait" => Keyword::Trait, "type" => Keyword::Type, + "unchecked" => Keyword::Unchecked, "unconstrained" => Keyword::Unconstrained, "use" => Keyword::Use, "where" => Keyword::Where,