diff --git a/codegen/merkle-zeros/Cargo.toml b/codegen/merkle-zeros/Cargo.toml index f35b57b..169c59b 100644 --- a/codegen/merkle-zeros/Cargo.toml +++ b/codegen/merkle-zeros/Cargo.toml @@ -4,8 +4,12 @@ version = "0.1.0" edition = "2021" [dependencies] +# === Renegade === # renegade-crypto = { git = "https://github.com/renegade-fi/renegade.git" } renegade-constants = { package = "constants", git = "https://github.com/renegade-fi/renegade.git", default-features = false } +common = { path = "../../test/rust-reference-impls/common" } + +# === Misc === # clap = { version = "4.5.1", features = ["derive"] } anyhow = "1.0" tiny-keccak = { version = "2.0", features = ["keccak"] } diff --git a/codegen/merkle-zeros/src/main.rs b/codegen/merkle-zeros/src/main.rs index 75d0ee6..4a2fb11 100644 --- a/codegen/merkle-zeros/src/main.rs +++ b/codegen/merkle-zeros/src/main.rs @@ -4,17 +4,14 @@ use anyhow::{anyhow, Result}; use clap::Parser; -use renegade_constants::{Scalar, MERKLE_HEIGHT}; -use renegade_crypto::hash::compute_poseidon_hash; +use common::merkle_helpers::{generate_zero_values, LEAF_KECCAK_PREIMAGE}; +use renegade_constants::MERKLE_HEIGHT; use std::fs::File; use std::io::Write; use std::path::PathBuf; -use tiny_keccak::{Hasher, Keccak}; /// Name of the Solidity contract to generate const CONTRACT_NAME: &str = "MerkleZeros"; -/// The string that is used to create leaf zero values -const LEAF_KECCAK_PREIMAGE: &str = "renegade"; /// Command line arguments for the merkle-zeros-codegen binary #[derive(Parser, Debug)] @@ -44,7 +41,7 @@ fn generate_solidity_contract() -> Result { let root = zero_values[MERKLE_HEIGHT]; // Add the constant values to the contract - for (i, value) in zero_values[..MERKLE_HEIGHT].iter().rev().enumerate() { + for (i, value) in zero_values[..MERKLE_HEIGHT].iter().enumerate() { contract.push_str(&format!( "\tuint256 constant public ZERO_VALUE_{} = {};\n", i, value @@ -58,13 +55,13 @@ fn generate_solidity_contract() -> Result { // Add an assembly-based getter function for gas-efficient constant-time access contract.push_str("\n\t/// @notice Get zero value for a given height\n"); contract.push_str("\t/// @param height The height in the Merkle tree\n"); - contract.push_str("\t/// @return The zero value for the given height\n"); - contract - .push_str("\tfunction getZeroValue(uint256 height) internal pure returns (uint256) {\n"); + contract.push_str("\t/// @return result the zero value for the given height\n"); + contract.push_str( + "\tfunction getZeroValue(uint256 height) internal pure returns (uint256 result) {\n", + ); contract.push_str("\t\t// Require height to be within valid range\n"); contract.push_str("\t\trequire(height <= 31, \"MerkleZeros: height must be <= 31\");\n\n"); - contract.push_str("\t\tuint256 result;\n"); contract.push_str("\t\tassembly {\n"); contract.push_str("\t\t\tswitch height\n"); @@ -85,33 +82,6 @@ fn generate_solidity_contract() -> Result { Ok(contract) } -/// Generate the zero values for each height in the Merkle tree -fn generate_zero_values() -> Vec { - let mut result = vec![generate_leaf_zero_value()]; - for height in 1..=MERKLE_HEIGHT { - let last_zero = result[height - 1]; - let next_zero = compute_poseidon_hash(&[last_zero, last_zero]); - result.push(next_zero); - } - result -} - -/// Generate the zero value for a leaf in the Merkle tree -fn generate_leaf_zero_value() -> Scalar { - // Create a Keccak-256 hasher - let mut hasher = Keccak::v256(); - - // Prepare input and output buffers - let input = LEAF_KECCAK_PREIMAGE.as_bytes(); - let mut 
output = [0u8; 32]; // 256 bits = 32 bytes - - // Compute the hash - hasher.update(input); - hasher.finalize(&mut output); - - Scalar::from_be_bytes_mod_order(&output) -} - /// Entrypoint fn main() -> Result<()> { // Parse command line arguments diff --git a/src/Darkpool.sol b/src/Darkpool.sol index c173d6e..8d4d5f4 100644 --- a/src/Darkpool.sol +++ b/src/Darkpool.sol @@ -1,6 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; +import { console2 } from "forge-std/console2.sol"; import { PlonkProof, VerificationKey, NUM_SELECTORS, NUM_WIRE_TYPES } from "./libraries/verifier/Types.sol"; import { BN254 } from "solidity-bn254/BN254.sol"; import { VerifierCore } from "./libraries/verifier/VerifierCore.sol"; diff --git a/src/libraries/merkle/MerkleTree.sol b/src/libraries/merkle/MerkleTree.sol index e09dbbf..f2ff08e 100644 --- a/src/libraries/merkle/MerkleTree.sol +++ b/src/libraries/merkle/MerkleTree.sol @@ -34,7 +34,8 @@ library MerkleTreeLib { /// @param tree The tree to initialize function initialize(MerkleTree storage tree) internal { tree.nextIndex = 0; - tree.root = BN254.ScalarField.wrap(0); + tree.root = BN254.ScalarField.wrap(MerkleZeros.ZERO_VALUE_ROOT); + tree.rootHistory[tree.root] = true; // Initialize the sibling path array tree.siblingPath = new BN254.ScalarField[](DarkpoolConstants.MERKLE_DEPTH); @@ -46,16 +47,16 @@ library MerkleTreeLib { /// @notice Returns the root of the tree /// @param tree The tree to get the root of /// @return The root of the tree - function root(MerkleTree storage tree) internal view returns (BN254.ScalarField) { + function getRoot(MerkleTree storage tree) internal view returns (BN254.ScalarField) { return tree.root; } /// @notice Returns whether the given root is in the history of the tree /// @param tree The tree to check the root history of - /// @param root The root to check + /// @param historicalRoot The root to check /// @return Whether the root is in the history of the tree - function rootInHistory(MerkleTree storage tree, BN254.ScalarField root) internal view returns (bool) { - return tree.rootHistory[root]; + function rootInHistory(MerkleTree storage tree, BN254.ScalarField historicalRoot) internal view returns (bool) { + return tree.rootHistory[historicalRoot]; } /// @notice Insert a leaf into the tree diff --git a/src/libraries/merkle/MerkleZeros.sol b/src/libraries/merkle/MerkleZeros.sol index b744edf..99050c4 100644 --- a/src/libraries/merkle/MerkleZeros.sol +++ b/src/libraries/merkle/MerkleZeros.sol @@ -5,48 +5,47 @@ pragma solidity ^0.8.0; library MerkleZeros { // LEAF_ZERO_VALUE is the keccak256 hash of the string "renegade" - uint256 constant public ZERO_VALUE_0 = 15962612577647058420645476952537206750366608626029610482408928804774318734059; - uint256 constant public ZERO_VALUE_1 = 19133436467657663288002792317295361471584527575409806386877579220142957274311; - uint256 constant public ZERO_VALUE_2 = 9687651560707488562669993660671667535167069078962303879140600742064277138783; - uint256 constant public ZERO_VALUE_3 = 7756681173448585507407836521077252702714499259676930386277118558421472733695; - uint256 constant public ZERO_VALUE_4 = 19675392939688276802339156281601404320980406070882905227039824358807516124555; - uint256 constant public ZERO_VALUE_5 = 2125779998549108384849449352238557248823106982222780494941361329497674962055; - uint256 constant public ZERO_VALUE_6 = 21045373409225237393227367297008943911912705632505943546436228216465187679794; - uint256 constant public ZERO_VALUE_7 = 
15240140699176831850148543788268509701353515782125796441374715127049044490345; - uint256 constant public ZERO_VALUE_8 = 4869202566029698064271465793060035209512941191405170347160900659782492599420; - uint256 constant public ZERO_VALUE_9 = 21499774241007975435158701841671012387268617016438138010529957752176281043982; - uint256 constant public ZERO_VALUE_10 = 12005465458277291918585874267570875140312745411055133999597826802122331383829; - uint256 constant public ZERO_VALUE_11 = 5954813481663260920526782055140925931240652453878241256547343648547886256115; - uint256 constant public ZERO_VALUE_12 = 11215346573587842331133140809886378146723590715517066956754015818399718028211; - uint256 constant public ZERO_VALUE_13 = 7411979333183272993586192846445207497153951530258254274653315223417956289516; - uint256 constant public ZERO_VALUE_14 = 1021314982886605788655750223042027732834175048831600032925052368337071312920; - uint256 constant public ZERO_VALUE_15 = 12626775664493191592868103463442370443572107219205819234160503479267552095144; - uint256 constant public ZERO_VALUE_16 = 20123836996685171851147985684125431013529635771352218757607199589107940123097; - uint256 constant public ZERO_VALUE_17 = 3382712961983301882764582068036696973957406957867468693626457706392093541451; - uint256 constant public ZERO_VALUE_18 = 2902297038939909195370997474544705413513565002202017991304595081134299351859; - uint256 constant public ZERO_VALUE_19 = 18195474162697958019742127733181574648348403460399143282804115462140090973272; - uint256 constant public ZERO_VALUE_20 = 7917393958708656447966347763175495240817592276812457488958366778499968534404; - uint256 constant public ZERO_VALUE_21 = 7093370145395736599029247649221138464255596728001346862162636717742248456822; - uint256 constant public ZERO_VALUE_22 = 7143033676627539261504534687959441406366684773196748433880878745292610721342; - uint256 constant public ZERO_VALUE_23 = 19481598376776831435239791620729985799885972897300809781485986746773456392129; - uint256 constant public ZERO_VALUE_24 = 19076218734736948577879157617365119126235873506901406237134838330454326767382; - uint256 constant public ZERO_VALUE_25 = 11202409622115594108632295319026050428586618859253344007474069726617383330201; - uint256 constant public ZERO_VALUE_26 = 4691736141445911110922368938732204433610503339213348004083266455528299545086; - uint256 constant public ZERO_VALUE_27 = 7492191656949889326191300377435382192231120613403472660772864155136961721921; - uint256 constant public ZERO_VALUE_28 = 17974638503328037533292433722585764042021737271514033680858878722945293807513; - uint256 constant public ZERO_VALUE_29 = 16152469242921808488194486632224509727076872200432979464611802545119788097844; - uint256 constant public ZERO_VALUE_30 = 7035835480239620343712770214636030506415861196323445446427955599547555378646; - uint256 constant public ZERO_VALUE_31 = 3570982782379586050211724779746612745305269241448247085265205218748662232570; + uint256 constant public ZERO_VALUE_0 = 3570982782379586050211724779746612745305269241448247085265205218748662232570; + uint256 constant public ZERO_VALUE_1 = 7035835480239620343712770214636030506415861196323445446427955599547555378646; + uint256 constant public ZERO_VALUE_2 = 16152469242921808488194486632224509727076872200432979464611802545119788097844; + uint256 constant public ZERO_VALUE_3 = 17974638503328037533292433722585764042021737271514033680858878722945293807513; + uint256 constant public ZERO_VALUE_4 = 
7492191656949889326191300377435382192231120613403472660772864155136961721921; + uint256 constant public ZERO_VALUE_5 = 4691736141445911110922368938732204433610503339213348004083266455528299545086; + uint256 constant public ZERO_VALUE_6 = 11202409622115594108632295319026050428586618859253344007474069726617383330201; + uint256 constant public ZERO_VALUE_7 = 19076218734736948577879157617365119126235873506901406237134838330454326767382; + uint256 constant public ZERO_VALUE_8 = 19481598376776831435239791620729985799885972897300809781485986746773456392129; + uint256 constant public ZERO_VALUE_9 = 7143033676627539261504534687959441406366684773196748433880878745292610721342; + uint256 constant public ZERO_VALUE_10 = 7093370145395736599029247649221138464255596728001346862162636717742248456822; + uint256 constant public ZERO_VALUE_11 = 7917393958708656447966347763175495240817592276812457488958366778499968534404; + uint256 constant public ZERO_VALUE_12 = 18195474162697958019742127733181574648348403460399143282804115462140090973272; + uint256 constant public ZERO_VALUE_13 = 2902297038939909195370997474544705413513565002202017991304595081134299351859; + uint256 constant public ZERO_VALUE_14 = 3382712961983301882764582068036696973957406957867468693626457706392093541451; + uint256 constant public ZERO_VALUE_15 = 20123836996685171851147985684125431013529635771352218757607199589107940123097; + uint256 constant public ZERO_VALUE_16 = 12626775664493191592868103463442370443572107219205819234160503479267552095144; + uint256 constant public ZERO_VALUE_17 = 1021314982886605788655750223042027732834175048831600032925052368337071312920; + uint256 constant public ZERO_VALUE_18 = 7411979333183272993586192846445207497153951530258254274653315223417956289516; + uint256 constant public ZERO_VALUE_19 = 11215346573587842331133140809886378146723590715517066956754015818399718028211; + uint256 constant public ZERO_VALUE_20 = 5954813481663260920526782055140925931240652453878241256547343648547886256115; + uint256 constant public ZERO_VALUE_21 = 12005465458277291918585874267570875140312745411055133999597826802122331383829; + uint256 constant public ZERO_VALUE_22 = 21499774241007975435158701841671012387268617016438138010529957752176281043982; + uint256 constant public ZERO_VALUE_23 = 4869202566029698064271465793060035209512941191405170347160900659782492599420; + uint256 constant public ZERO_VALUE_24 = 15240140699176831850148543788268509701353515782125796441374715127049044490345; + uint256 constant public ZERO_VALUE_25 = 21045373409225237393227367297008943911912705632505943546436228216465187679794; + uint256 constant public ZERO_VALUE_26 = 2125779998549108384849449352238557248823106982222780494941361329497674962055; + uint256 constant public ZERO_VALUE_27 = 19675392939688276802339156281601404320980406070882905227039824358807516124555; + uint256 constant public ZERO_VALUE_28 = 7756681173448585507407836521077252702714499259676930386277118558421472733695; + uint256 constant public ZERO_VALUE_29 = 9687651560707488562669993660671667535167069078962303879140600742064277138783; + uint256 constant public ZERO_VALUE_30 = 19133436467657663288002792317295361471584527575409806386877579220142957274311; + uint256 constant public ZERO_VALUE_31 = 15962612577647058420645476952537206750366608626029610482408928804774318734059; uint256 constant public ZERO_VALUE_ROOT = 21822647340628839684360703580761466999432252031123851327655447000077200870350; /// @notice Get zero value for a given height /// @param height The height in the Merkle tree - /// 
@return The zero value for the given height - function getZeroValue(uint256 height) internal pure returns (uint256) { + /// @return result the zero value for the given height + function getZeroValue(uint256 height) internal pure returns (uint256 result) { // Require height to be within valid range require(height <= 31, "MerkleZeros: height must be <= 31"); - uint256 result; assembly { switch height case 0 { result := ZERO_VALUE_0 } diff --git a/test/MerkleTree.t.sol b/test/MerkleTree.t.sol new file mode 100644 index 0000000..481a03a --- /dev/null +++ b/test/MerkleTree.t.sol @@ -0,0 +1,50 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import { DarkpoolConstants } from "../src/libraries/darkpool/Constants.sol"; +import { MerkleTreeLib } from "../src/libraries/merkle/MerkleTree.sol"; +import { MerkleZeros } from "../src/libraries/merkle/MerkleZeros.sol"; +import { IHasher } from "../src/libraries/poseidon2/IHasher.sol"; +import { TestUtils } from "./utils/TestUtils.sol"; +import { HuffDeployer } from "foundry-huff/HuffDeployer.sol"; +import { BN254 } from "solidity-bn254/BN254.sol"; + +contract MerkleTreeTest is TestUtils { + using MerkleTreeLib for MerkleTreeLib.MerkleTree; + + MerkleTreeLib.MerkleTree private tree; + IHasher private hasher; + + function setUp() public { + tree.initialize(); + hasher = IHasher(HuffDeployer.deploy("libraries/poseidon2/poseidonHasher")); + } + + /// @notice Test that the root and root history are initialized correctly + function test_rootAfterInitialization() public view { + // Test that the root is the default zero valued root + uint256 expectedRoot = MerkleZeros.ZERO_VALUE_ROOT; + uint256 actualRoot = BN254.ScalarField.unwrap(tree.getRoot()); + assertEq(actualRoot, expectedRoot); + + // Test that the zero valued root is in the history + BN254.ScalarField rootScalar = BN254.ScalarField.wrap(expectedRoot); + bool expectedInHistory = tree.rootInHistory(rootScalar); + assertEq(expectedInHistory, true); + } + + /// @notice Test that the zero valued leaf hashes to the zero valued root + function test_zeroValueLeafMerkleHash() public view { + uint256 root = BN254.ScalarField.unwrap(tree.getRoot()); + + uint256 currLeaf = MerkleZeros.getZeroValue(0); + for (uint256 i = 0; i < DarkpoolConstants.MERKLE_DEPTH; i++) { + uint256[] memory inputs = new uint256[](2); + inputs[0] = currLeaf; + inputs[1] = currLeaf; + currLeaf = hasher.spongeHash(inputs); + } + + assertEq(currLeaf, root); + } +} diff --git a/test/rust-reference-impls/common/Cargo.toml b/test/rust-reference-impls/common/Cargo.toml index 97295da..b00a45c 100644 --- a/test/rust-reference-impls/common/Cargo.toml +++ b/test/rust-reference-impls/common/Cargo.toml @@ -10,6 +10,9 @@ ark-ff = "0.4.0" ark-ec = "0.4.0" ark-serialize = "0.4.0" +# === Cryptography === # +tiny-keccak = { version = "2.0", features = ["keccak"] } + # === EVM === # alloy = "0.11" alloy-sol-types = "0.8.21" @@ -19,6 +22,7 @@ mpc-plonk = { workspace = true } mpc-relation = { workspace = true } renegade-circuit-types = { workspace = true } renegade-constants = { workspace = true } +renegade-crypto = { workspace = true } # === Misc === # itertools = "0.14" diff --git a/test/rust-reference-impls/common/src/lib.rs b/test/rust-reference-impls/common/src/lib.rs index d700a80..c074e31 100644 --- a/test/rust-reference-impls/common/src/lib.rs +++ b/test/rust-reference-impls/common/src/lib.rs @@ -1,3 +1,4 @@ //! 
Common utilities for the reference implementations pub mod abi_types; +pub mod merkle_helpers; diff --git a/test/rust-reference-impls/common/src/merkle_helpers.rs b/test/rust-reference-impls/common/src/merkle_helpers.rs new file mode 100644 index 0000000..8c9be5b --- /dev/null +++ b/test/rust-reference-impls/common/src/merkle_helpers.rs @@ -0,0 +1,35 @@ +//! Helper functions for merkle tree operations + +use renegade_constants::{Scalar, MERKLE_HEIGHT}; +use renegade_crypto::hash::compute_poseidon_hash; +use tiny_keccak::{Hasher, Keccak}; + +/// The string that is used to create leaf zero values +pub const LEAF_KECCAK_PREIMAGE: &str = "renegade"; + +/// Generate the zero values for each height in the Merkle tree +pub fn generate_zero_values() -> Vec { + let mut result = vec![generate_leaf_zero_value()]; + for height in 1..=MERKLE_HEIGHT { + let last_zero = result[height - 1]; + let next_zero = compute_poseidon_hash(&[last_zero, last_zero]); + result.push(next_zero); + } + result +} + +/// Generate the zero value for a leaf in the Merkle tree +pub fn generate_leaf_zero_value() -> Scalar { + // Create a Keccak-256 hasher + let mut hasher = Keccak::v256(); + + // Prepare input and output buffers + let input = LEAF_KECCAK_PREIMAGE.as_bytes(); + let mut output = [0u8; 32]; // 256 bits = 32 bytes + + // Compute the hash + hasher.update(input); + hasher.finalize(&mut output); + + Scalar::from_be_bytes_mod_order(&output) +} diff --git a/test/rust-reference-impls/merkle/Cargo.toml b/test/rust-reference-impls/merkle/Cargo.toml index e53636c..ce2d671 100644 --- a/test/rust-reference-impls/merkle/Cargo.toml +++ b/test/rust-reference-impls/merkle/Cargo.toml @@ -16,3 +16,4 @@ rand = "0.8" renegade-constants = { workspace = true } renegade-crypto = { workspace = true } +common = { workspace = true } diff --git a/test/rust-reference-impls/merkle/src/main.rs b/test/rust-reference-impls/merkle/src/main.rs index 3dd38ec..e7d33af 100644 --- a/test/rust-reference-impls/merkle/src/main.rs +++ b/test/rust-reference-impls/merkle/src/main.rs @@ -1,7 +1,9 @@ +pub mod merkle_hash; +pub mod merkle_insert; +pub mod sponge_hash; +pub mod util; + use clap::{Parser, Subcommand}; -use renegade_constants::Scalar; -use renegade_crypto::fields::scalar_to_biguint; -use renegade_crypto::hash::compute_poseidon_hash; /// The height of the Merkle tree const TREE_HEIGHT: usize = 32; @@ -20,6 +22,9 @@ enum Commands { /// Hash inputs using a Poseidon sponge SpongeHash(SpongeHashArgs), + + /// Compute the root after inserting elements into a Merkle tree sequentially + InsertRoot(InsertRootArgs), } #[derive(Parser)] @@ -42,208 +47,19 @@ struct SpongeHashArgs { inputs: Vec, } +#[derive(Parser)] +struct InsertRootArgs { + /// Input values to insert sequentially (starting from index 0) + #[arg(required = true)] + inputs: Vec, +} + fn main() { let cli = Cli::parse(); match cli.command { - Commands::MerkleHash(args) => handle_merkle_hash(args), - Commands::SpongeHash(args) => handle_sponge_hash(args), - } -} - -fn handle_merkle_hash(args: MerkleHashArgs) { - if args.sister_leaves.len() != TREE_HEIGHT { - eprintln!( - "Expected {} sister leaves, got {}", - TREE_HEIGHT, - args.sister_leaves.len() - ); - std::process::exit(1); - } - - let input = Scalar::from_decimal_string(&args.input).unwrap(); - - // Parse sister leaves directly from arguments - let sister_leaves: Vec = args - .sister_leaves - .iter() - .map(|s| Scalar::from_decimal_string(s).unwrap()) - .collect(); - - let results = hash_merkle(args.idx, input, &sister_leaves); - - // 
Output results as space-separated decimal values - let result_strings: Vec = results - .iter() - .map(|r| scalar_to_biguint(r).to_string()) - .collect(); - - println!("RES:{}", result_strings.join(" ")); -} - -fn handle_sponge_hash(args: SpongeHashArgs) { - // Parse input values to Scalars - let inputs: Vec = args - .inputs - .iter() - .map(|s| Scalar::from_decimal_string(s).unwrap()) - .collect(); - - let res = compute_poseidon_hash(&inputs); - let res_hex = format!("{:x}", res.to_biguint()); - println!("RES:0x{res_hex}"); -} - -/// Hash the input through the Merkle tree using the given sister nodes -/// -/// Returns the incremental results at each level, representing the updated values to the insertion path -fn hash_merkle(idx: u64, input: Scalar, sister_leaves: &[Scalar]) -> Vec { - let mut results = Vec::with_capacity(TREE_HEIGHT); - let mut current = input; - let mut current_idx = idx; - - for sister in sister_leaves.iter().copied() { - // The input is a left-hand node if the index is even at this level - let inputs = if current_idx % 2 == 0 { - [current, sister] - } else { - [sister, current] - }; - - current = compute_poseidon_hash(&inputs); - results.push(current); - current_idx /= 2; - } - - results -} - -#[cfg(test)] -mod tests { - //! We test the Merkle tree helper above against the reference implementation in Arkworks - //! for a known reference implementation. - //! - //! It is difficult to test the huff contracts against the Arkworks impl because the Arkworks impl - //! handles deep trees very inefficiently, making a 32-depth tree impossible to run. - //! - //! Instead, we opt to test our helper against Arkworks on a shallower tree, thereby testing the - //! huff implementation only transitively. - - use std::borrow::Borrow; - - use ark_crypto_primitives::{ - crh::{CRHScheme, TwoToOneCRHScheme}, - merkle_tree::{Config, IdentityDigestConverter, MerkleTree}, - }; - use rand::{thread_rng, Rng}; - use renegade_constants::{Scalar, ScalarField}; - use renegade_crypto::hash::compute_poseidon_hash; - - use crate::hash_merkle; - - /// The height of the Merkle tree - const TEST_TREE_HEIGHT: usize = 10; - /// The number of leaves in the tree - const N_LEAVES: usize = 1 << (TEST_TREE_HEIGHT - 1); - - // --- Hash Impls --- // - - struct IdentityHasher; - impl CRHScheme for IdentityHasher { - type Input = ScalarField; - type Output = ScalarField; - type Parameters = (); - - fn setup(_: &mut R) -> Result { - Ok(()) - } - - fn evaluate>( - _parameters: &Self::Parameters, - input: T, - ) -> Result { - Ok(*input.borrow()) - } - } - - /// A dummy hasher to build an arkworks Merkle tree on top of - struct Poseidon2Hasher; - impl TwoToOneCRHScheme for Poseidon2Hasher { - type Input = ScalarField; - type Output = ScalarField; - type Parameters = (); - - fn setup(_: &mut R) -> Result { - Ok(()) - } - - fn evaluate>( - _parameters: &Self::Parameters, - left_input: T, - right_input: T, - ) -> Result { - let lhs = Scalar::new(*left_input.borrow()); - let rhs = Scalar::new(*right_input.borrow()); - let res = compute_poseidon_hash(&[lhs, rhs]); - - Ok(res.inner()) - } - - fn compress>( - parameters: &Self::Parameters, - left_input: T, - right_input: T, - ) -> Result { - ::evaluate(parameters, left_input, right_input) - } - } - - struct MerkleConfig {} - impl Config for MerkleConfig { - type Leaf = ScalarField; - type LeafDigest = ScalarField; - type InnerDigest = ScalarField; - - type LeafHash = IdentityHasher; - type TwoToOneHash = Poseidon2Hasher; - type LeafInnerDigestConverter = 
IdentityDigestConverter; - } - - /// Build an arkworks tree and fill it with random values - fn build_arkworks_tree() -> MerkleTree { - let mut rng = thread_rng(); - - let mut tree = MerkleTree::::blank(&(), &(), TEST_TREE_HEIGHT).unwrap(); - for i in 0..N_LEAVES { - let leaf = Scalar::random(&mut rng); - tree.update(i, &leaf.inner()).unwrap(); - } - - tree - } - - /// Test the Merkle helper against an arkworks tree - #[test] - fn test_merkle_tree() { - // Build an arkworks tree and fill it with random values - let mut rng = thread_rng(); - let mut tree = build_arkworks_tree(); - - // Choose a random index to update into - let idx = rng.gen_range(0..N_LEAVES); - let input = Scalar::random(&mut rng); - - // Get a sibling path for the input - let path = tree.generate_proof(idx).unwrap(); - let mut sister_scalars = vec![Scalar::new(path.leaf_sibling_hash)]; - sister_scalars.extend(path.auth_path.into_iter().rev().map(Scalar::new)); - - // Get the updated path - let res = hash_merkle(idx as u64, input, &sister_scalars); - let new_root = res.last().unwrap(); - - // Update the tree with the input - tree.update(idx, &input.inner()).unwrap(); - assert_eq!(tree.root(), new_root.inner()); + Commands::MerkleHash(args) => merkle_hash::handle_merkle_hash(args), + Commands::SpongeHash(args) => sponge_hash::handle_sponge_hash(args), + Commands::InsertRoot(args) => merkle_insert::handle_insert_root(args), } } diff --git a/test/rust-reference-impls/merkle/src/merkle_hash.rs b/test/rust-reference-impls/merkle/src/merkle_hash.rs new file mode 100644 index 0000000..b2015cc --- /dev/null +++ b/test/rust-reference-impls/merkle/src/merkle_hash.rs @@ -0,0 +1,84 @@ +/// Hash through a Merkle tree +use renegade_constants::Scalar; +use renegade_crypto::fields::scalar_to_biguint; + +use crate::{util, MerkleHashArgs, TREE_HEIGHT}; + +/// Handle the Merkle hash operation +pub(crate) fn handle_merkle_hash(args: MerkleHashArgs) { + if args.sister_leaves.len() != TREE_HEIGHT { + eprintln!( + "Expected {} sister leaves, got {}", + TREE_HEIGHT, + args.sister_leaves.len() + ); + std::process::exit(1); + } + + let input = Scalar::from_decimal_string(&args.input).unwrap(); + + // Parse sister leaves directly from arguments + let sister_leaves: Vec = args + .sister_leaves + .iter() + .map(|s| Scalar::from_decimal_string(s).unwrap()) + .collect(); + + let results = util::hash_merkle(args.idx, input, &sister_leaves); + + // Output results as space-separated decimal values + let result_strings: Vec = results + .iter() + .map(|r| scalar_to_biguint(r).to_string()) + .collect(); + + let res_str = result_strings.join(" "); + util::print_string_result(&res_str); +} + +#[cfg(test)] +mod tests { + //! We test the Merkle tree helper above against the reference implementation in Arkworks + //! for a known reference implementation. + //! + //! It is difficult to test the huff contracts against the Arkworks impl because the Arkworks impl + //! handles deep trees very inefficiently, making a 32-depth tree impossible to run. + //! + //! Instead, we opt to test our helper against Arkworks on a shallower tree, thereby testing the + //! huff implementation only transitively. 
+ + use rand::{thread_rng, Rng}; + use renegade_constants::Scalar; + + use crate::util::{build_arkworks_tree, hash_merkle}; + + /// The height of the Merkle tree + const TEST_TREE_HEIGHT: usize = 10; + /// The number of leaves in the tree + const N_LEAVES: usize = 1 << (TEST_TREE_HEIGHT - 1); + + /// Test the Merkle helper against an arkworks tree + #[test] + fn test_merkle_tree() { + // Build an arkworks tree and fill it with random values + let mut rng = thread_rng(); + let mut tree = build_arkworks_tree(TEST_TREE_HEIGHT, N_LEAVES); + + // Choose a random index to update into + let idx = rng.gen_range(0..N_LEAVES); + let input = Scalar::random(&mut rng); + + // Get a sibling path for the input + let path = tree.generate_proof(idx).unwrap(); + let mut sister_scalars = vec![Scalar::new(path.leaf_sibling_hash)]; + sister_scalars.extend(path.auth_path.into_iter().rev().map(Scalar::new)); + + // Get the updated path + let res = hash_merkle(idx as u64, input, &sister_scalars); + let new_root = res.last().unwrap(); + + // Update the tree with the input + tree.update(idx, &input.inner()).unwrap(); + assert_eq!(tree.root(), new_root.inner()); + } +} diff --git a/test/rust-reference-impls/merkle/src/merkle_insert.rs b/test/rust-reference-impls/merkle/src/merkle_insert.rs new file mode 100644 index 0000000..0258001 --- /dev/null +++ b/test/rust-reference-impls/merkle/src/merkle_insert.rs @@ -0,0 +1,162 @@ +//! Handler for inserting elements into a Merkle tree sequentially + +use renegade_constants::Scalar; +use renegade_crypto::hash::compute_poseidon_hash; + +use crate::{ + util::{get_merkle_zero, print_scalar_result}, + InsertRootArgs, TREE_HEIGHT, +}; + +// We want to be able to test the following: +// 1. Root after insert +// 2. Sibling path after insert +// +// + +/// Compute the root after inserting elements into a Merkle tree sequentially +pub(crate) fn handle_insert_root(args: InsertRootArgs) { + // Parse input values to Scalars + let inputs: Vec = args + .inputs + .iter() + .map(|s| Scalar::from_decimal_string(s).unwrap()) + .collect(); + + let tree = TestMerkleTree::new(TREE_HEIGHT as u64, inputs); + let root = tree.root(); + print_scalar_result(root); +} + +/// A simple Merkle tree query implementation +struct TestMerkleTree { + /// The height of the tree + height: u64, + /// The leaves of the tree + leaves: Vec, + /// The zeros at each height + zeros: Vec, +} + +impl TestMerkleTree { + /// Create a new Merkle tree + pub fn new(height: u64, leaves: Vec) -> Self { + let zeros = (0..=height).map(|h| get_merkle_zero(h as usize)).collect(); + Self { + height, + leaves, + zeros, + } + } + + /// Update the leaf at the given index + #[cfg(test)] + pub fn update(&mut self, index: u64, new_leaf: Scalar) { + if self.leaves.len() <= index as usize { + self.leaves.resize(index as usize + 1, self.zeros[0]); + } + self.leaves[index as usize] = new_leaf; + } + + /// The number of leaves at the given height + fn max_leaves_at_height(&self, height: u64) -> u64 { + let depth = self.height - height; + 1 << depth + } + + /// Get the root of the tree + pub fn root(&self) -> Scalar { + self.get_node(self.height, 0 /* idx */) + } + + /// Whether or not the subtree at the given coordinates is empty + fn subtree_empty(&self, height: u64, idx: u64) -> bool { + let full_leaves = self.leaves.len() as u64; + let idx_first_leaf = idx * (1 << height); + + full_leaves < idx_first_leaf + } + + /// Get the node at the given height and index + fn get_node(&self, height: u64, idx: u64) -> Scalar { + // Bounds checks + 
assert!(height <= self.height, "Height out of bounds"); + assert!( + idx < self.max_leaves_at_height(height), + "Index out of bounds" + ); + + // Base case: leaf node + if height == 0 { + let zero_leaf = self.zeros[0]; + return self.leaves.get(idx as usize).copied().unwrap_or(zero_leaf); + } + + // If the subtree is empty, return the zero value for this height + if self.subtree_empty(height, idx) { + return self.zeros[height as usize]; + } + + // Otherwise, recursively evaluate the tree + let left_idx = 2 * idx; + let right_idx = left_idx + 1; + let left_node = self.get_node(height - 1, left_idx); + let right_node = self.get_node(height - 1, right_idx); + compute_poseidon_hash(&[left_node, right_node]) + } +} + +#[cfg(test)] +mod tree_tests { + use rand::thread_rng; + + use crate::util::build_full_arkworks_tree; + + use super::*; + + const TEST_TREE_HEIGHT: u64 = 10; + + #[test] + fn test_tree_empty() { + let tree = TestMerkleTree::new(TEST_TREE_HEIGHT, vec![]); + assert_eq!(tree.root(), get_merkle_zero(TEST_TREE_HEIGHT as usize)); + } + + #[test] + fn test_tree_single_leaf() { + let mut rng = thread_rng(); + let leaf = Scalar::random(&mut rng); + let tree = TestMerkleTree::new(TEST_TREE_HEIGHT, vec![leaf]); + + let mut expected_root = leaf; + for height in 0..TEST_TREE_HEIGHT { + let zero = get_merkle_zero(height as usize); + expected_root = compute_poseidon_hash(&[expected_root, zero]); + } + + assert_eq!(tree.root(), expected_root); + } + + #[test] + fn test_against_arkworks() { + const N: usize = 100; + let mut rng = thread_rng(); + let mut ark_tree = build_full_arkworks_tree(TEST_TREE_HEIGHT as usize, vec![]); + let mut test_tree = TestMerkleTree::new(TEST_TREE_HEIGHT, vec![]); + + let new_leaf = Scalar::random(&mut rng); + test_tree.update(0, new_leaf); + ark_tree.update(0, &new_leaf.inner()).unwrap(); + + for i in 0..N { + let leaf = Scalar::random(&mut rng); + ark_tree.update(i, &leaf.inner()).unwrap(); + test_tree.update(i as u64, leaf); + + // Check that the roots align after each insert + let ark_root = ark_tree.root(); + let test_root = test_tree.root().inner(); + assert_eq!(test_root, ark_root); + } + } +} diff --git a/test/rust-reference-impls/merkle/src/sponge_hash.rs b/test/rust-reference-impls/merkle/src/sponge_hash.rs new file mode 100644 index 0000000..5753e36 --- /dev/null +++ b/test/rust-reference-impls/merkle/src/sponge_hash.rs @@ -0,0 +1,19 @@ +//! Handlers for cli hash operations +use renegade_constants::Scalar; +use renegade_crypto::hash::compute_poseidon_hash; + +use crate::util; +use crate::SpongeHashArgs; + +/// Handle the Sponge hash operation +pub(crate) fn handle_sponge_hash(args: SpongeHashArgs) { + // Parse input values to Scalars + let inputs: Vec = args + .inputs + .iter() + .map(|s| Scalar::from_decimal_string(s).unwrap()) + .collect(); + + let res = compute_poseidon_hash(&inputs); + util::print_scalar_result(res); +} diff --git a/test/rust-reference-impls/merkle/src/util.rs b/test/rust-reference-impls/merkle/src/util.rs new file mode 100644 index 0000000..93921ab --- /dev/null +++ b/test/rust-reference-impls/merkle/src/util.rs @@ -0,0 +1,169 @@ +//! 
Utility functions for the hashing reference implementation + +use std::{borrow::Borrow, iter}; + +use ark_crypto_primitives::{ + crh::{CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{Config, IdentityDigestConverter, MerkleTree}, +}; +use common::merkle_helpers::generate_leaf_zero_value; +use itertools::Itertools; +use rand::{thread_rng, Rng}; +use renegade_constants::{Scalar, ScalarField}; +use renegade_crypto::hash::compute_poseidon_hash; + +use crate::TREE_HEIGHT; + +// --- Hashing --- // + +/// Get the merkle tree zero value for a given height +pub fn get_merkle_zero(height: usize) -> Scalar { + let mut curr = generate_leaf_zero_value(); + for _ in 0..height { + curr = compute_poseidon_hash(&[curr, curr]); + } + + curr +} + +/// Hash the input through the Merkle tree using the given sister nodes +/// +/// Returns the incremental results at each level, representing the updated values to the insertion path +pub fn hash_merkle(idx: u64, input: Scalar, sister_leaves: &[Scalar]) -> Vec { + let mut results = Vec::with_capacity(TREE_HEIGHT); + let mut current = input; + let mut current_idx = idx; + + for sister in sister_leaves.iter().copied() { + // The input is a left-hand node if the index is even at this level + let inputs = if current_idx % 2 == 0 { + [current, sister] + } else { + [sister, current] + }; + + current = compute_poseidon_hash(&inputs); + results.push(current); + current_idx /= 2; + } + + results +} + +// --- Test Helpers --- // + +pub struct IdentityHasher; +impl CRHScheme for IdentityHasher { + type Input = ScalarField; + type Output = ScalarField; + type Parameters = (); + + fn setup(_: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _parameters: &Self::Parameters, + input: T, + ) -> Result { + Ok(*input.borrow()) + } +} + +/// A dummy hasher to build an arkworks Merkle tree on top of +pub struct Poseidon2Hasher; +impl TwoToOneCRHScheme for Poseidon2Hasher { + type Input = ScalarField; + type Output = ScalarField; + type Parameters = (); + + fn setup(_: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _parameters: &Self::Parameters, + left_input: T, + right_input: T, + ) -> Result { + let lhs = Scalar::new(*left_input.borrow()); + let rhs = Scalar::new(*right_input.borrow()); + let res = compute_poseidon_hash(&[lhs, rhs]); + + Ok(res.inner()) + } + + fn compress>( + parameters: &Self::Parameters, + left_input: T, + right_input: T, + ) -> Result { + ::evaluate(parameters, left_input, right_input) + } +} + +pub struct MerkleConfig {} +impl Config for MerkleConfig { + type Leaf = ScalarField; + type LeafDigest = ScalarField; + type InnerDigest = ScalarField; + + type LeafHash = IdentityHasher; + type TwoToOneHash = Poseidon2Hasher; + type LeafInnerDigestConverter = IdentityDigestConverter; +} + +/// Build an arkworks tree and fill it with random values +pub fn build_arkworks_tree(height: usize, n_leaves: usize) -> MerkleTree { + let mut rng = thread_rng(); + let leaves = (0..n_leaves) + .map(|_| Scalar::random(&mut rng)) + .collect_vec(); + + build_arkworks_tree_with_leaves(height, &leaves) +} + +/// Build a full arkworks tree with the given leaves +/// +/// Pads the set of leaves to fill the tree +pub fn build_full_arkworks_tree(height: usize, leaves: Vec) -> MerkleTree { + let expected_leaves = 1 << height; + let zero_leaf = get_merkle_zero(0); + let leaves = leaves + .into_iter() + .chain(iter::repeat(zero_leaf)) + .take(expected_leaves) + .collect_vec(); + + build_arkworks_tree_with_leaves(height, &leaves) +} + +/// Build an arkworks tree with the given leaves 
+/// +/// Leaves must fill the tree +pub fn build_arkworks_tree_with_leaves( + height: usize, + leaves: &[Scalar], +) -> MerkleTree { + let expected_n_leaves = 1 << height; + assert!( + leaves.len() == expected_n_leaves, + "Number of leaves must fill the tree" + ); + + let inner_scalars: Vec = leaves.iter().map(Scalar::inner).collect(); + MerkleTree::::new(&(), &(), inner_scalars).unwrap() +} + +// --- Solidity FFI Helpers --- // + +/// Print the given scalar as an FFI result +pub fn print_scalar_result(scalar: Scalar) { + let res_hex = format!("{:x}", scalar.to_biguint()); + println!("RES:0x{res_hex}"); +} + +/// Print a string result +pub fn print_string_result(s: &str) { + println!("RES:{}", s); +} diff --git a/test/test-contracts/TestVerifier.sol b/test/test-contracts/TestVerifier.sol index a743a00..bae12f1 100644 --- a/test/test-contracts/TestVerifier.sol +++ b/test/test-contracts/TestVerifier.sol @@ -1,12 +1,18 @@ // SPDX-License-Identifier: UNLICENSED pragma solidity ^0.8.0; -import { PlonkProof } from "../../src/libraries/verifier/Types.sol"; +import { PlonkProof, VerificationKey } from "../../src/libraries/verifier/Types.sol"; +import { ValidWalletCreateStatement, StatementSerializer } from "../../src/libraries/darkpool/PublicInputs.sol"; +import { VerificationKeys } from "../../src/libraries/darkpool/VerificationKeys.sol"; import { IVerifier } from "../../src/libraries/verifier/IVerifier.sol"; -import { ValidWalletCreateStatement } from "../../src/libraries/darkpool/PublicInputs.sol"; +import { VerifierCore } from "../../src/libraries/verifier/VerifierCore.sol"; +import { BN254 } from "solidity-bn254/BN254.sol"; + +using StatementSerializer for ValidWalletCreateStatement; /// @title Test Verifier Implementation /// @notice This is a test implementation of the `IVerifier` interface that always returns true +/// @notice even if verification fails contract TestVerifier is IVerifier { /// @notice Verify a proof of `VALID WALLET CREATE` /// @param statement The public inputs to the proof @@ -20,7 +26,9 @@ contract TestVerifier is IVerifier { view returns (bool) { - // Always return true for testing purposes + VerificationKey memory vk = abi.decode(VerificationKeys.VALID_WALLET_CREATE_VKEY, (VerificationKey)); + BN254.ScalarField[] memory publicInputs = statement.scalarSerialize(); + VerifierCore.verify(proof, publicInputs, vk); return true; } }
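Note (not part of the diff): a minimal Rust sketch of the indexing convention the regenerated constants follow after dropping the `.rev()` in the codegen — `ZERO_VALUE_0` is the leaf zero (keccak256("renegade") reduced into the scalar field) and `ZERO_VALUE_ROOT` is the root of an empty tree. It assumes a small binary crate that depends on the `common` and `renegade-constants` packages shown above; it illustrates how `generate_zero_values` is intended to be consumed, and is not code introduced by this change.

```rust
// Sketch only: assumes a crate with `common` and `renegade-constants` as dependencies.
use common::merkle_helpers::generate_zero_values;
use renegade_constants::MERKLE_HEIGHT;

fn main() {
    // One zero value per height, plus the empty-tree root at index MERKLE_HEIGHT
    let zeros = generate_zero_values();
    assert_eq!(zeros.len(), MERKLE_HEIGHT + 1);

    // zeros[0] corresponds to ZERO_VALUE_0 (the hashed leaf preimage "renegade");
    // zeros[MERKLE_HEIGHT] corresponds to ZERO_VALUE_ROOT
    println!("leaf zero:  {}", zeros[0].to_biguint());
    println!("empty root: {}", zeros[MERKLE_HEIGHT].to_biguint());
}
```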