From 5d725cca32cd316b4098c07c2a33c6a27b579b4b Mon Sep 17 00:00:00 2001 From: Ryan Date: Sat, 3 Aug 2024 18:07:53 +0200 Subject: [PATCH 01/33] feat(WIP): nova --- Cargo.lock | 364 +++++++++++++++++++++++++++++++++++++++++++--- Cargo.toml | 3 + src/lib.rs | 1 + src/main.rs | 1 + src/nova/batch.rs | 317 ++++++++++++++++++++++++++++++++++++++++ src/nova/mod.rs | 1 + 6 files changed, 670 insertions(+), 17 deletions(-) create mode 100644 src/nova/batch.rs create mode 100644 src/nova/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 08b3718a..df9993e2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -199,6 +199,17 @@ dependencies = [ "syn 2.0.72", ] +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + [[package]] name = "addr2line" version = "0.22.0" @@ -227,7 +238,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ "crypto-common", - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -389,7 +400,7 @@ dependencies = [ "ark-serialize 0.3.0", "ark-std 0.3.0", "derivative", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "paste", "rustc_version 0.3.3", @@ -409,7 +420,7 @@ dependencies = [ "derivative", "digest 0.10.7", "itertools 0.10.5", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "paste", "rustc_version 0.4.0", @@ -442,7 +453,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-traits", "quote", "syn 1.0.109", @@ -454,7 +465,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-traits", "proc-macro2", "quote", @@ -479,7 +490,7 @@ checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" dependencies = [ "ark-std 0.4.0", "digest 0.10.7", - "num-bigint", + "num-bigint 0.4.6", ] [[package]] @@ -868,6 +879,30 @@ dependencies = [ "subtle", ] +[[package]] +name = "bellpepper" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae286c2cb403324ab644c7cc68dceb25fe52ca9429908a726d7ed272c1edf7b" +dependencies = [ + "bellpepper-core", + "byteorder", + "ff", +] + +[[package]] +name = "bellpepper-core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d8abb418570756396d722841b19edfec21d4e89e1cf8990610663040ecb1aea" +dependencies = [ + "blake2s_simd", + "byteorder", + "ff", + "serde", + "thiserror", +] + [[package]] name = "bincode" version = "1.3.3" @@ -904,6 +939,17 @@ dependencies = [ "wyz", ] +[[package]] +name = "blake2b_simd" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" +dependencies = [ + "arrayref", + "arrayvec 0.7.4", + "constant_time_eq 0.3.0", +] + [[package]] name = "blake2s_simd" version = "1.0.2" @@ -936,7 +982,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies 
= [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -945,7 +991,7 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -986,6 +1032,34 @@ dependencies = [ "subtle", ] +[[package]] +name = "blst" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4378725facc195f1a538864863f6de233b500a8862747e7f165078a419d5e874" +dependencies = [ + "cc", + "glob", + "threadpool", + "zeroize", +] + +[[package]] +name = "blstrs" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a8a8ed6fefbeef4a8c7b460e4110e12c5e22a5b7cf32621aae6ad650c4dcf29" +dependencies = [ + "blst", + "byte-slice-cast", + "ff", + "group", + "pairing", + "rand_core 0.6.4", + "serde", + "subtle", +] + [[package]] name = "borsh" version = "1.5.1" @@ -1656,7 +1730,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array", + "generic-array 0.14.7", "rand_core 0.6.4", "typenum", ] @@ -1679,7 +1753,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array", + "generic-array 0.14.7", "subtle", ] @@ -1689,7 +1763,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" dependencies = [ - "generic-array", + "generic-array 0.14.7", "subtle", ] @@ -1849,7 +1923,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -2095,10 +2169,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "bitvec", + "byteorder", + "ff_derive", "rand_core 0.6.4", "subtle", ] +[[package]] +name = "ff_derive" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9f54704be45ed286151c5e11531316eaef5b8f5af7d597b806fdb8af108d84a" +dependencies = [ + "addchain", + "cfg-if 1.0.0", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "fiat-crypto" version = "0.2.9" @@ -2340,6 +2432,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "generic-array" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96512db27971c2c3eece70a1e106fbe6c87760234e31e8f7e5634912fe52794a" +dependencies = [ + "typenum", +] + [[package]] name = "getrandom" version = "0.1.16" @@ -2358,8 +2459,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if 1.0.0", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -2378,6 +2481,12 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +[[package]] +name = 
"glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + [[package]] name = "gloo-timers" version = "0.2.6" @@ -2397,7 +2506,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", + "rand 0.8.5", "rand_core 0.6.4", + "rand_xorshift", "subtle", ] @@ -2430,6 +2541,31 @@ dependencies = [ "crunchy", ] +[[package]] +name = "halo2curves" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db81d01d0bbfec9f624d7590fc6929ee2537a64ec1e080d8f8c9e2d2da291405" +dependencies = [ + "blake2b_simd", + "ff", + "group", + "hex 0.4.3", + "lazy_static", + "num-bigint 0.4.6", + "num-traits", + "pairing", + "pasta_curves", + "paste", + "rand 0.8.5", + "rand_core 0.6.4", + "rayon", + "serde", + "serde_arrays", + "static_assertions", + "subtle", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -2492,6 +2628,9 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "hkdf" @@ -2521,6 +2660,15 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "http" version = "0.2.12" @@ -2690,7 +2838,7 @@ dependencies = [ "borsh", "hex 0.4.3", "num", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "serde", "sha2 0.10.8", @@ -2723,7 +2871,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -3030,6 +3178,15 @@ dependencies = [ "url", ] +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + [[package]] name = "keyring" version = "3.0.5" @@ -3073,6 +3230,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "leopard-codec" @@ -3372,6 +3532,25 @@ dependencies = [ "tempfile", ] +[[package]] +name = "neptune" +version = "13.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06626c9ac04c894e9a23d061ba1309f28506cdc5fe64156d28a15fb57fc8e438" +dependencies = [ + "bellpepper", + "bellpepper-core", + "blake2s_simd", + "blstrs", + "byteorder", + "ff", + "generic-array 0.14.7", + "log", + "pasta_curves", + "serde", + "trait-set", +] + [[package]] name = "nix" version = "0.26.4" @@ -3415,13 +3594,47 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nova-snark" +version = "0.37.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69b80bc67f3e77ad68dec356b5df15e8ce30d8855fc76e92782945a5fa74d6fc" +dependencies = [ + "bellpepper", + "bellpepper-core", + "bincode", + "bitvec", + "byteorder", + "digest 0.10.7", + "ff", + "generic-array 
1.1.0", + "getrandom 0.2.15", + "group", + "halo2curves", + "itertools 0.12.1", + "neptune", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "once_cell", + "pasta-msm", + "pasta_curves", + "rand_chacha 0.3.1", + "rand_core 0.6.4", + "rayon", + "serde", + "sha3", + "subtle", + "thiserror", +] + [[package]] name = "num" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-complex", "num-integer", "num-iter", @@ -3429,6 +3642,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -3437,6 +3661,8 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", + "rand 0.8.5", + "serde", ] [[package]] @@ -3491,7 +3717,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-integer", "num-traits", ] @@ -3673,6 +3899,36 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "pasta-msm" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e85d75eba3e7e9ee3bd11342b669185e194dadda3557934bc1000d9b87159d3" +dependencies = [ + "cc", + "pasta_curves", + "semolina", + "sppark", + "which", +] + +[[package]] +name = "pasta_curves" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e57598f73cc7e1b2ac63c79c517b31a0877cd7c402cdcaa311b5208de7a095" +dependencies = [ + "blake2b_simd", + "ff", + "group", + "hex 0.4.3", + "lazy_static", + "rand 0.8.5", + "serde", + "static_assertions", + "subtle", +] + [[package]] name = "paste" version = "1.0.15" @@ -3981,6 +4237,8 @@ dependencies = [ "axum", "base64 0.22.1", "bellman", + "bellpepper", + "bellpepper-core", "bls12_381", "borsh", "celestia-rpc", @@ -4005,6 +4263,7 @@ dependencies = [ "lazy_static", "log", "mockall", + "nova-snark", "num", "openssl", "pairing", @@ -4513,7 +4772,7 @@ dependencies = [ "ark-ff 0.4.2", "bytes", "fastrlp", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "parity-scale-codec", "primitive-types", @@ -4821,6 +5080,16 @@ dependencies = [ "libc", ] +[[package]] +name = "semolina" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b0111fd4fa831becb0606b9a2285ef3bee3c6a70d690209b8ae9514e9befe23" +dependencies = [ + "cc", + "glob", +] + [[package]] name = "semver" version = "0.11.0" @@ -4854,6 +5123,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_arrays" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38636132857f68ec3d5f3eb121166d2af33cb55174c4d5ff645db6165cbef0fd" +dependencies = [ + "serde", +] + [[package]] name = "serde_bytes" version = "0.11.15" @@ -5016,6 +5294,16 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + 
"digest 0.10.7", + "keccak", +] + [[package]] name = "shellexpand" version = "2.1.2" @@ -5109,6 +5397,16 @@ dependencies = [ "der", ] +[[package]] +name = "sppark" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a075ecc826f50c645a1e6bb2f94560ded74ec7584a6061e72470802a2eee350c" +dependencies = [ + "cc", + "which", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -5298,6 +5596,15 @@ dependencies = [ "syn 2.0.72", ] +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + [[package]] name = "time" version = "0.3.36" @@ -5596,6 +5903,17 @@ dependencies = [ "once_cell", ] +[[package]] +name = "trait-set" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "try-lock" version = "0.2.5" @@ -5909,6 +6227,18 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix 0.38.34", +] + [[package]] name = "winapi" version = "0.3.9" diff --git a/Cargo.toml b/Cargo.toml index 25fb21e1..b4d7e1af 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -68,6 +68,9 @@ pyroscope_pprofrs = "0.2.7" toml = "0.8.14" dirs = "5.0.1" anyhow = "1.0.44" +bellpepper-core = { version = "0.4.0", default-features = false } +bellpepper = { version = "0.4.0", default-features = false } +nova-snark = { version = "0.37.0", default-features = false } [dev-dependencies] serial_test = "3.1.1" diff --git a/src/lib.rs b/src/lib.rs index 7d93911b..9da4fe92 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -5,6 +5,7 @@ pub mod consts; pub mod da; pub mod error; pub mod node_types; +pub mod nova; pub mod storage; pub mod utils; pub mod webserver; diff --git a/src/main.rs b/src/main.rs index 09d9079e..3770ab2d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,6 +5,7 @@ pub mod consts; pub mod da; pub mod error; mod node_types; +mod nova; pub mod storage; mod utils; mod webserver; diff --git a/src/nova/batch.rs b/src/nova/batch.rs new file mode 100644 index 00000000..bc7c1e1c --- /dev/null +++ b/src/nova/batch.rs @@ -0,0 +1,317 @@ +use anyhow::{anyhow, Result}; +use bellpepper_core::{ + num::{AllocatedNum, Num}, + ConstraintSystem, SynthesisError, +}; +use core::marker::PhantomData; +use ff::PrimeField; +use indexed_merkle_tree::{ + node::LeafNode, + node::Node, + sha256_mod, + tree::{MerkleProof, NonMembershipProof}, +}; +use nova_snark::{ + provider::{Bn256EngineKZG, GrumpkinEngine}, + traits::circuit::StepCircuit, +}; + +type E1 = Bn256EngineKZG; +type E2 = GrumpkinEngine; + +#[derive(Clone)] +enum UnifiedProofStep { + /// Update proof step ensures that an existing LeafNode is updated with a new value. + /// Cares about inputs z[0]. + // TODO: adr-003: Adding authentication circuit with poseidon hash, which is not needed in Verdict but needed here. + // This is because Verdict assumes the downstream application verifies the hashchain themselves. 
+ // We need to be able to prove the validity of the hashchain though, since anybody can post an Update operation. + Update { + old_proof: MerkleProof, + new_proof: MerkleProof, + }, + /// InsertStart proof step ensures that a LeafNode to be inserted does not yet exist in the tree. + /// Cares about inputs z[0]. + InsertStart { + non_membership_proof: NonMembershipProof, + new_leaf: LeafNode, + }, + /// InsertUpdate proof step ensures that: + /// 1. There exists a LeafNode where existing_node.label < new_node.label < existing_node.next + /// 2. The existing_node's next pointer is updated to new_node.label. + /// Cares about inputs z[0] and z[2]. + InsertUpdate { + old_proof: MerkleProof, + new_proof: MerkleProof, + }, + /// InsertEnd proof step ensures that the new_node from the last step is added to the tree. + /// Cares about inputs z[0] and z[1]. + InsertEnd { + old_proof: MerkleProof, + new_proof: MerkleProof, + }, +} + +#[derive(Clone)] +struct MerkleProofStepCircuit { + step_type: UnifiedProofStep, + old_root: Option, + new_root: Option, + proof_path: Vec, + + // Additional fields for non-membership proof + is_non_membership: bool, + missing_node: Option, + _p: PhantomData, +} + +// TODO: these are just here temporarily as I write the circuits, they need to be moved to where the circuit gets instantiated later ////////////////////// + +struct Hash { + hash: indexed_merkle_tree::Hash, + _p: PhantomData, +} + +impl Hash { + pub fn new(hash: indexed_merkle_tree::Hash) -> Self { + Self { + hash, + _p: PhantomData, + } + } + + // uses [`PrimeField::from_u128`] for inspiration. If the field element's capacity is not enough to hold the hash, + pub fn to_scalar(&self) -> Result { + let bytes = self.hash.as_ref(); + + // Convert the 32 bytes to two u128 values + let lower = u128::from_le_bytes(bytes[0..16].try_into()?); + let upper = u128::from_le_bytes(bytes[16..32].try_into()?); + + let mut tmp = Scalar::from_u128(upper); + for _ in 0..128 { + tmp = tmp.double(); + } + Ok(tmp + Scalar::from_u128(lower)) + } +} + +pub fn unpack_and_process(proof: &MerkleProof) -> Result<(Scalar, &Vec)> { + if !proof.path.is_empty() { + let root: Scalar = Hash::new(proof.root_hash).to_scalar()?; + Ok((root, &proof.path)) + } else { + // TODO: This if else makes no sense, can't we just give an empty path and let the circuit handle it? 
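+        // Answering the TODO above (a note on an assumption, not part of the original patch): an empty
+        // path cannot simply be passed through, because recalculate_hash_as_scalar reads path[0] and
+        // would panic on an empty slice, and with no path there is nothing for the circuit to
+        // constrain the allocated root against. Erroring out here keeps synthesize from allocating
+        // an unconstrained root.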
+ Err(anyhow!("Proof path is empty.")) + } +} + +pub fn recalculate_hash_as_scalar(path: &[Node]) -> Result { + let mut current_hash = path[0].get_hash(); + for node in path.iter().skip(1) { + let combined = if node.is_left_sibling() { + [node.get_hash().as_ref(), current_hash.as_ref()].concat() + } else { + [current_hash.as_ref(), node.get_hash().as_ref()].concat() + }; + // TODO: sha256_mod is not generic for scalar, its using the order of bls12_381 + current_hash = sha256_mod(&combined); + } + Hash::new(current_hash).to_scalar() +} +///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +impl StepCircuit for MerkleProofStepCircuit { + fn arity(&self) -> usize { + // z[0] is the old root + // z[1] is the existing node's label + // z[2] is the missing node's label + 3 + } + + fn synthesize>( + &self, + cs: &mut CS, + z: &[AllocatedNum], + ) -> Result>, SynthesisError> { + // ahhhh these probably arent always filled in, need to check if they are None and handle that + let previous_root = &z[0]; + let existing_node_label = &z[1]; + let missing_node_label = &z[2]; + + let mut z_out: Vec> = Vec::new(); + + match self.step_type.clone() { + UnifiedProofStep::Update { + old_proof, + new_proof, + } => { + let vars = self.process_update(cs, &old_proof, &new_proof)?; + let updated_root = vars[1].clone(); + z_out.push(updated_root); + z_out.push(missing_node_label.clone()); + z_out.push(existing_node_label.clone()); + Ok(z_out) + } + UnifiedProofStep::InsertStart { + non_membership_proof, + new_leaf, + } => { + let (non_membership_root, non_membership_path) = + unpack_and_process::(&non_membership_proof.merkle_proof).unwrap(); + // todo: reminder. use push and pop namespace + // let namespace = format!("non-membership for {:?}", non_membership_root); + + // TODO: LessThan gadget + let existing_leaf = non_membership_path.first().unwrap(); + let existing_leaf_label: Scalar = Hash::new(existing_leaf.clone().get_label()) + .to_scalar() + .unwrap(); + // let existing_leaf_next: Scalar = Hash::new(existing_leaf.clone().get_next()) + // .to_scalar() + // .unwrap(); + let new_leaf_label: Scalar = Hash::new(new_leaf.label).to_scalar().unwrap(); + + let allocated_pre_insertion_root = + AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { + Ok(non_membership_root) + })?; + + let recalculated_root = recalculate_hash_as_scalar::(non_membership_path) + .map_err(|_| SynthesisError::Unsatisfiable)?; + + let allocated_recalculated_root = AllocatedNum::alloc( + cs.namespace(|| "recalculated_pre_insertion_root"), + || Ok(recalculated_root), + )?; + + // Enforce that the provided pre-insertion root matches the recalculated root. + // This ensures that the ordered structure of the tree is maintained in the path. 
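+                // R1CS constraints all have the multiplicative form A * B = C, so the equality check
+                // below fixes B to the constant 1: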
+ // (allocated_pre_insertion_root) * (1) = allocated_recalculated_root + cs.enforce( + || "pre_insertion_root_verification", + |lc| lc + allocated_pre_insertion_root.get_variable(), + |lc| lc + CS::one(), + |lc| lc + allocated_recalculated_root.get_variable(), + ); + + // we don't update the root in this operation, so we pass it on + z_out.push(previous_root.clone()); + + // but we do need to allocate for the next Insert step functions + let z1 = AllocatedNum::alloc(cs.namespace(|| "z1"), || Ok(existing_leaf_label))?; + let z2 = AllocatedNum::alloc(cs.namespace(|| "z2"), || Ok(new_leaf_label))?; + z_out.push(z1); + z_out.push(z2); + Ok(z_out) + } + UnifiedProofStep::InsertUpdate { + old_proof, + new_proof, + } => { + let old_element_hash: Scalar = Hash::new(old_proof.path.last().unwrap().get_hash()) + .to_scalar() + .unwrap(); + let old_element_hash_alloc = + AllocatedNum::alloc(cs.namespace(|| format!("TODO")), || Ok(old_element_hash))?; + cs.enforce( + || "z1 equality check pre-proof: NAMESPACE TODO", + |lc| lc + existing_node_label.get_variable(), + |lc| lc + CS::one(), + |lc| lc + old_element_hash_alloc.get_variable(), + ); + // todo: does the hash contain the next value? if so, we shouldnt constrain it to the new proof as below + let new_element_hash: Scalar = Hash::new(new_proof.path.last().unwrap().get_hash()) + .to_scalar() + .unwrap(); + let new_element_hash_alloc = + AllocatedNum::alloc(cs.namespace(|| format!("TODO")), || Ok(new_element_hash))?; + cs.enforce( + || "z1 equality check post-proof: NAMESPACE TODO", + |lc| lc + existing_node_label.get_variable(), + |lc| lc + CS::one(), + |lc| lc + new_element_hash_alloc.get_variable(), + ); + + let vars = self.process_update(cs, &old_proof, &new_proof).unwrap(); + let updated_root = vars[1].clone(); + + z_out.push(updated_root); + z_out.push(missing_node_label.clone()); + z_out.push(existing_node_label.clone()); + Ok(z_out) + } + UnifiedProofStep::InsertEnd { + old_proof, + new_proof, + } => { + let vars = self.process_update(cs, &old_proof, &new_proof)?; + let updated_root = vars[1].clone(); + z_out.push(updated_root); + Ok(z_out) + } + } + } +} + +impl MerkleProofStepCircuit { + fn process_update>( + &self, + cs: &mut CS, + old_proof: &MerkleProof, + new_proof: &MerkleProof, + ) -> Result>, SynthesisError> { + // todo: we should be checking z[0] against old_root, the reason I don't yet here is because idk how to handle the case where this is the first proof step + + // todo: perhaps add a cumulative iterator to z to make it easier to find problems later, + // using intermediate roots as a namespace will cause a bit of searching + let namespace = format!("{:?}->{:?}", old_proof.root_hash, new_proof.root_hash); + + // todo: repalce unwraps when i get a sec + + let (old_root, old_path) = unpack_and_process::(old_proof).unwrap(); + let (updated_root, updated_path) = unpack_and_process::(new_proof).unwrap(); + + let root_with_old_pointer = + AllocatedNum::alloc(cs.namespace(|| format!("old_root: {namespace}")), || { + Ok(old_root) + })?; + + let root_with_new_pointer = + AllocatedNum::alloc(cs.namespace(|| format!("new_root: {namespace}")), || { + Ok(updated_root) + })?; + + let recalculated_old_root = recalculate_hash_as_scalar::(old_path).unwrap(); + let recalculated_updated_root = recalculate_hash_as_scalar::(updated_path).unwrap(); + + let allocated_recalculated_old_root = AllocatedNum::alloc( + cs.namespace(|| format!("recalculated_old_root: {namespace}")), + || Ok(recalculated_old_root), + )?; + let 
allocated_recalculated_updated_root = AllocatedNum::alloc( + cs.namespace(|| format!("recalculated_updated_root: {namespace}")), + || Ok(recalculated_updated_root), + )?; + + cs.enforce( + || format!("old_root update equality: {namespace}"), + |lc| lc + allocated_recalculated_old_root.get_variable(), + |lc| lc + CS::one(), + |lc| lc + root_with_old_pointer.get_variable(), + ); + + cs.enforce( + || format!("new_root update equality: {namespace}"), + |lc| lc + allocated_recalculated_updated_root.get_variable(), + |lc| lc + CS::one(), + |lc| lc + root_with_new_pointer.get_variable(), + ); + + // is this jank or are we fine? + Ok(vec![ + allocated_recalculated_old_root, + allocated_recalculated_updated_root, + ]) + } +} diff --git a/src/nova/mod.rs b/src/nova/mod.rs new file mode 100644 index 00000000..f02defef --- /dev/null +++ b/src/nova/mod.rs @@ -0,0 +1 @@ +pub mod batch; From 2bc7a86d6ba262838fcfb2b3339db4e04f93e373 Mon Sep 17 00:00:00 2001 From: Ryan Date: Sun, 4 Aug 2024 10:58:51 +0200 Subject: [PATCH 02/33] fix: not using struct variants in step enum --- src/nova/batch.rs | 198 +++++++++++++++++++++++++++++++++++----------- src/nova/mod.rs | 117 +++++++++++++++++++++++++++ 2 files changed, 269 insertions(+), 46 deletions(-) diff --git a/src/nova/batch.rs b/src/nova/batch.rs index bc7c1e1c..ddb5a3d5 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -6,58 +6,104 @@ use bellpepper_core::{ use core::marker::PhantomData; use ff::PrimeField; use indexed_merkle_tree::{ - node::LeafNode, - node::Node, + node::{LeafNode, Node}, sha256_mod, - tree::{MerkleProof, NonMembershipProof}, + tree::{InsertProof, MerkleProof, NonMembershipProof, Proof, UpdateProof}, }; use nova_snark::{ provider::{Bn256EngineKZG, GrumpkinEngine}, traits::circuit::StepCircuit, }; -type E1 = Bn256EngineKZG; -type E2 = GrumpkinEngine; +// #[derive(Clone)] +// pub enum UnifiedProofStep { +// Update { +// old_proof: MerkleProof, +// new_proof: MerkleProof, +// }, +// InsertStart { +// non_membership_proof: NonMembershipProof, +// }, +// /// InsertUpdate proof step ensures that: +// /// 1. There exists a LeafNode where existing_node.label < new_node.label < existing_node.next +// /// 2. The existing_node's next pointer is updated to new_node.label. +// /// Cares about inputs z[0] and z[2]. +// InsertUpdate { +// old_proof: MerkleProof, +// new_proof: MerkleProof, +// }, +// InsertEnd { +// old_proof: MerkleProof, +// new_proof: MerkleProof, +// }, +// } #[derive(Clone)] -enum UnifiedProofStep { +pub enum UnifiedProofStep { /// Update proof step ensures that an existing LeafNode is updated with a new value. /// Cares about inputs z[0]. // TODO: adr-003: Adding authentication circuit with poseidon hash, which is not needed in Verdict but needed here. // This is because Verdict assumes the downstream application verifies the hashchain themselves. // We need to be able to prove the validity of the hashchain though, since anybody can post an Update operation. - Update { - old_proof: MerkleProof, - new_proof: MerkleProof, - }, + Update, /// InsertStart proof step ensures that a LeafNode to be inserted does not yet exist in the tree. /// Cares about inputs z[0]. - InsertStart { - non_membership_proof: NonMembershipProof, - new_leaf: LeafNode, - }, + InsertStart, /// InsertUpdate proof step ensures that: /// 1. There exists a LeafNode where existing_node.label < new_node.label < existing_node.next /// 2. The existing_node's next pointer is updated to new_node.label. /// Cares about inputs z[0] and z[2]. 
- InsertUpdate { - old_proof: MerkleProof, - new_proof: MerkleProof, - }, + InsertUpdate, /// InsertEnd proof step ensures that the new_node from the last step is added to the tree. /// Cares about inputs z[0] and z[1]. - InsertEnd { - old_proof: MerkleProof, - new_proof: MerkleProof, - }, + InsertEnd, } +// impl UnifiedProofStep { +// pub fn aggregate(proofs: Vec) -> Vec { +// proofs +// .iter() +// .flat_map(|p| UnifiedProofStep::from_proof(p.clone())) +// .collect() +// } + +// pub fn from_insert_proof(proof: &InsertProof) -> Vec { +// let mut steps: Vec = Vec::new(); +// let non_membership_proof = proof.non_membership_proof.clone(); +// steps.push(UnifiedProofStep::InsertStart { +// non_membership_proof: non_membership_proof.clone(), +// }); +// steps.push(UnifiedProofStep::InsertUpdate { +// old_proof: proof.first_proof.old_proof.clone(), +// new_proof: proof.first_proof.new_proof.clone(), +// }); +// steps.push(UnifiedProofStep::InsertEnd { +// old_proof: proof.second_proof.old_proof.clone(), +// new_proof: proof.second_proof.new_proof.clone(), +// }); +// steps +// } + +// pub fn from_update_proof(proof: &UpdateProof) -> Vec { +// vec![UnifiedProofStep::Update { +// old_proof: proof.old_proof.clone(), +// new_proof: proof.new_proof.clone(), +// }] +// } + +// pub fn from_proof(proof: Proof) -> Vec { +// match proof { +// Proof::Insert(insert_proof) => Self::from_insert_proof(&insert_proof), +// Proof::Update(update_proof) => Self::from_update_proof(&update_proof), +// } +// } +// } + #[derive(Clone)] -struct MerkleProofStepCircuit { +pub struct MerkleProofStepCircuit { step_type: UnifiedProofStep, - old_root: Option, - new_root: Option, - proof_path: Vec, + old_proof: Option, + new_proof: Option, // Additional fields for non-membership proof is_non_membership: bool, @@ -65,9 +111,69 @@ struct MerkleProofStepCircuit { _p: PhantomData, } +impl MerkleProofStepCircuit { + pub fn new( + step: UnifiedProofStep, + old_proof: Option, + new_proof: Option, + is_non_membership: bool, + missing_node: Option, + ) -> Self { + MerkleProofStepCircuit { + step_type: step, + old_proof, + new_proof, + is_non_membership, + missing_node, + _p: PhantomData, + } + } +} + +impl MerkleProofStepCircuit { + pub fn from_proof(proof: Proof) -> Vec { + match proof { + Proof::Insert(insert_proof) => { + vec![ + Self::new( + UnifiedProofStep::InsertStart, + Some(insert_proof.non_membership_proof.merkle_proof.clone()), + None, + true, + Some(insert_proof.non_membership_proof.missing_node), + ), + Self::new( + UnifiedProofStep::InsertUpdate, + Some(insert_proof.first_proof.old_proof), + Some(insert_proof.first_proof.new_proof), + false, + None, + ), + Self::new( + UnifiedProofStep::InsertEnd, + Some(insert_proof.second_proof.old_proof), + Some(insert_proof.second_proof.new_proof), + false, + None, + ), + ] + } + Proof::Update(update_proof) => { + vec![Self::new( + UnifiedProofStep::Update, + Some(update_proof.old_proof), + Some(update_proof.new_proof), + false, + None, + )] + } + } + } +} + // TODO: these are just here temporarily as I write the circuits, they need to be moved to where the circuit gets instantiated later ////////////////////// -struct Hash { +pub struct Hash { hash: indexed_merkle_tree::Hash, _p: PhantomData, } @@ -142,10 +248,10 @@ impl StepCircuit for MerkleProofStepCircuit let mut z_out: Vec> = Vec::new(); match self.step_type.clone() { - UnifiedProofStep::Update { - old_proof, - new_proof, - } => { + UnifiedProofStep::Update => { + let old_proof = self.old_proof.clone().unwrap(); + let new_proof 
= self.new_proof.clone().unwrap(); + let vars = self.process_update(cs, &old_proof, &new_proof)?; let updated_root = vars[1].clone(); z_out.push(updated_root); @@ -153,12 +259,11 @@ impl StepCircuit for MerkleProofStepCircuit z_out.push(existing_node_label.clone()); Ok(z_out) } - UnifiedProofStep::InsertStart { - non_membership_proof, - new_leaf, - } => { + UnifiedProofStep::InsertStart => { + let old_proof = self.old_proof.clone().unwrap(); let (non_membership_root, non_membership_path) = - unpack_and_process::(&non_membership_proof.merkle_proof).unwrap(); + unpack_and_process::(&old_proof).unwrap(); + let new_leaf = self.missing_node.clone().unwrap(); // todo: reminder. use push and pop namespace // let namespace = format!("non-membership for {:?}", non_membership_root); @@ -196,7 +301,7 @@ impl StepCircuit for MerkleProofStepCircuit ); // we don't update the root in this operation, so we pass it on - z_out.push(previous_root.clone()); + z_out.push(allocated_recalculated_root.clone()); // but we do need to allocate for the next Insert step functions let z1 = AllocatedNum::alloc(cs.namespace(|| "z1"), || Ok(existing_leaf_label))?; @@ -205,10 +310,10 @@ impl StepCircuit for MerkleProofStepCircuit z_out.push(z2); Ok(z_out) } - UnifiedProofStep::InsertUpdate { - old_proof, - new_proof, - } => { + UnifiedProofStep::InsertUpdate => { + let old_proof = self.old_proof.clone().unwrap(); + let new_proof = self.new_proof.clone().unwrap(); + let old_element_hash: Scalar = Hash::new(old_proof.path.last().unwrap().get_hash()) .to_scalar() .unwrap(); @@ -241,11 +346,12 @@ impl StepCircuit for MerkleProofStepCircuit z_out.push(existing_node_label.clone()); Ok(z_out) } - UnifiedProofStep::InsertEnd { - old_proof, - new_proof, - } => { - let vars = self.process_update(cs, &old_proof, &new_proof)?; + UnifiedProofStep::InsertEnd => { + let vars = self.process_update( + cs, + &self.old_proof.clone().unwrap(), + &self.new_proof.clone().unwrap(), + )?; let updated_root = vars[1].clone(); z_out.push(updated_root); Ok(z_out) diff --git a/src/nova/mod.rs b/src/nova/mod.rs index f02defef..6334c03e 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -1 +1,118 @@ pub mod batch; + +#[cfg(test)] +mod tests { + use crate::nova::batch::MerkleProofStepCircuit; + use indexed_merkle_tree::{node::Node, sha256_mod, tree::IndexedMerkleTree, tree::Proof}; + use nova_snark::{ + provider::{Bn256EngineKZG, GrumpkinEngine}, + traits::{circuit::TrivialCircuit, snark::default_ck_hint, Engine}, + PublicParams, RecursiveSNARK, + }; + + type E1 = Bn256EngineKZG; + type E2 = GrumpkinEngine; + + type C1 = MerkleProofStepCircuit<::Scalar>; + type C2 = TrivialCircuit<::Scalar>; + + fn create_public_params() -> PublicParams { + let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); + let test_label = sha256_mod(b"test"); + let test_value = sha256_mod(b"value"); + let mut test_node = Node::new_leaf(true, test_label, test_value, Node::TAIL); + + let test_proof = tree.insert_node(&mut test_node).unwrap(); + let test_circuit = MerkleProofStepCircuit::from_proof(Proof::Insert(test_proof))[0].clone(); + + let circuit_primary = test_circuit; + let circuit_secondary = TrivialCircuit::default(); + + PublicParams::::setup( + &circuit_primary, + &circuit_secondary, + &*default_ck_hint(), + &*default_ck_hint(), + ) + .unwrap() + } + + #[test] + fn test_nova() { + let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); + + // create three nodes to insert + let ryan = sha256_mod(b"Ryan"); + let ford = sha256_mod(b"Ford"); + let sebastian = 
sha256_mod(b"Sebastian"); + let pusch = sha256_mod(b"Pusch"); + let ethan = sha256_mod(b"Ethan"); + let triple_zero = sha256_mod(b"000"); + + let mut ryans_node = Node::new_leaf(true, ryan, ford, Node::TAIL); + let mut sebastians_node = Node::new_leaf(true, sebastian, pusch, Node::TAIL); + let mut ethans_node = Node::new_leaf(true, ethan, triple_zero, Node::TAIL); + + // generate proofs for the three nodes + let first_insert_proof = tree.insert_node(&mut ryans_node).unwrap(); + let second_insert_proof = tree.insert_node(&mut sebastians_node).unwrap(); + let third_insert_proof = tree.insert_node(&mut ethans_node).unwrap(); + + // create zkSNARKs for the three proofs + let first_insert_zk_snark = Proof::Insert(first_insert_proof); + let second_insert_zk_snark = Proof::Insert(second_insert_proof); + let third_insert_zk_snark = Proof::Insert(third_insert_proof); + + let proofs = vec![ + first_insert_zk_snark, + second_insert_zk_snark, + third_insert_zk_snark, + ]; + + let circuits: Vec = proofs + .into_iter() + .flat_map(MerkleProofStepCircuit::from_proof) + .collect(); + + println!("Creating public params..."); + let pp = create_public_params(); + println!("Created public params."); + + println!("Creating recursive snark..."); + let initial_primary_inputs = vec![ + ::Scalar::zero(), // initial root + ::Scalar::zero(), // initial existing node label + ::Scalar::zero(), // initial missing node label + ]; + + let (initial_circuit, next_steps) = circuits.split_first().unwrap(); + + let mut recursive_snark: RecursiveSNARK = RecursiveSNARK::new( + &pp, + initial_circuit, + &TrivialCircuit::default(), + &initial_primary_inputs, + &[::Scalar::from(2u64)], + ) + .unwrap(); + println!("Created recursive snark."); + + for (i, circuit) in next_steps.iter().enumerate() { + println!("Added proof {i} to recursive snark"); + recursive_snark.prove_step(&pp, circuit, &TrivialCircuit::default()); + // assert!(res.is_ok()); + + // let res = recursive_snark.verify( + // &pp, + // i + 1, + // &[::Scalar::from(3u64)], + // &[::Scalar::from(2u64)], + // ); + // assert!(res.is_ok()); + } + + // Add assertions to check the final state if needed + // For example, you might want to check if the final root matches the expected value + // assert_eq!(final_root, expected_root); + } +} From 43f23de71fd41e2d24a95b56235ae5b05f20cf4e Mon Sep 17 00:00:00 2001 From: Ryan Date: Sun, 4 Aug 2024 11:00:08 +0200 Subject: [PATCH 03/33] removing dead code --- src/nova/batch.rs | 63 ----------------------------------------------- 1 file changed, 63 deletions(-) diff --git a/src/nova/batch.rs b/src/nova/batch.rs index ddb5a3d5..178984ce 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -15,29 +15,6 @@ use nova_snark::{ traits::circuit::StepCircuit, }; -// #[derive(Clone)] -// pub enum UnifiedProofStep { -// Update { -// old_proof: MerkleProof, -// new_proof: MerkleProof, -// }, -// InsertStart { -// non_membership_proof: NonMembershipProof, -// }, -// /// InsertUpdate proof step ensures that: -// /// 1. There exists a LeafNode where existing_node.label < new_node.label < existing_node.next -// /// 2. The existing_node's next pointer is updated to new_node.label. -// /// Cares about inputs z[0] and z[2]. -// InsertUpdate { -// old_proof: MerkleProof, -// new_proof: MerkleProof, -// }, -// InsertEnd { -// old_proof: MerkleProof, -// new_proof: MerkleProof, -// }, -// } - #[derive(Clone)] pub enum UnifiedProofStep { /// Update proof step ensures that an existing LeafNode is updated with a new value. 
@@ -59,46 +36,6 @@ pub enum UnifiedProofStep { InsertEnd, } -// impl UnifiedProofStep { -// pub fn aggregate(proofs: Vec) -> Vec { -// proofs -// .iter() -// .flat_map(|p| UnifiedProofStep::from_proof(p.clone())) -// .collect() -// } - -// pub fn from_insert_proof(proof: &InsertProof) -> Vec { -// let mut steps: Vec = Vec::new(); -// let non_membership_proof = proof.non_membership_proof.clone(); -// steps.push(UnifiedProofStep::InsertStart { -// non_membership_proof: non_membership_proof.clone(), -// }); -// steps.push(UnifiedProofStep::InsertUpdate { -// old_proof: proof.first_proof.old_proof.clone(), -// new_proof: proof.first_proof.new_proof.clone(), -// }); -// steps.push(UnifiedProofStep::InsertEnd { -// old_proof: proof.second_proof.old_proof.clone(), -// new_proof: proof.second_proof.new_proof.clone(), -// }); -// steps -// } - -// pub fn from_update_proof(proof: &UpdateProof) -> Vec { -// vec![UnifiedProofStep::Update { -// old_proof: proof.old_proof.clone(), -// new_proof: proof.new_proof.clone(), -// }] -// } - -// pub fn from_proof(proof: Proof) -> Vec { -// match proof { -// Proof::Insert(insert_proof) => Self::from_insert_proof(&insert_proof), -// Proof::Update(update_proof) => Self::from_update_proof(&update_proof), -// } -// } -// } - #[derive(Clone)] pub struct MerkleProofStepCircuit { step_type: UnifiedProofStep, From 40a0cb6e4a763f5ecaf5d3bad40fd4a5317068d2 Mon Sep 17 00:00:00 2001 From: Ryan Date: Sun, 4 Aug 2024 16:29:51 +0200 Subject: [PATCH 04/33] fuck this man what am i even doing --- src/nova/batch.rs | 167 +++++++++++++++++++++++----------------------- src/nova/mod.rs | 84 +++++++++++++++-------- 2 files changed, 142 insertions(+), 109 deletions(-) diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 178984ce..74add1fb 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -15,7 +15,7 @@ use nova_snark::{ traits::circuit::StepCircuit, }; -#[derive(Clone)] +#[derive(Clone, Debug)] pub enum UnifiedProofStep { /// Update proof step ensures that an existing LeafNode is updated with a new value. /// Cares about inputs z[0]. 
@@ -38,7 +38,7 @@ pub enum UnifiedProofStep { #[derive(Clone)] pub struct MerkleProofStepCircuit { - step_type: UnifiedProofStep, + pub step_type: UnifiedProofStep, old_proof: Option, new_proof: Option, @@ -77,14 +77,14 @@ impl MerkleProofStepCircuit { Some(insert_proof.non_membership_proof.merkle_proof.clone()), None, true, - Some(insert_proof.non_membership_proof.missing_node), + Some(insert_proof.non_membership_proof.missing_node.clone()), ), Self::new( UnifiedProofStep::InsertUpdate, Some(insert_proof.first_proof.old_proof), Some(insert_proof.first_proof.new_proof), false, - None, + Some(insert_proof.non_membership_proof.missing_node), ), Self::new( UnifiedProofStep::InsertEnd, @@ -166,9 +166,6 @@ pub fn recalculate_hash_as_scalar(path: &[Node]) -> Result StepCircuit for MerkleProofStepCircuit { fn arity(&self) -> usize { - // z[0] is the old root - // z[1] is the existing node's label - // z[2] is the missing node's label 3 } @@ -177,42 +174,74 @@ impl StepCircuit for MerkleProofStepCircuit cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { - // ahhhh these probably arent always filled in, need to check if they are None and handle that - let previous_root = &z[0]; - let existing_node_label = &z[1]; - let missing_node_label = &z[2]; + println!("Step: {:?}", self.step_type); + println!( + "Input z: {:?}", + z.iter().map(|num| num.get_value()).collect::>() + ); + + let previous_root_input = &z[0]; + let existing_node_label_input = &z[1]; + let missing_node_label_input = &z[2]; + + let old_proof = self + .old_proof + .as_ref() + .ok_or(SynthesisError::Unsatisfiable)?; + + let mut new_proof: Option<&MerkleProof> = None; + if !self.is_non_membership { + new_proof = Some( + self.new_proof + .as_ref() + .expect("New proof is missing for non-membership proof."), + ); + } + + let previous_root_alloc = AllocatedNum::alloc(cs.namespace(|| "old root"), || { + Ok(Hash::new(old_proof.root_hash).to_scalar().unwrap()) + }) + .unwrap(); + + cs.enforce( + || "z_0 == old_root", + |lc| lc + previous_root_input.get_variable(), + |lc| lc + CS::one(), + |lc| lc + previous_root_alloc.get_variable(), + ); let mut z_out: Vec> = Vec::new(); - match self.step_type.clone() { + match self.step_type { UnifiedProofStep::Update => { - let old_proof = self.old_proof.clone().unwrap(); - let new_proof = self.new_proof.clone().unwrap(); - - let vars = self.process_update(cs, &old_proof, &new_proof)?; + let new_proof = new_proof.ok_or(SynthesisError::Unsatisfiable)?; + let vars = self.process_update(cs, old_proof, new_proof)?; let updated_root = vars[1].clone(); - z_out.push(updated_root); - z_out.push(missing_node_label.clone()); - z_out.push(existing_node_label.clone()); - Ok(z_out) + z_out.extend_from_slice(&[ + updated_root, + existing_node_label_input.clone(), + missing_node_label_input.clone(), + ]); } UnifiedProofStep::InsertStart => { - let old_proof = self.old_proof.clone().unwrap(); let (non_membership_root, non_membership_path) = - unpack_and_process::(&old_proof).unwrap(); - let new_leaf = self.missing_node.clone().unwrap(); - // todo: reminder. 
use push and pop namespace - // let namespace = format!("non-membership for {:?}", non_membership_root); - - // TODO: LessThan gadget - let existing_leaf = non_membership_path.first().unwrap(); - let existing_leaf_label: Scalar = Hash::new(existing_leaf.clone().get_label()) + unpack_and_process::(old_proof) + .map_err(|_| SynthesisError::Unsatisfiable)?; + + let new_leaf = self + .missing_node + .as_ref() + .ok_or(SynthesisError::Unsatisfiable)?; + + let existing_leaf = non_membership_path + .first() + .ok_or(SynthesisError::Unsatisfiable)?; + let existing_leaf_label: Scalar = Hash::new(existing_leaf.get_label()) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable)?; + let new_leaf_label: Scalar = Hash::new(new_leaf.label) .to_scalar() - .unwrap(); - // let existing_leaf_next: Scalar = Hash::new(existing_leaf.clone().get_next()) - // .to_scalar() - // .unwrap(); - let new_leaf_label: Scalar = Hash::new(new_leaf.label).to_scalar().unwrap(); + .map_err(|_| SynthesisError::Unsatisfiable)?; let allocated_pre_insertion_root = AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { @@ -227,9 +256,6 @@ impl StepCircuit for MerkleProofStepCircuit || Ok(recalculated_root), )?; - // Enforce that the provided pre-insertion root matches the recalculated root. - // This ensures that the ordered structure of the tree is maintained in the path. - // (allocated_pre_insertion_root) * (1) = allocated_recalculated_root cs.enforce( || "pre_insertion_root_verification", |lc| lc + allocated_pre_insertion_root.get_variable(), @@ -237,63 +263,40 @@ impl StepCircuit for MerkleProofStepCircuit |lc| lc + allocated_recalculated_root.get_variable(), ); - // we don't update the root in this operation, so we pass it on - z_out.push(allocated_recalculated_root.clone()); - - // but we do need to allocate for the next Insert step functions let z1 = AllocatedNum::alloc(cs.namespace(|| "z1"), || Ok(existing_leaf_label))?; let z2 = AllocatedNum::alloc(cs.namespace(|| "z2"), || Ok(new_leaf_label))?; - z_out.push(z1); - z_out.push(z2); - Ok(z_out) + z_out.extend_from_slice(&[allocated_pre_insertion_root, z1, z2]); } UnifiedProofStep::InsertUpdate => { - let old_proof = self.old_proof.clone().unwrap(); - let new_proof = self.new_proof.clone().unwrap(); + let new_proof = new_proof.ok_or(SynthesisError::Unsatisfiable)?; - let old_element_hash: Scalar = Hash::new(old_proof.path.last().unwrap().get_hash()) - .to_scalar() - .unwrap(); - let old_element_hash_alloc = - AllocatedNum::alloc(cs.namespace(|| format!("TODO")), || Ok(old_element_hash))?; - cs.enforce( - || "z1 equality check pre-proof: NAMESPACE TODO", - |lc| lc + existing_node_label.get_variable(), - |lc| lc + CS::one(), - |lc| lc + old_element_hash_alloc.get_variable(), - ); - // todo: does the hash contain the next value? 
if so, we shouldnt constrain it to the new proof as below - let new_element_hash: Scalar = Hash::new(new_proof.path.last().unwrap().get_hash()) - .to_scalar() - .unwrap(); - let new_element_hash_alloc = - AllocatedNum::alloc(cs.namespace(|| format!("TODO")), || Ok(new_element_hash))?; - cs.enforce( - || "z1 equality check post-proof: NAMESPACE TODO", - |lc| lc + existing_node_label.get_variable(), - |lc| lc + CS::one(), - |lc| lc + new_element_hash_alloc.get_variable(), - ); - - let vars = self.process_update(cs, &old_proof, &new_proof).unwrap(); + let vars = self.process_update(cs, old_proof, new_proof)?; let updated_root = vars[1].clone(); - z_out.push(updated_root); - z_out.push(missing_node_label.clone()); - z_out.push(existing_node_label.clone()); - Ok(z_out) + z_out.extend_from_slice(&[ + updated_root, + existing_node_label_input.clone(), + missing_node_label_input.clone(), + ]); } UnifiedProofStep::InsertEnd => { - let vars = self.process_update( - cs, - &self.old_proof.clone().unwrap(), - &self.new_proof.clone().unwrap(), - )?; + let new_proof = new_proof.ok_or(SynthesisError::Unsatisfiable)?; + + let vars = self.process_update(cs, old_proof, new_proof)?; let updated_root = vars[1].clone(); - z_out.push(updated_root); - Ok(z_out) + z_out.extend_from_slice(&[ + updated_root, + existing_node_label_input.clone(), + missing_node_label_input.clone(), + ]); } } + + println!( + "Output z_out: {:?}", + z_out.iter().map(|num| num.get_value()).collect::>() + ); + Ok(z_out) } } diff --git a/src/nova/mod.rs b/src/nova/mod.rs index 6334c03e..6faaec86 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -2,7 +2,7 @@ pub mod batch; #[cfg(test)] mod tests { - use crate::nova::batch::MerkleProofStepCircuit; + use crate::nova::batch::{Hash, MerkleProofStepCircuit}; use indexed_merkle_tree::{node::Node, sha256_mod, tree::IndexedMerkleTree, tree::Proof}; use nova_snark::{ provider::{Bn256EngineKZG, GrumpkinEngine}, @@ -40,6 +40,9 @@ mod tests { #[test] fn test_nova() { let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); + let initial_commitment = Hash::new(tree.get_commitment().unwrap()) + .to_scalar() + .unwrap(); // create three nodes to insert let ryan = sha256_mod(b"Ryan"); @@ -78,41 +81,68 @@ mod tests { let pp = create_public_params(); println!("Created public params."); - println!("Creating recursive snark..."); let initial_primary_inputs = vec![ - ::Scalar::zero(), // initial root + initial_commitment, ::Scalar::zero(), // initial existing node label ::Scalar::zero(), // initial missing node label ]; - let (initial_circuit, next_steps) = circuits.split_first().unwrap(); + let secondary_circuit = TrivialCircuit::default(); - let mut recursive_snark: RecursiveSNARK = RecursiveSNARK::new( + println!("Creating recursive snark..."); + let recursive_snark_result = RecursiveSNARK::new( &pp, - initial_circuit, - &TrivialCircuit::default(), + &circuits[0], + &secondary_circuit, &initial_primary_inputs, &[::Scalar::from(2u64)], - ) - .unwrap(); - println!("Created recursive snark."); - - for (i, circuit) in next_steps.iter().enumerate() { - println!("Added proof {i} to recursive snark"); - recursive_snark.prove_step(&pp, circuit, &TrivialCircuit::default()); - // assert!(res.is_ok()); - - // let res = recursive_snark.verify( - // &pp, - // i + 1, - // &[::Scalar::from(3u64)], - // &[::Scalar::from(2u64)], - // ); - // assert!(res.is_ok()); + ); + + let mut z1_scalars = initial_primary_inputs; + let mut z2_scalars = [::Scalar::from(2u64)]; + + match recursive_snark_result { + Ok(mut 
recursive_snark) => { + println!("Created recursive snark successfully."); + + for (i, circuit) in circuits.iter().enumerate() { + if i == 0 { + continue; + } + println!("Step: {i}"); + let prove_result = recursive_snark.prove_step(&pp, circuit, &secondary_circuit); + + match prove_result { + Ok(_) => { + println!("Prove step {i} succeeded"); + } + Err(e) => { + println!("Prove step {i} failed with error: {:?}", e); + panic!("Test failed at prove step {i}"); + } + } + + let verify_result = + recursive_snark.verify(&pp, i + 1, &z1_scalars, &z2_scalars); + + match verify_result { + Ok((z1, z2)) => { + z1_scalars = z1; + // wow thats ugly + z2_scalars = [z2[0]; 1]; + println!("Verify step {i} succeeded") + } + Err(e) => { + println!("Verify step {i} failed with error: {:?}", e); + panic!("Test failed at verify step {i}"); + } + } + } + } + Err(e) => { + println!("Failed to create recursive snark. Error: {:?}", e); + panic!("Test failed during recursive snark creation"); + } } - - // Add assertions to check the final state if needed - // For example, you might want to check if the final root matches the expected value - // assert_eq!(final_root, expected_root); } } From 7dc7200adb45b17a77cea779ea15e78dc5b47667 Mon Sep 17 00:00:00 2001 From: Ryan Date: Thu, 8 Aug 2024 14:19:38 +0200 Subject: [PATCH 05/33] feat: beginning of migration to jmt --- Cargo.lock | 403 ++++++++++++++++++++++++++++++----- Cargo.toml | 7 +- src/common.rs | 66 +++++- src/lib.rs | 1 + src/main.rs | 1 + src/node_types/sequencer.rs | 46 ++-- src/nova/batch.rs | 5 +- src/nova/mod.rs | 326 +++++++++++++++------------- src/nova/utils.rs | 49 +++++ src/storage.rs | 6 +- src/tree/mod.rs | 414 ++++++++++++++++++++++++++++++++++++ 11 files changed, 1087 insertions(+), 237 deletions(-) create mode 100644 src/nova/utils.rs create mode 100644 src/tree/mod.rs diff --git a/Cargo.lock b/Cargo.lock index df9993e2..9e3dbfbf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,23 @@ # It is not intended for manual editing. 
version = 3 +[[package]] +name = "abomonation" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e72913c99b1f927aa7bd59a41518fdd9995f63ffc8760f211609e0241c4fb2" + +[[package]] +name = "abomonation_derive_ng" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34056136e0910411c88e9af0bca6e23fcdba258da239684014cd45ca900c9f0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", +] + [[package]] name = "actix-codec" version = "0.5.2" @@ -238,7 +255,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ "crypto-common", - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -389,6 +406,50 @@ version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +[[package]] +name = "arecibo" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc4ffbbd8b381d6c0a87464d682e1dcfea2bc0519994013267ed283b0cca57af" +dependencies = [ + "abomonation", + "abomonation_derive_ng", + "bellpepper", + "bellpepper-core", + "bincode", + "bitvec", + "byteorder", + "cfg-if 1.0.0", + "digest 0.10.7", + "ff", + "generic-array", + "getrandom 0.2.15", + "group", + "halo2curves", + "itertools 0.12.1", + "neptune", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "once_cell", + "pairing", + "pasta-msm", + "pasta_curves", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_core 0.6.4", + "rayon", + "ref-cast", + "serde", + "sha3", + "subtle", + "tap", + "thiserror", + "tracing", + "tracing-subscriber", + "tracing-texray", +] + [[package]] name = "ark-ff" version = "0.3.0" @@ -662,7 +723,7 @@ dependencies = [ "async-trait", "base64 0.13.1", "bincode", - "blake3", + "blake3 0.3.8", "chrono", "hmac 0.11.0", "log", @@ -912,6 +973,21 @@ dependencies = [ "serde", ] +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "1.3.2" @@ -939,6 +1015,15 @@ dependencies = [ "wyz", ] +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + [[package]] name = "blake2b_simd" version = "1.0.2" @@ -976,13 +1061,26 @@ dependencies = [ "digest 0.9.0", ] +[[package]] +name = "blake3" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210" +dependencies = [ + "arrayref", + "arrayvec 0.7.4", + "cc", + "cfg-if 1.0.0", + "constant_time_eq 0.3.0", +] + [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -991,7 +1089,7 @@ version = "0.10.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -1730,7 +1828,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.7", + "generic-array", "rand_core 0.6.4", "typenum", ] @@ -1753,7 +1851,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array 0.14.7", + "generic-array", "subtle", ] @@ -1763,7 +1861,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" dependencies = [ - "generic-array 0.14.7", + "generic-array", "subtle", ] @@ -1923,7 +2021,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -2432,15 +2530,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "generic-array" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96512db27971c2c3eece70a1e106fbe6c87760234e31e8f7e5634912fe52794a" -dependencies = [ - "typenum", -] - [[package]] name = "getrandom" version = "0.1.16" @@ -2543,14 +2632,13 @@ dependencies = [ [[package]] name = "halo2curves" -version = "0.6.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db81d01d0bbfec9f624d7590fc6929ee2537a64ec1e080d8f8c9e2d2da291405" +checksum = "6d0263c2933ee18bf416552719c5621f677f87acca8d50afe4ee74c81bb8ecca" dependencies = [ "blake2b_simd", "ff", "group", - "hex 0.4.3", "lazy_static", "num-bigint 0.4.6", "num-traits", @@ -2559,7 +2647,6 @@ dependencies = [ "paste", "rand 0.8.5", "rand_core 0.6.4", - "rayon", "serde", "serde_arrays", "static_assertions", @@ -2577,6 +2664,9 @@ name = "hashbrown" version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash", +] [[package]] name = "hashbrown" @@ -2791,6 +2881,25 @@ dependencies = [ "cc", ] +[[package]] +name = "ics23" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18798160736c1e368938ba6967dbcb3c7afb3256b442a5506ba5222eebb68a5a" +dependencies = [ + "anyhow", + "blake2", + "blake3 1.5.3", + "bytes", + "hex 0.4.3", + "informalsystems-pbjson", + "prost 0.12.6", + "ripemd", + "serde", + "sha2 0.10.8", + "sha3", +] + [[package]] name = "idna" version = "0.5.0" @@ -2865,13 +2974,23 @@ dependencies = [ "serde", ] +[[package]] +name = "informalsystems-pbjson" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa4a0980c8379295100d70854354e78df2ee1c6ca0f96ffe89afeb3140e3a3d" +dependencies = [ + "base64 0.21.7", + "serde", +] + [[package]] name = "inout" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -2947,6 +3066,27 @@ version = "1.0.11" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +[[package]] +name = "jmt" +version = "0.10.0" +dependencies = [ + "anyhow", + "borsh", + "digest 0.10.7", + "hashbrown 0.13.2", + "hex 0.4.3", + "ics23", + "itertools 0.10.5", + "mirai-annotations", + "num-derive", + "num-traits", + "parking_lot", + "serde", + "sha2 0.10.8", + "thiserror", + "tracing", +] + [[package]] name = "jobserver" version = "0.1.32" @@ -3356,6 +3496,15 @@ dependencies = [ "value-bag", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "matchit" version = "0.7.3" @@ -3432,6 +3581,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" + [[package]] name = "mockall" version = "0.12.1" @@ -3538,13 +3693,15 @@ version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06626c9ac04c894e9a23d061ba1309f28506cdc5fe64156d28a15fb57fc8e438" dependencies = [ + "abomonation", + "abomonation_derive_ng", "bellpepper", "bellpepper-core", "blake2s_simd", "blstrs", "byteorder", "ff", - "generic-array 0.14.7", + "generic-array", "log", "pasta_curves", "serde", @@ -3595,37 +3752,13 @@ dependencies = [ ] [[package]] -name = "nova-snark" -version = "0.37.0" +name = "nu-ansi-term" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69b80bc67f3e77ad68dec356b5df15e8ce30d8855fc76e92782945a5fa74d6fc" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" dependencies = [ - "bellpepper", - "bellpepper-core", - "bincode", - "bitvec", - "byteorder", - "digest 0.10.7", - "ff", - "generic-array 1.1.0", - "getrandom 0.2.15", - "group", - "halo2curves", - "itertools 0.12.1", - "neptune", - "num-bigint 0.4.6", - "num-integer", - "num-traits", - "once_cell", - "pasta-msm", - "pasta_curves", - "rand_chacha 0.3.1", - "rand_core 0.6.4", - "rayon", - "serde", - "sha3", - "subtle", - "thiserror", + "overload", + "winapi", ] [[package]] @@ -3835,6 +3968,12 @@ version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "pairing" version = "0.23.0" @@ -4232,6 +4371,7 @@ dependencies = [ "actix-web", "ahash", "anyhow", + "arecibo", "async-redis-session", "async-trait", "axum", @@ -4258,16 +4398,17 @@ dependencies = [ "futures", "hex 0.4.3", "indexed-merkle-tree", + "jmt", "jsonrpsee 0.22.5", "keystore-rs", "lazy_static", "log", "mockall", - "nova-snark", "num", "openssl", "pairing", "pretty_env_logger", + "proptest", "pyroscope", "pyroscope_pprofrs", "rand 0.7.3", @@ -4277,6 +4418,7 @@ dependencies = [ "serde", "serde_json", "serial_test", + "sha2 0.10.8", "thiserror", "tokio", "toml", @@ -4343,13 +4485,17 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ + "bit-set", + "bit-vec", "bitflags 2.6.0", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax", + "regex-syntax 0.8.4", + "rusty-fork", + "tempfile", "unarray", ] @@ -4459,6 +4605,12 @@ dependencies = [ "thiserror", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quick-protobuf" version = "0.8.1" @@ -4639,6 +4791,26 @@ dependencies = [ "thiserror", ] +[[package]] +name = "ref-cast" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf0a6f84d5f1d581da8b41b47ec8600871962f2a528115b542b362d4b744931" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", +] + [[package]] name = "regex" version = "1.10.6" @@ -4647,8 +4819,17 @@ checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", ] [[package]] @@ -4659,7 +4840,7 @@ checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.4", ] [[package]] @@ -4668,6 +4849,12 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.8.4" @@ -4733,6 +4920,15 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "ripemd" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" +dependencies = [ + "digest 0.10.7", +] + [[package]] name = "rle-decode-fast" version = "1.0.3" @@ -5002,6 +5198,18 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "ryu" version = "1.0.18" @@ -5304,6 +5512,15 @@ dependencies = [ "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shellexpand" version = "2.1.2" 
@@ -5555,6 +5772,16 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "term_size" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -5596,6 +5823,16 @@ dependencies = [ "syn 2.0.72", ] +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", +] + [[package]] name = "threadpool" version = "1.8.1" @@ -5901,6 +6138,49 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tracing-texray" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07b7943a21ef76920e7250b59946b0068221c323bf1077baab36164477d63efc" +dependencies = [ + "lazy_static", + "parking_lot", + "term_size", + "tracing", + "tracing-subscriber", ] [[package]] @@ -6108,6 +6388,15 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.2.0" diff --git a/Cargo.toml b/Cargo.toml index b4d7e1af..362875d0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -68,9 +68,14 @@ pyroscope_pprofrs = "0.2.7" toml = "0.8.14" dirs = "5.0.1" anyhow = "1.0.44" +jmt = { path = "../jmt", features = [ + "mocks", +] } #{ version = "0.10.0", features = ["mocks"] } bellpepper-core = { version = "0.4.0", default-features = false } bellpepper = { version = "0.4.0", default-features = false } -nova-snark = { version = "0.37.0", default-features = false } +arecibo = { version = "0.1.1", default-features = false } +sha2 = "0.10.8" +proptest = "1.5.0" [dev-dependencies] serial_test = "3.1.1" diff --git a/src/common.rs b/src/common.rs index 8869da49..a0f313f4 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,8 +1,12 @@ +use anyhow::{bail, Result}; use borsh::{BorshDeserialize, BorshSerialize}; use indexed_merkle_tree::{sha256_mod, Hash}; +use jmt::KeyHash; use serde::{Deserialize, Serialize}; use std::fmt::Display; +use crate::tree::Hasher; + #[derive(Clone, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, PartialEq)] // An [`Operation`] represents a state transition in the system. 
 // In a blockchain analogy, this would be the full set of our transaction types.
@@ -56,7 +60,66 @@ impl Display for Operation {
     }
 }
 
-#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
+#[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)]
+pub struct Hashchain {
+    id: String,
+    entries: Vec<HashchainEntry>,
+}
+
+impl Hashchain {
+    pub fn new(id: String) -> Self {
+        Self {
+            id,
+            entries: Vec::new(),
+        }
+    }
+
+    pub fn push(&mut self, operation: Operation) -> Result<Hash> {
+        if let Operation::CreateAccount { .. } = operation {
+            bail!("Cannot CreateAccount on an already existing hashchain");
+        }
+        if operation.id() != self.id {
+            bail!("Operation ID does not match Hashchain ID");
+        }
+
+        let previous_hash = self
+            .entries
+            .last()
+            .map_or(Hash::new([0u8; 32]), |entry| entry.hash);
+
+        let entry = HashchainEntry::new(operation, previous_hash);
+        self.entries.push(entry.clone());
+
+        Ok(entry.hash)
+    }
+
+    // TODO: Obviously, this needs to be authenticated by an existing key.
+    pub fn add(&mut self, value: String) -> Result<Hash> {
+        let operation = Operation::Add {
+            id: self.id.clone(),
+            value,
+        };
+        self.push(operation)
+    }
+
+    pub fn revoke(&mut self, value: String) -> Result<Hash> {
+        let operation = Operation::Revoke {
+            id: self.id.clone(),
+            value,
+        };
+        self.push(operation)
+    }
+
+    pub fn get_keyhash(&self) -> KeyHash {
+        KeyHash::with::<Hasher>(self.id.clone())
+    }
+
+    pub fn len(&self) -> usize {
+        self.entries.len()
+    }
+}
+
+#[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)]
 // A [`HashchainEntry`] represents a single entry in an account's hashchain.
 // The value in the leaf of the corresponding account's node in the IMT is the hash of the last node in the hashchain.
 pub struct HashchainEntry {
@@ -71,6 +134,7 @@ impl HashchainEntry {
             let mut data = Vec::new();
             data.extend_from_slice(operation.to_string().as_bytes());
             data.extend_from_slice(previous_hash.as_ref());
+            // TODO: replace with sha256 after JMT complete
             sha256_mod(&data)
         };
         Self {
diff --git a/src/lib.rs b/src/lib.rs
index 9da4fe92..eab31618 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,6 +7,7 @@ pub mod error;
 pub mod node_types;
 pub mod nova;
 pub mod storage;
+pub mod tree;
 pub mod utils;
 pub mod webserver;
 #[macro_use]
diff --git a/src/main.rs b/src/main.rs
index 3770ab2d..9f52af02 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -7,6 +7,7 @@ pub mod error;
 mod node_types;
 mod nova;
 pub mod storage;
+mod tree;
 mod utils;
 mod webserver;
diff --git a/src/node_types/sequencer.rs b/src/node_types/sequencer.rs
index 7b0b3245..8a5cfa6d 100644
--- a/src/node_types/sequencer.rs
+++ b/src/node_types/sequencer.rs
@@ -2,12 +2,13 @@ use anyhow::{Context, Result};
 use async_trait::async_trait;
 use ed25519::Signature;
 use ed25519_dalek::{Signer, SigningKey};
-use indexed_merkle_tree::{
-    node::Node,
-    sha256_mod,
-    tree::{IndexedMerkleTree, Proof},
-    Hash,
-};
+use jmt::{storage::{TreeReader, TreeWriter}, KeyHash};
+// use indexed_merkle_tree::{
+//     node::Node,
+//     tree::{IndexedMerkleTree, Proof},
+//     Hash,
+// };
+use crate::tree::{hash, Digest, KeyDirectoryTree, Proof};
 use std::{self, str::FromStr, sync::Arc};
 use tokio::{
     sync::{
@@ -24,7 +25,7 @@ use crate::error::DataAvailabilityError;
 use crate::{
     cfg::Config,
     circuits::BatchMerkleProofCircuit,
-    common::{AccountSource, HashchainEntry, Operation},
+    common::{AccountSource, Hashchain, HashchainEntry, Operation},
     consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL},
     da::{DataAvailabilityLayer,
FinalizedEpoch}, error::{DatabaseError, GeneralError}, @@ -33,7 +34,10 @@ use crate::{ webserver::{OperationInput, WebServer}, }; -pub struct Sequencer { +pub struct Sequencer<'a, S> +where + S: 'a + TreeReader + TreeWriter, +{ pub db: Arc, pub da: Arc, pub ws: WebServer, @@ -48,14 +52,17 @@ pub struct Sequencer { // [`pending_operations`] is a buffer for operations that have not yet been // posted to the DA layer. pending_operations: Arc>>, - tree: Arc>, + tree: Arc>>, epoch_buffer_tx: Arc>, epoch_buffer_rx: Arc>>, } #[async_trait] -impl NodeType for Sequencer { +impl<'a, S> NodeType for Sequencer<'a, S> +where + S: 'a + TreeReader + TreeWriter, +{ async fn start(self: Arc) -> Result<()> { self.da.start().await.context("Failed to start DA layer")?; @@ -262,7 +269,7 @@ impl Sequencer { .await } - pub async fn get_commitment(&self) -> Result { + pub async fn get_commitment(&self) -> Result { let tree = self.tree.lock().await; tree.get_commitment().context("Failed to get commitment") } @@ -280,7 +287,7 @@ impl Sequencer { "Failed to get commitment for previous epoch {}", prev_epoch ))?; - Hash::from_hex(&hash_string).context("Failed to parse commitment")? + Digest::from_hex(&hash_string).context("Failed to parse commitment")? } else { self.get_commitment().await? }; @@ -363,12 +370,7 @@ impl Sequencer { .context(format!("Failed to get hashchain for ID {}", id))?; let mut tree = self.tree.lock().await; - let hashed_id = sha256_mod(id.as_bytes()); - - let node = tree.find_leaf_by_label(&hashed_id).context(format!( - "Node with label {} not found in the tree", - hashed_id - ))?; + let hashed_id = hash(id.as_bytes()); let previous_hash = current_chain.last().context("Hashchain is empty")?.hash; @@ -382,12 +384,8 @@ impl Sequencer { node.get_next(), ); - let index = tree.find_node_index(&node).context(format!( - "Node with label {} not found in the tree, but has a hashchain entry", - hashed_id - ))?; - debug!("updating hashchain for user id {}", id.clone()); + self.tree.insert(KeyHash::with(hashed_id), ) self.db .update_hashchain(operation, ¤t_chain) .context(format!( @@ -432,7 +430,7 @@ impl Sequencer { ))?; let mut tree = self.tree.lock().await; - let hashed_id = sha256_mod(id.as_bytes()); + let hashed_id = hash(id.as_bytes()); let mut node = Node::new_leaf(true, hashed_id, new_chain.first().unwrap().hash, Node::TAIL); diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 74add1fb..dce21ae8 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -1,4 +1,5 @@ use anyhow::{anyhow, Result}; +use arecibo::traits::circuit::StepCircuit; use bellpepper_core::{ num::{AllocatedNum, Num}, ConstraintSystem, SynthesisError, @@ -10,10 +11,6 @@ use indexed_merkle_tree::{ sha256_mod, tree::{InsertProof, MerkleProof, NonMembershipProof, Proof, UpdateProof}, }; -use nova_snark::{ - provider::{Bn256EngineKZG, GrumpkinEngine}, - traits::circuit::StepCircuit, -}; #[derive(Clone, Debug)] pub enum UnifiedProofStep { diff --git a/src/nova/mod.rs b/src/nova/mod.rs index 6faaec86..cda7425f 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -1,148 +1,180 @@ pub mod batch; - -#[cfg(test)] -mod tests { - use crate::nova::batch::{Hash, MerkleProofStepCircuit}; - use indexed_merkle_tree::{node::Node, sha256_mod, tree::IndexedMerkleTree, tree::Proof}; - use nova_snark::{ - provider::{Bn256EngineKZG, GrumpkinEngine}, - traits::{circuit::TrivialCircuit, snark::default_ck_hint, Engine}, - PublicParams, RecursiveSNARK, - }; - - type E1 = Bn256EngineKZG; - type E2 = GrumpkinEngine; - - type C1 = 
MerkleProofStepCircuit<::Scalar>; - type C2 = TrivialCircuit<::Scalar>; - - fn create_public_params() -> PublicParams { - let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); - let test_label = sha256_mod(b"test"); - let test_value = sha256_mod(b"value"); - let mut test_node = Node::new_leaf(true, test_label, test_value, Node::TAIL); - - let test_proof = tree.insert_node(&mut test_node).unwrap(); - let test_circuit = MerkleProofStepCircuit::from_proof(Proof::Insert(test_proof))[0].clone(); - - let circuit_primary = test_circuit; - let circuit_secondary = TrivialCircuit::default(); - - PublicParams::::setup( - &circuit_primary, - &circuit_secondary, - &*default_ck_hint(), - &*default_ck_hint(), - ) - .unwrap() - } - - #[test] - fn test_nova() { - let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); - let initial_commitment = Hash::new(tree.get_commitment().unwrap()) - .to_scalar() - .unwrap(); - - // create three nodes to insert - let ryan = sha256_mod(b"Ryan"); - let ford = sha256_mod(b"Ford"); - let sebastian = sha256_mod(b"Sebastian"); - let pusch = sha256_mod(b"Pusch"); - let ethan = sha256_mod(b"Ethan"); - let triple_zero = sha256_mod(b"000"); - - let mut ryans_node = Node::new_leaf(true, ryan, ford, Node::TAIL); - let mut sebastians_node = Node::new_leaf(true, sebastian, pusch, Node::TAIL); - let mut ethans_node = Node::new_leaf(true, ethan, triple_zero, Node::TAIL); - - // generate proofs for the three nodes - let first_insert_proof = tree.insert_node(&mut ryans_node).unwrap(); - let second_insert_proof = tree.insert_node(&mut sebastians_node).unwrap(); - let third_insert_proof = tree.insert_node(&mut ethans_node).unwrap(); - - // create zkSNARKs for the three proofs - let first_insert_zk_snark = Proof::Insert(first_insert_proof); - let second_insert_zk_snark = Proof::Insert(second_insert_proof); - let third_insert_zk_snark = Proof::Insert(third_insert_proof); - - let proofs = vec![ - first_insert_zk_snark, - second_insert_zk_snark, - third_insert_zk_snark, - ]; - - let circuits: Vec = proofs - .into_iter() - .flat_map(MerkleProofStepCircuit::from_proof) - .collect(); - - println!("Creating public params..."); - let pp = create_public_params(); - println!("Created public params."); - - let initial_primary_inputs = vec![ - initial_commitment, - ::Scalar::zero(), // initial existing node label - ::Scalar::zero(), // initial missing node label - ]; - - let secondary_circuit = TrivialCircuit::default(); - - println!("Creating recursive snark..."); - let recursive_snark_result = RecursiveSNARK::new( - &pp, - &circuits[0], - &secondary_circuit, - &initial_primary_inputs, - &[::Scalar::from(2u64)], - ); - - let mut z1_scalars = initial_primary_inputs; - let mut z2_scalars = [::Scalar::from(2u64)]; - - match recursive_snark_result { - Ok(mut recursive_snark) => { - println!("Created recursive snark successfully."); - - for (i, circuit) in circuits.iter().enumerate() { - if i == 0 { - continue; - } - println!("Step: {i}"); - let prove_result = recursive_snark.prove_step(&pp, circuit, &secondary_circuit); - - match prove_result { - Ok(_) => { - println!("Prove step {i} succeeded"); - } - Err(e) => { - println!("Prove step {i} failed with error: {:?}", e); - panic!("Test failed at prove step {i}"); - } - } - - let verify_result = - recursive_snark.verify(&pp, i + 1, &z1_scalars, &z2_scalars); - - match verify_result { - Ok((z1, z2)) => { - z1_scalars = z1; - // wow thats ugly - z2_scalars = [z2[0]; 1]; - println!("Verify step {i} succeeded") - } - Err(e) => { - println!("Verify 
step {i} failed with error: {:?}", e); - panic!("Test failed at verify step {i}"); - } - } - } - } - Err(e) => { - println!("Failed to create recursive snark. Error: {:?}", e); - panic!("Test failed during recursive snark creation"); - } - } - } -} +pub mod utils; + +// #[cfg(test)] +// mod tests { +// use crate::nova::batch::{Hash, MerkleProofStepCircuit, UnifiedProofStep}; +// use arecibo::{ +// provider::{Bn256Engine, GrumpkinEngine}, +// traits::circuit::StepCircuit, +// }; +// use arecibo::{ +// traits::{circuit::TrivialCircuit, snark::default_ck_hint, Engine}, +// PublicParams, RecursiveSNARK, +// }; +// use bellpepper_core::{num::AllocatedNum, test_cs::TestConstraintSystem, ConstraintSystem}; +// use ff::PrimeField; +// use indexed_merkle_tree::{node::Node, sha256_mod, tree::IndexedMerkleTree, tree::Proof}; + +// type E1 = Bn256Engine; +// type E2 = GrumpkinEngine; + +// type C1 = MerkleProofStepCircuit<::Scalar>; +// type C2 = TrivialCircuit<::Scalar>; + +// fn debug_circuit(circuit: &C1, z_in: &[::Scalar]) { +// let mut cs = TestConstraintSystem::<::Scalar>::new(); + +// let z: Vec::Scalar>> = z_in +// .iter() +// .enumerate() +// .map(|(i, &value)| { +// AllocatedNum::alloc(&mut cs.namespace(|| format!("input {}", i)), || Ok(value)) +// .expect("failed to allocate input") +// }) +// .collect(); + +// circuit.synthesize(&mut cs, &z).expect("synthesis failed"); + +// println!("Constraint System:"); +// println!("{}", cs.pretty_print()); + +// if !cs.is_satisfied() { +// println!("Constraint system not satisfied!"); +// for (i, constraint) in cs.which_is_unsatisfied().iter().enumerate() { +// println!("Unsatisfied Constraint {}: {:?}", i, constraint); +// } +// } else { +// println!("All constraints satisfied."); +// } + +// assert!(cs.is_satisfied(), "Constraints not satisfied"); +// } + +// fn create_public_params() -> PublicParams { +// let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); +// let test_label = sha256_mod(b"test"); +// let test_value = sha256_mod(b"value"); +// let mut test_node = Node::new_leaf(true, test_label, test_value, Node::TAIL); + +// let test_proof = tree.insert_node(&mut test_node).unwrap(); +// let test_circuit = MerkleProofStepCircuit::from_proof(Proof::Insert(test_proof))[0].clone(); + +// let circuit_primary = test_circuit; +// let circuit_secondary = TrivialCircuit::default(); + +// PublicParams::::setup( +// &circuit_primary, +// &circuit_secondary, +// &*default_ck_hint(), +// &*default_ck_hint(), +// ) +// .unwrap() +// } + +// #[test] +// fn test_nova() { +// let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); +// let initial_commitment = Hash::new(tree.get_commitment().unwrap()) +// .to_scalar() +// .unwrap(); + +// // create three nodes to insert +// let ryan = sha256_mod(b"Ryan"); +// let ford = sha256_mod(b"Ford"); +// let sebastian = sha256_mod(b"Sebastian"); +// let pusch = sha256_mod(b"Pusch"); +// let ethan = sha256_mod(b"Ethan"); +// let triple_zero = sha256_mod(b"000"); + +// let mut ryans_node = Node::new_leaf(true, ryan, ford, Node::TAIL); +// let mut sebastians_node = Node::new_leaf(true, sebastian, pusch, Node::TAIL); +// let mut ethans_node = Node::new_leaf(true, ethan, triple_zero, Node::TAIL); + +// // generate proofs for the three nodes +// let first_insert_proof = tree.insert_node(&mut ryans_node).unwrap(); +// let second_insert_proof = tree.insert_node(&mut sebastians_node).unwrap(); +// let third_insert_proof = tree.insert_node(&mut ethans_node).unwrap(); + +// // create zkSNARKs for the three proofs +// let 
first_insert_zk_snark = Proof::Insert(first_insert_proof); +// let second_insert_zk_snark = Proof::Insert(second_insert_proof); +// let third_insert_zk_snark = Proof::Insert(third_insert_proof); + +// let proofs = vec![ +// first_insert_zk_snark, +// second_insert_zk_snark, +// third_insert_zk_snark, +// ]; + +// let circuits: Vec = proofs +// .into_iter() +// .flat_map(MerkleProofStepCircuit::from_proof) +// .collect(); + +// println!("Creating public params..."); +// let pp = create_public_params(); +// println!("Created public params."); + +// let initial_primary_inputs = vec![ +// initial_commitment, +// ::Scalar::zero(), // initial existing node label +// ::Scalar::zero(), // initial missing node label +// ]; + +// let secondary_circuit = TrivialCircuit::default(); + +// println!("Creating recursive snark..."); +// let recursive_snark_result = RecursiveSNARK::new( +// &pp, +// &circuits[0], +// &secondary_circuit, +// &initial_primary_inputs, +// &[::Scalar::from(2u64)], +// ); + +// let mut z1_scalars = initial_primary_inputs; +// let mut z2_scalars = [::Scalar::from(2u64)]; + +// match recursive_snark_result { +// Ok(mut recursive_snark) => { +// println!("Created recursive snark successfully."); + +// for (i, circuit) in circuits.iter().enumerate() { +// println!("Step: {i}"); + +// debug_circuit(circuit, &z1_scalars); + +// let prove_result = recursive_snark.prove_step(&pp, circuit, &secondary_circuit); + +// match prove_result { +// Ok(_) => { +// println!("Prove step {i} succeeded"); +// } +// Err(e) => { +// println!("Prove step {i} failed with error: {:?}", e); +// panic!("Test failed at prove step {i}"); +// } +// } + +// let verify_result = +// recursive_snark.verify(&pp, i + 1, &z1_scalars, &z2_scalars); + +// match verify_result { +// Ok(_) => { +// println!("Verify step {i} succeeded") +// } +// Err(e) => { +// println!("Verify step {i} failed with error: {:?}", e); +// panic!("Test failed at verify step {i}"); +// } +// } +// } +// } +// Err(e) => { +// println!("Failed to create recursive snark. 
Error: {:?}", e); +// panic!("Test failed during recursive snark creation"); +// } +// } +// } +// } diff --git a/src/nova/utils.rs b/src/nova/utils.rs new file mode 100644 index 00000000..4ade9c57 --- /dev/null +++ b/src/nova/utils.rs @@ -0,0 +1,49 @@ +// use bellpepper_core::ConstraintSystem; + +// pub(crate) fn prove_update>( +// cs: &mut CS, +// old_root: Scalar, +// old_path: &[Node], +// new_root: Scalar, +// new_path: &[Node], +// ) -> Result { +// let root_with_old_pointer = +// cs.alloc(|| "first update root with old pointer", || Ok(old_root))?; +// let root_with_new_pointer = +// cs.alloc(|| "first update root with new pointer", || Ok(new_root))?; + +// // update the root hash for old and new path +// let recalculated_root_with_old_pointer = +// recalculate_hash_as_scalar(old_path).map_err(|_| SynthesisError::Unsatisfiable)?; +// let recalculated_root_with_new_pointer = +// recalculate_hash_as_scalar(new_path).map_err(|_| SynthesisError::Unsatisfiable)?; + +// let allocated_recalculated_root_with_old_pointer = cs.alloc( +// || "recalculated first update proof old root", +// || Ok(recalculated_root_with_old_pointer), +// )?; +// let allocated_recalculated_root_with_new_pointer = cs.alloc( +// || "recalculated first update proof new root", +// || Ok(recalculated_root_with_new_pointer), +// )?; + +// // Check if the resulting hash is the root hash of the old tree +// // allocated_recalculated_root_with_old_pointer * (1) = root_with_old_pointer +// cs.enforce( +// || "first update old root equality", +// |lc| lc + allocated_recalculated_root_with_old_pointer, +// |lc| lc + CS::one(), +// |lc| lc + root_with_old_pointer, +// ); + +// // Check that the resulting hash is the root hash of the new tree. +// // allocated_recalculated_root_with_new_pointer * (1) = root_with_new_pointer +// cs.enforce( +// || "first update new root equality", +// |lc| lc + allocated_recalculated_root_with_new_pointer, +// |lc| lc + CS::one(), +// |lc| lc + root_with_new_pointer, +// ); + +// Ok(recalculated_root_with_new_pointer) +// } diff --git a/src/storage.rs b/src/storage.rs index b93e864b..8f040330 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -12,7 +12,7 @@ use std::{ use crate::{ cfg::RedisConfig, - common::{HashchainEntry, Operation}, + common::{Hashchain, HashchainEntry, Operation}, error::{DatabaseError, GeneralError, PrismError}, utils::parse_json_to_proof, }; @@ -30,7 +30,7 @@ pub struct RedisConnection { #[automock] pub trait Database: Send + Sync { fn get_keys(&self) -> Result>; - fn get_hashchain(&self, key: &str) -> Result>; + fn get_hashchain(&self, key: &str) -> Result; fn get_commitment(&self, epoch: &u64) -> Result; fn get_proof(&self, id: &str) -> Result; fn get_proofs_in_epoch(&self, epoch: &u64) -> Result>; @@ -105,7 +105,7 @@ impl Database for RedisConnection { Ok(keys.into_iter().map(|k| k.replace("main:", "")).collect()) } - fn get_hashchain(&self, key: &str) -> Result> { + fn get_hashchain(&self, key: &str) -> Result { let mut con = self.lock_connection()?; let value: String = con .get(format!("main:{}", key)) diff --git a/src/tree/mod.rs b/src/tree/mod.rs new file mode 100644 index 00000000..627c1570 --- /dev/null +++ b/src/tree/mod.rs @@ -0,0 +1,414 @@ +use anyhow::{anyhow, bail, ensure, Context, Result}; +use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize}; +use jmt::{ + proof::{SparseMerkleProof, UpdateMerkleProof}, + storage::{Node, NodeBatch, TreeReader, TreeUpdateBatch, TreeWriter}, + KeyHash, RootHash, Sha256Jmt, SimpleHasher, +}; + +use 
crate::common::Hashchain; + +const SPARSE_MERKLE_PLACEHOLDER_HASH: [u8; 32] = *b"SPARSE_MERKLE_PLACEHOLDER_HASH__"; + +pub type Hasher = sha2::Sha256; +pub struct Digest([u8; 32]); + +impl Digest { + pub const fn new(bytes: [u8; 32]) -> Self { + Digest(bytes) + } + + pub fn from_hex(hex_str: &str) -> Result { + let mut bytes = [0u8; 32]; + hex::decode_to_slice(hex_str, &mut bytes) + .map_err(|e| anyhow!(format!("Invalid Format: {e}")))?; + Ok(Digest(bytes)) + } + + #[cfg(feature = "std")] + pub fn to_hex(&self) -> String { + hex::encode(self.0) + } + + #[cfg(not(feature = "std"))] + pub fn to_hex(&self) -> [u8; 64] { + let mut hex = [0u8; 64]; + hex::encode_to_slice(self.0, &mut hex) + .expect("The output is exactly twice the size of the input"); + hex + } +} + +pub fn hash(data: &[u8]) -> Digest { + let mut hasher = sha2::Sha256::new(); + hasher.update(data); + Digest(hasher.finalize()) +} + +#[derive(BorshSerialize, BorshDeserialize, Debug, Clone)] +pub enum Proof { + Update(UpdateProof), + Insert(InsertProof), +} + +#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)] +pub struct NonMembershipProof { + pub root: RootHash, + pub proof: SparseMerkleProof, + pub key: KeyHash, +} + +impl NonMembershipProof { + pub fn verify(&self) -> Result<()> { + self.proof.verify_nonexistence(self.root, self.key) + } +} + +#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)] +pub struct InsertProof { + pub non_membership_proof: NonMembershipProof, + + pub new_root: RootHash, + pub membership_proof: UpdateMerkleProof, + pub value: Hashchain, +} + +impl InsertProof { + pub fn verify(&self) -> Result<()> { + self.non_membership_proof + .verify() + .context("Invalid NonMembershipProof"); + + let value = to_vec(&self.value).unwrap(); + + self.membership_proof.clone().verify_update( + self.non_membership_proof.root, + self.new_root, + vec![(self.non_membership_proof.key, Some(value))], + ); + + Ok(()) + } +} + +#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)] +pub struct UpdateProof { + pub old_root: RootHash, + pub new_root: RootHash, + + pub key: KeyHash, + pub new_value: Hashchain, + + pub proof: UpdateMerkleProof, +} + +impl UpdateProof { + pub fn verify(&self) -> Result<()> { + let new_value = to_vec(&self.new_value).unwrap(); + + self.proof.clone().verify_update( + self.old_root, + self.new_root, + vec![(self.key, Some(new_value))], + ) + } +} + +pub trait SnarkableTree { + fn insert(&mut self, key: KeyHash, value: Hashchain) -> Result; + fn update(&mut self, key: KeyHash, value: Hashchain) -> Result; + fn get(&self, key: KeyHash) -> Result>; +} + +pub struct KeyDirectoryTree<'a, S> +where + S: 'a + TreeReader + TreeWriter, +{ + jmt: Sha256Jmt<'a, S>, + pending_batch: Option, + epoch: u64, +} + +impl<'a, S> KeyDirectoryTree<'a, S> +where + S: 'a + TreeReader + TreeWriter, +{ + pub fn new(store: &'a S) -> Self { + let tree = Self { + jmt: Sha256Jmt::new(store), + pending_batch: None, + epoch: 0, + }; + let (_, batch) = tree + .jmt + .put_value_set(vec![(KeyHash(SPARSE_MERKLE_PLACEHOLDER_HASH), None)], 0) + .unwrap(); + store.write_node_batch(&batch.node_batch).unwrap(); + tree + } + + pub fn get_commitment(&self) -> Result { + let root = self.get_current_root()?; + Ok(Digest(root.0)) + } + + fn queue_batch(&mut self, batch: TreeUpdateBatch) { + match self.pending_batch { + Some(ref mut pending_batch) => pending_batch.merge(batch.node_batch), + None => self.pending_batch = Some(batch.node_batch), + } + } + + pub fn write_batch(&mut self, writer: &'a S) -> Result<()> { + if let 
Some(batch) = self.pending_batch.take() { + writer.write_node_batch(&batch)?; + } + Ok(()) + } + + fn get_current_root(&self) -> Result { + self.jmt + .get_root_hash(self.epoch) + .map_err(|e| anyhow!("Failed to get root hash: {}", e)) + } + + fn serialize_value(value: &Hashchain) -> Result> { + to_vec(value).map_err(|e| anyhow!("Failed to serialize value: {}", e)) + } + + fn deserialize_value(bytes: &[u8]) -> Result { + from_slice::(bytes).map_err(|e| anyhow!("Failed to deserialize value: {}", e)) + } +} + +impl<'a, S> SnarkableTree for KeyDirectoryTree<'a, S> +where + S: TreeReader + TreeWriter, +{ + fn insert(&mut self, key: KeyHash, value: Hashchain) -> Result { + let serialized_value = Self::serialize_value(&value)?; + + let old_root = self.get_current_root()?; + let (old_value, non_membership_merkle_proof) = self.jmt.get_with_proof(key, self.epoch)?; + + let non_membership_proof = NonMembershipProof { + root: old_root, + proof: non_membership_merkle_proof, + key, + }; + + if old_value.is_some() { + bail!("Key already exists"); + } + + let (new_root, membership_proof, tree_update_batch) = self + .jmt + .put_value_set_with_proof(vec![(key, Some(serialized_value))], self.epoch)?; + self.queue_batch(tree_update_batch); + + ensure!( + membership_proof.len() == 1, + "UpdateProof does not span only a single update" + ); + + Ok(InsertProof { + new_root, + value, + non_membership_proof, + membership_proof, + }) + } + + fn update(&mut self, key: KeyHash, value: Hashchain) -> Result { + let serialized_value = Self::serialize_value(&value)?; + + let old_root = self.get_current_root()?; + let (old_value, _) = self.jmt.get_with_proof(key, self.epoch)?; + + if old_value.is_none() { + bail!("Key does not exist"); + } + + let (new_root, proof, tree_update_batch) = self.jmt.put_value_set_with_proof( + vec![(key, Some(serialized_value.clone()))], + self.epoch + 1, + )?; + self.queue_batch(tree_update_batch); + + Ok(UpdateProof { + old_root, + new_root, + key, + new_value: value, + proof, + }) + } + + fn get(&self, key: KeyHash) -> Result> { + let (value, proof) = self.jmt.get_with_proof(key, self.epoch)?; + + match value { + Some(serialized_value) => { + let deserialized_value = Self::deserialize_value(&serialized_value)?; + Ok(Ok(deserialized_value)) + } + None => Ok(Err(NonMembershipProof { + root: self.get_current_root()?, + proof, + key, + })), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use jmt::mock::MockTreeStore; + + #[test] + fn test_insert_and_get() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let hc1 = Hashchain::new("key_1".into()); + let key = hc1.get_keyhash(); + + let insert_proof = tree.insert(key, hc1.clone()); + assert!(insert_proof.is_ok()); + + tree.write_batch(&store).unwrap(); + + let get_result = tree.get(key).unwrap().unwrap(); + + assert_eq!(get_result, hc1); + } + + #[test] + fn test_insert_duplicate_key() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let hc1 = Hashchain::new("key_1".into()); + let key = hc1.get_keyhash(); + + tree.insert(key, hc1.clone()).unwrap(); + tree.write_batch(&store).unwrap(); + + let hc2 = Hashchain::new("key_1".into()); + let result = tree.insert(key, hc2); + assert!(result.is_err()); + } + + #[test] + fn test_update_existing_key() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let mut hc1 = Hashchain::new("key_1".into()); + let key = hc1.get_keyhash(); + + tree.insert(key, 
hc1.clone()).unwrap(); + tree.write_batch(&store).unwrap(); + + hc1.add("new_value".into()).unwrap(); + let update_proof = tree.update(key, hc1.clone()).unwrap(); + assert!(update_proof.verify().is_ok()); + + tree.write_batch(&store).unwrap(); + + let get_result = tree.get(key).unwrap().unwrap(); + assert_eq!(get_result, hc1); + } + + #[test] + fn test_update_non_existing_key() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let hc1 = Hashchain::new("key_1".into()); + let key = hc1.get_keyhash(); + + let result = tree.update(key, hc1); + assert!(result.is_err()); + } + + #[test] + fn test_get_non_existing_key() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let key = KeyHash::with::(b"non_existing_key"); + let result = tree.get(key).unwrap(); + assert!(result.is_err()); + + if let Err(non_membership_proof) = result { + assert!(non_membership_proof.verify().is_ok()); + } + } + + #[test] + fn test_multiple_inserts_and_updates() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let mut hc1 = Hashchain::new("key_1".into()); + let mut hc2 = Hashchain::new("key_2".into()); + let key1 = hc1.get_keyhash(); + let key2 = hc2.get_keyhash(); + + tree.insert(key1, hc1.clone()).unwrap(); + tree.insert(key2, hc2.clone()).unwrap(); + tree.write_batch(&store).unwrap(); + + hc1.add("value1".into()).unwrap(); + hc2.add("value2".into()).unwrap(); + + tree.update(key1, hc1.clone()).unwrap(); + tree.update(key2, hc2.clone()).unwrap(); + tree.write_batch(&store).unwrap(); + + assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); + assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); + } + + #[test] + fn test_root_hash_changes() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let hc1 = Hashchain::new("key_1".into()); + let key1 = hc1.get_keyhash(); + + let root_before = tree.get_current_root().unwrap(); + tree.insert(key1, hc1).unwrap(); + tree.write_batch(&store).unwrap(); + let root_after = tree.get_current_root().unwrap(); + + assert_ne!(root_before, root_after); + } + + #[test] + fn test_batch_writing() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(&store); + + let hc1 = Hashchain::new("key_1".into()); + let hc2 = Hashchain::new("key_2".into()); + let key1 = hc1.get_keyhash(); + let key2 = hc2.get_keyhash(); + + tree.insert(key1, hc1.clone()).unwrap(); + tree.insert(key2, hc2.clone()).unwrap(); + + // Before writing the batch + assert!(tree.get(key1).unwrap().is_err()); + assert!(tree.get(key2).unwrap().is_err()); + + tree.write_batch(&store).unwrap(); + + // After writing the batch + assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); + assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); + } +} From fbb8aa994828631604e511b3fe372b4b2f067ef9 Mon Sep 17 00:00:00 2001 From: Ryan Date: Thu, 8 Aug 2024 23:13:36 +0200 Subject: [PATCH 06/33] refactor: redis as JMT backend (temporarily) --- src/common.rs | 33 +++- src/da/mod.rs | 9 +- src/node_types/lightclient.rs | 14 +- src/node_types/sequencer.rs | 84 ++++----- src/storage.rs | 341 +++++++++++----------------------- src/tree/mod.rs | 39 ++-- src/webserver.rs | 12 +- 7 files changed, 207 insertions(+), 325 deletions(-) diff --git a/src/common.rs b/src/common.rs index a0f313f4..5ab88075 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,11 +1,11 @@ use anyhow::{bail, Result}; use borsh::{BorshDeserialize, BorshSerialize}; -use 
indexed_merkle_tree::{sha256_mod, Hash};
+use indexed_merkle_tree::Hash;
 use jmt::KeyHash;
 use serde::{Deserialize, Serialize};
 use std::fmt::Display;
 
-use crate::tree::Hasher;
+use crate::tree::{hash, Digest, Hasher};
 
 #[derive(Clone, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, PartialEq)]
 // An [`Operation`] represents a state transition in the system.
@@ -74,7 +74,20 @@ impl Hashchain {
         }
     }
 
-    pub fn push(&mut self, operation: Operation) -> Result<Hash> {
+    pub fn create_account(&mut self, value: String, source: AccountSource) -> Result<Digest> {
+        let operation = Operation::CreateAccount {
+            id: self.id.clone(),
+            value,
+            source,
+        };
+        self.push(operation)
+    }
+
+    pub fn get(&self, idx: usize) -> &HashchainEntry {
+        &self.entries[idx]
+    }
+
+    pub fn push(&mut self, operation: Operation) -> Result<Digest> {
         if let Operation::CreateAccount { .. } = operation {
             bail!("Cannot CreateAccount on an already existing hashchain");
         }
@@ -85,7 +98,7 @@ impl Hashchain {
         let previous_hash = self
            .entries
            .last()
-            .map_or(Hash::new([0u8; 32]), |entry| entry.hash);
+            .map_or(Digest::new([0u8; 32]), |entry| entry.hash);
 
         let entry = HashchainEntry::new(operation, previous_hash);
         self.entries.push(entry.clone());
@@ -94,7 +107,7 @@ impl Hashchain {
     }
 
     // TODO: Obviously, this needs to be authenticated by an existing key.
-    pub fn add(&mut self, value: String) -> Result<Hash> {
+    pub fn add(&mut self, value: String) -> Result<Digest> {
         let operation = Operation::Add {
             id: self.id.clone(),
             value,
@@ -102,7 +115,7 @@ impl Hashchain {
         self.push(operation)
     }
 
-    pub fn revoke(&mut self, value: String) -> Result<Hash> {
+    pub fn revoke(&mut self, value: String) -> Result<Digest> {
         let operation = Operation::Revoke {
             id: self.id.clone(),
             value,
@@ -123,19 +136,19 @@ impl Hashchain {
 // A [`HashchainEntry`] represents a single entry in an account's hashchain.
 // The value in the leaf of the corresponding account's node in the IMT is the hash of the last node in the hashchain.
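// Illustrative sketch (not part of this patch): how entries are linked. It assumes the
// `Hashchain` API shown above and that `Digest` implements `PartialEq`/`Debug`.
//
//     let mut chain = Hashchain::new("user@example.com".to_string());
//     let first = chain.add("key_1".to_string()).unwrap();  // entry 0: previous_hash is the all-zero digest
//     let second = chain.add("key_2".to_string()).unwrap(); // entry 1: previous_hash == first
//     assert_eq!(chain.get(0).hash, first);
//     assert_eq!(chain.get(1).previous_hash, first);
//     assert_eq!(chain.get(1).hash, second);
//
// Because every entry commits to the hash of its predecessor, the digest of the last
// entry transitively authenticates the account's entire operation history.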
pub struct HashchainEntry { - pub hash: Hash, - pub previous_hash: Hash, + pub hash: Digest, + pub previous_hash: Digest, pub operation: Operation, } impl HashchainEntry { - pub fn new(operation: Operation, previous_hash: Hash) -> Self { + pub fn new(operation: Operation, previous_hash: Digest) -> Self { let hash = { let mut data = Vec::new(); data.extend_from_slice(operation.to_string().as_bytes()); data.extend_from_slice(previous_hash.as_ref()); // TODO: replace with sha256 after JMT complete - sha256_mod(&data) + hash(&data) }; Self { hash, diff --git a/src/da/mod.rs b/src/da/mod.rs index 3f10be05..39034f18 100644 --- a/src/da/mod.rs +++ b/src/da/mod.rs @@ -2,6 +2,7 @@ use crate::{ circuits::{Bls12Proof, VerifyingKey}, common::Operation, error::GeneralError, + tree::Digest, utils::SignedContent, }; use anyhow::Result; @@ -18,10 +19,10 @@ pub mod memory; #[derive(BorshSerialize, BorshDeserialize, Clone, Debug)] pub struct FinalizedEpoch { pub height: u64, - pub prev_commitment: Hash, - pub current_commitment: Hash, - pub proof: Bls12Proof, - pub verifying_key: VerifyingKey, + pub prev_commitment: Digest, + pub current_commitment: Digest, + // pub proof: Bls12Proof, + // pub verifying_key: VerifyingKey, pub signature: Option, } diff --git a/src/node_types/lightclient.rs b/src/node_types/lightclient.rs index 75a7aeb2..895741aa 100644 --- a/src/node_types/lightclient.rs +++ b/src/node_types/lightclient.rs @@ -80,13 +80,13 @@ impl LightClient { let prev_commitment = &epoch_json.prev_commitment; let current_commitment = &epoch_json.current_commitment; - let proof = match epoch_json.proof.clone().try_into() { - Ok(proof) => proof, - Err(e) => { - error!("failed to deserialize proof, skipping a blob at height {}: {:?}", i, e); - continue; - } - }; + // let proof = match epoch_json.proof.clone().try_into() { + // Ok(proof) => proof, + // Err(e) => { + // error!("failed to deserialize proof, skipping a blob at height {}: {:?}", i, e); + // continue; + // } + // }; // TODO(@distractedm1nd): i don't know rust yet but this seems like non-idiomatic rust - // is there not a Trait that can satisfy these properties for us? diff --git a/src/node_types/sequencer.rs b/src/node_types/sequencer.rs index 8a5cfa6d..3b29b10f 100644 --- a/src/node_types/sequencer.rs +++ b/src/node_types/sequencer.rs @@ -1,14 +1,9 @@ +use crate::tree::{hash, Digest, KeyDirectoryTree, Proof, RedisKDTree}; use anyhow::{Context, Result}; use async_trait::async_trait; use ed25519::Signature; use ed25519_dalek::{Signer, SigningKey}; -use jmt::{storage::{TreeReader, TreeWriter}, KeyHash}; -// use indexed_merkle_tree::{ -// node::Node, -// tree::{IndexedMerkleTree, Proof}, -// Hash, -// }; -use crate::tree::{hash, Digest, KeyDirectoryTree, Proof}; +use jmt::KeyHash; use std::{self, str::FromStr, sync::Arc}; use tokio::{ sync::{ @@ -24,7 +19,6 @@ use crate::error::DataAvailabilityError; use crate::{ cfg::Config, - circuits::BatchMerkleProofCircuit, common::{AccountSource, Hashchain, HashchainEntry, Operation}, consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL}, da::{DataAvailabilityLayer, FinalizedEpoch}, @@ -34,10 +28,7 @@ use crate::{ webserver::{OperationInput, WebServer}, }; -pub struct Sequencer<'a, S> -where - S: 'a + TreeReader + TreeWriter, -{ +pub struct Sequencer<'a> { pub db: Arc, pub da: Arc, pub ws: WebServer, @@ -52,17 +43,14 @@ where // [`pending_operations`] is a buffer for operations that have not yet been // posted to the DA layer. 
pending_operations: Arc>>, - tree: Arc>>, + tree: Arc>>, epoch_buffer_tx: Arc>, epoch_buffer_rx: Arc>>, } #[async_trait] -impl<'a, S> NodeType for Sequencer<'a, S> -where - S: 'a + TreeReader + TreeWriter, -{ +impl<'a> NodeType for Sequencer<'a> { async fn start(self: Arc) -> Result<()> { self.da.start().await.context("Failed to start DA layer")?; @@ -95,13 +83,15 @@ impl Sequencer { let start_height = cfg.celestia_config.unwrap_or_default().start_height; + let tree = KeyDirectoryTree::new(&db.clone()); + Ok(Sequencer { db, da, ws: WebServer::new(ws), key, start_height, - tree: Arc::new(Mutex::new(IndexedMerkleTree::new_with_size(1024).unwrap())), + tree, pending_operations: Arc::new(Mutex::new(Vec::new())), epoch_buffer_tx: Arc::new(tx), epoch_buffer_rx: Arc::new(Mutex::new(rx)), @@ -309,22 +299,22 @@ impl Sequencer { .context("Failed to set new epoch")?; // add the commitment for the operations ran since the last epoch self.db - .add_commitment(&epoch, ¤t_commitment) + .set_commitment(&epoch, ¤t_commitment) .context("Failed to add commitment for new epoch")?; - let batch_circuit = - BatchMerkleProofCircuit::new(&prev_commitment, ¤t_commitment, proofs) - .context("Failed to create BatchMerkleProofCircuit")?; - let (proof, verifying_key) = batch_circuit - .create_and_verify_snark() - .context("Failed to create and verify snark")?; + // let batch_circuit = + // BatchMerkleProofCircuit::new(&prev_commitment, ¤t_commitment, proofs) + // .context("Failed to create BatchMerkleProofCircuit")?; + // let (proof, verifying_key) = batch_circuit + // .create_and_verify_snark() + // .context("Failed to create and verify snark")?; let epoch_json = FinalizedEpoch { height: epoch, prev_commitment, current_commitment, - proof: proof.into(), - verifying_key: verifying_key.into(), + // proof: proof.into(), + // verifying_key: verifying_key.into(), signature: None, }; @@ -377,15 +367,15 @@ impl Sequencer { let new_chain_entry = HashchainEntry::new(operation.clone(), previous_hash); current_chain.push(new_chain_entry.clone()); - let updated_node = Node::new_leaf( - node.is_left_sibling(), - hashed_id, - new_chain_entry.hash, - node.get_next(), - ); + // let updated_node = Node::new_leaf( + // node.is_left_sibling(), + // hashed_id, + // new_chain_entry.hash, + // node.get_next(), + // ); debug!("updating hashchain for user id {}", id.clone()); - self.tree.insert(KeyHash::with(hashed_id), ) + let proof = self.tree.update(KeyHash::with(hashed_id), current_chain)?; self.db .update_hashchain(operation, ¤t_chain) .context(format!( @@ -393,9 +383,7 @@ impl Sequencer { operation ))?; - tree.update_node(index, updated_node) - .map(Proof::Update) - .context("Failed to update node in tree") + proof } Operation::CreateAccount { id, value, source } => { // validation of account source @@ -420,10 +408,11 @@ impl Sequencer { } debug!("creating new hashchain for user id {}", id.clone()); - let new_chain = vec![HashchainEntry::new(operation.clone(), Node::HEAD)]; + let chain = Hashchain::new(id.clone()); + chain.create_account(value.into(), *source); self.db - .update_hashchain(operation, &new_chain) + .update_hashchain(operation, &chain) .context(format!( "Failed to create hashchain for operation {:?}", operation @@ -432,11 +421,7 @@ impl Sequencer { let mut tree = self.tree.lock().await; let hashed_id = hash(id.as_bytes()); - let mut node = - Node::new_leaf(true, hashed_id, new_chain.first().unwrap().hash, Node::TAIL); - tree.insert_node(&mut node) - .map(Proof::Insert) - .context("Failed to insert node into tree") + 
tree.insert(KeyHash::with(hashed_id), chain) } } } @@ -479,7 +464,7 @@ mod tests { } // Helper function to create a test Sequencer instance - async fn create_test_sequencer() -> Arc { + async fn create_test_sequencer() -> Arc> { let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1); let da_layer = Arc::new(da_layer); let db = Arc::new(setup_db()); @@ -578,7 +563,7 @@ mod tests { assert_ne!(prev_commitment, new_commitment); let hashchain = sequencer.db.get_hashchain(id.as_str()); - let value = hashchain.unwrap().first().unwrap().operation.value(); + let value = hashchain.unwrap().get(0).operation.value(); assert_eq!(value, "test"); teardown_db(&db); @@ -696,7 +681,7 @@ mod tests { let hashchain = sequencer.db.get_hashchain("user@example.com").unwrap(); assert_eq!(hashchain.len(), 2); - assert_eq!(hashchain[1].operation.value(), "new_value"); + assert_eq!(hashchain.get(1).operation.value(), "new_value"); } #[tokio::test] @@ -724,7 +709,10 @@ mod tests { let hashchain = sequencer.db.get_hashchain("user@example.com").unwrap(); assert_eq!(hashchain.len(), 2); - assert!(matches!(hashchain[1].operation, Operation::Revoke { .. })); + assert!(matches!( + hashchain.get(1).operation, + Operation::Revoke { .. } + )); } #[tokio::test] diff --git a/src/storage.rs b/src/storage.rs index 8f040330..5aaa270b 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -1,5 +1,9 @@ use anyhow::{anyhow, Result}; -use indexed_merkle_tree::{tree::Proof, Hash}; +use indexed_merkle_tree::Hash; +use jmt::{ + storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter}, + KeyHash, OwnedValue, Version, +}; use mockall::{predicate::*, *}; use redis::{Client, Commands, Connection}; use std::{ @@ -14,14 +18,10 @@ use crate::{ cfg::RedisConfig, common::{Hashchain, HashchainEntry, Operation}, error::{DatabaseError, GeneralError, PrismError}, - utils::parse_json_to_proof, }; // there are different key prefixes for the different tables in the database -// main:key => clear text key with hashchain -// input_order => input order of the hashchain keys // app_state:key => app state (just epoch counter for now) -// merkle_proofs:key => merkle proofs (in the form: epoch_{epochnumber}_{commitment}) // commitments:key => epoch commitments pub struct RedisConnection { connection: Mutex, @@ -29,27 +29,24 @@ pub struct RedisConnection { #[automock] pub trait Database: Send + Sync { - fn get_keys(&self) -> Result>; fn get_hashchain(&self, key: &str) -> Result; - fn get_commitment(&self, epoch: &u64) -> Result; - fn get_proof(&self, id: &str) -> Result; - fn get_proofs_in_epoch(&self, epoch: &u64) -> Result>; - fn get_epoch(&self) -> Result; - fn set_epoch(&self, epoch: &u64) -> Result<()>; fn update_hashchain( &self, incoming_operation: &Operation, value: &[HashchainEntry], ) -> Result<()>; - fn get_epochs(&self) -> Result>; - fn add_merkle_proof( - &self, - epoch: &u64, - epoch_operation: &u64, - commitment: &Hash, - proofs: &str, - ) -> Result<()>; - fn add_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()>; + + fn get_commitment(&self, epoch: &u64) -> Result; + fn set_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()>; + + // fn get_node_option(&self, node_key: &NodeKey) -> Result>; + // fn get_value_option(&self, max_epoch: u64, key_hash: KeyHash) -> Result>; + // fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()>; + + fn get_epoch(&self) -> Result; + fn set_epoch(&self, epoch: &u64) -> Result<()>; + + #[cfg(test)] fn flush_database(&self) -> Result<()>; } @@ -96,15 +93,85 @@ 
impl RedisConnection { } } -impl Database for RedisConnection { - fn get_keys(&self) -> Result> { +impl TreeReader for RedisConnection { + fn get_node_option(&self, node_key: &NodeKey) -> Result> { + let mut con = self.lock_connection()?; + let serialized_key = hex::encode(borsh::to_vec(node_key).unwrap()); + let node_data: Option> = con.get(format!("node:{}", serialized_key))?; + match node_data { + None => return Ok(None), + Some(data) => { + let node: Node = borsh::from_slice::(&data).unwrap(); + Ok(Some(node)) + } + } + } + + fn get_rightmost_leaf(&self) -> Result> { + let mut con = self.lock_connection()?; + let keys: Vec = con.keys("node:*")?; + let mut rightmost: Option<(NodeKey, LeafNode)> = None; + + for key in keys { + let node_data: Vec = con.get(&key)?; + let node: Node = borsh::from_slice::(&node_data)?; + if let Node::Leaf(leaf) = node { + // let node_key = NodeKey::from_str(key.strip_prefix("node:").unwrap())?; + let node_key_bytes = hex::decode(key.strip_prefix("node:").unwrap()).unwrap(); + let node_key = borsh::from_slice::(node_key_bytes.as_ref()).unwrap(); + if rightmost.is_none() || leaf.key_hash() > rightmost.as_ref().unwrap().1.key_hash() + { + rightmost.replace((node_key, leaf)); + } + } + } + + Ok(rightmost) + } + + fn get_value_option( + &self, + max_version: Version, + key_hash: KeyHash, + ) -> Result> { let mut con = self.lock_connection()?; - let keys: Vec = con - .keys("main:*") - .map_err(|_| DatabaseError::KeysError("main".to_string()))?; - Ok(keys.into_iter().map(|k| k.replace("main:", "")).collect()) + let versions: Vec<(Version, OwnedValue)> = con.zrangebyscore_withscores( + format!("value_history:{:?}", key_hash), + 0, + max_version as f64, + )?; + Ok(versions.last().map(|(_, value)| value.clone())) + } +} + +impl TreeWriter for RedisConnection { + fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()> { + let mut con = self.lock_connection()?; + let mut pipe = redis::pipe(); + + for (node_key, node) in node_batch.nodes() { + let node_data = borsh::to_vec(node)?; + pipe.set(format!("node:{:?}", node_key), node_data); + } + + for ((version, key_hash), value) in node_batch.values() { + if let Some(v) = value { + pipe.zadd(format!("value_history:{:?}", key_hash), v, *version as f64); + } else { + pipe.zadd( + format!("value_history:{:?}", key_hash), + Vec::::new(), + *version as f64, + ); + } + } + + pipe.execute(&mut con); + Ok(()) } +} +impl Database for RedisConnection { fn get_hashchain(&self, key: &str) -> Result { let mut con = self.lock_connection()?; let value: String = con @@ -125,40 +192,6 @@ impl Database for RedisConnection { Ok(value.trim_matches('"').to_string()) } - fn get_proof(&self, id: &str) -> Result { - let mut con = self.lock_connection()?; - con.get(format!("merkle_proofs:{}", id)).map_err(|_| { - anyhow!(DatabaseError::NotFoundError(format!( - "Proof with id: {}", - id - ))) - }) - } - - fn get_proofs_in_epoch(&self, epoch: &u64) -> Result> { - let mut con = self.lock_connection()?; - let mut epoch_proofs: Vec = con - .keys::<&String, Vec>(&format!("merkle_proofs:epoch_{}*", epoch)) - .map_err(|_| DatabaseError::NotFoundError(format!("epoch: {}", epoch)))?; - - epoch_proofs.sort_by(|a, b| { - let a_parts: Vec<&str> = a.split('_').collect(); - let b_parts: Vec<&str> = b.split('_').collect(); - let a_number: u64 = a_parts[2].parse().unwrap_or(0); - let b_number: u64 = b_parts[2].parse().unwrap_or(0); - a_number.cmp(&b_number) - }); - - Ok(epoch_proofs - .into_iter() - .filter_map(|proof| { - con.get::<&str, String>(&proof) - 
.ok() - .and_then(|proof_str| parse_json_to_proof(&proof_str).ok()) - }) - .collect()) - } - fn get_epoch(&self) -> Result { let mut con = self.lock_connection()?; con.get("app_state:epoch") @@ -192,48 +225,7 @@ impl Database for RedisConnection { }) } - fn get_epochs(&self) -> Result> { - let mut con = self.lock_connection()?; - con.keys::<&str, Vec>("commitments:*") - .map_err(|_| { - PrismError::Database(DatabaseError::NotFoundError("Commitments".to_string())) - })? - .into_iter() - .map(|epoch| { - epoch - .replace("commitments:epoch_", "") - .parse::() - .map_err(|_| { - anyhow!(GeneralError::ParsingError( - "failed to parse epoch".to_string(), - )) - }) - }) - .collect() - } - - fn add_merkle_proof( - &self, - epoch: &u64, - epoch_operation: &u64, - commitment: &Hash, - proofs: &str, - ) -> Result<()> { - let mut con = self.lock_connection()?; - let formatted_epoch = format!( - "merkle_proofs:epoch_{}_{}_{}", - epoch, epoch_operation, commitment - ); - con.set::<&String, &String, ()>(&formatted_epoch, &proofs.to_string()) - .map_err(|_| { - anyhow!(DatabaseError::WriteError(format!( - "merkle proof for epoch: {}", - formatted_epoch - ))) - }) - } - - fn add_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()> { + fn set_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()> { let mut con = self.lock_connection()?; con.set::<&String, &String, ()>( &format!("commitments:epoch_{}", epoch), @@ -247,6 +239,7 @@ impl Database for RedisConnection { }) } + #[cfg(test)] fn flush_database(&self) -> Result<()> { let mut conn = self.lock_connection()?; redis::cmd("FLUSHALL") @@ -259,7 +252,7 @@ impl Database for RedisConnection { mod tests { use super::*; use crate::common::Operation; - use indexed_merkle_tree::sha256_mod; + use crate::tree::hash; use serde::{Deserialize, Serialize}; use serial_test::serial; @@ -279,8 +272,8 @@ mod tests { fn create_mock_chain_entry() -> HashchainEntry { HashchainEntry { - hash: sha256_mod(b"test_hash"), - previous_hash: sha256_mod(b"test_previous_hash"), + hash: hash(b"test_hash"), + previous_hash: hash(b"test_previous_hash"), operation: Operation::Add { id: "test_id".to_string(), value: "test_value".to_string(), @@ -295,131 +288,6 @@ mod tests { } } - // TESTS FOR fn get_keys(&self) -> Vec - - // TODO: In this context it occurs to me now that we should probably rename the get_keys() function to get_hashchain_keys() or something, because it actually only returns the keys of the hashchain. - // Better yet, there's also the get_derived_keys() function, which returns the derived_dict keys. These are simply the hashed keys. So possibly: get_keys() and get_hashed_keys() ?! 
- // probably not thaaat important - // TODO: get_keys() returns the keys in reverse order - #[test] - #[serial] - fn test_get_keys() { - // set up redis connection and flush database - let redis_connections = setup(); - - let incoming_operation1 = create_add_operation_with_test_value("main:test_key1"); - let incoming_operation2 = create_add_operation_with_test_value("main:test_key2"); - let incoming_operation3 = create_add_operation_with_test_value("main:test_key3"); - - redis_connections - .update_hashchain(&incoming_operation1, &[create_mock_chain_entry()]) - .unwrap(); - redis_connections - .update_hashchain(&incoming_operation2, &[create_mock_chain_entry()]) - .unwrap(); - redis_connections - .update_hashchain(&incoming_operation3, &[create_mock_chain_entry()]) - .unwrap(); - - let mut keys = redis_connections.get_keys().unwrap(); - keys.sort(); - - let expected_keys: Vec = vec![ - "test_key1".to_string(), - "test_key2".to_string(), - "test_key3".to_string(), - ]; - let returned_keys: Vec = keys; - - assert_eq!(expected_keys, returned_keys); - - teardown(&redis_connections); - } - - #[test] - #[serial] - fn test_get_keys_from_empty_dictionary() { - let redis_connections = setup(); - - let keys = redis_connections.get_keys().unwrap(); - - let expected_keys: Vec = vec![]; - let returned_keys: Vec = keys; - - assert_eq!(expected_keys, returned_keys); - - teardown(&redis_connections); - } - - #[test] - #[serial] - #[should_panic(expected = "assertion `left == right` failed")] - fn test_get_too_much_returned_keys() { - let redis_connections = setup(); - - let incoming_operation1 = create_add_operation_with_test_value("test_key_1"); - let incoming_operation2 = create_add_operation_with_test_value("test_key_2"); - let incoming_operation3 = create_add_operation_with_test_value("test_key_3"); - - redis_connections - .update_hashchain(&incoming_operation1, &[create_mock_chain_entry()]) - .unwrap(); - redis_connections - .update_hashchain(&incoming_operation2, &[create_mock_chain_entry()]) - .unwrap(); - redis_connections - .update_hashchain(&incoming_operation3, &[create_mock_chain_entry()]) - .unwrap(); - - let mut keys = redis_connections.get_keys().unwrap(); - - let too_little_keys: Vec = vec!["test_key1".to_string(), "test_key2".to_string()]; - keys.reverse(); - let returned_keys: Vec = keys; - - assert_eq!(too_little_keys, returned_keys); - - teardown(&redis_connections); - } - - #[test] - #[serial] - #[should_panic(expected = "assertion `left == right` failed")] - fn test_get_too_little_returned_keys() { - let redis_connections = setup(); - - let incoming_operation1 = create_add_operation_with_test_value("test_key_1"); - let incoming_operation2 = create_add_operation_with_test_value("test_key_2"); - let incoming_operation3 = create_add_operation_with_test_value("test_key_3"); - - redis_connections - .update_hashchain(&incoming_operation1, &[create_mock_chain_entry()]) - .unwrap(); - redis_connections - .update_hashchain(&incoming_operation2, &[create_mock_chain_entry()]) - .unwrap(); - redis_connections - .update_hashchain(&incoming_operation3, &[create_mock_chain_entry()]) - .unwrap(); - - let mut keys = redis_connections.get_keys().unwrap(); - - let too_little_keys: Vec = vec![ - "test_key1".to_string(), - "test_key2".to_string(), - "test_key3".to_string(), - "test_key4".to_string(), - ]; - keys.reverse(); - let returned_keys: Vec = keys; - - assert_eq!(too_little_keys, returned_keys); - - teardown(&redis_connections); - } - - // TESTS FOR fn get_hashchain(&self, key: &String) -> 
Result, &str> - #[test] #[serial] fn test_get_hashchain() { @@ -435,9 +303,12 @@ mod tests { let hashchain = redis_connections .get_hashchain(&incoming_operation.id()) .unwrap(); - assert_eq!(hashchain[0].hash, chain_entry.hash); - assert_eq!(hashchain[0].previous_hash, chain_entry.previous_hash); - assert_eq!(hashchain[0].operation, chain_entry.operation); + + let first = hashchain.get(0); + + assert_eq!(first.hash, chain_entry.hash); + assert_eq!(first.previous_hash, chain_entry.previous_hash); + assert_eq!(first.operation, chain_entry.operation); teardown(&redis_connections); } @@ -501,8 +372,6 @@ mod tests { teardown(&redis_connection); } - // TESTS FOR fn get_derived_value(&self, key: &String) -> Result - #[test] #[serial] /* @@ -529,7 +398,7 @@ mod tests { let hashchain = redis_connections .get_hashchain(&incoming_operation.id()) .unwrap(); - assert_eq!(hashchain[0].hash, sha256_mod(b"test_hash")); + assert_eq!(hashchain.get(0).hash, hash(b"test_hash")); assert_eq!(hashchain.len(), 1); teardown(&redis_connections); diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 627c1570..05b08b31 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -2,17 +2,35 @@ use anyhow::{anyhow, bail, ensure, Context, Result}; use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize}; use jmt::{ proof::{SparseMerkleProof, UpdateMerkleProof}, - storage::{Node, NodeBatch, TreeReader, TreeUpdateBatch, TreeWriter}, + storage::{NodeBatch, TreeReader, TreeUpdateBatch, TreeWriter}, KeyHash, RootHash, Sha256Jmt, SimpleHasher, }; +use serde::{Deserialize, Serialize}; -use crate::common::Hashchain; +use crate::{common::Hashchain, storage::RedisConnection}; -const SPARSE_MERKLE_PLACEHOLDER_HASH: [u8; 32] = *b"SPARSE_MERKLE_PLACEHOLDER_HASH__"; +pub const SPARSE_MERKLE_PLACEHOLDER_HASH: Digest = + Digest::new(*b"SPARSE_MERKLE_PLACEHOLDER_HASH__"); pub type Hasher = sha2::Sha256; + +#[derive( + Debug, Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, PartialEq, Eq, Copy, +)] pub struct Digest([u8; 32]); +impl AsRef<[u8]> for Digest { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl std::fmt::Display for Digest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.to_hex()) + } +} + impl Digest { pub const fn new(bytes: [u8; 32]) -> Self { Digest(bytes) @@ -25,18 +43,9 @@ impl Digest { Ok(Digest(bytes)) } - #[cfg(feature = "std")] pub fn to_hex(&self) -> String { hex::encode(self.0) } - - #[cfg(not(feature = "std"))] - pub fn to_hex(&self) -> [u8; 64] { - let mut hex = [0u8; 64]; - hex::encode_to_slice(self.0, &mut hex) - .expect("The output is exactly twice the size of the input"); - hex - } } pub fn hash(data: &[u8]) -> Digest { @@ -120,6 +129,8 @@ pub trait SnarkableTree { fn get(&self, key: KeyHash) -> Result>; } +pub type RedisKDTree<'a> = KeyDirectoryTree<'a, RedisConnection>; + pub struct KeyDirectoryTree<'a, S> where S: 'a + TreeReader + TreeWriter, @@ -141,7 +152,7 @@ where }; let (_, batch) = tree .jmt - .put_value_set(vec![(KeyHash(SPARSE_MERKLE_PLACEHOLDER_HASH), None)], 0) + .put_value_set(vec![(KeyHash(SPARSE_MERKLE_PLACEHOLDER_HASH.0), None)], 0) .unwrap(); store.write_node_batch(&batch.node_batch).unwrap(); tree @@ -336,7 +347,7 @@ mod tests { #[test] fn test_get_non_existing_key() { let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let tree = KeyDirectoryTree::new(&store); let key = KeyHash::with::(b"non_existing_key"); let result = tree.get(key).unwrap(); diff --git a/src/webserver.rs 
b/src/webserver.rs index 1ba9bcdc..2f719d8d 100644 --- a/src/webserver.rs +++ b/src/webserver.rs @@ -1,6 +1,6 @@ use crate::{ cfg::WebServerConfig, - common::{HashchainEntry, Operation}, + common::{Hashchain, Operation}, error::GeneralError, node_types::sequencer::Sequencer, utils::{verify_signature, SignedContent}, @@ -92,7 +92,7 @@ pub struct UserKeyRequest { // TODO: Retrieve Merkle proof of current epoch #[derive(Serialize, Deserialize, ToSchema)] pub struct UserKeyResponse { - pub hashchain: Vec, + pub hashchain: Hashchain, // pub proof: MerkleProof } @@ -132,7 +132,7 @@ impl WebServer { Self { cfg } } - pub async fn start(&self, session: Arc) -> Result<()> { + pub async fn start(&self, session: Arc>) -> Result<()> { info!("starting webserver on {}:{}", self.cfg.host, self.cfg.port); let app = Router::new() .route("/update-entry", post(update_entry)) @@ -165,7 +165,7 @@ impl WebServer { ) )] async fn update_entry( - State(session): State>, + State(session): State>>, Json(signature_with_key): Json, ) -> impl IntoResponse { match session.validate_and_queue_update(&signature_with_key).await { @@ -196,7 +196,7 @@ async fn update_entry( ) )] async fn get_hashchain( - State(session): State>, + State(session): State>>, Json(request): Json, ) -> impl IntoResponse { match session.db.get_hashchain(&request.id) { @@ -219,7 +219,7 @@ async fn get_hashchain( (status = 500, description = "Internal server error") ) )] -async fn get_commitment(State(session): State>) -> impl IntoResponse { +async fn get_commitment(State(session): State>>) -> impl IntoResponse { match session.get_commitment().await { Ok(commitment) => (StatusCode::OK, Json(commitment)).into_response(), Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), From 8ee6bd4c546ab17da2c7f4076f89963e85df4c4e Mon Sep 17 00:00:00 2001 From: Ryan Date: Sun, 11 Aug 2024 22:48:01 +0200 Subject: [PATCH 07/33] progress on JMT migration: removing lifetime annotations to own TreeReader --- Cargo.lock | 2 + Cargo.toml | 1 + src/node_types/sequencer.rs | 33 +++++++++-------- src/storage.rs | 16 ++++---- src/tree/mod.rs | 74 +++++++++++++++++++------------------ 5 files changed, 66 insertions(+), 60 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9e3dbfbf..ef0ad0f2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3071,6 +3071,7 @@ name = "jmt" version = "0.10.0" dependencies = [ "anyhow", + "auto_impl", "borsh", "digest 0.10.7", "hashbrown 0.13.2", @@ -4374,6 +4375,7 @@ dependencies = [ "arecibo", "async-redis-session", "async-trait", + "auto_impl", "axum", "base64 0.22.1", "bellman", diff --git a/Cargo.toml b/Cargo.toml index 362875d0..75b9393a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,6 +76,7 @@ bellpepper = { version = "0.4.0", default-features = false } arecibo = { version = "0.1.1", default-features = false } sha2 = "0.10.8" proptest = "1.5.0" +auto_impl = "1.2.0" [dev-dependencies] serial_test = "3.1.1" diff --git a/src/node_types/sequencer.rs b/src/node_types/sequencer.rs index 3b29b10f..8ee89c97 100644 --- a/src/node_types/sequencer.rs +++ b/src/node_types/sequencer.rs @@ -1,4 +1,7 @@ -use crate::tree::{hash, Digest, KeyDirectoryTree, Proof, RedisKDTree}; +use crate::{ + storage::RedisConnection, + tree::{hash, Digest, KeyDirectory, KeyDirectoryTree, Proof}, +}; use anyhow::{Context, Result}; use async_trait::async_trait; use ed25519::Signature; @@ -28,7 +31,7 @@ use crate::{ webserver::{OperationInput, WebServer}, }; -pub struct Sequencer<'a> { +pub struct Sequencer { pub db: Arc, pub da: Arc, pub ws: WebServer, 
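The hunks around this point replace the lifetime-bound tree (which borrowed its store for 'a) with one that owns an Arc handle to its store, so Sequencer can drop its lifetime parameter. The following standalone sketch is not part of this patch and uses illustrative names only (StoreRead, StoreWrite, MemoryStore, OwnedTree are hypothetical stand-ins): it shows the ownership pattern in isolation, where a blanket impl lets Arc<S> be used wherever S implements the store traits — the effect the #[auto_impl(&, Box, Arc)] attribute later gives the Database trait — so the tree can hold Arc<S> instead of &'a S.

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Illustrative stand-ins for the reader/writer roles of the real store traits.
trait StoreRead: Send + Sync {
    fn get(&self, key: &str) -> Option<Vec<u8>>;
}
trait StoreWrite: Send + Sync {
    fn put(&self, key: &str, value: Vec<u8>);
}

// Blanket impls so Arc<S> satisfies the traits whenever S does.
impl<S: StoreRead + ?Sized> StoreRead for Arc<S> {
    fn get(&self, key: &str) -> Option<Vec<u8>> {
        (**self).get(key)
    }
}
impl<S: StoreWrite + ?Sized> StoreWrite for Arc<S> {
    fn put(&self, key: &str, value: Vec<u8>) {
        (**self).put(key, value)
    }
}

// A toy in-memory store playing the role of the Redis-backed store.
#[derive(Default)]
struct MemoryStore {
    map: Mutex<HashMap<String, Vec<u8>>>,
}
impl StoreRead for MemoryStore {
    fn get(&self, key: &str) -> Option<Vec<u8>> {
        self.map.lock().unwrap().get(key).cloned()
    }
}
impl StoreWrite for MemoryStore {
    fn put(&self, key: &str, value: Vec<u8>) {
        self.map.lock().unwrap().insert(key.to_owned(), value);
    }
}

// The tree owns a reference-counted handle instead of a borrowed &'a S,
// so it needs no lifetime parameter and can be shared freely.
struct OwnedTree<S: StoreRead + StoreWrite> {
    store: Arc<S>,
}
impl<S: StoreRead + StoreWrite> OwnedTree<S> {
    fn new(store: Arc<S>) -> Self {
        Self { store }
    }
    fn commit(&self, key: &str, value: Vec<u8>) {
        self.store.put(key, value);
    }
}

fn main() {
    let store = Arc::new(MemoryStore::default());
    // The node and the tree can hold the same store without lifetimes.
    let tree = OwnedTree::new(store.clone());
    tree.commit("root", vec![1, 2, 3]);
    assert_eq!(store.get("root"), Some(vec![1u8, 2, 3]));
}

Sharing the store through a cloned Arc is what the hunks below move toward: the Sequencer, the webserver session, and the KeyDirectoryTree can all hold the same RedisConnection handle without being tied together by lifetimes.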
@@ -43,14 +46,14 @@ pub struct Sequencer<'a> { // [`pending_operations`] is a buffer for operations that have not yet been // posted to the DA layer. pending_operations: Arc>>, - tree: Arc>>, + tree: Arc>>>, epoch_buffer_tx: Arc>, epoch_buffer_rx: Arc>>, } #[async_trait] -impl<'a> NodeType for Sequencer<'a> { +impl NodeType for Sequencer { async fn start(self: Arc) -> Result<()> { self.da.start().await.context("Failed to start DA layer")?; @@ -72,21 +75,20 @@ impl<'a> NodeType for Sequencer<'a> { impl Sequencer { pub fn new( - db: Arc, + db: Arc>, da: Arc, cfg: Config, key: SigningKey, ) -> Result { let (tx, rx) = channel(CHANNEL_BUFFER_SIZE); - let ws = cfg.webserver.context("Missing webserver configuration")?; - let start_height = cfg.celestia_config.unwrap_or_default().start_height; - let tree = KeyDirectoryTree::new(&db.clone()); + // Create the KeyDirectory + let tree = Arc::new(Mutex::new(KeyDirectoryTree::new(db.clone()))); Ok(Sequencer { - db, + db: db.clone(), da, ws: WebServer::new(ws), key, @@ -100,13 +102,14 @@ impl Sequencer { // sync_loop is responsible for downloading operations from the DA layer async fn sync_loop(self: Arc) -> Result<(), tokio::task::JoinError> { + let self_clone = self.clone(); info!("starting operation sync loop"); let epoch_buffer = self.epoch_buffer_tx.clone(); spawn(async move { - let mut current_position = self.start_height; + let mut current_position = self_clone.start_height; loop { // target is updated when a new header is received - let target = match self.da.get_latest_height().await { + let target = match self_clone.da.get_latest_height().await { Ok(target) => target, Err(e) => { error!("failed to update sync target, retrying: {:?}", e); @@ -121,7 +124,7 @@ impl Sequencer { debug!("updated sync target to height {}", target); while current_position < target { trace!("processing height: {}", current_position); - match self.da.get_operations(current_position + 1).await { + match self_clone.da.get_operations(current_position + 1).await { Ok(operations) => { if !operations.is_empty() { debug!( @@ -130,7 +133,7 @@ impl Sequencer { ); } - let epoch = match self.finalize_epoch(operations).await { + let epoch = match self_clone.finalize_epoch(operations).await { Ok(e) => e, Err(e) => { error!("sequencer_loop: finalizing epoch: {}", e); @@ -452,7 +455,7 @@ mod tests { use serial_test::serial; // set up redis connection and flush database before each test - fn setup_db() -> RedisConnection { + fn setup_db<'a>() -> RedisConnection { let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap(); redis_connection.flush_database().unwrap(); redis_connection @@ -464,7 +467,7 @@ mod tests { } // Helper function to create a test Sequencer instance - async fn create_test_sequencer() -> Arc> { + async fn create_test_sequencer() -> Arc { let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1); let da_layer = Arc::new(da_layer); let db = Arc::new(setup_db()); diff --git a/src/storage.rs b/src/storage.rs index 5aaa270b..4b3743f1 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -1,15 +1,16 @@ use anyhow::{anyhow, Result}; +use auto_impl::auto_impl; use indexed_merkle_tree::Hash; use jmt::{ storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter}, KeyHash, OwnedValue, Version, }; -use mockall::{predicate::*, *}; +use mockall::predicate::*; use redis::{Client, Commands, Connection}; use std::{ self, process::Command, - sync::{Mutex, MutexGuard}, + sync::{Arc, Mutex, MutexGuard}, thread::sleep, time::Duration, }; @@ -27,8 +28,8 
@@ pub struct RedisConnection { connection: Mutex, } -#[automock] -pub trait Database: Send + Sync { +#[auto_impl(&, Box, Arc)] +pub trait Database: Send + Sync + TreeReader + TreeWriter { fn get_hashchain(&self, key: &str) -> Result; fn update_hashchain( &self, @@ -39,10 +40,6 @@ pub trait Database: Send + Sync { fn get_commitment(&self, epoch: &u64) -> Result; fn set_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()>; - // fn get_node_option(&self, node_key: &NodeKey) -> Result>; - // fn get_value_option(&self, max_epoch: u64, key_hash: KeyHash) -> Result>; - // fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()>; - fn get_epoch(&self) -> Result; fn set_epoch(&self, epoch: &u64) -> Result<()>; @@ -252,6 +249,7 @@ impl Database for RedisConnection { mod tests { use super::*; use crate::common::Operation; + use crate::storage::Database; use crate::tree::hash; use serde::{Deserialize, Serialize}; use serial_test::serial; @@ -259,7 +257,7 @@ mod tests { // Helper functions // set up redis connection and flush database before each test - fn setup() -> RedisConnection { + fn setup<'a>() -> RedisConnection<'a> { let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap(); redis_connection.flush_database().unwrap(); redis_connection diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 05b08b31..1eb512f5 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -1,13 +1,15 @@ +use crate::storage::RedisConnection; use anyhow::{anyhow, bail, ensure, Context, Result}; use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize}; use jmt::{ proof::{SparseMerkleProof, UpdateMerkleProof}, storage::{NodeBatch, TreeReader, TreeUpdateBatch, TreeWriter}, - KeyHash, RootHash, Sha256Jmt, SimpleHasher, + JellyfishMerkleTree, KeyHash, RootHash, Sha256Jmt, SimpleHasher, }; use serde::{Deserialize, Serialize}; +use std::sync::Arc; -use crate::{common::Hashchain, storage::RedisConnection}; +use crate::{common::Hashchain, storage::Database}; pub const SPARSE_MERKLE_PLACEHOLDER_HASH: Digest = Digest::new(*b"SPARSE_MERKLE_PLACEHOLDER_HASH__"); @@ -129,23 +131,23 @@ pub trait SnarkableTree { fn get(&self, key: KeyHash) -> Result>; } -pub type RedisKDTree<'a> = KeyDirectoryTree<'a, RedisConnection>; - -pub struct KeyDirectoryTree<'a, S> +pub struct KeyDirectoryTree where - S: 'a + TreeReader + TreeWriter, + S: TreeReader + TreeWriter, { - jmt: Sha256Jmt<'a, S>, + jmt: JellyfishMerkleTree, Hasher>, pending_batch: Option, epoch: u64, + db: Arc, } -impl<'a, S> KeyDirectoryTree<'a, S> +impl KeyDirectoryTree where - S: 'a + TreeReader + TreeWriter, + S: TreeReader + TreeWriter, { - pub fn new(store: &'a S) -> Self { + pub fn new(store: Arc) -> Self { let tree = Self { + db: store.clone(), jmt: Sha256Jmt::new(store), pending_batch: None, epoch: 0, @@ -154,7 +156,7 @@ where .jmt .put_value_set(vec![(KeyHash(SPARSE_MERKLE_PLACEHOLDER_HASH.0), None)], 0) .unwrap(); - store.write_node_batch(&batch.node_batch).unwrap(); + tree.db.write_node_batch(&batch.node_batch).unwrap(); tree } @@ -170,9 +172,9 @@ where } } - pub fn write_batch(&mut self, writer: &'a S) -> Result<()> { + pub fn write_batch(&mut self) -> Result<()> { if let Some(batch) = self.pending_batch.take() { - writer.write_node_batch(&batch)?; + self.db.write_node_batch(&batch)?; } Ok(()) } @@ -192,7 +194,7 @@ where } } -impl<'a, S> SnarkableTree for KeyDirectoryTree<'a, S> +impl SnarkableTree for KeyDirectoryTree where S: TreeReader + TreeWriter, { @@ -279,8 +281,8 @@ mod tests { #[test] fn test_insert_and_get() { - let 
store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); let hc1 = Hashchain::new("key_1".into()); let key = hc1.get_keyhash(); @@ -288,7 +290,7 @@ mod tests { let insert_proof = tree.insert(key, hc1.clone()); assert!(insert_proof.is_ok()); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); let get_result = tree.get(key).unwrap().unwrap(); @@ -297,14 +299,14 @@ mod tests { #[test] fn test_insert_duplicate_key() { - let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); let hc1 = Hashchain::new("key_1".into()); let key = hc1.get_keyhash(); tree.insert(key, hc1.clone()).unwrap(); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); let hc2 = Hashchain::new("key_1".into()); let result = tree.insert(key, hc2); @@ -313,20 +315,20 @@ mod tests { #[test] fn test_update_existing_key() { - let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); let mut hc1 = Hashchain::new("key_1".into()); let key = hc1.get_keyhash(); tree.insert(key, hc1.clone()).unwrap(); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); hc1.add("new_value".into()).unwrap(); let update_proof = tree.update(key, hc1.clone()).unwrap(); assert!(update_proof.verify().is_ok()); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); let get_result = tree.get(key).unwrap().unwrap(); assert_eq!(get_result, hc1); @@ -334,8 +336,8 @@ mod tests { #[test] fn test_update_non_existing_key() { - let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); let hc1 = Hashchain::new("key_1".into()); let key = hc1.get_keyhash(); @@ -347,7 +349,7 @@ mod tests { #[test] fn test_get_non_existing_key() { let store = MockTreeStore::default(); - let tree = KeyDirectoryTree::new(&store); + let mut tree = KeyDirectoryTree::new(store); let key = KeyHash::with::(b"non_existing_key"); let result = tree.get(key).unwrap(); @@ -361,7 +363,7 @@ mod tests { #[test] fn test_multiple_inserts_and_updates() { let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let mut tree = KeyDirectoryTree::new(store); let mut hc1 = Hashchain::new("key_1".into()); let mut hc2 = Hashchain::new("key_2".into()); @@ -370,14 +372,14 @@ mod tests { tree.insert(key1, hc1.clone()).unwrap(); tree.insert(key2, hc2.clone()).unwrap(); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); hc1.add("value1".into()).unwrap(); hc2.add("value2".into()).unwrap(); tree.update(key1, hc1.clone()).unwrap(); tree.update(key2, hc2.clone()).unwrap(); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); @@ -385,15 +387,15 @@ mod tests { #[test] fn test_root_hash_changes() { - let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); let hc1 = Hashchain::new("key_1".into()); let key1 = hc1.get_keyhash(); let root_before = tree.get_current_root().unwrap(); tree.insert(key1, 
hc1).unwrap(); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); let root_after = tree.get_current_root().unwrap(); assert_ne!(root_before, root_after); @@ -401,8 +403,8 @@ mod tests { #[test] fn test_batch_writing() { - let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(&store); + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); let hc1 = Hashchain::new("key_1".into()); let hc2 = Hashchain::new("key_2".into()); @@ -416,7 +418,7 @@ mod tests { assert!(tree.get(key1).unwrap().is_err()); assert!(tree.get(key2).unwrap().is_err()); - tree.write_batch(&store).unwrap(); + tree.write_batch().unwrap(); // After writing the batch assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); From 7f9b30c5f7e6e8eecddb21235fc52cc380c298eb Mon Sep 17 00:00:00 2001 From: Ryan Date: Sun, 11 Aug 2024 23:06:39 +0200 Subject: [PATCH 08/33] build errors --- src/common.rs | 50 +++++++++++++++++++++++++++++++++++ src/node_types/lightclient.rs | 22 ++++++++------- src/node_types/sequencer.rs | 37 ++++++++++++++------------ src/storage.rs | 7 ++--- 4 files changed, 87 insertions(+), 29 deletions(-) diff --git a/src/common.rs b/src/common.rs index 5ab88075..0848929e 100644 --- a/src/common.rs +++ b/src/common.rs @@ -4,6 +4,7 @@ use indexed_merkle_tree::Hash; use jmt::KeyHash; use serde::{Deserialize, Serialize}; use std::fmt::Display; +use std::ops::{Deref, DerefMut}; use crate::tree::{hash, Digest, Hasher}; @@ -66,6 +67,47 @@ pub struct Hashchain { entries: Vec, } +impl IntoIterator for Hashchain { + type Item = HashchainEntry; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.entries.into_iter() + } +} + +impl<'a> IntoIterator for &'a Hashchain { + type Item = &'a HashchainEntry; + type IntoIter = std::slice::Iter<'a, HashchainEntry>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.iter() + } +} + +impl<'a> IntoIterator for &'a mut Hashchain { + type Item = &'a mut HashchainEntry; + type IntoIter = std::slice::IterMut<'a, HashchainEntry>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.iter_mut() + } +} + +impl Deref for Hashchain { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.entries + } +} + +impl DerefMut for Hashchain { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.entries + } +} + impl Hashchain { pub fn new(id: String) -> Self { Self { @@ -74,6 +116,14 @@ impl Hashchain { } } + pub fn iter(&self) -> std::slice::Iter<'_, HashchainEntry> { + self.entries.iter() + } + + pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, HashchainEntry> { + self.entries.iter_mut() + } + pub fn create_account(&mut self, value: String, source: AccountSource) -> Result { let operation = Operation::CreateAccount { id: self.id.clone(), diff --git a/src/node_types/lightclient.rs b/src/node_types/lightclient.rs index 895741aa..45ef686a 100644 --- a/src/node_types/lightclient.rs +++ b/src/node_types/lightclient.rs @@ -90,13 +90,13 @@ impl LightClient { // TODO(@distractedm1nd): i don't know rust yet but this seems like non-idiomatic rust - // is there not a Trait that can satisfy these properties for us? 
- let verifying_key = match epoch_json.verifying_key.clone().try_into() { - Ok(vk) => vk, - Err(e) => { - error!("failed to deserialize verifying key, skipping a blob at height {}: {:?}", i, e); - continue; - } - }; + // let verifying_key = match epoch_json.verifying_key.clone().try_into() { + // Ok(vk) => vk, + // Err(e) => { + // error!("failed to deserialize verifying key, skipping a blob at height {}: {:?}", i, e); + // continue; + // } + // }; // if the user does not add a verifying key, we will not verify the signature, // but only log a warning on startup @@ -105,7 +105,10 @@ impl LightClient { &epoch_json.clone(), self.verifying_key.clone(), ) { - Ok(_) => trace!("valid signature for epoch {}", epoch_json.height), + Ok(_) => trace!( + "valid signature for epoch {}", + epoch_json.height + ), Err(e) => { panic!("invalid signature in epoch {}: {:?}", i, e) } @@ -136,6 +139,7 @@ impl LightClient { ticker.tick().await; // only for testing purposes current_position = target; // Update the current position to the latest target } - }).await + }) + .await } } diff --git a/src/node_types/sequencer.rs b/src/node_types/sequencer.rs index 8ee89c97..5047ed70 100644 --- a/src/node_types/sequencer.rs +++ b/src/node_types/sequencer.rs @@ -1,6 +1,6 @@ use crate::{ storage::RedisConnection, - tree::{hash, Digest, KeyDirectory, KeyDirectoryTree, Proof}, + tree::{hash, Digest, Hasher, KeyDirectoryTree, Proof, SnarkableTree}, }; use anyhow::{Context, Result}; use async_trait::async_trait; @@ -368,7 +368,7 @@ impl Sequencer { let previous_hash = current_chain.last().context("Hashchain is empty")?.hash; let new_chain_entry = HashchainEntry::new(operation.clone(), previous_hash); - current_chain.push(new_chain_entry.clone()); + current_chain.push(new_chain_entry.operation.clone()); // let updated_node = Node::new_leaf( // node.is_left_sibling(), @@ -378,7 +378,8 @@ impl Sequencer { // ); debug!("updating hashchain for user id {}", id.clone()); - let proof = self.tree.update(KeyHash::with(hashed_id), current_chain)?; + let proof = + tree.update(KeyHash::with::(hashed_id), current_chain.clone())?; self.db .update_hashchain(operation, ¤t_chain) .context(format!( @@ -386,7 +387,7 @@ impl Sequencer { operation ))?; - proof + Ok(Proof::Update(proof)) } Operation::CreateAccount { id, value, source } => { // validation of account source @@ -401,7 +402,7 @@ impl Sequencer { } }?; - let hashchain: Result> = self.db.get_hashchain(id); + let hashchain: Result = self.db.get_hashchain(id); if hashchain.is_ok() { return Err(DatabaseError::NotFoundError(format!( "empty slot for ID {}", @@ -411,8 +412,8 @@ impl Sequencer { } debug!("creating new hashchain for user id {}", id.clone()); - let chain = Hashchain::new(id.clone()); - chain.create_account(value.into(), *source); + let mut chain = Hashchain::new(id.clone()); + chain.create_account(value.into(), source.clone()); self.db .update_hashchain(operation, &chain) @@ -424,7 +425,9 @@ impl Sequencer { let mut tree = self.tree.lock().await; let hashed_id = hash(id.as_bytes()); - tree.insert(KeyHash::with(hashed_id), chain) + Ok(Proof::Insert( + tree.insert(KeyHash::with::(hashed_id), chain)?, + )) } } } @@ -455,14 +458,14 @@ mod tests { use serial_test::serial; // set up redis connection and flush database before each test - fn setup_db<'a>() -> RedisConnection { + fn setup_db() -> RedisConnection { let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap(); redis_connection.flush_database().unwrap(); redis_connection } // flush database after each test - 
fn teardown_db(redis_connections: &RedisConnection) { + fn teardown_db(redis_connections: Arc>) { redis_connections.flush_database().unwrap(); } @@ -470,7 +473,7 @@ mod tests { async fn create_test_sequencer() -> Arc { let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1); let da_layer = Arc::new(da_layer); - let db = Arc::new(setup_db()); + let db: Arc> = Arc::new(Box::new(setup_db())); let signing_key = create_signing_key(); Arc::new( Sequencer::new(db.clone(), da_layer, Config::default(), signing_key.clone()).unwrap(), @@ -513,7 +516,7 @@ mod tests { async fn test_validate_and_queue_update() { let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1); let da_layer = Arc::new(da_layer); - let db = Arc::new(setup_db()); + let db: Arc> = Arc::new(Box::new(setup_db())); let sequencer = Arc::new( Sequencer::new( db.clone(), @@ -531,7 +534,7 @@ mod tests { .validate_and_queue_update(&update_entry) .await .unwrap(); - teardown_db(&db); + teardown_db(db); } #[tokio::test] @@ -539,7 +542,7 @@ mod tests { async fn test_queued_update_gets_finalized() { let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1); let da_layer = Arc::new(da_layer); - let db = Arc::new(setup_db()); + let db: Arc> = Arc::new(Box::new(setup_db())); let signing_key = create_signing_key(); let sequencer = Arc::new( Sequencer::new(db.clone(), da_layer, Config::default(), signing_key.clone()).unwrap(), @@ -569,7 +572,7 @@ mod tests { let value = hashchain.unwrap().get(0).operation.value(); assert_eq!(value, "test"); - teardown_db(&db); + teardown_db(db); } #[tokio::test] @@ -577,7 +580,7 @@ mod tests { async fn test_validate_invalid_update_fails() { let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1); let da_layer = Arc::new(da_layer); - let db = Arc::new(setup_db()); + let db: Arc> = Arc::new(Box::new(setup_db())); let sequencer = Arc::new( Sequencer::new( db.clone(), @@ -596,7 +599,7 @@ mod tests { let res = sequencer.validate_and_queue_update(&update_entry).await; assert!(res.is_err()); - teardown_db(&db); + teardown_db(db); } #[tokio::test] diff --git a/src/storage.rs b/src/storage.rs index 4b3743f1..8d016073 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -19,6 +19,7 @@ use crate::{ cfg::RedisConfig, common::{Hashchain, HashchainEntry, Operation}, error::{DatabaseError, GeneralError, PrismError}, + tree::Digest, }; // there are different key prefixes for the different tables in the database @@ -38,7 +39,7 @@ pub trait Database: Send + Sync + TreeReader + TreeWriter { ) -> Result<()>; fn get_commitment(&self, epoch: &u64) -> Result; - fn set_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()>; + fn set_commitment(&self, epoch: &u64, commitment: &Digest) -> Result<()>; fn get_epoch(&self) -> Result; fn set_epoch(&self, epoch: &u64) -> Result<()>; @@ -222,7 +223,7 @@ impl Database for RedisConnection { }) } - fn set_commitment(&self, epoch: &u64, commitment: &Hash) -> Result<()> { + fn set_commitment(&self, epoch: &u64, commitment: &Digest) -> Result<()> { let mut con = self.lock_connection()?; con.set::<&String, &String, ()>( &format!("commitments:epoch_{}", epoch), @@ -257,7 +258,7 @@ mod tests { // Helper functions // set up redis connection and flush database before each test - fn setup<'a>() -> RedisConnection<'a> { + fn setup() -> RedisConnection { let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap(); redis_connection.flush_database().unwrap(); redis_connection From 519638e1d0cb098c8f7c0d16ffd97832b3a77772 Mon Sep 17 00:00:00 2001 
From: Ryan Date: Sat, 17 Aug 2024 10:01:16 +0200 Subject: [PATCH 09/33] supernova --- Cargo.lock | 265 +++++++++++++----------------- Cargo.toml | 3 +- src/nova/batch.rs | 389 +++++++-------------------------------------- src/nova/insert.rs | 92 +++++++++++ src/nova/mod.rs | 2 + src/nova/update.rs | 87 ++++++++++ src/nova/utils.rs | 151 ++++++++++++++++++ src/tree/mod.rs | 36 +++-- 8 files changed, 526 insertions(+), 499 deletions(-) create mode 100644 src/nova/insert.rs create mode 100644 src/nova/update.rs diff --git a/Cargo.lock b/Cargo.lock index ef0ad0f2..75eae12a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -255,7 +255,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ "crypto-common", - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -408,46 +408,46 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arecibo" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4ffbbd8b381d6c0a87464d682e1dcfea2bc0519994013267ed283b0cca57af" +version = "0.2.0" dependencies = [ "abomonation", "abomonation_derive_ng", - "bellpepper", + "bellpepper 0.4.1 (git+https://github.com/lurk-lab/bellpepper?branch=dev)", "bellpepper-core", "bincode", "bitvec", "byteorder", "cfg-if 1.0.0", + "derive_more", "digest 0.10.7", "ff", - "generic-array", + "generic-array 1.1.0", "getrandom 0.2.15", "group", + "grumpkin-msm", "halo2curves", - "itertools 0.12.1", + "itertools 0.13.0", "neptune", "num-bigint 0.4.6", "num-integer", "num-traits", "once_cell", "pairing", - "pasta-msm", "pasta_curves", + "proptest", "rand 0.8.5", "rand_chacha 0.3.1", "rand_core 0.6.4", "rayon", + "rayon-scan", "ref-cast", "serde", "sha3", + "static_assertions", "subtle", - "tap", "thiserror", "tracing", - "tracing-subscriber", - "tracing-texray", + "vergen", ] [[package]] @@ -951,6 +951,17 @@ dependencies = [ "ff", ] +[[package]] +name = "bellpepper" +version = "0.4.1" +source = "git+https://github.com/lurk-lab/bellpepper?branch=dev#d0225bf6cb2bb9abaff28ea96a3497f9829815f8" +dependencies = [ + "bellpepper-core", + "byteorder", + "ff", + "itertools 0.12.1", +] + [[package]] name = "bellpepper-core" version = "0.4.0" @@ -1080,7 +1091,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -1089,7 +1100,7 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -1828,7 +1839,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array", + "generic-array 0.14.7", "rand_core 0.6.4", "typenum", ] @@ -1851,7 +1862,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array", + "generic-array 0.14.7", "subtle", ] @@ -1861,7 +1872,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" 
dependencies = [ - "generic-array", + "generic-array 0.14.7", "subtle", ] @@ -2021,7 +2032,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -2530,6 +2541,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "generic-array" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96512db27971c2c3eece70a1e106fbe6c87760234e31e8f7e5634912fe52794a" +dependencies = [ + "typenum", +] + [[package]] name = "getrandom" version = "0.1.16" @@ -2601,6 +2621,24 @@ dependencies = [ "subtle", ] +[[package]] +name = "grumpkin-msm" +version = "0.1.0" +source = "git+https://github.com/lurk-lab/grumpkin-msm?branch=dev#414da3bca6135a15fa58466b8831f5161ab1c51e" +dependencies = [ + "blst", + "cc", + "getrandom 0.2.15", + "halo2curves", + "pasta_curves", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rayon", + "semolina", + "sppark", + "which", +] + [[package]] name = "h2" version = "0.3.26" @@ -2632,13 +2670,14 @@ dependencies = [ [[package]] name = "halo2curves" -version = "0.4.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d0263c2933ee18bf416552719c5621f677f87acca8d50afe4ee74c81bb8ecca" +checksum = "db81d01d0bbfec9f624d7590fc6929ee2537a64ec1e080d8f8c9e2d2da291405" dependencies = [ "blake2b_simd", "ff", "group", + "hex 0.4.3", "lazy_static", "num-bigint 0.4.6", "num-traits", @@ -2647,6 +2686,7 @@ dependencies = [ "paste", "rand 0.8.5", "rand_core 0.6.4", + "rayon", "serde", "serde_arrays", "static_assertions", @@ -2990,7 +3030,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -3054,6 +3094,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "0.4.8" @@ -3497,15 +3546,6 @@ dependencies = [ "value-bag", ] -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - [[package]] name = "matchit" version = "0.7.3" @@ -3691,19 +3731,17 @@ dependencies = [ [[package]] name = "neptune" version = "13.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06626c9ac04c894e9a23d061ba1309f28506cdc5fe64156d28a15fb57fc8e438" +source = "git+https://github.com/lurk-lab/neptune?branch=dev#b6fb1f9372be2a50c5686c1dfa27bb0d8c4253e7" dependencies = [ "abomonation", "abomonation_derive_ng", - "bellpepper", + "bellpepper 0.4.1 (git+https://github.com/lurk-lab/bellpepper?branch=dev)", "bellpepper-core", "blake2s_simd", "blstrs", "byteorder", "ff", - "generic-array", - "log", + "generic-array 0.14.7", "pasta_curves", "serde", "trait-set", @@ -3752,16 +3790,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" 
-dependencies = [ - "overload", - "winapi", -] - [[package]] name = "num" version = "0.4.3" @@ -3876,6 +3904,15 @@ dependencies = [ "libc", ] +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + [[package]] name = "object" version = "0.36.2" @@ -3969,12 +4006,6 @@ version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "pairing" version = "0.23.0" @@ -4039,19 +4070,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "pasta-msm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e85d75eba3e7e9ee3bd11342b669185e194dadda3557934bc1000d9b87159d3" -dependencies = [ - "cc", - "pasta_curves", - "semolina", - "sppark", - "which", -] - [[package]] name = "pasta_curves" version = "0.5.1" @@ -4379,7 +4397,7 @@ dependencies = [ "axum", "base64 0.22.1", "bellman", - "bellpepper", + "bellpepper 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "bellpepper-core", "bls12_381", "borsh", @@ -4400,6 +4418,7 @@ dependencies = [ "futures", "hex 0.4.3", "indexed-merkle-tree", + "itertools 0.13.0", "jmt", "jsonrpsee 0.22.5", "keystore-rs", @@ -4495,7 +4514,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.8.4", + "regex-syntax", "rusty-fork", "tempfile", "unarray", @@ -4737,6 +4756,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "rayon-scan" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f87cc11a0140b4b0da0ffc889885760c61b13672d80a908920b2c0df078fa14" +dependencies = [ + "rayon", +] + [[package]] name = "redis" version = "0.20.2" @@ -4821,17 +4849,8 @@ checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", + "regex-automata", + "regex-syntax", ] [[package]] @@ -4842,7 +4861,7 @@ checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.4", + "regex-syntax", ] [[package]] @@ -4851,12 +4870,6 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - [[package]] name = "regex-syntax" version = "0.8.4" @@ -5514,15 +5527,6 @@ dependencies = [ "keccak", ] -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - 
[[package]] name = "shellexpand" version = "2.1.2" @@ -5774,16 +5778,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "term_size" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "termcolor" version = "1.4.1" @@ -5825,16 +5819,6 @@ dependencies = [ "syn 2.0.72", ] -[[package]] -name = "thread_local" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" -dependencies = [ - "cfg-if 1.0.0", - "once_cell", -] - [[package]] name = "threadpool" version = "1.8.1" @@ -5852,7 +5836,9 @@ checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa 1.0.11", + "libc", "num-conv", + "num_threads", "powerfmt", "serde", "time-core", @@ -6140,49 +6126,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", -] - -[[package]] -name = "tracing-texray" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07b7943a21ef76920e7250b59946b0068221c323bf1077baab36164477d63efc" -dependencies = [ - "lazy_static", - "parking_lot", - "term_size", - "tracing", - "tracing-subscriber", ] [[package]] @@ -6384,6 +6327,18 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "vergen" +version = "8.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2990d9ea5967266ea0ccf413a4aa5c42a93dbcfda9cb49a97de6931726b12566" +dependencies = [ + "anyhow", + "cfg-if 1.0.0", + "rustversion", + "time", +] + [[package]] name = "version_check" version = "0.9.5" diff --git a/Cargo.toml b/Cargo.toml index 75b9393a..2af051a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -73,7 +73,8 @@ jmt = { path = "../jmt", features = [ ] } #{ version = "0.10.0", features = ["mocks"] } bellpepper-core = { version = "0.4.0", default-features = false } bellpepper = { version = "0.4.0", default-features = false } -arecibo = { version = "0.1.1", default-features = false } +arecibo = { path = "../arecibo" } +itertools = "0.13.0" # zip_eq sha2 = "0.10.8" proptest = "1.5.0" auto_impl = "1.2.0" diff --git a/src/nova/batch.rs b/src/nova/batch.rs index dce21ae8..de0f2acc 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -1,360 +1,87 @@ -use anyhow::{anyhow, Result}; -use arecibo::traits::circuit::StepCircuit; -use bellpepper_core::{ - num::{AllocatedNum, Num}, - ConstraintSystem, SynthesisError, -}; -use core::marker::PhantomData; +use 
super::{insert::InsertCircuit, update::UpdateCircuit}; +use crate::tree::{InsertProof, UpdateProof}; +use arecibo::supernova::StepCircuit; +use arecibo::supernova::TrivialSecondaryCircuit; +use arecibo::traits::{CurveCycleEquipped, Dual, Engine}; use ff::PrimeField; -use indexed_merkle_tree::{ - node::{LeafNode, Node}, - sha256_mod, - tree::{InsertProof, MerkleProof, NonMembershipProof, Proof, UpdateProof}, -}; -#[derive(Clone, Debug)] -pub enum UnifiedProofStep { - /// Update proof step ensures that an existing LeafNode is updated with a new value. - /// Cares about inputs z[0]. - // TODO: adr-003: Adding authentication circuit with poseidon hash, which is not needed in Verdict but needed here. - // This is because Verdict assumes the downstream application verifies the hashchain themselves. - // We need to be able to prove the validity of the hashchain though, since anybody can post an Update operation. - Update, - /// InsertStart proof step ensures that a LeafNode to be inserted does not yet exist in the tree. - /// Cares about inputs z[0]. - InsertStart, - /// InsertUpdate proof step ensures that: - /// 1. There exists a LeafNode where existing_node.label < new_node.label < existing_node.next - /// 2. The existing_node's next pointer is updated to new_node.label. - /// Cares about inputs z[0] and z[2]. - InsertUpdate, - /// InsertEnd proof step ensures that the new_node from the last step is added to the tree. - /// Cares about inputs z[0] and z[1]. - InsertEnd, +// Assume these functions exist +fn create_random_insert() -> InsertProof { + unimplemented!() +} +fn create_random_update() -> UpdateProof { + unimplemented!() } #[derive(Clone)] -pub struct MerkleProofStepCircuit { - pub step_type: UnifiedProofStep, - old_proof: Option, - new_proof: Option, - - // Additional fields for non-membership proof - is_non_membership: bool, - missing_node: Option, - _p: PhantomData, +struct EpochCircuitSequence +where + E1: CurveCycleEquipped, +{ + circuits: Vec>, + rom: Vec, } -impl MerkleProofStepCircuit { - pub fn new( - step: UnifiedProofStep, - old_proof: Option, - new_proof: Option, - is_non_membership: bool, - missing_node: Option, - ) -> Self { - MerkleProofStepCircuit { - step_type: step, - old_proof, - new_proof, - is_non_membership, - missing_node, - _p: PhantomData, - } - } -} +impl arecibo::supernova::NonUniformCircuit for EpochCircuitSequence +where + E1: CurveCycleEquipped, +{ + type C1 = EpochCircuit; + type C2 = TrivialSecondaryCircuit< as Engine>::Scalar>; -impl MerkleProofStepCircuit { - pub fn from_proof(proof: Proof) -> Vec { - match proof { - Proof::Insert(insert_proof) => { - vec![ - Self::new( - UnifiedProofStep::InsertStart, - Some(insert_proof.non_membership_proof.merkle_proof.clone()), - None, - true, - Some(insert_proof.non_membership_proof.missing_node.clone()), - ), - Self::new( - UnifiedProofStep::InsertUpdate, - Some(insert_proof.first_proof.old_proof), - Some(insert_proof.first_proof.new_proof), - false, - Some(insert_proof.non_membership_proof.missing_node), - ), - Self::new( - UnifiedProofStep::InsertEnd, - Some(insert_proof.second_proof.old_proof), - Some(insert_proof.second_proof.new_proof), - false, - None, - ), - ] - } - Proof::Update(update_proof) => { - vec![Self::new( - UnifiedProofStep::Update, - Some(update_proof.old_proof), - Some(update_proof.new_proof), - false, - None, - )] - } - } + fn num_circuits(&self) -> usize { + 2 // Insert and Update } -} -// TODO: these are just here temporarily as I write the circuits, they need to be moved to where the 
circuit gets instantiated later ////////////////////// - -pub struct Hash { - hash: indexed_merkle_tree::Hash, - _p: PhantomData, -} - -impl Hash { - pub fn new(hash: indexed_merkle_tree::Hash) -> Self { - Self { - hash, - _p: PhantomData, - } + fn primary_circuit(&self, circuit_index: usize) -> Self::C1 { + self.circuits[circuit_index].clone() } - // uses [`PrimeField::from_u128`] for inspiration. If the field element's capacity is not enough to hold the hash, - pub fn to_scalar(&self) -> Result { - let bytes = self.hash.as_ref(); - - // Convert the 32 bytes to two u128 values - let lower = u128::from_le_bytes(bytes[0..16].try_into()?); - let upper = u128::from_le_bytes(bytes[16..32].try_into()?); - - let mut tmp = Scalar::from_u128(upper); - for _ in 0..128 { - tmp = tmp.double(); - } - Ok(tmp + Scalar::from_u128(lower)) + fn secondary_circuit(&self) -> Self::C2 { + TrivialSecondaryCircuit::default() } -} -pub fn unpack_and_process(proof: &MerkleProof) -> Result<(Scalar, &Vec)> { - if !proof.path.is_empty() { - let root: Scalar = Hash::new(proof.root_hash).to_scalar()?; - Ok((root, &proof.path)) - } else { - // TODO: This if else makes no sense, can't we just give an empty path and let the circuit handle it? - Err(anyhow!("Proof path is empty.")) + fn initial_circuit_index(&self) -> usize { + self.rom[0] } } -pub fn recalculate_hash_as_scalar(path: &[Node]) -> Result { - let mut current_hash = path[0].get_hash(); - for node in path.iter().skip(1) { - let combined = if node.is_left_sibling() { - [node.get_hash().as_ref(), current_hash.as_ref()].concat() - } else { - [current_hash.as_ref(), node.get_hash().as_ref()].concat() - }; - // TODO: sha256_mod is not generic for scalar, its using the order of bls12_381 - current_hash = sha256_mod(&combined); - } - Hash::new(current_hash).to_scalar() +#[derive(Clone)] +enum EpochCircuit { + Insert(InsertCircuit), + Update(UpdateCircuit), } -///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -impl StepCircuit for MerkleProofStepCircuit { +impl StepCircuit for EpochCircuit { fn arity(&self) -> usize { - 3 + match self { + Self::Insert(x) => x.arity(), + Self::Update(x) => x.arity(), + } } - fn synthesize>( + fn synthesize>( &self, cs: &mut CS, - z: &[AllocatedNum], - ) -> Result>, SynthesisError> { - println!("Step: {:?}", self.step_type); - println!( - "Input z: {:?}", - z.iter().map(|num| num.get_value()).collect::>() - ); - - let previous_root_input = &z[0]; - let existing_node_label_input = &z[1]; - let missing_node_label_input = &z[2]; - - let old_proof = self - .old_proof - .as_ref() - .ok_or(SynthesisError::Unsatisfiable)?; - - let mut new_proof: Option<&MerkleProof> = None; - if !self.is_non_membership { - new_proof = Some( - self.new_proof - .as_ref() - .expect("New proof is missing for non-membership proof."), - ); + pc: Option<&bellpepper_core::num::AllocatedNum>, + z: &[bellpepper_core::num::AllocatedNum], + ) -> Result< + ( + Option>, + Vec>, + ), + bellpepper_core::SynthesisError, + > { + match self { + Self::Insert(x) => x.synthesize(cs, pc, z), + Self::Update(x) => x.synthesize(cs, pc, z), } - - let previous_root_alloc = AllocatedNum::alloc(cs.namespace(|| "old root"), || { - Ok(Hash::new(old_proof.root_hash).to_scalar().unwrap()) - }) - .unwrap(); - - cs.enforce( - || "z_0 == old_root", - |lc| lc + previous_root_input.get_variable(), - |lc| lc + CS::one(), - |lc| lc + previous_root_alloc.get_variable(), - ); - - let mut z_out: 
Vec> = Vec::new(); - - match self.step_type { - UnifiedProofStep::Update => { - let new_proof = new_proof.ok_or(SynthesisError::Unsatisfiable)?; - let vars = self.process_update(cs, old_proof, new_proof)?; - let updated_root = vars[1].clone(); - z_out.extend_from_slice(&[ - updated_root, - existing_node_label_input.clone(), - missing_node_label_input.clone(), - ]); - } - UnifiedProofStep::InsertStart => { - let (non_membership_root, non_membership_path) = - unpack_and_process::(old_proof) - .map_err(|_| SynthesisError::Unsatisfiable)?; - - let new_leaf = self - .missing_node - .as_ref() - .ok_or(SynthesisError::Unsatisfiable)?; - - let existing_leaf = non_membership_path - .first() - .ok_or(SynthesisError::Unsatisfiable)?; - let existing_leaf_label: Scalar = Hash::new(existing_leaf.get_label()) - .to_scalar() - .map_err(|_| SynthesisError::Unsatisfiable)?; - let new_leaf_label: Scalar = Hash::new(new_leaf.label) - .to_scalar() - .map_err(|_| SynthesisError::Unsatisfiable)?; - - let allocated_pre_insertion_root = - AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { - Ok(non_membership_root) - })?; - - let recalculated_root = recalculate_hash_as_scalar::(non_membership_path) - .map_err(|_| SynthesisError::Unsatisfiable)?; - - let allocated_recalculated_root = AllocatedNum::alloc( - cs.namespace(|| "recalculated_pre_insertion_root"), - || Ok(recalculated_root), - )?; - - cs.enforce( - || "pre_insertion_root_verification", - |lc| lc + allocated_pre_insertion_root.get_variable(), - |lc| lc + CS::one(), - |lc| lc + allocated_recalculated_root.get_variable(), - ); - - let z1 = AllocatedNum::alloc(cs.namespace(|| "z1"), || Ok(existing_leaf_label))?; - let z2 = AllocatedNum::alloc(cs.namespace(|| "z2"), || Ok(new_leaf_label))?; - z_out.extend_from_slice(&[allocated_pre_insertion_root, z1, z2]); - } - UnifiedProofStep::InsertUpdate => { - let new_proof = new_proof.ok_or(SynthesisError::Unsatisfiable)?; - - let vars = self.process_update(cs, old_proof, new_proof)?; - let updated_root = vars[1].clone(); - - z_out.extend_from_slice(&[ - updated_root, - existing_node_label_input.clone(), - missing_node_label_input.clone(), - ]); - } - UnifiedProofStep::InsertEnd => { - let new_proof = new_proof.ok_or(SynthesisError::Unsatisfiable)?; - - let vars = self.process_update(cs, old_proof, new_proof)?; - let updated_root = vars[1].clone(); - z_out.extend_from_slice(&[ - updated_root, - existing_node_label_input.clone(), - missing_node_label_input.clone(), - ]); - } - } - - println!( - "Output z_out: {:?}", - z_out.iter().map(|num| num.get_value()).collect::>() - ); - Ok(z_out) } -} -impl MerkleProofStepCircuit { - fn process_update>( - &self, - cs: &mut CS, - old_proof: &MerkleProof, - new_proof: &MerkleProof, - ) -> Result>, SynthesisError> { - // todo: we should be checking z[0] against old_root, the reason I don't yet here is because idk how to handle the case where this is the first proof step - - // todo: perhaps add a cumulative iterator to z to make it easier to find problems later, - // using intermediate roots as a namespace will cause a bit of searching - let namespace = format!("{:?}->{:?}", old_proof.root_hash, new_proof.root_hash); - - // todo: repalce unwraps when i get a sec - - let (old_root, old_path) = unpack_and_process::(old_proof).unwrap(); - let (updated_root, updated_path) = unpack_and_process::(new_proof).unwrap(); - - let root_with_old_pointer = - AllocatedNum::alloc(cs.namespace(|| format!("old_root: {namespace}")), || { - Ok(old_root) - })?; - - let root_with_new_pointer 
= - AllocatedNum::alloc(cs.namespace(|| format!("new_root: {namespace}")), || { - Ok(updated_root) - })?; - - let recalculated_old_root = recalculate_hash_as_scalar::(old_path).unwrap(); - let recalculated_updated_root = recalculate_hash_as_scalar::(updated_path).unwrap(); - - let allocated_recalculated_old_root = AllocatedNum::alloc( - cs.namespace(|| format!("recalculated_old_root: {namespace}")), - || Ok(recalculated_old_root), - )?; - let allocated_recalculated_updated_root = AllocatedNum::alloc( - cs.namespace(|| format!("recalculated_updated_root: {namespace}")), - || Ok(recalculated_updated_root), - )?; - - cs.enforce( - || format!("old_root update equality: {namespace}"), - |lc| lc + allocated_recalculated_old_root.get_variable(), - |lc| lc + CS::one(), - |lc| lc + root_with_old_pointer.get_variable(), - ); - - cs.enforce( - || format!("new_root update equality: {namespace}"), - |lc| lc + allocated_recalculated_updated_root.get_variable(), - |lc| lc + CS::one(), - |lc| lc + root_with_new_pointer.get_variable(), - ); - - // is this jank or are we fine? - Ok(vec![ - allocated_recalculated_old_root, - allocated_recalculated_updated_root, - ]) + fn circuit_index(&self) -> usize { + match self { + Self::Insert(x) => x.circuit_index(), + Self::Update(x) => x.circuit_index(), + } } } diff --git a/src/nova/insert.rs b/src/nova/insert.rs new file mode 100644 index 00000000..766cb769 --- /dev/null +++ b/src/nova/insert.rs @@ -0,0 +1,92 @@ +use crate::tree::InsertProof; +use crate::{ + nova::utils::{next_rom_index_and_pc, Digest}, + tree, +}; +use anyhow::Result; +use arecibo::supernova::StepCircuit; +use bellpepper_core::{ + num::{AllocatedNum, Num}, + ConstraintSystem, SynthesisError, +}; +use ff::PrimeField; + +#[derive(Clone)] +pub struct InsertCircuit { + pub insertion_proof: InsertProof, + rom_size: usize, + _phantom: std::marker::PhantomData, +} + +impl StepCircuit for InsertCircuit +where + F: PrimeField, +{ + fn arity(&self) -> usize { + 2 + self.rom_size // old_root + rom_index + rom[].len() + } + + fn circuit_index(&self) -> usize { + 0 + } + + fn synthesize>( + &self, + cs: &mut CS, + pc: Option<&AllocatedNum>, + z: &[AllocatedNum], + ) -> Result<(Option>, Vec>), SynthesisError> { + let old_root = &z[0]; + let rom_index = &z[1]; + let allocated_rom = &z[2..]; + + let pc = pc.ok_or(SynthesisError::AssignmentMissing)?; + + // Compute next ROM index and PC + let (rom_index_next, pc_next) = next_rom_index_and_pc( + &mut cs.namespace(|| "next_rom_index_and_pc"), + rom_index, + allocated_rom, + pc, + )?; + + cs.push_namespace(|| { + format!( + "insert_proof {:?}", + self.insertion_proof.non_membership_proof.root + ) + }); + + let pre_insertion_scalar = Digest::new(self.insertion_proof.non_membership_proof.root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable); + let pre_insertion_root = + AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { + pre_insertion_scalar + })?; + let new_scalar = Digest::new(self.insertion_proof.new_root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable); + let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || new_scalar)?; + + cs.enforce( + || "z0 == pre_insertion_root", + |lc| lc + old_root.get_variable(), + |lc| lc + CS::one(), + |lc| lc + pre_insertion_root.get_variable(), + ); + // TODO: bellpepper merkle proof gadget + self.insertion_proof + .verify() + .map_err(|_| SynthesisError::Unsatisfiable)?; + + cs.pop_namespace(); + + // Prepare the next state vector + let mut z_next = vec![new_root]; + 
z_next.push(rom_index_next); + z_next.extend_from_slice(&z[2..]); + + Ok((Some(pc_next), z_next)) + } +} diff --git a/src/nova/mod.rs b/src/nova/mod.rs index cda7425f..4ac374c4 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -1,4 +1,6 @@ pub mod batch; +pub mod insert; +pub mod update; pub mod utils; // #[cfg(test)] diff --git a/src/nova/update.rs b/src/nova/update.rs new file mode 100644 index 00000000..26ff7d21 --- /dev/null +++ b/src/nova/update.rs @@ -0,0 +1,87 @@ +use crate::tree::{InsertProof, UpdateProof}; +use crate::{ + nova::utils::{next_rom_index_and_pc, Digest}, + tree, +}; +use anyhow::Result; +use arecibo::supernova::StepCircuit; +use bellpepper_core::{ + num::{AllocatedNum, Num}, + ConstraintSystem, SynthesisError, +}; +use ff::PrimeField; + +#[derive(Clone)] +pub struct UpdateCircuit { + pub update_proof: UpdateProof, + rom_size: usize, + _phantom: std::marker::PhantomData, +} + +impl StepCircuit for UpdateCircuit +where + F: PrimeField, +{ + fn arity(&self) -> usize { + 2 + self.rom_size // old_root + rom_index + rom[].len() + } + + fn circuit_index(&self) -> usize { + 1 + } + + fn synthesize>( + &self, + cs: &mut CS, + pc: Option<&AllocatedNum>, + z: &[AllocatedNum], + ) -> Result<(Option>, Vec>), SynthesisError> { + let old_root = &z[0]; + let rom_index = &z[1]; + let allocated_rom = &z[2..]; + + let pc = pc.ok_or(SynthesisError::AssignmentMissing)?; + + // Compute next ROM index and PC + let (rom_index_next, pc_next) = next_rom_index_and_pc( + &mut cs.namespace(|| "next_rom_index_and_pc"), + rom_index, + allocated_rom, + pc, + )?; + + cs.push_namespace(|| format!("update_proof {:?}", self.update_proof.old_root)); + + let pre_insertion_scalar = Digest::new(self.update_proof.new_root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable); + let pre_insertion_root = + AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { + pre_insertion_scalar + })?; + let new_scalar = Digest::new(self.update_proof.new_root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable); + let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || new_scalar)?; + + cs.enforce( + || "z0 == pre_insertion_root", + |lc| lc + old_root.get_variable(), + |lc| lc + CS::one(), + |lc| lc + pre_insertion_root.get_variable(), + ); + // TODO: bellpepper merkle proof gadget + self.update_proof + .verify() + .map_err(|_| SynthesisError::Unsatisfiable)?; + + cs.pop_namespace(); + + // Prepare the next state vector + let mut z_next = vec![new_root]; + z_next.push(rom_index_next); + z_next.extend_from_slice(&z[2..]); + + Ok((Some(pc_next), z_next)) + } +} diff --git a/src/nova/utils.rs b/src/nova/utils.rs index 4ade9c57..a6e20700 100644 --- a/src/nova/utils.rs +++ b/src/nova/utils.rs @@ -1,4 +1,155 @@ // use bellpepper_core::ConstraintSystem; +use anyhow::Result; +use bellpepper_core::{ + boolean::{AllocatedBit, Boolean}, + num::AllocatedNum, + ConstraintSystem, LinearCombination, SynthesisError, +}; +use ff::PrimeField; +use itertools::Itertools as _; +use std::marker::PhantomData; + +use crate::tree; + +pub struct Digest { + digest: tree::Digest, + _p: PhantomData, +} + +impl Digest { + pub fn new(digest: tree::Digest) -> Self { + Self { + digest, + _p: PhantomData, + } + } + + // uses [`PrimeField::from_u128`] for inspiration. 
If the field element's capacity is not enough to hold the hash, + pub fn to_scalar(&self) -> Result { + let bytes = self.digest.as_ref(); + + // Convert the 32 bytes to two u128 values + let lower = u128::from_le_bytes(bytes[0..16].try_into()?); + let upper = u128::from_le_bytes(bytes[16..32].try_into()?); + + let mut tmp = Scalar::from_u128(upper); + for _ in 0..128 { + tmp = tmp.double(); + } + Ok(tmp + Scalar::from_u128(lower)) + } +} + +pub struct Hash { + hash: indexed_merkle_tree::Hash, + _p: PhantomData, +} + +pub fn next_rom_index_and_pc>( + cs: &mut CS, + rom_index: &AllocatedNum, + allocated_rom: &[AllocatedNum], + pc: &AllocatedNum, +) -> Result<(AllocatedNum, AllocatedNum), SynthesisError> { + // Compute a selector for the current rom_index in allocated_rom + let current_rom_selector = get_selector_vec_from_index( + cs.namespace(|| "rom selector"), + rom_index, + allocated_rom.len(), + )?; + + // Enforce that allocated_rom[rom_index] = pc + for (rom, bit) in allocated_rom.iter().zip_eq(current_rom_selector.iter()) { + // if bit = 1, then rom = pc + // bit * (rom - pc) = 0 + cs.enforce( + || "enforce bit = 1 => rom = pc", + |lc| lc + &bit.lc(CS::one(), F::ONE), + |lc| lc + rom.get_variable() - pc.get_variable(), + |lc| lc, + ); + } + + // Get the index of the current rom, or the index of the invalid rom if no match + let current_rom_index = current_rom_selector + .iter() + .position(|bit| bit.get_value().is_some_and(|v| v)) + .unwrap_or_default(); + let next_rom_index = current_rom_index + 1; + + let rom_index_next = AllocatedNum::alloc_infallible(cs.namespace(|| "next rom index"), || { + F::from(next_rom_index as u64) + }); + cs.enforce( + || " rom_index + 1 - next_rom_index_num = 0", + |lc| lc, + |lc| lc, + |lc| lc + rom_index.get_variable() + CS::one() - rom_index_next.get_variable(), + ); + + // Allocate the next pc without checking. + // The next iteration will check whether the next pc is valid. + let pc_next = AllocatedNum::alloc_infallible(cs.namespace(|| "next pc"), || { + allocated_rom + .get(next_rom_index) + .and_then(|v| v.get_value()) + .unwrap_or(-F::ONE) + }); + + Ok((rom_index_next, pc_next)) +} + +/// Compute a selector vector `s` of size `num_indices`, such that +/// `s[i] == 1` if i == `target_index` and 0 otherwise. 
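+///
+/// For example, with `num_indices = 4` and `target_index = 2`, the selector is
+/// `[0, 0, 1, 0]`. The bits are allocated non-deterministically from the witness,
+/// so two constraints below make the selection sound: the bits must sum to one
+/// (exactly-one-selection), and the index-weighted sum `∑ i * s[i]` must equal
+/// `target_index`.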
+pub fn get_selector_vec_from_index>( + mut cs: CS, + target_index: &AllocatedNum, + num_indices: usize, +) -> Result, SynthesisError> { + assert_ne!(num_indices, 0); + + // Compute the selector vector non-deterministically + let selector = (0..num_indices) + .map(|idx| { + // b <- idx == target_index + Ok(Boolean::Is(AllocatedBit::alloc( + cs.namespace(|| format!("allocate s_{:?}", idx)), + target_index.get_value().map(|v| v == F::from(idx as u64)), + )?)) + }) + .collect::, SynthesisError>>()?; + + // Enforce ∑ selector[i] = 1 + { + let selected_sum = selector.iter().fold(LinearCombination::zero(), |lc, bit| { + lc + &bit.lc(CS::one(), F::ONE) + }); + cs.enforce( + || "exactly-one-selection", + |_| selected_sum, + |lc| lc + CS::one(), + |lc| lc + CS::one(), + ); + } + + // Enforce `target_index - ∑ i * selector[i] = 0`` + { + let selected_value = selector + .iter() + .enumerate() + .fold(LinearCombination::zero(), |lc, (i, bit)| { + lc + &bit.lc(CS::one(), F::from(i as u64)) + }); + cs.enforce( + || "target_index - ∑ i * selector[i] = 0", + |lc| lc, + |lc| lc, + |lc| lc + target_index.get_variable() - &selected_value, + ); + } + + Ok(selector) +} // pub(crate) fn prove_update>( // cs: &mut CS, diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 1eb512f5..973be335 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -21,6 +21,18 @@ pub type Hasher = sha2::Sha256; )] pub struct Digest([u8; 32]); +impl Into for Digest { + fn into(self) -> RootHash { + RootHash::from(self.0) + } +} + +impl Into for RootHash { + fn into(self) -> Digest { + Digest(self.0) + } +} + impl AsRef<[u8]> for Digest { fn as_ref(&self) -> &[u8] { &self.0 @@ -64,14 +76,14 @@ pub enum Proof { #[derive(Debug, Clone, BorshSerialize, BorshDeserialize)] pub struct NonMembershipProof { - pub root: RootHash, + pub root: Digest, pub proof: SparseMerkleProof, pub key: KeyHash, } impl NonMembershipProof { pub fn verify(&self) -> Result<()> { - self.proof.verify_nonexistence(self.root, self.key) + self.proof.verify_nonexistence(self.root.into(), self.key) } } @@ -79,7 +91,7 @@ impl NonMembershipProof { pub struct InsertProof { pub non_membership_proof: NonMembershipProof, - pub new_root: RootHash, + pub new_root: Digest, pub membership_proof: UpdateMerkleProof, pub value: Hashchain, } @@ -93,8 +105,8 @@ impl InsertProof { let value = to_vec(&self.value).unwrap(); self.membership_proof.clone().verify_update( - self.non_membership_proof.root, - self.new_root, + self.non_membership_proof.root.into(), + self.new_root.into(), vec![(self.non_membership_proof.key, Some(value))], ); @@ -104,8 +116,8 @@ impl InsertProof { #[derive(Debug, Clone, BorshSerialize, BorshDeserialize)] pub struct UpdateProof { - pub old_root: RootHash, - pub new_root: RootHash, + pub old_root: Digest, + pub new_root: Digest, pub key: KeyHash, pub new_value: Hashchain, @@ -118,8 +130,8 @@ impl UpdateProof { let new_value = to_vec(&self.new_value).unwrap(); self.proof.clone().verify_update( - self.old_root, - self.new_root, + self.old_root.into(), + self.new_root.into(), vec![(self.key, Some(new_value))], ) } @@ -205,7 +217,7 @@ where let (old_value, non_membership_merkle_proof) = self.jmt.get_with_proof(key, self.epoch)?; let non_membership_proof = NonMembershipProof { - root: old_root, + root: old_root.into(), proof: non_membership_merkle_proof, key, }; @@ -225,7 +237,7 @@ where ); Ok(InsertProof { - new_root, + new_root: new_root.into(), value, non_membership_proof, membership_proof, @@ -266,7 +278,7 @@ where Ok(Ok(deserialized_value)) } None => 
Ok(Err(NonMembershipProof { - root: self.get_current_root()?, + root: self.get_current_root()?.into(), proof, key, })), From 26fcf3f5b34edf7dcb959dcb9eb5421005de8578 Mon Sep 17 00:00:00 2001 From: sebasti810 Date: Tue, 20 Aug 2024 14:22:42 +0200 Subject: [PATCH 10/33] fix some things --- benches/zk_benchmarks.rs | 4 +- src/main.rs | 16 +- src/node_types/lightclient.rs | 7 +- src/nova/insert.rs | 10 +- src/nova/mod.rs | 296 ++++++++++++++-------------------- src/nova/update.rs | 16 +- src/nova/utils.rs | 5 + src/tree/mod.rs | 97 ++++++++--- src/utils.rs | 11 +- src/webserver.rs | 8 +- 10 files changed, 239 insertions(+), 231 deletions(-) diff --git a/benches/zk_benchmarks.rs b/benches/zk_benchmarks.rs index b998df9d..f1e764e9 100644 --- a/benches/zk_benchmarks.rs +++ b/benches/zk_benchmarks.rs @@ -90,12 +90,12 @@ fn bench_proof_verification(c: &mut Criterion) { .unwrap(); let (proof, verifying_key) = circuit.create_and_verify_snark().unwrap(); b.iter(|| { - let _ = validate_epoch( + /* let _ = validate_epoch( black_box(&prev_commitment), black_box(¤t_commitment), black_box(proof.clone()), black_box(verifying_key.clone()), - ); + ); */ }); }, ); diff --git a/src/main.rs b/src/main.rs index 9f52af02..80cdfc96 100644 --- a/src/main.rs +++ b/src/main.rs @@ -59,12 +59,16 @@ async fn main() -> std::io::Result<()> { .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?; Arc::new( - Sequencer::new(Arc::new(redis_connections), da, config, signing_key).map_err( - |e| { - error!("error initializing sequencer: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, e.to_string()) - }, - )?, + Sequencer::new( + Arc::new(Box::new(redis_connections)), + da, + config, + signing_key, + ) + .map_err(|e| { + error!("error initializing sequencer: {}", e); + std::io::Error::new(std::io::ErrorKind::Other, e.to_string()) + })?, ) } }; diff --git a/src/node_types/lightclient.rs b/src/node_types/lightclient.rs index 45ef686a..1cebf58d 100644 --- a/src/node_types/lightclient.rs +++ b/src/node_types/lightclient.rs @@ -96,7 +96,7 @@ impl LightClient { // error!("failed to deserialize verifying key, skipping a blob at height {}: {:?}", i, e); // continue; // } - // }; + // }; // if the user does not add a verifying key, we will not verify the signature, // but only log a warning on startup @@ -115,6 +115,9 @@ impl LightClient { } } + /* + TODO: validation of the epoch proof + match validate_epoch( prev_commitment, current_commitment, @@ -128,7 +131,7 @@ impl LightClient { ) } Err(err) => panic!("failed to validate epoch: {:?}", err), - } + } */ } } Err(e) => { diff --git a/src/nova/insert.rs b/src/nova/insert.rs index 766cb769..8ba700ad 100644 --- a/src/nova/insert.rs +++ b/src/nova/insert.rs @@ -1,14 +1,8 @@ +use crate::nova::utils::{next_rom_index_and_pc, Digest}; use crate::tree::InsertProof; -use crate::{ - nova::utils::{next_rom_index_and_pc, Digest}, - tree, -}; use anyhow::Result; use arecibo::supernova::StepCircuit; -use bellpepper_core::{ - num::{AllocatedNum, Num}, - ConstraintSystem, SynthesisError, -}; +use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; use ff::PrimeField; #[derive(Clone)] diff --git a/src/nova/mod.rs b/src/nova/mod.rs index 4ac374c4..da3cefa5 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -3,180 +3,122 @@ pub mod insert; pub mod update; pub mod utils; -// #[cfg(test)] -// mod tests { -// use crate::nova::batch::{Hash, MerkleProofStepCircuit, UnifiedProofStep}; -// use arecibo::{ -// provider::{Bn256Engine, GrumpkinEngine}, -// 
traits::circuit::StepCircuit, -// }; -// use arecibo::{ -// traits::{circuit::TrivialCircuit, snark::default_ck_hint, Engine}, -// PublicParams, RecursiveSNARK, -// }; -// use bellpepper_core::{num::AllocatedNum, test_cs::TestConstraintSystem, ConstraintSystem}; -// use ff::PrimeField; -// use indexed_merkle_tree::{node::Node, sha256_mod, tree::IndexedMerkleTree, tree::Proof}; - -// type E1 = Bn256Engine; -// type E2 = GrumpkinEngine; - -// type C1 = MerkleProofStepCircuit<::Scalar>; -// type C2 = TrivialCircuit<::Scalar>; - -// fn debug_circuit(circuit: &C1, z_in: &[::Scalar]) { -// let mut cs = TestConstraintSystem::<::Scalar>::new(); - -// let z: Vec::Scalar>> = z_in -// .iter() -// .enumerate() -// .map(|(i, &value)| { -// AllocatedNum::alloc(&mut cs.namespace(|| format!("input {}", i)), || Ok(value)) -// .expect("failed to allocate input") -// }) -// .collect(); - -// circuit.synthesize(&mut cs, &z).expect("synthesis failed"); - -// println!("Constraint System:"); -// println!("{}", cs.pretty_print()); - -// if !cs.is_satisfied() { -// println!("Constraint system not satisfied!"); -// for (i, constraint) in cs.which_is_unsatisfied().iter().enumerate() { -// println!("Unsatisfied Constraint {}: {:?}", i, constraint); -// } -// } else { -// println!("All constraints satisfied."); -// } - -// assert!(cs.is_satisfied(), "Constraints not satisfied"); -// } - -// fn create_public_params() -> PublicParams { -// let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); -// let test_label = sha256_mod(b"test"); -// let test_value = sha256_mod(b"value"); -// let mut test_node = Node::new_leaf(true, test_label, test_value, Node::TAIL); - -// let test_proof = tree.insert_node(&mut test_node).unwrap(); -// let test_circuit = MerkleProofStepCircuit::from_proof(Proof::Insert(test_proof))[0].clone(); - -// let circuit_primary = test_circuit; -// let circuit_secondary = TrivialCircuit::default(); - -// PublicParams::::setup( -// &circuit_primary, -// &circuit_secondary, -// &*default_ck_hint(), -// &*default_ck_hint(), -// ) -// .unwrap() -// } - -// #[test] -// fn test_nova() { -// let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); -// let initial_commitment = Hash::new(tree.get_commitment().unwrap()) -// .to_scalar() -// .unwrap(); - -// // create three nodes to insert -// let ryan = sha256_mod(b"Ryan"); -// let ford = sha256_mod(b"Ford"); -// let sebastian = sha256_mod(b"Sebastian"); -// let pusch = sha256_mod(b"Pusch"); -// let ethan = sha256_mod(b"Ethan"); -// let triple_zero = sha256_mod(b"000"); - -// let mut ryans_node = Node::new_leaf(true, ryan, ford, Node::TAIL); -// let mut sebastians_node = Node::new_leaf(true, sebastian, pusch, Node::TAIL); -// let mut ethans_node = Node::new_leaf(true, ethan, triple_zero, Node::TAIL); - -// // generate proofs for the three nodes -// let first_insert_proof = tree.insert_node(&mut ryans_node).unwrap(); -// let second_insert_proof = tree.insert_node(&mut sebastians_node).unwrap(); -// let third_insert_proof = tree.insert_node(&mut ethans_node).unwrap(); - -// // create zkSNARKs for the three proofs -// let first_insert_zk_snark = Proof::Insert(first_insert_proof); -// let second_insert_zk_snark = Proof::Insert(second_insert_proof); -// let third_insert_zk_snark = Proof::Insert(third_insert_proof); - -// let proofs = vec![ -// first_insert_zk_snark, -// second_insert_zk_snark, -// third_insert_zk_snark, -// ]; - -// let circuits: Vec = proofs -// .into_iter() -// .flat_map(MerkleProofStepCircuit::from_proof) -// .collect(); - -// 
println!("Creating public params..."); -// let pp = create_public_params(); -// println!("Created public params."); - -// let initial_primary_inputs = vec![ -// initial_commitment, -// ::Scalar::zero(), // initial existing node label -// ::Scalar::zero(), // initial missing node label -// ]; - -// let secondary_circuit = TrivialCircuit::default(); - -// println!("Creating recursive snark..."); -// let recursive_snark_result = RecursiveSNARK::new( -// &pp, -// &circuits[0], -// &secondary_circuit, -// &initial_primary_inputs, -// &[::Scalar::from(2u64)], -// ); - -// let mut z1_scalars = initial_primary_inputs; -// let mut z2_scalars = [::Scalar::from(2u64)]; - -// match recursive_snark_result { -// Ok(mut recursive_snark) => { -// println!("Created recursive snark successfully."); - -// for (i, circuit) in circuits.iter().enumerate() { -// println!("Step: {i}"); - -// debug_circuit(circuit, &z1_scalars); - -// let prove_result = recursive_snark.prove_step(&pp, circuit, &secondary_circuit); - -// match prove_result { -// Ok(_) => { -// println!("Prove step {i} succeeded"); -// } -// Err(e) => { -// println!("Prove step {i} failed with error: {:?}", e); -// panic!("Test failed at prove step {i}"); -// } -// } - -// let verify_result = -// recursive_snark.verify(&pp, i + 1, &z1_scalars, &z2_scalars); - -// match verify_result { -// Ok(_) => { -// println!("Verify step {i} succeeded") -// } -// Err(e) => { -// println!("Verify step {i} failed with error: {:?}", e); -// panic!("Test failed at verify step {i}"); -// } -// } -// } -// } -// Err(e) => { -// println!("Failed to create recursive snark. Error: {:?}", e); -// panic!("Test failed during recursive snark creation"); -// } -// } -// } -// } +#[cfg(test)] +mod tests { + use crate::common::Hashchain; + use crate::tree::{Hasher, KeyDirectoryTree, SnarkableTree}; + use jmt::mock::MockTreeStore; + use jmt::KeyHash; + use std::sync::Arc; + + #[test] + fn test_key_directory_tree() { + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); + + println!("Initial tree state: {:?}", tree.get_commitment()); + + // Test insert + let hc1 = Hashchain::new("key_1".into()); + let key1 = hc1.get_keyhash(); + let insert_proof = tree + .insert(key1, hc1.clone()) + .expect("Insert should succeed"); + assert!(insert_proof.verify().is_ok()); + tree.write_batch().expect("Write batch should succeed"); + + println!("After first insert: {:?}", tree.get_commitment()); + + // Test get after insert + // Test get after insert + let get_result = tree.get(key1).expect("Get should succeed"); + println!("Get result after insert: {:?}", get_result); + assert_eq!(get_result.expect("Key should exist"), hc1); + + // Test update + let mut hc1_updated = hc1.clone(); + hc1_updated + .add("new_value".into()) + .expect("Add to hashchain should succeed"); + let update_proof = tree + .update(key1, hc1_updated.clone()) + .expect("Update should succeed"); + assert!(update_proof.verify().is_ok()); + tree.write_batch().expect("Write batch should succeed"); + + // Test get after update + let get_result_after_update = tree.get(key1).expect("Get should succeed"); + assert_eq!( + get_result_after_update.expect("Key should exist"), + hc1_updated + ); + + // Test insert duplicate key + let insert_duplicate_result = tree.insert(key1, hc1.clone()); + assert!(insert_duplicate_result.is_err()); + + // Test update non-existing key + let non_existing_key = KeyHash::with::(b"non_existing_key"); + let update_non_existing_result = tree.update(non_existing_key, 
hc1.clone()); + assert!(update_non_existing_result.is_err()); + + // Test get non-existing key + let get_non_existing_result = tree.get(non_existing_key).expect("Get should not fail"); + assert!(get_non_existing_result.is_err()); + if let Err(non_membership_proof) = get_non_existing_result { + assert!(non_membership_proof.verify().is_ok()); + } + + // Test multiple inserts and updates + let hc2 = Hashchain::new("key_2".into()); + let key2 = hc2.get_keyhash(); + tree.insert(key2, hc2.clone()) + .expect("Insert should succeed"); + tree.write_batch().expect("Write batch should succeed"); + + let mut hc2_updated = hc2.clone(); + hc2_updated + .add("value2".into()) + .expect("Add to hashchain should succeed"); + tree.update(key2, hc2_updated.clone()) + .expect("Update should succeed"); + tree.write_batch().expect("Write batch should succeed"); + + assert_eq!(tree.get(key2).unwrap().unwrap(), hc2_updated); + + // Test root hash changes + let root_before = tree + .get_commitment() + .expect("Get commitment should succeed"); + let hc3 = Hashchain::new("key_3".into()); + let key3 = hc3.get_keyhash(); + tree.insert(key3, hc3).expect("Insert should succeed"); + tree.write_batch().expect("Write batch should succeed"); + let root_after = tree + .get_commitment() + .expect("Get commitment should succeed"); + + assert_ne!(root_before, root_after); + + // Test batch writing + let hc4 = Hashchain::new("key_4".into()); + let hc5 = Hashchain::new("key_5".into()); + let key4 = hc4.get_keyhash(); + let key5 = hc5.get_keyhash(); + + tree.insert(key4, hc4.clone()) + .expect("Insert should succeed"); + tree.insert(key5, hc5.clone()) + .expect("Insert should succeed"); + + // Before writing the batch + assert!(tree.get(key4).unwrap().is_err()); + assert!(tree.get(key5).unwrap().is_err()); + + tree.write_batch().expect("Write batch should succeed"); + + // After writing the batch + assert_eq!(tree.get(key4).unwrap().unwrap(), hc4); + assert_eq!(tree.get(key5).unwrap().unwrap(), hc5); + } +} diff --git a/src/nova/update.rs b/src/nova/update.rs index 26ff7d21..9a08971a 100644 --- a/src/nova/update.rs +++ b/src/nova/update.rs @@ -1,14 +1,8 @@ -use crate::tree::{InsertProof, UpdateProof}; -use crate::{ - nova::utils::{next_rom_index_and_pc, Digest}, - tree, -}; +use crate::nova::utils::{next_rom_index_and_pc, Digest as NovaDigest}; +use crate::tree::UpdateProof; use anyhow::Result; use arecibo::supernova::StepCircuit; -use bellpepper_core::{ - num::{AllocatedNum, Num}, - ConstraintSystem, SynthesisError, -}; +use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; use ff::PrimeField; #[derive(Clone)] @@ -52,14 +46,14 @@ where cs.push_namespace(|| format!("update_proof {:?}", self.update_proof.old_root)); - let pre_insertion_scalar = Digest::new(self.update_proof.new_root) + let pre_insertion_scalar = NovaDigest::from_root_hash(self.update_proof.old_root) .to_scalar() .map_err(|_| SynthesisError::Unsatisfiable); let pre_insertion_root = AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { pre_insertion_scalar })?; - let new_scalar = Digest::new(self.update_proof.new_root) + let new_scalar = NovaDigest::from_root_hash(self.update_proof.new_root) .to_scalar() .map_err(|_| SynthesisError::Unsatisfiable); let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || new_scalar)?; diff --git a/src/nova/utils.rs b/src/nova/utils.rs index a6e20700..3b67909a 100644 --- a/src/nova/utils.rs +++ b/src/nova/utils.rs @@ -7,6 +7,7 @@ use bellpepper_core::{ }; use ff::PrimeField; use 
itertools::Itertools as _; +use jmt::RootHash; use std::marker::PhantomData; use crate::tree; @@ -24,6 +25,10 @@ impl Digest { } } + pub fn from_root_hash(root_hash: RootHash) -> Self { + Self::new(root_hash.into()) + } + // uses [`PrimeField::from_u128`] for inspiration. If the field element's capacity is not enough to hold the hash, pub fn to_scalar(&self) -> Result { let bytes = self.digest.as_ref(); diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 973be335..37cfd8ca 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -1,5 +1,5 @@ -use crate::storage::RedisConnection; use anyhow::{anyhow, bail, ensure, Context, Result}; +use bls12_381::Scalar; use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize}; use jmt::{ proof::{SparseMerkleProof, UpdateMerkleProof}, @@ -9,7 +9,7 @@ use jmt::{ use serde::{Deserialize, Serialize}; use std::sync::Arc; -use crate::{common::Hashchain, storage::Database}; +use crate::common::Hashchain; pub const SPARSE_MERKLE_PLACEHOLDER_HASH: Digest = Digest::new(*b"SPARSE_MERKLE_PLACEHOLDER_HASH__"); @@ -21,6 +21,35 @@ pub type Hasher = sha2::Sha256; )] pub struct Digest([u8; 32]); +// implementing it for now to get things to compile, curve choice will be made later +impl TryFrom for Scalar { + type Error = anyhow::Error; + + fn try_from(value: Digest) -> Result { + let mut byte_array = [0u8; 32]; + byte_array.copy_from_slice(value.as_ref()); + byte_array.reverse(); + + let val = + [ + u64::from_le_bytes(byte_array[0..8].try_into().map_err(|_| { + anyhow!(format!("slice to array: [0..8] for digest: {value:?}")) + })?), + u64::from_le_bytes(byte_array[8..16].try_into().map_err(|_| { + anyhow!(format!("slice to array: [8..16] for digest: {value:?}")) + })?), + u64::from_le_bytes(byte_array[16..24].try_into().map_err(|_| { + anyhow!(format!("slice to array: [16..24] for digest: {value:?}")) + })?), + u64::from_le_bytes(byte_array[24..32].try_into().map_err(|_| { + anyhow!(format!("slice to array: [24..32] for digest: {value:?}")) + })?), + ]; + + Ok(Scalar::from_raw(val)) + } +} + impl Into for Digest { fn into(self) -> RootHash { RootHash::from(self.0) @@ -116,8 +145,8 @@ impl InsertProof { #[derive(Debug, Clone, BorshSerialize, BorshDeserialize)] pub struct UpdateProof { - pub old_root: Digest, - pub new_root: Digest, + pub old_root: RootHash, + pub new_root: RootHash, pub key: KeyHash, pub new_value: Hashchain, @@ -130,8 +159,8 @@ impl UpdateProof { let new_value = to_vec(&self.new_value).unwrap(); self.proof.clone().verify_update( - self.old_root.into(), - self.new_root.into(), + self.old_root, + self.new_root, vec![(self.key, Some(new_value))], ) } @@ -299,11 +328,18 @@ mod tests { let hc1 = Hashchain::new("key_1".into()); let key = hc1.get_keyhash(); + println!("hc1: {:?}", hc1); + println!("key: {:?}", key); + + println!("Initial tree state: {:?}", tree.get_commitment()); + let insert_proof = tree.insert(key, hc1.clone()); assert!(insert_proof.is_ok()); tree.write_batch().unwrap(); + println!("After first insert: {:?}", tree.get_commitment()); + let get_result = tree.get(key).unwrap().unwrap(); assert_eq!(get_result, hc1); @@ -361,7 +397,7 @@ mod tests { #[test] fn test_get_non_existing_key() { let store = MockTreeStore::default(); - let mut tree = KeyDirectoryTree::new(store); + let tree = KeyDirectoryTree::new(Arc::new(store)); let key = KeyHash::with::(b"non_existing_key"); let result = tree.get(key).unwrap(); @@ -375,7 +411,7 @@ mod tests { #[test] fn test_multiple_inserts_and_updates() { let store = MockTreeStore::default(); - let mut tree = 
KeyDirectoryTree::new(store); + let mut tree = KeyDirectoryTree::new(Arc::new(store)); let mut hc1 = Hashchain::new("key_1".into()); let mut hc2 = Hashchain::new("key_2".into()); @@ -416,24 +452,49 @@ mod tests { #[test] fn test_batch_writing() { let store = Arc::new(MockTreeStore::default()); - let mut tree = KeyDirectoryTree::new(store); + let mut tree = KeyDirectoryTree::new(store.clone()); let hc1 = Hashchain::new("key_1".into()); - let hc2 = Hashchain::new("key_2".into()); let key1 = hc1.get_keyhash(); - let key2 = hc2.get_keyhash(); + println!("Inserting key1: {:?}", key1); tree.insert(key1, hc1.clone()).unwrap(); - tree.insert(key2, hc2.clone()).unwrap(); - // Before writing the batch - assert!(tree.get(key1).unwrap().is_err()); - assert!(tree.get(key2).unwrap().is_err()); + println!("Tree state after first insert: {:?}", tree.get_commitment()); + tree.write_batch().unwrap(); + println!( + "Tree state after first write_batch: {:?}", + tree.get_commitment() + ); + + // Try to get the first value immediately + let get_result1 = tree.get(key1); + println!("Get result for key1 after first write: {:?}", get_result1); + + let hc2 = Hashchain::new("key_2".into()); + let key2 = hc2.get_keyhash(); + + println!("Inserting key2: {:?}", key2); + tree.insert(key2, hc2.clone()).unwrap(); + println!( + "Tree state after second insert: {:?}", + tree.get_commitment() + ); tree.write_batch().unwrap(); + println!( + "Tree state after second write_batch: {:?}", + tree.get_commitment() + ); - // After writing the batch - assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); - assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); + // Try to get both values + let get_result1 = tree.get(key1); + let get_result2 = tree.get(key2); + + println!("Final get result for key1: {:?}", get_result1); + println!("Final get result for key2: {:?}", get_result2); + + assert_eq!(get_result1.unwrap().unwrap(), hc1); + assert_eq!(get_result2.unwrap().unwrap(), hc2); } } diff --git a/src/utils.rs b/src/utils.rs index c00dccda..f93151a5 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,6 +1,7 @@ use crate::{ circuits::ProofVariantCircuit, error::{GeneralError, PrismError, ProofError}, + tree::Digest, }; use anyhow::Result; use base64::{engine::general_purpose::STANDARD as engine, Engine as _}; @@ -60,8 +61,8 @@ pub fn create_and_verify_snark( } pub fn validate_epoch( - previous_commitment: &Hash, - current_commitment: &Hash, + previous_commitment: &Digest, + current_commitment: &Digest, proof: groth16::Proof, verifying_key: VerifyingKey, ) -> Result, PrismError> { @@ -145,6 +146,10 @@ mod tests { assert!(decode_public_key(&invalid_length_pub_key_str.to_string()).is_err()); } + /* + + TODO: rewrite with supernova + #[test] fn test_validate_epoch_valid_proof() { let mut tree = IndexedMerkleTree::new_with_size(8).unwrap(); @@ -201,5 +206,5 @@ mod tests { assert!(result.is_ok()); assert_eq!(result.unwrap(), proof); - } + } */ } diff --git a/src/webserver.rs b/src/webserver.rs index 2f719d8d..6ae5269c 100644 --- a/src/webserver.rs +++ b/src/webserver.rs @@ -132,7 +132,7 @@ impl WebServer { Self { cfg } } - pub async fn start(&self, session: Arc>) -> Result<()> { + pub async fn start(&self, session: Arc) -> Result<()> { info!("starting webserver on {}:{}", self.cfg.host, self.cfg.port); let app = Router::new() .route("/update-entry", post(update_entry)) @@ -165,7 +165,7 @@ impl WebServer { ) )] async fn update_entry( - State(session): State>>, + State(session): State>, Json(signature_with_key): Json, ) -> impl IntoResponse { match 
session.validate_and_queue_update(&signature_with_key).await { @@ -196,7 +196,7 @@ async fn update_entry( ) )] async fn get_hashchain( - State(session): State>>, + State(session): State>, Json(request): Json, ) -> impl IntoResponse { match session.db.get_hashchain(&request.id) { @@ -219,7 +219,7 @@ async fn get_hashchain( (status = 500, description = "Internal server error") ) )] -async fn get_commitment(State(session): State>>) -> impl IntoResponse { +async fn get_commitment(State(session): State>) -> impl IntoResponse { match session.get_commitment().await { Ok(commitment) => (StatusCode::OK, Json(commitment)).into_response(), Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), From a3367dad7aeb2d9a3eb82914b2c3ba0b2873999d Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 20 Aug 2024 21:23:25 +0200 Subject: [PATCH 11/33] write_batch --- src/nova/mod.rs | 12 +- src/tree/mod.rs | 20 +-- tests/integration_tests.rs | 252 ++++++++++++++++++------------------- 3 files changed, 138 insertions(+), 146 deletions(-) diff --git a/src/nova/mod.rs b/src/nova/mod.rs index da3cefa5..b96d9b2b 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -25,7 +25,7 @@ mod tests { .insert(key1, hc1.clone()) .expect("Insert should succeed"); assert!(insert_proof.verify().is_ok()); - tree.write_batch().expect("Write batch should succeed"); + // tree.write_batch().expect("Write batch should succeed"); println!("After first insert: {:?}", tree.get_commitment()); @@ -44,7 +44,7 @@ mod tests { .update(key1, hc1_updated.clone()) .expect("Update should succeed"); assert!(update_proof.verify().is_ok()); - tree.write_batch().expect("Write batch should succeed"); + // tree.write_batch().expect("Write batch should succeed"); // Test get after update let get_result_after_update = tree.get(key1).expect("Get should succeed"); @@ -74,7 +74,7 @@ mod tests { let key2 = hc2.get_keyhash(); tree.insert(key2, hc2.clone()) .expect("Insert should succeed"); - tree.write_batch().expect("Write batch should succeed"); + // tree.write_batch().expect("Write batch should succeed"); let mut hc2_updated = hc2.clone(); hc2_updated @@ -82,7 +82,7 @@ mod tests { .expect("Add to hashchain should succeed"); tree.update(key2, hc2_updated.clone()) .expect("Update should succeed"); - tree.write_batch().expect("Write batch should succeed"); + // tree.write_batch().expect("Write batch should succeed"); assert_eq!(tree.get(key2).unwrap().unwrap(), hc2_updated); @@ -93,7 +93,7 @@ mod tests { let hc3 = Hashchain::new("key_3".into()); let key3 = hc3.get_keyhash(); tree.insert(key3, hc3).expect("Insert should succeed"); - tree.write_batch().expect("Write batch should succeed"); + // tree.write_batch().expect("Write batch should succeed"); let root_after = tree .get_commitment() .expect("Get commitment should succeed"); @@ -115,7 +115,7 @@ mod tests { assert!(tree.get(key4).unwrap().is_err()); assert!(tree.get(key5).unwrap().is_err()); - tree.write_batch().expect("Write batch should succeed"); + // tree.write_batch().expect("Write batch should succeed"); // After writing the batch assert_eq!(tree.get(key4).unwrap().unwrap(), hc4); diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 37cfd8ca..18a91b73 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -213,9 +213,10 @@ where } } - pub fn write_batch(&mut self) -> Result<()> { + fn write_batch(&mut self) -> Result<()> { if let Some(batch) = self.pending_batch.take() { self.db.write_node_batch(&batch)?; + self.epoch += 1; } Ok(()) } @@ -257,8 +258,9 @@ where let (new_root, 
membership_proof, tree_update_batch) = self .jmt - .put_value_set_with_proof(vec![(key, Some(serialized_value))], self.epoch)?; + .put_value_set_with_proof(vec![(key, Some(serialized_value))], self.epoch + 1)?; self.queue_batch(tree_update_batch); + self.write_batch()?; ensure!( membership_proof.len() == 1, @@ -288,6 +290,7 @@ where self.epoch + 1, )?; self.queue_batch(tree_update_batch); + self.write_batch()?; Ok(UpdateProof { old_root, @@ -323,7 +326,7 @@ mod tests { #[test] fn test_insert_and_get() { let store = Arc::new(MockTreeStore::default()); - let mut tree = KeyDirectoryTree::new(store); + let mut tree = KeyDirectoryTree::new(store.clone()); let hc1 = Hashchain::new("key_1".into()); let key = hc1.get_keyhash(); @@ -336,8 +339,6 @@ mod tests { let insert_proof = tree.insert(key, hc1.clone()); assert!(insert_proof.is_ok()); - tree.write_batch().unwrap(); - println!("After first insert: {:?}", tree.get_commitment()); let get_result = tree.get(key).unwrap().unwrap(); @@ -354,7 +355,6 @@ mod tests { let key = hc1.get_keyhash(); tree.insert(key, hc1.clone()).unwrap(); - tree.write_batch().unwrap(); let hc2 = Hashchain::new("key_1".into()); let result = tree.insert(key, hc2); @@ -370,14 +370,11 @@ mod tests { let key = hc1.get_keyhash(); tree.insert(key, hc1.clone()).unwrap(); - tree.write_batch().unwrap(); hc1.add("new_value".into()).unwrap(); let update_proof = tree.update(key, hc1.clone()).unwrap(); assert!(update_proof.verify().is_ok()); - tree.write_batch().unwrap(); - let get_result = tree.get(key).unwrap().unwrap(); assert_eq!(get_result, hc1); } @@ -420,14 +417,12 @@ mod tests { tree.insert(key1, hc1.clone()).unwrap(); tree.insert(key2, hc2.clone()).unwrap(); - tree.write_batch().unwrap(); hc1.add("value1".into()).unwrap(); hc2.add("value2".into()).unwrap(); tree.update(key1, hc1.clone()).unwrap(); tree.update(key2, hc2.clone()).unwrap(); - tree.write_batch().unwrap(); assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); @@ -443,7 +438,6 @@ mod tests { let root_before = tree.get_current_root().unwrap(); tree.insert(key1, hc1).unwrap(); - tree.write_batch().unwrap(); let root_after = tree.get_current_root().unwrap(); assert_ne!(root_before, root_after); @@ -461,7 +455,6 @@ mod tests { tree.insert(key1, hc1.clone()).unwrap(); println!("Tree state after first insert: {:?}", tree.get_commitment()); - tree.write_batch().unwrap(); println!( "Tree state after first write_batch: {:?}", tree.get_commitment() @@ -481,7 +474,6 @@ mod tests { "Tree state after second insert: {:?}", tree.get_commitment() ); - tree.write_batch().unwrap(); println!( "Tree state after second write_batch: {:?}", tree.get_commitment() diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs index 50e4ad61..de3eb36d 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -12,129 +12,129 @@ use prism::{ use rand::{rngs::StdRng, Rng, SeedableRng}; use std::{sync::Arc, time::Duration}; -fn create_new_account_operation(id: String, value: String, key: &SigningKey) -> OperationInput { - let incoming = Operation::CreateAccount { - id: id.clone(), - value: value.clone(), - source: AccountSource::SignedBySequencer { - signature: key.sign(format!("{}{}", id, value).as_bytes()).to_string(), - }, - }; - let content = serde_json::to_string(&incoming).unwrap(); - let sig = key.sign(content.clone().as_bytes()); - OperationInput { - operation: incoming, - signed_operation: sig.to_string(), - public_key: engine.encode(key.verifying_key().to_bytes()), - 
} -} - -fn create_update_operation(id: String, value: String) -> OperationInput { - let key = create_signing_key(); - let incoming = Operation::Add { id, value }; - let content = serde_json::to_string(&incoming).unwrap(); - let sig = key.sign(content.clone().as_bytes()); - OperationInput { - operation: incoming, - signed_operation: sig.to_string(), - public_key: engine.encode(key.verifying_key().to_bytes()), - } -} - -#[tokio::test] -async fn test_light_client_sequencer_talking() { - std::env::set_var("RUST_LOG", "DEBUG"); - pretty_env_logger::init(); - - let (da_layer, mut height_rx, mut _block_rx) = InMemoryDataAvailabilityLayer::new(1); - let da_layer = Arc::new(da_layer); - let db = Arc::new(setup_db()); - let cfg = Config::default(); - let signing_key = create_signing_key(); - let pubkey = engine.encode(signing_key.verifying_key().to_bytes()); - - let sequencer = Arc::new( - Sequencer::new( - db.clone(), - da_layer.clone(), - cfg.clone(), - signing_key.clone(), - ) - .unwrap(), - ); - - let lightclient = Arc::new(LightClient::new( - da_layer, - cfg.celestia_config.unwrap(), - Some(pubkey), - )); - - let seq_1 = sequencer.clone(); - tokio::spawn(async move { - seq_1.start().await.unwrap(); - }); - - tokio::spawn(async move { - lightclient.clone().start().await.unwrap(); - }); - - let seq = sequencer.clone(); - tokio::spawn(async move { - let mut rng = StdRng::from_entropy(); - let mut accounts = Vec::new(); - let mut i = 0; - - loop { - let seq_clone = seq.clone(); - // Create 1 or 2 new accounts - let num_new_accounts = rng.gen_range(1..=10); - for _ in 0..num_new_accounts { - let seq_i = seq_clone.clone(); - let new_acc = create_new_account_operation( - format!("{}@gmail.com", i), - format!("key_{}", i), - &signing_key, - ); - seq_i.validate_and_queue_update(&new_acc).await.unwrap(); - accounts.push(format!("{}@gmail.com", i)); - i += 1; - } - - // Update 5 random existing accounts (if we have at least 5) - if accounts.len() >= 5 { - for _ in 0..5 { - let seq_i = seq_clone.clone(); - let account_index = rng.gen_range(0..accounts.len()); - let account_id = accounts[account_index].clone(); - let update_op = create_update_operation( - account_id, - format!("updated_key_{}", rng.gen::()), - ); - seq_i.validate_and_queue_update(&update_op).await.unwrap(); - } - } - - tokio::time::sleep(Duration::from_millis(500)).await; - } - }); - - while let Ok(height) = height_rx.recv().await { - if height == 60 { - break; - } - } - - teardown_db(db.clone()) -} - -// set up redis connection and flush database before each test -fn setup_db() -> RedisConnection { - let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap(); - redis_connection.flush_database().unwrap(); - redis_connection -} - -// flush database after each test -fn teardown_db(redis_connections: Arc) { - redis_connections.flush_database().unwrap(); -} +// fn create_new_account_operation(id: String, value: String, key: &SigningKey) -> OperationInput { +// let incoming = Operation::CreateAccount { +// id: id.clone(), +// value: value.clone(), +// source: AccountSource::SignedBySequencer { +// signature: key.sign(format!("{}{}", id, value).as_bytes()).to_string(), +// }, +// }; +// let content = serde_json::to_string(&incoming).unwrap(); +// let sig = key.sign(content.clone().as_bytes()); +// OperationInput { +// operation: incoming, +// signed_operation: sig.to_string(), +// public_key: engine.encode(key.verifying_key().to_bytes()), +// } +// } + +// fn create_update_operation(id: String, value: String) -> OperationInput 
{ +// let key = create_signing_key(); +// let incoming = Operation::Add { id, value }; +// let content = serde_json::to_string(&incoming).unwrap(); +// let sig = key.sign(content.clone().as_bytes()); +// OperationInput { +// operation: incoming, +// signed_operation: sig.to_string(), +// public_key: engine.encode(key.verifying_key().to_bytes()), +// } +// } + +// #[tokio::test] +// async fn test_light_client_sequencer_talking() { +// std::env::set_var("RUST_LOG", "DEBUG"); +// pretty_env_logger::init(); + +// let (da_layer, mut height_rx, mut _block_rx) = InMemoryDataAvailabilityLayer::new(1); +// let da_layer = Arc::new(da_layer); +// let db = Arc::new(setup_db()); +// let cfg = Config::default(); +// let signing_key = create_signing_key(); +// let pubkey = engine.encode(signing_key.verifying_key().to_bytes()); + +// let sequencer = Arc::new( +// Sequencer::new( +// db.clone(), +// da_layer.clone(), +// cfg.clone(), +// signing_key.clone(), +// ) +// .unwrap(), +// ); + +// let lightclient = Arc::new(LightClient::new( +// da_layer, +// cfg.celestia_config.unwrap(), +// Some(pubkey), +// )); + +// let seq_1 = sequencer.clone(); +// tokio::spawn(async move { +// seq_1.start().await.unwrap(); +// }); + +// tokio::spawn(async move { +// lightclient.clone().start().await.unwrap(); +// }); + +// let seq = sequencer.clone(); +// tokio::spawn(async move { +// let mut rng = StdRng::from_entropy(); +// let mut accounts = Vec::new(); +// let mut i = 0; + +// loop { +// let seq_clone = seq.clone(); +// // Create 1 or 2 new accounts +// let num_new_accounts = rng.gen_range(1..=10); +// for _ in 0..num_new_accounts { +// let seq_i = seq_clone.clone(); +// let new_acc = create_new_account_operation( +// format!("{}@gmail.com", i), +// format!("key_{}", i), +// &signing_key, +// ); +// seq_i.validate_and_queue_update(&new_acc).await.unwrap(); +// accounts.push(format!("{}@gmail.com", i)); +// i += 1; +// } + +// // Update 5 random existing accounts (if we have at least 5) +// if accounts.len() >= 5 { +// for _ in 0..5 { +// let seq_i = seq_clone.clone(); +// let account_index = rng.gen_range(0..accounts.len()); +// let account_id = accounts[account_index].clone(); +// let update_op = create_update_operation( +// account_id, +// format!("updated_key_{}", rng.gen::()), +// ); +// seq_i.validate_and_queue_update(&update_op).await.unwrap(); +// } +// } + +// tokio::time::sleep(Duration::from_millis(500)).await; +// } +// }); + +// while let Ok(height) = height_rx.recv().await { +// if height == 60 { +// break; +// } +// } + +// teardown_db(db.clone()) +// } + +// // set up redis connection and flush database before each test +// fn setup_db() -> RedisConnection { +// let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap(); +// redis_connection.flush_database().unwrap(); +// redis_connection +// } + +// // flush database after each test +// fn teardown_db(redis_connections: Arc) { +// redis_connections.flush_database().unwrap(); +// } From a9195b1d8b7760363361ee00da924c1642736400 Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 20 Aug 2024 22:18:53 +0200 Subject: [PATCH 12/33] failing test --- src/nova/batch.rs | 187 +++++++++++++++++++++++++++++++++++++++++++++ src/nova/insert.rs | 24 ++++-- src/nova/mod.rs | 120 ----------------------------- src/nova/update.rs | 14 +++- 4 files changed, 216 insertions(+), 129 deletions(-) diff --git a/src/nova/batch.rs b/src/nova/batch.rs index de0f2acc..90401293 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -22,6 +22,18 @@ where rom: Vec, } 
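+// The ROM acts as the program for SuperNova's non-uniform IVC: each entry is the
+// index of the step circuit to run at that step (0 = insert, 1 = update). The step
+// state is laid out as z = [current_root, rom_index, rom[0], .., rom[rom_size - 1]],
+// which is why the step circuits report an arity of 2 + rom_size. `new` below
+// simply splits the (index, circuit) pairs into that ROM and the matching list of
+// circuits.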
+impl EpochCircuitSequence +where + E1: CurveCycleEquipped, +{ + pub fn new(operations: Vec<(usize, EpochCircuit)>) -> Self { + let rom = operations.iter().map(|(op, _)| *op).collect(); + let circuits = operations.into_iter().map(|(_, circuit)| circuit).collect(); + + Self { circuits, rom } + } +} + impl arecibo::supernova::NonUniformCircuit for EpochCircuitSequence where E1: CurveCycleEquipped, @@ -52,6 +64,16 @@ enum EpochCircuit { Update(UpdateCircuit), } +impl EpochCircuit { + pub fn new_insert(insertion_proof: InsertProof, rom_size: usize) -> Self { + Self::Insert(InsertCircuit::new(insertion_proof, rom_size)) + } + + pub fn new_update(update_proof: UpdateProof, rom_size: usize) -> Self { + Self::Update(UpdateCircuit::new(update_proof, rom_size)) + } +} + impl StepCircuit for EpochCircuit { fn arity(&self) -> usize { match self { @@ -85,3 +107,168 @@ impl StepCircuit for EpochCircuit { } } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::common::Hashchain; + use crate::nova::utils::Digest as NovaDigest; + use crate::tree::*; + use arecibo::provider::{PallasEngine, VestaEngine}; + use arecibo::supernova::{PublicParams, RecursiveSNARK, TrivialTestCircuit}; + use arecibo::traits::snark::default_ck_hint; + use ff::Field; + use jmt::mock::MockTreeStore; + use jmt::KeyHash; + use rand::{rngs::StdRng, Rng, SeedableRng}; + use std::sync::Arc; + + use std::collections::HashSet; + + struct TestTreeState { + pub tree: KeyDirectoryTree, + inserted_keys: HashSet, + } + + impl TestTreeState { + fn new() -> Self { + let store = Arc::new(MockTreeStore::default()); + let tree = KeyDirectoryTree::new(store); + Self { + tree, + inserted_keys: HashSet::new(), + } + } + } + + fn create_random_insert(state: &mut TestTreeState, rng: &mut StdRng) -> InsertProof { + loop { + let random_string: String = (0..10) + .map(|_| rng.sample(rand::distributions::Alphanumeric) as char) + .collect(); + let hc = Hashchain::new(random_string); + let key = hc.get_keyhash(); + + if !state.inserted_keys.contains(&key) { + let proof = state.tree.insert(key, hc).expect("Insert should succeed"); + state.inserted_keys.insert(key); + return proof; + } + } + } + + fn create_random_update(state: &mut TestTreeState, rng: &mut StdRng) -> UpdateProof { + if state.inserted_keys.is_empty() { + panic!("No keys have been inserted yet. 
Cannot perform update."); + } + + let key = *state + .inserted_keys + .iter() + .nth(rng.gen_range(0..state.inserted_keys.len())) + .unwrap(); + let mut hc = state.tree.get(key).unwrap().unwrap(); + + let random_string: String = (0..10) + .map(|_| rng.sample(rand::distributions::Alphanumeric) as char) + .collect(); + hc.add(random_string) + .expect("Adding to hashchain should succeed"); + + state.tree.update(key, hc).expect("Update should succeed") + } + + #[test] + fn test_recursive_epoch_circuit_proof() { + type E1 = PallasEngine; + type E2 = VestaEngine; + + let mut state = TestTreeState::new(); + let mut rng = StdRng::from_entropy(); + + let operations = vec![ + ( + 0, + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), + ), + ( + 1, + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + ), + ( + 0, + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), + ), + ( + 1, + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + ), + ]; + let circuit_sequence = EpochCircuitSequence::::new(operations); + let secondary_circuit = TrivialSecondaryCircuit::<::Scalar>::default(); + + let pp = PublicParams::setup(&circuit_sequence, &*default_ck_hint(), &*default_ck_hint()); + + let initial_commitment: ::Scalar = + NovaDigest::new(state.tree.get_commitment().unwrap()) + .to_scalar() + .unwrap(); + let mut z0_primary = vec![initial_commitment]; // Initial root + z0_primary.push(::Scalar::ZERO); // Initial ROM index + z0_primary.extend( + circuit_sequence + .rom + .iter() + .map(|&x| ::Scalar::from(x as u64)), + ); + let z0_secondary = vec![< as Engine>::Scalar>::ONE]; + + // Initialize RecursiveSNARK + let mut recursive_snark = RecursiveSNARK::::new( + &pp, + &circuit_sequence, + &circuit_sequence.circuits[0], + &secondary_circuit, + &z0_primary, + &z0_secondary, + ) + .unwrap(); + + // Prove steps + for circuit in &circuit_sequence.circuits { + recursive_snark + .prove_step(&pp, circuit, &secondary_circuit) + .unwrap(); + + // Verify after each step + recursive_snark + .verify(&pp, &z0_primary, &z0_secondary) + .unwrap(); + } + + // Final verification + assert!(recursive_snark + .verify(&pp, &z0_primary, &z0_secondary) + .is_ok()); + + // Additional assertions + let zi_primary = &recursive_snark.zi_primary(); + + println!("Final primary state: {:?}", zi_primary); + + assert_eq!( + zi_primary.len(), + z0_primary.len(), + "Primary state vector length should remain constant" + ); + + let final_commitment: ::Scalar = + NovaDigest::new(state.tree.get_commitment().unwrap()) + .to_scalar() + .unwrap(); + assert_eq!( + zi_primary[0], final_commitment, + "Final commitment should match the tree state" + ); + } +} diff --git a/src/nova/insert.rs b/src/nova/insert.rs index 8ba700ad..b1ea3f54 100644 --- a/src/nova/insert.rs +++ b/src/nova/insert.rs @@ -12,6 +12,16 @@ pub struct InsertCircuit { _phantom: std::marker::PhantomData, } +impl InsertCircuit { + pub fn new(insertion_proof: InsertProof, rom_size: usize) -> Self { + Self { + insertion_proof, + rom_size, + _phantom: std::marker::PhantomData, + } + } +} + impl StepCircuit for InsertCircuit where F: PrimeField, @@ -44,12 +54,12 @@ where pc, )?; - cs.push_namespace(|| { - format!( - "insert_proof {:?}", - self.insertion_proof.non_membership_proof.root - ) - }); + // cs.push_namespace(|| { + // format!( + // "insert_proof {:?}", + // self.insertion_proof.non_membership_proof.root + // ) + // }); let pre_insertion_scalar = 
Digest::new(self.insertion_proof.non_membership_proof.root) .to_scalar() @@ -74,7 +84,7 @@ where .verify() .map_err(|_| SynthesisError::Unsatisfiable)?; - cs.pop_namespace(); + // cs.pop_namespace(); // Prepare the next state vector let mut z_next = vec![new_root]; diff --git a/src/nova/mod.rs b/src/nova/mod.rs index b96d9b2b..ef348743 100644 --- a/src/nova/mod.rs +++ b/src/nova/mod.rs @@ -2,123 +2,3 @@ pub mod batch; pub mod insert; pub mod update; pub mod utils; - -#[cfg(test)] -mod tests { - use crate::common::Hashchain; - use crate::tree::{Hasher, KeyDirectoryTree, SnarkableTree}; - use jmt::mock::MockTreeStore; - use jmt::KeyHash; - use std::sync::Arc; - - #[test] - fn test_key_directory_tree() { - let store = Arc::new(MockTreeStore::default()); - let mut tree = KeyDirectoryTree::new(store); - - println!("Initial tree state: {:?}", tree.get_commitment()); - - // Test insert - let hc1 = Hashchain::new("key_1".into()); - let key1 = hc1.get_keyhash(); - let insert_proof = tree - .insert(key1, hc1.clone()) - .expect("Insert should succeed"); - assert!(insert_proof.verify().is_ok()); - // tree.write_batch().expect("Write batch should succeed"); - - println!("After first insert: {:?}", tree.get_commitment()); - - // Test get after insert - // Test get after insert - let get_result = tree.get(key1).expect("Get should succeed"); - println!("Get result after insert: {:?}", get_result); - assert_eq!(get_result.expect("Key should exist"), hc1); - - // Test update - let mut hc1_updated = hc1.clone(); - hc1_updated - .add("new_value".into()) - .expect("Add to hashchain should succeed"); - let update_proof = tree - .update(key1, hc1_updated.clone()) - .expect("Update should succeed"); - assert!(update_proof.verify().is_ok()); - // tree.write_batch().expect("Write batch should succeed"); - - // Test get after update - let get_result_after_update = tree.get(key1).expect("Get should succeed"); - assert_eq!( - get_result_after_update.expect("Key should exist"), - hc1_updated - ); - - // Test insert duplicate key - let insert_duplicate_result = tree.insert(key1, hc1.clone()); - assert!(insert_duplicate_result.is_err()); - - // Test update non-existing key - let non_existing_key = KeyHash::with::(b"non_existing_key"); - let update_non_existing_result = tree.update(non_existing_key, hc1.clone()); - assert!(update_non_existing_result.is_err()); - - // Test get non-existing key - let get_non_existing_result = tree.get(non_existing_key).expect("Get should not fail"); - assert!(get_non_existing_result.is_err()); - if let Err(non_membership_proof) = get_non_existing_result { - assert!(non_membership_proof.verify().is_ok()); - } - - // Test multiple inserts and updates - let hc2 = Hashchain::new("key_2".into()); - let key2 = hc2.get_keyhash(); - tree.insert(key2, hc2.clone()) - .expect("Insert should succeed"); - // tree.write_batch().expect("Write batch should succeed"); - - let mut hc2_updated = hc2.clone(); - hc2_updated - .add("value2".into()) - .expect("Add to hashchain should succeed"); - tree.update(key2, hc2_updated.clone()) - .expect("Update should succeed"); - // tree.write_batch().expect("Write batch should succeed"); - - assert_eq!(tree.get(key2).unwrap().unwrap(), hc2_updated); - - // Test root hash changes - let root_before = tree - .get_commitment() - .expect("Get commitment should succeed"); - let hc3 = Hashchain::new("key_3".into()); - let key3 = hc3.get_keyhash(); - tree.insert(key3, hc3).expect("Insert should succeed"); - // tree.write_batch().expect("Write batch should succeed"); - let 
root_after = tree - .get_commitment() - .expect("Get commitment should succeed"); - - assert_ne!(root_before, root_after); - - // Test batch writing - let hc4 = Hashchain::new("key_4".into()); - let hc5 = Hashchain::new("key_5".into()); - let key4 = hc4.get_keyhash(); - let key5 = hc5.get_keyhash(); - - tree.insert(key4, hc4.clone()) - .expect("Insert should succeed"); - tree.insert(key5, hc5.clone()) - .expect("Insert should succeed"); - - // Before writing the batch - assert!(tree.get(key4).unwrap().is_err()); - assert!(tree.get(key5).unwrap().is_err()); - - // tree.write_batch().expect("Write batch should succeed"); - - // After writing the batch - assert_eq!(tree.get(key4).unwrap().unwrap(), hc4); - assert_eq!(tree.get(key5).unwrap().unwrap(), hc5); - } -} diff --git a/src/nova/update.rs b/src/nova/update.rs index 9a08971a..1056e0df 100644 --- a/src/nova/update.rs +++ b/src/nova/update.rs @@ -12,6 +12,16 @@ pub struct UpdateCircuit { _phantom: std::marker::PhantomData, } +impl UpdateCircuit { + pub fn new(update_proof: UpdateProof, rom_size: usize) -> Self { + Self { + update_proof, + rom_size, + _phantom: std::marker::PhantomData, + } + } +} + impl StepCircuit for UpdateCircuit where F: PrimeField, @@ -44,7 +54,7 @@ where pc, )?; - cs.push_namespace(|| format!("update_proof {:?}", self.update_proof.old_root)); + // cs.push_namespace(|| format!("update_proof {:?}", self.update_proof.old_root)); let pre_insertion_scalar = NovaDigest::from_root_hash(self.update_proof.old_root) .to_scalar() @@ -69,7 +79,7 @@ where .verify() .map_err(|_| SynthesisError::Unsatisfiable)?; - cs.pop_namespace(); + // cs.pop_namespace(); // Prepare the next state vector let mut z_next = vec![new_root]; From dbff1d4e373f91007107fb8370f99e164e481139 Mon Sep 17 00:00:00 2001 From: Ryan Date: Thu, 22 Aug 2024 11:02:06 +0200 Subject: [PATCH 13/33] nova batch updates --- Cargo.lock | 1 + Cargo.toml | 1 + rustfmt.toml | 3 +- src/cfg.rs | 8 +- src/circuits/merkle_update.rs | 11 ++- src/common.rs | 6 +- src/da/memory.rs | 6 +- src/nova/batch.rs | 147 ++++++++++++++++++++-------------- src/nova/insert.rs | 21 ++--- src/nova/update.rs | 18 ++--- src/storage.rs | 4 +- src/webserver.rs | 9 ++- 12 files changed, 131 insertions(+), 104 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 75eae12a..ef67d846 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4440,6 +4440,7 @@ dependencies = [ "serde_json", "serial_test", "sha2 0.10.8", + "tap", "thiserror", "tokio", "toml", diff --git a/Cargo.toml b/Cargo.toml index 2af051a3..0292a923 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,6 +18,7 @@ default = [] key_transparency = [] [dependencies] +tap = "1.0.1" axum = "0.6" borsh = { version = "1.5.1", features = ["derive"] } tower-http = { version = "0.4", features = ["cors"] } diff --git a/rustfmt.toml b/rustfmt.toml index e86028b1..a811a490 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1 +1,2 @@ -imports_granularity="Crate" +merge_imports = true +imports_granularity = "Crate" diff --git a/src/cfg.rs b/src/cfg.rs index d3217979..ba3f8e6f 100644 --- a/src/cfg.rs +++ b/src/cfg.rs @@ -1,6 +1,8 @@ -use crate::consts::{DA_RETRY_COUNT, DA_RETRY_INTERVAL}; -use crate::da::memory::InMemoryDataAvailabilityLayer; -use crate::error::{DataAvailabilityError, GeneralError, PrismError}; +use crate::{ + consts::{DA_RETRY_COUNT, DA_RETRY_INTERVAL}, + da::memory::InMemoryDataAvailabilityLayer, + error::{DataAvailabilityError, GeneralError, PrismError}, +}; use anyhow::{anyhow, Context, Result}; use clap::{Parser, Subcommand}; use 
config::{builder::DefaultState, ConfigBuilder, File}; diff --git a/src/circuits/merkle_update.rs b/src/circuits/merkle_update.rs index 67840e63..002bb935 100644 --- a/src/circuits/merkle_update.rs +++ b/src/circuits/merkle_update.rs @@ -1,8 +1,11 @@ -use crate::circuits::{ - utils::{recalculate_hash_as_scalar, unpack_and_process}, - ProofVariantCircuit, +use crate::{ + circuits::{ + utils::{recalculate_hash_as_scalar, unpack_and_process}, + ProofVariantCircuit, + }, + error::PrismError, + utils::create_and_verify_snark, }; -use crate::{error::PrismError, utils::create_and_verify_snark}; use anyhow::Result; use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; use bls12_381::{Bls12, Scalar}; diff --git a/src/common.rs b/src/common.rs index 0848929e..a7f8571f 100644 --- a/src/common.rs +++ b/src/common.rs @@ -3,8 +3,10 @@ use borsh::{BorshDeserialize, BorshSerialize}; use indexed_merkle_tree::Hash; use jmt::KeyHash; use serde::{Deserialize, Serialize}; -use std::fmt::Display; -use std::ops::{Deref, DerefMut}; +use std::{ + fmt::Display, + ops::{Deref, DerefMut}, +}; use crate::tree::{hash, Digest, Hasher}; diff --git a/src/da/memory.rs b/src/da/memory.rs index 78efea9a..6f24fd1b 100644 --- a/src/da/memory.rs +++ b/src/da/memory.rs @@ -5,8 +5,10 @@ use crate::{ use anyhow::Result; use async_trait::async_trait; use std::sync::Arc; -use tokio::sync::{broadcast, RwLock}; -use tokio::time::{interval, Duration}; +use tokio::{ + sync::{broadcast, RwLock}, + time::{interval, Duration}, +}; #[derive(Clone, Debug)] pub struct Block { diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 90401293..5d520f41 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -1,25 +1,22 @@ use super::{insert::InsertCircuit, update::UpdateCircuit}; use crate::tree::{InsertProof, UpdateProof}; -use arecibo::supernova::StepCircuit; -use arecibo::supernova::TrivialSecondaryCircuit; -use arecibo::traits::{CurveCycleEquipped, Dual, Engine}; +use arecibo::{ + supernova::{StepCircuit, TrivialSecondaryCircuit}, + traits::{CurveCycleEquipped, Dual, Engine}, +}; use ff::PrimeField; - -// Assume these functions exist -fn create_random_insert() -> InsertProof { - unimplemented!() -} -fn create_random_update() -> UpdateProof { - unimplemented!() -} +use std::cell::RefCell; #[derive(Clone)] struct EpochCircuitSequence where E1: CurveCycleEquipped, { - circuits: Vec>, + insert_circuits: Vec>, + update_circuits: Vec>, rom: Vec, + insert_index: RefCell, + update_index: RefCell, } impl EpochCircuitSequence @@ -28,9 +25,18 @@ where { pub fn new(operations: Vec<(usize, EpochCircuit)>) -> Self { let rom = operations.iter().map(|(op, _)| *op).collect(); - let circuits = operations.into_iter().map(|(_, circuit)| circuit).collect(); - - Self { circuits, rom } + let (insert_circuits, update_circuits): (Vec<_>, Vec<_>) = operations + .into_iter() + .map(|(_, circuit)| circuit) + .partition(|circuit| matches!(circuit, EpochCircuit::Insert(_))); + + Self { + insert_circuits, + update_circuits, + rom, + insert_index: RefCell::new(0), + update_index: RefCell::new(0), + } } } @@ -42,11 +48,25 @@ where type C2 = TrivialSecondaryCircuit< as Engine>::Scalar>; fn num_circuits(&self) -> usize { - 2 // Insert and Update + self.insert_circuits.len() + self.update_circuits.len() } fn primary_circuit(&self, circuit_index: usize) -> Self::C1 { - self.circuits[circuit_index].clone() + let (circuits, index) = if circuit_index == 0 { + (&self.insert_circuits, &self.insert_index) + } else { + (&self.update_circuits, &self.update_index) + }; + 
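+        // Round-robin cursor into the chosen pool: the RefCell index wraps back
+        // to the first circuit once every circuit in the pool has been handed
+        // out, then the circuit at the cursor is cloned and the cursor advances.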
+ let mut current_index = index.borrow_mut(); + + if *current_index >= circuits.len() { + *current_index = 0; + } + + let circuit = circuits[*current_index].clone(); + *current_index += 1; + circuit } fn secondary_circuit(&self) -> Self::C2 { @@ -111,15 +131,14 @@ impl StepCircuit for EpochCircuit { #[cfg(test)] mod tests { use super::*; - use crate::common::Hashchain; - use crate::nova::utils::Digest as NovaDigest; - use crate::tree::*; - use arecibo::provider::{PallasEngine, VestaEngine}; - use arecibo::supernova::{PublicParams, RecursiveSNARK, TrivialTestCircuit}; - use arecibo::traits::snark::default_ck_hint; + use crate::{common::Hashchain, nova::utils::Digest as NovaDigest, tree::*}; + use arecibo::{ + provider::PallasEngine, + supernova::{NonUniformCircuit, PublicParams, RecursiveSNARK}, + traits::snark::default_ck_hint, + }; use ff::Field; - use jmt::mock::MockTreeStore; - use jmt::KeyHash; + use jmt::{mock::MockTreeStore, KeyHash}; use rand::{rngs::StdRng, Rng, SeedableRng}; use std::sync::Arc; @@ -181,38 +200,38 @@ mod tests { #[test] fn test_recursive_epoch_circuit_proof() { type E1 = PallasEngine; - type E2 = VestaEngine; let mut state = TestTreeState::new(); let mut rng = StdRng::from_entropy(); + let initial_commitment: ::Scalar = + NovaDigest::from_root_hash(state.tree.get_commitment().unwrap().into()) + .to_scalar() + .unwrap(); + let operations = vec![ ( 0, - EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 2), ), ( 1, - EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), - ), - ( - 0, - EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), - ), - ( - 1, - EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 2), ), + // ( + // 0, + // EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), + // ), + // ( + // 1, + // EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + // ), ]; + let circuit_sequence = EpochCircuitSequence::::new(operations); - let secondary_circuit = TrivialSecondaryCircuit::<::Scalar>::default(); let pp = PublicParams::setup(&circuit_sequence, &*default_ck_hint(), &*default_ck_hint()); - let initial_commitment: ::Scalar = - NovaDigest::new(state.tree.get_commitment().unwrap()) - .to_scalar() - .unwrap(); let mut z0_primary = vec![initial_commitment]; // Initial root z0_primary.push(::Scalar::ZERO); // Initial ROM index z0_primary.extend( @@ -221,39 +240,48 @@ mod tests { .iter() .map(|&x| ::Scalar::from(x as u64)), ); + let z0_secondary = vec![< as Engine>::Scalar>::ONE]; - // Initialize RecursiveSNARK - let mut recursive_snark = RecursiveSNARK::::new( - &pp, - &circuit_sequence, - &circuit_sequence.circuits[0], - &secondary_circuit, - &z0_primary, - &z0_secondary, - ) - .unwrap(); - - // Prove steps - for circuit in &circuit_sequence.circuits { + let mut recursive_snark_option: Option> = None; + + for &op_code in circuit_sequence.rom.iter() { + let primary_circuit = circuit_sequence.primary_circuit(op_code); + let secondary_circuit = circuit_sequence.secondary_circuit(); + + let mut recursive_snark = recursive_snark_option.unwrap_or_else(|| { + RecursiveSNARK::new( + &pp, + &circuit_sequence, + &primary_circuit, + &secondary_circuit, + &z0_primary, + &z0_secondary, + ) + .unwrap() + }); + recursive_snark - .prove_step(&pp, circuit, &secondary_circuit) + .prove_step(&pp, &primary_circuit, 
&secondary_circuit) .unwrap(); - // Verify after each step recursive_snark .verify(&pp, &z0_primary, &z0_secondary) .unwrap(); + + recursive_snark_option = Some(recursive_snark) } - // Final verification + assert!(recursive_snark_option.is_some()); + + let recursive_snark = recursive_snark_option.unwrap(); + assert!(recursive_snark .verify(&pp, &z0_primary, &z0_secondary) .is_ok()); // Additional assertions - let zi_primary = &recursive_snark.zi_primary(); - + let zi_primary = recursive_snark.zi_primary(); println!("Final primary state: {:?}", zi_primary); assert_eq!( @@ -266,6 +294,7 @@ mod tests { NovaDigest::new(state.tree.get_commitment().unwrap()) .to_scalar() .unwrap(); + assert_eq!( zi_primary[0], final_commitment, "Final commitment should match the tree state" diff --git a/src/nova/insert.rs b/src/nova/insert.rs index b1ea3f54..e3f6d07d 100644 --- a/src/nova/insert.rs +++ b/src/nova/insert.rs @@ -1,5 +1,7 @@ -use crate::nova::utils::{next_rom_index_and_pc, Digest}; -use crate::tree::InsertProof; +use crate::{ + nova::utils::{next_rom_index_and_pc, Digest}, + tree::InsertProof, +}; use anyhow::Result; use arecibo::supernova::StepCircuit; use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; @@ -46,21 +48,13 @@ where let pc = pc.ok_or(SynthesisError::AssignmentMissing)?; - // Compute next ROM index and PC let (rom_index_next, pc_next) = next_rom_index_and_pc( - &mut cs.namespace(|| "next_rom_index_and_pc"), + &mut cs.namespace(|| "next and rom_index and pc"), rom_index, allocated_rom, pc, )?; - // cs.push_namespace(|| { - // format!( - // "insert_proof {:?}", - // self.insertion_proof.non_membership_proof.root - // ) - // }); - let pre_insertion_scalar = Digest::new(self.insertion_proof.non_membership_proof.root) .to_scalar() .map_err(|_| SynthesisError::Unsatisfiable); @@ -84,12 +78,9 @@ where .verify() .map_err(|_| SynthesisError::Unsatisfiable)?; - // cs.pop_namespace(); - - // Prepare the next state vector let mut z_next = vec![new_root]; z_next.push(rom_index_next); - z_next.extend_from_slice(&z[2..]); + z_next.extend(z[2..].iter().cloned()); Ok((Some(pc_next), z_next)) } diff --git a/src/nova/update.rs b/src/nova/update.rs index 1056e0df..62f9ead2 100644 --- a/src/nova/update.rs +++ b/src/nova/update.rs @@ -1,5 +1,7 @@ -use crate::nova::utils::{next_rom_index_and_pc, Digest as NovaDigest}; -use crate::tree::UpdateProof; +use crate::{ + nova::utils::{next_rom_index_and_pc, Digest as NovaDigest}, + tree::UpdateProof, +}; use anyhow::Result; use arecibo::supernova::StepCircuit; use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; @@ -46,16 +48,13 @@ where let pc = pc.ok_or(SynthesisError::AssignmentMissing)?; - // Compute next ROM index and PC let (rom_index_next, pc_next) = next_rom_index_and_pc( - &mut cs.namespace(|| "next_rom_index_and_pc"), + &mut cs.namespace(|| "next rom_index and pc"), rom_index, allocated_rom, pc, )?; - // cs.push_namespace(|| format!("update_proof {:?}", self.update_proof.old_root)); - let pre_insertion_scalar = NovaDigest::from_root_hash(self.update_proof.old_root) .to_scalar() .map_err(|_| SynthesisError::Unsatisfiable); @@ -74,17 +73,14 @@ where |lc| lc + CS::one(), |lc| lc + pre_insertion_root.get_variable(), ); - // TODO: bellpepper merkle proof gadget + // // TODO: bellpepper merkle proof gadget self.update_proof .verify() .map_err(|_| SynthesisError::Unsatisfiable)?; - // cs.pop_namespace(); - - // Prepare the next state vector let mut z_next = vec![new_root]; z_next.push(rom_index_next); - 
z_next.extend_from_slice(&z[2..]); + z_next.extend(z[2..].iter().cloned()); Ok((Some(pc_next), z_next)) } diff --git a/src/storage.rs b/src/storage.rs index 8d016073..7440b75a 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -249,9 +249,7 @@ impl Database for RedisConnection { #[cfg(test)] mod tests { use super::*; - use crate::common::Operation; - use crate::storage::Database; - use crate::tree::hash; + use crate::{common::Operation, storage::Database, tree::hash}; use serde::{Deserialize, Serialize}; use serial_test::serial; diff --git a/src/webserver.rs b/src/webserver.rs index 6ae5269c..819c3dfc 100644 --- a/src/webserver.rs +++ b/src/webserver.rs @@ -14,11 +14,12 @@ use axum::{ Json, Router, }; use ed25519::Signature; -use indexed_merkle_tree::tree::{Proof, UpdateProof}; -use indexed_merkle_tree::Hash as TreeHash; +use indexed_merkle_tree::{ + tree::{Proof, UpdateProof}, + Hash as TreeHash, +}; use serde::{Deserialize, Serialize}; -use std::sync::Arc; -use std::{self, str::FromStr}; +use std::{self, str::FromStr, sync::Arc}; use tower_http::cors::CorsLayer; use utoipa::{OpenApi, ToSchema}; use utoipa_swagger_ui::SwaggerUi; From 4b1e82685d6db3eebf203a598497c40782e96dbe Mon Sep 17 00:00:00 2001 From: Ryan Date: Thu, 22 Aug 2024 11:02:43 +0200 Subject: [PATCH 14/33] cargo fix imports --- benches/zk_benchmarks.rs | 2 +- src/common.rs | 1 - src/da/mod.rs | 2 -- src/node_types/lightclient.rs | 2 +- src/node_types/sequencer.rs | 1 - src/storage.rs | 3 +-- src/utils.rs | 8 ++++---- tests/integration_tests.rs | 13 ------------- 8 files changed, 7 insertions(+), 25 deletions(-) diff --git a/benches/zk_benchmarks.rs b/benches/zk_benchmarks.rs index f1e764e9..515859e2 100644 --- a/benches/zk_benchmarks.rs +++ b/benches/zk_benchmarks.rs @@ -5,7 +5,7 @@ use indexed_merkle_tree::{ tree::{IndexedMerkleTree, Proof}, Hash, }; -use prism::{circuits::BatchMerkleProofCircuit, utils::validate_epoch}; +use prism::{circuits::BatchMerkleProofCircuit}; use rand::Rng; use std::time::Duration; diff --git a/src/common.rs b/src/common.rs index a7f8571f..63f822fd 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,6 +1,5 @@ use anyhow::{bail, Result}; use borsh::{BorshDeserialize, BorshSerialize}; -use indexed_merkle_tree::Hash; use jmt::KeyHash; use serde::{Deserialize, Serialize}; use std::{ diff --git a/src/da/mod.rs b/src/da/mod.rs index 39034f18..70326e5e 100644 --- a/src/da/mod.rs +++ b/src/da/mod.rs @@ -1,5 +1,4 @@ use crate::{ - circuits::{Bls12Proof, VerifyingKey}, common::Operation, error::GeneralError, tree::Digest, @@ -9,7 +8,6 @@ use anyhow::Result; use async_trait::async_trait; use borsh::{BorshDeserialize, BorshSerialize}; use ed25519::Signature; -use indexed_merkle_tree::Hash; use std::{self, str::FromStr}; pub mod celestia; diff --git a/src/node_types/lightclient.rs b/src/node_types/lightclient.rs index 1cebf58d..ae19b612 100644 --- a/src/node_types/lightclient.rs +++ b/src/node_types/lightclient.rs @@ -10,7 +10,7 @@ use tokio::{task::spawn, time::interval}; use crate::{ da::DataAvailabilityLayer, node_types::NodeType, - utils::{validate_epoch, verify_signature}, + utils::{verify_signature}, }; pub struct LightClient { diff --git a/src/node_types/sequencer.rs b/src/node_types/sequencer.rs index 5047ed70..08e5d467 100644 --- a/src/node_types/sequencer.rs +++ b/src/node_types/sequencer.rs @@ -1,5 +1,4 @@ use crate::{ - storage::RedisConnection, tree::{hash, Digest, Hasher, KeyDirectoryTree, Proof, SnarkableTree}, }; use anyhow::{Context, Result}; diff --git a/src/storage.rs b/src/storage.rs index 
7440b75a..acca317d 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -1,6 +1,5 @@ use anyhow::{anyhow, Result}; use auto_impl::auto_impl; -use indexed_merkle_tree::Hash; use jmt::{ storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter}, KeyHash, OwnedValue, Version, @@ -10,7 +9,7 @@ use redis::{Client, Commands, Connection}; use std::{ self, process::Command, - sync::{Arc, Mutex, MutexGuard}, + sync::{Mutex, MutexGuard}, thread::sleep, time::Duration, }; diff --git a/src/utils.rs b/src/utils.rs index f93151a5..b9fd297a 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -9,7 +9,7 @@ use bellman::groth16::{self, VerifyingKey}; use bls12_381::{Bls12, Scalar}; use ed25519::Signature; use ed25519_dalek::{Verifier, VerifyingKey as Ed25519VerifyingKey}; -use indexed_merkle_tree::{tree::Proof, Hash}; +use indexed_merkle_tree::{tree::Proof}; use rand::rngs::OsRng; pub fn parse_json_to_proof(json_str: &str) -> Result> { @@ -120,10 +120,10 @@ pub fn verify_signature( #[cfg(test)] mod tests { - use crate::circuits::BatchMerkleProofCircuit; - use indexed_merkle_tree::tree::{IndexedMerkleTree, Proof}; + + - use indexed_merkle_tree::{node::Node, sha256_mod}; + use super::*; diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs index de3eb36d..363cc416 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -1,16 +1,3 @@ -use base64::{engine::general_purpose::STANDARD as engine, Engine as _}; -use ed25519_dalek::{Signer, SigningKey}; -use keystore_rs::create_signing_key; -use prism::{ - cfg::{Config, RedisConfig}, - common::{AccountSource, Operation}, - da::memory::InMemoryDataAvailabilityLayer, - node_types::{lightclient::LightClient, sequencer::Sequencer, NodeType}, - storage::{Database, RedisConnection}, - webserver::OperationInput, -}; -use rand::{rngs::StdRng, Rng, SeedableRng}; -use std::{sync::Arc, time::Duration}; // fn create_new_account_operation(id: String, value: String, key: &SigningKey) -> OperationInput { // let incoming = Operation::CreateAccount { From 84b92c92d9e4149c821d4e4f0277dcd1c392a997 Mon Sep 17 00:00:00 2001 From: Ryan Date: Thu, 22 Aug 2024 14:14:54 +0200 Subject: [PATCH 15/33] removing unused deps --- Cargo.toml | 19 ------------------- src/nova/batch.rs | 31 ++++++++++++++++++++----------- src/tree/mod.rs | 25 +++++++++++++++++++++++++ 3 files changed, 45 insertions(+), 30 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0292a923..60928e61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,54 +18,37 @@ default = [] key_transparency = [] [dependencies] -tap = "1.0.1" axum = "0.6" borsh = { version = "1.5.1", features = ["derive"] } tower-http = { version = "0.4", features = ["cors"] } utoipa = { version = "3.3", features = ["axum_extras"] } utoipa-swagger-ui = { version = "3.1", features = ["axum"] } -crypto-hash = "0.3.4" async-trait = "0.1.68" serde = { version = "1.0.151", features = ["derive"] } serde_json = "1.0.79" redis = "0.24.0" actix-web = { version = "4.4.1" } -actix-cors = "0.7.0" -async-redis-session = "0.2.2" ed25519-dalek = "2.1.0" ed25519 = "2.2.0" base64 = "0.22.0" tokio = { version = "1.16.1", features = ["full"] } -reqwest = { version = "0.11.23", features = ["json"] } -ctrlc = "3.1.5" num = "0.4.0" bellman = "0.14.0" -pairing = "0.23.0" -jsonrpsee = { version = "0.22.2", features = ["ws-client"] } bls12_381 = "0.8.0" rand = "0.8.5" -rand07 = { package = "rand", version = "0.7.0" } hex = "0.4.3" ff = "0.13.0" -openssl = "0.10.66" -futures = "0.3" -lazy_static = "1.4" -colored = "2.0.0" log = "0.4" 
pretty_env_logger = "0.5.0" clap = { version = "4.3.2", features = ["derive"] } config = "0.14.0" -fs2 = "0.4.3" thiserror = "1.0.62" indexed-merkle-tree = "0.6.2" dotenvy = "0.15.7" -ahash = "0.8.7" celestia-rpc = "0.2.0" celestia-types = "0.2.0" mockall = "0.12.1" keystore-rs = "0.1.0" -pyroscope = "0.5.7" -pyroscope_pprofrs = "0.2.7" toml = "0.8.14" dirs = "5.0.1" anyhow = "1.0.44" @@ -73,11 +56,9 @@ jmt = { path = "../jmt", features = [ "mocks", ] } #{ version = "0.10.0", features = ["mocks"] } bellpepper-core = { version = "0.4.0", default-features = false } -bellpepper = { version = "0.4.0", default-features = false } arecibo = { path = "../arecibo" } itertools = "0.13.0" # zip_eq sha2 = "0.10.8" -proptest = "1.5.0" auto_impl = "1.2.0" [dev-dependencies] diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 5d520f41..0ea26878 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -38,6 +38,11 @@ where update_index: RefCell::new(0), } } + + pub fn reset_indices(&self) { + *self.insert_index.borrow_mut() = 0; + *self.update_index.borrow_mut() = 0; + } } impl arecibo::supernova::NonUniformCircuit for EpochCircuitSequence @@ -171,6 +176,7 @@ mod tests { if !state.inserted_keys.contains(&key) { let proof = state.tree.insert(key, hc).expect("Insert should succeed"); state.inserted_keys.insert(key); + println!("inserted key: {key:?}"); return proof; } } @@ -193,6 +199,7 @@ mod tests { .collect(); hc.add(random_string) .expect("Adding to hashchain should succeed"); + println!("updated key: {key:?}"); state.tree.update(key, hc).expect("Update should succeed") } @@ -212,20 +219,20 @@ mod tests { let operations = vec![ ( 0, - EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 2), + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), ), ( 1, - EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 2), + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + ), + ( + 0, + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), + ), + ( + 1, + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), ), - // ( - // 0, - // EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), - // ), - // ( - // 1, - // EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), - // ), ]; let circuit_sequence = EpochCircuitSequence::::new(operations); @@ -244,8 +251,10 @@ mod tests { let z0_secondary = vec![< as Engine>::Scalar>::ONE]; let mut recursive_snark_option: Option> = None; - + circuit_sequence.reset_indices(); + println!("ROM sequence: {:?}", circuit_sequence.rom); for &op_code in circuit_sequence.rom.iter() { + println!("Processing operation: {}", op_code); let primary_circuit = circuit_sequence.primary_circuit(op_code); let secondary_circuit = circuit_sequence.secondary_circuit(); diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 18a91b73..181f1979 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -428,6 +428,31 @@ mod tests { assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); } + #[test] + fn test_interleaved_inserts_and_updates() { + let store = MockTreeStore::default(); + let mut tree = KeyDirectoryTree::new(Arc::new(store)); + + let mut hc1 = Hashchain::new("key_1".into()); + let mut hc2 = Hashchain::new("key_2".into()); + let key1 = hc1.get_keyhash(); + let key2 = hc2.get_keyhash(); + + tree.insert(key1, hc1.clone()).unwrap(); + + hc1.add("value1".into()).unwrap(); + tree.update(key1, hc1.clone()).unwrap(); + + tree.insert(key2, 
hc2.clone()).unwrap(); + + hc2.add("value2".into()).unwrap(); + let last_proof = tree.update(key2, hc2.clone()).unwrap(); + + assert_eq!(tree.get(key1).unwrap().unwrap(), hc1); + assert_eq!(tree.get(key2).unwrap().unwrap(), hc2); + assert_eq!(last_proof.new_root, tree.get_current_root().unwrap()); + } + #[test] fn test_root_hash_changes() { let store = Arc::new(MockTreeStore::default()); From a529be017f47571204a624b2420b0ec10c373df4 Mon Sep 17 00:00:00 2001 From: Ryan Date: Thu, 22 Aug 2024 15:12:34 +0200 Subject: [PATCH 16/33] adding pp serde --- Cargo.lock | 1742 +++--------------------------- Cargo.toml | 1 + benches/zk_benchmarks.rs | 23 +- src/cfg.rs | 10 + src/circuits/hashchain.rs | 55 - src/circuits/less_than.rs | 66 -- src/circuits/merkle_batch.rs | 148 --- src/circuits/merkle_insertion.rs | 190 ---- src/circuits/merkle_update.rs | 126 --- src/circuits/mod.rs | 346 ------ src/circuits/utils.rs | 29 - src/lib.rs | 1 - src/main.rs | 32 +- src/nova/batch.rs | 4 +- src/nova/utils.rs | 75 +- src/utils.rs | 35 +- 16 files changed, 247 insertions(+), 2636 deletions(-) delete mode 100644 src/circuits/hashchain.rs delete mode 100644 src/circuits/less_than.rs delete mode 100644 src/circuits/merkle_batch.rs delete mode 100644 src/circuits/merkle_insertion.rs delete mode 100644 src/circuits/merkle_update.rs delete mode 100644 src/circuits/mod.rs delete mode 100644 src/circuits/utils.rs diff --git a/Cargo.lock b/Cargo.lock index ef67d846..6ef8d422 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,25 +32,10 @@ dependencies = [ "memchr", "pin-project-lite", "tokio", - "tokio-util 0.7.11", + "tokio-util", "tracing", ] -[[package]] -name = "actix-cors" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e772b3bcafe335042b5db010ab7c09013dad6eac4915c91d8d50902769f331" -dependencies = [ - "actix-utils", - "actix-web", - "derive_more", - "futures-util", - "log", - "once_cell", - "smallvec", -] - [[package]] name = "actix-http" version = "3.8.0" @@ -75,17 +60,17 @@ dependencies = [ "http", "httparse", "httpdate", - "itoa 1.0.11", + "itoa", "language-tags", "local-channel", "mime", "percent-encoding", "pin-project-lite", - "rand 0.8.5", - "sha1 0.10.6", + "rand", + "sha1", "smallvec", "tokio", - "tokio-util 0.7.11", + "tokio-util", "tracing", "zstd", ] @@ -107,7 +92,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" dependencies = [ "bytestring", - "cfg-if 1.0.0", + "cfg-if", "http", "regex", "regex-lite", @@ -181,13 +166,13 @@ dependencies = [ "ahash", "bytes", "bytestring", - "cfg-if 1.0.0", + "cfg-if", "cookie", "derive_more", "encoding_rs", "futures-core", "futures-util", - "itoa 1.0.11", + "itoa", "language-tags", "log", "mime", @@ -242,12 +227,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "adler32" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" - [[package]] name = "aead" version = "0.5.2" @@ -264,7 +243,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher", "cpufeatures", ] @@ -289,8 +268,8 @@ version = "0.8.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ - "cfg-if 1.0.0", - "getrandom 0.2.15", + "cfg-if", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -326,25 +305,10 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a43b18702501396fa9bcdeecd533bc85fac75150d308fc0f6800a01e6234a003" dependencies = [ - "arrayvec 0.7.4", + "arrayvec", "bytes", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - [[package]] name = "anes" version = "0.1.6" @@ -412,17 +376,17 @@ version = "0.2.0" dependencies = [ "abomonation", "abomonation_derive_ng", - "bellpepper 0.4.1 (git+https://github.com/lurk-lab/bellpepper?branch=dev)", + "bellpepper", "bellpepper-core", "bincode", "bitvec", "byteorder", - "cfg-if 1.0.0", + "cfg-if", "derive_more", "digest 0.10.7", "ff", "generic-array 1.1.0", - "getrandom 0.2.15", + "getrandom", "group", "grumpkin-msm", "halo2curves", @@ -435,9 +399,9 @@ dependencies = [ "pairing", "pasta_curves", "proptest", - "rand 0.8.5", - "rand_chacha 0.3.1", - "rand_core 0.6.4", + "rand", + "rand_chacha", + "rand_core", "rayon", "rayon-scan", "ref-cast", @@ -561,7 +525,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -571,7 +535,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -580,191 +544,21 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" -[[package]] -name = "arrayvec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" - [[package]] name = "arrayvec" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" -dependencies = [ - "concurrent-queue", - "event-listener-strategy", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-executor" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand 2.1.0", - 
"futures-lite 2.3.0", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io 2.3.3", - "async-lock 3.4.0", - "blocking", - "futures-lite 2.3.0", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if 1.0.0", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" -dependencies = [ - "async-lock 3.4.0", - "cfg-if 1.0.0", - "concurrent-queue", - "futures-io", - "futures-lite 2.3.0", - "parking", - "polling 3.7.2", - "rustix 0.38.34", - "slab", - "tracing", - "windows-sys 0.52.0", -] - [[package]] name = "async-lock" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-lock" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" -dependencies = [ - "event-listener 5.3.1", - "event-listener-strategy", - "pin-project-lite", -] - -[[package]] -name = "async-redis-session" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba82ce101e6cde598074604ef4a882bdd6b3a283baff446ae73ae2727c242452" -dependencies = [ - "async-session", - "redis 0.20.2", -] - -[[package]] -name = "async-session" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07da4ce523b4e2ebaaf330746761df23a465b951a83d84bbce4233dabedae630" -dependencies = [ - "anyhow", - "async-lock 2.8.0", - "async-trait", - "base64 0.13.1", - "bincode", - "blake3 0.3.8", - "chrono", - "hmac 0.11.0", - "log", - "rand 0.8.5", - "serde", - "serde_json", - "sha2 0.9.9", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 1.13.0", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", + "event-listener", ] -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - [[package]] name = "async-trait" version = "0.1.81" @@ -776,23 +570,6 @@ dependencies = [ "syn 2.0.72", ] -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "auto_impl" version = "1.2.0" @@ -824,7 +601,7 @@ dependencies = [ "http", "http-body", "hyper", - "itoa 1.0.11", + "itoa", "matchit", "memchr", "mime", @@ -867,7 +644,7 @@ checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", @@ -935,22 +712,11 @@ dependencies = [ "log", "num_cpus", "pairing", - "rand_core 0.6.4", + "rand_core", "rayon", "subtle", ] -[[package]] -name = "bellpepper" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae286c2cb403324ab644c7cc68dceb25fe52ca9429908a726d7ed272c1edf7b" -dependencies = [ - "bellpepper-core", - "byteorder", - "ff", -] - [[package]] name = "bellpepper" version = "0.4.1" @@ -1042,8 +808,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" dependencies = [ "arrayref", - "arrayvec 0.7.4", - "constant_time_eq 0.3.0", + "arrayvec", + "constant_time_eq", ] [[package]] @@ -1053,23 +819,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94230421e395b9920d23df13ea5d77a20e1725331f90fbbf6df6040b33f756ae" dependencies = [ "arrayref", - "arrayvec 0.7.4", - "constant_time_eq 0.3.0", -] - -[[package]] -name = "blake3" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b64485778c4f16a6a5a9d335e80d449ac6c70cdd6a06d2af18a6f6f775a125b3" -dependencies = [ - "arrayref", - "arrayvec 0.5.2", - "cc", - "cfg-if 0.1.10", - "constant_time_eq 0.1.5", - "crypto-mac 0.8.0", - "digest 0.9.0", + "arrayvec", + "constant_time_eq", ] [[package]] @@ -1079,10 +830,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210" dependencies = [ "arrayref", - "arrayvec 0.7.4", + "arrayvec", "cc", - "cfg-if 1.0.0", - "constant_time_eq 0.3.0", + "cfg-if", + "constant_time_eq", ] [[package]] @@ -1103,19 +854,6 @@ dependencies = [ "generic-array 0.14.7", ] -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite 2.3.0", - "piper", -] - [[package]] name = "blockstore" version = "0.5.0" @@ -1137,7 +875,7 @@ dependencies = [ "ff", "group", "pairing", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1164,7 +902,7 @@ dependencies = [ "ff", "group", "pairing", - "rand_core 0.6.4", + "rand_core", "serde", "subtle", ] @@ -1176,7 +914,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" dependencies = [ "borsh-derive", - "cfg_aliases 0.2.1", + "cfg_aliases", ] [[package]] @@ -1283,7 +1021,7 @@ checksum = "7f22a6baf972f7277acfd5c4ff9b894df7db5b0aaecdb57b9b77b5679fff323e" dependencies = [ "anyhow", "celestia-tendermint-proto", - "prost 0.12.6", + "prost", "prost-build", "prost-types", "serde", @@ -1298,7 +1036,7 @@ 
dependencies = [ "async-trait", "celestia-types", "http", - "jsonrpsee 0.20.3", + "jsonrpsee", "serde", "thiserror", "tracing", @@ -1319,7 +1057,7 @@ dependencies = [ "futures", "num-traits", "once_cell", - "prost 0.12.6", + "prost", "prost-types", "serde", "serde_bytes", @@ -1343,7 +1081,7 @@ dependencies = [ "flex-error", "num-derive", "num-traits", - "prost 0.12.6", + "prost", "prost-types", "serde", "serde_bytes", @@ -1379,43 +1117,18 @@ dependencies = [ "thiserror", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "cfg_aliases" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" - [[package]] name = "cfg_aliases" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" -[[package]] -name = "chrono" -version = "0.4.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "num-traits", - "serde", - "windows-targets 0.52.6", -] - [[package]] name = "ciborium" version = "0.2.2" @@ -1465,23 +1178,6 @@ dependencies = [ "inout", ] -[[package]] -name = "clap" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" -dependencies = [ - "atty", - "bitflags 1.3.2", - "clap_derive 3.2.25", - "clap_lex 0.2.4", - "indexmap 1.9.3", - "once_cell", - "strsim 0.10.0", - "termcolor", - "textwrap", -] - [[package]] name = "clap" version = "4.5.13" @@ -1489,7 +1185,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc" dependencies = [ "clap_builder", - "clap_derive 4.5.13", + "clap_derive", ] [[package]] @@ -1500,21 +1196,8 @@ checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99" dependencies = [ "anstream", "anstyle", - "clap_lex 0.7.2", - "strsim 0.11.1", -] - -[[package]] -name = "clap_derive" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" -dependencies = [ - "heck 0.4.1", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", + "clap_lex", + "strsim", ] [[package]] @@ -1529,15 +1212,6 @@ dependencies = [ "syn 2.0.72", ] -[[package]] -name = "clap_lex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", -] - [[package]] name = "clap_lex" version = "0.7.2" @@ -1550,16 +1224,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" -[[package]] -name = "colored" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" -dependencies = [ - "lazy_static", - "windows-sys 0.48.0", -] - [[package]] name = "combine" version = "4.6.7" @@ -1567,38 +1231,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", - "futures-core", "memchr", - "pin-project-lite", - "tokio", - "tokio-util 0.7.11", -] - -[[package]] -name = "commoncrypto" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007" -dependencies = [ - "commoncrypto-sys", -] - -[[package]] -name = "commoncrypto-sys" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2" -dependencies = [ - "libc", -] - -[[package]] -name = "concurrent-queue" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" -dependencies = [ - "crossbeam-utils", ] [[package]] @@ -1642,7 +1275,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.15", + "getrandom", "once_cell", "tiny-keccak", ] @@ -1667,12 +1300,6 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - [[package]] name = "constant_time_eq" version = "0.3.0" @@ -1730,15 +1357,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "cpp_demangle" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8227005286ec39567949b33df9896bcadfa6051bccca2488129f108ca23119" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "cpufeatures" version = "0.2.12" @@ -1754,7 +1372,7 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1766,7 +1384,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.13", + "clap", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -1840,59 +1458,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array 0.14.7", - "rand_core 0.6.4", + "rand_core", "typenum", ] [[package]] -name = "crypto-hash" -version = "0.3.4" +name = "ctr" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a77162240fd97248d19a564a565eb563a3f592b386e4136fb300909e67dddca" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" dependencies = [ - "commoncrypto", - "hex 0.3.2", - "openssl", - "winapi", -] - -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array 0.14.7", - "subtle", -] - -[[package]] -name = "crypto-mac" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" -dependencies = [ - "generic-array 0.14.7", - "subtle", -] - -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - -[[package]] -name = "ctrlc" -version = "3.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" -dependencies = [ - "nix 0.28.0", - "windows-sys 0.52.0", + "cipher", ] [[package]] @@ -1901,7 +1477,7 @@ version = "4.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "curve25519-dalek-derive", "digest 0.10.7", @@ -1930,7 +1506,7 @@ checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.4", + "rand_core", "subtle-ng", "zeroize", ] @@ -1941,7 +1517,7 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "hashbrown 0.14.5", "lock_api", "once_cell", @@ -1974,15 +1550,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "debugid" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" -dependencies = [ - "uuid", -] - [[package]] name = "der" version = "0.7.9" @@ -2108,12 +1675,6 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" -[[package]] -name = "dtoa" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" - [[package]] name = "ed25519" version = "2.2.3" @@ -2131,8 +1692,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c8465edc8ee7436ffea81d21a019b16676ee3db267aa8d5a8d729581ecf998b" dependencies = [ "curve25519-dalek-ng", - "hex 0.4.3", - "rand_core 0.6.4", + "hex", + "rand_core", "sha2 0.9.9", "zeroize", ] @@ -2145,7 +1706,7 @@ checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" dependencies = [ "curve25519-dalek", "ed25519", - "rand_core 0.6.4", + "rand_core", "serde", "sha2 0.10.8", "subtle", @@ -2164,7 +1725,7 @@ version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2214,27 +1775,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "event-listener" -version = "5.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" -dependencies = [ - "event-listener 5.3.1", - "pin-project-lite", -] - [[package]] name = "eyre" version = "0.6.12" @@ -2245,15 +1785,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - [[package]] name = "fastrand" version = "2.1.0" @@ -2266,7 +1797,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" dependencies = [ - "arrayvec 0.7.4", + "arrayvec", "auto_impl", "bytes", ] @@ -2280,7 +1811,7 @@ dependencies = [ "bitvec", "byteorder", "ff_derive", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2291,7 +1822,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9f54704be45ed286151c5e11531316eaef5b8f5af7d597b806fdb8af108d84a" dependencies = [ "addchain", - "cfg-if 1.0.0", + "cfg-if", "num-bigint 0.3.3", "num-integer", "num-traits", @@ -2306,18 +1837,6 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" -[[package]] -name = "findshlibs" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64" -dependencies = [ - "cc", - "lazy_static", - "libc", - "winapi", -] - [[package]] name = "fixed-hash" version = "0.8.0" @@ -2325,7 +1844,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand 0.8.5", + "rand", "rustc-hex", "static_assertions", ] @@ -2362,21 +1881,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "form_urlencoded" version = "1.2.1" @@ -2392,16 +1896,6 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "funty" version = "2.0.0" @@ -2456,45 +1950,6 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", 
- "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" -dependencies = [ - "fastrand 2.1.0", - "futures-core", - "futures-io", - "parking", - "pin-project-lite", -] - -[[package]] -name = "futures-macro" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.72", -] - [[package]] name = "futures-sink" version = "0.3.30" @@ -2522,7 +1977,6 @@ dependencies = [ "futures-channel", "futures-core", "futures-io", - "futures-macro", "futures-sink", "futures-task", "memchr", @@ -2550,27 +2004,16 @@ dependencies = [ "typenum", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] @@ -2596,18 +2039,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "group" version = "0.13.0" @@ -2615,8 +2046,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand 0.8.5", - "rand_core 0.6.4", + "rand", + "rand_core", "rand_xorshift", "subtle", ] @@ -2628,11 +2059,11 @@ source = "git+https://github.com/lurk-lab/grumpkin-msm?branch=dev#414da3bca6135a dependencies = [ "blst", "cc", - "getrandom 0.2.15", + "getrandom", "halo2curves", "pasta_curves", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "rayon", "semolina", "sppark", @@ -2651,10 +2082,10 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.3.0", + "indexmap", "slab", "tokio", - "tokio-util 0.7.11", + "tokio-util", "tracing", ] @@ -2664,7 +2095,7 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crunchy", ] @@ -2677,15 +2108,15 @@ dependencies = [ "blake2b_simd", "ff", "group", - "hex 0.4.3", + "hex", "lazy_static", "num-bigint 0.4.6", "num-traits", "pairing", "pasta_curves", "paste", - "rand 0.8.5", - "rand_core 0.6.4", + "rand", + "rand_core", "rayon", "serde", "serde_arrays", @@ -2693,12 +2124,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - [[package]] name = "hashbrown" version = "0.13.2" @@ -2726,33 +2151,12 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" -[[package]] -name = "hermit-abi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" - -[[package]] -name = "hex" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" - [[package]] name = "hex" version = "0.4.3" @@ -2768,17 +2172,7 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ - "hmac 0.12.1", -] - -[[package]] -name = "hmac" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" -dependencies = [ - "crypto-mac 0.11.0", - "digest 0.9.0", + "hmac", ] [[package]] @@ -2807,7 +2201,7 @@ checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", - "itoa 1.0.11", + "itoa", ] [[package]] @@ -2860,7 +2254,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.11", + "itoa", "pin-project-lite", "socket2 0.5.7", "tokio", @@ -2879,46 +2273,10 @@ dependencies = [ "http", "hyper", "log", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", - "tokio", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper", - "native-tls", + "rustls", + "rustls-native-certs", "tokio", - "tokio-native-tls", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", + "tokio-rustls", ] [[package]] @@ -2929,11 +2287,11 @@ checksum = "18798160736c1e368938ba6967dbcb3c7afb3256b442a5506ba5222eebb68a5a" dependencies = [ "anyhow", "blake2", - "blake3 1.5.3", + "blake3", "bytes", - "hex 0.4.3", + "hex", "informalsystems-pbjson", - "prost 0.12.6", + "prost", "ripemd", "serde", "sha2 0.10.8", @@ -2985,7 +2343,7 @@ dependencies = [ "anyhow", "bls12_381", "borsh", - "hex 0.4.3", + "hex", "num", "num-bigint 0.4.6", "num-traits", @@ -2993,16 +2351,6 @@ 
dependencies = [ "sha2 0.10.8", ] -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - [[package]] name = "indexmap" version = "2.3.0" @@ -3033,39 +2381,13 @@ dependencies = [ "generic-array 0.14.7", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "ipnet" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" - [[package]] name = "is-terminal" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", "windows-sys 0.52.0", ] @@ -3103,12 +2425,6 @@ dependencies = [ "either", ] -[[package]] -name = "itoa" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" - [[package]] name = "itoa" version = "1.0.11" @@ -3124,7 +2440,7 @@ dependencies = [ "borsh", "digest 0.10.7", "hashbrown 0.13.2", - "hex 0.4.3", + "hex", "ics23", "itertools 0.10.5", "mirai-annotations", @@ -3155,12 +2471,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "json" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd" - [[package]] name = "json5" version = "0.4.1" @@ -3178,25 +2488,14 @@ version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "affdc52f7596ccb2d7645231fc6163bb314630c989b64998f3699a28b4d5d4dc" dependencies = [ - "jsonrpsee-core 0.20.3", + "jsonrpsee-core", "jsonrpsee-http-client", "jsonrpsee-proc-macros", - "jsonrpsee-types 0.20.3", - "jsonrpsee-ws-client 0.20.3", + "jsonrpsee-types", + "jsonrpsee-ws-client", "tracing", ] -[[package]] -name = "jsonrpsee" -version = "0.22.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfdb12a2381ea5b2e68c3469ec604a007b367778cdb14d09612c8069ebd616ad" -dependencies = [ - "jsonrpsee-core 0.22.5", - "jsonrpsee-types 0.22.5", - "jsonrpsee-ws-client 0.22.5", -] - [[package]] name = "jsonrpsee-client-transport" version = "0.20.3" @@ -3205,35 +2504,14 @@ checksum = "b5b005c793122d03217da09af68ba9383363caa950b90d3436106df8cabce935" dependencies = [ "futures-util", "http", - "jsonrpsee-core 0.20.3", + "jsonrpsee-core", "pin-project", - "rustls-native-certs 0.6.3", + "rustls-native-certs", "soketto", "thiserror", "tokio", - "tokio-rustls 0.24.1", - "tokio-util 0.7.11", - "tracing", - "url", -] - -[[package]] -name = "jsonrpsee-client-transport" -version = "0.22.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4978087a58c3ab02efc5b07c5e5e2803024536106fd5506f558db172c889b3aa" -dependencies = [ - 
"futures-util", - "http", - "jsonrpsee-core 0.22.5", - "pin-project", - "rustls-native-certs 0.7.1", - "rustls-pki-types", - "soketto", - "thiserror", - "tokio", - "tokio-rustls 0.25.0", - "tokio-util 0.7.11", + "tokio-rustls", + "tokio-util", "tracing", "url", ] @@ -3245,13 +2523,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da2327ba8df2fdbd5e897e2b5ed25ce7f299d345b9736b6828814c3dbd1fd47b" dependencies = [ "anyhow", - "async-lock 2.8.0", + "async-lock", "async-trait", "beef", "futures-timer", "futures-util", "hyper", - "jsonrpsee-types 0.20.3", + "jsonrpsee-types", "rustc-hash", "serde", "serde_json", @@ -3260,28 +2538,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "jsonrpsee-core" -version = "0.22.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b257e1ec385e07b0255dde0b933f948b5c8b8c28d42afda9587c3a967b896d" -dependencies = [ - "anyhow", - "async-trait", - "beef", - "futures-timer", - "futures-util", - "jsonrpsee-types 0.22.5", - "pin-project", - "rustc-hash", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-stream", - "tracing", -] - [[package]] name = "jsonrpsee-http-client" version = "0.20.3" @@ -3291,8 +2547,8 @@ dependencies = [ "async-trait", "hyper", "hyper-rustls", - "jsonrpsee-core 0.20.3", - "jsonrpsee-types 0.20.3", + "jsonrpsee-core", + "jsonrpsee-types", "serde", "serde_json", "thiserror", @@ -3329,19 +2585,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "jsonrpsee-types" -version = "0.22.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "150d6168405890a7a3231a3c74843f58b8959471f6df76078db2619ddee1d07d" -dependencies = [ - "anyhow", - "beef", - "serde", - "serde_json", - "thiserror", -] - [[package]] name = "jsonrpsee-ws-client" version = "0.20.3" @@ -3349,22 +2592,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bca9cb3933ccae417eb6b08c3448eb1cb46e39834e5b503e395e5e5bd08546c0" dependencies = [ "http", - "jsonrpsee-client-transport 0.20.3", - "jsonrpsee-core 0.20.3", - "jsonrpsee-types 0.20.3", - "url", -] - -[[package]] -name = "jsonrpsee-ws-client" -version = "0.22.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58b9db2dfd5bb1194b0ce921504df9ceae210a345bc2f6c5a61432089bbab070" -dependencies = [ - "http", - "jsonrpsee-client-transport 0.22.5", - "jsonrpsee-core 0.22.5", - "jsonrpsee-types 0.22.5", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", "url", ] @@ -3393,22 +2623,13 @@ dependencies = [ "base64 0.22.1", "dotenvy", "ed25519-dalek", - "hex 0.4.3", + "hex", "keyring", "mockall", - "rand 0.8.5", + "rand", "security-framework", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "language-tags" version = "0.3.2" @@ -3441,26 +2662,6 @@ version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" -[[package]] -name = "libflate" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ff4ae71b685bbad2f2f391fe74f6b7659a34871c08b210fdc039e43bee07d18" -dependencies = [ - "adler32", - "crc32fast", - "libflate_lz77", -] - -[[package]] -name = "libflate_lz77" -version = "1.2.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a52d3a8bfc85f250440e4424db7d857e241a3aebbbe301f3eb606ab15c39acbf" -dependencies = [ - "rle-decode-fast", -] - [[package]] name = "libm" version = "0.2.8" @@ -3498,12 +2699,6 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3542,9 +2737,6 @@ name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -dependencies = [ - "value-bag", -] [[package]] name = "matchit" @@ -3558,15 +2750,6 @@ version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" -[[package]] -name = "memmap2" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" -dependencies = [ - "libc", -] - [[package]] name = "mime" version = "0.3.17" @@ -3606,7 +2789,7 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] @@ -3616,9 +2799,9 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.52.0", ] @@ -3634,7 +2817,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43766c2b5203b10de348ffe19f7e54564b64f3d6018ff7648d1e2d6d3a0f0a48" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "downcast", "fragile", "lazy_static", @@ -3649,7 +2832,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af7cbce79ec385a1d4f54baa90a76401eb15d9cab93685f62e7e9f942aa00ae2" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "proc-macro2", "quote", "syn 2.0.72", @@ -3701,33 +2884,6 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" -[[package]] -name = "names" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bddcd3bf5144b6392de80e04c347cd7fab2508f6df16a85fc496ecd5cec39bc" -dependencies = [ - "clap 3.2.25", - "rand 0.8.5", -] - -[[package]] -name = "native-tls" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "neptune" version = "13.0.0" @@ -3735,7 +2891,7 @@ source = "git+https://github.com/lurk-lab/neptune?branch=dev#b6fb1f9372be2a50c56 dependencies = [ "abomonation", "abomonation_derive_ng", - "bellpepper 0.4.1 (git+https://github.com/lurk-lab/bellpepper?branch=dev)", + "bellpepper", 
"bellpepper-core", "blake2s_simd", "blstrs", @@ -3747,29 +2903,6 @@ dependencies = [ "trait-set", ] -[[package]] -name = "nix" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" -dependencies = [ - "bitflags 1.3.2", - "cfg-if 1.0.0", - "libc", -] - -[[package]] -name = "nix" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" -dependencies = [ - "bitflags 2.6.0", - "cfg-if 1.0.0", - "cfg_aliases 0.1.1", - "libc", -] - [[package]] name = "nmt-rs" version = "0.1.0" @@ -3823,7 +2956,7 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", - "rand 0.8.5", + "rand", "serde", ] @@ -3900,7 +3033,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", ] @@ -3940,50 +3073,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" -[[package]] -name = "openssl" -version = "0.10.66" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" -dependencies = [ - "bitflags 2.6.0", - "cfg-if 1.0.0", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.72", -] - [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" -[[package]] -name = "openssl-sys" -version = "0.9.103" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "option-ext" version = "0.2.0" @@ -4000,12 +3095,6 @@ dependencies = [ "hashbrown 0.13.2", ] -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" - [[package]] name = "pairing" version = "0.23.0" @@ -4021,7 +3110,7 @@ version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ - "arrayvec 0.7.4", + "arrayvec", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", @@ -4041,12 +3130,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "parking" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" - [[package]] name = "parking_lot" version = "0.12.3" @@ -4063,7 +3146,7 @@ version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ - "cfg-if 
1.0.0", + "cfg-if", "libc", "redox_syscall", "smallvec", @@ -4079,9 +3162,9 @@ dependencies = [ "blake2b_simd", "ff", "group", - "hex 0.4.3", + "hex", "lazy_static", - "rand 0.8.5", + "rand", "serde", "static_assertions", "subtle", @@ -4157,7 +3240,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.3.0", + "indexmap", ] [[package]] @@ -4192,17 +3275,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "piper" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" -dependencies = [ - "atomic-waker", - "fastrand 2.1.0", - "futures-io", -] - [[package]] name = "pkcs8" version = "0.10.2" @@ -4247,44 +3319,13 @@ dependencies = [ "plotters-backend", ] -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if 1.0.0", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" -dependencies = [ - "cfg-if 1.0.0", - "concurrent-queue", - "hermit-abi 0.4.0", - "pin-project-lite", - "rustix 0.38.34", - "tracing", - "windows-sys 0.52.0", -] - [[package]] name = "polyval" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "opaque-debug", "universal-hash", @@ -4296,26 +3337,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" -[[package]] -name = "pprof" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" -dependencies = [ - "backtrace", - "cfg-if 1.0.0", - "findshlibs", - "libc", - "log", - "nix 0.26.4", - "once_cell", - "parking_lot", - "smallvec", - "symbolic-demangle", - "tempfile", - "thiserror", -] - [[package]] name = "ppv-lite86" version = "0.2.20" @@ -4386,61 +3407,43 @@ dependencies = [ name = "prism" version = "0.1.0" dependencies = [ - "actix-cors", "actix-web", - "ahash", "anyhow", "arecibo", - "async-redis-session", "async-trait", "auto_impl", "axum", "base64 0.22.1", "bellman", - "bellpepper 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "bellpepper-core", + "bincode", "bls12_381", "borsh", "celestia-rpc", "celestia-types", - "clap 4.5.13", - "colored", + "clap", "config", "criterion", - "crypto-hash", - "ctrlc", "dirs 5.0.1", "dotenvy", "ed25519", "ed25519-dalek", "ff", - "fs2", - "futures", - "hex 0.4.3", + "hex", "indexed-merkle-tree", "itertools 0.13.0", "jmt", - "jsonrpsee 0.22.5", "keystore-rs", - "lazy_static", "log", "mockall", "num", - "openssl", - "pairing", "pretty_env_logger", - "proptest", - "pyroscope", - "pyroscope_pprofrs", - "rand 0.7.3", - "rand 0.8.5", - "redis 
0.24.0", - "reqwest", + "rand", + "redis", "serde", "serde_json", "serial_test", "sha2 0.10.8", - "tap", "thiserror", "tokio", "toml", @@ -4512,8 +3515,8 @@ dependencies = [ "bitflags 2.6.0", "lazy_static", "num-traits", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "rand_xorshift", "regex-syntax", "rusty-fork", @@ -4521,16 +3524,6 @@ dependencies = [ "unarray", ] -[[package]] -name = "prost" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" -dependencies = [ - "bytes", - "prost-derive 0.11.9", -] - [[package]] name = "prost" version = "0.12.6" @@ -4538,7 +3531,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", - "prost-derive 0.12.6", + "prost-derive", ] [[package]] @@ -4555,26 +3548,13 @@ dependencies = [ "once_cell", "petgraph", "prettyplease", - "prost 0.12.6", + "prost", "prost-types", "regex", "syn 2.0.72", "tempfile", ] -[[package]] -name = "prost-derive" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" -dependencies = [ - "anyhow", - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "prost-derive" version = "0.12.6" @@ -4589,42 +3569,12 @@ dependencies = [ ] [[package]] -name = "prost-types" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" -dependencies = [ - "prost 0.12.6", -] - -[[package]] -name = "pyroscope" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac8a53ce01af1087eaeee6ce7c4fbf50ea4040ab1825c0115c4bafa039644ba9" -dependencies = [ - "json", - "libc", - "libflate", - "log", - "names", - "prost 0.11.9", - "reqwest", - "thiserror", - "url", - "winapi", -] - -[[package]] -name = "pyroscope_pprofrs" -version = "0.2.7" +name = "prost-types" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f010b2a981a7f8449a650f25f309e520b5206ea2d89512dcb146aaa5518ff4" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" dependencies = [ - "log", - "pprof", - "pyroscope", - "thiserror", + "prost", ] [[package]] @@ -4657,19 +3607,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -4677,18 +3614,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] 
[[package]] @@ -4698,16 +3625,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -4716,16 +3634,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", + "getrandom", ] [[package]] @@ -4734,7 +3643,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -4766,27 +3675,6 @@ dependencies = [ "rayon", ] -[[package]] -name = "redis" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4f0ceb2ec0dd769483ecd283f6615aa83dcd0be556d5294c6e659caefe7cc54" -dependencies = [ - "async-std", - "async-trait", - "bytes", - "combine", - "dtoa", - "futures-util", - "itoa 0.4.8", - "percent-encoding", - "pin-project-lite", - "sha1 0.6.1", - "tokio", - "tokio-util 0.6.10", - "url", -] - [[package]] name = "redis" version = "0.24.0" @@ -4794,7 +3682,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c580d9cbbe1d1b479e8d67cf9daf6a62c957e6846048408b80b43ac3f6af84cd" dependencies = [ "combine", - "itoa 1.0.11", + "itoa", "percent-encoding", "ryu", "sha1_smol", @@ -4817,7 +3705,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ - "getrandom 0.2.15", + "getrandom", "libredox", "thiserror", ] @@ -4877,50 +3765,6 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-rustls", - "hyper-tls", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "system-configuration", - "tokio", - "tokio-native-tls", - "tokio-rustls 0.24.1", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots", - "winreg", -] - [[package]] name = "ring" version = "0.17.8" @@ -4928,8 +3772,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "cfg-if 1.0.0", - "getrandom 0.2.15", + 
"cfg-if", + "getrandom", "libc", "spin", "untrusted", @@ -4945,12 +3789,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "rle-decode-fast" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" - [[package]] name = "rlp" version = "0.5.2" @@ -4989,7 +3827,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "proptest", - "rand 0.8.5", + "rand", "rlp", "ruint-macro", "serde", @@ -5044,7 +3882,7 @@ version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e2a3bcec1f113553ef1c88aae6c020a369d03d55b58de9869a0908930385091" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ordered-multimap", ] @@ -5084,20 +3922,6 @@ dependencies = [ "semver 1.0.23", ] -[[package]] -name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - [[package]] name = "rustix" version = "0.38.34" @@ -5107,7 +3931,7 @@ dependencies = [ "bitflags 2.6.0", "errno", "libc", - "linux-raw-sys 0.4.14", + "linux-raw-sys", "windows-sys 0.52.0", ] @@ -5119,24 +3943,10 @@ checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", - "rustls-webpki 0.101.7", + "rustls-webpki", "sct", ] -[[package]] -name = "rustls" -version = "0.22.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" -dependencies = [ - "log", - "ring", - "rustls-pki-types", - "rustls-webpki 0.102.6", - "subtle", - "zeroize", -] - [[package]] name = "rustls-native-certs" version = "0.6.3" @@ -5144,20 +3954,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile 1.0.4", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-native-certs" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a88d6d420651b496bdd98684116959239430022a115c1240e6c3993be0b15fba" -dependencies = [ - "openssl-probe", - "rustls-pemfile 2.1.2", - "rustls-pki-types", + "rustls-pemfile", "schannel", "security-framework", ] @@ -5171,22 +3968,6 @@ dependencies = [ "base64 0.21.7", ] -[[package]] -name = "rustls-pemfile" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" -dependencies = [ - "base64 0.22.1", - "rustls-pki-types", -] - -[[package]] -name = "rustls-pki-types" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" - [[package]] name = "rustls-webpki" version = "0.101.7" @@ -5197,17 +3978,6 @@ dependencies = [ "untrusted", ] -[[package]] -name = "rustls-webpki" -version = "0.102.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", -] - [[package]] name = "rustversion" version = "1.0.17" @@ -5382,7 +4152,7 @@ version = "1.0.122" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" dependencies = [ - "itoa 1.0.11", + "itoa", "memchr", "ryu", "serde", @@ -5394,7 +4164,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" dependencies = [ - "itoa 1.0.11", + "itoa", "serde", ] @@ -5425,7 +4195,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.11", + "itoa", "ryu", "serde", ] @@ -5462,28 +4232,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", ] -[[package]] -name = "sha1" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770" -dependencies = [ - "sha1_smol", -] - [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5501,7 +4262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -5513,7 +4274,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5552,7 +4313,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -5601,7 +4362,7 @@ dependencies = [ "futures", "httparse", "log", - "rand 0.8.5", + "rand", "sha-1", ] @@ -5631,24 +4392,12 @@ dependencies = [ "which", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "strsim" version = "0.11.1" @@ -5676,29 +4425,6 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" -[[package]] -name = "symbolic-common" -version = "12.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16629323a4ec5268ad23a575110a724ad4544aae623451de600c747bf87b36cf" -dependencies = [ - "debugid", - "memmap2", - "stable_deref_trait", - "uuid", -] - 
-[[package]] -name = "symbolic-demangle" -version = "12.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c043a45f08f41187414592b3ceb53fb0687da57209cc77401767fb69d5b596" -dependencies = [ - "cpp_demangle", - "rustc-demangle", - "symbolic-common", -] - [[package]] name = "syn" version = "1.0.109" @@ -5739,27 +4465,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tap" version = "1.0.1" @@ -5772,10 +4477,10 @@ version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" dependencies = [ - "cfg-if 1.0.0", - "fastrand 2.1.0", + "cfg-if", + "fastrand", "once_cell", - "rustix 0.38.34", + "rustix", "windows-sys 0.52.0", ] @@ -5794,12 +4499,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" -[[package]] -name = "textwrap" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" - [[package]] name = "thiserror" version = "1.0.63" @@ -5836,7 +4535,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", - "itoa 1.0.11", + "itoa", "libc", "num-conv", "num_threads", @@ -5925,59 +4624,13 @@ dependencies = [ "syn 2.0.72", ] -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.12", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" -dependencies = [ - "rustls 0.22.4", - "rustls-pki-types", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.6.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "log", - 
"pin-project-lite", + "rustls", "tokio", ] @@ -6022,7 +4675,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.3.0", + "indexmap", "toml_datetime", "winnow 0.5.40", ] @@ -6033,7 +4686,7 @@ version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.3.0", + "indexmap", "toml_datetime", "winnow 0.5.40", ] @@ -6044,7 +4697,7 @@ version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ - "indexmap 2.3.0", + "indexmap", "serde", "serde_spanned", "toml_datetime", @@ -6166,7 +4819,7 @@ checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" dependencies = [ "byteorder", "crunchy", - "hex 0.4.3", + "hex", "static_assertions", ] @@ -6269,7 +4922,7 @@ version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d82b1bc5417102a73e8464c686eef947bdfb99fcdfc0a4f228e81afa9526470a" dependencies = [ - "indexmap 2.3.0", + "indexmap", "serde", "serde_json", "utoipa-gen", @@ -6304,30 +4957,12 @@ dependencies = [ "zip", ] -[[package]] -name = "uuid" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" - [[package]] name = "valuable" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" -[[package]] -name = "value-bag" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - [[package]] name = "vergen" version = "8.3.2" @@ -6335,7 +4970,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2990d9ea5967266ea0ccf413a4aa5c42a93dbcfda9cb49a97de6931726b12566" dependencies = [ "anyhow", - "cfg-if 1.0.0", + "cfg-if", "rustversion", "time", ] @@ -6355,12 +4990,6 @@ dependencies = [ "libc", ] -[[package]] -name = "waker-fn" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" - [[package]] name = "walkdir" version = "2.5.0" @@ -6380,12 +5009,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6398,7 +5021,7 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] @@ -6417,18 +5040,6 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" -dependencies = [ - "cfg-if 1.0.0", - "js-sys", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasm-bindgen-macro" version = "0.2.92" @@ -6468,12 +5079,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki-roots" -version = "0.25.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" - [[package]] name = "which" version = "4.4.2" @@ -6483,7 +5088,7 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.34", + "rustix", ] [[package]] @@ -6517,15 +5122,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -6692,16 +5288,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if 1.0.0", - "windows-sys 0.48.0", -] - [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 60928e61..e956d636 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,6 +60,7 @@ arecibo = { path = "../arecibo" } itertools = "0.13.0" # zip_eq sha2 = "0.10.8" auto_impl = "1.2.0" +bincode = "1.3.3" [dev-dependencies] serial_test = "3.1.1" diff --git a/benches/zk_benchmarks.rs b/benches/zk_benchmarks.rs index 515859e2..f1f7c7f8 100644 --- a/benches/zk_benchmarks.rs +++ b/benches/zk_benchmarks.rs @@ -5,7 +5,6 @@ use indexed_merkle_tree::{ tree::{IndexedMerkleTree, Proof}, Hash, }; -use prism::{circuits::BatchMerkleProofCircuit}; use rand::Rng; use std::time::Duration; @@ -56,13 +55,13 @@ fn bench_proof_generation(c: &mut Criterion) { let (_, proofs, prev_commitment, current_commitment) = setup_tree_and_proofs(*tree_size, *batch_size); b.iter(|| { - let circuit = BatchMerkleProofCircuit::new( - black_box(&prev_commitment), - black_box(¤t_commitment), - black_box(proofs.clone()), - ) - .unwrap(); - let _ = circuit.create_and_verify_snark(); + // let circuit = BatchMerkleProofCircuit::new( + // black_box(&prev_commitment), + // black_box(¤t_commitment), + // black_box(proofs.clone()), + // ) + // .unwrap(); + // let _ = circuit.create_and_verify_snark(); }); }, ); @@ -85,10 +84,10 @@ fn bench_proof_verification(c: &mut Criterion) { |b, &(tree_size, batch_size)| { let (_, proofs, prev_commitment, current_commitment) = setup_tree_and_proofs(*tree_size, *batch_size); - let circuit = - BatchMerkleProofCircuit::new(&prev_commitment, ¤t_commitment, proofs) - .unwrap(); - let (proof, verifying_key) = circuit.create_and_verify_snark().unwrap(); + // let circuit = + // BatchMerkleProofCircuit::new(&prev_commitment, ¤t_commitment, proofs) + // .unwrap(); + // let (proof, verifying_key) = circuit.create_and_verify_snark().unwrap(); b.iter(|| { /* let _ = validate_epoch( black_box(&prev_commitment), diff --git a/src/cfg.rs b/src/cfg.rs index ba3f8e6f..9b85419e 100644 --- a/src/cfg.rs +++ b/src/cfg.rs @@ -9,6 +9,7 @@ use config::{builder::DefaultState, ConfigBuilder, File}; use dirs::home_dir; use dotenvy::dotenv; use 
serde::{Deserialize, Serialize}; +use std::path::PathBuf; use std::{fs, path::Path, sync::Arc}; use crate::da::{celestia::CelestiaConnection, DataAvailabilityLayer}; @@ -17,6 +18,7 @@ use crate::da::{celestia::CelestiaConnection, DataAvailabilityLayer}; pub enum Commands { LightClient, Sequencer, + GeneratePublicParams, } #[derive(Parser, Clone, Debug, Deserialize)] @@ -59,6 +61,10 @@ pub struct CommandLineArgs { #[arg(long)] config_path: Option, + /// Path to the bin file containing serialized PublicParams + #[arg(long)] + public_params_path: Option, + #[command(subcommand)] pub command: Commands, } @@ -72,6 +78,7 @@ pub struct Config { pub da_layer: Option, pub redis_config: Option, pub verifying_key: Option, + pub public_params_path: Option, } #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] @@ -137,6 +144,7 @@ impl Default for Config { celestia_config: Some(CelestiaConfig::default()), redis_config: Some(RedisConfig::default()), verifying_key: None, + public_params_path: None, } } } @@ -203,6 +211,7 @@ fn merge_configs(loaded: Config, default: Config) -> Config { celestia_config: loaded.celestia_config.or(default.celestia_config), da_layer: loaded.da_layer.or(default.da_layer), verifying_key: loaded.verifying_key.or(default.verifying_key), + public_params_path: loaded.public_params_path.or(default.public_params_path), } } @@ -266,6 +275,7 @@ fn apply_command_line_args(config: Config, args: CommandLineArgs) -> Config { }), da_layer: config.da_layer, verifying_key: args.verifying_key.or(config.verifying_key), + public_params_path: args.public_params_path.or(config.public_params_path), } } diff --git a/src/circuits/hashchain.rs b/src/circuits/hashchain.rs deleted file mode 100644 index 32e3707a..00000000 --- a/src/circuits/hashchain.rs +++ /dev/null @@ -1,55 +0,0 @@ -use crate::common::HashchainEntry; -use anyhow::Result; -use bellman::{Circuit, ConstraintSystem, SynthesisError}; -use bls12_381::Scalar; -use indexed_merkle_tree::sha256_mod; - -/// HashChainEntryCircuit is a circuit that verifies that a given value is present in a hashchain. -#[derive(Clone)] -pub struct HashChainEntryCircuit { - pub value: Scalar, - /// Represents the hashchain in the form of a vector of Scalars. - /// Each Scalar is sha256_mod(hashchain_entry.value()) - pub chain: Vec, -} - -impl HashChainEntryCircuit { - pub fn create(value: &str, hashchain: Vec) -> Result { - let hashed_value = sha256_mod(value.as_bytes()); - let parsed_value = hashed_value.try_into()?; - let mut parsed_hashchain: Vec = vec![]; - for entry in hashchain { - let hashed_entry_value = sha256_mod(entry.operation.value().as_bytes()); - parsed_hashchain.push(hashed_entry_value.try_into()?) 
- } - Ok(HashChainEntryCircuit { - value: parsed_value, - chain: parsed_hashchain, - }) - } -} - -impl Circuit for HashChainEntryCircuit { - fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { - if self.chain.is_empty() { - return Err(SynthesisError::AssignmentMissing); - } - - let provided_value = cs.alloc_input(|| "provided hashed value", || Ok(self.value))?; - - for entry in self.chain { - if entry == self.value { - let found_value = cs.alloc(|| "found hashed value", || Ok(entry))?; - // found_value * (1) = provided_value - cs.enforce( - || "found value check", - |lc| lc + found_value, - |lc| lc + CS::one(), - |lc| lc + provided_value, - ); - return Ok(()); - } - } - Err(SynthesisError::Unsatisfiable) - } -} diff --git a/src/circuits/less_than.rs b/src/circuits/less_than.rs deleted file mode 100644 index 08897ec6..00000000 --- a/src/circuits/less_than.rs +++ /dev/null @@ -1,66 +0,0 @@ -use anyhow::Result; -use bellman::{gadgets::boolean::Boolean, Circuit, ConstraintSystem, SynthesisError}; -use bls12_381::Scalar; -use ff::PrimeFieldBits; - -#[derive(Clone)] -pub struct LessThanCircuit { - a: Scalar, - b: Scalar, -} - -impl LessThanCircuit { - pub fn new(a: Scalar, b: Scalar) -> LessThanCircuit { - LessThanCircuit { a, b } - } -} - -impl Circuit for LessThanCircuit { - fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { - let a_bits = self.a.to_le_bits(); - let b_bits = self.b.to_le_bits(); - - let mut result = Boolean::constant(false); - - // Iterate over the bits from most significant to least significant - for i in (0..a_bits.len()).rev() { - let a_val = Boolean::constant(a_bits[i]); - let b_val = Boolean::constant(b_bits[i]); - let not_a = a_val.not(); - let not_b = b_val.not(); - - // Check if bits are equal (both 1 or both 0) - let a_and_b = Boolean::and(cs.namespace(|| format!("a_and_b_{}", i)), &a_val, &b_val)?; - let not_a_and_not_b = Boolean::and( - cs.namespace(|| format!("not_a_and_not_b_{}", i)), - ¬_a, - ¬_b, - )?; - - // If the bits are equal, continue to the next bit - if not_a_and_not_b.get_value().unwrap() || a_and_b.get_value().unwrap() { - continue; - } else { - // If bits differ: b > a if b_bit = 1 && a_bit = 0 - result = Boolean::and( - cs.namespace(|| format!("b_and_not_a_{}", i)), - &b_val, - ¬_a, - )?; - break; - } - } - - // Enforce the constraint that the result is correct - // If result is true, then a < b, otherwise a >= b - // result * (1) = 1 - cs.enforce( - || "a < b", - |_| result.lc(CS::one(), Scalar::one()), - |lc| lc + CS::one(), - |lc| lc + CS::one(), - ); - - Ok(()) - } -} diff --git a/src/circuits/merkle_batch.rs b/src/circuits/merkle_batch.rs deleted file mode 100644 index 2323ac50..00000000 --- a/src/circuits/merkle_batch.rs +++ /dev/null @@ -1,148 +0,0 @@ -use crate::{ - circuits::{ - merkle_insertion::prove_insertion, merkle_update::prove_update, InsertMerkleProofCircuit, - ProofVariantCircuit, UpdateMerkleProofCircuit, - }, - utils::create_and_verify_snark, -}; -use anyhow::Result; -use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; -use bls12_381::{Bls12, Scalar}; -use indexed_merkle_tree::{tree::Proof, Hash}; - -/// BatchMerkleProofCircuit represents a circuit for proving a batch of merkle proof circuits. 
-#[derive(Clone)] -pub struct BatchMerkleProofCircuit { - pub old_commitment: Scalar, - pub new_commitment: Scalar, - pub proofs: Vec, -} - -impl BatchMerkleProofCircuit { - pub fn new( - old_commitment: &Hash, - new_commitment: &Hash, - proofs: Vec, - ) -> Result { - let parsed_old_commitment: Scalar = (*old_commitment).try_into()?; - let parsed_new_commitment: Scalar = (*new_commitment).try_into()?; - let mut proof_circuit_array: Vec = vec![]; - for proof in proofs { - match proof { - Proof::Update(update_proof) => { - proof_circuit_array.push(ProofVariantCircuit::Update( - UpdateMerkleProofCircuit::new(&update_proof)?, - )); - } - Proof::Insert(insertion_proof) => { - proof_circuit_array.push(ProofVariantCircuit::Insert( - InsertMerkleProofCircuit::new(&insertion_proof)?, - )); - } - } - } - Ok(BatchMerkleProofCircuit { - old_commitment: parsed_old_commitment, - new_commitment: parsed_new_commitment, - proofs: proof_circuit_array, - }) - } - - pub fn create_and_verify_snark( - &self, - ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { - let scalars: Vec = vec![self.old_commitment, self.new_commitment]; - - create_and_verify_snark(ProofVariantCircuit::Batch(self.clone()), scalars) - } -} - -impl Circuit for BatchMerkleProofCircuit { - fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { - // If the proofs are empty, we just verify that the commitments are equal - if self.proofs.is_empty() { - let provided_old_commitment = - cs.alloc_input(|| "provided old commitment", || Ok(self.old_commitment))?; - let provided_new_commitment = - cs.alloc_input(|| "provided new commitment", || Ok(self.new_commitment))?; - - // provided_old_commitment * (1) = provided_new_commitment - cs.enforce( - || "old commitment check", - |lc| lc + provided_old_commitment, - |lc| lc + CS::one(), - |lc| lc + provided_new_commitment, - ); - - return Ok(()); - } - - // before the calculations make sure that the old root is that of the first proof - let old_root = match &self.proofs[0] { - ProofVariantCircuit::Update(update_proof_circuit) => update_proof_circuit.old_root, - ProofVariantCircuit::Insert(insert_proof_circuit) => { - insert_proof_circuit.pre_insertion_root - } - ProofVariantCircuit::Batch(batch_proof_circuit) => batch_proof_circuit.old_commitment, - }; - - let provided_old_commitment = - cs.alloc_input(|| "provided old commitment", || Ok(self.old_commitment))?; - let old_commitment_from_proofs = - cs.alloc(|| "old commitment from proofs", || Ok(old_root))?; - - // old_commitment_from_proofs * (1) = provided_old_commitment - cs.enforce( - || "old commitment check", - |lc| lc + old_commitment_from_proofs, - |lc| lc + CS::one(), - |lc| lc + provided_old_commitment, - ); - - let mut new_commitment: Scalar = Scalar::zero(); - for proof in self.proofs { - // update the new_commitment for every proof, applying the constraints of the circuit each time - match proof { - ProofVariantCircuit::Update(update_proof_circuit) => { - new_commitment = prove_update( - cs, - update_proof_circuit.old_root, - &update_proof_circuit.old_path, - update_proof_circuit.updated_root, - &update_proof_circuit.updated_path, - )?; - } - ProofVariantCircuit::Insert(insert_proof_circuit) => { - new_commitment = prove_insertion( - cs, - insert_proof_circuit.pre_insertion_root, - &insert_proof_circuit.insertion_path, - insert_proof_circuit.new_leaf_node, - insert_proof_circuit.existing_leaf_update, - insert_proof_circuit.new_leaf_activation, - )?; - } - ProofVariantCircuit::Batch(_) => { - // Batches cannot be recursively 
constructed - // TODO: Should they be able to? - return Err(SynthesisError::Unsatisfiable); - } - } - } - - let provided_new_commitment = - cs.alloc_input(|| "provided commitment", || Ok(self.new_commitment))?; - let recalculated_new_commitment = - cs.alloc(|| "recalculated commitment", || Ok(new_commitment))?; - - // recalculated_commitment * (1) = provided_commitment - cs.enforce( - || "new commitment check", - |lc| lc + recalculated_new_commitment, - |lc| lc + CS::one(), - |lc| lc + provided_new_commitment, - ); - - Ok(()) - } -} diff --git a/src/circuits/merkle_insertion.rs b/src/circuits/merkle_insertion.rs deleted file mode 100644 index ddd1a628..00000000 --- a/src/circuits/merkle_insertion.rs +++ /dev/null @@ -1,190 +0,0 @@ -use crate::{ - circuits::{ - merkle_update::prove_update, - utils::{recalculate_hash_as_scalar, unpack_and_process}, - LessThanCircuit, ProofVariantCircuit, UpdateMerkleProofCircuit, - }, - error::PrismError, - utils::create_and_verify_snark, -}; -use anyhow::Result; -use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; -use bls12_381::{Bls12, Scalar}; -use indexed_merkle_tree::{ - node::{LeafNode, Node}, - tree::InsertProof, -}; - -/// Represents a circuit for proving the insertion of a new leaf into a the IMT. -/// -/// This circuit encapsulates the entire process of inserting a new leaf, -/// including proving non-membership of the new leaf, updating the existing leaf's next pointer, -/// and activating the new leaf. -#[derive(Clone)] -pub struct InsertMerkleProofCircuit { - /// The root of the tree before the insertion. - pub pre_insertion_root: Scalar, - /// The path from the root to the position where the new node will be inserted, - /// proving that the node doesn't exist yet. - pub insertion_path: Vec, - /// The new node to be inserted. - pub new_leaf_node: LeafNode, - /// Proof for updating the existing leaf to point to the new leaf. - pub existing_leaf_update: UpdateMerkleProofCircuit, - /// Proof for activating the new leaf (converting an inactive leaf to active). 
- pub new_leaf_activation: UpdateMerkleProofCircuit, -} - -impl InsertMerkleProofCircuit { - pub fn new(proof: &InsertProof) -> Result { - let (non_membership_root, non_membership_path) = - unpack_and_process(&proof.non_membership_proof.merkle_proof)?; - - let first_merkle_circuit = UpdateMerkleProofCircuit::new(&proof.first_proof)?; - let second_merkle_circuit = UpdateMerkleProofCircuit::new(&proof.second_proof)?; - - Ok(InsertMerkleProofCircuit { - pre_insertion_root: non_membership_root, - insertion_path: non_membership_path.clone(), - new_leaf_node: proof.non_membership_proof.missing_node.clone(), - existing_leaf_update: first_merkle_circuit, - new_leaf_activation: second_merkle_circuit, - }) - } - - pub fn create_and_verify_snark( - &self, - ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { - let scalars: Vec = vec![ - self.pre_insertion_root, - self.existing_leaf_update.old_root, - self.existing_leaf_update.updated_root, - self.new_leaf_activation.old_root, - self.new_leaf_activation.updated_root, - ]; - - create_and_verify_snark(ProofVariantCircuit::Insert(self.clone()), scalars) - } -} - -impl Circuit for InsertMerkleProofCircuit { - fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { - match prove_insertion( - cs, - self.pre_insertion_root, - &self.insertion_path, - self.new_leaf_node, - self.existing_leaf_update, - self.new_leaf_activation, - ) { - Ok(_) => Ok(()), - Err(_) => Err(SynthesisError::Unsatisfiable), - } - } -} - -/// Generates constraints to prove a valid insertion in the merkle tree. -pub fn prove_insertion>( - cs: &mut CS, - pre_insertion_root: Scalar, - insertion_path: &[Node], - new_leaf_node: LeafNode, - existing_leaf_update: UpdateMerkleProofCircuit, - new_leaf_activation: UpdateMerkleProofCircuit, -) -> Result { - // Step 1: Prove non-membership - // This ensures that the new leaf we're trying to insert doesn't already exist in the tree. - prove_non_membership(cs, pre_insertion_root, insertion_path, new_leaf_node)?; - - // Step 2: Update the existing leaf - // This step updates the 'next' pointer of an existing leaf to point to our new leaf. - let updated_root_after_existing_leaf_update = prove_update( - cs, - existing_leaf_update.old_root, - &existing_leaf_update.old_path, - existing_leaf_update.updated_root, - &existing_leaf_update.updated_path, - )?; - - // Step 3: Activate the new leaf - // This step converts an inactive (empty) leaf into our new active leaf, - // effectively inserting the new data into the tree. - let new_root = prove_update( - cs, - updated_root_after_existing_leaf_update, - &new_leaf_activation.old_path, - new_leaf_activation.updated_root, - &new_leaf_activation.updated_path, - )?; - - Ok(new_root) -} - -/// Generates constraints to prove non-membership of a new leaf in the Merkle tree. -/// -/// This function ensures that the new leaf to be inserted does not already exist in the tree -/// and that it maintains the ordered structure of the tree. -/// -/// # Arguments -/// -/// * `cs` - A mutable reference to the constraint system. -/// * `pre_insertion_root` - The root of the Merkle tree before insertion. -/// * `insertion_path` - The path from the root to the insertion position. -/// * `new_leaf_node` - The new leaf node to be inserted. -/// -/// # Returns -/// -/// Returns `Ok(())` if the constraints are satisfied, or an `Err` -/// containing a `SynthesisError` if the proof generation fails. 
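// The ordering argument above can be pictured outside the circuit with plain
// byte comparisons. A minimal sketch, assuming an illustrative `LeafRange`
// struct (not a type from this crate) and assuming the 32-byte big-endian
// order of `label`/`next` mirrors the field-element ordering that the
// in-circuit `LessThanCircuit` constraints below enforce.
struct LeafRange {
    label: [u8; 32],
    next: [u8; 32],
}

fn candidate_is_provably_absent(existing: &LeafRange, candidate: &[u8; 32]) -> bool {
    // existing.label < candidate < existing.next means the candidate cannot be
    // in the tree, because leaves are kept sorted and each leaf points at its
    // successor; any present label would have to fall outside this gap.
    existing.label < *candidate && *candidate < existing.next
}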
-pub fn prove_non_membership>( - cs: &mut CS, - pre_insertion_root: Scalar, - insertion_path: &[Node], - new_leaf_node: LeafNode, -) -> Result<(), SynthesisError> { - // Ensure that the label of the new leaf node lies between the first element of the path - // and its next pointer. This guarantees that no other node with a label between these values exists. - let existing_leaf_label: Scalar = insertion_path[0] - .get_label() - .try_into() - .map_err(|_| SynthesisError::Unsatisfiable)?; - let existing_leaf_next: Scalar = insertion_path[0] - .get_next() - .try_into() - .map_err(|_| SynthesisError::Unsatisfiable)?; - let new_leaf_label: Scalar = new_leaf_node - .label - .try_into() - .map_err(|_| SynthesisError::Unsatisfiable)?; - - // Enforce: existing_leaf_label < new_leaf_label < existing_leaf_next - LessThanCircuit::new(existing_leaf_label, new_leaf_label) - .synthesize(cs) - .expect("Failed to synthesize existing_leaf_label < new_leaf_label"); - LessThanCircuit::new(new_leaf_label, existing_leaf_next) - .synthesize(cs) - .expect("Failed to synthesize new_leaf_label < existing_leaf_next"); - - let allocated_pre_insertion_root = - cs.alloc(|| "pre_insertion_root", || Ok(pre_insertion_root))?; - - let recalculated_root = - recalculate_hash_as_scalar(insertion_path).map_err(|_| SynthesisError::Unsatisfiable)?; - - let allocated_recalculated_root = cs.alloc( - || "recalculated_pre_insertion_root", - || Ok(recalculated_root), - )?; - - // Enforce that the provided pre-insertion root matches the recalculated root. - // This ensures that the ordered structure of the tree is maintained in the path. - // (allocated_pre_insertion_root) * (1) = allocated_recalculated_root - cs.enforce( - || "pre_insertion_root_verification", - |lc| lc + allocated_pre_insertion_root, - |lc| lc + CS::one(), - |lc| lc + allocated_recalculated_root, - ); - - Ok(()) -} diff --git a/src/circuits/merkle_update.rs b/src/circuits/merkle_update.rs deleted file mode 100644 index 002bb935..00000000 --- a/src/circuits/merkle_update.rs +++ /dev/null @@ -1,126 +0,0 @@ -use crate::{ - circuits::{ - utils::{recalculate_hash_as_scalar, unpack_and_process}, - ProofVariantCircuit, - }, - error::PrismError, - utils::create_and_verify_snark, -}; -use anyhow::Result; -use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; -use bls12_381::{Bls12, Scalar}; -use indexed_merkle_tree::{node::Node, tree::UpdateProof}; - -#[derive(Clone)] -pub struct UpdateMerkleProofCircuit { - pub old_root: Scalar, - pub old_path: Vec, - pub updated_root: Scalar, - pub updated_path: Vec, -} - -impl UpdateMerkleProofCircuit { - pub fn new(proof: &UpdateProof) -> Result { - let (old_root, old_path) = unpack_and_process(&proof.old_proof)?; - let (updated_root, updated_path) = unpack_and_process(&proof.new_proof)?; - - // if old_root.is_none() - // || old_path.is_none() - // || updated_root.is_none() - // || updated_path.is_none() - // { - // return Err(GeneralError::MissingArgumentError); - // } - - // // TODO: are there cases where MissingArgumentError isnt the right type? - - // let old_root = - // hash_to_scalar(&old_root.ok_or(GeneralError::MissingArgumentError)?.as_str())?; - // let updated_root = hash_to_scalar( - // &updated_root - // .ok_or(GeneralError::MissingArgumentError)? 
- // .as_str(), - // )?; - - // let old_path = old_path.ok_or(GeneralError::MissingArgumentError)?; - // let updated_path = updated_path.ok_or(GeneralError::MissingArgumentError)?; - - Ok(UpdateMerkleProofCircuit { - old_root, - old_path: old_path.clone(), - updated_root, - updated_path: updated_path.clone(), - }) - } - - pub fn create_and_verify_snark( - &self, - ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { - let scalars: Vec = vec![self.old_root, self.updated_root]; - - create_and_verify_snark(ProofVariantCircuit::Update(self.clone()), scalars) - } -} - -impl Circuit for UpdateMerkleProofCircuit { - fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { - match prove_update( - cs, - self.old_root, - &self.old_path, - self.updated_root, - &self.updated_path, - ) { - Ok(_) => Ok(()), - Err(_) => Err(SynthesisError::Unsatisfiable), - } - } -} - -pub(crate) fn prove_update>( - cs: &mut CS, - old_root: Scalar, - old_path: &[Node], - new_root: Scalar, - new_path: &[Node], -) -> Result { - let root_with_old_pointer = - cs.alloc(|| "first update root with old pointer", || Ok(old_root))?; - let root_with_new_pointer = - cs.alloc(|| "first update root with new pointer", || Ok(new_root))?; - - // update the root hash for old and new path - let recalculated_root_with_old_pointer = - recalculate_hash_as_scalar(old_path).map_err(|_| SynthesisError::Unsatisfiable)?; - let recalculated_root_with_new_pointer = - recalculate_hash_as_scalar(new_path).map_err(|_| SynthesisError::Unsatisfiable)?; - - let allocated_recalculated_root_with_old_pointer = cs.alloc( - || "recalculated first update proof old root", - || Ok(recalculated_root_with_old_pointer), - )?; - let allocated_recalculated_root_with_new_pointer = cs.alloc( - || "recalculated first update proof new root", - || Ok(recalculated_root_with_new_pointer), - )?; - - // Check if the resulting hash is the root hash of the old tree - // allocated_recalculated_root_with_old_pointer * (1) = root_with_old_pointer - cs.enforce( - || "first update old root equality", - |lc| lc + allocated_recalculated_root_with_old_pointer, - |lc| lc + CS::one(), - |lc| lc + root_with_old_pointer, - ); - - // Check that the resulting hash is the root hash of the new tree. 
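// (R1CS aside: every constraint has the shape <A, z> * <B, z> = <C, z>, so a plain
// equality x = y is expressed as the multiplication x * 1 = y; that is why the
// enforce calls in this function multiply the recalculated root by `CS::one()`
// and set the provided root as the product.)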
- // allocated_recalculated_root_with_new_pointer * (1) = root_with_new_pointer - cs.enforce( - || "first update new root equality", - |lc| lc + allocated_recalculated_root_with_new_pointer, - |lc| lc + CS::one(), - |lc| lc + root_with_new_pointer, - ); - - Ok(recalculated_root_with_new_pointer) -} diff --git a/src/circuits/mod.rs b/src/circuits/mod.rs deleted file mode 100644 index 69a2cc7c..00000000 --- a/src/circuits/mod.rs +++ /dev/null @@ -1,346 +0,0 @@ -use crate::error::{GeneralError, PrismError}; -use anyhow::{anyhow, Context, Result}; -use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; -use bls12_381::{Bls12, G1Affine, G2Affine, Scalar}; -use borsh::{BorshDeserialize, BorshSerialize}; -use std::fmt; - -pub mod hashchain; -pub mod less_than; -pub mod merkle_batch; -pub mod merkle_insertion; -pub mod merkle_update; -pub mod utils; - -pub use hashchain::HashChainEntryCircuit; -pub use less_than::LessThanCircuit; -pub use merkle_batch::BatchMerkleProofCircuit; -pub use merkle_insertion::InsertMerkleProofCircuit; -pub use merkle_update::UpdateMerkleProofCircuit; - -#[derive(Clone)] -pub enum ProofVariantCircuit { - Update(UpdateMerkleProofCircuit), - Insert(InsertMerkleProofCircuit), - Batch(BatchMerkleProofCircuit), -} - -impl Circuit for ProofVariantCircuit { - fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { - match self { - ProofVariantCircuit::Update(circuit) => circuit.synthesize(cs), - ProofVariantCircuit::Insert(circuit) => circuit.synthesize(cs), - ProofVariantCircuit::Batch(circuit) => circuit.synthesize(cs), - } - } -} - -/// G1 represents a compressed [`bls12_381::G1Affine`] -#[derive(BorshSerialize, BorshDeserialize, Clone)] -pub struct G1([u8; 48]); - -/// G2 represents a compressed [`bls12_381::G2Affine`] -#[derive(BorshSerialize, BorshDeserialize, Clone)] -pub struct G2([u8; 96]); - -// Debug impls for the Affines print their hex representation -impl fmt::Debug for G1 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "G1(0x{})", hex::encode(self.0)) - } -} - -impl fmt::Debug for G2 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "G2(0x{})", hex::encode(self.0)) - } -} - -impl TryFrom for bls12_381::G1Affine { - type Error = anyhow::Error; - - fn try_from(g1: G1) -> Result { - match bls12_381::G1Affine::from_compressed(&g1.0).into_option() { - Some(affine) => Ok(affine), - None => Err(anyhow!( - GeneralError::DecodingError("G2Affine".to_string(),) - )), - } - } -} - -impl TryFrom for bls12_381::G2Affine { - type Error = anyhow::Error; - - fn try_from(g2: G2) -> Result { - match bls12_381::G2Affine::from_compressed(&g2.0).into_option() { - Some(affine) => Ok(affine), - None => Err(anyhow!( - GeneralError::DecodingError("G2Affine".to_string(),) - )), - } - } -} - -#[derive(Clone, BorshSerialize, BorshDeserialize, Debug)] -pub struct Bls12Proof { - pub a: G1, - pub b: G2, - pub c: G1, -} - -impl TryFrom for groth16::Proof { - type Error = anyhow::Error; - - fn try_from(proof: Bls12Proof) -> Result { - let a: G1Affine = proof.a.try_into().context("affine: a")?; - let b: G2Affine = proof.b.try_into().context("affine: b")?; - let c: G1Affine = proof.c.try_into().context("affine: c")?; - - Ok(groth16::Proof { a, b, c }) - } -} - -impl From> for Bls12Proof { - fn from(proof: groth16::Proof) -> Self { - Bls12Proof { - a: G1(proof.a.to_compressed()), - b: G2(proof.b.to_compressed()), - c: G1(proof.c.to_compressed()), - } - } -} - -#[derive(Clone, BorshSerialize, BorshDeserialize, Debug)] -pub 
struct VerifyingKey { - pub alpha_g1: G1, - pub beta_g1: G1, - pub beta_g2: G2, - pub delta_g1: G1, - pub delta_g2: G2, - pub gamma_g2: G2, - pub ic: Vec, -} - -impl From> for VerifyingKey { - fn from(verifying_key: groth16::VerifyingKey) -> Self { - VerifyingKey { - alpha_g1: G1(verifying_key.alpha_g1.to_compressed()), - beta_g1: G1(verifying_key.beta_g1.to_compressed()), - beta_g2: G2(verifying_key.beta_g2.to_compressed()), - delta_g1: G1(verifying_key.delta_g1.to_compressed()), - delta_g2: G2(verifying_key.delta_g2.to_compressed()), - gamma_g2: G2(verifying_key.gamma_g2.to_compressed()), - ic: verifying_key - .ic - .iter() - .map(|x| G1(x.to_compressed())) - .collect::>(), - } - } -} - -impl TryFrom for groth16::VerifyingKey { - type Error = PrismError; - - fn try_from(custom_vk: VerifyingKey) -> Result { - let alpha_g1: G1Affine = custom_vk - .alpha_g1 - .try_into() - .map_err(|e| GeneralError::EncodingError(format!("{}:alpha_g1", e)))?; - let beta_g1: G1Affine = custom_vk - .beta_g1 - .try_into() - .map_err(|e| GeneralError::EncodingError(format!("{}: beta_g1", e)))?; - let beta_g2: G2Affine = custom_vk - .beta_g2 - .try_into() - .map_err(|e| GeneralError::EncodingError(format!("{}: beta_g2", e)))?; - let delta_g1: G1Affine = custom_vk - .delta_g1 - .try_into() - .map_err(|e| GeneralError::EncodingError(format!("{}: delta_g1", e)))?; - let delta_g2: G2Affine = custom_vk - .delta_g2 - .try_into() - .map_err(|e| GeneralError::EncodingError(format!("{}: delta_g1", e)))?; - let gamma_g2: G2Affine = custom_vk - .gamma_g2 - .try_into() - .map_err(|e| GeneralError::EncodingError(format!("{}: gamma_g2", e)))?; - let ic = custom_vk - .ic - .into_iter() - .map(|s| s.try_into()) - .collect::>>()?; - - Ok(bellman::groth16::VerifyingKey { - alpha_g1, - beta_g1, - beta_g2, - gamma_g2, - delta_g1, - delta_g2, - ic: ic.into_iter().collect(), - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use bellman::groth16; - use bls12_381::Bls12; - use indexed_merkle_tree::{ - node::Node, - sha256_mod, - tree::{IndexedMerkleTree, Proof}, - Hash, - }; - use rand::rngs::OsRng; - - fn head_scalar() -> Scalar { - Node::HEAD.try_into().unwrap() - } - - fn small_scalar() -> Scalar { - let small_hash = - Hash::from_hex("13ae3ed6fe76d459c9c66fe38ff187593561a1f24d34cb22e06148c77e4cc02b") - .unwrap(); - small_hash.try_into().unwrap() - } - - fn mid_scalar() -> Scalar { - let mid_hash = - Hash::from_hex("3d1e830624b2572adc05351a7cbee2d3aa3f6a52b34fa38a260c9c78f96fcd07") - .unwrap(); - mid_hash.try_into().unwrap() - } - - fn big_scalar() -> Scalar { - let big_hash = - Hash::from_hex("6714dda957170ad7720bbd2c38004152f34ea5d4350a154b84a259cc62a5dbb4") - .unwrap(); - big_hash.try_into().unwrap() - } - - fn tail_scalar() -> Scalar { - Node::TAIL.try_into().unwrap() - } - - fn create_scalars() -> (Scalar, Scalar, Scalar, Scalar, Scalar) { - ( - head_scalar(), - small_scalar(), - mid_scalar(), - big_scalar(), - tail_scalar(), - ) - } - - fn setup_and_test_less_than_circuit(a: Scalar, b: Scalar) { - let circuit = LessThanCircuit::new(a, b); - let rng = &mut OsRng; - let params = groth16::generate_random_parameters::(circuit.clone(), rng) - .expect("unable to generate random parameters"); - let proof = groth16::create_random_proof(circuit.clone(), ¶ms, rng) - .expect("unable to create random proof"); - let pvk = groth16::prepare_verifying_key(¶ms.vk); - groth16::verify_proof(&pvk, &proof, &[]).expect("unable to verify proof") - } - - #[test] - fn le_with_scalar_valid() { - let (head, small, mid, big, tail) = 
create_scalars(); - - setup_and_test_less_than_circuit(head, small); - setup_and_test_less_than_circuit(small, tail); - - setup_and_test_less_than_circuit(small, big); - setup_and_test_less_than_circuit(big, tail); - - setup_and_test_less_than_circuit(head, mid); - setup_and_test_less_than_circuit(mid, big); - } - - #[test] - #[should_panic(expected = "unable to verify proof")] - fn invalid_less_than_circuit_a_gt_b() { - let (_, _, _, big, tail) = create_scalars(); - - setup_and_test_less_than_circuit(tail, big) - } - - #[test] - #[should_panic(expected = "unable to verify proof")] - fn invalid_less_than_circuit_a_eq_b() { - let head = head_scalar(); - setup_and_test_less_than_circuit(head, head) - } - - #[test] - fn test_serialize_and_deserialize_proof() { - let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); - let prev_commitment = tree.get_commitment().unwrap(); - - // create two nodes to insert - let ryan = sha256_mod(b"Ryan"); - let ford = sha256_mod(b"Ford"); - let sebastian = sha256_mod(b"Sebastian"); - let pusch = sha256_mod(b"Pusch"); - let ethan = sha256_mod(b"Ethan"); - let triple_zero = sha256_mod(b"000"); - - let mut ryans_node = Node::new_leaf(true, ryan, ford, Node::TAIL); - let mut sebastians_node = Node::new_leaf(true, sebastian, pusch, Node::TAIL); - let mut ethans_node = Node::new_leaf(true, ethan, triple_zero, Node::TAIL); - - // generate proofs for the two nodes - let first_insert_proof = tree.insert_node(&mut ryans_node).unwrap(); - let second_insert_proof = tree.insert_node(&mut sebastians_node).unwrap(); - let third_insert_proof = tree.insert_node(&mut ethans_node).unwrap(); - - // create zkSNARKs for the two proofs - let first_insert_zk_snark = Proof::Insert(first_insert_proof); - let second_insert_zk_snark = Proof::Insert(second_insert_proof); - let third_insert_zk_snark = Proof::Insert(third_insert_proof); - - let proofs = vec![ - first_insert_zk_snark, - second_insert_zk_snark, - third_insert_zk_snark, - ]; - let current_commitment = tree.get_commitment().unwrap(); - - let batched_proof = - BatchMerkleProofCircuit::new(&prev_commitment, ¤t_commitment, proofs).unwrap(); - - let rng = &mut OsRng; - let params = - groth16::generate_random_parameters::(batched_proof.clone(), rng).unwrap(); - let proof = groth16::create_random_proof(batched_proof.clone(), ¶ms, rng).unwrap(); - - let serialized_proof: Bls12Proof = proof.clone().into(); - let deserialized_proof_result: Result> = - serialized_proof.clone().try_into(); - assert!(deserialized_proof_result.is_ok(), "Deserialization failed"); - - let deserialized_proof = deserialized_proof_result.unwrap(); - assert_eq!(proof.a, deserialized_proof.a); - assert_eq!(proof.b, deserialized_proof.b); - assert_eq!(proof.c, deserialized_proof.c); - } - - #[test] - fn test_deserialize_invalid_proof() { - let invalid_proof = Bls12Proof { - a: G1([1; 48]), - b: G2([2; 96]), - c: G1([3; 48]), - }; - - let deserialized_proof_result: Result> = - invalid_proof.clone().try_into(); - assert!(deserialized_proof_result.is_err()); - } -} diff --git a/src/circuits/utils.rs b/src/circuits/utils.rs deleted file mode 100644 index c201a5d3..00000000 --- a/src/circuits/utils.rs +++ /dev/null @@ -1,29 +0,0 @@ -use crate::error::ProofError; -use anyhow::{anyhow, Result}; -use bls12_381::Scalar; -use indexed_merkle_tree::{node::Node, sha256_mod, tree::MerkleProof}; - -pub fn unpack_and_process(proof: &MerkleProof) -> Result<(Scalar, &Vec)> { - if !proof.path.is_empty() { - let root: Scalar = proof.root_hash.try_into()?; - Ok((root, 
&proof.path)) - } else { - Err(anyhow!(ProofError::ProofUnpackError(format!( - "proof path is empty for root hash {}", - proof.root_hash - )))) - } -} - -pub fn recalculate_hash_as_scalar(path: &[Node]) -> Result { - let mut current_hash = path[0].get_hash(); - for node in path.iter().skip(1) { - let combined = if node.is_left_sibling() { - [node.get_hash().as_ref(), current_hash.as_ref()].concat() - } else { - [current_hash.as_ref(), node.get_hash().as_ref()].concat() - }; - current_hash = sha256_mod(&combined); - } - current_hash.try_into() -} diff --git a/src/lib.rs b/src/lib.rs index eab31618..8ded619a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,4 @@ pub mod cfg; -pub mod circuits; pub mod common; pub mod consts; pub mod da; diff --git a/src/main.rs b/src/main.rs index 80cdfc96..4d902b75 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,5 +1,4 @@ mod cfg; -pub mod circuits; pub mod common; pub mod consts; pub mod da; @@ -14,9 +13,13 @@ mod webserver; use cfg::{initialize_da_layer, load_config}; use clap::Parser; use keystore_rs::{KeyChain, KeyStore, KeyStoreType}; +use prism::nova::utils::create_pp; use crate::cfg::{CommandLineArgs, Commands}; +use anyhow::{Context, Result}; +use arecibo::{provider::PallasEngine, supernova::PublicParams}; use node_types::{lightclient::LightClient, sequencer::Sequencer, NodeType}; +use std::io; use std::sync::Arc; use storage::RedisConnection; @@ -27,13 +30,21 @@ extern crate log; #[actix_web::main] async fn main() -> std::io::Result<()> { let args = CommandLineArgs::parse(); + let config = load_config(args.clone()) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?; + let pp = load_or_generate_public_params(&config.public_params_path) + .expect("Failed to deserialize or generate public params."); + + if let Commands::GeneratePublicParams {} = args.command { + bincode::serialize_into(io::stdout(), &pp).unwrap(); + return Ok(()); + } + let da = initialize_da_layer(&config) .await .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?; - let node: Arc = match args.command { Commands::LightClient {} => { let celestia_config = config.celestia_config.ok_or_else(|| { @@ -71,9 +82,26 @@ async fn main() -> std::io::Result<()> { })?, ) } + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "Invalid node type", + )); + } }; node.start() .await .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())) } + +fn load_or_generate_public_params(path: &Option) -> Result> { + if let Some(path) = path { + info!("Loading public params from file: {:?}", path); + let bytes = std::fs::read(path).context("Failed to read public params file")?; + bincode::deserialize(&bytes).context("Failed to deserialize public params") + } else { + warn!("No public params file provided, generating new ones. 
This may take a while."); + Ok(create_pp()) + } +} diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 0ea26878..0f977ef6 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -8,7 +8,7 @@ use ff::PrimeField; use std::cell::RefCell; #[derive(Clone)] -struct EpochCircuitSequence +pub struct EpochCircuitSequence where E1: CurveCycleEquipped, { @@ -84,7 +84,7 @@ where } #[derive(Clone)] -enum EpochCircuit { +pub enum EpochCircuit { Insert(InsertCircuit), Update(UpdateCircuit), } diff --git a/src/nova/utils.rs b/src/nova/utils.rs index 3b67909a..e29b11ab 100644 --- a/src/nova/utils.rs +++ b/src/nova/utils.rs @@ -1,5 +1,9 @@ // use bellpepper_core::ConstraintSystem; +use crate::nova::batch::EpochCircuitSequence; +use crate::tree::*; +use crate::{common::Hashchain, nova::batch::EpochCircuit}; use anyhow::Result; +use arecibo::{provider::PallasEngine, supernova::PublicParams, traits::snark::default_ck_hint}; use bellpepper_core::{ boolean::{AllocatedBit, Boolean}, num::AllocatedNum, @@ -8,7 +12,9 @@ use bellpepper_core::{ use ff::PrimeField; use itertools::Itertools as _; use jmt::RootHash; +use jmt::{mock::MockTreeStore, KeyHash}; use std::marker::PhantomData; +use std::sync::Arc; use crate::tree; @@ -156,50 +162,25 @@ pub fn get_selector_vec_from_index>( Ok(selector) } -// pub(crate) fn prove_update>( -// cs: &mut CS, -// old_root: Scalar, -// old_path: &[Node], -// new_root: Scalar, -// new_path: &[Node], -// ) -> Result { -// let root_with_old_pointer = -// cs.alloc(|| "first update root with old pointer", || Ok(old_root))?; -// let root_with_new_pointer = -// cs.alloc(|| "first update root with new pointer", || Ok(new_root))?; - -// // update the root hash for old and new path -// let recalculated_root_with_old_pointer = -// recalculate_hash_as_scalar(old_path).map_err(|_| SynthesisError::Unsatisfiable)?; -// let recalculated_root_with_new_pointer = -// recalculate_hash_as_scalar(new_path).map_err(|_| SynthesisError::Unsatisfiable)?; - -// let allocated_recalculated_root_with_old_pointer = cs.alloc( -// || "recalculated first update proof old root", -// || Ok(recalculated_root_with_old_pointer), -// )?; -// let allocated_recalculated_root_with_new_pointer = cs.alloc( -// || "recalculated first update proof new root", -// || Ok(recalculated_root_with_new_pointer), -// )?; - -// // Check if the resulting hash is the root hash of the old tree -// // allocated_recalculated_root_with_old_pointer * (1) = root_with_old_pointer -// cs.enforce( -// || "first update old root equality", -// |lc| lc + allocated_recalculated_root_with_old_pointer, -// |lc| lc + CS::one(), -// |lc| lc + root_with_old_pointer, -// ); - -// // Check that the resulting hash is the root hash of the new tree. 
-// // allocated_recalculated_root_with_new_pointer * (1) = root_with_new_pointer -// cs.enforce( -// || "first update new root equality", -// |lc| lc + allocated_recalculated_root_with_new_pointer, -// |lc| lc + CS::one(), -// |lc| lc + root_with_new_pointer, -// ); - -// Ok(recalculated_root_with_new_pointer) -// } +pub fn create_pp() -> PublicParams { + type E1 = PallasEngine; + + let store = Arc::new(MockTreeStore::default()); + let mut tree = KeyDirectoryTree::new(store); + + let mut hc = Hashchain::new("publicparams".into()); + let key = hc.get_keyhash(); + + let insert_proof = tree.insert(key, hc.clone()).unwrap(); + + hc.add("test_value".into()).unwrap(); + let update_proof = tree.update(key, hc).unwrap(); + + let operations = vec![ + (0, EpochCircuit::new_insert(insert_proof, 2)), + (1, EpochCircuit::new_update(update_proof, 2)), + ]; + + let circuit_sequence = EpochCircuitSequence::::new(operations); + PublicParams::setup(&circuit_sequence, &*default_ck_hint(), &*default_ck_hint()) +} diff --git a/src/utils.rs b/src/utils.rs index b9fd297a..2bf59b76 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,5 +1,4 @@ use crate::{ - circuits::ProofVariantCircuit, error::{GeneralError, PrismError, ProofError}, tree::Digest, }; @@ -9,7 +8,7 @@ use bellman::groth16::{self, VerifyingKey}; use bls12_381::{Bls12, Scalar}; use ed25519::Signature; use ed25519_dalek::{Verifier, VerifyingKey as Ed25519VerifyingKey}; -use indexed_merkle_tree::{tree::Proof}; +use indexed_merkle_tree::tree::Proof; use rand::rngs::OsRng; pub fn parse_json_to_proof(json_str: &str) -> Result> { @@ -32,34 +31,6 @@ pub fn decode_public_key(pub_key_str: &String) -> Result { .map_err(|_| GeneralError::DecodingError("ed25519 verifying key".to_string()).into()) } -pub fn create_and_verify_snark( - circuit: ProofVariantCircuit, - scalars: Vec, -) -> Result<(groth16::Proof, VerifyingKey)> { - let rng = &mut OsRng; - - trace!("creating parameters with BLS12-381 pairing-friendly elliptic curve construction...."); - let params = - groth16::generate_random_parameters::(circuit.clone(), rng).map_err(|e| { - PrismError::Proof(ProofError::ProofUnpackError(format!( - "generating random params: {}", - e - ))) - })?; - - trace!("creating proof for zkSNARK..."); - let proof = groth16::create_random_proof(circuit, ¶ms, rng) - .map_err(|e| PrismError::Proof(ProofError::GenerationError(e.to_string())))?; - - trace!("preparing verifying key for zkSNARK..."); - let pvk = groth16::prepare_verifying_key(¶ms.vk); - - groth16::verify_proof(&pvk, &proof, &scalars) - .map_err(|e| PrismError::Proof(ProofError::VerificationError(e.to_string())))?; - - Ok((proof, params.vk)) -} - pub fn validate_epoch( previous_commitment: &Digest, current_commitment: &Digest, @@ -120,10 +91,6 @@ pub fn verify_signature( #[cfg(test)] mod tests { - - - - use super::*; From fbf51c894983e76e66fb6a20a7900e7d1b1bbe93 Mon Sep 17 00:00:00 2001 From: Ryan Date: Fri, 23 Aug 2024 14:19:44 +0200 Subject: [PATCH 17/33] updating cargo toml --- Cargo.lock | 2 ++ Cargo.toml | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6ef8d422..7055b437 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -373,6 +373,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arecibo" version = "0.2.0" +source = "git+https://github.com/deltadevsde/arecibo#1b6b7a772893bb6f0ce54556a0d52cd13bd1175c" dependencies = [ "abomonation", "abomonation_derive_ng", @@ -2434,6 +2435,7 @@ checksum = 
"49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jmt" version = "0.10.0" +source = "git+https://github.com/deltadevsde/jmt#dc521ef17fcba2fe96990dbd1831fcb274ebb541" dependencies = [ "anyhow", "auto_impl", diff --git a/Cargo.toml b/Cargo.toml index e956d636..b5a1aff3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,11 +52,11 @@ keystore-rs = "0.1.0" toml = "0.8.14" dirs = "5.0.1" anyhow = "1.0.44" -jmt = { path = "../jmt", features = [ +jmt = { git = "https://github.com/deltadevsde/jmt", features = [ "mocks", ] } #{ version = "0.10.0", features = ["mocks"] } bellpepper-core = { version = "0.4.0", default-features = false } -arecibo = { path = "../arecibo" } +arecibo = { git = "https://github.com/deltadevsde/arecibo" } itertools = "0.13.0" # zip_eq sha2 = "0.10.8" auto_impl = "1.2.0" From 5cefa2aa04384ba53d4fe35ceb82e6907b1e92cf Mon Sep 17 00:00:00 2001 From: sebasti810 Date: Thu, 29 Aug 2024 18:00:40 +0200 Subject: [PATCH 18/33] feat: playing around with supernova rehashing --- Cargo.lock | 17 +++- Cargo.toml | 3 +- src/common.rs | 16 ++++ src/nova/insert.rs | 209 +++++++++++++++++++++++++++++++++++++++++++-- src/nova/utils.rs | 1 + 5 files changed, 237 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7055b437..54eb0a98 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -377,7 +377,7 @@ source = "git+https://github.com/deltadevsde/arecibo#1b6b7a772893bb6f0ce54556a0d dependencies = [ "abomonation", "abomonation_derive_ng", - "bellpepper", + "bellpepper 0.4.1 (git+https://github.com/lurk-lab/bellpepper?branch=dev)", "bellpepper-core", "bincode", "bitvec", @@ -718,6 +718,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "bellpepper" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae286c2cb403324ab644c7cc68dceb25fe52ca9429908a726d7ed272c1edf7b" +dependencies = [ + "bellpepper-core", + "byteorder", + "ff", +] + [[package]] name = "bellpepper" version = "0.4.1" @@ -2435,7 +2446,6 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jmt" version = "0.10.0" -source = "git+https://github.com/deltadevsde/jmt#dc521ef17fcba2fe96990dbd1831fcb274ebb541" dependencies = [ "anyhow", "auto_impl", @@ -2893,7 +2903,7 @@ source = "git+https://github.com/lurk-lab/neptune?branch=dev#b6fb1f9372be2a50c56 dependencies = [ "abomonation", "abomonation_derive_ng", - "bellpepper", + "bellpepper 0.4.1 (git+https://github.com/lurk-lab/bellpepper?branch=dev)", "bellpepper-core", "blake2s_simd", "blstrs", @@ -3417,6 +3427,7 @@ dependencies = [ "axum", "base64 0.22.1", "bellman", + "bellpepper 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "bellpepper-core", "bincode", "bls12_381", diff --git a/Cargo.toml b/Cargo.toml index b5a1aff3..3eb82789 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,10 +52,11 @@ keystore-rs = "0.1.0" toml = "0.8.14" dirs = "5.0.1" anyhow = "1.0.44" -jmt = { git = "https://github.com/deltadevsde/jmt", features = [ +jmt = { path = "../jmt", features = [ "mocks", ] } #{ version = "0.10.0", features = ["mocks"] } bellpepper-core = { version = "0.4.0", default-features = false } +bellpepper = "0.4.1" arecibo = { git = "https://github.com/deltadevsde/arecibo" } itertools = "0.13.0" # zip_eq sha2 = "0.10.8" diff --git a/src/common.rs b/src/common.rs index 63f822fd..224ceb6a 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,5 +1,6 @@ use anyhow::{bail, Result}; use borsh::{BorshDeserialize, BorshSerialize}; +use 
ff::derive::bitvec::view::AsBits; use jmt::KeyHash; use serde::{Deserialize, Serialize}; use std::{ @@ -134,6 +135,11 @@ impl Hashchain { self.push(operation) } + pub fn to_bytes(&self) -> Vec { + self.last() + .map_or(Vec::new(), |last_entry| last_entry.to_bytes()) + } + pub fn get(&self, idx: usize) -> &HashchainEntry { &self.entries[idx] } @@ -207,4 +213,14 @@ impl HashchainEntry { operation, } } + + pub fn to_bytes(&self) -> Vec { + let mut bytes = Vec::new(); + + bytes.extend_from_slice(self.hash.as_ref()); + bytes.extend_from_slice(self.previous_hash.as_ref()); + bytes.extend_from_slice(self.operation.to_string().as_bytes()); + + bytes + } } diff --git a/src/nova/insert.rs b/src/nova/insert.rs index e3f6d07d..af4cf3e7 100644 --- a/src/nova/insert.rs +++ b/src/nova/insert.rs @@ -1,17 +1,216 @@ use crate::{ nova::utils::{next_rom_index_and_pc, Digest}, - tree::InsertProof, + tree::{Hasher, InsertProof, NonMembershipProof}, }; use anyhow::Result; use arecibo::supernova::StepCircuit; -use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; -use ff::PrimeField; +use bellpepper::gadgets::sha256::sha256; +use bellpepper_core::{ + boolean::{AllocatedBit, Boolean}, + num::AllocatedNum, + ConstraintSystem, SynthesisError, +}; +use ff::{PrimeField, PrimeFieldBits}; +use jmt::proof::UpdateMerkleProof; +use std::marker::PhantomData; + +#[derive(Clone)] +struct InsertProofCircuit { + proof: InsertProof, + _p: PhantomData, +} + +impl InsertProofCircuit { + pub fn new(proof: InsertProof) -> Self { + Self { + proof, + _p: PhantomData, + } + } +} + +impl StepCircuit for InsertProofCircuit { + fn arity(&self) -> usize { + 1 + } + + fn synthesize>( + &self, + cs: &mut CS, + pc: Option<&AllocatedNum>, + z: &[AllocatedNum], + ) -> Result<(Option>, Vec>), SynthesisError> { + let mut z_out: Vec> = Vec::new(); + + // Allocate the old root + let old_root = AllocatedNum::alloc(cs.namespace(|| "old_root"), || { + Ok(Digest::new(self.proof.non_membership_proof.root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable)?) + })?; + + // Allocate the new root + let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || { + Ok(Digest::new(self.proof.new_root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable)?) 
+ })?; + + // Allocate the key + let key_bits = allocate_bits_to_binary_number( + cs.namespace(|| "key"), + Some(self.proof.non_membership_proof.key.0.to_vec()), + )?; + + // Allocate the value + let value_bytes = self.proof.value.to_bytes(); + let mut value_bits = Vec::new(); + + for (byte_idx, &byte) in value_bytes.iter().enumerate() { + for bit_idx in 0..8 { + let bit = AllocatedBit::alloc( + cs.namespace(|| format!("value bit {}.{}", byte_idx, bit_idx)), + Some((byte >> bit_idx) & 1 == 1), + )?; + value_bits.push(Boolean::from(bit)); + } + } + + // Hash the key and value + let leaf_hash = sha256( + cs.namespace(|| "leaf_hash"), + &[key_bits.clone(), value_bits].concat(), + ) + .map_err(|e| SynthesisError::Unsatisfiable)?; + + // Verify the non-membership proof + verify_non_membership_proof( + cs.namespace(|| "non_membership_proof"), + &self.proof.non_membership_proof, + &old_root, + &key_bits, + )?; + + // Verify the membership proof (update) + verify_membership_proof( + cs.namespace(|| "membership_proof"), + &self.proof.membership_proof, + &old_root, + &new_root, + &key_bits, + &leaf_hash, + )?; + + z_out.push(new_root); + + let new_pc = match pc { + Some(old_pc) => { + let new_pc = + AllocatedNum::alloc(cs.namespace(|| "new_pc"), || match old_pc.get_value() { + Some(v) => Ok(v + Scalar::from(1)), + None => Err(SynthesisError::AssignmentMissing), + })?; + + // Enforce that new_pc = old_pc + 1 + cs.enforce( + || "new_pc = old_pc + 1", + |lc| lc + old_pc.get_variable(), + |lc| lc + CS::one(), + |lc| lc + new_pc.get_variable(), + ); + + Some(new_pc) + } + None => None, + }; + + Ok((new_pc, z_out)) + } + + fn circuit_index(&self) -> usize { + 0 + } +} + +fn allocate_bits_to_binary_number>( + mut cs: CS, + value: Option>, +) -> Result, SynthesisError> { + let bits = value + .map(|bytes| { + bytes + .iter() + .flat_map(|byte| (0..8).map(move |i| (byte >> i) & 1 == 1)) + .collect::>() + }) + .unwrap_or_else(|| vec![false; 256]); + + let mut result = Vec::new(); + for (i, &bit) in bits.iter().enumerate() { + let allocated_bit = AllocatedBit::alloc(cs.namespace(|| format!("bit {}", i)), Some(bit))?; + result.push(Boolean::from(allocated_bit)); + } + Ok(result) +} + +fn verify_non_membership_proof>( + mut cs: CS, + proof: &NonMembershipProof, + root: &[Boolean], + key: &[Boolean], +) -> Result<(), SynthesisError> { + // 1. Hash the key + let key_hash = sha256(cs.namespace(|| "hash key"), key)?; + + // 2. Traverse the Merkle path + let mut current = key_hash; + for (i, sibling) in proof.proof.siblings().iter().enumerate() { + let sibling_bits = allocate_bits_to_binary_number( + cs.namespace(|| format!("sibling bits {}", i)), + Some(sibling.to_vec()), + )?; + + let (left, right) = if *is_left { + (sibling_bits, current) + } else { + (current, sibling_bits) + }; + + current = sha256( + cs.namespace(|| format!("hash node {}", i)), + &[left, right].concat(), + )?; + } + + // 3. 
Check that the computed root does not match the given root + for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { + Boolean::enforce_not_equal( + cs.namespace(|| format!("root bit {} should not be equal", i)), + computed_bit, + given_bit, + )?; + } + + Ok(()) +} + +fn verify_membership_proof>( + mut cs: CS, + proof: &UpdateMerkleProof, + old_root: &AllocatedNum, + new_root: &AllocatedNum, + key: &[Boolean], + leaf_hash: &[Boolean], +) -> Result<(), SynthesisError> { + // lfg implementing the logic to verify the membership proof + Ok(()) +} #[derive(Clone)] pub struct InsertCircuit { pub insertion_proof: InsertProof, rom_size: usize, - _phantom: std::marker::PhantomData, + _phantom: PhantomData, } impl InsertCircuit { @@ -19,7 +218,7 @@ impl InsertCircuit { Self { insertion_proof, rom_size, - _phantom: std::marker::PhantomData, + _phantom: PhantomData, } } } diff --git a/src/nova/utils.rs b/src/nova/utils.rs index e29b11ab..38ef1d96 100644 --- a/src/nova/utils.rs +++ b/src/nova/utils.rs @@ -4,6 +4,7 @@ use crate::tree::*; use crate::{common::Hashchain, nova::batch::EpochCircuit}; use anyhow::Result; use arecibo::{provider::PallasEngine, supernova::PublicParams, traits::snark::default_ck_hint}; +use bellpepper::gadgets::sha256::sha256; use bellpepper_core::{ boolean::{AllocatedBit, Boolean}, num::AllocatedNum, From 728e28bbb8330818f31c2907444d10cacf06e44f Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Sun, 8 Sep 2024 09:10:02 +0200 Subject: [PATCH 19/33] membership proof --- src/nova/batch.rs | 8 +- src/nova/insert.rs | 309 ++++++++++++++++----------------------------- src/tree/mod.rs | 22 ++-- 3 files changed, 127 insertions(+), 212 deletions(-) diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 0f977ef6..124ab005 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -4,7 +4,7 @@ use arecibo::{ supernova::{StepCircuit, TrivialSecondaryCircuit}, traits::{CurveCycleEquipped, Dual, Engine}, }; -use ff::PrimeField; +use ff::{PrimeField, PrimeFieldBits}; use std::cell::RefCell; #[derive(Clone)] @@ -84,12 +84,12 @@ where } #[derive(Clone)] -pub enum EpochCircuit { +pub enum EpochCircuit { Insert(InsertCircuit), Update(UpdateCircuit), } -impl EpochCircuit { +impl EpochCircuit { pub fn new_insert(insertion_proof: InsertProof, rom_size: usize) -> Self { Self::Insert(InsertCircuit::new(insertion_proof, rom_size)) } @@ -99,7 +99,7 @@ impl EpochCircuit { } } -impl StepCircuit for EpochCircuit { +impl StepCircuit for EpochCircuit { fn arity(&self) -> usize { match self { Self::Insert(x) => x.arity(), diff --git a/src/nova/insert.rs b/src/nova/insert.rs index af4cf3e7..737a89c0 100644 --- a/src/nova/insert.rs +++ b/src/nova/insert.rs @@ -1,6 +1,6 @@ use crate::{ nova::utils::{next_rom_index_and_pc, Digest}, - tree::{Hasher, InsertProof, NonMembershipProof}, + tree::{Hasher, InsertProof, SPARSE_MERKLE_PLACEHOLDER_HASH}, }; use anyhow::Result; use arecibo::supernova::StepCircuit; @@ -11,27 +11,28 @@ use bellpepper_core::{ ConstraintSystem, SynthesisError, }; use ff::{PrimeField, PrimeFieldBits}; -use jmt::proof::UpdateMerkleProof; -use std::marker::PhantomData; +use jmt::proof::{SparseMerkleLeafNode, SparseMerkleNode, SparseMerkleProof}; #[derive(Clone)] -struct InsertProofCircuit { - proof: InsertProof, - _p: PhantomData, +pub struct InsertCircuit { + pub proof: InsertProof, + rom_size: usize, + _phantom: std::marker::PhantomData, } -impl InsertProofCircuit { - pub fn new(proof: InsertProof) -> Self { +impl InsertCircuit { + pub fn new(proof: InsertProof, 
rom_size: usize) -> Self { Self { proof, - _p: PhantomData, + rom_size, + _phantom: std::marker::PhantomData, } } } -impl StepCircuit for InsertProofCircuit { +impl StepCircuit for InsertCircuit { fn arity(&self) -> usize { - 1 + 2 + self.rom_size // old_root + rom_index + rom[].len() } fn synthesize>( @@ -40,91 +41,68 @@ impl StepCircuit for InsertProofCir pc: Option<&AllocatedNum>, z: &[AllocatedNum], ) -> Result<(Option>, Vec>), SynthesisError> { - let mut z_out: Vec> = Vec::new(); + let old_root = &z[0]; + let rom_index = &z[1]; + let allocated_rom = &z[2..]; - // Allocate the old root - let old_root = AllocatedNum::alloc(cs.namespace(|| "old_root"), || { - Ok(Digest::new(self.proof.non_membership_proof.root) - .to_scalar() - .map_err(|_| SynthesisError::Unsatisfiable)?) - })?; + let pc = pc.ok_or(SynthesisError::AssignmentMissing)?; - // Allocate the new root - let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || { - Ok(Digest::new(self.proof.new_root) - .to_scalar() - .map_err(|_| SynthesisError::Unsatisfiable)?) - })?; + let (rom_index_next, pc_next) = next_rom_index_and_pc( + &mut cs.namespace(|| "next and rom_index and pc"), + rom_index, + allocated_rom, + pc, + )?; - // Allocate the key - let key_bits = allocate_bits_to_binary_number( - cs.namespace(|| "key"), - Some(self.proof.non_membership_proof.key.0.to_vec()), + let old_root_bits = allocate_bits_to_binary_number( + cs, + Some(self.proof.non_membership_proof.root.to_bytes().to_vec()), )?; - // Allocate the value - let value_bytes = self.proof.value.to_bytes(); - let mut value_bits = Vec::new(); + let pre_insertion_scalar = Digest::new(self.proof.non_membership_proof.root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable); + let pre_insertion_root = + AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { + pre_insertion_scalar + })?; - for (byte_idx, &byte) in value_bytes.iter().enumerate() { - for bit_idx in 0..8 { - let bit = AllocatedBit::alloc( - cs.namespace(|| format!("value bit {}.{}", byte_idx, bit_idx)), - Some((byte >> bit_idx) & 1 == 1), - )?; - value_bits.push(Boolean::from(bit)); - } - } + cs.enforce( + || "z0 == pre_insertion_root", + |lc| lc + old_root.get_variable(), + |lc| lc + CS::one(), + |lc| lc + pre_insertion_root.get_variable(), + ); - // Hash the key and value - let leaf_hash = sha256( - cs.namespace(|| "leaf_hash"), - &[key_bits.clone(), value_bits].concat(), - ) - .map_err(|e| SynthesisError::Unsatisfiable)?; + // Allocate the new root + let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || { + Digest::new(self.proof.new_root) + .to_scalar() + .map_err(|_| SynthesisError::Unsatisfiable) + })?; // Verify the non-membership proof - verify_non_membership_proof( - cs.namespace(|| "non_membership_proof"), - &self.proof.non_membership_proof, - &old_root, - &key_bits, - )?; + // verify_non_membership_proof( + // cs.namespace(|| "non_membership_proof"), + // &self.proof.non_membership_proof, + // &old_root, + // &key_bits, + // )?; + + let leaf = &self + .proof + .membership_proof + .leaf() + .ok_or(SynthesisError::AssignmentMissing)?; // Verify the membership proof (update) - verify_membership_proof( - cs.namespace(|| "membership_proof"), - &self.proof.membership_proof, - &old_root, - &new_root, - &key_bits, - &leaf_hash, - )?; + verify_membership_proof(cs, &self.proof.membership_proof, &old_root_bits, *leaf)?; - z_out.push(new_root); - - let new_pc = match pc { - Some(old_pc) => { - let new_pc = - AllocatedNum::alloc(cs.namespace(|| "new_pc"), || match 
old_pc.get_value() { - Some(v) => Ok(v + Scalar::from(1)), - None => Err(SynthesisError::AssignmentMissing), - })?; - - // Enforce that new_pc = old_pc + 1 - cs.enforce( - || "new_pc = old_pc + 1", - |lc| lc + old_pc.get_variable(), - |lc| lc + CS::one(), - |lc| lc + new_pc.get_variable(), - ); - - Some(new_pc) - } - None => None, - }; + let mut z_next = vec![new_root]; + z_next.push(rom_index_next); + z_next.extend(z[2..].iter().cloned()); - Ok((new_pc, z_out)) + Ok((Some(pc_next), z_next)) } fn circuit_index(&self) -> usize { @@ -133,7 +111,7 @@ impl StepCircuit for InsertProofCir } fn allocate_bits_to_binary_number>( - mut cs: CS, + cs: &mut CS, value: Option>, ) -> Result, SynthesisError> { let bits = value @@ -153,134 +131,67 @@ fn allocate_bits_to_binary_number>( - mut cs: CS, - proof: &NonMembershipProof, - root: &[Boolean], - key: &[Boolean], -) -> Result<(), SynthesisError> { - // 1. Hash the key - let key_hash = sha256(cs.namespace(|| "hash key"), key)?; +// fn verify_non_membership_proof>( +// mut cs: CS, +// proof: &NonMembershipProof, +// root: &[Boolean], +// key: &[Boolean], +// ) -> Result<(), SynthesisError> { +// // 1. Hash the key +// let key_hash = sha256(cs.namespace(|| "hash key"), key)?; - // 2. Traverse the Merkle path - let mut current = key_hash; - for (i, sibling) in proof.proof.siblings().iter().enumerate() { - let sibling_bits = allocate_bits_to_binary_number( - cs.namespace(|| format!("sibling bits {}", i)), - Some(sibling.to_vec()), - )?; +// // 2. Traverse the Merkle path - let (left, right) = if *is_left { - (sibling_bits, current) - } else { - (current, sibling_bits) - }; +// // 3. Check that the computed root does not match the given root - current = sha256( - cs.namespace(|| format!("hash node {}", i)), - &[left, right].concat(), - )?; - } +// Ok(()) +// } - // 3. 
Check that the computed root does not match the given root - for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { - Boolean::enforce_not_equal( - cs.namespace(|| format!("root bit {} should not be equal", i)), - computed_bit, - given_bit, - )?; +fn hash_node>( + cs: &mut CS, + node: &SparseMerkleNode, +) -> Result, SynthesisError> { + match node { + SparseMerkleNode::Leaf(node) => { + let node_bits = allocate_bits_to_binary_number(cs, Some(node.to_bytes()))?; + sha256(cs.namespace(|| "hash key"), &node_bits) + } + SparseMerkleNode::Internal(node) => { + let node_bits = allocate_bits_to_binary_number(cs, Some(node.to_bytes()))?; + sha256(cs.namespace(|| "hash key"), &node_bits) + } + SparseMerkleNode::Null => allocate_bits_to_binary_number( + cs, + Some(SPARSE_MERKLE_PLACEHOLDER_HASH.to_bytes().to_vec()), + ), } - - Ok(()) } fn verify_membership_proof>( - mut cs: CS, - proof: &UpdateMerkleProof, - old_root: &AllocatedNum, - new_root: &AllocatedNum, - key: &[Boolean], - leaf_hash: &[Boolean], + cs: &mut CS, + proof: &SparseMerkleProof, + root: &Vec, + leaf: SparseMerkleLeafNode, ) -> Result<(), SynthesisError> { - // lfg implementing the logic to verify the membership proof - Ok(()) -} - -#[derive(Clone)] -pub struct InsertCircuit { - pub insertion_proof: InsertProof, - rom_size: usize, - _phantom: PhantomData, -} - -impl InsertCircuit { - pub fn new(insertion_proof: InsertProof, rom_size: usize) -> Self { - Self { - insertion_proof, - rom_size, - _phantom: PhantomData, - } - } -} + // let leaf = self.proof.membership_proof.leaf().ok_or(SynthesisError::Unsatisfiable)?; + let mut current = hash_node(cs, &SparseMerkleNode::Leaf(leaf))?; -impl StepCircuit for InsertCircuit -where - F: PrimeField, -{ - fn arity(&self) -> usize { - 2 + self.rom_size // old_root + rom_index + rom[].len() - } + for (i, sibling) in proof.siblings().iter().enumerate() { + let sibling_hash = hash_node(cs, sibling)?; - fn circuit_index(&self) -> usize { - 0 + current = sha256( + cs.namespace(|| format!("hash node {}", i)), + &[current, sibling_hash].concat(), + )?; } - fn synthesize>( - &self, - cs: &mut CS, - pc: Option<&AllocatedNum>, - z: &[AllocatedNum], - ) -> Result<(Option>, Vec>), SynthesisError> { - let old_root = &z[0]; - let rom_index = &z[1]; - let allocated_rom = &z[2..]; - - let pc = pc.ok_or(SynthesisError::AssignmentMissing)?; - - let (rom_index_next, pc_next) = next_rom_index_and_pc( - &mut cs.namespace(|| "next and rom_index and pc"), - rom_index, - allocated_rom, - pc, + for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { + Boolean::enforce_equal( + cs.namespace(|| format!("root bit {} should be equal", i)), + computed_bit, + given_bit, )?; - - let pre_insertion_scalar = Digest::new(self.insertion_proof.non_membership_proof.root) - .to_scalar() - .map_err(|_| SynthesisError::Unsatisfiable); - let pre_insertion_root = - AllocatedNum::alloc(cs.namespace(|| "pre_insertion_root"), || { - pre_insertion_scalar - })?; - let new_scalar = Digest::new(self.insertion_proof.new_root) - .to_scalar() - .map_err(|_| SynthesisError::Unsatisfiable); - let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || new_scalar)?; - - cs.enforce( - || "z0 == pre_insertion_root", - |lc| lc + old_root.get_variable(), - |lc| lc + CS::one(), - |lc| lc + pre_insertion_root.get_variable(), - ); - // TODO: bellpepper merkle proof gadget - self.insertion_proof - .verify() - .map_err(|_| SynthesisError::Unsatisfiable)?; - - let mut z_next = vec![new_root]; - 
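// State layout note: z is [current_root, rom_index, rom entries...], matching
// arity() = 2 + rom_size. Each step writes the updated root into slot 0,
// advances rom_index, and passes the ROM tail through unchanged so later
// steps can still look up their circuit index.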
z_next.push(rom_index_next); - z_next.extend(z[2..].iter().cloned()); - - Ok((Some(pc_next), z_next)) } + + Ok(()) } diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 181f1979..c8d6fdb7 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -21,6 +21,12 @@ pub type Hasher = sha2::Sha256; )] pub struct Digest([u8; 32]); +impl Digest { + pub fn to_bytes(&self) -> [u8; 32] { + return self.0; + } +} + // implementing it for now to get things to compile, curve choice will be made later impl TryFrom for Scalar { type Error = anyhow::Error; @@ -121,7 +127,7 @@ pub struct InsertProof { pub non_membership_proof: NonMembershipProof, pub new_root: Digest, - pub membership_proof: UpdateMerkleProof, + pub membership_proof: SparseMerkleProof, pub value: Hashchain, } @@ -133,10 +139,10 @@ impl InsertProof { let value = to_vec(&self.value).unwrap(); - self.membership_proof.clone().verify_update( - self.non_membership_proof.root.into(), + self.membership_proof.clone().verify_existence( self.new_root.into(), - vec![(self.non_membership_proof.key, Some(value))], + self.non_membership_proof.key, + value, ); Ok(()) @@ -256,16 +262,14 @@ where bail!("Key already exists"); } - let (new_root, membership_proof, tree_update_batch) = self + // the update proof just contains another nm proof + let (new_root, _, tree_update_batch) = self .jmt .put_value_set_with_proof(vec![(key, Some(serialized_value))], self.epoch + 1)?; self.queue_batch(tree_update_batch); self.write_batch()?; - ensure!( - membership_proof.len() == 1, - "UpdateProof does not span only a single update" - ); + let (_, membership_proof) = self.jmt.get_with_proof(key, self.epoch)?; Ok(InsertProof { new_root: new_root.into(), From 503f862a2935e165d550c41751626d0655fb55e5 Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Mon, 9 Sep 2024 13:08:37 +0200 Subject: [PATCH 20/33] invalid witness length --- src/nova/insert.rs | 116 ++++++-------------------------------------- src/nova/update.rs | 26 ++++++++-- src/nova/utils.rs | 118 ++++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 152 insertions(+), 108 deletions(-) diff --git a/src/nova/insert.rs b/src/nova/insert.rs index 737a89c0..48306cec 100644 --- a/src/nova/insert.rs +++ b/src/nova/insert.rs @@ -1,17 +1,14 @@ use crate::{ - nova::utils::{next_rom_index_and_pc, Digest}, - tree::{Hasher, InsertProof, SPARSE_MERKLE_PLACEHOLDER_HASH}, + nova::utils::{ + allocate_bits_to_binary_number, next_rom_index_and_pc, verify_membership_proof, Digest, + }, + tree::InsertProof, }; use anyhow::Result; use arecibo::supernova::StepCircuit; -use bellpepper::gadgets::sha256::sha256; -use bellpepper_core::{ - boolean::{AllocatedBit, Boolean}, - num::AllocatedNum, - ConstraintSystem, SynthesisError, -}; +use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; use ff::{PrimeField, PrimeFieldBits}; -use jmt::proof::{SparseMerkleLeafNode, SparseMerkleNode, SparseMerkleProof}; +use sha2::Sha256; #[derive(Clone)] pub struct InsertCircuit { @@ -54,11 +51,6 @@ impl StepCircuit for InsertCircuit< pc, )?; - let old_root_bits = allocate_bits_to_binary_number( - cs, - Some(self.proof.non_membership_proof.root.to_bytes().to_vec()), - )?; - let pre_insertion_scalar = Digest::new(self.proof.non_membership_proof.root) .to_scalar() .map_err(|_| SynthesisError::Unsatisfiable); @@ -81,6 +73,13 @@ impl StepCircuit for InsertCircuit< .map_err(|_| SynthesisError::Unsatisfiable) })?; + let new_root_bits = + allocate_bits_to_binary_number(cs, self.proof.membership_proof.root_hash().0.to_vec())?; + + self.proof + 
.verify() + .map_err(|_| SynthesisError::Unsatisfiable)?; + // Verify the non-membership proof // verify_non_membership_proof( // cs.namespace(|| "non_membership_proof"), @@ -95,8 +94,7 @@ impl StepCircuit for InsertCircuit< .leaf() .ok_or(SynthesisError::AssignmentMissing)?; - // Verify the membership proof (update) - verify_membership_proof(cs, &self.proof.membership_proof, &old_root_bits, *leaf)?; + verify_membership_proof(cs, &self.proof.membership_proof, &new_root_bits, *leaf)?; let mut z_next = vec![new_root]; z_next.push(rom_index_next); @@ -109,89 +107,3 @@ impl StepCircuit for InsertCircuit< 0 } } - -fn allocate_bits_to_binary_number>( - cs: &mut CS, - value: Option>, -) -> Result, SynthesisError> { - let bits = value - .map(|bytes| { - bytes - .iter() - .flat_map(|byte| (0..8).map(move |i| (byte >> i) & 1 == 1)) - .collect::>() - }) - .unwrap_or_else(|| vec![false; 256]); - - let mut result = Vec::new(); - for (i, &bit) in bits.iter().enumerate() { - let allocated_bit = AllocatedBit::alloc(cs.namespace(|| format!("bit {}", i)), Some(bit))?; - result.push(Boolean::from(allocated_bit)); - } - Ok(result) -} - -// fn verify_non_membership_proof>( -// mut cs: CS, -// proof: &NonMembershipProof, -// root: &[Boolean], -// key: &[Boolean], -// ) -> Result<(), SynthesisError> { -// // 1. Hash the key -// let key_hash = sha256(cs.namespace(|| "hash key"), key)?; - -// // 2. Traverse the Merkle path - -// // 3. Check that the computed root does not match the given root - -// Ok(()) -// } - -fn hash_node>( - cs: &mut CS, - node: &SparseMerkleNode, -) -> Result, SynthesisError> { - match node { - SparseMerkleNode::Leaf(node) => { - let node_bits = allocate_bits_to_binary_number(cs, Some(node.to_bytes()))?; - sha256(cs.namespace(|| "hash key"), &node_bits) - } - SparseMerkleNode::Internal(node) => { - let node_bits = allocate_bits_to_binary_number(cs, Some(node.to_bytes()))?; - sha256(cs.namespace(|| "hash key"), &node_bits) - } - SparseMerkleNode::Null => allocate_bits_to_binary_number( - cs, - Some(SPARSE_MERKLE_PLACEHOLDER_HASH.to_bytes().to_vec()), - ), - } -} - -fn verify_membership_proof>( - cs: &mut CS, - proof: &SparseMerkleProof, - root: &Vec, - leaf: SparseMerkleLeafNode, -) -> Result<(), SynthesisError> { - // let leaf = self.proof.membership_proof.leaf().ok_or(SynthesisError::Unsatisfiable)?; - let mut current = hash_node(cs, &SparseMerkleNode::Leaf(leaf))?; - - for (i, sibling) in proof.siblings().iter().enumerate() { - let sibling_hash = hash_node(cs, sibling)?; - - current = sha256( - cs.namespace(|| format!("hash node {}", i)), - &[current, sibling_hash].concat(), - )?; - } - - for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { - Boolean::enforce_equal( - cs.namespace(|| format!("root bit {} should be equal", i)), - computed_bit, - given_bit, - )?; - } - - Ok(()) -} diff --git a/src/nova/update.rs b/src/nova/update.rs index 62f9ead2..74c10119 100644 --- a/src/nova/update.rs +++ b/src/nova/update.rs @@ -1,11 +1,14 @@ use crate::{ - nova::utils::{next_rom_index_and_pc, Digest as NovaDigest}, + nova::utils::{ + allocate_bits_to_binary_number, next_rom_index_and_pc, verify_membership_proof, + Digest as NovaDigest, + }, tree::UpdateProof, }; use anyhow::Result; use arecibo::supernova::StepCircuit; use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; -use ff::PrimeField; +use ff::{PrimeField, PrimeFieldBits}; #[derive(Clone)] pub struct UpdateCircuit { @@ -14,7 +17,7 @@ pub struct UpdateCircuit { _phantom: 
std::marker::PhantomData, } -impl UpdateCircuit { +impl UpdateCircuit { pub fn new(update_proof: UpdateProof, rom_size: usize) -> Self { Self { update_proof, @@ -26,7 +29,7 @@ impl UpdateCircuit { impl StepCircuit for UpdateCircuit where - F: PrimeField, + F: PrimeField + PrimeFieldBits, { fn arity(&self) -> usize { 2 + self.rom_size // old_root + rom_index + rom[].len() @@ -67,13 +70,26 @@ where .map_err(|_| SynthesisError::Unsatisfiable); let new_root = AllocatedNum::alloc(cs.namespace(|| "new_root"), || new_scalar)?; + // TODO: The provided merkle root is an inclusion proof of the node before the update. + // We actually need to create our own merkle proof by hashing the new node to verify the update + let old_root_bits = + allocate_bits_to_binary_number(cs, self.update_proof.old_root.0.to_vec())?; + cs.enforce( || "z0 == pre_insertion_root", |lc| lc + old_root.get_variable(), |lc| lc + CS::one(), |lc| lc + pre_insertion_root.get_variable(), ); - // // TODO: bellpepper merkle proof gadget + + let update_proof = &self.update_proof.proof.proofs()[0]; + + let leaf = &update_proof + .leaf() + .ok_or(SynthesisError::AssignmentMissing)?; + + verify_membership_proof(cs, update_proof, &old_root_bits, *leaf)?; + self.update_proof .verify() .map_err(|_| SynthesisError::Unsatisfiable)?; diff --git a/src/nova/utils.rs b/src/nova/utils.rs index 38ef1d96..f3289ae1 100644 --- a/src/nova/utils.rs +++ b/src/nova/utils.rs @@ -12,8 +12,14 @@ use bellpepper_core::{ }; use ff::PrimeField; use itertools::Itertools as _; +use jmt::bytes32ext::Bytes32Ext; +use jmt::mock::MockTreeStore; +use jmt::proof::{ + SparseMerkleInternalNode, SparseMerkleLeafNode, SparseMerkleNode, SparseMerkleProof, + INTERNAL_DOMAIN_SEPARATOR, +}; use jmt::RootHash; -use jmt::{mock::MockTreeStore, KeyHash}; +use sha2::Sha256; use std::marker::PhantomData; use std::sync::Arc; @@ -185,3 +191,113 @@ pub fn create_pp() -> PublicParams { let circuit_sequence = EpochCircuitSequence::::new(operations); PublicParams::setup(&circuit_sequence, &*default_ck_hint(), &*default_ck_hint()) } + +pub fn allocate_bits_to_binary_number>( + cs: &mut CS, + value: Vec, +) -> Result, SynthesisError> { + let bits: Vec = value + .iter() + .flat_map(|byte| (0..8).rev().map(move |i| (byte >> i) & 1 == 1)) + .collect(); + + let result: Result, SynthesisError> = bits + .into_iter() + .enumerate() + .map(|(i, bit)| { + let allocated_bit = + AllocatedBit::alloc(cs.namespace(|| format!("bit {}", i)), Some(bit))?; + Ok(Boolean::from(allocated_bit)) + }) + .collect(); + + result +} + +pub fn hash_node>( + cs: &mut CS, + node: &SparseMerkleNode, +) -> Result, SynthesisError> { + match node { + SparseMerkleNode::Leaf(node) => { + let node_bits = allocate_bits_to_binary_number(cs, node.to_bytes())?; + sha256(cs.namespace(|| "hash key"), &node_bits) + } + SparseMerkleNode::Internal(node) => { + let node_bits = allocate_bits_to_binary_number(cs, node.to_bytes())?; + sha256(cs.namespace(|| "hash key"), &node_bits) + } + SparseMerkleNode::Null => { + allocate_bits_to_binary_number(cs, SPARSE_MERKLE_PLACEHOLDER_HASH.to_bytes().to_vec()) + } + } +} + +pub fn verify_membership_proof>( + cs: &mut CS, + proof: &SparseMerkleProof, + root: &Vec, + leaf: SparseMerkleLeafNode, +) -> Result<(), SynthesisError> { + let mut current = hash_node(cs, &SparseMerkleNode::Leaf(leaf))?; + + let element_key = leaf.key_hash; + + for (i, (sibling, key_bit)) in proof + .siblings() + .iter() + .zip( + element_key + .0 + .iter_bits() + .rev() + .skip(256 - proof.siblings().len()), + ) + .enumerate() 
+ { + let sibling_hash = hash_node(cs, sibling)?; + let separator = allocate_bits_to_binary_number(cs, INTERNAL_DOMAIN_SEPARATOR.to_vec())?; + + let mut result = Vec::new(); + if key_bit { + result.extend_from_slice(&separator); + result.extend_from_slice(&sibling_hash); + result.extend_from_slice(¤t); + } else { + result.extend_from_slice(&separator); + result.extend_from_slice(¤t); + result.extend_from_slice(&sibling_hash); + } + + current = sha256( + cs.namespace(|| format!("hash node {}", i)), + result.as_slice(), + )?; + } + + for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { + Boolean::enforce_equal( + cs.namespace(|| format!("root bit {} should be equal", i)), + computed_bit, + given_bit, + )?; + } + + Ok(()) +} + +fn boolvec_to_bytes(value: Vec) -> Vec { + let bits: Vec = value + .iter() + .map(|b| b.get_value().unwrap_or(false)) + .collect(); + + bits.chunks(8) + .map(|chunk| { + chunk + .iter() + .enumerate() + .fold(0u8, |acc, (i, &bit)| acc | ((bit as u8) << i)) + }) + .collect() +} From 06028caf8f0fc78576b5415503bfa7ee0874e595 Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Mon, 9 Sep 2024 15:45:15 +0200 Subject: [PATCH 21/33] i give up --- src/nova/batch.rs | 14 ++-- src/nova/utils.rs | 196 ++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 177 insertions(+), 33 deletions(-) diff --git a/src/nova/batch.rs b/src/nova/batch.rs index 124ab005..182056ff 100644 --- a/src/nova/batch.rs +++ b/src/nova/batch.rs @@ -219,20 +219,20 @@ mod tests { let operations = vec![ ( 0, - EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 3), ), ( 1, - EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 3), ), ( 0, - EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 4), - ), - ( - 1, - EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 3), ), + // ( + // 1, + // EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), + // ), ]; let circuit_sequence = EpochCircuitSequence::::new(operations); diff --git a/src/nova/utils.rs b/src/nova/utils.rs index f3289ae1..dc26e21d 100644 --- a/src/nova/utils.rs +++ b/src/nova/utils.rs @@ -228,53 +228,131 @@ pub fn hash_node>( sha256(cs.namespace(|| "hash key"), &node_bits) } SparseMerkleNode::Null => { - allocate_bits_to_binary_number(cs, SPARSE_MERKLE_PLACEHOLDER_HASH.to_bytes().to_vec()) + let node_bits = allocate_bits_to_binary_number( + cs, + SPARSE_MERKLE_PLACEHOLDER_HASH.to_bytes().to_vec(), + )?; + sha256( + cs.namespace(|| "placeholder"), + &[node_bits.clone(), node_bits.clone(), node_bits.clone()].concat(), + )?; + Ok(node_bits) } } } +// pub fn verify_membership_proof>( +// cs: &mut CS, +// proof: &SparseMerkleProof, +// root: &Vec, +// leaf: SparseMerkleLeafNode, +// ) -> Result<(), SynthesisError> { +// dbg!(proof); +// let mut current = hash_node(cs, &SparseMerkleNode::Leaf(leaf))?; + +// let element_key = leaf.key_hash; + +// for (i, (sibling, key_bit)) in proof +// .siblings() +// .iter() +// .zip( +// element_key +// .0 +// .iter_bits() +// .rev() +// .skip(256 - proof.siblings().len()), +// ) +// .enumerate() +// { +// let sibling_hash = hash_node(cs, sibling)?; +// let separator = allocate_bits_to_binary_number(cs, INTERNAL_DOMAIN_SEPARATOR.to_vec())?; + +// let mut result = Vec::new(); 
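Note, not part of the patch series: the membership gadget above rebuilds the JMT root from the leaf by hashing domain_separator || left_child || right_child at every level, with the corresponding bit of the key hash deciding which side the running hash sits on, mirroring jmt's native SparseMerkleProof verification. The rewrite that follows pads the loop to a fixed max_depth, presumably so the SuperNova step circuit keeps a constant constraint count and witness length for proofs of any depth. A minimal out-of-circuit sketch of the same fold, assuming SHA-256 and an illustrative separator constant (the circuit allocates jmt's INTERNAL_DOMAIN_SEPARATOR instead):

// Out-of-circuit reference for the fold the gadget performs (illustrative only).
use sha2::{Digest as _, Sha256};

// Assumed placeholder, not the real jmt constant.
const SEPARATOR: &[u8] = b"jmt-internal-node";

// `siblings` run from the lowest tree level upwards and `key_bits` are the
// matching bits of the key hash (true means the current node is the right
// child), mirroring the zip over proof.siblings() in the gadget.
fn recompute_root(leaf_hash: [u8; 32], siblings: &[[u8; 32]], key_bits: &[bool]) -> [u8; 32] {
    let mut current = leaf_hash;
    for (sibling, &bit) in siblings.iter().zip(key_bits) {
        let mut hasher = Sha256::new();
        hasher.update(SEPARATOR);
        if bit {
            hasher.update(sibling);
            hasher.update(&current);
        } else {
            hasher.update(&current);
            hasher.update(sibling);
        }
        current = hasher.finalize().into();
    }
    current
}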
+// if key_bit { +// result.extend_from_slice(&separator); +// result.extend_from_slice(&sibling_hash); +// result.extend_from_slice(¤t); +// } else { +// result.extend_from_slice(&separator); +// result.extend_from_slice(¤t); +// result.extend_from_slice(&sibling_hash); +// } + +// current = sha256( +// cs.namespace(|| format!("hash node {}", i)), +// result.as_slice(), +// )?; +// } + +// for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { +// Boolean::enforce_equal( +// cs.namespace(|| format!("root bit {} should be equal", i)), +// computed_bit, +// given_bit, +// )?; +// } + +// Ok(()) +// } + pub fn verify_membership_proof>( cs: &mut CS, proof: &SparseMerkleProof, root: &Vec, leaf: SparseMerkleLeafNode, ) -> Result<(), SynthesisError> { - let mut current = hash_node(cs, &SparseMerkleNode::Leaf(leaf))?; + let max_depth = 10; + let actual_depth = proof.siblings().len(); + let mut current = hash_node(cs, &SparseMerkleNode::Leaf(leaf))?; let element_key = leaf.key_hash; - for (i, (sibling, key_bit)) in proof - .siblings() - .iter() - .zip( - element_key - .0 - .iter_bits() - .rev() - .skip(256 - proof.siblings().len()), - ) - .enumerate() - { - let sibling_hash = hash_node(cs, sibling)?; + for i in 0..max_depth { + let cs = &mut cs.namespace(|| format!("proof step {}", i)); + + // Allocate sibling hash (use placeholder if beyond actual proof depth) + let sibling_hash = if i < actual_depth { + hash_node(cs, &proof.siblings()[i])? + } else { + let bits = allocate_bits_to_binary_number( + cs, + SPARSE_MERKLE_PLACEHOLDER_HASH.to_bytes().to_vec(), + )?; + sha256( + cs.namespace(|| "placeholder"), + &[bits.clone(), bits.clone(), bits.clone()].concat(), + )?; + bits + }; + + // Get the key bit + let key_bit = if i < actual_depth { + element_key.0.iter_bits().rev().nth(255 - i).unwrap() + } else { + false + }; + let separator = allocate_bits_to_binary_number(cs, INTERNAL_DOMAIN_SEPARATOR.to_vec())?; - let mut result = Vec::new(); + let mut hash_input = Vec::new(); if key_bit { - result.extend_from_slice(&separator); - result.extend_from_slice(&sibling_hash); - result.extend_from_slice(¤t); + hash_input.extend_from_slice(&separator); + hash_input.extend_from_slice(&sibling_hash); + hash_input.extend_from_slice(¤t); } else { - result.extend_from_slice(&separator); - result.extend_from_slice(¤t); - result.extend_from_slice(&sibling_hash); + hash_input.extend_from_slice(&separator); + hash_input.extend_from_slice(¤t); + hash_input.extend_from_slice(&sibling_hash); } - current = sha256( - cs.namespace(|| format!("hash node {}", i)), - result.as_slice(), - )?; + let hashed = sha256(cs.namespace(|| "hash node"), &hash_input)?; + + if i < actual_depth { + current = hashed; + } } + // Final equality check for (i, (computed_bit, given_bit)) in current.iter().zip(root.iter()).enumerate() { Boolean::enforce_equal( cs.namespace(|| format!("root bit {} should be equal", i)), @@ -286,6 +364,72 @@ pub fn verify_membership_proof> Ok(()) } +// Helper function to conditionally swap two vectors of Booleans +fn conditionally_swap>( + cs: &mut CS, + a: &[Boolean], + b: &[Boolean], + condition: &Boolean, +) -> Result<(Vec, Vec), SynthesisError> { + let mut left = Vec::with_capacity(a.len()); + let mut right = Vec::with_capacity(a.len()); + + for (i, (a_bit, b_bit)) in a.iter().zip(b.iter()).enumerate() { + let (left_bit, right_bit) = { + let and1 = Boolean::and(cs.namespace(|| "condition and a"), condition, a_bit)?; + let and2 = Boolean::and( + cs.namespace(|| "not condition a and b"), 
+ &condition.not(), + b_bit, + )?; + + let left = Boolean::xor(cs.namespace(|| "left xor"), &and1, &and2)?; + + let and3 = Boolean::and(cs.namespace(|| "condition and b"), condition, b_bit)?; + let and4 = Boolean::and( + cs.namespace(|| "not condition and a"), + &condition.not(), + a_bit, + )?; + let right = Boolean::xor(cs.namespace(|| "right xor"), &and3, &and4)?; + + (left, right) + }; + + left.push(left_bit); + right.push(right_bit); + } + + Ok((left, right)) +} + +// Helper function to conditionally select between two vectors of Booleans +fn conditionally_select_vector>( + cs: &mut CS, + condition: &Boolean, + a: &[Boolean], + b: &[Boolean], +) -> Result, SynthesisError> { + assert_eq!(a.len(), b.len()); + let mut result = Vec::with_capacity(a.len()); + + for (i, (a_bit, b_bit)) in a.iter().zip(b.iter()).enumerate() { + let cs = &mut cs.namespace(|| format!("select bit {}", i)); + let and1 = Boolean::and(cs.namespace(|| "condition and a"), condition, a_bit)?; + let and2 = Boolean::and( + cs.namespace(|| "not condition and b"), + &Boolean::not(condition), + b_bit, + )?; + + let selected_bit = Boolean::xor(cs.namespace(|| "xor"), &and1, &and2)?; + + result.push(selected_bit); + } + + Ok(result) +} + fn boolvec_to_bytes(value: Vec) -> Vec { let bits: Vec = value .iter() From 9a32adf43c7d0fd018667b0bc9ec904ff4bf195b Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 10:12:46 +0200 Subject: [PATCH 22/33] restructuring to crates --- Cargo.lock | 411 +++--------------- Cargo.toml | 27 +- crates/common/Cargo.toml | 24 + .../common/src/hashchain.rs | 83 +--- crates/common/src/lib.rs | 3 + crates/common/src/operation.rs | 67 +++ src/tree/mod.rs => crates/common/src/tree.rs | 64 ++- crates/nova/Cargo.toml | 36 ++ {src/nova => crates/nova/src}/batch.rs | 5 +- {src/nova => crates/nova/src}/insert.rs | 9 +- src/nova/mod.rs => crates/nova/src/lib.rs | 0 {src/nova => crates/nova/src}/update.rs | 10 +- {src/nova => crates/nova/src}/utils.rs | 22 +- crates/prism/Cargo.toml | 58 +++ {src => crates/prism/src}/cfg.rs | 10 - {src => crates/prism/src}/consts.rs | 0 {src => crates/prism/src}/da/celestia.rs | 11 +- {src => crates/prism/src}/da/memory.rs | 6 +- {src => crates/prism/src}/da/mod.rs | 8 +- {src => crates/prism/src}/error.rs | 0 {src => crates/prism/src}/lib.rs | 3 - {src => crates/prism/src}/main.rs | 28 +- .../prism/src}/node_types/lightclient.rs | 0 {src => crates/prism/src}/node_types/mod.rs | 0 .../prism/src}/node_types/sequencer.rs | 9 +- {src => crates/prism/src}/storage.rs | 8 +- {src => crates/prism/src}/utils.rs | 7 +- {src => crates/prism/src}/webserver.rs | 2 +- 28 files changed, 362 insertions(+), 549 deletions(-) create mode 100644 crates/common/Cargo.toml rename src/common.rs => crates/common/src/hashchain.rs (65%) create mode 100644 crates/common/src/lib.rs create mode 100644 crates/common/src/operation.rs rename src/tree/mod.rs => crates/common/src/tree.rs (90%) create mode 100644 crates/nova/Cargo.toml rename {src/nova => crates/nova/src}/batch.rs (98%) rename {src/nova => crates/nova/src}/insert.rs (94%) rename src/nova/mod.rs => crates/nova/src/lib.rs (100%) rename {src/nova => crates/nova/src}/update.rs (94%) rename {src/nova => crates/nova/src}/utils.rs (96%) create mode 100644 crates/prism/Cargo.toml rename {src => crates/prism/src}/cfg.rs (95%) rename {src => crates/prism/src}/consts.rs (100%) rename {src => crates/prism/src}/da/celestia.rs (97%) rename {src => crates/prism/src}/da/memory.rs (97%) rename {src => crates/prism/src}/da/mod.rs (95%) rename 
{src => crates/prism/src}/error.rs (100%) rename {src => crates/prism/src}/lib.rs (78%) rename {src => crates/prism/src}/main.rs (72%) rename {src => crates/prism/src}/node_types/lightclient.rs (100%) rename {src => crates/prism/src}/node_types/mod.rs (100%) rename {src => crates/prism/src}/node_types/sequencer.rs (99%) rename {src => crates/prism/src}/storage.rs (98%) rename {src => crates/prism/src}/utils.rs (98%) rename {src => crates/prism/src}/webserver.rs (99%) diff --git a/Cargo.lock b/Cargo.lock index 54eb0a98..18bba647 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,188 +19,6 @@ dependencies = [ "syn 2.0.72", ] -[[package]] -name = "actix-codec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" -dependencies = [ - "bitflags 2.6.0", - "bytes", - "futures-core", - "futures-sink", - "memchr", - "pin-project-lite", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "actix-http" -version = "3.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae682f693a9cd7b058f2b0b5d9a6d7728a8555779bedbbc35dd88528611d020" -dependencies = [ - "actix-codec", - "actix-rt", - "actix-service", - "actix-utils", - "ahash", - "base64 0.22.1", - "bitflags 2.6.0", - "brotli", - "bytes", - "bytestring", - "derive_more", - "encoding_rs", - "flate2", - "futures-core", - "h2", - "http", - "httparse", - "httpdate", - "itoa", - "language-tags", - "local-channel", - "mime", - "percent-encoding", - "pin-project-lite", - "rand", - "sha1", - "smallvec", - "tokio", - "tokio-util", - "tracing", - "zstd", -] - -[[package]] -name = "actix-macros" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" -dependencies = [ - "quote", - "syn 2.0.72", -] - -[[package]] -name = "actix-router" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" -dependencies = [ - "bytestring", - "cfg-if", - "http", - "regex", - "regex-lite", - "serde", - "tracing", -] - -[[package]] -name = "actix-rt" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208" -dependencies = [ - "futures-core", - "tokio", -] - -[[package]] -name = "actix-server" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b02303ce8d4e8be5b855af6cf3c3a08f3eff26880faad82bab679c22d3650cb5" -dependencies = [ - "actix-rt", - "actix-service", - "actix-utils", - "futures-core", - "futures-util", - "mio 0.8.11", - "socket2 0.5.7", - "tokio", - "tracing", -] - -[[package]] -name = "actix-service" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a" -dependencies = [ - "futures-core", - "paste", - "pin-project-lite", -] - -[[package]] -name = "actix-utils" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" -dependencies = [ - "local-waker", - "pin-project-lite", -] - -[[package]] -name = "actix-web" -version = "4.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1988c02af8d2b718c05bc4aeb6a66395b7cdf32858c2c71131e5637a8c05a9ff" -dependencies = [ - "actix-codec", - "actix-http", - "actix-macros", - "actix-router", - "actix-rt", - "actix-server", - "actix-service", - "actix-utils", - "actix-web-codegen", - "ahash", - "bytes", - "bytestring", - "cfg-if", - "cookie", - "derive_more", - "encoding_rs", - "futures-core", - "futures-util", - "itoa", - "language-tags", - "log", - "mime", - "once_cell", - "pin-project-lite", - "regex", - "regex-lite", - "serde", - "serde_json", - "serde_urlencoded", - "smallvec", - "socket2 0.5.7", - "time", - "url", -] - -[[package]] -name = "actix-web-codegen" -version = "4.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" -dependencies = [ - "actix-router", - "proc-macro2", - "quote", - "syn 2.0.72", -] - [[package]] name = "addchain" version = "0.2.0" @@ -269,7 +87,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom", "once_cell", "version_check", "zerocopy", @@ -284,21 +101,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - [[package]] name = "alloy-rlp" version = "0.3.7" @@ -943,27 +745,6 @@ dependencies = [ "syn_derive", ] -[[package]] -name = "brotli" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "4.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - [[package]] name = "bs58" version = "0.5.1" @@ -1000,15 +781,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bytestring" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d80203ea6b29df88012294f62733de21cfeab47f17b41af3a38bc30a03ee72" -dependencies = [ - "bytes", -] - [[package]] name = "cast" version = "0.3.0" @@ -1020,10 +792,6 @@ name = "cc" version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26a5c3fd7bfa1ce3897a3a3501d362b2d87b7f2583ebcb4a949ec25911025cbc" -dependencies = [ - "jobserver", - "libc", -] [[package]] name = "celestia-proto" @@ -1333,17 +1101,6 @@ dependencies = [ "unicode-segmentation", ] -[[package]] -name = "cookie" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" -dependencies = [ - "percent-encoding", - "time", - "version_check", -] - [[package]] name = "core-foundation" version = "0.9.4" @@ -1731,15 +1488,6 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" 
-[[package]] -name = "encoding_rs" -version = "0.8.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" -dependencies = [ - "cfg-if", -] - [[package]] name = "enum_dispatch" version = "0.3.13" @@ -2465,15 +2213,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "jobserver" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" -dependencies = [ - "libc", -] - [[package]] name = "js-sys" version = "0.3.69" @@ -2642,12 +2381,6 @@ dependencies = [ "security-framework", ] -[[package]] -name = "language-tags" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" - [[package]] name = "lazy_static" version = "1.5.0" @@ -2717,23 +2450,6 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" -[[package]] -name = "local-channel" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" -dependencies = [ - "futures-core", - "futures-sink", - "local-waker", -] - -[[package]] -name = "local-waker" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" - [[package]] name = "lock_api" version = "0.4.12" @@ -2793,18 +2509,6 @@ dependencies = [ "adler", ] -[[package]] -name = "mio" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" -dependencies = [ - "libc", - "log", - "wasi", - "windows-sys 0.48.0", -] - [[package]] name = "mio" version = "1.0.1" @@ -3297,12 +3001,6 @@ dependencies = [ "spki", ] -[[package]] -name = "pkg-config" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" - [[package]] name = "plotters" version = "0.3.6" @@ -3416,20 +3114,30 @@ dependencies = [ ] [[package]] -name = "prism" +name = "prism-common" +version = "0.1.0" +dependencies = [ + "anyhow", + "blake2", + "bls12_381", + "borsh", + "celestia-types", + "hex", + "jmt", + "serde", + "sha2 0.10.8", +] + +[[package]] +name = "prism-main" version = "0.1.0" dependencies = [ - "actix-web", "anyhow", - "arecibo", "async-trait", "auto_impl", "axum", "base64 0.22.1", "bellman", - "bellpepper 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "bellpepper-core", - "bincode", "bls12_381", "borsh", "celestia-rpc", @@ -3444,13 +3152,12 @@ dependencies = [ "ff", "hex", "indexed-merkle-tree", - "itertools 0.13.0", "jmt", "keystore-rs", "log", "mockall", - "num", "pretty_env_logger", + "prism-common", "rand", "redis", "serde", @@ -3465,6 +3172,34 @@ dependencies = [ "utoipa-swagger-ui", ] +[[package]] +name = "prism-nova" +version = "0.1.0" +dependencies = [ + "anyhow", + "arecibo", + "base64 0.22.1", + "bellpepper 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bellpepper-core", + "bincode", + "bls12_381", + "borsh", + "celestia-types", + "ed25519", + "ff", + "hex", + "indexed-merkle-tree", + "itertools 0.13.0", + "jmt", + "num", + "prism-common", + 
"rand", + "serde", + "serde_json", + "sha2 0.10.8", + "thiserror", +] + [[package]] name = "proc-macro-crate" version = "1.3.1" @@ -3766,12 +3501,6 @@ dependencies = [ "regex-syntax", ] -[[package]] -name = "regex-lite" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" - [[package]] name = "regex-syntax" version = "0.8.4" @@ -4251,17 +3980,6 @@ dependencies = [ "opaque-debug", ] -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - [[package]] name = "sha1_smol" version = "1.0.1" @@ -4284,8 +4002,7 @@ dependencies = [ [[package]] name = "sha2" version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +source = "git+https://github.com/sp1-patches/RustCrypto-hashes?branch=patch-sha2-v0.10.8#1f224388fdede7cef649bce0d63876d1a9e3f515" dependencies = [ "cfg-if", "cpufeatures", @@ -4617,7 +4334,7 @@ dependencies = [ "backtrace", "bytes", "libc", - "mio 1.0.1", + "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", @@ -5372,30 +5089,12 @@ dependencies = [ "flate2", ] -[[package]] -name = "zstd" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" -dependencies = [ - "zstd-sys", -] +[[patch.unused]] +name = "sha2" +version = "0.10.6" +source = "git+https://github.com/sp1-patches/RustCrypto-hashes?branch=patch-sha2-v0.10.6#e5f8b7eaaa9801503bd998932a52b65848eee234" -[[package]] -name = "zstd-sys" -version = "2.0.13+zstd.1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" -dependencies = [ - "cc", - "pkg-config", -] +[[patch.unused]] +name = "sha2" +version = "0.9.8" +source = "git+https://github.com/sp1-patches/RustCrypto-hashes?branch=patch-sha2-v0.9.8#afdbfb09c325f8a69c01d540ec9a261e3637725d" diff --git a/Cargo.toml b/Cargo.toml index 3eb82789..9c5aa755 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,4 +1,4 @@ -[package] +[workspace.package] name = "prism" version = "0.1.0" authors = [ @@ -13,11 +13,11 @@ license = "MIT" keywords = ["crypto", "key-transparency"] readme = "README.md" -[features] -default = [] -key_transparency = [] +[workspace] +members = ["crates/prism", "crates/common", "crates/nova"] +resolver = "2" -[dependencies] +[workspace.dependencies] axum = "0.6" borsh = { version = "1.5.1", features = ["derive"] } tower-http = { version = "0.4", features = ["cors"] } @@ -27,7 +27,6 @@ async-trait = "0.1.68" serde = { version = "1.0.151", features = ["derive"] } serde_json = "1.0.79" redis = "0.24.0" -actix-web = { version = "4.4.1" } ed25519-dalek = "2.1.0" ed25519 = "2.2.0" base64 = "0.22.0" @@ -62,11 +61,19 @@ itertools = "0.13.0" # zip_eq sha2 = "0.10.8" auto_impl = "1.2.0" bincode = "1.3.3" +blake2 = "0.10.6" +prism-common = { path = "crates/common" } +prism-nova = { path = "crates/nova" } -[dev-dependencies] 
+[patch.crates-io] +sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } +sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } +sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } + +[workspace.dev-dependencies] serial_test = "3.1.1" criterion = "0.5.1" -[[bench]] -name = "zk_benchmarks" -harness = false +# [[bench]] +# name = "zk_benchmarks" +# harness = false diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml new file mode 100644 index 00000000..bc274c96 --- /dev/null +++ b/crates/common/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "prism-common" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +anyhow.workspace = true +bls12_381.workspace = true +borsh.workspace = true +jmt.workspace = true +serde.workspace = true +hex.workspace = true +sha2.workspace = true +blake2.workspace = true +celestia-types.workspace = true + +[patch.crates-io] +sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } +blake2 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "blake2", branch = "master" } +sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } +sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } diff --git a/src/common.rs b/crates/common/src/hashchain.rs similarity index 65% rename from src/common.rs rename to crates/common/src/hashchain.rs index 224ceb6a..f0ea9795 100644 --- a/src/common.rs +++ b/crates/common/src/hashchain.rs @@ -1,67 +1,13 @@ use anyhow::{bail, Result}; use borsh::{BorshDeserialize, BorshSerialize}; -use ff::derive::bitvec::view::AsBits; use jmt::KeyHash; use serde::{Deserialize, Serialize}; -use std::{ - fmt::Display, - ops::{Deref, DerefMut}, -}; - -use crate::tree::{hash, Digest, Hasher}; - -#[derive(Clone, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, PartialEq)] -// An [`Operation`] represents a state transition in the system. -// In a blockchain analogy, this would be the full set of our transaction types. -pub enum Operation { - // Creates a new account with the given id and value. - CreateAccount { - id: String, - value: String, - source: AccountSource, - }, - // Adds a value to an existing account. - Add { - id: String, - value: String, - }, - // Revokes a value from an existing account. - Revoke { - id: String, - value: String, - }, -} - -#[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)] -// An [`AccountSource`] represents the source of an account. See adr-002 for more information. -pub enum AccountSource { - SignedBySequencer { signature: String }, -} - -impl Operation { - pub fn id(&self) -> String { - match self { - Operation::CreateAccount { id, .. } => id.clone(), - Operation::Add { id, .. } => id.clone(), - Operation::Revoke { id, .. } => id.clone(), - } - } - - pub fn value(&self) -> String { - match self { - Operation::CreateAccount { value, .. } => value.clone(), - Operation::Add { value, .. } => value.clone(), - Operation::Revoke { value, .. 
} => value.clone(), - } - } -} +use std::ops::{Deref, DerefMut}; -impl Display for Operation { - // just print the debug - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", self) - } -} +use crate::{ + operation::{AccountSource, Operation}, + tree::{hash, Digest, Hasher}, +}; #[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)] pub struct Hashchain { @@ -135,11 +81,6 @@ impl Hashchain { self.push(operation) } - pub fn to_bytes(&self) -> Vec { - self.last() - .map_or(Vec::new(), |last_entry| last_entry.to_bytes()) - } - pub fn get(&self, idx: usize) -> &HashchainEntry { &self.entries[idx] } @@ -184,6 +125,10 @@ impl Hashchain { KeyHash::with::(self.id.clone()) } + pub fn is_empty(&self) -> bool { + self.entries.is_empty() + } + pub fn len(&self) -> usize { self.entries.len() } @@ -213,14 +158,4 @@ impl HashchainEntry { operation, } } - - pub fn to_bytes(&self) -> Vec { - let mut bytes = Vec::new(); - - bytes.extend_from_slice(self.hash.as_ref()); - bytes.extend_from_slice(self.previous_hash.as_ref()); - bytes.extend_from_slice(self.operation.to_string().as_bytes()); - - bytes - } } diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs new file mode 100644 index 00000000..46e99a98 --- /dev/null +++ b/crates/common/src/lib.rs @@ -0,0 +1,3 @@ +pub mod hashchain; +pub mod operation; +pub mod tree; diff --git a/crates/common/src/operation.rs b/crates/common/src/operation.rs new file mode 100644 index 00000000..f0ecad5c --- /dev/null +++ b/crates/common/src/operation.rs @@ -0,0 +1,67 @@ +use anyhow::{Context, Result}; +use borsh::{BorshDeserialize, BorshSerialize}; +use celestia_types::Blob; +use serde::{Deserialize, Serialize}; +use std::fmt::Display; + +#[derive(Clone, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, PartialEq)] +// An [`Operation`] represents a state transition in the system. +// In a blockchain analogy, this would be the full set of our transaction types. +pub enum Operation { + // Creates a new account with the given id and value. + CreateAccount { + id: String, + value: String, + source: AccountSource, + }, + // Adds a value to an existing account. + Add { + id: String, + value: String, + }, + // Revokes a value from an existing account. + Revoke { + id: String, + value: String, + }, +} + +#[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)] +// An [`AccountSource`] represents the source of an account. See adr-002 for more information. +pub enum AccountSource { + SignedBySequencer { signature: String }, +} + +impl Operation { + pub fn id(&self) -> String { + match self { + Operation::CreateAccount { id, .. } => id.clone(), + Operation::Add { id, .. } => id.clone(), + Operation::Revoke { id, .. } => id.clone(), + } + } + + pub fn value(&self) -> String { + match self { + Operation::CreateAccount { value, .. } => value.clone(), + Operation::Add { value, .. } => value.clone(), + Operation::Revoke { value, .. 
} => value.clone(), + } + } +} + +impl Display for Operation { + // just print the debug + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self) + } +} + +impl TryFrom<&Blob> for Operation { + type Error = anyhow::Error; + + fn try_from(value: &Blob) -> Result { + borsh::from_slice::(&value.data) + .context(format!("Failed to decode blob into Operation: {value:?}")) + } +} diff --git a/src/tree/mod.rs b/crates/common/src/tree.rs similarity index 90% rename from src/tree/mod.rs rename to crates/common/src/tree.rs index c8d6fdb7..42e00d35 100644 --- a/src/tree/mod.rs +++ b/crates/common/src/tree.rs @@ -1,20 +1,20 @@ -use anyhow::{anyhow, bail, ensure, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use bls12_381::Scalar; use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize}; use jmt::{ proof::{SparseMerkleProof, UpdateMerkleProof}, storage::{NodeBatch, TreeReader, TreeUpdateBatch, TreeWriter}, - JellyfishMerkleTree, KeyHash, RootHash, Sha256Jmt, SimpleHasher, + JellyfishMerkleTree, KeyHash, RootHash, SimpleHasher, }; use serde::{Deserialize, Serialize}; use std::sync::Arc; -use crate::common::Hashchain; +use crate::hashchain::Hashchain; pub const SPARSE_MERKLE_PLACEHOLDER_HASH: Digest = Digest::new(*b"SPARSE_MERKLE_PLACEHOLDER_HASH__"); -pub type Hasher = sha2::Sha256; +pub type Hasher = blake2::Blake2s256; #[derive( Debug, Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, PartialEq, Eq, Copy, @@ -23,7 +23,7 @@ pub struct Digest([u8; 32]); impl Digest { pub fn to_bytes(&self) -> [u8; 32] { - return self.0; + self.0 } } @@ -98,11 +98,55 @@ impl Digest { } pub fn hash(data: &[u8]) -> Digest { - let mut hasher = sha2::Sha256::new(); + let mut hasher = blake2::Blake2s256::new(); hasher.update(data); Digest(hasher.finalize()) } +#[derive(Serialize, Deserialize)] +pub struct Batch { + pub prev_root: Digest, + pub new_root: Digest, + + pub proofs: Vec, +} + +impl Serialize for Proof { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let bytes = borsh::to_vec(self).map_err(serde::ser::Error::custom)?; + serializer.serialize_bytes(&bytes) + } +} + +impl<'de> Deserialize<'de> for Proof { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct ProofVisitor; + + impl<'de> serde::de::Visitor<'de> for ProofVisitor { + type Value = Proof; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a byte array containing Borsh-serialized Proof") + } + + fn visit_bytes(self, v: &[u8]) -> Result + where + E: serde::de::Error, + { + Proof::try_from_slice(v).map_err(serde::de::Error::custom) + } + } + + deserializer.deserialize_bytes(ProofVisitor) + } +} + #[derive(BorshSerialize, BorshDeserialize, Debug, Clone)] pub enum Proof { Update(UpdateProof), @@ -135,7 +179,7 @@ impl InsertProof { pub fn verify(&self) -> Result<()> { self.non_membership_proof .verify() - .context("Invalid NonMembershipProof"); + .context("Invalid NonMembershipProof")?; let value = to_vec(&self.value).unwrap(); @@ -143,7 +187,7 @@ impl InsertProof { self.new_root.into(), self.non_membership_proof.key, value, - ); + )?; Ok(()) } @@ -195,7 +239,7 @@ where pub fn new(store: Arc) -> Self { let tree = Self { db: store.clone(), - jmt: Sha256Jmt::new(store), + jmt: JellyfishMerkleTree::, Hasher>::new(store), pending_batch: None, epoch: 0, }; @@ -227,7 +271,7 @@ where Ok(()) } - fn get_current_root(&self) -> Result { + pub fn 
get_current_root(&self) -> Result { self.jmt .get_root_hash(self.epoch) .map_err(|e| anyhow!("Failed to get root hash: {}", e)) diff --git a/crates/nova/Cargo.toml b/crates/nova/Cargo.toml new file mode 100644 index 00000000..272da1de --- /dev/null +++ b/crates/nova/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "prism-nova" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +borsh = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +ed25519 = { workspace = true } +base64 = { workspace = true } +num = { workspace = true } +bls12_381 = { workspace = true } +rand = { workspace = true } +hex = { workspace = true } +ff = { workspace = true } +thiserror = { workspace = true } +indexed-merkle-tree = { workspace = true } +celestia-types = { workspace = true } +anyhow = { workspace = true } +jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +bellpepper-core = { workspace = true } +bellpepper = { workspace = true } +arecibo = { workspace = true, git = "https://github.com/deltadevsde/arecibo" } +itertools = { workspace = true } +sha2 = { workspace = true } +bincode = { workspace = true } +prism-common = { workspace = true } + +[patch.crates-io] +sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } +sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } +sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } diff --git a/src/nova/batch.rs b/crates/nova/src/batch.rs similarity index 98% rename from src/nova/batch.rs rename to crates/nova/src/batch.rs index 182056ff..c31d3bb1 100644 --- a/src/nova/batch.rs +++ b/crates/nova/src/batch.rs @@ -1,10 +1,10 @@ use super::{insert::InsertCircuit, update::UpdateCircuit}; -use crate::tree::{InsertProof, UpdateProof}; use arecibo::{ supernova::{StepCircuit, TrivialSecondaryCircuit}, traits::{CurveCycleEquipped, Dual, Engine}, }; use ff::{PrimeField, PrimeFieldBits}; +use prism_common::tree::{InsertProof, UpdateProof}; use std::cell::RefCell; #[derive(Clone)] @@ -136,7 +136,7 @@ impl StepCircuit for EpochCircuit { #[cfg(test)] mod tests { use super::*; - use crate::{common::Hashchain, nova::utils::Digest as NovaDigest, tree::*}; + use crate::utils::Digest as NovaDigest; use arecibo::{ provider::PallasEngine, supernova::{NonUniformCircuit, PublicParams, RecursiveSNARK}, @@ -144,6 +144,7 @@ mod tests { }; use ff::Field; use jmt::{mock::MockTreeStore, KeyHash}; + use prism_common::{hashchain::Hashchain, tree::*}; use rand::{rngs::StdRng, Rng, SeedableRng}; use std::sync::Arc; diff --git a/src/nova/insert.rs b/crates/nova/src/insert.rs similarity index 94% rename from src/nova/insert.rs rename to crates/nova/src/insert.rs index 48306cec..c7364b9d 100644 --- a/src/nova/insert.rs +++ b/crates/nova/src/insert.rs @@ -1,14 +1,11 @@ -use crate::{ - nova::utils::{ - allocate_bits_to_binary_number, next_rom_index_and_pc, verify_membership_proof, Digest, - }, - tree::InsertProof, +use crate::utils::{ + allocate_bits_to_binary_number, next_rom_index_and_pc, verify_membership_proof, Digest, }; use anyhow::Result; use arecibo::supernova::StepCircuit; use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; use ff::{PrimeField, PrimeFieldBits}; -use sha2::Sha256; +use prism_common::tree::InsertProof; 
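Note, not part of the patch: the hand-written Serialize/Deserialize impls for Proof in crates/common/src/tree.rs above route through Borsh, so any serde data format sees a Proof as a single opaque byte string. A minimal round-trip sketch, assuming a binary format with first-class byte-string support such as bincode (already a workspace dependency):

use prism_common::tree::Batch;

// Batch derives Serialize/Deserialize, so its `proofs` field is carried as the
// Borsh bytes produced by the custom impls above.
fn roundtrip(batch: &Batch) -> Result<Batch, bincode::Error> {
    let bytes = bincode::serialize(batch)?;
    bincode::deserialize(&bytes)
}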
#[derive(Clone)] pub struct InsertCircuit { diff --git a/src/nova/mod.rs b/crates/nova/src/lib.rs similarity index 100% rename from src/nova/mod.rs rename to crates/nova/src/lib.rs diff --git a/src/nova/update.rs b/crates/nova/src/update.rs similarity index 94% rename from src/nova/update.rs rename to crates/nova/src/update.rs index 74c10119..240b9159 100644 --- a/src/nova/update.rs +++ b/crates/nova/src/update.rs @@ -1,14 +1,12 @@ -use crate::{ - nova::utils::{ - allocate_bits_to_binary_number, next_rom_index_and_pc, verify_membership_proof, - Digest as NovaDigest, - }, - tree::UpdateProof, +use crate::utils::{ + allocate_bits_to_binary_number, next_rom_index_and_pc, verify_membership_proof, + Digest as NovaDigest, }; use anyhow::Result; use arecibo::supernova::StepCircuit; use bellpepper_core::{num::AllocatedNum, ConstraintSystem, SynthesisError}; use ff::{PrimeField, PrimeFieldBits}; +use prism_common::tree::UpdateProof; #[derive(Clone)] pub struct UpdateCircuit { diff --git a/src/nova/utils.rs b/crates/nova/src/utils.rs similarity index 96% rename from src/nova/utils.rs rename to crates/nova/src/utils.rs index dc26e21d..cf708561 100644 --- a/src/nova/utils.rs +++ b/crates/nova/src/utils.rs @@ -1,7 +1,5 @@ // use bellpepper_core::ConstraintSystem; -use crate::nova::batch::EpochCircuitSequence; -use crate::tree::*; -use crate::{common::Hashchain, nova::batch::EpochCircuit}; +use crate::batch::{EpochCircuit, EpochCircuitSequence}; use anyhow::Result; use arecibo::{provider::PallasEngine, supernova::PublicParams, traits::snark::default_ck_hint}; use bellpepper::gadgets::sha256::sha256; @@ -12,18 +10,16 @@ use bellpepper_core::{ }; use ff::PrimeField; use itertools::Itertools as _; -use jmt::bytes32ext::Bytes32Ext; -use jmt::mock::MockTreeStore; -use jmt::proof::{ - SparseMerkleInternalNode, SparseMerkleLeafNode, SparseMerkleNode, SparseMerkleProof, - INTERNAL_DOMAIN_SEPARATOR, +use jmt::{ + bytes32ext::Bytes32Ext, + mock::MockTreeStore, + proof::{SparseMerkleLeafNode, SparseMerkleNode, SparseMerkleProof, INTERNAL_DOMAIN_SEPARATOR}, + RootHash, }; -use jmt::RootHash; -use sha2::Sha256; -use std::marker::PhantomData; -use std::sync::Arc; +use prism_common::{hashchain::Hashchain, tree::*}; +use std::{marker::PhantomData, sync::Arc}; -use crate::tree; +use prism_common::tree; pub struct Digest { digest: tree::Digest, diff --git a/crates/prism/Cargo.toml b/crates/prism/Cargo.toml new file mode 100644 index 00000000..fcb38c95 --- /dev/null +++ b/crates/prism/Cargo.toml @@ -0,0 +1,58 @@ +[package] +name = "prism-main" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +axum = { workspace = true } +borsh = { workspace = true } +tower-http = { workspace = true } +utoipa = { workspace = true } +utoipa-swagger-ui = { workspace = true } +async-trait = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +redis = { workspace = true } +ed25519-dalek = { workspace = true } +ed25519 = { workspace = true } +base64 = { workspace = true } +tokio = { workspace = true } +bellman = { workspace = true } +bls12_381 = { workspace = true } +rand = { workspace = true } +hex = { workspace = true } +ff = { workspace = true } +log = { workspace = true } +pretty_env_logger = { workspace = true } +clap = { workspace = true } +config = { workspace = true } +thiserror = { workspace = true } +indexed-merkle-tree = { workspace = true } +dotenvy = { workspace = true } +celestia-rpc = { 
workspace = true } +celestia-types = { workspace = true } +mockall = { workspace = true } +keystore-rs = { workspace = true } +toml = { workspace = true } +dirs = { workspace = true } +anyhow = { workspace = true } +jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +sha2 = { workspace = true } +auto_impl = { workspace = true } +prism-common = { workspace = true } + +[patch.crates-io] +sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } +sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } +sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } + +[dev-dependencies] +serial_test = "3.1.1" +criterion = "0.5.1" + +# [[bench]] +# name = "zk_benchmarks" +# harness = false diff --git a/src/cfg.rs b/crates/prism/src/cfg.rs similarity index 95% rename from src/cfg.rs rename to crates/prism/src/cfg.rs index 9b85419e..ba3f8e6f 100644 --- a/src/cfg.rs +++ b/crates/prism/src/cfg.rs @@ -9,7 +9,6 @@ use config::{builder::DefaultState, ConfigBuilder, File}; use dirs::home_dir; use dotenvy::dotenv; use serde::{Deserialize, Serialize}; -use std::path::PathBuf; use std::{fs, path::Path, sync::Arc}; use crate::da::{celestia::CelestiaConnection, DataAvailabilityLayer}; @@ -18,7 +17,6 @@ use crate::da::{celestia::CelestiaConnection, DataAvailabilityLayer}; pub enum Commands { LightClient, Sequencer, - GeneratePublicParams, } #[derive(Parser, Clone, Debug, Deserialize)] @@ -61,10 +59,6 @@ pub struct CommandLineArgs { #[arg(long)] config_path: Option, - /// Path to the bin file containing serialized PublicParams - #[arg(long)] - public_params_path: Option, - #[command(subcommand)] pub command: Commands, } @@ -78,7 +72,6 @@ pub struct Config { pub da_layer: Option, pub redis_config: Option, pub verifying_key: Option, - pub public_params_path: Option, } #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] @@ -144,7 +137,6 @@ impl Default for Config { celestia_config: Some(CelestiaConfig::default()), redis_config: Some(RedisConfig::default()), verifying_key: None, - public_params_path: None, } } } @@ -211,7 +203,6 @@ fn merge_configs(loaded: Config, default: Config) -> Config { celestia_config: loaded.celestia_config.or(default.celestia_config), da_layer: loaded.da_layer.or(default.da_layer), verifying_key: loaded.verifying_key.or(default.verifying_key), - public_params_path: loaded.public_params_path.or(default.public_params_path), } } @@ -275,7 +266,6 @@ fn apply_command_line_args(config: Config, args: CommandLineArgs) -> Config { }), da_layer: config.da_layer, verifying_key: args.verifying_key.or(config.verifying_key), - public_params_path: args.public_params_path.or(config.public_params_path), } } diff --git a/src/consts.rs b/crates/prism/src/consts.rs similarity index 100% rename from src/consts.rs rename to crates/prism/src/consts.rs diff --git a/src/da/celestia.rs b/crates/prism/src/da/celestia.rs similarity index 97% rename from src/da/celestia.rs rename to crates/prism/src/da/celestia.rs index 27bd53ec..96aabc5f 100644 --- a/src/da/celestia.rs +++ b/crates/prism/src/da/celestia.rs @@ -1,6 +1,5 @@ use crate::{ cfg::CelestiaConfig, - common::Operation, consts::CHANNEL_BUFFER_SIZE, da::{DataAvailabilityLayer, FinalizedEpoch}, error::{DataAvailabilityError, GeneralError}, @@ -10,6 +9,7 @@ use async_trait::async_trait; use borsh::from_slice; use 
celestia_rpc::{BlobClient, Client, HeaderClient}; use celestia_types::{blob::GasPrice, nmt::Namespace, Blob}; +use prism_common::operation::Operation; use std::{self, sync::Arc}; use tokio::{ sync::{ @@ -29,15 +29,6 @@ impl TryFrom<&Blob> for FinalizedEpoch { } } -impl TryFrom<&Blob> for Operation { - type Error = anyhow::Error; - - fn try_from(value: &Blob) -> Result { - from_slice::(&value.data) - .context(format!("Failed to decode blob into Operation: {value:?}")) - } -} - pub struct CelestiaConnection { pub client: celestia_rpc::Client, pub snark_namespace: Namespace, diff --git a/src/da/memory.rs b/crates/prism/src/da/memory.rs similarity index 97% rename from src/da/memory.rs rename to crates/prism/src/da/memory.rs index 6f24fd1b..cc4651ad 100644 --- a/src/da/memory.rs +++ b/crates/prism/src/da/memory.rs @@ -1,9 +1,7 @@ -use crate::{ - common::Operation, - da::{DataAvailabilityLayer, FinalizedEpoch}, -}; +use crate::da::{DataAvailabilityLayer, FinalizedEpoch}; use anyhow::Result; use async_trait::async_trait; +use prism_common::operation::Operation; use std::sync::Arc; use tokio::{ sync::{broadcast, RwLock}, diff --git a/src/da/mod.rs b/crates/prism/src/da/mod.rs similarity index 95% rename from src/da/mod.rs rename to crates/prism/src/da/mod.rs index 70326e5e..81b26357 100644 --- a/src/da/mod.rs +++ b/crates/prism/src/da/mod.rs @@ -1,13 +1,9 @@ -use crate::{ - common::Operation, - error::GeneralError, - tree::Digest, - utils::SignedContent, -}; +use crate::{error::GeneralError, utils::SignedContent}; use anyhow::Result; use async_trait::async_trait; use borsh::{BorshDeserialize, BorshSerialize}; use ed25519::Signature; +use prism_common::{operation::Operation, tree::Digest}; use std::{self, str::FromStr}; pub mod celestia; diff --git a/src/error.rs b/crates/prism/src/error.rs similarity index 100% rename from src/error.rs rename to crates/prism/src/error.rs diff --git a/src/lib.rs b/crates/prism/src/lib.rs similarity index 78% rename from src/lib.rs rename to crates/prism/src/lib.rs index 8ded619a..68db4283 100644 --- a/src/lib.rs +++ b/crates/prism/src/lib.rs @@ -1,12 +1,9 @@ pub mod cfg; -pub mod common; pub mod consts; pub mod da; pub mod error; pub mod node_types; -pub mod nova; pub mod storage; -pub mod tree; pub mod utils; pub mod webserver; #[macro_use] diff --git a/src/main.rs b/crates/prism/src/main.rs similarity index 72% rename from src/main.rs rename to crates/prism/src/main.rs index 4d902b75..8061f149 100644 --- a/src/main.rs +++ b/crates/prism/src/main.rs @@ -1,25 +1,18 @@ mod cfg; -pub mod common; pub mod consts; pub mod da; pub mod error; mod node_types; -mod nova; pub mod storage; -mod tree; mod utils; mod webserver; use cfg::{initialize_da_layer, load_config}; use clap::Parser; use keystore_rs::{KeyChain, KeyStore, KeyStoreType}; -use prism::nova::utils::create_pp; use crate::cfg::{CommandLineArgs, Commands}; -use anyhow::{Context, Result}; -use arecibo::{provider::PallasEngine, supernova::PublicParams}; use node_types::{lightclient::LightClient, sequencer::Sequencer, NodeType}; -use std::io; use std::sync::Arc; use storage::RedisConnection; @@ -27,21 +20,13 @@ use storage::RedisConnection; extern crate log; /// The main function that initializes and runs a prism client. 
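Note, not part of the patch: with TryFrom<&Blob> now implemented on prism_common::operation::Operation (the duplicate impl in celestia.rs above is removed), callers can decode DA blobs directly. A hypothetical helper, for illustration only; whether undecodable blobs are skipped or reported is the caller's policy, not something the patch prescribes:

use celestia_types::Blob;
use prism_common::operation::Operation;

// Decode every blob fetched for a height, silently dropping blobs that are not
// Borsh-encoded Operations.
fn decode_operations(blobs: &[Blob]) -> Vec<Operation> {
    blobs
        .iter()
        .filter_map(|blob| Operation::try_from(blob).ok())
        .collect()
}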
-#[actix_web::main] +#[tokio::main()] async fn main() -> std::io::Result<()> { let args = CommandLineArgs::parse(); let config = load_config(args.clone()) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?; - let pp = load_or_generate_public_params(&config.public_params_path) - .expect("Failed to deserialize or generate public params."); - - if let Commands::GeneratePublicParams {} = args.command { - bincode::serialize_into(io::stdout(), &pp).unwrap(); - return Ok(()); - } - let da = initialize_da_layer(&config) .await .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?; @@ -94,14 +79,3 @@ async fn main() -> std::io::Result<()> { .await .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())) } - -fn load_or_generate_public_params(path: &Option) -> Result> { - if let Some(path) = path { - info!("Loading public params from file: {:?}", path); - let bytes = std::fs::read(path).context("Failed to read public params file")?; - bincode::deserialize(&bytes).context("Failed to deserialize public params") - } else { - warn!("No public params file provided, generating new ones. This may take a while."); - Ok(create_pp()) - } -} diff --git a/src/node_types/lightclient.rs b/crates/prism/src/node_types/lightclient.rs similarity index 100% rename from src/node_types/lightclient.rs rename to crates/prism/src/node_types/lightclient.rs diff --git a/src/node_types/mod.rs b/crates/prism/src/node_types/mod.rs similarity index 100% rename from src/node_types/mod.rs rename to crates/prism/src/node_types/mod.rs diff --git a/src/node_types/sequencer.rs b/crates/prism/src/node_types/sequencer.rs similarity index 99% rename from src/node_types/sequencer.rs rename to crates/prism/src/node_types/sequencer.rs index 08e5d467..15e316b5 100644 --- a/src/node_types/sequencer.rs +++ b/crates/prism/src/node_types/sequencer.rs @@ -1,11 +1,9 @@ -use crate::{ - tree::{hash, Digest, Hasher, KeyDirectoryTree, Proof, SnarkableTree}, -}; use anyhow::{Context, Result}; use async_trait::async_trait; use ed25519::Signature; use ed25519_dalek::{Signer, SigningKey}; use jmt::KeyHash; +use prism_common::tree::{hash, Digest, Hasher, KeyDirectoryTree, Proof, SnarkableTree}; use std::{self, str::FromStr, sync::Arc}; use tokio::{ sync::{ @@ -21,7 +19,6 @@ use crate::error::DataAvailabilityError; use crate::{ cfg::Config, - common::{AccountSource, Hashchain, HashchainEntry, Operation}, consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL}, da::{DataAvailabilityLayer, FinalizedEpoch}, error::{DatabaseError, GeneralError}, @@ -29,6 +26,10 @@ use crate::{ storage::Database, webserver::{OperationInput, WebServer}, }; +use prism_common::{ + hashchain::{Hashchain, HashchainEntry}, + operation::{AccountSource, Operation}, +}; pub struct Sequencer { pub db: Arc, diff --git a/src/storage.rs b/crates/prism/src/storage.rs similarity index 98% rename from src/storage.rs rename to crates/prism/src/storage.rs index acca317d..72d1b05a 100644 --- a/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -16,8 +16,11 @@ use std::{ use crate::{ cfg::RedisConfig, - common::{Hashchain, HashchainEntry, Operation}, error::{DatabaseError, GeneralError, PrismError}, +}; +use prism_common::{ + hashchain::{Hashchain, HashchainEntry}, + operation::Operation, tree::Digest, }; @@ -248,7 +251,8 @@ impl Database for RedisConnection { #[cfg(test)] mod tests { use super::*; - use crate::{common::Operation, storage::Database, tree::hash}; + use crate::storage::Database; + use 
prism_common::{operation::Operation, tree::hash}; use serde::{Deserialize, Serialize}; use serial_test::serial; diff --git a/src/utils.rs b/crates/prism/src/utils.rs similarity index 98% rename from src/utils.rs rename to crates/prism/src/utils.rs index 2bf59b76..ea952025 100644 --- a/src/utils.rs +++ b/crates/prism/src/utils.rs @@ -1,7 +1,4 @@ -use crate::{ - error::{GeneralError, PrismError, ProofError}, - tree::Digest, -}; +use crate::error::{GeneralError, PrismError, ProofError}; use anyhow::Result; use base64::{engine::general_purpose::STANDARD as engine, Engine as _}; use bellman::groth16::{self, VerifyingKey}; @@ -9,7 +6,7 @@ use bls12_381::{Bls12, Scalar}; use ed25519::Signature; use ed25519_dalek::{Verifier, VerifyingKey as Ed25519VerifyingKey}; use indexed_merkle_tree::tree::Proof; -use rand::rngs::OsRng; +use prism_common::tree::Digest; pub fn parse_json_to_proof(json_str: &str) -> Result> { let proof: Proof = serde_json::from_str(json_str)?; diff --git a/src/webserver.rs b/crates/prism/src/webserver.rs similarity index 99% rename from src/webserver.rs rename to crates/prism/src/webserver.rs index 819c3dfc..9c2ea2b6 100644 --- a/src/webserver.rs +++ b/crates/prism/src/webserver.rs @@ -1,6 +1,5 @@ use crate::{ cfg::WebServerConfig, - common::{Hashchain, Operation}, error::GeneralError, node_types::sequencer::Sequencer, utils::{verify_signature, SignedContent}, @@ -18,6 +17,7 @@ use indexed_merkle_tree::{ tree::{Proof, UpdateProof}, Hash as TreeHash, }; +use prism_common::{hashchain::Hashchain, operation::Operation}; use serde::{Deserialize, Serialize}; use std::{self, str::FromStr, sync::Arc}; use tower_http::cors::CorsLayer; From b03b8ebb899735df21bdc85508bc058ebf2134df Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 10:41:02 +0200 Subject: [PATCH 23/33] readding groth16 as new crate --- Cargo.lock | 90 +++++++++++++++++++ Cargo.toml | 11 ++- crates/prism/Cargo.toml | 1 + crates/prism/src/cfg.rs | 2 +- crates/prism/src/da/celestia.rs | 2 +- crates/prism/src/da/mod.rs | 3 +- crates/prism/src/error.rs | 100 --------------------- crates/prism/src/lib.rs | 1 - crates/prism/src/main.rs | 1 - crates/prism/src/node_types/lightclient.rs | 6 +- crates/prism/src/node_types/sequencer.rs | 4 +- crates/prism/src/storage.rs | 2 +- crates/prism/src/utils.rs | 2 +- crates/prism/src/webserver.rs | 2 +- 14 files changed, 112 insertions(+), 115 deletions(-) delete mode 100644 crates/prism/src/error.rs diff --git a/Cargo.lock b/Cargo.lock index 18bba647..309f34b3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3128,6 +3128,95 @@ dependencies = [ "sha2 0.10.8", ] +[[package]] +name = "prism-errors" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "auto_impl", + "axum", + "base64 0.22.1", + "bellman", + "bls12_381", + "borsh", + "celestia-rpc", + "celestia-types", + "clap", + "config", + "criterion", + "dirs 5.0.1", + "dotenvy", + "ed25519", + "ed25519-dalek", + "ff", + "hex", + "indexed-merkle-tree", + "jmt", + "keystore-rs", + "log", + "mockall", + "pretty_env_logger", + "prism-common", + "rand", + "redis", + "serde", + "serde_json", + "serial_test", + "sha2 0.10.8", + "thiserror", + "tokio", + "toml", + "tower-http", + "utoipa", + "utoipa-swagger-ui", +] + +[[package]] +name = "prism-groth16" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "auto_impl", + "axum", + "base64 0.22.1", + "bellman", + "bls12_381", + "borsh", + "celestia-rpc", + "celestia-types", + "clap", + "config", + "criterion", + "dirs 5.0.1", + "dotenvy", + "ed25519", + 
"ed25519-dalek", + "ff", + "hex", + "indexed-merkle-tree", + "jmt", + "keystore-rs", + "log", + "mockall", + "pretty_env_logger", + "prism-common", + "prism-errors", + "rand", + "redis", + "serde", + "serde_json", + "serial_test", + "sha2 0.10.8", + "thiserror", + "tokio", + "toml", + "tower-http", + "utoipa", + "utoipa-swagger-ui", +] + [[package]] name = "prism-main" version = "0.1.0" @@ -3158,6 +3247,7 @@ dependencies = [ "mockall", "pretty_env_logger", "prism-common", + "prism-errors", "rand", "redis", "serde", diff --git a/Cargo.toml b/Cargo.toml index 9c5aa755..8a59ae7e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,13 @@ keywords = ["crypto", "key-transparency"] readme = "README.md" [workspace] -members = ["crates/prism", "crates/common", "crates/nova"] +members = [ + "crates/prism", + "crates/common", + "crates/nova", + "crates/groth16", + "crates/errors", +] resolver = "2" [workspace.dependencies] @@ -64,6 +70,9 @@ bincode = "1.3.3" blake2 = "0.10.6" prism-common = { path = "crates/common" } prism-nova = { path = "crates/nova" } +prism-errors = { path = "crates/errors" } +prism-main = { path = "crates/prism" } +prism-groth16 = { path = "crates/groth16" } [patch.crates-io] sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } diff --git a/crates/prism/Cargo.toml b/crates/prism/Cargo.toml index fcb38c95..a5e82702 100644 --- a/crates/prism/Cargo.toml +++ b/crates/prism/Cargo.toml @@ -43,6 +43,7 @@ jmt = { workspace = true, path = "../jmt", features = ["mocks"] } sha2 = { workspace = true } auto_impl = { workspace = true } prism-common = { workspace = true } +prism-errors = { workspace = true } [patch.crates-io] sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } diff --git a/crates/prism/src/cfg.rs b/crates/prism/src/cfg.rs index ba3f8e6f..2b3c5d36 100644 --- a/crates/prism/src/cfg.rs +++ b/crates/prism/src/cfg.rs @@ -1,8 +1,8 @@ use crate::{ consts::{DA_RETRY_COUNT, DA_RETRY_INTERVAL}, da::memory::InMemoryDataAvailabilityLayer, - error::{DataAvailabilityError, GeneralError, PrismError}, }; +use prism_errors::{DataAvailabilityError, GeneralError, PrismError}; use anyhow::{anyhow, Context, Result}; use clap::{Parser, Subcommand}; use config::{builder::DefaultState, ConfigBuilder, File}; diff --git a/crates/prism/src/da/celestia.rs b/crates/prism/src/da/celestia.rs index 96aabc5f..e3d77347 100644 --- a/crates/prism/src/da/celestia.rs +++ b/crates/prism/src/da/celestia.rs @@ -2,8 +2,8 @@ use crate::{ cfg::CelestiaConfig, consts::CHANNEL_BUFFER_SIZE, da::{DataAvailabilityLayer, FinalizedEpoch}, - error::{DataAvailabilityError, GeneralError}, }; +use prism_errors::{DataAvailabilityError, GeneralError}; use anyhow::{anyhow, bail, Context, Result}; use async_trait::async_trait; use borsh::from_slice; diff --git a/crates/prism/src/da/mod.rs b/crates/prism/src/da/mod.rs index 81b26357..ebee226d 100644 --- a/crates/prism/src/da/mod.rs +++ b/crates/prism/src/da/mod.rs @@ -1,4 +1,5 @@ -use crate::{error::GeneralError, utils::SignedContent}; +use crate::utils::SignedContent; +use prism_errors::GeneralError; use anyhow::Result; use async_trait::async_trait; use borsh::{BorshDeserialize, BorshSerialize}; diff --git a/crates/prism/src/error.rs b/crates/prism/src/error.rs deleted file mode 100644 index 5fc369f1..00000000 --- a/crates/prism/src/error.rs +++ /dev/null @@ -1,100 +0,0 @@ -use anyhow::Error as AnyhowError; -use ed25519_dalek::SignatureError; 
-use thiserror::Error; - -#[derive(Error, Debug)] -pub enum PrismError { - #[error(transparent)] - General(#[from] GeneralError), - #[error(transparent)] - Database(#[from] DatabaseError), - #[error(transparent)] - DataAvailability(#[from] DataAvailabilityError), - #[error(transparent)] - Proof(#[from] ProofError), - #[error("config error: {0}")] - ConfigError(String), - #[error(transparent)] - Other(#[from] AnyhowError), -} - -// general reusable errors -#[derive(Error, Debug)] -pub enum GeneralError { - #[error("parsing: {0}")] - ParsingError(String), - #[error("creating blob object: {0}")] - BlobCreationError(String), - #[error("encoding: {0}")] - EncodingError(String), - #[error("decoding: {0}")] - DecodingError(String), - #[error("missing argument: {0}")] - MissingArgumentError(String), - #[error("invalid public key")] - InvalidPublicKey, - #[error(transparent)] - InvalidSignature(#[from] SignatureError), - #[error("starting webserver")] - WebserverError, - #[error("initializing service: {0}")] - InitializationError(String), -} - -#[derive(Error, Debug)] -pub enum DatabaseError { - #[error("acquiring database lock")] - LockError, - #[error("retrieving keys from {0} dictionary")] - KeysError(String), - #[error("{0} not found")] - NotFoundError(String), - #[error("retreiving input order list")] - GetInputOrderError, - #[error("reading {0} from database")] - ReadError(String), - #[error("writing {0} to database")] - WriteError(String), - #[error("deleting {0} from database")] - DeleteError(String), - #[error(transparent)] - GeneralError(#[from] GeneralError), - #[error("connecting to database: {0}")] - ConnectionError(String), - #[error("initializing database: {0}")] - InitializationError(String), -} - -#[derive(Error, Debug)] -pub enum DataAvailabilityError { - #[error("initializing: {0}")] - InitializationError(String), - #[error("data channel is closed")] - ChannelClosed, - #[error("da networking error: {0}")] - NetworkError(String), - #[error("retrieving data at height {0}: {1}")] - DataRetrievalError(u64, String), - #[error("submitting epoch to da layer: {0}")] - SubmissionError(String), - #[error("setting new sync target: {0}")] - SyncTargetError(String), - #[error("receiving message on channel")] - ChannelReceiveError, - #[error(transparent)] - GeneralError(#[from] GeneralError), -} - -#[derive(Error, Debug)] -pub enum ProofError { - #[error("generating proof: {0}")] - GenerationError(String), - #[error("verifying proof: {0}")] - VerificationError(String), - #[error("deserializing G1Affine point")] - G1AffineDeserializationError, - #[error("unpacking proof components: {0}")] - ProofUnpackError(String), - #[error("invalid proof format")] - InvalidFormatError, -} diff --git a/crates/prism/src/lib.rs b/crates/prism/src/lib.rs index 68db4283..6aff7332 100644 --- a/crates/prism/src/lib.rs +++ b/crates/prism/src/lib.rs @@ -1,7 +1,6 @@ pub mod cfg; pub mod consts; pub mod da; -pub mod error; pub mod node_types; pub mod storage; pub mod utils; diff --git a/crates/prism/src/main.rs b/crates/prism/src/main.rs index 8061f149..14aa3c96 100644 --- a/crates/prism/src/main.rs +++ b/crates/prism/src/main.rs @@ -1,7 +1,6 @@ mod cfg; pub mod consts; pub mod da; -pub mod error; mod node_types; pub mod storage; mod utils; diff --git a/crates/prism/src/node_types/lightclient.rs b/crates/prism/src/node_types/lightclient.rs index ae19b612..e7b0117a 100644 --- a/crates/prism/src/node_types/lightclient.rs +++ b/crates/prism/src/node_types/lightclient.rs @@ -1,7 +1,5 @@ -use crate::{ - cfg::CelestiaConfig, 
- error::{DataAvailabilityError, GeneralError}, -}; +use crate::cfg::CelestiaConfig; +use prism_errors::{DataAvailabilityError, GeneralError}; use anyhow::{Context, Result}; use async_trait::async_trait; use std::{self, sync::Arc, time::Duration}; diff --git a/crates/prism/src/node_types/sequencer.rs b/crates/prism/src/node_types/sequencer.rs index 15e316b5..b2acde0a 100644 --- a/crates/prism/src/node_types/sequencer.rs +++ b/crates/prism/src/node_types/sequencer.rs @@ -15,13 +15,12 @@ use tokio::{ }; #[cfg(test)] -use crate::error::DataAvailabilityError; +use prism_errors::DataAvailabilityError; use crate::{ cfg::Config, consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL}, da::{DataAvailabilityLayer, FinalizedEpoch}, - error::{DatabaseError, GeneralError}, node_types::NodeType, storage::Database, webserver::{OperationInput, WebServer}, @@ -30,6 +29,7 @@ use prism_common::{ hashchain::{Hashchain, HashchainEntry}, operation::{AccountSource, Operation}, }; +use prism_errors::{DatabaseError, GeneralError}; pub struct Sequencer { pub db: Arc, diff --git a/crates/prism/src/storage.rs b/crates/prism/src/storage.rs index 72d1b05a..12b0cdd7 100644 --- a/crates/prism/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -16,8 +16,8 @@ use std::{ use crate::{ cfg::RedisConfig, - error::{DatabaseError, GeneralError, PrismError}, }; +use prism_errors::{DatabaseError, GeneralError, PrismError}; use prism_common::{ hashchain::{Hashchain, HashchainEntry}, operation::Operation, diff --git a/crates/prism/src/utils.rs b/crates/prism/src/utils.rs index ea952025..f24fc012 100644 --- a/crates/prism/src/utils.rs +++ b/crates/prism/src/utils.rs @@ -1,4 +1,4 @@ -use crate::error::{GeneralError, PrismError, ProofError}; +use prism_errors::{GeneralError, PrismError, ProofError}; use anyhow::Result; use base64::{engine::general_purpose::STANDARD as engine, Engine as _}; use bellman::groth16::{self, VerifyingKey}; diff --git a/crates/prism/src/webserver.rs b/crates/prism/src/webserver.rs index 9c2ea2b6..19bc41cb 100644 --- a/crates/prism/src/webserver.rs +++ b/crates/prism/src/webserver.rs @@ -1,9 +1,9 @@ use crate::{ cfg::WebServerConfig, - error::GeneralError, node_types::sequencer::Sequencer, utils::{verify_signature, SignedContent}, }; +use prism_errors::GeneralError; use anyhow::{Context, Result}; use axum::{ extract::State, From fcbb8817f0802c249807fd124fd5136a785e8464 Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 10:41:30 +0200 Subject: [PATCH 24/33] readding groth16 as new crate --- Cargo.lock | 1 + crates/errors/Cargo.toml | 58 ++++ crates/errors/src/lib.rs | 100 ++++++ crates/groth16/Cargo.toml | 59 ++++ crates/groth16/src/hashchain.rs | 55 ++++ crates/groth16/src/less_than.rs | 66 ++++ crates/groth16/src/lib.rs | 348 +++++++++++++++++++++ crates/groth16/src/merkle_batch.rs | 146 +++++++++ crates/groth16/src/merkle_insertion.rs | 187 +++++++++++ crates/groth16/src/merkle_update.rs | 121 +++++++ crates/groth16/src/utils.rs | 90 ++++++ crates/nova/src/utils.rs | 5 - crates/prism/Cargo.toml | 1 + crates/prism/src/da/mod.rs | 2 +- crates/prism/src/node_types/lightclient.rs | 39 +-- 15 files changed, 1249 insertions(+), 29 deletions(-) create mode 100644 crates/errors/Cargo.toml create mode 100644 crates/errors/src/lib.rs create mode 100644 crates/groth16/Cargo.toml create mode 100644 crates/groth16/src/hashchain.rs create mode 100644 crates/groth16/src/less_than.rs create mode 100644 crates/groth16/src/lib.rs create mode 100644 crates/groth16/src/merkle_batch.rs create mode 
100644 crates/groth16/src/merkle_insertion.rs create mode 100644 crates/groth16/src/merkle_update.rs create mode 100644 crates/groth16/src/utils.rs diff --git a/Cargo.lock b/Cargo.lock index 309f34b3..659b2e9c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3248,6 +3248,7 @@ dependencies = [ "pretty_env_logger", "prism-common", "prism-errors", + "prism-groth16", "rand", "redis", "serde", diff --git a/crates/errors/Cargo.toml b/crates/errors/Cargo.toml new file mode 100644 index 00000000..5a2b2450 --- /dev/null +++ b/crates/errors/Cargo.toml @@ -0,0 +1,58 @@ +[package] +name = "prism-errors" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +axum = { workspace = true } +borsh = { workspace = true } +tower-http = { workspace = true } +utoipa = { workspace = true } +utoipa-swagger-ui = { workspace = true } +async-trait = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +redis = { workspace = true } +ed25519-dalek = { workspace = true } +ed25519 = { workspace = true } +base64 = { workspace = true } +tokio = { workspace = true } +bellman = { workspace = true } +bls12_381 = { workspace = true } +rand = { workspace = true } +hex = { workspace = true } +ff = { workspace = true } +log = { workspace = true } +pretty_env_logger = { workspace = true } +clap = { workspace = true } +config = { workspace = true } +thiserror = { workspace = true } +indexed-merkle-tree = { workspace = true } +dotenvy = { workspace = true } +celestia-rpc = { workspace = true } +celestia-types = { workspace = true } +mockall = { workspace = true } +keystore-rs = { workspace = true } +toml = { workspace = true } +dirs = { workspace = true } +anyhow = { workspace = true } +jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +sha2 = { workspace = true } +auto_impl = { workspace = true } +prism-common = { workspace = true } + +[patch.crates-io] +sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } +sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } +sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } + +[dev-dependencies] +serial_test = "3.1.1" +criterion = "0.5.1" + +# [[bench]] +# name = "zk_benchmarks" +# harness = false diff --git a/crates/errors/src/lib.rs b/crates/errors/src/lib.rs new file mode 100644 index 00000000..5fc369f1 --- /dev/null +++ b/crates/errors/src/lib.rs @@ -0,0 +1,100 @@ +use anyhow::Error as AnyhowError; +use ed25519_dalek::SignatureError; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum PrismError { + #[error(transparent)] + General(#[from] GeneralError), + #[error(transparent)] + Database(#[from] DatabaseError), + #[error(transparent)] + DataAvailability(#[from] DataAvailabilityError), + #[error(transparent)] + Proof(#[from] ProofError), + #[error("config error: {0}")] + ConfigError(String), + #[error(transparent)] + Other(#[from] AnyhowError), +} + +// general reusable errors +#[derive(Error, Debug)] +pub enum GeneralError { + #[error("parsing: {0}")] + ParsingError(String), + #[error("creating blob object: {0}")] + BlobCreationError(String), + #[error("encoding: {0}")] + EncodingError(String), + #[error("decoding: {0}")] + DecodingError(String), + #[error("missing argument: {0}")] + MissingArgumentError(String), + 
#[error("invalid public key")] + InvalidPublicKey, + #[error(transparent)] + InvalidSignature(#[from] SignatureError), + #[error("starting webserver")] + WebserverError, + #[error("initializing service: {0}")] + InitializationError(String), +} + +#[derive(Error, Debug)] +pub enum DatabaseError { + #[error("acquiring database lock")] + LockError, + #[error("retrieving keys from {0} dictionary")] + KeysError(String), + #[error("{0} not found")] + NotFoundError(String), + #[error("retreiving input order list")] + GetInputOrderError, + #[error("reading {0} from database")] + ReadError(String), + #[error("writing {0} to database")] + WriteError(String), + #[error("deleting {0} from database")] + DeleteError(String), + #[error(transparent)] + GeneralError(#[from] GeneralError), + #[error("connecting to database: {0}")] + ConnectionError(String), + #[error("initializing database: {0}")] + InitializationError(String), +} + +#[derive(Error, Debug)] +pub enum DataAvailabilityError { + #[error("initializing: {0}")] + InitializationError(String), + #[error("data channel is closed")] + ChannelClosed, + #[error("da networking error: {0}")] + NetworkError(String), + #[error("retrieving data at height {0}: {1}")] + DataRetrievalError(u64, String), + #[error("submitting epoch to da layer: {0}")] + SubmissionError(String), + #[error("setting new sync target: {0}")] + SyncTargetError(String), + #[error("receiving message on channel")] + ChannelReceiveError, + #[error(transparent)] + GeneralError(#[from] GeneralError), +} + +#[derive(Error, Debug)] +pub enum ProofError { + #[error("generating proof: {0}")] + GenerationError(String), + #[error("verifying proof: {0}")] + VerificationError(String), + #[error("deserializing G1Affine point")] + G1AffineDeserializationError, + #[error("unpacking proof components: {0}")] + ProofUnpackError(String), + #[error("invalid proof format")] + InvalidFormatError, +} diff --git a/crates/groth16/Cargo.toml b/crates/groth16/Cargo.toml new file mode 100644 index 00000000..373bb139 --- /dev/null +++ b/crates/groth16/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "prism-groth16" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +axum = { workspace = true } +borsh = { workspace = true } +tower-http = { workspace = true } +utoipa = { workspace = true } +utoipa-swagger-ui = { workspace = true } +async-trait = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +redis = { workspace = true } +ed25519-dalek = { workspace = true } +ed25519 = { workspace = true } +base64 = { workspace = true } +tokio = { workspace = true } +bellman = { workspace = true } +bls12_381 = { workspace = true } +rand = { workspace = true } +hex = { workspace = true } +ff = { workspace = true } +log = { workspace = true } +pretty_env_logger = { workspace = true } +clap = { workspace = true } +config = { workspace = true } +thiserror = { workspace = true } +indexed-merkle-tree = { workspace = true } +dotenvy = { workspace = true } +celestia-rpc = { workspace = true } +celestia-types = { workspace = true } +mockall = { workspace = true } +keystore-rs = { workspace = true } +toml = { workspace = true } +dirs = { workspace = true } +anyhow = { workspace = true } +jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +sha2 = { workspace = true } +auto_impl = { workspace = true } +prism-common = { workspace = true } +prism-errors = { workspace = true } + +[patch.crates-io] 
+sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } +sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } +sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } + +[dev-dependencies] +serial_test = "3.1.1" +criterion = "0.5.1" + +# [[bench]] +# name = "zk_benchmarks" +# harness = false diff --git a/crates/groth16/src/hashchain.rs b/crates/groth16/src/hashchain.rs new file mode 100644 index 00000000..bc52cdfb --- /dev/null +++ b/crates/groth16/src/hashchain.rs @@ -0,0 +1,55 @@ +use anyhow::Result; +use bellman::{Circuit, ConstraintSystem, SynthesisError}; +use bls12_381::Scalar; +use indexed_merkle_tree::sha256_mod; +use prism_common::hashchain::HashchainEntry; + +/// HashChainEntryCircuit is a circuit that verifies that a given value is present in a hashchain. +#[derive(Clone)] +pub struct HashChainEntryCircuit { + pub value: Scalar, + /// Represents the hashchain in the form of a vector of Scalars. + /// Each Scalar is sha256_mod(hashchain_entry.value()) + pub chain: Vec, +} + +impl HashChainEntryCircuit { + pub fn create(value: &str, hashchain: Vec) -> Result { + let hashed_value = sha256_mod(value.as_bytes()); + let parsed_value = hashed_value.try_into()?; + let mut parsed_hashchain: Vec = vec![]; + for entry in hashchain { + let hashed_entry_value = sha256_mod(entry.operation.value().as_bytes()); + parsed_hashchain.push(hashed_entry_value.try_into()?) + } + Ok(HashChainEntryCircuit { + value: parsed_value, + chain: parsed_hashchain, + }) + } +} + +impl Circuit for HashChainEntryCircuit { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + if self.chain.is_empty() { + return Err(SynthesisError::AssignmentMissing); + } + + let provided_value = cs.alloc_input(|| "provided hashed value", || Ok(self.value))?; + + for entry in self.chain { + if entry == self.value { + let found_value = cs.alloc(|| "found hashed value", || Ok(entry))?; + // found_value * (1) = provided_value + cs.enforce( + || "found value check", + |lc| lc + found_value, + |lc| lc + CS::one(), + |lc| lc + provided_value, + ); + return Ok(()); + } + } + Err(SynthesisError::Unsatisfiable) + } +} diff --git a/crates/groth16/src/less_than.rs b/crates/groth16/src/less_than.rs new file mode 100644 index 00000000..08897ec6 --- /dev/null +++ b/crates/groth16/src/less_than.rs @@ -0,0 +1,66 @@ +use anyhow::Result; +use bellman::{gadgets::boolean::Boolean, Circuit, ConstraintSystem, SynthesisError}; +use bls12_381::Scalar; +use ff::PrimeFieldBits; + +#[derive(Clone)] +pub struct LessThanCircuit { + a: Scalar, + b: Scalar, +} + +impl LessThanCircuit { + pub fn new(a: Scalar, b: Scalar) -> LessThanCircuit { + LessThanCircuit { a, b } + } +} + +impl Circuit for LessThanCircuit { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + let a_bits = self.a.to_le_bits(); + let b_bits = self.b.to_le_bits(); + + let mut result = Boolean::constant(false); + + // Iterate over the bits from most significant to least significant + for i in (0..a_bits.len()).rev() { + let a_val = Boolean::constant(a_bits[i]); + let b_val = Boolean::constant(b_bits[i]); + let not_a = a_val.not(); + let not_b = b_val.not(); + + // Check if bits are equal (both 1 or both 0) + let a_and_b = Boolean::and(cs.namespace(|| format!("a_and_b_{}", i)), &a_val, &b_val)?; + let not_a_and_not_b = Boolean::and( + cs.namespace(|| 
format!("not_a_and_not_b_{}", i)), + ¬_a, + ¬_b, + )?; + + // If the bits are equal, continue to the next bit + if not_a_and_not_b.get_value().unwrap() || a_and_b.get_value().unwrap() { + continue; + } else { + // If bits differ: b > a if b_bit = 1 && a_bit = 0 + result = Boolean::and( + cs.namespace(|| format!("b_and_not_a_{}", i)), + &b_val, + ¬_a, + )?; + break; + } + } + + // Enforce the constraint that the result is correct + // If result is true, then a < b, otherwise a >= b + // result * (1) = 1 + cs.enforce( + || "a < b", + |_| result.lc(CS::one(), Scalar::one()), + |lc| lc + CS::one(), + |lc| lc + CS::one(), + ); + + Ok(()) + } +} diff --git a/crates/groth16/src/lib.rs b/crates/groth16/src/lib.rs new file mode 100644 index 00000000..68a5ed33 --- /dev/null +++ b/crates/groth16/src/lib.rs @@ -0,0 +1,348 @@ +use anyhow::{anyhow, Context, Result}; +use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; +use bls12_381::{Bls12, G1Affine, G2Affine, Scalar}; +use borsh::{BorshDeserialize, BorshSerialize}; +use prism_errors::{GeneralError, PrismError}; +use std::fmt; + +pub mod hashchain; +pub mod less_than; +pub mod merkle_batch; +pub mod merkle_insertion; +pub mod merkle_update; +pub mod utils; +#[macro_use] +extern crate log; + +pub use hashchain::HashChainEntryCircuit; +pub use less_than::LessThanCircuit; +pub use merkle_batch::BatchMerkleProofCircuit; +pub use merkle_insertion::InsertMerkleProofCircuit; +pub use merkle_update::UpdateMerkleProofCircuit; + +#[derive(Clone)] +pub enum ProofVariantCircuit { + Update(UpdateMerkleProofCircuit), + Insert(InsertMerkleProofCircuit), + Batch(BatchMerkleProofCircuit), +} + +impl Circuit for ProofVariantCircuit { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + match self { + ProofVariantCircuit::Update(circuit) => circuit.synthesize(cs), + ProofVariantCircuit::Insert(circuit) => circuit.synthesize(cs), + ProofVariantCircuit::Batch(circuit) => circuit.synthesize(cs), + } + } +} + +/// G1 represents a compressed [`bls12_381::G1Affine`] +#[derive(BorshSerialize, BorshDeserialize, Clone)] +pub struct G1([u8; 48]); + +/// G2 represents a compressed [`bls12_381::G2Affine`] +#[derive(BorshSerialize, BorshDeserialize, Clone)] +pub struct G2([u8; 96]); + +// Debug impls for the Affines print their hex representation +impl fmt::Debug for G1 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "G1(0x{})", hex::encode(self.0)) + } +} + +impl fmt::Debug for G2 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "G2(0x{})", hex::encode(self.0)) + } +} + +impl TryFrom for bls12_381::G1Affine { + type Error = anyhow::Error; + + fn try_from(g1: G1) -> Result { + match bls12_381::G1Affine::from_compressed(&g1.0).into_option() { + Some(affine) => Ok(affine), + None => Err(anyhow!( + GeneralError::DecodingError("G2Affine".to_string(),) + )), + } + } +} + +impl TryFrom for bls12_381::G2Affine { + type Error = anyhow::Error; + + fn try_from(g2: G2) -> Result { + match bls12_381::G2Affine::from_compressed(&g2.0).into_option() { + Some(affine) => Ok(affine), + None => Err(anyhow!( + GeneralError::DecodingError("G2Affine".to_string(),) + )), + } + } +} + +#[derive(Clone, BorshSerialize, BorshDeserialize, Debug)] +pub struct Bls12Proof { + pub a: G1, + pub b: G2, + pub c: G1, +} + +impl TryFrom for groth16::Proof { + type Error = anyhow::Error; + + fn try_from(proof: Bls12Proof) -> Result { + let a: G1Affine = proof.a.try_into().context("affine: a")?; + let b: G2Affine = 
proof.b.try_into().context("affine: b")?; + let c: G1Affine = proof.c.try_into().context("affine: c")?; + + Ok(groth16::Proof { a, b, c }) + } +} + +impl From> for Bls12Proof { + fn from(proof: groth16::Proof) -> Self { + Bls12Proof { + a: G1(proof.a.to_compressed()), + b: G2(proof.b.to_compressed()), + c: G1(proof.c.to_compressed()), + } + } +} + +#[derive(Clone, BorshSerialize, BorshDeserialize, Debug)] +pub struct VerifyingKey { + pub alpha_g1: G1, + pub beta_g1: G1, + pub beta_g2: G2, + pub delta_g1: G1, + pub delta_g2: G2, + pub gamma_g2: G2, + pub ic: Vec, +} + +impl From> for VerifyingKey { + fn from(verifying_key: groth16::VerifyingKey) -> Self { + VerifyingKey { + alpha_g1: G1(verifying_key.alpha_g1.to_compressed()), + beta_g1: G1(verifying_key.beta_g1.to_compressed()), + beta_g2: G2(verifying_key.beta_g2.to_compressed()), + delta_g1: G1(verifying_key.delta_g1.to_compressed()), + delta_g2: G2(verifying_key.delta_g2.to_compressed()), + gamma_g2: G2(verifying_key.gamma_g2.to_compressed()), + ic: verifying_key + .ic + .iter() + .map(|x| G1(x.to_compressed())) + .collect::>(), + } + } +} + +impl TryFrom for groth16::VerifyingKey { + type Error = PrismError; + + fn try_from(custom_vk: VerifyingKey) -> Result { + let alpha_g1: G1Affine = custom_vk + .alpha_g1 + .try_into() + .map_err(|e| GeneralError::EncodingError(format!("{}:alpha_g1", e)))?; + let beta_g1: G1Affine = custom_vk + .beta_g1 + .try_into() + .map_err(|e| GeneralError::EncodingError(format!("{}: beta_g1", e)))?; + let beta_g2: G2Affine = custom_vk + .beta_g2 + .try_into() + .map_err(|e| GeneralError::EncodingError(format!("{}: beta_g2", e)))?; + let delta_g1: G1Affine = custom_vk + .delta_g1 + .try_into() + .map_err(|e| GeneralError::EncodingError(format!("{}: delta_g1", e)))?; + let delta_g2: G2Affine = custom_vk + .delta_g2 + .try_into() + .map_err(|e| GeneralError::EncodingError(format!("{}: delta_g1", e)))?; + let gamma_g2: G2Affine = custom_vk + .gamma_g2 + .try_into() + .map_err(|e| GeneralError::EncodingError(format!("{}: gamma_g2", e)))?; + let ic = custom_vk + .ic + .into_iter() + .map(|s| s.try_into()) + .collect::>>()?; + + Ok(bellman::groth16::VerifyingKey { + alpha_g1, + beta_g1, + beta_g2, + gamma_g2, + delta_g1, + delta_g2, + ic: ic.into_iter().collect(), + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use bellman::groth16; + use bls12_381::Bls12; + use indexed_merkle_tree::{ + node::Node, + sha256_mod, + tree::{IndexedMerkleTree, Proof}, + Hash, + }; + use rand::rngs::OsRng; + + fn head_scalar() -> Scalar { + Node::HEAD.try_into().unwrap() + } + + fn small_scalar() -> Scalar { + let small_hash = + Hash::from_hex("13ae3ed6fe76d459c9c66fe38ff187593561a1f24d34cb22e06148c77e4cc02b") + .unwrap(); + small_hash.try_into().unwrap() + } + + fn mid_scalar() -> Scalar { + let mid_hash = + Hash::from_hex("3d1e830624b2572adc05351a7cbee2d3aa3f6a52b34fa38a260c9c78f96fcd07") + .unwrap(); + mid_hash.try_into().unwrap() + } + + fn big_scalar() -> Scalar { + let big_hash = + Hash::from_hex("6714dda957170ad7720bbd2c38004152f34ea5d4350a154b84a259cc62a5dbb4") + .unwrap(); + big_hash.try_into().unwrap() + } + + fn tail_scalar() -> Scalar { + Node::TAIL.try_into().unwrap() + } + + fn create_scalars() -> (Scalar, Scalar, Scalar, Scalar, Scalar) { + ( + head_scalar(), + small_scalar(), + mid_scalar(), + big_scalar(), + tail_scalar(), + ) + } + + fn setup_and_test_less_than_circuit(a: Scalar, b: Scalar) { + let circuit = LessThanCircuit::new(a, b); + let rng = &mut OsRng; + let params = 
groth16::generate_random_parameters::(circuit.clone(), rng) + .expect("unable to generate random parameters"); + let proof = groth16::create_random_proof(circuit.clone(), ¶ms, rng) + .expect("unable to create random proof"); + let pvk = groth16::prepare_verifying_key(¶ms.vk); + groth16::verify_proof(&pvk, &proof, &[]).expect("unable to verify proof") + } + + #[test] + fn le_with_scalar_valid() { + let (head, small, mid, big, tail) = create_scalars(); + + setup_and_test_less_than_circuit(head, small); + setup_and_test_less_than_circuit(small, tail); + + setup_and_test_less_than_circuit(small, big); + setup_and_test_less_than_circuit(big, tail); + + setup_and_test_less_than_circuit(head, mid); + setup_and_test_less_than_circuit(mid, big); + } + + #[test] + #[should_panic(expected = "unable to verify proof")] + fn invalid_less_than_circuit_a_gt_b() { + let (_, _, _, big, tail) = create_scalars(); + + setup_and_test_less_than_circuit(tail, big) + } + + #[test] + #[should_panic(expected = "unable to verify proof")] + fn invalid_less_than_circuit_a_eq_b() { + let head = head_scalar(); + setup_and_test_less_than_circuit(head, head) + } + + #[test] + fn test_serialize_and_deserialize_proof() { + let mut tree = IndexedMerkleTree::new_with_size(4).unwrap(); + let prev_commitment = tree.get_commitment().unwrap(); + + // create two nodes to insert + let ryan = sha256_mod(b"Ryan"); + let ford = sha256_mod(b"Ford"); + let sebastian = sha256_mod(b"Sebastian"); + let pusch = sha256_mod(b"Pusch"); + let ethan = sha256_mod(b"Ethan"); + let triple_zero = sha256_mod(b"000"); + + let mut ryans_node = Node::new_leaf(true, ryan, ford, Node::TAIL); + let mut sebastians_node = Node::new_leaf(true, sebastian, pusch, Node::TAIL); + let mut ethans_node = Node::new_leaf(true, ethan, triple_zero, Node::TAIL); + + // generate proofs for the two nodes + let first_insert_proof = tree.insert_node(&mut ryans_node).unwrap(); + let second_insert_proof = tree.insert_node(&mut sebastians_node).unwrap(); + let third_insert_proof = tree.insert_node(&mut ethans_node).unwrap(); + + // create zkSNARKs for the two proofs + let first_insert_zk_snark = Proof::Insert(first_insert_proof); + let second_insert_zk_snark = Proof::Insert(second_insert_proof); + let third_insert_zk_snark = Proof::Insert(third_insert_proof); + + let proofs = vec![ + first_insert_zk_snark, + second_insert_zk_snark, + third_insert_zk_snark, + ]; + let current_commitment = tree.get_commitment().unwrap(); + + let batched_proof = + BatchMerkleProofCircuit::new(&prev_commitment, ¤t_commitment, proofs).unwrap(); + + let rng = &mut OsRng; + let params = + groth16::generate_random_parameters::(batched_proof.clone(), rng).unwrap(); + let proof = groth16::create_random_proof(batched_proof.clone(), ¶ms, rng).unwrap(); + + let serialized_proof: Bls12Proof = proof.clone().into(); + let deserialized_proof_result: Result> = + serialized_proof.clone().try_into(); + assert!(deserialized_proof_result.is_ok(), "Deserialization failed"); + + let deserialized_proof = deserialized_proof_result.unwrap(); + assert_eq!(proof.a, deserialized_proof.a); + assert_eq!(proof.b, deserialized_proof.b); + assert_eq!(proof.c, deserialized_proof.c); + } + + #[test] + fn test_deserialize_invalid_proof() { + let invalid_proof = Bls12Proof { + a: G1([1; 48]), + b: G2([2; 96]), + c: G1([3; 48]), + }; + + let deserialized_proof_result: Result> = + invalid_proof.clone().try_into(); + assert!(deserialized_proof_result.is_err()); + } +} diff --git a/crates/groth16/src/merkle_batch.rs 
b/crates/groth16/src/merkle_batch.rs new file mode 100644 index 00000000..c72d7772 --- /dev/null +++ b/crates/groth16/src/merkle_batch.rs @@ -0,0 +1,146 @@ +use crate::{ + merkle_insertion::prove_insertion, merkle_update::prove_update, InsertMerkleProofCircuit, + ProofVariantCircuit, UpdateMerkleProofCircuit, + utils::create_and_verify_snark +}; +use anyhow::Result; +use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; +use bls12_381::{Bls12, Scalar}; +use indexed_merkle_tree::{tree::Proof, Hash}; + +/// BatchMerkleProofCircuit represents a circuit for proving a batch of merkle proof circuits. +#[derive(Clone)] +pub struct BatchMerkleProofCircuit { + pub old_commitment: Scalar, + pub new_commitment: Scalar, + pub proofs: Vec, +} + +impl BatchMerkleProofCircuit { + pub fn new( + old_commitment: &Hash, + new_commitment: &Hash, + proofs: Vec, + ) -> Result { + let parsed_old_commitment: Scalar = (*old_commitment).try_into()?; + let parsed_new_commitment: Scalar = (*new_commitment).try_into()?; + let mut proof_circuit_array: Vec = vec![]; + for proof in proofs { + match proof { + Proof::Update(update_proof) => { + proof_circuit_array.push(ProofVariantCircuit::Update( + UpdateMerkleProofCircuit::new(&update_proof)?, + )); + } + Proof::Insert(insertion_proof) => { + proof_circuit_array.push(ProofVariantCircuit::Insert( + InsertMerkleProofCircuit::new(&insertion_proof)?, + )); + } + } + } + Ok(BatchMerkleProofCircuit { + old_commitment: parsed_old_commitment, + new_commitment: parsed_new_commitment, + proofs: proof_circuit_array, + }) + } + + pub fn create_and_verify_snark( + &self, + ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { + let scalars: Vec = vec![self.old_commitment, self.new_commitment]; + + create_and_verify_snark(ProofVariantCircuit::Batch(self.clone()), scalars) + } +} + +impl Circuit for BatchMerkleProofCircuit { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + // If the proofs are empty, we just verify that the commitments are equal + if self.proofs.is_empty() { + let provided_old_commitment = + cs.alloc_input(|| "provided old commitment", || Ok(self.old_commitment))?; + let provided_new_commitment = + cs.alloc_input(|| "provided new commitment", || Ok(self.new_commitment))?; + + // provided_old_commitment * (1) = provided_new_commitment + cs.enforce( + || "old commitment check", + |lc| lc + provided_old_commitment, + |lc| lc + CS::one(), + |lc| lc + provided_new_commitment, + ); + + return Ok(()); + } + + // before the calculations make sure that the old root is that of the first proof + let old_root = match &self.proofs[0] { + ProofVariantCircuit::Update(update_proof_circuit) => update_proof_circuit.old_root, + ProofVariantCircuit::Insert(insert_proof_circuit) => { + insert_proof_circuit.pre_insertion_root + } + ProofVariantCircuit::Batch(batch_proof_circuit) => batch_proof_circuit.old_commitment, + }; + + let provided_old_commitment = + cs.alloc_input(|| "provided old commitment", || Ok(self.old_commitment))?; + let old_commitment_from_proofs = + cs.alloc(|| "old commitment from proofs", || Ok(old_root))?; + + // old_commitment_from_proofs * (1) = provided_old_commitment + cs.enforce( + || "old commitment check", + |lc| lc + old_commitment_from_proofs, + |lc| lc + CS::one(), + |lc| lc + provided_old_commitment, + ); + + let mut new_commitment: Scalar = Scalar::zero(); + for proof in self.proofs { + // update the new_commitment for every proof, applying the constraints of the circuit each time + match proof { + 
ProofVariantCircuit::Update(update_proof_circuit) => { + new_commitment = prove_update( + cs, + update_proof_circuit.old_root, + &update_proof_circuit.old_path, + update_proof_circuit.updated_root, + &update_proof_circuit.updated_path, + )?; + } + ProofVariantCircuit::Insert(insert_proof_circuit) => { + new_commitment = prove_insertion( + cs, + insert_proof_circuit.pre_insertion_root, + &insert_proof_circuit.insertion_path, + insert_proof_circuit.new_leaf_node, + insert_proof_circuit.existing_leaf_update, + insert_proof_circuit.new_leaf_activation, + )?; + } + ProofVariantCircuit::Batch(_) => { + // Batches cannot be recursively constructed + // TODO: Should they be able to? + return Err(SynthesisError::Unsatisfiable); + } + } + } + + let provided_new_commitment = + cs.alloc_input(|| "provided commitment", || Ok(self.new_commitment))?; + let recalculated_new_commitment = + cs.alloc(|| "recalculated commitment", || Ok(new_commitment))?; + + // recalculated_commitment * (1) = provided_commitment + cs.enforce( + || "new commitment check", + |lc| lc + recalculated_new_commitment, + |lc| lc + CS::one(), + |lc| lc + provided_new_commitment, + ); + + Ok(()) + } +} diff --git a/crates/groth16/src/merkle_insertion.rs b/crates/groth16/src/merkle_insertion.rs new file mode 100644 index 00000000..e42ed4ca --- /dev/null +++ b/crates/groth16/src/merkle_insertion.rs @@ -0,0 +1,187 @@ +use crate::{ + merkle_update::prove_update, + utils::{recalculate_hash_as_scalar, unpack_and_process, create_and_verify_snark}, + LessThanCircuit, ProofVariantCircuit, UpdateMerkleProofCircuit, +}; +use anyhow::Result; +use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; +use bls12_381::{Bls12, Scalar}; +use indexed_merkle_tree::{ + node::{LeafNode, Node}, + tree::InsertProof, +}; +use prism_errors::PrismError; + +/// Represents a circuit for proving the insertion of a new leaf into a the IMT. +/// +/// This circuit encapsulates the entire process of inserting a new leaf, +/// including proving non-membership of the new leaf, updating the existing leaf's next pointer, +/// and activating the new leaf. +#[derive(Clone)] +pub struct InsertMerkleProofCircuit { + /// The root of the tree before the insertion. + pub pre_insertion_root: Scalar, + /// The path from the root to the position where the new node will be inserted, + /// proving that the node doesn't exist yet. + pub insertion_path: Vec, + /// The new node to be inserted. + pub new_leaf_node: LeafNode, + /// Proof for updating the existing leaf to point to the new leaf. + pub existing_leaf_update: UpdateMerkleProofCircuit, + /// Proof for activating the new leaf (converting an inactive leaf to active). 
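+    /// Activation is expressed as an ordinary Merkle update: its constraints are
+    /// enforced by `prove_update` in step 3 of `prove_insertion` below.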
+ pub new_leaf_activation: UpdateMerkleProofCircuit, +} + +impl InsertMerkleProofCircuit { + pub fn new(proof: &InsertProof) -> Result { + let (non_membership_root, non_membership_path) = + unpack_and_process(&proof.non_membership_proof.merkle_proof)?; + + let first_merkle_circuit = UpdateMerkleProofCircuit::new(&proof.first_proof)?; + let second_merkle_circuit = UpdateMerkleProofCircuit::new(&proof.second_proof)?; + + Ok(InsertMerkleProofCircuit { + pre_insertion_root: non_membership_root, + insertion_path: non_membership_path.clone(), + new_leaf_node: proof.non_membership_proof.missing_node.clone(), + existing_leaf_update: first_merkle_circuit, + new_leaf_activation: second_merkle_circuit, + }) + } + + pub fn create_and_verify_snark( + &self, + ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { + let scalars: Vec = vec![ + self.pre_insertion_root, + self.existing_leaf_update.old_root, + self.existing_leaf_update.updated_root, + self.new_leaf_activation.old_root, + self.new_leaf_activation.updated_root, + ]; + + create_and_verify_snark(ProofVariantCircuit::Insert(self.clone()), scalars) + } +} + +impl Circuit for InsertMerkleProofCircuit { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + match prove_insertion( + cs, + self.pre_insertion_root, + &self.insertion_path, + self.new_leaf_node, + self.existing_leaf_update, + self.new_leaf_activation, + ) { + Ok(_) => Ok(()), + Err(_) => Err(SynthesisError::Unsatisfiable), + } + } +} + +/// Generates constraints to prove a valid insertion in the merkle tree. +pub fn prove_insertion>( + cs: &mut CS, + pre_insertion_root: Scalar, + insertion_path: &[Node], + new_leaf_node: LeafNode, + existing_leaf_update: UpdateMerkleProofCircuit, + new_leaf_activation: UpdateMerkleProofCircuit, +) -> Result { + // Step 1: Prove non-membership + // This ensures that the new leaf we're trying to insert doesn't already exist in the tree. + prove_non_membership(cs, pre_insertion_root, insertion_path, new_leaf_node)?; + + // Step 2: Update the existing leaf + // This step updates the 'next' pointer of an existing leaf to point to our new leaf. + let updated_root_after_existing_leaf_update = prove_update( + cs, + existing_leaf_update.old_root, + &existing_leaf_update.old_path, + existing_leaf_update.updated_root, + &existing_leaf_update.updated_path, + )?; + + // Step 3: Activate the new leaf + // This step converts an inactive (empty) leaf into our new active leaf, + // effectively inserting the new data into the tree. + let new_root = prove_update( + cs, + updated_root_after_existing_leaf_update, + &new_leaf_activation.old_path, + new_leaf_activation.updated_root, + &new_leaf_activation.updated_path, + )?; + + Ok(new_root) +} + +/// Generates constraints to prove non-membership of a new leaf in the Merkle tree. +/// +/// This function ensures that the new leaf to be inserted does not already exist in the tree +/// and that it maintains the ordered structure of the tree. +/// +/// # Arguments +/// +/// * `cs` - A mutable reference to the constraint system. +/// * `pre_insertion_root` - The root of the Merkle tree before insertion. +/// * `insertion_path` - The path from the root to the insertion position. +/// * `new_leaf_node` - The new leaf node to be inserted. +/// +/// # Returns +/// +/// Returns `Ok(())` if the constraints are satisfied, or an `Err` +/// containing a `SynthesisError` if the proof generation fails. 
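+///
+/// Concretely, the body enforces `existing_leaf.label < new_leaf.label < existing_leaf.next`
+/// via two `LessThanCircuit` instances, and checks that the root recomputed from
+/// `insertion_path` equals `pre_insertion_root`.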
+pub fn prove_non_membership>( + cs: &mut CS, + pre_insertion_root: Scalar, + insertion_path: &[Node], + new_leaf_node: LeafNode, +) -> Result<(), SynthesisError> { + // Ensure that the label of the new leaf node lies between the first element of the path + // and its next pointer. This guarantees that no other node with a label between these values exists. + let existing_leaf_label: Scalar = insertion_path[0] + .get_label() + .try_into() + .map_err(|_| SynthesisError::Unsatisfiable)?; + let existing_leaf_next: Scalar = insertion_path[0] + .get_next() + .try_into() + .map_err(|_| SynthesisError::Unsatisfiable)?; + let new_leaf_label: Scalar = new_leaf_node + .label + .try_into() + .map_err(|_| SynthesisError::Unsatisfiable)?; + + // Enforce: existing_leaf_label < new_leaf_label < existing_leaf_next + LessThanCircuit::new(existing_leaf_label, new_leaf_label) + .synthesize(cs) + .expect("Failed to synthesize existing_leaf_label < new_leaf_label"); + LessThanCircuit::new(new_leaf_label, existing_leaf_next) + .synthesize(cs) + .expect("Failed to synthesize new_leaf_label < existing_leaf_next"); + + let allocated_pre_insertion_root = + cs.alloc(|| "pre_insertion_root", || Ok(pre_insertion_root))?; + + let recalculated_root = + recalculate_hash_as_scalar(insertion_path).map_err(|_| SynthesisError::Unsatisfiable)?; + + let allocated_recalculated_root = cs.alloc( + || "recalculated_pre_insertion_root", + || Ok(recalculated_root), + )?; + + // Enforce that the provided pre-insertion root matches the recalculated root. + // This ensures that the ordered structure of the tree is maintained in the path. + // (allocated_pre_insertion_root) * (1) = allocated_recalculated_root + cs.enforce( + || "pre_insertion_root_verification", + |lc| lc + allocated_pre_insertion_root, + |lc| lc + CS::one(), + |lc| lc + allocated_recalculated_root, + ); + + Ok(()) +} diff --git a/crates/groth16/src/merkle_update.rs b/crates/groth16/src/merkle_update.rs new file mode 100644 index 00000000..8d880d7f --- /dev/null +++ b/crates/groth16/src/merkle_update.rs @@ -0,0 +1,121 @@ +use crate::utils::{create_and_verify_snark, recalculate_hash_as_scalar, unpack_and_process}; +use crate::ProofVariantCircuit; +use anyhow::Result; +use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; +use bls12_381::{Bls12, Scalar}; +use indexed_merkle_tree::{node::Node, tree::UpdateProof}; +use prism_errors::PrismError; + +#[derive(Clone)] +pub struct UpdateMerkleProofCircuit { + pub old_root: Scalar, + pub old_path: Vec, + pub updated_root: Scalar, + pub updated_path: Vec, +} + +impl UpdateMerkleProofCircuit { + pub fn new(proof: &UpdateProof) -> Result { + let (old_root, old_path) = unpack_and_process(&proof.old_proof)?; + let (updated_root, updated_path) = unpack_and_process(&proof.new_proof)?; + + // if old_root.is_none() + // || old_path.is_none() + // || updated_root.is_none() + // || updated_path.is_none() + // { + // return Err(GeneralError::MissingArgumentError); + // } + + // // TODO: are there cases where MissingArgumentError isnt the right type? + + // let old_root = + // hash_to_scalar(&old_root.ok_or(GeneralError::MissingArgumentError)?.as_str())?; + // let updated_root = hash_to_scalar( + // &updated_root + // .ok_or(GeneralError::MissingArgumentError)? 
+ // .as_str(), + // )?; + + // let old_path = old_path.ok_or(GeneralError::MissingArgumentError)?; + // let updated_path = updated_path.ok_or(GeneralError::MissingArgumentError)?; + + Ok(UpdateMerkleProofCircuit { + old_root, + old_path: old_path.clone(), + updated_root, + updated_path: updated_path.clone(), + }) + } + + pub fn create_and_verify_snark( + &self, + ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { + let scalars: Vec = vec![self.old_root, self.updated_root]; + + create_and_verify_snark(ProofVariantCircuit::Update(self.clone()), scalars) + } +} + +impl Circuit for UpdateMerkleProofCircuit { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + match prove_update( + cs, + self.old_root, + &self.old_path, + self.updated_root, + &self.updated_path, + ) { + Ok(_) => Ok(()), + Err(_) => Err(SynthesisError::Unsatisfiable), + } + } +} + +pub(crate) fn prove_update>( + cs: &mut CS, + old_root: Scalar, + old_path: &[Node], + new_root: Scalar, + new_path: &[Node], +) -> Result { + let root_with_old_pointer = + cs.alloc(|| "first update root with old pointer", || Ok(old_root))?; + let root_with_new_pointer = + cs.alloc(|| "first update root with new pointer", || Ok(new_root))?; + + // update the root hash for old and new path + let recalculated_root_with_old_pointer = + recalculate_hash_as_scalar(old_path).map_err(|_| SynthesisError::Unsatisfiable)?; + let recalculated_root_with_new_pointer = + recalculate_hash_as_scalar(new_path).map_err(|_| SynthesisError::Unsatisfiable)?; + + let allocated_recalculated_root_with_old_pointer = cs.alloc( + || "recalculated first update proof old root", + || Ok(recalculated_root_with_old_pointer), + )?; + let allocated_recalculated_root_with_new_pointer = cs.alloc( + || "recalculated first update proof new root", + || Ok(recalculated_root_with_new_pointer), + )?; + + // Check if the resulting hash is the root hash of the old tree + // allocated_recalculated_root_with_old_pointer * (1) = root_with_old_pointer + cs.enforce( + || "first update old root equality", + |lc| lc + allocated_recalculated_root_with_old_pointer, + |lc| lc + CS::one(), + |lc| lc + root_with_old_pointer, + ); + + // Check that the resulting hash is the root hash of the new tree. 
+ // allocated_recalculated_root_with_new_pointer * (1) = root_with_new_pointer + cs.enforce( + || "first update new root equality", + |lc| lc + allocated_recalculated_root_with_new_pointer, + |lc| lc + CS::one(), + |lc| lc + root_with_new_pointer, + ); + + Ok(recalculated_root_with_new_pointer) +} diff --git a/crates/groth16/src/utils.rs b/crates/groth16/src/utils.rs new file mode 100644 index 00000000..f4147840 --- /dev/null +++ b/crates/groth16/src/utils.rs @@ -0,0 +1,90 @@ +use crate::ProofVariantCircuit; +use anyhow::{anyhow, Result}; +use bellman::groth16::{self, VerifyingKey}; +use bls12_381::{Bls12, Scalar}; +use indexed_merkle_tree::{node::Node, sha256_mod, tree::MerkleProof, Hash}; +use prism_errors::{GeneralError, PrismError, ProofError}; +use rand::rngs::OsRng; + +pub fn create_and_verify_snark( + circuit: ProofVariantCircuit, + scalars: Vec, +) -> Result<(groth16::Proof, VerifyingKey)> { + let rng = &mut OsRng; + + trace!("creating parameters with BLS12-381 pairing-friendly elliptic curve construction...."); + let params = + groth16::generate_random_parameters::(circuit.clone(), rng).map_err(|e| { + PrismError::Proof(ProofError::ProofUnpackError(format!( + "generating random params: {}", + e + ))) + })?; + + trace!("creating proof for zkSNARK..."); + let proof = groth16::create_random_proof(circuit, ¶ms, rng) + .map_err(|e| PrismError::Proof(ProofError::GenerationError(e.to_string())))?; + + trace!("preparing verifying key for zkSNARK..."); + let pvk = groth16::prepare_verifying_key(¶ms.vk); + + groth16::verify_proof(&pvk, &proof, &scalars) + .map_err(|e| PrismError::Proof(ProofError::VerificationError(e.to_string())))?; + + Ok((proof, params.vk)) +} + +pub fn unpack_and_process(proof: &MerkleProof) -> Result<(Scalar, &Vec)> { + if !proof.path.is_empty() { + let root: Scalar = proof.root_hash.try_into()?; + Ok((root, &proof.path)) + } else { + Err(anyhow!(ProofError::ProofUnpackError(format!( + "proof path is empty for root hash {}", + proof.root_hash + )))) + } +} + +pub fn validate_epoch( + previous_commitment: &Hash, + current_commitment: &Hash, + proof: groth16::Proof, + verifying_key: VerifyingKey, +) -> Result, PrismError> { + trace!("validate_epoch: preparing verifying key for zkSNARK"); + let pvk = groth16::prepare_verifying_key(&verifying_key); + + let scalars: Result, _> = vec![ + (*previous_commitment).try_into(), + (*current_commitment).try_into(), + ] + .into_iter() + .collect(); + + let scalars = scalars.map_err(|e| { + PrismError::General(GeneralError::ParsingError(format!( + "unable to parse public input parameters: {}", + e + ))) + })?; + + trace!("validate_epoch: verifying zkSNARK proof..."); + groth16::verify_proof(&pvk, &proof, &scalars) + .map_err(|e| PrismError::Proof(ProofError::VerificationError(e.to_string())))?; + + Ok(proof) +} + +pub fn recalculate_hash_as_scalar(path: &[Node]) -> Result { + let mut current_hash = path[0].get_hash(); + for node in path.iter().skip(1) { + let combined = if node.is_left_sibling() { + [node.get_hash().as_ref(), current_hash.as_ref()].concat() + } else { + [current_hash.as_ref(), node.get_hash().as_ref()].concat() + }; + current_hash = sha256_mod(&combined); + } + current_hash.try_into() +} diff --git a/crates/nova/src/utils.rs b/crates/nova/src/utils.rs index cf708561..dc4a2e17 100644 --- a/crates/nova/src/utils.rs +++ b/crates/nova/src/utils.rs @@ -54,11 +54,6 @@ impl Digest { } } -pub struct Hash { - hash: indexed_merkle_tree::Hash, - _p: PhantomData, -} - pub fn next_rom_index_and_pc>( cs: &mut CS, rom_index: 
&AllocatedNum, diff --git a/crates/prism/Cargo.toml b/crates/prism/Cargo.toml index a5e82702..088b4a95 100644 --- a/crates/prism/Cargo.toml +++ b/crates/prism/Cargo.toml @@ -44,6 +44,7 @@ sha2 = { workspace = true } auto_impl = { workspace = true } prism-common = { workspace = true } prism-errors = { workspace = true } +prism-groth16 = { workspace = true } [patch.crates-io] sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } diff --git a/crates/prism/src/da/mod.rs b/crates/prism/src/da/mod.rs index ebee226d..2a279fc7 100644 --- a/crates/prism/src/da/mod.rs +++ b/crates/prism/src/da/mod.rs @@ -1,10 +1,10 @@ use crate::utils::SignedContent; -use prism_errors::GeneralError; use anyhow::Result; use async_trait::async_trait; use borsh::{BorshDeserialize, BorshSerialize}; use ed25519::Signature; use prism_common::{operation::Operation, tree::Digest}; +use prism_errors::GeneralError; use std::{self, str::FromStr}; pub mod celestia; diff --git a/crates/prism/src/node_types/lightclient.rs b/crates/prism/src/node_types/lightclient.rs index e7b0117a..56150af7 100644 --- a/crates/prism/src/node_types/lightclient.rs +++ b/crates/prism/src/node_types/lightclient.rs @@ -1,15 +1,11 @@ use crate::cfg::CelestiaConfig; -use prism_errors::{DataAvailabilityError, GeneralError}; use anyhow::{Context, Result}; use async_trait::async_trait; +use prism_errors::{DataAvailabilityError, GeneralError}; use std::{self, sync::Arc, time::Duration}; use tokio::{task::spawn, time::interval}; -use crate::{ - da::DataAvailabilityLayer, - node_types::NodeType, - utils::{verify_signature}, -}; +use crate::{da::DataAvailabilityLayer, node_types::NodeType, utils::verify_signature}; pub struct LightClient { pub da: Arc, @@ -113,23 +109,20 @@ impl LightClient { } } - /* - TODO: validation of the epoch proof - - match validate_epoch( - prev_commitment, - current_commitment, - proof, - verifying_key, - ) { - Ok(_) => { - info!( - "zkSNARK for epoch {} was validated successfully", - epoch_json.height - ) - } - Err(err) => panic!("failed to validate epoch: {:?}", err), - } */ + // match validate_epoch( + // prev_commitment, + // current_commitment, + // proof, + // verifying_key, + // ) { + // Ok(_) => { + // info!( + // "zkSNARK for epoch {} was validated successfully", + // epoch_json.height + // ) + // } + // Err(err) => panic!("failed to validate epoch: {:?}", err), + // } } } Err(e) => { From 6eea6098a38bf5f95c27f0f4234617db07abb0df Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 11:43:27 +0200 Subject: [PATCH 25/33] adding sp1 crate --- Cargo.lock | 74 ++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 10 ++++++ crates/sp1/Cargo.toml | 11 +++++++ crates/sp1/src/main.rs | 34 +++++++++++++++++++ rust-toolchain | 3 ++ 5 files changed, 132 insertions(+) create mode 100644 crates/sp1/Cargo.toml create mode 100644 crates/sp1/src/main.rs create mode 100644 rust-toolchain diff --git a/Cargo.lock b/Cargo.lock index 659b2e9c..dcde70f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3291,6 +3291,14 @@ dependencies = [ "thiserror", ] +[[package]] +name = "prism-sp1" +version = "0.1.0" +dependencies = [ + "prism-common", + "sp1-zkvm", +] + [[package]] name = "proc-macro-crate" version = "1.3.1" @@ -3844,6 +3852,30 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "scale-info" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" 
+dependencies = [ + "cfg-if", + "derive_more", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" +dependencies = [ + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "scc" version = "2.1.7" @@ -4152,6 +4184,16 @@ version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +[[package]] +name = "snowbridge-amcl" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460a9ed63cdf03c1b9847e8a12a5f5ba19c4efd5869e4a737e05be25d7c427e5" +dependencies = [ + "parity-scale-codec", + "scale-info", +] + [[package]] name = "socket2" version = "0.4.10" @@ -4187,6 +4229,38 @@ dependencies = [ "sha-1", ] +[[package]] +name = "sp1-lib" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bea7811abd2d3a991007fcb284f41152840b8388c171288d0c52c6793956609c" +dependencies = [ + "anyhow", + "bincode", + "cfg-if", + "hex", + "serde", + "snowbridge-amcl", +] + +[[package]] +name = "sp1-zkvm" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a777787c41fffb1ce1e74229f480223ce8d0ae66763aaac689cec737a19663e" +dependencies = [ + "bincode", + "cfg-if", + "getrandom", + "lazy_static", + "libm", + "once_cell", + "rand", + "serde", + "sha2 0.10.8", + "sp1-lib", +] + [[package]] name = "spin" version = "0.9.8" diff --git a/Cargo.toml b/Cargo.toml index 8a59ae7e..87d82158 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,14 @@ members = [ "crates/nova", "crates/groth16", "crates/errors", + "crates/sp1", +] +default-members = [ + "crates/prism", + "crates/common", + "crates/nova", + "crates/groth16", + "crates/errors", ] resolver = "2" @@ -68,11 +76,13 @@ sha2 = "0.10.8" auto_impl = "1.2.0" bincode = "1.3.3" blake2 = "0.10.6" +sp1-zkvm = { version = "1.2.0" } prism-common = { path = "crates/common" } prism-nova = { path = "crates/nova" } prism-errors = { path = "crates/errors" } prism-main = { path = "crates/prism" } prism-groth16 = { path = "crates/groth16" } +sp1-helper = "1.2.0" [patch.crates-io] sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } diff --git a/crates/sp1/Cargo.toml b/crates/sp1/Cargo.toml new file mode 100644 index 00000000..593f4140 --- /dev/null +++ b/crates/sp1/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "prism-sp1" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +prism-common = { workspace = true } +sp1-zkvm = { workspace = true } diff --git a/crates/sp1/src/main.rs b/crates/sp1/src/main.rs new file mode 100644 index 00000000..29d985c0 --- /dev/null +++ b/crates/sp1/src/main.rs @@ -0,0 +1,34 @@ +#![no_main] +sp1_zkvm::entrypoint!(main); + +use prism_common::tree::{Batch, Digest, Proof}; + +pub fn main() { + println!("cycle-tracker-start: setup"); + let batch = sp1_zkvm::io::read::(); + let mut current = batch.prev_root; + println!("cycle-tracker-end: setup"); + + println!("cycle-tracker-start: proof-iteration"); + for proof in batch.proofs.iter() { + match proof { + Proof::Update(p) => { + assert_eq!(current, Digest::new(p.old_root.into())); 
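+                // The running root must match this proof's stated old root, so the
+                // batch of proofs chains into a single final committed root.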
+ println!("cycle-tracker-start: update"); + assert!(p.verify().is_ok()); + println!("cycle-tracker-end: update"); + current = Digest::new(p.new_root.into()); + } + Proof::Insert(p) => { + assert_eq!(current, p.non_membership_proof.root); + println!("cycle-tracker-start: insert"); + assert!(p.verify().is_ok()); + println!("cycle-tracker-end: insert"); + current = p.new_root; + } + } + } + println!("cycle-tracker-end: proof-iteration"); + + sp1_zkvm::io::commit_slice(¤t.to_bytes()); +} diff --git a/rust-toolchain b/rust-toolchain new file mode 100644 index 00000000..b3524b52 --- /dev/null +++ b/rust-toolchain @@ -0,0 +1,3 @@ +[toolchain] +channel = "1.79.0" +components = ["llvm-tools", "rustc-dev"] From 7ec533e119625c5e6109fe4c0917d1f7a10890cb Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 13:09:15 +0200 Subject: [PATCH 26/33] trimming deps --- Cargo.lock | 63 --------------------------------------- Cargo.toml | 8 ++--- crates/common/Cargo.toml | 6 ---- crates/errors/Cargo.toml | 44 --------------------------- crates/groth16/Cargo.toml | 33 +------------------- crates/nova/Cargo.toml | 9 ++---- crates/prism/Cargo.toml | 11 +------ 7 files changed, 7 insertions(+), 167 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dcde70f8..0e8e1257 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3133,43 +3133,10 @@ name = "prism-errors" version = "0.1.0" dependencies = [ "anyhow", - "async-trait", - "auto_impl", - "axum", - "base64 0.22.1", - "bellman", - "bls12_381", - "borsh", - "celestia-rpc", - "celestia-types", - "clap", - "config", - "criterion", - "dirs 5.0.1", - "dotenvy", "ed25519", "ed25519-dalek", - "ff", - "hex", - "indexed-merkle-tree", - "jmt", - "keystore-rs", - "log", - "mockall", - "pretty_env_logger", - "prism-common", - "rand", - "redis", - "serde", - "serde_json", - "serial_test", "sha2 0.10.8", "thiserror", - "tokio", - "toml", - "tower-http", - "utoipa", - "utoipa-swagger-ui", ] [[package]] @@ -3177,44 +3144,24 @@ name = "prism-groth16" version = "0.1.0" dependencies = [ "anyhow", - "async-trait", - "auto_impl", - "axum", - "base64 0.22.1", "bellman", "bls12_381", "borsh", - "celestia-rpc", "celestia-types", - "clap", - "config", - "criterion", - "dirs 5.0.1", - "dotenvy", "ed25519", "ed25519-dalek", "ff", "hex", "indexed-merkle-tree", "jmt", - "keystore-rs", "log", - "mockall", - "pretty_env_logger", "prism-common", "prism-errors", "rand", - "redis", "serde", "serde_json", - "serial_test", "sha2 0.10.8", "thiserror", - "tokio", - "toml", - "tower-http", - "utoipa", - "utoipa-swagger-ui", ] [[package]] @@ -5253,13 +5200,3 @@ dependencies = [ "crossbeam-utils", "flate2", ] - -[[patch.unused]] -name = "sha2" -version = "0.10.6" -source = "git+https://github.com/sp1-patches/RustCrypto-hashes?branch=patch-sha2-v0.10.6#e5f8b7eaaa9801503bd998932a52b65848eee234" - -[[patch.unused]] -name = "sha2" -version = "0.9.8" -source = "git+https://github.com/sp1-patches/RustCrypto-hashes?branch=patch-sha2-v0.9.8#afdbfb09c325f8a69c01d540ec9a261e3637725d" diff --git a/Cargo.toml b/Cargo.toml index 87d82158..8b595d21 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -85,13 +85,11 @@ prism-groth16 = { path = "crates/groth16" } sp1-helper = "1.2.0" [patch.crates-io] -sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } -sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } sha2-v0-10-8 = { git = 
"https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } -[workspace.dev-dependencies] -serial_test = "3.1.1" -criterion = "0.5.1" +# [workspace.dev-dependencies] +# serial_test = "3.1.1" +# criterion = "0.5.1" # [[bench]] # name = "zk_benchmarks" diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index bc274c96..b1d85b90 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -16,9 +16,3 @@ hex.workspace = true sha2.workspace = true blake2.workspace = true celestia-types.workspace = true - -[patch.crates-io] -sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } -blake2 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "blake2", branch = "master" } -sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } -sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } diff --git a/crates/errors/Cargo.toml b/crates/errors/Cargo.toml index 5a2b2450..cdd976a5 100644 --- a/crates/errors/Cargo.toml +++ b/crates/errors/Cargo.toml @@ -7,52 +7,8 @@ homepage.workspace = true repository.workspace = true [dependencies] -axum = { workspace = true } -borsh = { workspace = true } -tower-http = { workspace = true } -utoipa = { workspace = true } -utoipa-swagger-ui = { workspace = true } -async-trait = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -redis = { workspace = true } ed25519-dalek = { workspace = true } ed25519 = { workspace = true } -base64 = { workspace = true } -tokio = { workspace = true } -bellman = { workspace = true } -bls12_381 = { workspace = true } -rand = { workspace = true } -hex = { workspace = true } -ff = { workspace = true } -log = { workspace = true } -pretty_env_logger = { workspace = true } -clap = { workspace = true } -config = { workspace = true } thiserror = { workspace = true } -indexed-merkle-tree = { workspace = true } -dotenvy = { workspace = true } -celestia-rpc = { workspace = true } -celestia-types = { workspace = true } -mockall = { workspace = true } -keystore-rs = { workspace = true } -toml = { workspace = true } -dirs = { workspace = true } anyhow = { workspace = true } -jmt = { workspace = true, path = "../jmt", features = ["mocks"] } sha2 = { workspace = true } -auto_impl = { workspace = true } -prism-common = { workspace = true } - -[patch.crates-io] -sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } -sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } -sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } - -[dev-dependencies] -serial_test = "3.1.1" -criterion = "0.5.1" - -# [[bench]] -# name = "zk_benchmarks" -# harness = false diff --git a/crates/groth16/Cargo.toml b/crates/groth16/Cargo.toml index 373bb139..c4f7fb39 100644 --- a/crates/groth16/Cargo.toml +++ b/crates/groth16/Cargo.toml @@ -7,53 +7,22 @@ homepage.workspace = true repository.workspace = true [dependencies] -axum = { workspace = true } borsh = { workspace = true } -tower-http = { workspace = true } -utoipa = { workspace = true } -utoipa-swagger-ui = { workspace = true } -async-trait = { workspace = true } serde = { workspace = true } serde_json = { 
workspace = true } -redis = { workspace = true } ed25519-dalek = { workspace = true } ed25519 = { workspace = true } -base64 = { workspace = true } -tokio = { workspace = true } bellman = { workspace = true } bls12_381 = { workspace = true } rand = { workspace = true } hex = { workspace = true } ff = { workspace = true } log = { workspace = true } -pretty_env_logger = { workspace = true } -clap = { workspace = true } -config = { workspace = true } thiserror = { workspace = true } indexed-merkle-tree = { workspace = true } -dotenvy = { workspace = true } -celestia-rpc = { workspace = true } celestia-types = { workspace = true } -mockall = { workspace = true } -keystore-rs = { workspace = true } -toml = { workspace = true } -dirs = { workspace = true } anyhow = { workspace = true } -jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +jmt = { workspace = true } sha2 = { workspace = true } -auto_impl = { workspace = true } prism-common = { workspace = true } prism-errors = { workspace = true } - -[patch.crates-io] -sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } -sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } -sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } - -[dev-dependencies] -serial_test = "3.1.1" -criterion = "0.5.1" - -# [[bench]] -# name = "zk_benchmarks" -# harness = false diff --git a/crates/nova/Cargo.toml b/crates/nova/Cargo.toml index 272da1de..1483105b 100644 --- a/crates/nova/Cargo.toml +++ b/crates/nova/Cargo.toml @@ -21,16 +21,11 @@ thiserror = { workspace = true } indexed-merkle-tree = { workspace = true } celestia-types = { workspace = true } anyhow = { workspace = true } -jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +jmt = { workspace = true } bellpepper-core = { workspace = true } bellpepper = { workspace = true } -arecibo = { workspace = true, git = "https://github.com/deltadevsde/arecibo" } +arecibo = { workspace = true } itertools = { workspace = true } sha2 = { workspace = true } bincode = { workspace = true } prism-common = { workspace = true } - -[patch.crates-io] -sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } -sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } -sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } diff --git a/crates/prism/Cargo.toml b/crates/prism/Cargo.toml index 088b4a95..dbc8d2a5 100644 --- a/crates/prism/Cargo.toml +++ b/crates/prism/Cargo.toml @@ -39,22 +39,13 @@ keystore-rs = { workspace = true } toml = { workspace = true } dirs = { workspace = true } anyhow = { workspace = true } -jmt = { workspace = true, path = "../jmt", features = ["mocks"] } +jmt = { workspace = true } sha2 = { workspace = true } auto_impl = { workspace = true } prism-common = { workspace = true } prism-errors = { workspace = true } prism-groth16 = { workspace = true } -[patch.crates-io] -sha2-v0-9-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.9.8" } -sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.6" } -sha2-v0-10-8 = { git = 
"https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } - [dev-dependencies] serial_test = "3.1.1" criterion = "0.5.1" - -# [[bench]] -# name = "zk_benchmarks" -# harness = false From 1cbc0ba62bf114eded294f2a573acb1dddf8fa8e Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 13:22:33 +0200 Subject: [PATCH 27/33] readmes --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 48010001..91126ea7 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ ## What is Prism? -Prism is a decentralized key transparency protocol, strongly inspired by the paper [Tzialla et. al](https://eprint.iacr.org/2021/1263.pdf), leveraging zkSNARKs to enable trust-minimized verification of E2EE services via WASM light clients. This eliminates the possibility for hidden backdoors in E2EE services through a user-verifiable key management system. It uses transparency dictionaries under the hood, offering a generalized solution for managing a label-value map in environments where the service maintaining the map is not completely trusted. +Prism is a decentralized key transparency protocol, first inspired by the paper [Tzialla et. al](https://eprint.iacr.org/2021/1263.pdf), leveraging zkSNARKs to enable trust-minimized verification of E2EE services via WASM light clients. This eliminates the possibility for hidden backdoors in E2EE services through a user-verifiable key management system. It uses transparency dictionaries under the hood, offering a generalized solution for managing a label-value map in environments where the service maintaining the map is not completely trusted. Prism provides the first key-transparency solution to enable automatic verification of the service provider. This is achieved by providing constant size succinct proofs to WASM light clients over a data availbility layer. The system is designed to be efficient, scalable and secure, making it suitable for a wide range of applications. @@ -23,6 +23,9 @@ The project is still in the early development phase, has not been audited, and i Due to this ongoing development work, changes are still being made that may affect existing functionalities. +## Circuits +We are currently experimenting with various proof systems and have handwritten groth16 and supernova circuits to handle the epoch proofs. We are also experimenting with SP1 as an alternative, which you can find in the `prism-sp1` crate. 
+ ## Installation ### Prerequisites From cc64352c211724c0dd5a0157216967c8be021000 Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 13:31:11 +0200 Subject: [PATCH 28/33] clippy --- crates/common/src/tree.rs | 12 ++++++------ crates/groth16/src/lib.rs | 4 ++-- crates/groth16/src/merkle_batch.rs | 13 ++++++------- crates/groth16/src/merkle_insertion.rs | 4 ++-- crates/groth16/src/merkle_update.rs | 8 +++++--- crates/nova/src/insert.rs | 2 +- crates/nova/src/update.rs | 2 +- crates/nova/src/utils.rs | 11 +++++++---- crates/prism/src/main.rs | 6 ------ crates/prism/src/node_types/lightclient.rs | 4 ++-- crates/prism/src/node_types/sequencer.rs | 4 ++-- crates/prism/src/storage.rs | 8 +++----- crates/prism/src/utils.rs | 4 +++- 13 files changed, 40 insertions(+), 42 deletions(-) diff --git a/crates/common/src/tree.rs b/crates/common/src/tree.rs index 42e00d35..735a11bd 100644 --- a/crates/common/src/tree.rs +++ b/crates/common/src/tree.rs @@ -56,15 +56,15 @@ impl TryFrom for Scalar { } } -impl Into for Digest { - fn into(self) -> RootHash { - RootHash::from(self.0) +impl From for RootHash { + fn from(val: Digest) -> RootHash { + RootHash::from(val.0) } } -impl Into for RootHash { - fn into(self) -> Digest { - Digest(self.0) +impl From for Digest { + fn from(val: RootHash) -> Digest { + Digest(val.0) } } diff --git a/crates/groth16/src/lib.rs b/crates/groth16/src/lib.rs index 68a5ed33..9dec6d3c 100644 --- a/crates/groth16/src/lib.rs +++ b/crates/groth16/src/lib.rs @@ -22,8 +22,8 @@ pub use merkle_update::UpdateMerkleProofCircuit; #[derive(Clone)] pub enum ProofVariantCircuit { - Update(UpdateMerkleProofCircuit), - Insert(InsertMerkleProofCircuit), + Update(Box), + Insert(Box), Batch(BatchMerkleProofCircuit), } diff --git a/crates/groth16/src/merkle_batch.rs b/crates/groth16/src/merkle_batch.rs index c72d7772..a042d561 100644 --- a/crates/groth16/src/merkle_batch.rs +++ b/crates/groth16/src/merkle_batch.rs @@ -1,7 +1,6 @@ use crate::{ - merkle_insertion::prove_insertion, merkle_update::prove_update, InsertMerkleProofCircuit, - ProofVariantCircuit, UpdateMerkleProofCircuit, - utils::create_and_verify_snark + merkle_insertion::prove_insertion, merkle_update::prove_update, utils::create_and_verify_snark, + InsertMerkleProofCircuit, ProofVariantCircuit, UpdateMerkleProofCircuit, }; use anyhow::Result; use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; @@ -28,14 +27,14 @@ impl BatchMerkleProofCircuit { for proof in proofs { match proof { Proof::Update(update_proof) => { - proof_circuit_array.push(ProofVariantCircuit::Update( + proof_circuit_array.push(ProofVariantCircuit::Update(Box::new( UpdateMerkleProofCircuit::new(&update_proof)?, - )); + ))); } Proof::Insert(insertion_proof) => { - proof_circuit_array.push(ProofVariantCircuit::Insert( + proof_circuit_array.push(ProofVariantCircuit::Insert(Box::new( InsertMerkleProofCircuit::new(&insertion_proof)?, - )); + ))); } } } diff --git a/crates/groth16/src/merkle_insertion.rs b/crates/groth16/src/merkle_insertion.rs index e42ed4ca..cb9b94d9 100644 --- a/crates/groth16/src/merkle_insertion.rs +++ b/crates/groth16/src/merkle_insertion.rs @@ -1,6 +1,6 @@ use crate::{ merkle_update::prove_update, - utils::{recalculate_hash_as_scalar, unpack_and_process, create_and_verify_snark}, + utils::{create_and_verify_snark, recalculate_hash_as_scalar, unpack_and_process}, LessThanCircuit, ProofVariantCircuit, UpdateMerkleProofCircuit, }; use anyhow::Result; @@ -60,7 +60,7 @@ impl InsertMerkleProofCircuit { 
self.new_leaf_activation.updated_root, ]; - create_and_verify_snark(ProofVariantCircuit::Insert(self.clone()), scalars) + create_and_verify_snark(ProofVariantCircuit::Insert(Box::new(self.clone())), scalars) } } diff --git a/crates/groth16/src/merkle_update.rs b/crates/groth16/src/merkle_update.rs index 8d880d7f..cb850b7f 100644 --- a/crates/groth16/src/merkle_update.rs +++ b/crates/groth16/src/merkle_update.rs @@ -1,5 +1,7 @@ -use crate::utils::{create_and_verify_snark, recalculate_hash_as_scalar, unpack_and_process}; -use crate::ProofVariantCircuit; +use crate::{ + utils::{create_and_verify_snark, recalculate_hash_as_scalar, unpack_and_process}, + ProofVariantCircuit, +}; use anyhow::Result; use bellman::{groth16, Circuit, ConstraintSystem, SynthesisError}; use bls12_381::{Bls12, Scalar}; @@ -53,7 +55,7 @@ impl UpdateMerkleProofCircuit { ) -> Result<(groth16::Proof, groth16::VerifyingKey)> { let scalars: Vec = vec![self.old_root, self.updated_root]; - create_and_verify_snark(ProofVariantCircuit::Update(self.clone()), scalars) + create_and_verify_snark(ProofVariantCircuit::Update(Box::new(self.clone())), scalars) } } diff --git a/crates/nova/src/insert.rs b/crates/nova/src/insert.rs index c7364b9d..a69b5d63 100644 --- a/crates/nova/src/insert.rs +++ b/crates/nova/src/insert.rs @@ -91,7 +91,7 @@ impl StepCircuit for InsertCircuit< .leaf() .ok_or(SynthesisError::AssignmentMissing)?; - verify_membership_proof(cs, &self.proof.membership_proof, &new_root_bits, *leaf)?; + verify_membership_proof(cs, &self.proof.membership_proof, new_root_bits, *leaf)?; let mut z_next = vec![new_root]; z_next.push(rom_index_next); diff --git a/crates/nova/src/update.rs b/crates/nova/src/update.rs index 240b9159..894205ea 100644 --- a/crates/nova/src/update.rs +++ b/crates/nova/src/update.rs @@ -86,7 +86,7 @@ where .leaf() .ok_or(SynthesisError::AssignmentMissing)?; - verify_membership_proof(cs, update_proof, &old_root_bits, *leaf)?; + verify_membership_proof(cs, update_proof, old_root_bits, *leaf)?; self.update_proof .verify() diff --git a/crates/nova/src/utils.rs b/crates/nova/src/utils.rs index dc4a2e17..f1eae530 100644 --- a/crates/nova/src/utils.rs +++ b/crates/nova/src/utils.rs @@ -289,7 +289,7 @@ pub fn hash_node>( pub fn verify_membership_proof>( cs: &mut CS, proof: &SparseMerkleProof, - root: &Vec, + root: Vec, leaf: SparseMerkleLeafNode, ) -> Result<(), SynthesisError> { let max_depth = 10; @@ -355,7 +355,8 @@ pub fn verify_membership_proof> Ok(()) } -// Helper function to conditionally swap two vectors of Booleans +/// Helper function to conditionally swap two vectors of Booleans +#[allow(dead_code)] fn conditionally_swap>( cs: &mut CS, a: &[Boolean], @@ -365,7 +366,7 @@ fn conditionally_swap>( let mut left = Vec::with_capacity(a.len()); let mut right = Vec::with_capacity(a.len()); - for (i, (a_bit, b_bit)) in a.iter().zip(b.iter()).enumerate() { + for (a_bit, b_bit) in a.iter().zip(b.iter()) { let (left_bit, right_bit) = { let and1 = Boolean::and(cs.namespace(|| "condition and a"), condition, a_bit)?; let and2 = Boolean::and( @@ -394,7 +395,8 @@ fn conditionally_swap>( Ok((left, right)) } -// Helper function to conditionally select between two vectors of Booleans +/// Helper function to conditionally select between two vectors of Booleans +#[allow(dead_code)] fn conditionally_select_vector>( cs: &mut CS, condition: &Boolean, @@ -421,6 +423,7 @@ fn conditionally_select_vector> Ok(result) } +#[allow(dead_code)] fn boolvec_to_bytes(value: Vec) -> Vec { let bits: Vec = value .iter() diff --git 
a/crates/prism/src/main.rs b/crates/prism/src/main.rs index 14aa3c96..7f080e78 100644 --- a/crates/prism/src/main.rs +++ b/crates/prism/src/main.rs @@ -66,12 +66,6 @@ async fn main() -> std::io::Result<()> { })?, ) } - _ => { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidInput, - "Invalid node type", - )); - } }; node.start() diff --git a/crates/prism/src/node_types/lightclient.rs b/crates/prism/src/node_types/lightclient.rs index 56150af7..fdbe2316 100644 --- a/crates/prism/src/node_types/lightclient.rs +++ b/crates/prism/src/node_types/lightclient.rs @@ -71,8 +71,8 @@ impl LightClient { // todo: verify adjacency to last heights, <- for this we need some sort of storage of epochs for epoch_json in epoch_json_vec { - let prev_commitment = &epoch_json.prev_commitment; - let current_commitment = &epoch_json.current_commitment; + // let prev_commitment = &epoch_json.prev_commitment; + // let current_commitment = &epoch_json.current_commitment; // let proof = match epoch_json.proof.clone().try_into() { // Ok(proof) => proof, diff --git a/crates/prism/src/node_types/sequencer.rs b/crates/prism/src/node_types/sequencer.rs index b2acde0a..56046b7e 100644 --- a/crates/prism/src/node_types/sequencer.rs +++ b/crates/prism/src/node_types/sequencer.rs @@ -368,7 +368,7 @@ impl Sequencer { let previous_hash = current_chain.last().context("Hashchain is empty")?.hash; let new_chain_entry = HashchainEntry::new(operation.clone(), previous_hash); - current_chain.push(new_chain_entry.operation.clone()); + current_chain.push(new_chain_entry.operation.clone())?; // let updated_node = Node::new_leaf( // node.is_left_sibling(), @@ -413,7 +413,7 @@ impl Sequencer { debug!("creating new hashchain for user id {}", id.clone()); let mut chain = Hashchain::new(id.clone()); - chain.create_account(value.into(), source.clone()); + chain.create_account(value.into(), source.clone())?; self.db .update_hashchain(operation, &chain) diff --git a/crates/prism/src/storage.rs b/crates/prism/src/storage.rs index 12b0cdd7..cd820e44 100644 --- a/crates/prism/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -14,15 +14,13 @@ use std::{ time::Duration, }; -use crate::{ - cfg::RedisConfig, -}; -use prism_errors::{DatabaseError, GeneralError, PrismError}; +use crate::cfg::RedisConfig; use prism_common::{ hashchain::{Hashchain, HashchainEntry}, operation::Operation, tree::Digest, }; +use prism_errors::{DatabaseError, GeneralError, PrismError}; // there are different key prefixes for the different tables in the database // app_state:key => app state (just epoch counter for now) @@ -99,7 +97,7 @@ impl TreeReader for RedisConnection { let serialized_key = hex::encode(borsh::to_vec(node_key).unwrap()); let node_data: Option> = con.get(format!("node:{}", serialized_key))?; match node_data { - None => return Ok(None), + None => Ok(None), Some(data) => { let node: Node = borsh::from_slice::(&data).unwrap(); Ok(Some(node)) diff --git a/crates/prism/src/utils.rs b/crates/prism/src/utils.rs index f24fc012..f00dd9a4 100644 --- a/crates/prism/src/utils.rs +++ b/crates/prism/src/utils.rs @@ -1,4 +1,3 @@ -use prism_errors::{GeneralError, PrismError, ProofError}; use anyhow::Result; use base64::{engine::general_purpose::STANDARD as engine, Engine as _}; use bellman::groth16::{self, VerifyingKey}; @@ -7,7 +6,9 @@ use ed25519::Signature; use ed25519_dalek::{Verifier, VerifyingKey as Ed25519VerifyingKey}; use indexed_merkle_tree::tree::Proof; use prism_common::tree::Digest; +use prism_errors::{GeneralError, PrismError, ProofError}; 
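+// NOTE: the helpers below are not called anywhere at this point in the series,
+// so they are marked #[allow(dead_code)] rather than removed.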
+#[allow(dead_code)]
 pub fn parse_json_to_proof(json_str: &str) -> Result<Proof> {
     let proof: Proof = serde_json::from_str(json_str)?;
 
@@ -28,6 +29,7 @@ pub fn decode_public_key(pub_key_str: &String) -> Result<Ed25519VerifyingKey> {
         .map_err(|_| GeneralError::DecodingError("ed25519 verifying key".to_string()).into())
 }
 
+#[allow(dead_code)]
 pub fn validate_epoch(
     previous_commitment: &Digest,
     current_commitment: &Digest,

From e7d657fa49cc8c0b4a852053831d4146467f1164 Mon Sep 17 00:00:00 2001
From: Ryan Quinn Ford
Date: Wed, 11 Sep 2024 13:31:24 +0200
Subject: [PATCH 29/33] readmes and clippy

---
 crates/groth16/README.md | 4 ++++
 crates/nova/README.md    | 8 ++++++++
 crates/sp1/README.md     | 6 ++++++
 3 files changed, 18 insertions(+)
 create mode 100644 crates/groth16/README.md
 create mode 100644 crates/nova/README.md
 create mode 100644 crates/sp1/README.md

diff --git a/crates/groth16/README.md b/crates/groth16/README.md
new file mode 100644
index 00000000..9b91222f
--- /dev/null
+++ b/crates/groth16/README.md
@@ -0,0 +1,4 @@
+# Groth16 Circuits
+
+This crate implements the initial PoC circuits. Because the Batch structure changes for every proof, a new pk/vk is generated for every epoch; this setup was just for experimentation.
+These circuits do not include the Merkle verification on this branch.
diff --git a/crates/nova/README.md b/crates/nova/README.md
new file mode 100644
index 00000000..90d7a6d7
--- /dev/null
+++ b/crates/nova/README.md
@@ -0,0 +1,8 @@
+# SuperNova Circuits
+
+This crate implements jmt-based Update and Insert circuits, which are combined for NIVC using SuperNova recursive SNARKs.
+
+They are not currently in use: after the switch from IMT to JMT, the proof size became variable, so the Update and Insert steps are no longer uniform across a batch, and only the first few prover steps succeed before an InvalidWitnessLength error occurs.
+There is an attempt to alleviate this by padding the JMT proofs to a max depth and using selectors, but it is not complete yet.
+
+Performance is a concern, especially during the Spartan compression, which takes orders of magnitude longer than a simple Groth16 batch.
diff --git a/crates/sp1/README.md b/crates/sp1/README.md
new file mode 100644
index 00000000..94b0b8e0
--- /dev/null
+++ b/crates/sp1/README.md
@@ -0,0 +1,6 @@
+# SP1 Proof
+
+After experimenting with SuperNova, we decided it makes sense to test a zkVM that lets us keep our small, variable-sized JMT Merkle proofs without padding.
+At first glance, the performance seems okay, but we need further benchmarks to confirm.
+
+In the long term, we will likely require a zkVM anyway if we need to prove over Celestia's NMT (for example, that all operations from the last Celestia block were included).

From 9ef3ea5d4896e704748db3b1206429ccad6779c0 Mon Sep 17 00:00:00 2001
From: Ryan Quinn Ford
Date: Wed, 11 Sep 2024 14:06:59 +0200
Subject: [PATCH 30/33] debugging storage.rs for jmt

---
 crates/common/src/hashchain.rs | 2 +-
 crates/prism/src/storage.rs    | 8 +++-----
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/crates/common/src/hashchain.rs b/crates/common/src/hashchain.rs
index f0ea9795..194dd836 100644
--- a/crates/common/src/hashchain.rs
+++ b/crates/common/src/hashchain.rs
@@ -86,7 +86,7 @@ impl Hashchain {
     }
 
     pub fn push(&mut self, operation: Operation) -> Result {
-        if let Operation::CreateAccount { ..
} = operation { + if !self.is_empty() { bail!("Cannot CreateAccount on an already existing hashchain"); } if operation.id() != self.id { diff --git a/crates/prism/src/storage.rs b/crates/prism/src/storage.rs index cd820e44..8ea15bbf 100644 --- a/crates/prism/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -95,7 +95,7 @@ impl TreeReader for RedisConnection { fn get_node_option(&self, node_key: &NodeKey) -> Result> { let mut con = self.lock_connection()?; let serialized_key = hex::encode(borsh::to_vec(node_key).unwrap()); - let node_data: Option> = con.get(format!("node:{}", serialized_key))?; + let node_data: Option> = con.get(dbg!(format!("node:{}", serialized_key)))?; match node_data { None => Ok(None), Some(data) => { @@ -146,12 +146,11 @@ impl TreeWriter for RedisConnection { fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()> { let mut con = self.lock_connection()?; let mut pipe = redis::pipe(); - for (node_key, node) in node_batch.nodes() { + let serialized_key = hex::encode(borsh::to_vec(node_key).unwrap()); let node_data = borsh::to_vec(node)?; - pipe.set(format!("node:{:?}", node_key), node_data); + pipe.set(format!("node:{}", serialized_key), node_data); } - for ((version, key_hash), value) in node_batch.values() { if let Some(v) = value { pipe.zadd(format!("value_history:{:?}", key_hash), v, *version as f64); @@ -163,7 +162,6 @@ impl TreeWriter for RedisConnection { ); } } - pipe.execute(&mut con); Ok(()) } From f4931f93e51a904d9c92ea57aff354dddc8a3e7a Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 16:44:37 +0200 Subject: [PATCH 31/33] fixing tests --- crates/common/src/hashchain.rs | 7 +- crates/common/src/tree.rs | 1 + crates/nova/src/batch.rs | 14 ++-- crates/prism/src/node_types/sequencer.rs | 7 -- crates/prism/src/storage.rs | 90 +++++++++++++++--------- 5 files changed, 66 insertions(+), 53 deletions(-) diff --git a/crates/common/src/hashchain.rs b/crates/common/src/hashchain.rs index 194dd836..9b96c6bb 100644 --- a/crates/common/src/hashchain.rs +++ b/crates/common/src/hashchain.rs @@ -11,8 +11,8 @@ use crate::{ #[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)] pub struct Hashchain { - id: String, - entries: Vec, + pub id: String, + pub entries: Vec, } impl IntoIterator for Hashchain { @@ -86,9 +86,6 @@ impl Hashchain { } pub fn push(&mut self, operation: Operation) -> Result { - if !self.is_empty() { - bail!("Cannot CreateAccount on an already existing hashchain"); - } if operation.id() != self.id { bail!("Operation ID does not match Hashchain ID"); } diff --git a/crates/common/src/tree.rs b/crates/common/src/tree.rs index 735a11bd..f38588e3 100644 --- a/crates/common/src/tree.rs +++ b/crates/common/src/tree.rs @@ -294,6 +294,7 @@ where let serialized_value = Self::serialize_value(&value)?; let old_root = self.get_current_root()?; + println!("key: {:?}", key); let (old_value, non_membership_merkle_proof) = self.jmt.get_with_proof(key, self.epoch)?; let non_membership_proof = NonMembershipProof { diff --git a/crates/nova/src/batch.rs b/crates/nova/src/batch.rs index c31d3bb1..5e64a1e4 100644 --- a/crates/nova/src/batch.rs +++ b/crates/nova/src/batch.rs @@ -205,6 +205,8 @@ mod tests { state.tree.update(key, hc).expect("Update should succeed") } + // ignored because proving in CI is slow + #[ignore] #[test] fn test_recursive_epoch_circuit_proof() { type E1 = PallasEngine; @@ -220,17 +222,17 @@ mod tests { let operations = vec![ ( 0, - EpochCircuit::new_insert(create_random_insert(&mut state, 
&mut rng), 3), + EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 2), ), ( 1, - EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 3), - ), - ( - 0, - EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 3), + EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 2), ), // ( + // 0, + // EpochCircuit::new_insert(create_random_insert(&mut state, &mut rng), 3), + // ), + // ( // 1, // EpochCircuit::new_update(create_random_update(&mut state, &mut rng), 4), // ), diff --git a/crates/prism/src/node_types/sequencer.rs b/crates/prism/src/node_types/sequencer.rs index 56046b7e..542df7c3 100644 --- a/crates/prism/src/node_types/sequencer.rs +++ b/crates/prism/src/node_types/sequencer.rs @@ -370,13 +370,6 @@ impl Sequencer { let new_chain_entry = HashchainEntry::new(operation.clone(), previous_hash); current_chain.push(new_chain_entry.operation.clone())?; - // let updated_node = Node::new_leaf( - // node.is_left_sibling(), - // hashed_id, - // new_chain_entry.hash, - // node.get_next(), - // ); - debug!("updating hashchain for user id {}", id.clone()); let proof = tree.update(KeyHash::with::(hashed_id), current_chain.clone())?; diff --git a/crates/prism/src/storage.rs b/crates/prism/src/storage.rs index 8ea15bbf..f18e6834 100644 --- a/crates/prism/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -1,4 +1,4 @@ -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, Context, Result}; use auto_impl::auto_impl; use jmt::{ storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter}, @@ -94,15 +94,9 @@ impl RedisConnection { impl TreeReader for RedisConnection { fn get_node_option(&self, node_key: &NodeKey) -> Result> { let mut con = self.lock_connection()?; - let serialized_key = hex::encode(borsh::to_vec(node_key).unwrap()); - let node_data: Option> = con.get(dbg!(format!("node:{}", serialized_key)))?; - match node_data { - None => Ok(None), - Some(data) => { - let node: Node = borsh::from_slice::(&data).unwrap(); - Ok(Some(node)) - } - } + let serialized_key = hex::encode(borsh::to_vec(node_key)?); + let node_data: Option> = con.get(format!("node:{}", serialized_key))?; + Ok(node_data.map(|data| borsh::from_slice(&data).unwrap())) } fn get_rightmost_leaf(&self) -> Result> { @@ -112,14 +106,14 @@ impl TreeReader for RedisConnection { for key in keys { let node_data: Vec = con.get(&key)?; - let node: Node = borsh::from_slice::(&node_data)?; - if let Node::Leaf(leaf) = node { - // let node_key = NodeKey::from_str(key.strip_prefix("node:").unwrap())?; - let node_key_bytes = hex::decode(key.strip_prefix("node:").unwrap()).unwrap(); - let node_key = borsh::from_slice::(node_key_bytes.as_ref()).unwrap(); - if rightmost.is_none() || leaf.key_hash() > rightmost.as_ref().unwrap().1.key_hash() + let node: Node = borsh::from_slice(&node_data)?; + if let Node::Leaf(leaf_node) = node { + let node_key_bytes = hex::decode(key.strip_prefix("node:").unwrap())?; + let node_key: NodeKey = borsh::from_slice(&node_key_bytes)?; + if rightmost.is_none() + || leaf_node.key_hash() > rightmost.as_ref().unwrap().1.key_hash() { - rightmost.replace((node_key, leaf)); + rightmost = Some((node_key, leaf_node)); } } } @@ -133,12 +127,19 @@ impl TreeReader for RedisConnection { key_hash: KeyHash, ) -> Result> { let mut con = self.lock_connection()?; - let versions: Vec<(Version, OwnedValue)> = con.zrangebyscore_withscores( - format!("value_history:{:?}", key_hash), - 0, - max_version as f64, - )?; - Ok(versions.last().map(|(_, value)| value.clone())) + let 
value_key = format!("value_history:{}", hex::encode(key_hash.0)); + let values: Vec<(String, f64)> = + con.zrevrangebyscore_withscores(&value_key, max_version as f64, 0f64)?; + + if let Some((encoded_value, _)) = values.first() { + if encoded_value.is_empty() { + Ok(None) + } else { + Ok(Some(hex::decode(encoded_value)?)) + } + } else { + Ok(None) + } } } @@ -146,27 +147,41 @@ impl TreeWriter for RedisConnection { fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()> { let mut con = self.lock_connection()?; let mut pipe = redis::pipe(); + for (node_key, node) in node_batch.nodes() { - let serialized_key = hex::encode(borsh::to_vec(node_key).unwrap()); + let serialized_key = hex::encode(borsh::to_vec(node_key)?); let node_data = borsh::to_vec(node)?; pipe.set(format!("node:{}", serialized_key), node_data); } + for ((version, key_hash), value) in node_batch.values() { - if let Some(v) = value { - pipe.zadd(format!("value_history:{:?}", key_hash), v, *version as f64); - } else { - pipe.zadd( - format!("value_history:{:?}", key_hash), - Vec::::new(), - *version as f64, - ); - } + let value_key = format!("value_history:{}", hex::encode(key_hash.0)); + let encoded_value = value.as_ref().map(hex::encode).unwrap_or_default(); + pipe.zadd(&value_key, encoded_value, *version as f64); } + pipe.execute(&mut con); Ok(()) } } +impl RedisConnection { + pub fn put_leaf(&self, node_key: NodeKey, leaf: LeafNode, value: Vec) -> Result<()> { + let mut con = self.lock_connection()?; + let serialized_key = hex::encode(borsh::to_vec(&node_key)?); + let node_data = borsh::to_vec(&Node::Leaf(leaf.clone()))?; + + con.set_nx::, ()>(format!("node:{}", serialized_key), node_data)?; + + // ensure!(result == Some(true), "Key {:?} already exists", node_key); + + let value_key = format!("value_history:{}", hex::encode(leaf.key_hash().0)); + con.zadd(value_key, hex::encode(&value), node_key.version() as f64)?; + + Ok(()) + } +} + impl Database for RedisConnection { fn get_hashchain(&self, key: &str) -> Result { let mut con = self.lock_connection()?; @@ -174,8 +189,13 @@ impl Database for RedisConnection { .get(format!("main:{}", key)) .map_err(|_| DatabaseError::NotFoundError(format!("hashchain key {}", key)))?; - serde_json::from_str(&value) - .map_err(|e| anyhow!(GeneralError::ParsingError(format!("hashchain: {}", e)))) + let res: Vec = serde_json::from_str(&value) + .map_err(|e| anyhow!(GeneralError::ParsingError(format!("hashchain: {}", e))))?; + + Ok(Hashchain { + id: key.to_string(), + entries: res, + }) } fn get_commitment(&self, epoch: &u64) -> Result { From 300b76d06009acac98ff7a71228d146bf9b82aa1 Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 16:57:20 +0200 Subject: [PATCH 32/33] patches --- Cargo.lock | 1 + Cargo.toml | 12 +++++++----- crates/prism/src/node_types/sequencer.rs | 12 +++++------- crates/prism/src/storage.rs | 17 +++++++---------- 4 files changed, 20 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fc35cf06..3ceb3712 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2203,6 +2203,7 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jmt" version = "0.10.0" +source = "git+https://github.com/deltadevsde/jmt?branch=rehashing-circuit#1cb83bc02171dc1e9b8710f9efc7faaa9fe49f09" dependencies = [ "anyhow", "auto_impl", diff --git a/Cargo.toml b/Cargo.toml index 8b595d21..0eeb2a34 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -65,13 +65,11 @@ keystore-rs = "0.1.0" toml = "0.8.14" dirs = "5.0.1" anyhow = 
"1.0.44" -jmt = { path = "../jmt", features = [ - "mocks", -] } #{ version = "0.10.0", features = ["mocks"] } +jmt = { version = "0.10.0", features = ["mocks"] } bellpepper-core = { version = "0.4.0", default-features = false } bellpepper = "0.4.1" -arecibo = { git = "https://github.com/deltadevsde/arecibo" } -itertools = "0.13.0" # zip_eq +itertools = "0.13.0" # zip_eq +arecibo = "0.2.0" sha2 = "0.10.8" auto_impl = "1.2.0" bincode = "1.3.3" @@ -86,6 +84,10 @@ sp1-helper = "1.2.0" [patch.crates-io] sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" } +arecibo = { git = "https://github.com/deltadevsde/arecibo" } +jmt = { git = "https://github.com/deltadevsde/jmt", branch = "rehashing-circuit", features = [ + "mocks", +] } # [workspace.dev-dependencies] # serial_test = "3.1.1" diff --git a/crates/prism/src/node_types/sequencer.rs b/crates/prism/src/node_types/sequencer.rs index 542df7c3..ffb25185 100644 --- a/crates/prism/src/node_types/sequencer.rs +++ b/crates/prism/src/node_types/sequencer.rs @@ -374,7 +374,7 @@ impl Sequencer { let proof = tree.update(KeyHash::with::(hashed_id), current_chain.clone())?; self.db - .update_hashchain(operation, ¤t_chain) + .set_hashchain(operation, ¤t_chain) .context(format!( "Failed to update hashchain for operation {:?}", operation @@ -408,12 +408,10 @@ impl Sequencer { let mut chain = Hashchain::new(id.clone()); chain.create_account(value.into(), source.clone())?; - self.db - .update_hashchain(operation, &chain) - .context(format!( - "Failed to create hashchain for operation {:?}", - operation - ))?; + self.db.set_hashchain(operation, &chain).context(format!( + "Failed to create hashchain for operation {:?}", + operation + ))?; let mut tree = self.tree.lock().await; let hashed_id = hash(id.as_bytes()); diff --git a/crates/prism/src/storage.rs b/crates/prism/src/storage.rs index f18e6834..0eea0d0e 100644 --- a/crates/prism/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -1,4 +1,4 @@ -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, Result}; use auto_impl::auto_impl; use jmt::{ storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter}, @@ -32,11 +32,8 @@ pub struct RedisConnection { #[auto_impl(&, Box, Arc)] pub trait Database: Send + Sync + TreeReader + TreeWriter { fn get_hashchain(&self, key: &str) -> Result; - fn update_hashchain( - &self, - incoming_operation: &Operation, - value: &[HashchainEntry], - ) -> Result<()>; + fn set_hashchain(&self, incoming_operation: &Operation, value: &[HashchainEntry]) + -> Result<()>; fn get_commitment(&self, epoch: &u64) -> Result; fn set_commitment(&self, epoch: &u64, commitment: &Digest) -> Result<()>; @@ -220,7 +217,7 @@ impl Database for RedisConnection { .map_err(|_| anyhow!(DatabaseError::WriteError(format!("epoch: {}", epoch)))) } - fn update_hashchain( + fn set_hashchain( &self, incoming_operation: &Operation, value: &[HashchainEntry], @@ -313,7 +310,7 @@ mod tests { let chain_entry = create_mock_chain_entry(); redis_connections - .update_hashchain(&incoming_operation, &[chain_entry.clone()]) + .set_hashchain(&incoming_operation, &[chain_entry.clone()]) .unwrap(); let hashchain = redis_connections @@ -338,7 +335,7 @@ mod tests { let chain_entry = create_mock_chain_entry(); redis_connections - .update_hashchain(&incoming_operation, &[chain_entry.clone()]) + .set_hashchain(&incoming_operation, &[chain_entry.clone()]) .unwrap(); let hashchain = redis_connections.get_hashchain("main:missing_test_key"); @@ -406,7 
+403,7 @@ mod tests { let chain_entries: Vec = vec![create_mock_chain_entry()]; - match redis_connections.update_hashchain(&incoming_operation, &chain_entries) { + match redis_connections.set_hashchain(&incoming_operation, &chain_entries) { Ok(_) => (), Err(e) => panic!("Failed to update hashchain: {}", e), } From 91a5e681856808540348117a6f534c440b2c1559 Mon Sep 17 00:00:00 2001 From: Ryan Quinn Ford Date: Wed, 11 Sep 2024 17:05:25 +0200 Subject: [PATCH 33/33] clippy --- crates/prism/src/storage.rs | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/crates/prism/src/storage.rs b/crates/prism/src/storage.rs index 0eea0d0e..c2d21566 100644 --- a/crates/prism/src/storage.rs +++ b/crates/prism/src/storage.rs @@ -162,23 +162,6 @@ impl TreeWriter for RedisConnection { } } -impl RedisConnection { - pub fn put_leaf(&self, node_key: NodeKey, leaf: LeafNode, value: Vec) -> Result<()> { - let mut con = self.lock_connection()?; - let serialized_key = hex::encode(borsh::to_vec(&node_key)?); - let node_data = borsh::to_vec(&Node::Leaf(leaf.clone()))?; - - con.set_nx::, ()>(format!("node:{}", serialized_key), node_data)?; - - // ensure!(result == Some(true), "Key {:?} already exists", node_key); - - let value_key = format!("value_history:{}", hex::encode(leaf.key_hash().0)); - con.zadd(value_key, hex::encode(&value), node_key.version() as f64)?; - - Ok(()) - } -} - impl Database for RedisConnection { fn get_hashchain(&self, key: &str) -> Result { let mut con = self.lock_connection()?;