612 changes: 594 additions & 18 deletions Cargo.lock

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions Cargo.toml
@@ -68,6 +68,16 @@ pyroscope_pprofrs = "0.2.7"
toml = "0.8.14"
dirs = "5.0.1"
anyhow = "1.0.44"
jmt = { path = "../jmt", features = [
"mocks",
] } #{ version = "0.10.0", features = ["mocks"] }
bellpepper-core = { version = "0.4.0", default-features = false }
bellpepper = { version = "0.4.0", default-features = false }
arecibo = { path = "../arecibo" }
itertools = "0.13.0" # zip_eq
sha2 = "0.10.8"
proptest = "1.5.0"
auto_impl = "1.2.0"

[dev-dependencies]
serial_test = "3.1.1"
139 changes: 133 additions & 6 deletions src/common.rs
@@ -1,7 +1,12 @@
use anyhow::{bail, Result};
use borsh::{BorshDeserialize, BorshSerialize};
use indexed_merkle_tree::{sha256_mod, Hash};
use indexed_merkle_tree::Hash;
use jmt::KeyHash;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::ops::{Deref, DerefMut};

use crate::tree::{hash, Digest, Hasher};

#[derive(Clone, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, PartialEq)]
// An [`Operation`] represents a state transition in the system.
@@ -56,22 +61,144 @@ impl Display for Operation {
}
}

#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
#[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)]
pub struct Hashchain {
id: String,
entries: Vec<HashchainEntry>,
}

impl IntoIterator for Hashchain {
type Item = HashchainEntry;
type IntoIter = std::vec::IntoIter<Self::Item>;

fn into_iter(self) -> Self::IntoIter {
self.entries.into_iter()
}
}

impl<'a> IntoIterator for &'a Hashchain {
type Item = &'a HashchainEntry;
type IntoIter = std::slice::Iter<'a, HashchainEntry>;

fn into_iter(self) -> Self::IntoIter {
self.entries.iter()
}
}

impl<'a> IntoIterator for &'a mut Hashchain {
type Item = &'a mut HashchainEntry;
type IntoIter = std::slice::IterMut<'a, HashchainEntry>;

fn into_iter(self) -> Self::IntoIter {
self.entries.iter_mut()
}
}

impl Deref for Hashchain {
type Target = Vec<HashchainEntry>;

fn deref(&self) -> &Self::Target {
&self.entries
}
}

impl DerefMut for Hashchain {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.entries
}
}

impl Hashchain {
pub fn new(id: String) -> Self {
Self {
id,
entries: Vec::new(),
}
}

pub fn iter(&self) -> std::slice::Iter<'_, HashchainEntry> {
self.entries.iter()
}

pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, HashchainEntry> {
self.entries.iter_mut()
}

pub fn create_account(&mut self, value: String, source: AccountSource) -> Result<Digest> {
let operation = Operation::CreateAccount {
id: self.id.clone(),
value,
source,
};
self.push(operation)
}

pub fn get(&self, idx: usize) -> &HashchainEntry {
&self.entries[idx]
}

pub fn push(&mut self, operation: Operation) -> Result<Digest> {
if let Operation::CreateAccount { .. } = operation {
bail!("Cannot CreateAccount on an already existing hashchain");
}
if operation.id() != self.id {
bail!("Operation ID does not match Hashchain ID");
}

let previous_hash = self
.entries
.last()
.map_or(Digest::new([0u8; 32]), |entry| entry.hash);

let entry = HashchainEntry::new(operation, previous_hash);
self.entries.push(entry.clone());

Ok(entry.hash)
}

// TODO: Obviously, this needs to be authenticated by an existing key.
pub fn add(&mut self, value: String) -> Result<Digest> {
let operation = Operation::Add {
id: self.id.clone(),
value,
};
self.push(operation)
}

pub fn revoke(&mut self, value: String) -> Result<Digest> {
let operation = Operation::Revoke {
id: self.id.clone(),
value,
};
self.push(operation)
}

pub fn get_keyhash(&self) -> KeyHash {
KeyHash::with::<Hasher>(self.id.clone())
}

pub fn len(&self) -> usize {
self.entries.len()
}
}

#[derive(Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Debug, PartialEq)]
// A [`HashchainEntry`] represents a single entry in an account's hashchain.
// The value in the leaf of the corresponding account's node in the IMT is the hash of the last node in the hashchain.
pub struct HashchainEntry {
pub hash: Hash,
pub previous_hash: Hash,
pub hash: Digest,
pub previous_hash: Digest,
pub operation: Operation,
}

impl HashchainEntry {
pub fn new(operation: Operation, previous_hash: Hash) -> Self {
pub fn new(operation: Operation, previous_hash: Digest) -> Self {
let hash = {
let mut data = Vec::new();
data.extend_from_slice(operation.to_string().as_bytes());
data.extend_from_slice(previous_hash.as_ref());
sha256_mod(&data)
// TODO: replace with sha256 after JMT complete
hash(&data)
};
Self {
hash,
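For orientation, here is a minimal usage sketch of the new `Hashchain` API (illustrative only, not part of the diff). It is written as if it lived inside this crate and relies only on methods and trait impls visible in the hunk above; the helper name and the id/value strings are hypothetical, and `create_account` is omitted because `AccountSource`'s variants are not shown in this hunk.

```rust
// Illustrative sketch only — not part of this PR.
use anyhow::Result;

use crate::common::Hashchain;

fn hashchain_usage() -> Result<()> {
    let mut chain = Hashchain::new("example_id".to_string());

    // Each push returns the digest of the new entry; entries link via `previous_hash`.
    let first = chain.add("key_1".to_string())?;
    let second = chain.revoke("key_1".to_string())?;

    assert_eq!(chain.len(), 2);
    // From this diff, `Digest` is only known to be `AsRef<[u8]>`, so compare bytes.
    assert!(chain.get(1).previous_hash.as_ref() == first.as_ref());
    assert!(chain.get(1).hash.as_ref() == second.as_ref());

    // The chain's id doubles as its JMT key.
    let _key = chain.get_keyhash();

    // Borrowed iteration comes from the `IntoIterator` impls above.
    for entry in &chain {
        println!("{:?}", entry.operation);
    }

    Ok(())
}
```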
9 changes: 5 additions & 4 deletions src/da/mod.rs
@@ -2,6 +2,7 @@ use crate::{
circuits::{Bls12Proof, VerifyingKey},
common::Operation,
error::GeneralError,
tree::Digest,
utils::SignedContent,
};
use anyhow::Result;
@@ -18,10 +19,10 @@ pub mod memory;
#[derive(BorshSerialize, BorshDeserialize, Clone, Debug)]
pub struct FinalizedEpoch {
pub height: u64,
pub prev_commitment: Hash,
pub current_commitment: Hash,
pub proof: Bls12Proof,
pub verifying_key: VerifyingKey,
pub prev_commitment: Digest,
pub current_commitment: Digest,
// pub proof: Bls12Proof,
// pub verifying_key: VerifyingKey,
pub signature: Option<String>,
}

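The `FinalizedEpoch` posted to the DA layer now carries `Digest` commitments, with the SNARK proof and verifying key commented out for the time being. Below is a minimal construction sketch (illustrative only; the placeholder values are hypothetical, and `Digest::new` is taken from the `common.rs` hunk above).

```rust
// Illustrative sketch only — not part of this PR.
use crate::{da::FinalizedEpoch, tree::Digest};

fn placeholder_epoch() -> FinalizedEpoch {
    FinalizedEpoch {
        height: 1,
        prev_commitment: Digest::new([0u8; 32]),
        current_commitment: Digest::new([0u8; 32]),
        // The epoch signature is still checked by the light client (see lightclient.rs below).
        signature: None,
    }
}
```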
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -5,7 +5,9 @@ pub mod consts;
pub mod da;
pub mod error;
pub mod node_types;
pub mod nova;
pub mod storage;
pub mod tree;
pub mod utils;
pub mod webserver;
#[macro_use]
2 changes: 2 additions & 0 deletions src/main.rs
@@ -5,7 +5,9 @@ pub mod consts;
pub mod da;
pub mod error;
mod node_types;
mod nova;
pub mod storage;
mod tree;
mod utils;
mod webserver;

36 changes: 20 additions & 16 deletions src/node_types/lightclient.rs
@@ -80,23 +80,23 @@ impl LightClient {
let prev_commitment = &epoch_json.prev_commitment;
let current_commitment = &epoch_json.current_commitment;

let proof = match epoch_json.proof.clone().try_into() {
Ok(proof) => proof,
Err(e) => {
error!("failed to deserialize proof, skipping a blob at height {}: {:?}", i, e);
continue;
}
};
// let proof = match epoch_json.proof.clone().try_into() {
// Ok(proof) => proof,
// Err(e) => {
// error!("failed to deserialize proof, skipping a blob at height {}: {:?}", i, e);
// continue;
// }
// };

// TODO(@distractedm1nd): i don't know rust yet but this seems like non-idiomatic rust -
// is there not a Trait that can satisfy these properties for us?
let verifying_key = match epoch_json.verifying_key.clone().try_into() {
Ok(vk) => vk,
Err(e) => {
error!("failed to deserialize verifying key, skipping a blob at height {}: {:?}", i, e);
continue;
}
};
// let verifying_key = match epoch_json.verifying_key.clone().try_into() {
// Ok(vk) => vk,
// Err(e) => {
// error!("failed to deserialize verifying key, skipping a blob at height {}: {:?}", i, e);
// continue;
// }
// };

// if the user does not add a verifying key, we will not verify the signature,
// but only log a warning on startup
@@ -105,7 +105,10 @@
&epoch_json.clone(),
self.verifying_key.clone(),
) {
Ok(_) => trace!("valid signature for epoch {}", epoch_json.height),
Ok(_) => trace!(
"valid signature for epoch {}",
epoch_json.height
),
Err(e) => {
panic!("invalid signature in epoch {}: {:?}", i, e)
}
@@ -136,6 +139,7 @@ impl LightClient {
ticker.tick().await; // only for testing purposes
current_position = target; // Update the current position to the latest target
}
}).await
})
.await
}
}