
Commit fbb8aa9

refactor: redis as JMT backend (temporarily)
1 parent: 7dc7200

7 files changed: +207 additions, -325 deletions


src/common.rs

Lines changed: 23 additions & 10 deletions
@@ -1,11 +1,11 @@
 use anyhow::{bail, Result};
 use borsh::{BorshDeserialize, BorshSerialize};
-use indexed_merkle_tree::{sha256_mod, Hash};
+use indexed_merkle_tree::Hash;
 use jmt::KeyHash;
 use serde::{Deserialize, Serialize};
 use std::fmt::Display;

-use crate::tree::Hasher;
+use crate::tree::{hash, Digest, Hasher};

 #[derive(Clone, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, PartialEq)]
 // An [`Operation`] represents a state transition in the system.
@@ -74,7 +74,20 @@ impl Hashchain {
         }
     }

-    pub fn push(&mut self, operation: Operation) -> Result<Hash> {
+    pub fn create_account(&mut self, value: String, source: AccountSource) -> Result<Digest> {
+        let operation = Operation::CreateAccount {
+            id: self.id.clone(),
+            value,
+            source,
+        };
+        self.push(operation)
+    }
+
+    pub fn get(&self, idx: usize) -> &HashchainEntry {
+        &self.entries[idx]
+    }
+
+    pub fn push(&mut self, operation: Operation) -> Result<Digest> {
         if let Operation::CreateAccount { .. } = operation {
             bail!("Cannot CreateAccount on an already existing hashchain");
         }
@@ -85,7 +98,7 @@ impl Hashchain {
         let previous_hash = self
             .entries
             .last()
-            .map_or(Hash::new([0u8; 32]), |entry| entry.hash);
+            .map_or(Digest::new([0u8; 32]), |entry| entry.hash);

         let entry = HashchainEntry::new(operation, previous_hash);
         self.entries.push(entry.clone());
@@ -94,15 +107,15 @@ impl Hashchain {
     }

     // TODO: Obviously, this needs to be authenticated by an existing key.
-    pub fn add(&mut self, value: String) -> Result<Hash> {
+    pub fn add(&mut self, value: String) -> Result<Digest> {
         let operation = Operation::Add {
             id: self.id.clone(),
             value,
         };
         self.push(operation)
     }

-    pub fn revoke(&mut self, value: String) -> Result<Hash> {
+    pub fn revoke(&mut self, value: String) -> Result<Digest> {
         let operation = Operation::Revoke {
             id: self.id.clone(),
             value,
@@ -123,19 +136,19 @@ impl Hashchain {
 // A [`HashchainEntry`] represents a single entry in an account's hashchain.
 // The value in the leaf of the corresponding account's node in the IMT is the hash of the last node in the hashchain.
 pub struct HashchainEntry {
-    pub hash: Hash,
-    pub previous_hash: Hash,
+    pub hash: Digest,
+    pub previous_hash: Digest,
     pub operation: Operation,
 }

 impl HashchainEntry {
-    pub fn new(operation: Operation, previous_hash: Hash) -> Self {
+    pub fn new(operation: Operation, previous_hash: Digest) -> Self {
         let hash = {
             let mut data = Vec::new();
             data.extend_from_slice(operation.to_string().as_bytes());
             data.extend_from_slice(previous_hash.as_ref());
             // TODO: replace with sha256 after JMT complete
-            sha256_mod(&data)
+            hash(&data)
         };
         Self {
             hash,
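
For orientation, a minimal sketch (not part of the commit) of how the reworked Hashchain API reads after this change. Hashchain::new is assumed from its call site in sequencer.rs further down; module paths are omitted.

// Sketch only: `Hashchain::new(id)` is assumed from its use in sequencer.rs.
fn hashchain_digest_demo() -> anyhow::Result<()> {
    let mut chain = Hashchain::new("user@example.com".to_string());

    // add()/revoke() wrap push() and now return the crate's Digest
    // instead of indexed_merkle_tree::Hash.
    let first: Digest = chain.add("key_a".to_string())?;
    let second: Digest = chain.revoke("key_a".to_string())?;

    // Each entry commits to its predecessor: hash(operation || previous_hash),
    // with the first entry chaining from Digest::new([0u8; 32]).
    assert_eq!(chain.get(1).previous_hash.as_ref(), first.as_ref());
    assert_ne!(first.as_ref(), second.as_ref());

    // push() still rejects CreateAccount on an existing chain, so the new
    // create_account() helper is the intended entry point for the first entry.
    Ok(())
}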

src/da/mod.rs

Lines changed: 5 additions & 4 deletions
@@ -2,6 +2,7 @@ use crate::{
     circuits::{Bls12Proof, VerifyingKey},
     common::Operation,
     error::GeneralError,
+    tree::Digest,
     utils::SignedContent,
 };
 use anyhow::Result;
@@ -18,10 +19,10 @@ pub mod memory;
 #[derive(BorshSerialize, BorshDeserialize, Clone, Debug)]
 pub struct FinalizedEpoch {
     pub height: u64,
-    pub prev_commitment: Hash,
-    pub current_commitment: Hash,
-    pub proof: Bls12Proof,
-    pub verifying_key: VerifyingKey,
+    pub prev_commitment: Digest,
+    pub current_commitment: Digest,
+    // pub proof: Bls12Proof,
+    // pub verifying_key: VerifyingKey,
     pub signature: Option<String>,
 }
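
With proof and verifying_key temporarily commented out, a FinalizedEpoch is reduced to the epoch height, two Digest commitments, and an optional signature. A hedged sketch of building and borsh round-tripping one (borsh::to_vec is the borsh 1.x entry point; older borsh versions expose try_to_vec on the value instead):

// Sketch only; FinalizedEpoch and Digest are the types from this diff.
use borsh::BorshDeserialize;

fn build_epoch(height: u64, prev: Digest, curr: Digest) -> anyhow::Result<Vec<u8>> {
    let epoch = FinalizedEpoch {
        height,
        prev_commitment: prev,
        current_commitment: curr,
        // No proof / verifying_key fields for now, so light clients can only
        // track commitments until the SNARK path is restored.
        signature: None,
    };

    // Round-trip via borsh, which the struct still derives.
    let bytes = borsh::to_vec(&epoch)?;
    let decoded = FinalizedEpoch::try_from_slice(&bytes)?;
    assert_eq!(decoded.height, height);
    Ok(bytes)
}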

src/node_types/lightclient.rs

Lines changed: 7 additions & 7 deletions
@@ -80,13 +80,13 @@ impl LightClient {
             let prev_commitment = &epoch_json.prev_commitment;
             let current_commitment = &epoch_json.current_commitment;

-            let proof = match epoch_json.proof.clone().try_into() {
-                Ok(proof) => proof,
-                Err(e) => {
-                    error!("failed to deserialize proof, skipping a blob at height {}: {:?}", i, e);
-                    continue;
-                }
-            };
+            // let proof = match epoch_json.proof.clone().try_into() {
+            //     Ok(proof) => proof,
+            //     Err(e) => {
+            //         error!("failed to deserialize proof, skipping a blob at height {}: {:?}", i, e);
+            //         continue;
+            //     }
+            // };

             // TODO(@distractedm1nd): i don't know rust yet but this seems like non-idiomatic rust -
             // is there not a Trait that can satisfy these properties for us?
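
The TODO kept in context here asks for a more idiomatic shape than match-and-continue. One hedged option for when the proof field returns (not part of this commit) is let-else; the target proof type is still inferred from how proof is used afterwards, just as in the original match, though let-else cannot bind the Err value for logging.

// Sketch only, as a drop-in for the commented-out block inside the existing loop:
let Ok(proof) = epoch_json.proof.clone().try_into() else {
    error!("failed to deserialize proof, skipping a blob at height {}", i);
    continue;
};
// ... verify `proof` against prev_commitment / current_commitment as before ...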

src/node_types/sequencer.rs

Lines changed: 36 additions & 48 deletions
@@ -1,14 +1,9 @@
+use crate::tree::{hash, Digest, KeyDirectoryTree, Proof, RedisKDTree};
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use ed25519::Signature;
 use ed25519_dalek::{Signer, SigningKey};
-use jmt::{storage::{TreeReader, TreeWriter}, KeyHash};
-// use indexed_merkle_tree::{
-//     node::Node,
-//     tree::{IndexedMerkleTree, Proof},
-//     Hash,
-// };
-use crate::tree::{hash, Digest, KeyDirectoryTree, Proof};
+use jmt::KeyHash;
 use std::{self, str::FromStr, sync::Arc};
 use tokio::{
     sync::{
@@ -24,7 +19,6 @@ use crate::error::DataAvailabilityError;

 use crate::{
     cfg::Config,
-    circuits::BatchMerkleProofCircuit,
     common::{AccountSource, Hashchain, HashchainEntry, Operation},
     consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL},
     da::{DataAvailabilityLayer, FinalizedEpoch},
@@ -34,10 +28,7 @@ use crate::{
     webserver::{OperationInput, WebServer},
 };

-pub struct Sequencer<'a, S>
-where
-    S: 'a + TreeReader + TreeWriter,
-{
+pub struct Sequencer<'a> {
     pub db: Arc<dyn Database>,
     pub da: Arc<dyn DataAvailabilityLayer>,
     pub ws: WebServer,
@@ -52,17 +43,14 @@ where
     // [`pending_operations`] is a buffer for operations that have not yet been
     // posted to the DA layer.
     pending_operations: Arc<Mutex<Vec<Operation>>>,
-    tree: Arc<Mutex<KeyDirectoryTree<'a, S>>>,
+    tree: Arc<Mutex<RedisKDTree<'a>>>,

     epoch_buffer_tx: Arc<Sender<FinalizedEpoch>>,
     epoch_buffer_rx: Arc<Mutex<Receiver<FinalizedEpoch>>>,
 }

 #[async_trait]
-impl<'a, S> NodeType for Sequencer<'a, S>
-where
-    S: 'a + TreeReader + TreeWriter,
-{
+impl<'a> NodeType for Sequencer<'a> {
     async fn start(self: Arc<Self>) -> Result<()> {
         self.da.start().await.context("Failed to start DA layer")?;

@@ -95,13 +83,15 @@ impl Sequencer {

         let start_height = cfg.celestia_config.unwrap_or_default().start_height;

+        let tree = KeyDirectoryTree::new(&db.clone());
+
         Ok(Sequencer {
             db,
             da,
             ws: WebServer::new(ws),
             key,
             start_height,
-            tree: Arc::new(Mutex::new(IndexedMerkleTree::new_with_size(1024).unwrap())),
+            tree,
             pending_operations: Arc::new(Mutex::new(Vec::new())),
             epoch_buffer_tx: Arc::new(tx),
             epoch_buffer_rx: Arc::new(Mutex::new(rx)),
@@ -309,22 +299,22 @@ impl Sequencer {
             .context("Failed to set new epoch")?;
         // add the commitment for the operations ran since the last epoch
         self.db
-            .add_commitment(&epoch, &current_commitment)
+            .set_commitment(&epoch, &current_commitment)
             .context("Failed to add commitment for new epoch")?;

-        let batch_circuit =
-            BatchMerkleProofCircuit::new(&prev_commitment, &current_commitment, proofs)
-                .context("Failed to create BatchMerkleProofCircuit")?;
-        let (proof, verifying_key) = batch_circuit
-            .create_and_verify_snark()
-            .context("Failed to create and verify snark")?;
+        // let batch_circuit =
+        //     BatchMerkleProofCircuit::new(&prev_commitment, &current_commitment, proofs)
+        //         .context("Failed to create BatchMerkleProofCircuit")?;
+        // let (proof, verifying_key) = batch_circuit
+        //     .create_and_verify_snark()
+        //     .context("Failed to create and verify snark")?;

         let epoch_json = FinalizedEpoch {
             height: epoch,
             prev_commitment,
             current_commitment,
-            proof: proof.into(),
-            verifying_key: verifying_key.into(),
+            // proof: proof.into(),
+            // verifying_key: verifying_key.into(),
             signature: None,
         };

@@ -377,25 +367,23 @@ impl Sequencer {
                let new_chain_entry = HashchainEntry::new(operation.clone(), previous_hash);
                current_chain.push(new_chain_entry.clone());

-                let updated_node = Node::new_leaf(
-                    node.is_left_sibling(),
-                    hashed_id,
-                    new_chain_entry.hash,
-                    node.get_next(),
-                );
+                // let updated_node = Node::new_leaf(
+                //     node.is_left_sibling(),
+                //     hashed_id,
+                //     new_chain_entry.hash,
+                //     node.get_next(),
+                // );

                debug!("updating hashchain for user id {}", id.clone());
-                self.tree.insert(KeyHash::with(hashed_id), )
+                let proof = self.tree.update(KeyHash::with(hashed_id), current_chain)?;
                self.db
                    .update_hashchain(operation, &current_chain)
                    .context(format!(
                        "Failed to update hashchain for operation {:?}",
                        operation
                    ))?;

-                tree.update_node(index, updated_node)
-                    .map(Proof::Update)
-                    .context("Failed to update node in tree")
+                proof
            }
            Operation::CreateAccount { id, value, source } => {
                // validation of account source
@@ -420,10 +408,11 @@ impl Sequencer {
                }

                debug!("creating new hashchain for user id {}", id.clone());
-                let new_chain = vec![HashchainEntry::new(operation.clone(), Node::HEAD)];
+                let chain = Hashchain::new(id.clone());
+                chain.create_account(value.into(), *source);

                self.db
-                    .update_hashchain(operation, &new_chain)
+                    .update_hashchain(operation, &chain)
                    .context(format!(
                        "Failed to create hashchain for operation {:?}",
                        operation
@@ -432,11 +421,7 @@ impl Sequencer {
                let mut tree = self.tree.lock().await;
                let hashed_id = hash(id.as_bytes());

-                let mut node =
-                    Node::new_leaf(true, hashed_id, new_chain.first().unwrap().hash, Node::TAIL);
-                tree.insert_node(&mut node)
-                    .map(Proof::Insert)
-                    .context("Failed to insert node into tree")
+                tree.insert(KeyHash::with(hashed_id), chain)
            }
        }
    }
@@ -479,7 +464,7 @@ mod tests {
     }

     // Helper function to create a test Sequencer instance
-    async fn create_test_sequencer() -> Arc<Sequencer> {
+    async fn create_test_sequencer() -> Arc<Sequencer<'static>> {
         let (da_layer, _rx, _brx) = InMemoryDataAvailabilityLayer::new(1);
         let da_layer = Arc::new(da_layer);
         let db = Arc::new(setup_db());
@@ -578,7 +563,7 @@ mod tests {
         assert_ne!(prev_commitment, new_commitment);

         let hashchain = sequencer.db.get_hashchain(id.as_str());
-        let value = hashchain.unwrap().first().unwrap().operation.value();
+        let value = hashchain.unwrap().get(0).operation.value();
         assert_eq!(value, "test");

         teardown_db(&db);
@@ -696,7 +681,7 @@ mod tests {

         let hashchain = sequencer.db.get_hashchain("user@example.com").unwrap();
         assert_eq!(hashchain.len(), 2);
-        assert_eq!(hashchain[1].operation.value(), "new_value");
+        assert_eq!(hashchain.get(1).operation.value(), "new_value");
     }

     #[tokio::test]
@@ -724,7 +709,10 @@ mod tests {

         let hashchain = sequencer.db.get_hashchain("user@example.com").unwrap();
         assert_eq!(hashchain.len(), 2);
-        assert!(matches!(hashchain[1].operation, Operation::Revoke { .. }));
+        assert!(matches!(
+            hashchain.get(1).operation,
+            Operation::Revoke { .. }
+        ));
     }

     #[tokio::test]
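
The net effect in process_operation is that per-leaf IMT surgery (Node::new_leaf, insert_node, update_node) gives way to whole-hashchain writes keyed by the hashed account id. A condensed, hedged sketch of the two call shapes, using only calls visible in this diff; `tree` is the locked RedisKDTree, and `id`, `value`, `source`, `current_chain` and `previous_hash` are the bindings from the surrounding match arms:

// CreateAccount path, condensed: build the chain, then insert it under its key hash.
let mut chain = Hashchain::new(id.clone());
let _ = chain.create_account(value.into(), *source); // result ignored, as in the diff
let insert_proof = tree.insert(KeyHash::with(hash(id.as_bytes())), chain);

// Add/Revoke path, condensed: append the new entry and write the whole chain back,
// obtaining an update proof from the JMT-backed tree.
current_chain.push(HashchainEntry::new(operation.clone(), previous_hash));
let update_proof = tree.update(KeyHash::with(hash(id.as_bytes())), current_chain)?;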
