From aad0d4448f4452bc90303c1764f3de56105c1222 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 13:05:34 -0600 Subject: [PATCH 001/208] Initial commit! --- plonky_block_proof_gen/src/lib.rs | 4 + plonky_block_proof_gen/src/proof_gen.rs | 186 +++++++++++++++++++++ plonky_block_proof_gen/src/proof_types.rs | 149 +++++++++++++++++ plonky_block_proof_gen/src/prover_state.rs | 55 ++++++ plonky_block_proof_gen/src/types.rs | 103 ++++++++++++ 5 files changed, 497 insertions(+) create mode 100644 plonky_block_proof_gen/src/lib.rs create mode 100644 plonky_block_proof_gen/src/proof_gen.rs create mode 100644 plonky_block_proof_gen/src/proof_types.rs create mode 100644 plonky_block_proof_gen/src/prover_state.rs create mode 100644 plonky_block_proof_gen/src/types.rs diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs new file mode 100644 index 000000000..849159860 --- /dev/null +++ b/plonky_block_proof_gen/src/lib.rs @@ -0,0 +1,4 @@ +pub mod proof_gen; +pub mod proof_types; +pub mod prover_state; +pub mod types; diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs new file mode 100644 index 000000000..09488a357 --- /dev/null +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -0,0 +1,186 @@ +use plonky2::util::timing::TimingTree; +use plonky2_evm::{all_stark::AllStark, config::StarkConfig, proof::PublicValues}; + +use crate::{ + proof_types::{ + AggregatableProof, BlockLevelData, GeneratedAggProof, GeneratedBlockProof, + GeneratedTxnProof, ProofBeforeAndAfterDeltas, ProofCommon, TxnProofGenIR, + }, + prover_state::ProverState, + types::PlonkyProofIntern, +}; + +type ProofGenResult = Result; + +pub struct ProofGenError(pub(crate) String); + +impl From for ProofGenError { + fn from(v: String) -> Self { + Self(v) + } +} + +pub fn generate_txn_proof( + p_state: &ProverState, + start_info: TxnProofGenIR, + b_data: BlockLevelData, +) -> ProofGenResult { + let b_height = start_info.b_height; + let txn_idx = start_info.txn_idx; + let deltas = start_info.deltas.clone(); + + let (txn_proof_intern, p_vals) = p_state + .state + .prove_root( + &AllStark::default(), + &StarkConfig::standard_fast_config(), + start_info.into_generation_inputs(b_data), + &mut TimingTree::default(), + ) + .map_err(|err| err.to_string())?; + + let common = ProofCommon { + b_height, + deltas, + roots_before: p_vals.trie_roots_before, + roots_after: p_vals.trie_roots_after, + }; + + Ok(GeneratedTxnProof { + txn_idx, + common, + intern: txn_proof_intern, + }) +} + +pub fn generate_agg_proof( + p_state: &ProverState, + lhs_child: &AggregatableProof, + rhs_child: &AggregatableProof, + b_data: BlockLevelData, +) -> ProofGenResult { + let expanded_agg_proofs = expand_aggregatable_proofs(lhs_child, rhs_child, b_data); + let deltas = expanded_agg_proofs.p_vals.extra_block_data.clone().into(); + + let (agg_proof_intern, p_vals) = p_state + .state + .prove_aggregation( + expanded_agg_proofs.lhs.is_agg, + expanded_agg_proofs.lhs.intern, + expanded_agg_proofs.rhs.is_agg, + expanded_agg_proofs.rhs.intern, + expanded_agg_proofs.p_vals, + ) + .map_err(|err| err.to_string())?; + + let common = ProofCommon { + b_height: lhs_child.b_height(), + deltas, + roots_before: p_vals.trie_roots_before, + roots_after: p_vals.trie_roots_after, + }; + + Ok(GeneratedAggProof { + common, + underlying_txns: lhs_child + .underlying_txns() + .combine(&rhs_child.underlying_txns()), + intern: agg_proof_intern, + }) +} + +struct ExpandedAggregatableProofs<'a> { + p_vals: PublicValues, + lhs: 
ExpandedAggregatableProof<'a>, + rhs: ExpandedAggregatableProof<'a>, +} + +struct ExpandedAggregatableProof<'a> { + intern: &'a PlonkyProofIntern, + is_agg: bool, +} + +fn expand_aggregatable_proofs<'a>( + lhs_child: &'a AggregatableProof, + rhs_child: &'a AggregatableProof, + b_data: BlockLevelData, +) -> ExpandedAggregatableProofs<'a> { + let (expanded_lhs, lhs_common) = expand_aggregatable_proof(lhs_child); + let (expanded_rhs, rhs_common) = expand_aggregatable_proof(rhs_child); + + let txn_idxs = lhs_child + .underlying_txns() + .combine(&rhs_child.underlying_txns()); + let deltas = merge_lhs_and_rhs_deltas(&lhs_common.deltas, &rhs_common.deltas); + let extra_block_data = + deltas.into_extra_block_data(txn_idxs.txn_idxs.start, txn_idxs.txn_idxs.end); + + let p_vals = PublicValues { + trie_roots_before: lhs_common.roots_before.clone(), + trie_roots_after: rhs_common.roots_after.clone(), + block_metadata: b_data.b_meta, + block_hashes: b_data.b_hashes, + extra_block_data, + }; + + ExpandedAggregatableProofs { + p_vals, + lhs: expanded_lhs, + rhs: expanded_rhs, + } +} + +fn merge_lhs_and_rhs_deltas( + lhs: &ProofBeforeAndAfterDeltas, + rhs: &ProofBeforeAndAfterDeltas, +) -> ProofBeforeAndAfterDeltas { + ProofBeforeAndAfterDeltas { + gas_used_before: lhs.gas_used_before, + gas_used_after: rhs.gas_used_after, + block_bloom_before: lhs.block_bloom_before, + block_bloom_after: rhs.block_bloom_after, + } +} + +fn expand_aggregatable_proof(p: &AggregatableProof) -> (ExpandedAggregatableProof, &ProofCommon) { + let (intern, is_agg, common) = match p { + AggregatableProof::Txn(txn_intern) => (&txn_intern.intern, false, &txn_intern.common), + AggregatableProof::Agg(agg_intern) => (&agg_intern.intern, true, &agg_intern.common), + }; + + let expanded = ExpandedAggregatableProof { intern, is_agg }; + + (expanded, common) +} + +pub fn generate_block_proof( + p_state: &ProverState, + prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, + curr_block_agg_proof: &GeneratedAggProof, + b_data: BlockLevelData, +) -> ProofGenResult { + let b_height = curr_block_agg_proof.common.b_height; + let parent_intern = prev_opt_parent_b_proof.map(|p| &p.intern); + + let p_vals = PublicValues { + trie_roots_before: curr_block_agg_proof.common.roots_before.clone(), + trie_roots_after: curr_block_agg_proof.common.roots_after.clone(), + block_metadata: b_data.b_meta, + block_hashes: b_data.b_hashes, + extra_block_data: curr_block_agg_proof + .common + .deltas + .clone() + .into_extra_block_data(0, curr_block_agg_proof.underlying_txns.txn_idxs.end), + }; + + let (b_proof_intern, _) = p_state + .state + .prove_block(parent_intern, &curr_block_agg_proof.intern, p_vals) + .map_err(|err| err.to_string())?; + + Ok(GeneratedBlockProof { + b_height, + intern: b_proof_intern, + }) +} diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs new file mode 100644 index 000000000..54246898f --- /dev/null +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -0,0 +1,149 @@ +use std::{borrow::Borrow, collections::HashMap}; + +use ethereum_types::{H256, U256}; +use plonky2_evm::{ + generation::{GenerationInputs, TrieInputs}, + proof::{BlockHashes, BlockMetadata, ExtraBlockData, TrieRoots}, +}; +use serde::{Deserialize, Serialize}; + +use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; + +/// Data that is specific to a block. 
+#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BlockLevelData { + pub b_meta: BlockMetadata, + pub b_hashes: BlockHashes, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct ProofCommon { + pub b_height: BlockHeight, + pub deltas: ProofBeforeAndAfterDeltas, + pub roots_before: TrieRoots, + pub roots_after: TrieRoots, +} + +/// State required to generate a transaction proof. Sent once per txn. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TxnProofGenIR { + pub signed_txn: Vec, + pub tries: TrieInputs, + pub trie_roots_after: TrieRoots, + pub deltas: ProofBeforeAndAfterDeltas, + + /// Mapping between smart contract code hashes and the contract byte code. + /// All account smart contracts that are invoked by this txn will have an + /// entry present. + pub contract_code: HashMap>, + + pub b_height: BlockHeight, + pub txn_idx: TxnIdx, +} + +impl TxnProofGenIR { + pub fn get_txn_idx(&self) -> TxnIdx { + self.txn_idx + } + + pub(crate) fn into_generation_inputs(self, b_data: BlockLevelData) -> GenerationInputs { + GenerationInputs { + txn_number_before: self.txn_idx.into(), + gas_used_before: self.deltas.gas_used_before, + block_bloom_before: self.deltas.block_bloom_before, + gas_used_after: self.deltas.gas_used_after, + block_bloom_after: self.deltas.block_bloom_after, + signed_txns: vec![self.signed_txn], + tries: self.tries, + trie_roots_after: self.trie_roots_after, + contract_code: self.contract_code, + block_metadata: b_data.b_meta, + block_hashes: b_data.b_hashes, + addresses: Vec::default(), // TODO! + } + } +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub struct ProofBeforeAndAfterDeltas { + pub gas_used_before: U256, + pub gas_used_after: U256, + pub block_bloom_before: [U256; 8], + pub block_bloom_after: [U256; 8], +} + +impl> From for ProofBeforeAndAfterDeltas { + fn from(v: T) -> Self { + let b = v.borrow(); + + Self { + gas_used_before: b.gas_used_before, + gas_used_after: b.gas_used_after, + block_bloom_before: b.block_bloom_before, + block_bloom_after: b.block_bloom_after, + } + } +} + +impl ProofBeforeAndAfterDeltas { + pub fn into_extra_block_data(self, txn_start: TxnIdx, txn_end: TxnIdx) -> ExtraBlockData { + ExtraBlockData { + txn_number_before: txn_start.into(), + txn_number_after: txn_end.into(), + gas_used_before: self.gas_used_before, + gas_used_after: self.gas_used_after, + block_bloom_before: self.block_bloom_before, + block_bloom_after: self.block_bloom_after, + } + } +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct GeneratedTxnProof { + pub txn_idx: TxnIdx, + pub common: ProofCommon, + pub intern: PlonkyProofIntern, +} + +impl GeneratedTxnProof { + pub fn underlying_txns(&self) -> ProofUnderlyingTxns { + (self.txn_idx..=self.txn_idx).into() + } +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct GeneratedAggProof { + pub underlying_txns: ProofUnderlyingTxns, + pub common: ProofCommon, + pub intern: PlonkyProofIntern, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct GeneratedBlockProof { + pub b_height: BlockHeight, + pub intern: PlonkyProofIntern, +} + +/// Sometimes we don't care about the underlying proof type and instead only if +/// we can combine it into an agg proof. 
+#[derive(Clone, Debug, Deserialize, Serialize)] +pub enum AggregatableProof { + Txn(GeneratedTxnProof), + Agg(GeneratedAggProof), +} + +impl AggregatableProof { + pub fn underlying_txns(&self) -> ProofUnderlyingTxns { + match self { + AggregatableProof::Txn(info) => info.underlying_txns(), + AggregatableProof::Agg(info) => info.underlying_txns.clone(), + } + } + + pub fn b_height(&self) -> BlockHeight { + match self { + AggregatableProof::Txn(info) => info.common.b_height, + AggregatableProof::Agg(info) => info.common.b_height, + } + } +} diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs new file mode 100644 index 000000000..ce64828a5 --- /dev/null +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -0,0 +1,55 @@ +use std::ops::Range; + +use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; + +use crate::types::AllRecursiveCircuits; + +pub struct ProverState { + pub(crate) state: AllRecursiveCircuits, +} + +#[derive(Debug)] +pub struct ProverStateBuilder { + arithmetic_circuit_size: Range, + byte_packing_circuit_size: Range, + cpu_circuit_size: Range, + keccak_circuit_size: Range, + keccak_sponge_circuit_size: Range, + logic_circuit_size: Range, + memory_circuit_size: Range, +} + +impl Default for ProverStateBuilder { + fn default() -> Self { + Self { + arithmetic_circuit_size: 9..22, + byte_packing_circuit_size: 9..22, + cpu_circuit_size: 9..22, + keccak_circuit_size: 9..22, + keccak_sponge_circuit_size: 9..22, + logic_circuit_size: 9..22, + memory_circuit_size: 9..22, + } + } +} + +impl ProverStateBuilder { + pub fn build(self) -> ProverState { + // ... Yeah I don't understand the mysterious ranges either :) + let state = AllRecursiveCircuits::new( + &AllStark::default(), + &[ + self.arithmetic_circuit_size, + self.byte_packing_circuit_size, + self.cpu_circuit_size, + self.keccak_circuit_size, + self.keccak_sponge_circuit_size, + self.logic_circuit_size, + self.memory_circuit_size, + ], + &StarkConfig::standard_fast_config(), + ); + + ProverState { state } + } +} diff --git a/plonky_block_proof_gen/src/types.rs b/plonky_block_proof_gen/src/types.rs new file mode 100644 index 000000000..4ed24af1d --- /dev/null +++ b/plonky_block_proof_gen/src/types.rs @@ -0,0 +1,103 @@ +use std::{ + cmp::Ordering, + fmt::{self, Display, Formatter}, + ops::{Range, RangeInclusive}, +}; + +use plonky2::{ + field::goldilocks_field::GoldilocksField, + plonk::{config::PoseidonGoldilocksConfig, proof::ProofWithPublicInputs}, +}; +use serde::{Deserialize, Serialize}; + +pub type BlockHeight = u64; + +pub type TxnIdx = usize; + +pub type PlonkyProofIntern = ProofWithPublicInputs; + +pub type AllRecursiveCircuits = plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits< + GoldilocksField, + PoseidonGoldilocksConfig, + 2, +>; + +#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] +pub struct ProofUnderlyingTxns { + pub txn_idxs: Range, +} + +impl Display for ProofUnderlyingTxns { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self.num_txns() { + 0 => write!(f, "EMPTY_TXN"), + _ => write!(f, "{}-{}", self.txn_idxs.start, self.txn_idxs.end - 1), + } + } +} + +impl ProofUnderlyingTxns { + pub fn combine(&self, other: &Self) -> ProofUnderlyingTxns { + let combined_range = (self.txn_idxs.start.min(other.txn_idxs.start)) + ..(self.txn_idxs.end.max(other.txn_idxs.end)); + + combined_range.into() + } + + pub fn num_txns(&self) -> usize { + self.txn_idxs.end - self.txn_idxs.start + } + + pub fn contains_all_txns_in_block(&self, 
num_txns_in_block: usize) -> bool { + self.num_txns() == num_txns_in_block + } +} + +impl From> for ProofUnderlyingTxns { + fn from(txn_idxs: Range) -> Self { + Self { txn_idxs } + } +} + +impl From> for ProofUnderlyingTxns { + fn from(txn_idxs: RangeInclusive) -> Self { + Self { + txn_idxs: Range { + start: *txn_idxs.start(), + end: *txn_idxs.end() + 1, + }, + } + } +} + +impl From for Range { + fn from(underlying_txns: ProofUnderlyingTxns) -> Self { + underlying_txns.txn_idxs + } +} + +impl Ord for ProofUnderlyingTxns { + /// Compare two txn ranges. + /// + /// Assumes that empty txns (eg. `1..1`) will never be compared. + fn cmp(&self, other: &Self) -> Ordering { + match self == other { + true => Ordering::Equal, + false => match (self.txn_idxs.end - 1).cmp(&other.txn_idxs.start) { + Ordering::Less => Ordering::Less, + Ordering::Greater => Ordering::Greater, + Ordering::Equal => match self.txn_idxs.start.cmp(&(other.txn_idxs.end - 1)) { + Ordering::Less => Ordering::Greater, + Ordering::Equal => Ordering::Equal, + Ordering::Greater => Ordering::Less, + }, + }, + } + } +} + +impl PartialOrd for ProofUnderlyingTxns { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} From 6cec1d4aaa5bf5d409b629f745a849ef4401da58 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 13:13:25 -0600 Subject: [PATCH 002/208] Added `README.md` --- plonky_block_proof_gen/README.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 plonky_block_proof_gen/README.md diff --git a/plonky_block_proof_gen/README.md b/plonky_block_proof_gen/README.md new file mode 100644 index 000000000..2316da068 --- /dev/null +++ b/plonky_block_proof_gen/README.md @@ -0,0 +1,19 @@ +# Plonky Edge block trace parser + +Library for generating proofs from proof IR. + +For the time being, the only library that produces proof IR is currently [plonky-edge-block-trace-parser](https://github.com/mir-protocol/plonky-edge-block-trace-parser). Down the road, the IR will be produced by decoding the proof gen protocol. + +## License + +Licensed under either of + +* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. From 2bc1a15a74dee8dccc8f2d6e8a414336074271ea Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 14:42:03 -0600 Subject: [PATCH 003/208] Added some ad-hoc docs in `README.md` --- plonky_block_proof_gen/README.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/plonky_block_proof_gen/README.md b/plonky_block_proof_gen/README.md index 2316da068..0facaf9c7 100644 --- a/plonky_block_proof_gen/README.md +++ b/plonky_block_proof_gen/README.md @@ -4,6 +4,31 @@ Library for generating proofs from proof IR. For the time being, the only library that produces proof IR is currently [plonky-edge-block-trace-parser](https://github.com/mir-protocol/plonky-edge-block-trace-parser). Down the road, the IR will be produced by decoding the proof gen protocol. 
+# General Usage (Extremely rough, will change) + +In [proof_gen.rs](https://github.com/mir-protocol/plonky-block-proof-gen/blob/main/src/proof_gen.rs), there are three core functions: + +- `generate_txn_proof` +- `generate_agg_proof` +- `generate_block_proof` + +Both libraries are currently targeting the latest [plonky2](https://github.com/mir-protocol/plonky2). One noteworthy piece of data that all proofs need is this: + +```rust +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BlockHashes { + pub prev_hashes: Vec, + pub cur_hash: H256, +} +``` +Note that `prev_hashes` is going to be `256` elements long (!) most of the time. + +`generate_txn_proof` takes in the output from the parser lib (`TxnProofGenIR`) along with some constant block data. + +`generate_agg_proof` takes in the two child proofs (wrapped in `AggregatableProof`` to support txn or agg proofs) & constant block data. + +`generate_block_proof` is a bit less obvious. You give it an agg proof that contains all txns in the entire block, but also pass in an optional previous block proof. The previous block proof is able to be `None` on checkpoint heights. + ## License Licensed under either of From ff75ce7fe97d5d110df5afadbae106cf0cd3b698 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 14:46:48 -0600 Subject: [PATCH 004/208] Added `From` impls for proof types --- plonky_block_proof_gen/src/proof_types.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 54246898f..79da17b23 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -147,3 +147,15 @@ impl AggregatableProof { } } } + +impl From for AggregatableProof { + fn from(v: GeneratedTxnProof) -> Self { + Self::Txn(v) + } +} + +impl From for AggregatableProof { + fn from(v: GeneratedAggProof) -> Self { + Self::Agg(v) + } +} From 12fed3559660b00d0ab37938d80a0993a055407a Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 15:02:52 -0600 Subject: [PATCH 005/208] Added missing methods to builder --- plonky_block_proof_gen/src/prover_state.rs | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index ce64828a5..569d234fe 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -1,5 +1,6 @@ use std::ops::Range; +use paste::paste; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; use crate::types::AllRecursiveCircuits; @@ -33,7 +34,26 @@ impl Default for ProverStateBuilder { } } +macro_rules! define_set_circuit_size_method { + ($name:ident) => { + paste! { + pub fn [](mut self, size: Range) -> Self { + self.[<$name _circuit_size>] = size; + self + } + } + }; +} + impl ProverStateBuilder { + define_set_circuit_size_method!(arithmetic); + define_set_circuit_size_method!(byte_packing); + define_set_circuit_size_method!(cpu); + define_set_circuit_size_method!(keccak); + define_set_circuit_size_method!(keccak_sponge); + define_set_circuit_size_method!(logic); + define_set_circuit_size_method!(memory); + pub fn build(self) -> ProverState { // ... 
Yeah I don't understand the mysterious ranges either :) let state = AllRecursiveCircuits::new( From 639ba3c521dbd3eff875ef8d49f84da36d94215d Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 15:27:30 -0600 Subject: [PATCH 006/208] Added two logs --- plonky_block_proof_gen/src/prover_state.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 569d234fe..7d4902ce9 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -54,7 +54,12 @@ impl ProverStateBuilder { define_set_circuit_size_method!(logic); define_set_circuit_size_method!(memory); + // TODO: Consider adding async version? + /// Instantiate the prover state from the builder. Note that this is a very + /// expensive call! pub fn build(self) -> ProverState { + info!("Initializing Plonky2 aggregation prover state (This may take a while)..."); + // ... Yeah I don't understand the mysterious ranges either :) let state = AllRecursiveCircuits::new( &AllStark::default(), @@ -70,6 +75,8 @@ impl ProverStateBuilder { &StarkConfig::standard_fast_config(), ); + info!("Finished initializing Plonky2 aggregation prover state!"); + ProverState { state } } } From 3948a6f7f3189777de08989eceb4a59c90029547 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 15 Sep 2023 15:28:58 -0600 Subject: [PATCH 007/208] Added some docs --- plonky_block_proof_gen/src/proof_gen.rs | 10 ++++++++++ plonky_block_proof_gen/src/proof_types.rs | 20 +++++++++++++++++--- plonky_block_proof_gen/src/prover_state.rs | 4 ++++ plonky_block_proof_gen/src/types.rs | 2 ++ 4 files changed, 33 insertions(+), 3 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 09488a357..33e5348db 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -12,6 +12,8 @@ use crate::{ type ProofGenResult = Result; +// Plonky2 is still using `anyhow` for proof gen, and since this is a library, +// it's probably best if we at least convert it to a `String`. pub struct ProofGenError(pub(crate) String); impl From for ProofGenError { @@ -20,6 +22,7 @@ impl From for ProofGenError { } } +/// Generate a txn proof from proof IR data. pub fn generate_txn_proof( p_state: &ProverState, start_info: TxnProofGenIR, @@ -53,6 +56,9 @@ pub fn generate_txn_proof( }) } +/// Generate a agg proof from two child proofs. +/// +/// Note that the child proofs may be either txn or agg proofs. pub fn generate_agg_proof( p_state: &ProverState, lhs_child: &AggregatableProof, @@ -153,6 +159,10 @@ fn expand_aggregatable_proof(p: &AggregatableProof) -> (ExpandedAggregatableProo (expanded, common) } +/// Generate a block proof. +/// +/// Note that `prev_opt_parent_b_proof` is able to be `None` on checkpoint +/// heights. pub fn generate_block_proof( p_state: &ProverState, prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 79da17b23..3fcd9b12e 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -9,7 +9,8 @@ use serde::{Deserialize, Serialize}; use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; -/// Data that is specific to a block. +/// Data that is specific to a block and is constant for all txns in a given +/// block. 
#[derive(Clone, Debug, Deserialize, Serialize)] pub struct BlockLevelData { pub b_meta: BlockMetadata, @@ -24,12 +25,21 @@ pub struct ProofCommon { pub roots_after: TrieRoots, } -/// State required to generate a transaction proof. Sent once per txn. +/// An `IR` (Intermediate Representation) for a given txn in a block that we can +/// use to generate a proof for that txn. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct TxnProofGenIR { + /// Signed txn bytecode. pub signed_txn: Vec, + + /// The partial trie states at the start of the txn. pub tries: TrieInputs, + + /// The expected root hashes of all tries (except storage tries) after the + /// txn is executed. pub trie_roots_after: TrieRoots, + + /// Additional info of state that changed before and after the txn executed. pub deltas: ProofBeforeAndAfterDeltas, /// Mapping between smart contract code hashes and the contract byte code. @@ -37,7 +47,10 @@ pub struct TxnProofGenIR { /// entry present. pub contract_code: HashMap>, + /// The height of the block. pub b_height: BlockHeight, + + /// The index of the txn in the block. pub txn_idx: TxnIdx, } @@ -125,7 +138,8 @@ pub struct GeneratedBlockProof { } /// Sometimes we don't care about the underlying proof type and instead only if -/// we can combine it into an agg proof. +/// we can combine it into an agg proof. For these cases, we want to abstract +/// away whether or not the proof was a txn or agg proof. #[derive(Clone, Debug, Deserialize, Serialize)] pub enum AggregatableProof { Txn(GeneratedTxnProof), diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 7d4902ce9..1c4b5ce0a 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -1,14 +1,18 @@ use std::ops::Range; +use log::info; use paste::paste; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; use crate::types::AllRecursiveCircuits; +/// Plonky2 proving state. Note that is is generally going to be massive in +/// terms of memory and has a long spin-up time, pub struct ProverState { pub(crate) state: AllRecursiveCircuits, } +/// Builder for the prover state. #[derive(Debug)] pub struct ProverStateBuilder { arithmetic_circuit_size: Range, diff --git a/plonky_block_proof_gen/src/types.rs b/plonky_block_proof_gen/src/types.rs index 4ed24af1d..60ccc46fd 100644 --- a/plonky_block_proof_gen/src/types.rs +++ b/plonky_block_proof_gen/src/types.rs @@ -22,6 +22,8 @@ pub type AllRecursiveCircuits = plonky2_evm::fixed_recursive_verifier::AllRecurs 2, >; +/// Underlying txns idxs associated with a proof. +/// Note that the range for a single txn for index `n` is `n..n+1`. 
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] pub struct ProofUnderlyingTxns { pub txn_idxs: Range, From 43da2b18c6c07b1a162a0debb101e5fb0201cada Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 18 Sep 2023 11:49:26 -0600 Subject: [PATCH 008/208] Few small fixes --- plonky_block_proof_gen/src/proof_gen.rs | 4 ++-- plonky_block_proof_gen/src/prover_state.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 33e5348db..6f0c68aa6 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -10,11 +10,11 @@ use crate::{ types::PlonkyProofIntern, }; -type ProofGenResult = Result; +pub type ProofGenResult = Result; // Plonky2 is still using `anyhow` for proof gen, and since this is a library, // it's probably best if we at least convert it to a `String`. -pub struct ProofGenError(pub(crate) String); +pub struct ProofGenError(pub String); impl From for ProofGenError { fn from(v: String) -> Self { diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 1c4b5ce0a..0c404447b 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -6,7 +6,7 @@ use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; use crate::types::AllRecursiveCircuits; -/// Plonky2 proving state. Note that is is generally going to be massive in +/// Plonky2 proving state. Note that this is generally going to be massive in /// terms of memory and has a long spin-up time, pub struct ProverState { pub(crate) state: AllRecursiveCircuits, From 09c129fb092bc902e530afc4dc00d2846cadc5ac Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 27 Sep 2023 15:19:48 -0600 Subject: [PATCH 009/208] Initial commit! --- .gitignore | 2 + Cargo.toml | 7 ++ LICENSE-APACHE | 201 +++++++++++++++++++++++++++++++++++++++++++++++++ LICENSE-MIT | 21 ++++++ README.md | 21 ++++++ rustfmt.toml | 3 + src/lib.rs | 0 7 files changed, 255 insertions(+) create mode 100644 .gitignore create mode 100644 Cargo.toml create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100644 rustfmt.toml create mode 100644 src/lib.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..4fffb2f89 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +/Cargo.lock diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..d137582a8 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "proof_protocol_decoder" +authors = ["Polygon Zero "] +version = "0.1.0" +edition = "2021" + +[dependencies] diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 000000000..a0d98f55c --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright 2023 Polygon Zero + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 000000000..4aa5d4cb8 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2023 Polygon Zero + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 000000000..a66ff4d64 --- /dev/null +++ b/README.md @@ -0,0 +1,21 @@ +# Proof Protocol Decoder + +A flexible protocol that clients (eg. full nodes) can use to easily generate block proofs for different chains. + +## Specification + +In progress... + +## License + +Licensed under either of + +* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 000000000..bccbdb6fc --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,3 @@ +unstable_features = true +group_imports = "StdExternalCrate" +wrap_comments = true diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 000000000..e69de29bb From 31cedfa43865e2db94b969bb8050b56690e8f1ba Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 28 Sep 2023 14:14:59 -0600 Subject: [PATCH 010/208] Initial spec for the trace protocol --- Cargo.toml | 2 + src/lib.rs | 1 + src/trace_protocol.rs | 170 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 173 insertions(+) create mode 100644 src/trace_protocol.rs diff --git a/Cargo.toml b/Cargo.toml index d137582a8..de7df7f30 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,3 +5,5 @@ version = "0.1.0" edition = "2021" [dependencies] +ethereum-types = "0.14.1" +plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } diff --git a/src/lib.rs b/src/lib.rs index e69de29bb..faae7a183 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -0,0 +1 @@ +pub mod trace_protocol; diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs new file mode 100644 index 000000000..80d4ed5a4 --- /dev/null +++ b/src/trace_protocol.rs @@ -0,0 +1,170 @@ +//! The trace protocol for sending proof information to a prover scheduler. +//! +//! Because parsing performance has a very negligible impact on overall proof +//! 
generation latency & throughput, the overall priority of this protocol is +//! ease of implementation for clients. The flexibility comes from giving +//! multiple ways to the client to provide the data for the protocol, where the +//! implementors can pick whichever way is the most convenient for them. +//! +//! It might not be obvious why we need traces for each txn in order to generate +//! proofs. While it's true that we could just run all the txns of a block in an +//! EVM to generate the traces ourselves, there are a few major downsides: +//! - The client is likely a full node and already has to run the txns in an EVM +//! anyways. +//! - We want this protocol to be as agnostic as possible to the underlying +//! chain that we're generating proofs for, and running our own EVM would +//! likely cause us to loose this genericness. +//! +//! While it's also true that we run our own zk-EVM (plonky2) to generate +//! proofs, it's critical that we are able to generate txn proofs in parallel. +//! Since generating proofs with plonky2 is very slow, this would force us to +//! sequentialize the entire proof generation process. So in the end, it's ideal +//! if we can get this information sent to us instead. + +use std::collections::HashMap; + +use ethereum_types::{Address, H256, U256}; + +pub type BlockHeight = u64; +pub type Bloom = [U256; 8]; +pub type CodeHash = H256; +pub type HashedAccountAddress = H256; +pub type StorageAddr = H256; +pub type StorageVal = U256; + +/// Core payload needed to generate a proof for a block. Note that the scheduler +/// may need to request some additional data from the client along with this in +/// order to generate a proof. +#[derive(Debug)] +pub struct BlockTrace { + /// State trie pre-image. + pub state_trie: TriePreImage, + + /// Map of hashed account addr --> storage trie pre-image. + pub storage_tries: StorageTriesPreImage, + + /// All contract code used by txns in the block. + pub contract_code: BlockUsedContractCode, + + /// Traces and other info per txn. The index of the txn corresponds to the + /// slot in this vec. + pub txn_info: Vec, +} + +/// Minimal hashed out tries needed by all txns in the block. +#[derive(Debug)] +pub enum TriePreImage { + Uncompressed(TrieUncompressed), + Compact(TrieCompact), + Direct(TrieDirect), +} + +// TODO +/// Bulkier format that is quicker to process. +#[derive(Debug)] +pub struct TrieUncompressed {} + +// TODO +/// Compact representation of a trie (will likely be very close to https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md) +#[derive(Debug)] +pub struct TrieCompact {} + +// TODO +/// Trie format that is in exactly the same format of our internal trie format. +/// This is the fastest format for us to processes. +#[derive(Debug)] +pub struct TrieDirect {} + +#[derive(Debug)] +pub enum StorageTriesPreImage { + /// A single hash map that contains all node hashes from all storage tries + /// involved in the block. We can reconstruct the individual storage tries + /// by the storage root hash in the state entries. + SingleTrie(TriePreImage), + + /// Each storage trie is sent over in a hashmap with the hashed account + /// address as a key. + MultipleTries(HashMap), +} + +/// Contract code hit by txns in the block. +#[derive(Debug)] +pub enum BlockUsedContractCode { + /// Contains a map of the code hash to the actual contract code. + Full(HashMap>), + + /// Only contains the code hashes that were used. 
It's up to the prover + /// generation scheduler to get the code for each hash. This is the more + /// data efficient option. + Digests(Vec), +} + +/// Info specific to txns in the block. +#[derive(Debug)] +pub struct TxnInfo { + /// Trace data for the txn. This is used by the protocol to: + /// - Mutate it's own trie state between txns to arrive at the correct trie + /// state for the start of each txn. + /// - Create minimal partial tries needed for proof gen based on what state + /// the txn accesses. (eg. What trie nodes are accessed). + pub traces: HashMap, + + /// Data that is specific to the txn as a whole. + pub meta: TxnMeta, +} + +#[derive(Debug)] +pub struct TxnMeta { + /// Txn byte code. + pub byte_code: Vec, + + /// Rlped bytes of the new txn node inserted into the txn trie by this txn. + pub new_txn_trie_node_byte: Vec, + + /// Rlped bytes of the new receipt node inserted into the receipt trie by + /// this txn. + pub new_receipt_trie_node_byte: Vec, + + /// Gas used by this txn (Note: not cumulative gas used). + pub gas_used: u64, + + /// Bloom after txn execution. + pub bloom: Bloom, +} + +/// A "trace" specific to an account for a txn. +/// +/// Specifically, since we can not execute the txn before proof generation, we +/// rely on a separate EVM to run the txn and supply this data for us. +#[derive(Debug)] +pub struct TxnTrace { + /// If the balance changed, then the new balance will appear here. + pub balance: Option, + + /// If the nonce changed, then the new nonce will appear here. + pub nonce: Option, + + /// Account addresses that were only read by the txn. + /// + /// Note that if storage is written to, then it does not need to appear in + /// this list (but is also fine if it does). + pub storage_read: Option>, + + /// Account storage addresses that were mutated by the txn along with their + /// new value. + pub storage_written: Option>, + + /// Contract code that this address accessed. + pub code_usage: Option, +} + +/// Contract code access type. Used by txn traces. +#[derive(Debug)] +pub enum ContractCodeUsage { + /// Contract was read. + Read(CodeHash), + + /// Contract was created (and these are the bytes). Note that this new + /// contract code will not appear in the [`BlockTrace`] map. + Write(Vec), +} From 255f6c9acf8ee88b84625f4eec7b1f4e734b24ca Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 28 Sep 2023 15:55:43 -0600 Subject: [PATCH 011/208] Added more types to wrap trace - Needed for actual proof gen. 
--- Cargo.toml | 1 + src/lib.rs | 2 ++ src/proof_gen_types.rs | 59 ++++++++++++++++++++++++++++++++++++++++++ src/trace_protocol.rs | 11 +++----- src/types.rs | 9 +++++++ 5 files changed, 74 insertions(+), 8 deletions(-) create mode 100644 src/proof_gen_types.rs create mode 100644 src/types.rs diff --git a/Cargo.toml b/Cargo.toml index de7df7f30..33dd6a740 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,3 +7,4 @@ edition = "2021" [dependencies] ethereum-types = "0.14.1" plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } +serde = "1.0.166" diff --git a/src/lib.rs b/src/lib.rs index faae7a183..cbe5eb3c0 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1 +1,3 @@ +pub mod proof_gen_types; pub mod trace_protocol; +pub mod types; diff --git a/src/proof_gen_types.rs b/src/proof_gen_types.rs new file mode 100644 index 000000000..95af8ad69 --- /dev/null +++ b/src/proof_gen_types.rs @@ -0,0 +1,59 @@ +use std::{borrow::Borrow, iter::empty}; + +use ethereum_types::U256; +use plonky2_evm::{ + generation::GenerationInputs, + proof::{BlockHashes, BlockMetadata, ExtraBlockData}, +}; +use serde::{Deserialize, Serialize}; + +use crate::{trace_protocol::BlockTrace, types::TxnIdx}; + +#[derive(Debug)] +pub struct BlockLevelData { + pub b_meta: BlockMetadata, + pub b_hashes: BlockHashes, +} + +impl BlockTrace { + pub fn into_txn_proof_gen_inputs( + self, + _b_data: BlockLevelData, + ) -> impl Iterator { + empty() // TODO + } +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub struct ProofBeforeAndAfterDeltas { + pub gas_used_before: U256, + pub gas_used_after: U256, + pub block_bloom_before: [U256; 8], + pub block_bloom_after: [U256; 8], +} + +impl> From for ProofBeforeAndAfterDeltas { + fn from(v: T) -> Self { + let b = v.borrow(); + + Self { + gas_used_before: b.gas_used_before, + gas_used_after: b.gas_used_after, + block_bloom_before: b.block_bloom_before, + block_bloom_after: b.block_bloom_after, + } + } +} + +impl ProofBeforeAndAfterDeltas { + pub fn into_extra_block_data(self, txn_start: TxnIdx, txn_end: TxnIdx) -> ExtraBlockData { + ExtraBlockData { + txn_number_before: txn_start.into(), + txn_number_after: txn_end.into(), + gas_used_before: self.gas_used_before, + gas_used_after: self.gas_used_after, + block_bloom_before: self.block_bloom_before, + block_bloom_after: self.block_bloom_after, + } + } +} diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 80d4ed5a4..99bc66e21 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -23,14 +23,9 @@ use std::collections::HashMap; -use ethereum_types::{Address, H256, U256}; - -pub type BlockHeight = u64; -pub type Bloom = [U256; 8]; -pub type CodeHash = H256; -pub type HashedAccountAddress = H256; -pub type StorageAddr = H256; -pub type StorageVal = U256; +use ethereum_types::{Address, U256}; + +use crate::types::{Bloom, CodeHash, HashedAccountAddress, StorageAddr, StorageVal}; /// Core payload needed to generate a proof for a block. 
Note that the scheduler /// may need to request some additional data from the client along with this in diff --git a/src/types.rs b/src/types.rs new file mode 100644 index 000000000..4447e2503 --- /dev/null +++ b/src/types.rs @@ -0,0 +1,9 @@ +use ethereum_types::{H256, U256}; + +pub type BlockHeight = u64; +pub type Bloom = [U256; 8]; +pub type CodeHash = H256; +pub type HashedAccountAddress = H256; +pub type StorageAddr = H256; +pub type StorageVal = U256; +pub type TxnIdx = usize; From 1e751e6ec951286ec3b1eca3d13e672098327bf7 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 29 Sep 2023 10:31:13 -0600 Subject: [PATCH 012/208] Added `ProcessedBlockTrace` - Intent is to process the incoming protocol payload into a single non-variant type that is easy to process into txn proof payloads. --- Cargo.toml | 1 + src/lib.rs | 1 + src/processed_block_trace.rs | 102 +++++++++++++++++++++++++++++++++++ src/proof_gen_types.rs | 18 ++----- src/trace_protocol.rs | 3 +- 5 files changed, 109 insertions(+), 16 deletions(-) create mode 100644 src/processed_block_trace.rs diff --git a/Cargo.toml b/Cargo.toml index 33dd6a740..4569bfde2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ version = "0.1.0" edition = "2021" [dependencies] +eth_trie_utils = "0.6.0" ethereum-types = "0.14.1" plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } serde = "1.0.166" diff --git a/src/lib.rs b/src/lib.rs index cbe5eb3c0..ec580b6e3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,4 @@ +pub mod processed_block_trace; pub mod proof_gen_types; pub mod trace_protocol; pub mod types; diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs new file mode 100644 index 000000000..961821c30 --- /dev/null +++ b/src/processed_block_trace.rs @@ -0,0 +1,102 @@ +use std::collections::HashMap; + +use eth_trie_utils::partial_trie::HashedPartialTrie; + +use crate::trace_protocol::{ + BlockTrace, BlockUsedContractCode, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo, +}; +use crate::types::{CodeHash, HashedAccountAddress}; + +pub(crate) struct ProcessedBlockTrace { + state_trie: HashedPartialTrie, + storage_tries: HashMap, + contract_code: HashMap>, + txn_info: Vec, +} + +impl BlockTrace { + fn into_processed_block_trace(self, p_meta: &ProcessingMeta) -> ProcessedBlockTrace + where + F: Fn(&CodeHash) -> Vec, + { + ProcessedBlockTrace { + state_trie: process_state_trie(self.state_trie), + storage_tries: process_storage_tries(self.storage_tries), + contract_code: process_block_used_contract_code( + self.contract_code, + &p_meta.resolve_code_hash_fn, + ), + txn_info: self.txn_info, + } + } +} + +fn process_state_trie(trie: TriePreImage) -> HashedPartialTrie { + match trie { + TriePreImage::Uncompressed(_) => todo!(), + TriePreImage::Compact(t) => process_compact_trie(t), + TriePreImage::Direct(t) => t.0, + } +} + +fn process_storage_tries( + trie: StorageTriesPreImage, +) -> HashMap { + match trie { + StorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t), + StorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t), + } +} + +fn process_single_storage_trie( + _trie: TriePreImage, +) -> HashMap { + todo!() +} + +fn process_multiple_storage_tries( + _tries: HashMap, +) -> HashMap { + todo!() +} + +fn process_compact_trie(_trie: TrieCompact) -> HashedPartialTrie { + todo!() +} + +fn process_block_used_contract_code( + code: BlockUsedContractCode, + resolve_code_hash_fn: &F, +) -> HashMap> +where + F: Fn(&CodeHash) 
-> Vec, +{ + match code { + BlockUsedContractCode::Full(c) => c, + BlockUsedContractCode::Digests(d) => { + let code_hash_to_code_iter = d + .into_iter() + .map(|c_hash| (c_hash, resolve_code_hash_fn(&c_hash))); + HashMap::from_iter(code_hash_to_code_iter) + } + } +} + +#[derive(Debug)] +pub struct ProcessingMeta +where + F: Fn(&CodeHash) -> Vec, +{ + resolve_code_hash_fn: F, +} + +impl ProcessingMeta +where + F: Fn(&CodeHash) -> Vec, +{ + pub fn new(resolve_code_hash_fn: F) -> Self { + Self { + resolve_code_hash_fn, + } + } +} diff --git a/src/proof_gen_types.rs b/src/proof_gen_types.rs index 95af8ad69..935c5c6a2 100644 --- a/src/proof_gen_types.rs +++ b/src/proof_gen_types.rs @@ -1,13 +1,10 @@ -use std::{borrow::Borrow, iter::empty}; +use std::borrow::Borrow; use ethereum_types::U256; -use plonky2_evm::{ - generation::GenerationInputs, - proof::{BlockHashes, BlockMetadata, ExtraBlockData}, -}; +use plonky2_evm::proof::{BlockHashes, BlockMetadata, ExtraBlockData}; use serde::{Deserialize, Serialize}; -use crate::{trace_protocol::BlockTrace, types::TxnIdx}; +use crate::types::TxnIdx; #[derive(Debug)] pub struct BlockLevelData { @@ -15,15 +12,6 @@ pub struct BlockLevelData { pub b_hashes: BlockHashes, } -impl BlockTrace { - pub fn into_txn_proof_gen_inputs( - self, - _b_data: BlockLevelData, - ) -> impl Iterator { - empty() // TODO - } -} - #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ProofBeforeAndAfterDeltas { pub gas_used_before: U256, diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 99bc66e21..3198a66bd 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -23,6 +23,7 @@ use std::collections::HashMap; +use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::{Address, U256}; use crate::types::{Bloom, CodeHash, HashedAccountAddress, StorageAddr, StorageVal}; @@ -68,7 +69,7 @@ pub struct TrieCompact {} /// Trie format that is in exactly the same format of our internal trie format. /// This is the fastest format for us to processes. #[derive(Debug)] -pub struct TrieDirect {} +pub struct TrieDirect(pub HashedPartialTrie); #[derive(Debug)] pub enum StorageTriesPreImage { From 2f5d364adba9fc0307e57212212e293a47efdf9f Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 2 Oct 2023 12:20:55 -0600 Subject: [PATCH 013/208] More work on processed traces --- Cargo.toml | 1 + src/decoding.rs | 36 +++++++++++ src/lib.rs | 2 + src/processed_block_trace.rs | 113 +++++++++++++++++++++++++++++++---- src/trace_protocol.rs | 4 +- src/types.rs | 6 +- src/utils.rs | 6 ++ 7 files changed, 153 insertions(+), 15 deletions(-) create mode 100644 src/decoding.rs create mode 100644 src/utils.rs diff --git a/Cargo.toml b/Cargo.toml index 4569bfde2..bba8c7f71 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,5 +7,6 @@ edition = "2021" [dependencies] eth_trie_utils = "0.6.0" ethereum-types = "0.14.1" +keccak-hash = "0.10.0" plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } serde = "1.0.166" diff --git a/src/decoding.rs b/src/decoding.rs new file mode 100644 index 000000000..f6dfb802c --- /dev/null +++ b/src/decoding.rs @@ -0,0 +1,36 @@ +use std::collections::HashMap; + +use eth_trie_utils::partial_trie::HashedPartialTrie; +use plonky2_evm::generation::GenerationInputs; + +use crate::{ + processed_block_trace::{BlockMetaState, ProcessedBlockTrace}, + proof_gen_types::BlockLevelData, + types::{Bloom, HashedAccountAddr}, +}; + +/// The current state of all tries as we process txn deltas. 
From 2f5d364adba9fc0307e57212212e293a47efdf9f Mon Sep 17 00:00:00 2001
From: BGluth
Date: Mon, 2 Oct 2023 12:20:55 -0600
Subject: [PATCH 013/208] More work on processed traces

---
 Cargo.toml                   |   1 +
 src/decoding.rs              |  36 +++++++++++
 src/lib.rs                   |   2 +
 src/processed_block_trace.rs | 113 +++++++++++++++++++++++++++++++----
 src/trace_protocol.rs        |   4 +-
 src/types.rs                 |   6 +-
 src/utils.rs                 |   6 ++
 7 files changed, 153 insertions(+), 15 deletions(-)
 create mode 100644 src/decoding.rs
 create mode 100644 src/utils.rs

diff --git a/Cargo.toml b/Cargo.toml
index 4569bfde2..bba8c7f71 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,5 +7,6 @@
 [dependencies]
 eth_trie_utils = "0.6.0"
 ethereum-types = "0.14.1"
+keccak-hash = "0.10.0"
 plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" }
 serde = "1.0.166"

diff --git a/src/decoding.rs b/src/decoding.rs
new file mode 100644
index 000000000..f6dfb802c
--- /dev/null
+++ b/src/decoding.rs
@@ -0,0 +1,36 @@
+use std::collections::HashMap;
+
+use eth_trie_utils::partial_trie::HashedPartialTrie;
+use plonky2_evm::generation::GenerationInputs;
+
+use crate::{
+    processed_block_trace::{BlockMetaState, ProcessedBlockTrace},
+    proof_gen_types::BlockLevelData,
+    types::{Bloom, HashedAccountAddr},
+};
+
+/// The current state of all tries as we process txn deltas. These are mutated
+/// after every txn we process in the trace.
+#[derive(Debug, Default)]
+struct PartialTrieState {
+    state: HashedPartialTrie,
+    storage: HashMap<HashedAccountAddr, HashedPartialTrie>,
+    txn: HashedPartialTrie,
+    receipt: HashedPartialTrie,
+}
+
+impl ProcessedBlockTrace {
+    fn into_generation_inputs(self, b_data: BlockLevelData) -> Vec<GenerationInputs> {
+        let mut trie_state = PartialTrieState::default();
+        let mut b_meta_state = BlockMetaState::default();
+
+        let txn_gen_inputs = self
+            .txn_info
+            .into_iter()
+            .enumerate()
+            .map(|(txn_idx, trace)| todo!())
+            .collect();
+
+        txn_gen_inputs
+    }
+}
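
The shape being stubbed out here is a stateful fold: every transaction reads the current `PartialTrieState`, emits its proof payload, and then mutates the tries for the transaction after it. A toy standalone illustration of that pattern (simplified stand-in types, not the real ones):

    // Hypothetical stand-ins for `PartialTrieState` / `GenerationInputs`.
    #[derive(Default)]
    struct TrieState {
        root: u64,
    }

    struct TxnPayload {
        root_before: u64,
        root_after: u64,
    }

    fn fold_txns(deltas: &[u64]) -> Vec<TxnPayload> {
        let mut state = TrieState::default();
        deltas
            .iter()
            .map(|delta| {
                let root_before = state.root;
                // Apply this txn's delta before the next txn is processed.
                state.root = state.root.wrapping_add(*delta);
                TxnPayload {
                    root_before,
                    root_after: state.root,
                }
            })
            .collect()
    }
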
diff --git a/src/lib.rs b/src/lib.rs
index ec580b6e3..137a8267d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,4 +1,6 @@
+pub mod decoding;
 pub mod processed_block_trace;
 pub mod proof_gen_types;
 pub mod trace_protocol;
 pub mod types;
+pub mod utils;

diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs
index 961821c30..d8f925410 100644
--- a/src/processed_block_trace.rs
+++ b/src/processed_block_trace.rs
@@ -1,17 +1,25 @@
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Debug;
 
+use eth_trie_utils::nibbles::Nibbles;
 use eth_trie_utils::partial_trie::HashedPartialTrie;
+use ethereum_types::U256;
 
 use crate::trace_protocol::{
-    BlockTrace, BlockUsedContractCode, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo,
+    BlockTrace, BlockUsedContractCode, ContractCodeUsage, StorageTriesPreImage, TrieCompact,
+    TriePreImage, TxnInfo,
 };
-use crate::types::{CodeHash, HashedAccountAddress};
+use crate::types::{
+    Bloom, CodeHash, HashedAccountAddr, HashedNodeAddr, HashedStorageAddr,
+    HashedStorageAddrNibbles, StorageAddr, StorageVal,
+};
+use crate::utils::hash;
 
 pub(crate) struct ProcessedBlockTrace {
-    state_trie: HashedPartialTrie,
-    storage_tries: HashMap<HashedAccountAddress, HashedPartialTrie>,
-    contract_code: HashMap<CodeHash, Vec<u8>>,
-    txn_info: Vec<TxnInfo>,
+    pub(crate) state_trie: HashedPartialTrie,
+    pub(crate) storage_tries: HashMap<HashedAccountAddr, HashedPartialTrie>,
+    pub(crate) contract_code: HashMap<CodeHash, Vec<u8>>,
+    pub(crate) txn_info: Vec<TxnInfo>,
 }
 
 impl BlockTrace {
@@ -26,7 +34,7 @@ impl BlockTrace {
             self.contract_code,
             &p_meta.resolve_code_hash_fn,
         ),
-            txn_info: self.txn_info,
+            txn_info: self.txn_info.into_iter().map(|t| t.into()).collect(),
         }
     }
 }
@@ -41,7 +49,7 @@ fn process_state_trie(trie: TriePreImage) -> HashedPartialTrie {
 
 fn process_storage_tries(
     trie: StorageTriesPreImage,
-) -> HashMap<HashedAccountAddress, HashedPartialTrie> {
+) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
     match trie {
         StorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t),
         StorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t),
@@ -50,13 +58,13 @@ fn process_storage_tries(
 
 fn process_single_storage_trie(
     _trie: TriePreImage,
-) -> HashMap<HashedAccountAddress, HashedPartialTrie> {
+) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
     todo!()
 }
 
 fn process_multiple_storage_tries(
-    _tries: HashMap<HashedAccountAddress, TriePreImage>,
-) -> HashMap<HashedAccountAddress, HashedPartialTrie> {
+    _tries: HashMap<HashedAccountAddr, TriePreImage>,
+) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
     todo!()
 }
 
@@ -100,3 +108,84 @@ where
         }
     }
 }
+
+#[derive(Debug)]
+pub(crate) struct ProcessedTxnInfo {
+    pub(crate) contract_code: Vec<CodeHash>,
+    pub(crate) nodes_used_by_txn: NodesUsedByTxn,
+    pub(crate) contract_code_created: Vec<(CodeHash, Vec<u8>)>,
+    pub(crate) new_meta_state: BlockMetaState,
+}
+
+impl From<TxnInfo> for ProcessedTxnInfo {
+    fn from(v: TxnInfo) -> Self {
+        let mut nodes_used_by_txn = NodesUsedByTxn::default();
+        let mut contract_code_created = Vec::new();
+        // let mut state_trie_writes = Vec::with_capacity(v.traces.len()); // Good
+        // assumption?
+
+        for (addr, trace) in v.traces {
+            let hashed_addr = hash(addr.as_bytes());
+
+            let s_writes = trace.storage_written.unwrap_or_default();
+
+            let s_read_keys = trace.storage_read.into_iter().flat_map(|reads| {
+                reads
+                    .into_iter()
+                    .map(|addr| storage_addr_to_nibbles_even_nibble_fixed_hashed(&addr))
+            });
+
+            let s_write_keys = s_writes
+                .keys()
+                .map(|k| storage_addr_to_nibbles_even_nibble_fixed_hashed(k));
+            let s_access_keys = s_read_keys.chain(s_write_keys);
+
+            nodes_used_by_txn
+                .storage_accesses
+                .push((hashed_addr, s_access_keys.collect()));
+            // nodes_used_by_txn.storage_writes.push((hashed_addr, s_writes));
+        }
+
+        // TODO
+
+        Self {
+            contract_code: todo!(),
+            nodes_used_by_txn,
+            contract_code_created,
+            new_meta_state: todo!(),
+        }
+    }
+}
+
+#[derive(Debug, Default)]
+struct NodesUsedByTxn {
+    state_accesses: Vec<HashedNodeAddr>,
+    state_writes: Vec<StateTrieWrites>,
+    storage_accesses: Vec<(HashedAccountAddr, Vec<HashedStorageAddrNibbles>)>,
+    storage_writes: Vec<(
+        HashedAccountAddr,
+        Vec<(HashedStorageAddrNibbles, StorageVal)>,
+    )>,
+}
+
+#[derive(Debug)]
+struct StateTrieWrites {
+    balance: Option<U256>,
+    nonce: Option<U256>,
+}
+
+#[derive(Debug)]
+enum TraceStorageAccess {
+    Read(StorageAddr),
+    Write(StorageAddr, StorageVal),
+}
+
+#[derive(Debug, Default)]
+pub(crate) struct BlockMetaState {
+    pub(crate) gas_used: u64,
+    pub(crate) block_bloom: Bloom,
+}
+
+fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbles {
+    todo!()
+}

diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs
index 3198a66bd..10e8932d4 100644
--- a/src/trace_protocol.rs
+++ b/src/trace_protocol.rs
@@ -26,7 +26,7 @@
 use std::collections::HashMap;
 
 use eth_trie_utils::partial_trie::HashedPartialTrie;
 use ethereum_types::{Address, U256};
 
-use crate::types::{Bloom, CodeHash, HashedAccountAddress, StorageAddr, StorageVal};
+use crate::types::{Bloom, CodeHash, HashedAccountAddr, StorageAddr, StorageVal};
 
@@ -80,7 +80,7 @@ pub enum StorageTriesPreImage {
 
     /// Each storage trie is sent over in a hashmap with the hashed account
     /// address as a key.
-    MultipleTries(HashMap<HashedAccountAddress, TriePreImage>),
+    MultipleTries(HashMap<HashedAccountAddr, TriePreImage>),
 }
 
 /// Contract code hit by txns in the block.
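
The `storage_addr_to_nibbles_even_nibble_fixed_hashed` stub above is the MPT keying step: in Ethereum's state layout, a storage slot lives in the account's storage trie under `keccak256(slot)`. A plausible sketch in terms of helpers this series already has (`hash` from `src/utils.rs`, `Nibbles::from_h256_be` from `eth_trie_utils`); treat it as illustrative, not the commit's actual implementation:

    use eth_trie_utils::nibbles::Nibbles;

    use crate::types::StorageAddr;
    use crate::utils::hash;

    // Sketch: hash the slot address and reinterpret the 32-byte digest as a
    // big-endian nibble key, which is always an even 64 nibbles long.
    fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbles {
        Nibbles::from_h256_be(hash(addr.as_bytes()))
    }
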
diff --git a/src/types.rs b/src/types.rs index 4447e2503..7ab6267f5 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,9 +1,13 @@ +use eth_trie_utils::nibbles::Nibbles; use ethereum_types::{H256, U256}; pub type BlockHeight = u64; pub type Bloom = [U256; 8]; pub type CodeHash = H256; -pub type HashedAccountAddress = H256; +pub type HashedAccountAddr = H256; +pub type HashedNodeAddr = H256; +pub type HashedStorageAddr = H256; +pub type HashedStorageAddrNibbles = Nibbles; pub type StorageAddr = H256; pub type StorageVal = U256; pub type TxnIdx = usize; diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 000000000..8ad74323d --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,6 @@ +use ethereum_types::H256; +use keccak_hash::keccak; + +pub(crate) fn hash(bytes: &[u8]) -> H256 { + H256::from(keccak(bytes).0) +} From 35b6bfb35d2060ee993fbbd2ae301a323bfc0235 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 2 Oct 2023 12:29:25 -0600 Subject: [PATCH 014/208] Rough structure for processing block traces --- Cargo.toml | 1 + src/decoding.rs | 50 ++++++++++++++++++++++++++++++------ src/processed_block_trace.rs | 2 +- 3 files changed, 44 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index bba8c7f71..b38efef20 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,4 +9,5 @@ eth_trie_utils = "0.6.0" ethereum-types = "0.14.1" keccak-hash = "0.10.0" plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } +thiserror = "1.0.49" serde = "1.0.166" diff --git a/src/decoding.rs b/src/decoding.rs index f6dfb802c..ecb5ee3ea 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,14 +1,37 @@ use std::collections::HashMap; use eth_trie_utils::partial_trie::HashedPartialTrie; -use plonky2_evm::generation::GenerationInputs; +use plonky2_evm::generation::{GenerationInputs, TrieInputs}; +use thiserror::Error; use crate::{ - processed_block_trace::{BlockMetaState, ProcessedBlockTrace}, + processed_block_trace::{ + BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, + }, proof_gen_types::BlockLevelData, types::{Bloom, HashedAccountAddr}, }; +pub type TraceParsingResult = Result; + +#[derive(Debug, Error)] +pub enum TraceParsingError { + #[error("Failed to decode RLP bytes ({0}) as an Ethereum account due to the error: {1}")] + AccountDecode(String, String), + + #[error("Missing account storage trie in base trie when constructing subset partial trie for txn (account: {0})")] + MissingAccountStorageTrie(HashedAccountAddr), + + // TODO: Make this error nicer... + #[error( + "Non-existent account addr given when creating a sub partial trie from the base state trie" + )] + NonExistentAcctAddrsCreatingSubPartialTrie, + + #[error("Creating a subset partial trie for account storage for account {0}, mem addrs accessed: {1:?}")] + NonExistentStorageAddrsCreatingStorageSubPartialTrie(HashedAccountAddr, Vec, String), +} + /// The current state of all tries as we process txn deltas. These are mutated /// after every txn we process in the trace. 
#[derive(Debug, Default)] @@ -23,14 +46,25 @@ impl ProcessedBlockTrace { fn into_generation_inputs(self, b_data: BlockLevelData) -> Vec { let mut trie_state = PartialTrieState::default(); let mut b_meta_state = BlockMetaState::default(); + let mut txn_gen_inputs = Vec::with_capacity(self.txn_info.len()); - let txn_gen_inputs = self - .txn_info - .into_iter() - .enumerate() - .map(|(txn_idx, trace)| todo!()) - .collect(); + for (txn_idx, txn_info) in self.txn_info.into_iter().enumerate() {} txn_gen_inputs } + + fn create_minimal_partial_tries_needed_by_txn( + curr_block_tries: &PartialTrieState, + nodes_used_by_txn: NodesUsedByTxn, + ) -> TraceParsingResult { + todo!() + } + + fn apply_deltas_to_trie_state( + trie_state: &mut PartialTrieState, + deltas: Vec, + addrs_to_code: &mut HashMap>, + ) -> TraceParsingResult<()> { + todo!() + } } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index d8f925410..9cb577ebb 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -158,7 +158,7 @@ impl From for ProcessedTxnInfo { } #[derive(Debug, Default)] -struct NodesUsedByTxn { +pub(crate) struct NodesUsedByTxn { state_accesses: Vec, state_writes: Vec, storage_accesses: Vec<(HashedAccountAddr, Vec)>, From 81c9066272a8cf0ba10365f2bf1f4da75a072e48 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 2 Oct 2023 13:08:49 -0600 Subject: [PATCH 015/208] Ported logic to apply deltas to tries --- Cargo.toml | 3 ++ src/decoding.rs | 78 ++++++++++++++++++++++++++++++------ src/processed_block_trace.rs | 19 +++++---- src/utils.rs | 6 +++ 4 files changed, 85 insertions(+), 21 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index b38efef20..c901bbec5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,7 +7,10 @@ edition = "2021" [dependencies] eth_trie_utils = "0.6.0" ethereum-types = "0.14.1" +hex = "0.4.3" keccak-hash = "0.10.0" plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } thiserror = "1.0.49" +rlp = "0.5.2" +rlp-derive = "0.1.0" serde = "1.0.166" diff --git a/src/decoding.rs b/src/decoding.rs index ecb5ee3ea..158b439a6 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,15 +1,24 @@ -use std::collections::HashMap; +use std::{ + collections::HashMap, + fmt::{self, Display, Formatter}, +}; -use eth_trie_utils::partial_trie::HashedPartialTrie; -use plonky2_evm::generation::{GenerationInputs, TrieInputs}; +use eth_trie_utils::{ + nibbles::Nibbles, + partial_trie::{HashedPartialTrie, PartialTrie}, +}; +use ethereum_types::H256; +use plonky2_evm::generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}; +use rlp::decode; use thiserror::Error; use crate::{ processed_block_trace::{ - BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, + BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, }, proof_gen_types::BlockLevelData, types::{Bloom, HashedAccountAddr}, + utils::{hash, update_val_if_some}, }; pub type TraceParsingResult = Result; @@ -22,14 +31,27 @@ pub enum TraceParsingError { #[error("Missing account storage trie in base trie when constructing subset partial trie for txn (account: {0})")] MissingAccountStorageTrie(HashedAccountAddr), - // TODO: Make this error nicer... 
- #[error( - "Non-existent account addr given when creating a sub partial trie from the base state trie" - )] - NonExistentAcctAddrsCreatingSubPartialTrie, + #[error("Tried accessing a non-existent key ({1}) in the {0} trie (root hash: {2:x})")] + NonExistentTrieEntry(TrieType, Nibbles, H256), +} - #[error("Creating a subset partial trie for account storage for account {0}, mem addrs accessed: {1:?}")] - NonExistentStorageAddrsCreatingStorageSubPartialTrie(HashedAccountAddr, Vec, String), +#[derive(Debug)] +pub enum TrieType { + State, + Storage, + Receipt, + Txn, +} + +impl Display for TrieType { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + TrieType::State => write!(f, "state"), + TrieType::Storage => write!(f, "storage"), + TrieType::Receipt => write!(f, "receipt"), + TrieType::Txn => write!(f, "transaction"), + } + } } /// The current state of all tries as we process txn deltas. These are mutated @@ -62,9 +84,39 @@ impl ProcessedBlockTrace { fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, - deltas: Vec, + deltas: ProcessedTxnInfo, addrs_to_code: &mut HashMap>, ) -> TraceParsingResult<()> { - todo!() + for (hashed_acc_addr, s_trie_writes) in deltas.nodes_used_by_txn.state_writes { + let val_k = Nibbles::from_h256_be(hashed_acc_addr); + let val_bytes = trie_state.state.get(val_k).ok_or_else(|| { + TraceParsingError::NonExistentTrieEntry( + TrieType::State, + val_k, + trie_state.state.hash(), + ) + })?; + + let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { + TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) + })?; + s_trie_writes.apply_writes_to_state_node(&mut account); + + let updated_account_bytes = rlp::encode(&account); + trie_state + .state + .insert(val_k, updated_account_bytes.to_vec()); + } + + Ok(()) + } +} + +impl StateTrieWrites { + fn apply_writes_to_state_node(&self, state_node: &mut AccountRlp) { + update_val_if_some(&mut state_node.balance, self.balance); + update_val_if_some(&mut state_node.nonce, self.nonce); + update_val_if_some(&mut state_node.storage_root, self.storage_root); + update_val_if_some(&mut state_node.code_hash, self.code_root); } } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 9cb577ebb..26ee1ea7c 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -3,7 +3,8 @@ use std::fmt::Debug; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::HashedPartialTrie; -use ethereum_types::U256; +use ethereum_types::{H256, U256}; +use plonky2_evm::generation::mpt::AccountRlp; use crate::trace_protocol::{ BlockTrace, BlockUsedContractCode, ContractCodeUsage, StorageTriesPreImage, TrieCompact, @@ -159,19 +160,21 @@ impl From for ProcessedTxnInfo { #[derive(Debug, Default)] pub(crate) struct NodesUsedByTxn { - state_accesses: Vec, - state_writes: Vec, - storage_accesses: Vec<(HashedAccountAddr, Vec)>, - storage_writes: Vec<( + pub(crate) state_accesses: Vec, + pub(crate) state_writes: Vec<(HashedAccountAddr, StateTrieWrites)>, + pub(crate) storage_accesses: Vec<(HashedAccountAddr, Vec)>, + pub(crate) storage_writes: Vec<( HashedAccountAddr, Vec<(HashedStorageAddrNibbles, StorageVal)>, )>, } #[derive(Debug)] -struct StateTrieWrites { - balance: Option, - nonce: Option, +pub(crate) struct StateTrieWrites { + pub(crate) balance: Option, + pub(crate) nonce: Option, + pub(crate) storage_root: Option, + pub(crate) code_root: Option, } #[derive(Debug)] diff --git a/src/utils.rs b/src/utils.rs index 
8ad74323d..4f42fbac3 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -4,3 +4,9 @@ use keccak_hash::keccak; pub(crate) fn hash(bytes: &[u8]) -> H256 { H256::from(keccak(bytes).0) } + +pub(crate) fn update_val_if_some(target: &mut T, opt: Option) { + if let Some(new_val) = opt { + *target = new_val; + } +} From ebf7b865bedde574988cd187d4caf40b016b3ba2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 3 Oct 2023 12:52:06 -0600 Subject: [PATCH 016/208] More work on updating storage tries from deltas --- src/decoding.rs | 60 ++++++++++++++++++-------- src/processed_block_trace.rs | 84 +++++++++++++++++++++++++++--------- src/trace_protocol.rs | 14 +++++- src/types.rs | 1 + 4 files changed, 120 insertions(+), 39 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 158b439a6..2370d172b 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -7,9 +7,7 @@ use eth_trie_utils::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, PartialTrie}, }; -use ethereum_types::H256; use plonky2_evm::generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}; -use rlp::decode; use thiserror::Error; use crate::{ @@ -17,8 +15,8 @@ use crate::{ BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, }, proof_gen_types::BlockLevelData, - types::{Bloom, HashedAccountAddr}, - utils::{hash, update_val_if_some}, + types::{HashedAccountAddr, TrieRootHash}, + utils::update_val_if_some, }; pub type TraceParsingResult = Result; @@ -32,7 +30,7 @@ pub enum TraceParsingError { MissingAccountStorageTrie(HashedAccountAddr), #[error("Tried accessing a non-existent key ({1}) in the {0} trie (root hash: {2:x})")] - NonExistentTrieEntry(TrieType, Nibbles, H256), + NonExistentTrieEntry(TrieType, Nibbles, TrieRootHash), } #[derive(Debug)] @@ -65,19 +63,19 @@ struct PartialTrieState { } impl ProcessedBlockTrace { - fn into_generation_inputs(self, b_data: BlockLevelData) -> Vec { - let mut trie_state = PartialTrieState::default(); - let mut b_meta_state = BlockMetaState::default(); - let mut txn_gen_inputs = Vec::with_capacity(self.txn_info.len()); + fn into_generation_inputs(self, _b_data: BlockLevelData) -> Vec { + let _trie_state = PartialTrieState::default(); + let _b_meta_state = BlockMetaState::default(); + let txn_gen_inputs = Vec::with_capacity(self.txn_info.len()); - for (txn_idx, txn_info) in self.txn_info.into_iter().enumerate() {} + for (_txn_idx, _txn_info) in self.txn_info.into_iter().enumerate() {} txn_gen_inputs } fn create_minimal_partial_tries_needed_by_txn( - curr_block_tries: &PartialTrieState, - nodes_used_by_txn: NodesUsedByTxn, + _curr_block_tries: &PartialTrieState, + _nodes_used_by_txn: NodesUsedByTxn, ) -> TraceParsingResult { todo!() } @@ -85,8 +83,15 @@ impl ProcessedBlockTrace { fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: ProcessedTxnInfo, - addrs_to_code: &mut HashMap>, + _addrs_to_code: &mut HashMap>, ) -> TraceParsingResult<()> { + for (hashed_acc_addr, storage_writes) in deltas.nodes_used_by_txn.storage_writes { + let storage_trie = trie_state.storage.get_mut(&hashed_acc_addr).ok_or( + TraceParsingError::MissingAccountStorageTrie(hashed_acc_addr), + )?; + storage_trie.extend(storage_writes); + } + for (hashed_acc_addr, s_trie_writes) in deltas.nodes_used_by_txn.state_writes { let val_k = Nibbles::from_h256_be(hashed_acc_addr); let val_bytes = trie_state.state.get(val_k).ok_or_else(|| { @@ -100,7 +105,11 @@ impl ProcessedBlockTrace { let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { 
TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) })?; - s_trie_writes.apply_writes_to_state_node(&mut account); + s_trie_writes.apply_writes_to_state_node( + &mut account, + &hashed_acc_addr, + &trie_state.storage, + )?; let updated_account_bytes = rlp::encode(&account); trie_state @@ -113,10 +122,27 @@ impl ProcessedBlockTrace { } impl StateTrieWrites { - fn apply_writes_to_state_node(&self, state_node: &mut AccountRlp) { + fn apply_writes_to_state_node( + &self, + state_node: &mut AccountRlp, + h_addr: &HashedAccountAddr, + acc_storage_tries: &HashMap, + ) -> TraceParsingResult<()> { + let storage_root_hash_change = match self.storage_trie_change { + false => None, + true => { + let storage_trie = acc_storage_tries + .get(h_addr) + .ok_or(TraceParsingError::MissingAccountStorageTrie(*h_addr))?; + Some(storage_trie.hash()) + } + }; + update_val_if_some(&mut state_node.balance, self.balance); update_val_if_some(&mut state_node.nonce, self.nonce); - update_val_if_some(&mut state_node.storage_root, self.storage_root); - update_val_if_some(&mut state_node.code_hash, self.code_root); + update_val_if_some(&mut state_node.storage_root, storage_root_hash_change); + update_val_if_some(&mut state_node.code_hash, self.code_hash); + + Ok(()) } } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 26ee1ea7c..2a24dc4c5 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -1,18 +1,17 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::fmt::Debug; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::HashedPartialTrie; -use ethereum_types::{H256, U256}; -use plonky2_evm::generation::mpt::AccountRlp; +use ethereum_types::U256; use crate::trace_protocol::{ BlockTrace, BlockUsedContractCode, ContractCodeUsage, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo, }; use crate::types::{ - Bloom, CodeHash, HashedAccountAddr, HashedNodeAddr, HashedStorageAddr, - HashedStorageAddrNibbles, StorageAddr, StorageVal, + Bloom, CodeHash, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, StorageAddr, + StorageVal, }; use crate::utils::hash; @@ -112,8 +111,8 @@ where #[derive(Debug)] pub(crate) struct ProcessedTxnInfo { - pub(crate) contract_code: Vec, pub(crate) nodes_used_by_txn: NodesUsedByTxn, + pub(crate) contract_code_read: Vec, pub(crate) contract_code_created: Vec<(CodeHash, Vec)>, pub(crate) new_meta_state: BlockMetaState, } @@ -121,39 +120,82 @@ pub(crate) struct ProcessedTxnInfo { impl From for ProcessedTxnInfo { fn from(v: TxnInfo) -> Self { let mut nodes_used_by_txn = NodesUsedByTxn::default(); + let mut contract_code_read = Vec::new(); let mut contract_code_created = Vec::new(); - // let mut state_trie_writes = Vec::with_capacity(v.traces.len()); // Good - // assumption? 
for (addr, trace) in v.traces { let hashed_addr = hash(addr.as_bytes()); - let s_writes = trace.storage_written.unwrap_or_default(); + let storage_writes = trace.storage_written.unwrap_or_default(); - let s_read_keys = trace.storage_read.into_iter().flat_map(|reads| { + let storage_read_keys = trace.storage_read.into_iter().flat_map(|reads| { reads .into_iter() .map(|addr| storage_addr_to_nibbles_even_nibble_fixed_hashed(&addr)) }); - let s_write_keys = s_writes + let storage_write_keys = storage_writes .keys() - .map(|k| storage_addr_to_nibbles_even_nibble_fixed_hashed(k)); - let s_access_keys = s_read_keys.chain(s_write_keys); + .map(storage_addr_to_nibbles_even_nibble_fixed_hashed); + let storage_access_keys = storage_read_keys.chain(storage_write_keys); nodes_used_by_txn .storage_accesses - .push((hashed_addr, s_access_keys.collect())); - // nodes_used_by_txn.storage_writes.push((hashed_addr, s_writes)); + .push((hashed_addr, storage_access_keys.collect())); + + let storage_trie_change = !storage_writes.is_empty(); + let code_change = trace.code_usage.is_some(); + let state_write_occurred = trace.balance.is_some() + || trace.nonce.is_some() + || storage_trie_change + || code_change; + + if state_write_occurred { + let state_trie_writes = StateTrieWrites { + balance: trace.balance, + nonce: trace.nonce, + storage_trie_change, + code_hash: trace.code_usage.as_ref().map(|usage| usage.get_code_hash()), + }; + + nodes_used_by_txn + .state_writes + .push((hashed_addr, state_trie_writes)) + } + + let storage_writes_vec = storage_writes + .into_iter() + .map(|(k, v)| (storage_addr_to_nibbles_even_nibble_fixed_hashed(&k), v)) + .collect(); + nodes_used_by_txn + .storage_writes + .push((hashed_addr, storage_writes_vec)); + + nodes_used_by_txn.state_accesses.push(hashed_addr); + + if let Some(c_usage) = trace.code_usage { + match c_usage { + ContractCodeUsage::Read(c_hash) => contract_code_read.push(c_hash), + ContractCodeUsage::Write(c_bytes) => { + let c_hash = hash(&c_bytes); + + contract_code_read.push(c_hash); + contract_code_created.push((c_hash, c_bytes)); + } + } + } } - // TODO + let new_meta_state = BlockMetaState { + gas_used: v.meta.gas_used, + block_bloom: v.meta.bloom, + }; Self { - contract_code: todo!(), nodes_used_by_txn, + contract_code_read, contract_code_created, - new_meta_state: todo!(), + new_meta_state, } } } @@ -173,8 +215,8 @@ pub(crate) struct NodesUsedByTxn { pub(crate) struct StateTrieWrites { pub(crate) balance: Option, pub(crate) nonce: Option, - pub(crate) storage_root: Option, - pub(crate) code_root: Option, + pub(crate) storage_trie_change: bool, + pub(crate) code_hash: Option, } #[derive(Debug)] @@ -189,6 +231,6 @@ pub(crate) struct BlockMetaState { pub(crate) block_bloom: Bloom, } -fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbles { +fn storage_addr_to_nibbles_even_nibble_fixed_hashed(_addr: &StorageAddr) -> Nibbles { todo!() } diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 10e8932d4..c51f6cc73 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -26,7 +26,10 @@ use std::collections::HashMap; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::{Address, U256}; -use crate::types::{Bloom, CodeHash, HashedAccountAddr, StorageAddr, StorageVal}; +use crate::{ + types::{Bloom, CodeHash, HashedAccountAddr, StorageAddr, StorageVal}, + utils::hash, +}; /// Core payload needed to generate a proof for a block. 
Note that the scheduler /// may need to request some additional data from the client along with this in @@ -164,3 +167,12 @@ pub enum ContractCodeUsage { /// contract code will not appear in the [`BlockTrace`] map. Write(Vec), } + +impl ContractCodeUsage { + pub(crate) fn get_code_hash(&self) -> CodeHash { + match self { + ContractCodeUsage::Read(hash) => *hash, + ContractCodeUsage::Write(bytes) => hash(bytes), + } + } +} diff --git a/src/types.rs b/src/types.rs index 7ab6267f5..98f1aaac0 100644 --- a/src/types.rs +++ b/src/types.rs @@ -10,4 +10,5 @@ pub type HashedStorageAddr = H256; pub type HashedStorageAddrNibbles = Nibbles; pub type StorageAddr = H256; pub type StorageVal = U256; +pub type TrieRootHash = H256; pub type TxnIdx = usize; From c96354322801cf917f22fbf33d325f8aa25fb752 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 3 Oct 2023 13:31:20 -0600 Subject: [PATCH 017/208] Logic for creating the state partial trie --- Cargo.toml | 2 +- src/decoding.rs | 33 ++++++++++++++++++++++++++++----- src/processed_block_trace.rs | 1 + 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index c901bbec5..f9377e82e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ version = "0.1.0" edition = "2021" [dependencies] -eth_trie_utils = "0.6.0" +eth_trie_utils = { git = "https://github.com/mir-protocol/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" hex = "0.4.3" keccak-hash = "0.10.0" diff --git a/src/decoding.rs b/src/decoding.rs index 2370d172b..6c6e36202 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -6,6 +6,7 @@ use std::{ use eth_trie_utils::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, PartialTrie}, + trie_subsets::create_trie_subset, }; use plonky2_evm::generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}; use thiserror::Error; @@ -15,7 +16,7 @@ use crate::{ BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, }, proof_gen_types::BlockLevelData, - types::{HashedAccountAddr, TrieRootHash}, + types::{HashedAccountAddr, HashedNodeAddr, TrieRootHash}, utils::update_val_if_some, }; @@ -31,6 +32,11 @@ pub enum TraceParsingError { #[error("Tried accessing a non-existent key ({1}) in the {0} trie (root hash: {2:x})")] NonExistentTrieEntry(TrieType, Nibbles, TrieRootHash), + + // TODO: Figure out how to make this error useful/meaningful... For now this is just a + // placeholder. 
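+    // (At present this is returned whenever `create_trie_subset` fails, which
+    // in practice means a key we tried to include was absent from the trie.)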
+ #[error("Missing keys when creating sub-partial tries (Trie type: {0})")] + MissingKeysCreatingSubPartialTrie(TrieType), } #[derive(Debug)] @@ -74,16 +80,33 @@ impl ProcessedBlockTrace { } fn create_minimal_partial_tries_needed_by_txn( - _curr_block_tries: &PartialTrieState, - _nodes_used_by_txn: NodesUsedByTxn, + curr_block_tries: &PartialTrieState, + nodes_used_by_txn: NodesUsedByTxn, ) -> TraceParsingResult { - todo!() + let state_trie = Self::create_minimal_state_partial_trie( + &curr_block_tries.state, + nodes_used_by_txn.state_accesses.iter().cloned(), + )?; + + Ok(TrieInputs { + state_trie, + transactions_trie: todo!(), + receipts_trie: todo!(), + storage_tries: todo!(), + }) + } + + fn create_minimal_state_partial_trie( + state_trie: &HashedPartialTrie, + state_accesses: impl Iterator, + ) -> TraceParsingResult { + create_trie_subset(state_trie, state_accesses.map(Nibbles::from_h256_be)) + .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(TrieType::State)) } fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: ProcessedTxnInfo, - _addrs_to_code: &mut HashMap>, ) -> TraceParsingResult<()> { for (hashed_acc_addr, storage_writes) in deltas.nodes_used_by_txn.storage_writes { let storage_trie = trie_state.storage.get_mut(&hashed_acc_addr).ok_or( diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 2a24dc4c5..29f6472b9 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -200,6 +200,7 @@ impl From for ProcessedTxnInfo { } } +/// Note that "*_accesses" includes writes. #[derive(Debug, Default)] pub(crate) struct NodesUsedByTxn { pub(crate) state_accesses: Vec, From 09f095b842ee8f8cfd8d89779e42ed4c2d1b421d Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 3 Oct 2023 14:41:02 -0600 Subject: [PATCH 018/208] Finished logic for remaining partial tries --- src/decoding.rs | 64 ++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 58 insertions(+), 6 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 6c6e36202..7048f1d8b 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,6 +1,7 @@ use std::{ collections::HashMap, fmt::{self, Display, Formatter}, + iter::once, }; use eth_trie_utils::{ @@ -16,7 +17,7 @@ use crate::{ BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, }, proof_gen_types::BlockLevelData, - types::{HashedAccountAddr, HashedNodeAddr, TrieRootHash}, + types::{HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, TrieRootHash, TxnIdx}, utils::update_val_if_some, }; @@ -82,17 +83,36 @@ impl ProcessedBlockTrace { fn create_minimal_partial_tries_needed_by_txn( curr_block_tries: &PartialTrieState, nodes_used_by_txn: NodesUsedByTxn, + txn_idx: TxnIdx, ) -> TraceParsingResult { let state_trie = Self::create_minimal_state_partial_trie( &curr_block_tries.state, nodes_used_by_txn.state_accesses.iter().cloned(), )?; + // TODO: Replace cast once `eth_trie_utils` supports `into` for `usize... 
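+        // Both the txn and receipt tries are keyed by txn index, so the only
+        // key this txn's sub-trie needs is the txn's own (converted) index.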
+ let transactions_trie = Self::create_trie_subset_wrapped( + &curr_block_tries.txn, + once((txn_idx as u32).into()), + TrieType::Txn, + )?; + + let receipts_trie = Self::create_trie_subset_wrapped( + &curr_block_tries.receipt, + once((txn_idx as u32).into()), + TrieType::Receipt, + )?; + + let storage_tries = Self::create_minimal_storage_partial_tries( + &curr_block_tries.storage, + nodes_used_by_txn.storage_accesses.into_iter(), + )?; + Ok(TrieInputs { state_trie, - transactions_trie: todo!(), - receipts_trie: todo!(), - storage_tries: todo!(), + transactions_trie, + receipts_trie, + storage_tries, }) } @@ -100,8 +120,40 @@ impl ProcessedBlockTrace { state_trie: &HashedPartialTrie, state_accesses: impl Iterator, ) -> TraceParsingResult { - create_trie_subset(state_trie, state_accesses.map(Nibbles::from_h256_be)) - .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(TrieType::State)) + Self::create_trie_subset_wrapped( + state_trie, + state_accesses.map(Nibbles::from_h256_be), + TrieType::State, + ) + } + + fn create_minimal_storage_partial_tries<'a>( + storage_tries: &HashMap, + accesses_per_account: impl Iterator)>, + ) -> TraceParsingResult> { + accesses_per_account + .map(|(h_addr, mem_accesses)| { + let base_storage_trie = storage_tries + .get(&h_addr) + .ok_or(TraceParsingError::MissingAccountStorageTrie(h_addr))?; + let partial_storage_trie = Self::create_trie_subset_wrapped( + base_storage_trie, + mem_accesses.into_iter(), + TrieType::Storage, + )?; + + Ok((h_addr, partial_storage_trie)) + }) + .collect::>() + } + + fn create_trie_subset_wrapped( + trie: &HashedPartialTrie, + accesses: impl Iterator, + trie_type: TrieType, + ) -> TraceParsingResult { + create_trie_subset(trie, accesses) + .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(trie_type)) } fn apply_deltas_to_trie_state( From 0eb3a5225529c1eaf070888687a692446abc0670 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 3 Oct 2023 14:48:06 -0600 Subject: [PATCH 019/208] Added a `pub` method to generate `GenerationInputs` --- src/decoding.rs | 2 +- src/lib.rs | 4 ++-- src/processed_block_trace.rs | 14 ++++++++++++++ 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 7048f1d8b..7dd03bc73 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -70,7 +70,7 @@ struct PartialTrieState { } impl ProcessedBlockTrace { - fn into_generation_inputs(self, _b_data: BlockLevelData) -> Vec { + pub(crate) fn into_generation_inputs(self, _b_data: BlockLevelData) -> Vec { let _trie_state = PartialTrieState::default(); let _b_meta_state = BlockMetaState::default(); let txn_gen_inputs = Vec::with_capacity(self.txn_info.len()); diff --git a/src/lib.rs b/src/lib.rs index 137a8267d..b18f3816f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,5 @@ -pub mod decoding; -pub mod processed_block_trace; +mod decoding; +mod processed_block_trace; pub mod proof_gen_types; pub mod trace_protocol; pub mod types; diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 29f6472b9..361b853f2 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -4,7 +4,9 @@ use std::fmt::Debug; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::U256; +use plonky2_evm::generation::GenerationInputs; +use crate::proof_gen_types::BlockLevelData; use crate::trace_protocol::{ BlockTrace, BlockUsedContractCode, ContractCodeUsage, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo, @@ -23,6 
+25,18 @@ pub(crate) struct ProcessedBlockTrace { } impl BlockTrace { + pub fn into_proof_generation_inputs( + self, + p_meta: &ProcessingMeta, + b_data: BlockLevelData, + ) -> Vec + where + F: Fn(&CodeHash) -> Vec, + { + let proced_block_trace = self.into_processed_block_trace(p_meta); + proced_block_trace.into_generation_inputs(b_data) + } + fn into_processed_block_trace(self, p_meta: &ProcessingMeta) -> ProcessedBlockTrace where F: Fn(&CodeHash) -> Vec, From e61a9c11774a45e95b1619593201ef760e3f7d9d Mon Sep 17 00:00:00 2001 From: cpu Date: Tue, 3 Oct 2023 17:42:05 -0700 Subject: [PATCH 020/208] Add `create_dummy` method to `TxnProofGenIR` --- plonky_block_proof_gen/src/proof_types.rs | 33 +++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 3fcd9b12e..a3870d8c7 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -75,6 +75,39 @@ impl TxnProofGenIR { addresses: Vec::default(), // TODO! } } + + /// Creates a dummy transaction. + /// + /// These can be used to pad a block if the number of transactions in the + /// block is below `2`. + pub fn create_dummy(b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + Self { + signed_txn: Default::default(), + tries: Default::default(), + trie_roots_after: Default::default(), + deltas: Default::default(), + contract_code: Default::default(), + b_height, + txn_idx, + } + } + + /// Clone the `TxnProofGenIR` to a new `TxnProofGenIR` with a different + /// `b_height` and `txn_idx`. + /// + /// This can be used to pad a block if there is only one transaction in the + /// block. Block proofs need a minimum of two transactions. + pub fn clone_as(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + Self { + signed_txn: self.signed_txn.clone(), + tries: self.tries.clone(), + trie_roots_after: self.trie_roots_after.clone(), + deltas: self.deltas.clone(), + contract_code: self.contract_code.clone(), + b_height, + txn_idx, + } + } } #[derive(Clone, Debug, Default, Deserialize, Serialize)] From c2866e89a79d3eb13c90f234d6c63137d224a1a3 Mon Sep 17 00:00:00 2001 From: cpu Date: Wed, 4 Oct 2023 08:53:27 -0700 Subject: [PATCH 021/208] Fix clone_as --- plonky_block_proof_gen/src/proof_types.rs | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index a3870d8c7..724cec1cc 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -92,21 +92,16 @@ impl TxnProofGenIR { } } - /// Clone the `TxnProofGenIR` to a new `TxnProofGenIR` with a different - /// `b_height` and `txn_idx`. + /// Copy relevant fields of the `TxnProofGenIR` to a new `TxnProofGenIR` + /// with a different `b_height` and `txn_idx`. /// /// This can be used to pad a block if there is only one transaction in the /// block. Block proofs need a minimum of two transactions. 
- pub fn clone_as(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { - Self { - signed_txn: self.signed_txn.clone(), - tries: self.tries.clone(), - trie_roots_after: self.trie_roots_after.clone(), - deltas: self.deltas.clone(), - contract_code: self.contract_code.clone(), - b_height, - txn_idx, - } + pub fn dummy_with_at(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + let mut dummy = Self::create_dummy(b_height, txn_idx); + dummy.deltas = self.deltas.clone(); + dummy.trie_roots_after = self.trie_roots_after.clone(); + dummy } } From fe936f854524ae94378e265959bc7c61d2326151 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 5 Oct 2023 10:21:44 -0600 Subject: [PATCH 022/208] Fixed `dummy_with_at` not handling the deltas correctly --- plonky_block_proof_gen/src/proof_types.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 724cec1cc..84f3ff4e7 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -99,7 +99,15 @@ impl TxnProofGenIR { /// block. Block proofs need a minimum of two transactions. pub fn dummy_with_at(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { let mut dummy = Self::create_dummy(b_height, txn_idx); - dummy.deltas = self.deltas.clone(); + + let deltas = ProofBeforeAndAfterDeltas { + gas_used_before: self.deltas.gas_used_after, + gas_used_after: self.deltas.gas_used_after, + block_bloom_before: self.deltas.block_bloom_after, + block_bloom_after: self.deltas.block_bloom_after, + }; + + dummy.deltas = deltas; dummy.trie_roots_after = self.trie_roots_after.clone(); dummy } From 77b66fb0e89bcea9ed318e5e5714cd3cdb49cb83 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 5 Oct 2023 14:08:58 -0600 Subject: [PATCH 023/208] Now uses correct hashes for empty tries --- plonky_block_proof_gen/src/proof_types.rs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 84f3ff4e7..858d08df1 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -9,6 +9,12 @@ use serde::{Deserialize, Serialize}; use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; +/// 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421 +const EMPTY_TRIE_HASH: H256 = H256([ + 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, + 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, +]); + /// Data that is specific to a block and is constant for all txns in a given /// block. #[derive(Clone, Debug, Deserialize, Serialize)] @@ -81,10 +87,16 @@ impl TxnProofGenIR { /// These can be used to pad a block if the number of transactions in the /// block is below `2`. 
pub fn create_dummy(b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + let trie_roots_after = TrieRoots { + state_root: EMPTY_TRIE_HASH, + transactions_root: EMPTY_TRIE_HASH, + receipts_root: EMPTY_TRIE_HASH, + }; + Self { signed_txn: Default::default(), tries: Default::default(), - trie_roots_after: Default::default(), + trie_roots_after, deltas: Default::default(), contract_code: Default::default(), b_height, From e6035bfa977fec84eeea1f9f5fb488b29a3962b2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 5 Oct 2023 17:39:03 -0600 Subject: [PATCH 024/208] Fixed dummy txns not being converted into gen inputs correctly - Needed to be an empty `Vec` and not a `Vec` containing an empty `Vec`. --- plonky_block_proof_gen/src/proof_types.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 858d08df1..e9984f648 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -66,13 +66,18 @@ impl TxnProofGenIR { } pub(crate) fn into_generation_inputs(self, b_data: BlockLevelData) -> GenerationInputs { + let signed_txns = match self.signed_txn.is_empty() { + false => vec![self.signed_txn], + true => Vec::new(), + }; + GenerationInputs { txn_number_before: self.txn_idx.into(), gas_used_before: self.deltas.gas_used_before, block_bloom_before: self.deltas.block_bloom_before, gas_used_after: self.deltas.gas_used_after, block_bloom_after: self.deltas.block_bloom_after, - signed_txns: vec![self.signed_txn], + signed_txns, tries: self.tries, trie_roots_after: self.trie_roots_after, contract_code: self.contract_code, From 7040cc345e27b1f64a9805cf4c4a50b07e5c59a5 Mon Sep 17 00:00:00 2001 From: cpu Date: Wed, 11 Oct 2023 18:29:53 +0400 Subject: [PATCH 025/208] Impl std::error::Error for ProofGenError --- plonky_block_proof_gen/src/proof_gen.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 6f0c68aa6..08bf28ed4 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -14,8 +14,17 @@ pub type ProofGenResult = Result; // Plonky2 is still using `anyhow` for proof gen, and since this is a library, // it's probably best if we at least convert it to a `String`. 
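+// Wrapping the `String` in a newtype lets us hang the `Display` and
+// `std::error::Error` impls below off of it.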
+#[derive(Debug)] pub struct ProofGenError(pub String); +impl std::fmt::Display for ProofGenError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:#?}", self.0) + } +} + +impl std::error::Error for ProofGenError {} + impl From for ProofGenError { fn from(v: String) -> Self { Self(v) From 0e9479d89a287918d31cfbd9dcd4ade9c3831c08 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 11 Oct 2023 09:16:30 -0600 Subject: [PATCH 026/208] Finished implementing core trace processing logic --- src/decoding.rs | 228 +++++++++++++++++++++++++++++++---- src/lib.rs | 2 + src/processed_block_trace.rs | 98 ++++++--------- src/trace_protocol.rs | 15 --- src/types.rs | 2 + 5 files changed, 249 insertions(+), 96 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 7dd03bc73..b899738ee 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,7 +1,7 @@ use std::{ collections::HashMap, fmt::{self, Display, Formatter}, - iter::once, + iter::{empty, once}, }; use eth_trie_utils::{ @@ -9,18 +9,27 @@ use eth_trie_utils::{ partial_trie::{HashedPartialTrie, PartialTrie}, trie_subsets::create_trie_subset, }; -use plonky2_evm::generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}; +use ethereum_types::{Address, H256, U256}; +use plonky2_evm::{ + generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, + proof::TrieRoots, +}; use thiserror::Error; use crate::{ - processed_block_trace::{ - BlockMetaState, NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, - }, + processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites}, proof_gen_types::BlockLevelData, - types::{HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, TrieRootHash, TxnIdx}, + types::{ + Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, TrieRootHash, TxnIdx, + }, utils::update_val_if_some, }; +const EMPTY_TRIE_HASH: H256 = H256([ + 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, + 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, +]); + pub type TraceParsingResult = Result; #[derive(Debug, Error)] @@ -70,19 +79,65 @@ struct PartialTrieState { } impl ProcessedBlockTrace { - pub(crate) fn into_generation_inputs(self, _b_data: BlockLevelData) -> Vec { - let _trie_state = PartialTrieState::default(); - let _b_meta_state = BlockMetaState::default(); - let txn_gen_inputs = Vec::with_capacity(self.txn_info.len()); + pub(crate) fn into_generation_inputs( + self, + b_data: BlockLevelData, + ) -> TraceParsingResult> { + let mut curr_block_tries = PartialTrieState::default(); + + let mut tot_gas_used = U256::zero(); + let mut curr_bloom = Bloom::default(); + + let mut txn_gen_inputs = self + .txn_info + .into_iter() + .enumerate() + .map(|(txn_idx, txn_info)| { + let tries = Self::create_minimal_partial_tries_needed_by_txn( + &curr_block_tries, + &txn_info.nodes_used_by_txn, + txn_idx, + )?; + let trie_roots_after = calculate_trie_input_hashes(&tries); + let addresses = Self::get_known_addresses_if_enabled(); + + let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; + let new_bloom = txn_info.meta.block_bloom; + + let proof_gen_input = GenerationInputs { + txn_number_before: txn_idx.saturating_sub(1).into(), + gas_used_before: tot_gas_used, + block_bloom_before: curr_bloom, + gas_used_after: new_tot_gas_used, + block_bloom_after: new_bloom, + signed_txns: vec![txn_info.meta.txn_bytes], + tries, + trie_roots_after, + contract_code: txn_info.contract_code_accessed, + block_metadata: 
b_data.b_meta.clone(), + block_hashes: b_data.b_hashes.clone(), + addresses, + }; + + Self::apply_deltas_to_trie_state( + &mut curr_block_tries, + txn_info.nodes_used_by_txn, + )?; - for (_txn_idx, _txn_info) in self.txn_info.into_iter().enumerate() {} + tot_gas_used = new_tot_gas_used; + curr_bloom = new_bloom; - txn_gen_inputs + Ok(proof_gen_input) + }) + .collect::>>()?; + + Self::pad_gen_inputs_with_dummy_inputs_if_needed(&mut txn_gen_inputs, &b_data); + Ok(txn_gen_inputs) } fn create_minimal_partial_tries_needed_by_txn( curr_block_tries: &PartialTrieState, - nodes_used_by_txn: NodesUsedByTxn, + nodes_used_by_txn: &NodesUsedByTxn, txn_idx: TxnIdx, ) -> TraceParsingResult { let state_trie = Self::create_minimal_state_partial_trie( @@ -105,7 +160,7 @@ impl ProcessedBlockTrace { let storage_tries = Self::create_minimal_storage_partial_tries( &curr_block_tries.storage, - nodes_used_by_txn.storage_accesses.into_iter(), + nodes_used_by_txn.storage_accesses.iter(), )?; Ok(TrieInputs { @@ -129,20 +184,22 @@ impl ProcessedBlockTrace { fn create_minimal_storage_partial_tries<'a>( storage_tries: &HashMap, - accesses_per_account: impl Iterator)>, + accesses_per_account: impl Iterator< + Item = &'a (HashedAccountAddr, Vec), + >, ) -> TraceParsingResult> { accesses_per_account .map(|(h_addr, mem_accesses)| { let base_storage_trie = storage_tries - .get(&h_addr) - .ok_or(TraceParsingError::MissingAccountStorageTrie(h_addr))?; + .get(h_addr) + .ok_or(TraceParsingError::MissingAccountStorageTrie(*h_addr))?; let partial_storage_trie = Self::create_trie_subset_wrapped( base_storage_trie, - mem_accesses.into_iter(), + mem_accesses.iter().cloned(), TrieType::Storage, )?; - Ok((h_addr, partial_storage_trie)) + Ok((*h_addr, partial_storage_trie)) }) .collect::>() } @@ -158,16 +215,16 @@ impl ProcessedBlockTrace { fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, - deltas: ProcessedTxnInfo, + deltas: NodesUsedByTxn, ) -> TraceParsingResult<()> { - for (hashed_acc_addr, storage_writes) in deltas.nodes_used_by_txn.storage_writes { + for (hashed_acc_addr, storage_writes) in deltas.storage_writes { let storage_trie = trie_state.storage.get_mut(&hashed_acc_addr).ok_or( TraceParsingError::MissingAccountStorageTrie(hashed_acc_addr), )?; storage_trie.extend(storage_writes); } - for (hashed_acc_addr, s_trie_writes) in deltas.nodes_used_by_txn.state_writes { + for (hashed_acc_addr, s_trie_writes) in deltas.state_writes { let val_k = Nibbles::from_h256_be(hashed_acc_addr); let val_bytes = trie_state.state.get(val_k).ok_or_else(|| { TraceParsingError::NonExistentTrieEntry( @@ -194,6 +251,34 @@ impl ProcessedBlockTrace { Ok(()) } + + fn pad_gen_inputs_with_dummy_inputs_if_needed<'a>( + gen_inputs: &mut Vec, + b_data: &BlockLevelData, + ) { + match gen_inputs.len() { + 0 => { + // Need to pad with two dummy txns. + gen_inputs.extend(create_dummy_txn_pair_for_empty_block(b_data)) + } + 1 => { + // Only need one dummy txn, but it needs info from the one real txn in the + // block. + gen_inputs.push(create_dummy_txn_gen_input_single_dummy_txn( + &gen_inputs[0], + b_data, + )) + } + _ => (), + } + } + + // TODO: No idea how to implement this, so I'll come back to later... + /// If there are known addresses, return them here. + /// Only needed for debugging purposes. + fn get_known_addresses_if_enabled() -> Vec
{ + Vec::new() // TODO + } } impl StateTrieWrites { @@ -221,3 +306,102 @@ impl StateTrieWrites { Ok(()) } } + +fn calculate_trie_input_hashes(t_inputs: &TrieInputs) -> TrieRoots { + TrieRoots { + state_root: t_inputs.state_trie.hash(), + transactions_root: t_inputs.transactions_trie.hash(), + receipts_root: t_inputs.receipts_trie.hash(), + } +} + +fn create_dummy_txn_gen_input_single_dummy_txn( + prev_real_gen_input: &GenerationInputs, + b_data: &BlockLevelData, +) -> GenerationInputs { + let partial_sub_storage_tries: Vec<_> = prev_real_gen_input + .tries + .storage_tries + .iter() + .map(|(hashed_acc_addr, s_trie)| { + ( + *hashed_acc_addr, + create_fully_hashed_out_sub_partial_trie(s_trie), + ) + }) + .collect(); + + let tries = TrieInputs { + state_trie: create_fully_hashed_out_sub_partial_trie(&prev_real_gen_input.tries.state_trie), + transactions_trie: create_fully_hashed_out_sub_partial_trie( + &prev_real_gen_input.tries.transactions_trie, + ), + receipts_trie: create_fully_hashed_out_sub_partial_trie( + &prev_real_gen_input.tries.receipts_trie, + ), + storage_tries: partial_sub_storage_tries, + }; + + GenerationInputs { + txn_number_before: 0.into(), + gas_used_before: prev_real_gen_input.gas_used_after, + block_bloom_before: prev_real_gen_input.block_bloom_after, + gas_used_after: prev_real_gen_input.gas_used_after, + block_bloom_after: prev_real_gen_input.block_bloom_after, + signed_txns: Vec::default(), + tries, + trie_roots_after: prev_real_gen_input.trie_roots_after.clone(), + contract_code: HashMap::default(), + block_metadata: b_data.b_meta.clone(), + block_hashes: b_data.b_hashes.clone(), + addresses: Vec::default(), + } +} + +// We really want to get a trie with just a hash node here, and this is an easy +// way to do it. +fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedPartialTrie { + // Impossible to actually fail with an empty iter. 
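+    // Passing no keys means nothing is retained, so every node in the
+    // returned trie collapses down to just its hash.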
+ create_trie_subset(trie, empty::()).unwrap() +} + +fn create_dummy_txn_pair_for_empty_block(b_data: &BlockLevelData) -> [GenerationInputs; 2] { + [ + create_dummy_gen_input(b_data, 0), + create_dummy_gen_input(b_data, 1), + ] +} + +fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> GenerationInputs { + GenerationInputs { + txn_number_before: txn_idx.saturating_sub(1).into(), + gas_used_before: 0.into(), + block_bloom_before: Bloom::default(), + gas_used_after: 0.into(), + block_bloom_after: Bloom::default(), + signed_txns: Vec::default(), + tries: create_empty_trie_inputs(), + trie_roots_after: create_trie_roots_for_empty_tries(), + contract_code: HashMap::default(), + block_metadata: b_data.b_meta.clone(), + block_hashes: b_data.b_hashes.clone(), + addresses: Vec::default(), + } +} + +fn create_empty_trie_inputs() -> TrieInputs { + TrieInputs { + state_trie: HashedPartialTrie::default(), + transactions_trie: HashedPartialTrie::default(), + receipts_trie: HashedPartialTrie::default(), + storage_tries: Vec::default(), + } +} + +const fn create_trie_roots_for_empty_tries() -> TrieRoots { + TrieRoots { + state_root: EMPTY_TRIE_HASH, + transactions_root: EMPTY_TRIE_HASH, + receipts_root: EMPTY_TRIE_HASH, + } +} diff --git a/src/lib.rs b/src/lib.rs index b18f3816f..4b092e6a3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,5 @@ +#![feature(trait_alias)] + mod decoding; mod processed_block_trace; pub mod proof_gen_types; diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 361b853f2..6697f0fb4 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -6,21 +6,20 @@ use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::U256; use plonky2_evm::generation::GenerationInputs; +use crate::decoding::TraceParsingResult; use crate::proof_gen_types::BlockLevelData; use crate::trace_protocol::{ - BlockTrace, BlockUsedContractCode, ContractCodeUsage, StorageTriesPreImage, TrieCompact, - TriePreImage, TxnInfo, + BlockTrace, ContractCodeUsage, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo, }; use crate::types::{ - Bloom, CodeHash, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, StorageAddr, - StorageVal, + Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, + HashedStorageAddrNibbles, StorageAddr, StorageVal, }; use crate::utils::hash; pub(crate) struct ProcessedBlockTrace { pub(crate) state_trie: HashedPartialTrie, pub(crate) storage_tries: HashMap, - pub(crate) contract_code: HashMap>, pub(crate) txn_info: Vec, } @@ -29,9 +28,9 @@ impl BlockTrace { self, p_meta: &ProcessingMeta, b_data: BlockLevelData, - ) -> Vec + ) -> TraceParsingResult> where - F: Fn(&CodeHash) -> Vec, + F: CodeHashResolveFunc, { let proced_block_trace = self.into_processed_block_trace(p_meta); proced_block_trace.into_generation_inputs(b_data) @@ -39,16 +38,16 @@ impl BlockTrace { fn into_processed_block_trace(self, p_meta: &ProcessingMeta) -> ProcessedBlockTrace where - F: Fn(&CodeHash) -> Vec, + F: CodeHashResolveFunc, { ProcessedBlockTrace { state_trie: process_state_trie(self.state_trie), storage_tries: process_storage_tries(self.storage_tries), - contract_code: process_block_used_contract_code( - self.contract_code, - &p_meta.resolve_code_hash_fn, - ), - txn_info: self.txn_info.into_iter().map(|t| t.into()).collect(), + txn_info: self + .txn_info + .into_iter() + .map(|t| t.into_processed_txn_info(&p_meta.resolve_code_hash_fn)) + .collect(), } } } @@ -86,35 +85,17 @@ fn process_compact_trie(_trie: 
TrieCompact) -> HashedPartialTrie { todo!() } -fn process_block_used_contract_code( - code: BlockUsedContractCode, - resolve_code_hash_fn: &F, -) -> HashMap> -where - F: Fn(&CodeHash) -> Vec, -{ - match code { - BlockUsedContractCode::Full(c) => c, - BlockUsedContractCode::Digests(d) => { - let code_hash_to_code_iter = d - .into_iter() - .map(|c_hash| (c_hash, resolve_code_hash_fn(&c_hash))); - HashMap::from_iter(code_hash_to_code_iter) - } - } -} - #[derive(Debug)] pub struct ProcessingMeta where - F: Fn(&CodeHash) -> Vec, + F: CodeHashResolveFunc, { resolve_code_hash_fn: F, } impl ProcessingMeta where - F: Fn(&CodeHash) -> Vec, + F: CodeHashResolveFunc, { pub fn new(resolve_code_hash_fn: F) -> Self { Self { @@ -126,18 +107,19 @@ where #[derive(Debug)] pub(crate) struct ProcessedTxnInfo { pub(crate) nodes_used_by_txn: NodesUsedByTxn, - pub(crate) contract_code_read: Vec, - pub(crate) contract_code_created: Vec<(CodeHash, Vec)>, - pub(crate) new_meta_state: BlockMetaState, + pub(crate) contract_code_accessed: HashMap>, + pub(crate) meta: TxnMetaState, } -impl From for ProcessedTxnInfo { - fn from(v: TxnInfo) -> Self { +impl TxnInfo { + fn into_processed_txn_info( + self, + code_hash_resolve_f: &F, + ) -> ProcessedTxnInfo { let mut nodes_used_by_txn = NodesUsedByTxn::default(); - let mut contract_code_read = Vec::new(); - let mut contract_code_created = Vec::new(); + let mut contract_code_accessed = HashMap::new(); - for (addr, trace) in v.traces { + for (addr, trace) in self.traces { let hashed_addr = hash(addr.as_bytes()); let storage_writes = trace.storage_written.unwrap_or_default(); @@ -181,6 +163,7 @@ impl From for ProcessedTxnInfo { .into_iter() .map(|(k, v)| (storage_addr_to_nibbles_even_nibble_fixed_hashed(&k), v)) .collect(); + nodes_used_by_txn .storage_writes .push((hashed_addr, storage_writes_vec)); @@ -189,27 +172,29 @@ impl From for ProcessedTxnInfo { if let Some(c_usage) = trace.code_usage { match c_usage { - ContractCodeUsage::Read(c_hash) => contract_code_read.push(c_hash), + ContractCodeUsage::Read(c_hash) => { + contract_code_accessed + .entry(c_hash) + .or_insert_with(|| code_hash_resolve_f(&c_hash)); + } ContractCodeUsage::Write(c_bytes) => { let c_hash = hash(&c_bytes); - - contract_code_read.push(c_hash); - contract_code_created.push((c_hash, c_bytes)); + contract_code_accessed.insert(c_hash, c_bytes); } } } } - let new_meta_state = BlockMetaState { - gas_used: v.meta.gas_used, - block_bloom: v.meta.bloom, + let new_meta_state = TxnMetaState { + txn_bytes: self.meta.byte_code, + gas_used: self.meta.gas_used, + block_bloom: self.meta.bloom, }; - Self { + ProcessedTxnInfo { nodes_used_by_txn, - contract_code_read, - contract_code_created, - new_meta_state, + contract_code_accessed, + meta: new_meta_state, } } } @@ -234,14 +219,9 @@ pub(crate) struct StateTrieWrites { pub(crate) code_hash: Option, } -#[derive(Debug)] -enum TraceStorageAccess { - Read(StorageAddr), - Write(StorageAddr, StorageVal), -} - #[derive(Debug, Default)] -pub(crate) struct BlockMetaState { +pub(crate) struct TxnMetaState { + pub(crate) txn_bytes: Vec, pub(crate) gas_used: u64, pub(crate) block_bloom: Bloom, } diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index c51f6cc73..a013da1c0 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -42,9 +42,6 @@ pub struct BlockTrace { /// Map of hashed account addr --> storage trie pre-image. pub storage_tries: StorageTriesPreImage, - /// All contract code used by txns in the block. 
- pub contract_code: BlockUsedContractCode, - /// Traces and other info per txn. The index of the txn corresponds to the /// slot in this vec. pub txn_info: Vec, @@ -86,18 +83,6 @@ pub enum StorageTriesPreImage { MultipleTries(HashMap), } -/// Contract code hit by txns in the block. -#[derive(Debug)] -pub enum BlockUsedContractCode { - /// Contains a map of the code hash to the actual contract code. - Full(HashMap>), - - /// Only contains the code hashes that were used. It's up to the prover - /// generation scheduler to get the code for each hash. This is the more - /// data efficient option. - Digests(Vec), -} - /// Info specific to txns in the block. #[derive(Debug)] pub struct TxnInfo { diff --git a/src/types.rs b/src/types.rs index 98f1aaac0..e39947852 100644 --- a/src/types.rs +++ b/src/types.rs @@ -12,3 +12,5 @@ pub type StorageAddr = H256; pub type StorageVal = U256; pub type TrieRootHash = H256; pub type TxnIdx = usize; + +pub trait CodeHashResolveFunc = Fn(&CodeHash) -> Vec; From cc213fb90955b16bc27240aac2e0aff0a0c6c513 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 11 Oct 2023 13:12:35 -0600 Subject: [PATCH 027/208] Updated to work with the latest `plonky2` --- plonky_block_proof_gen/src/proof_gen.rs | 48 +++++++++++++---------- plonky_block_proof_gen/src/proof_types.rs | 38 ++++++++++++------ 2 files changed, 53 insertions(+), 33 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 08bf28ed4..e8eb2b29d 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -3,8 +3,8 @@ use plonky2_evm::{all_stark::AllStark, config::StarkConfig, proof::PublicValues} use crate::{ proof_types::{ - AggregatableProof, BlockLevelData, GeneratedAggProof, GeneratedBlockProof, - GeneratedTxnProof, ProofBeforeAndAfterDeltas, ProofCommon, TxnProofGenIR, + create_extra_block_data, AggregatableProof, GeneratedAggProof, GeneratedBlockProof, + GeneratedTxnProof, OtherBlockData, ProofBeforeAndAfterDeltas, ProofCommon, TxnProofGenIR, }, prover_state::ProverState, types::PlonkyProofIntern, @@ -35,7 +35,7 @@ impl From for ProofGenError { pub fn generate_txn_proof( p_state: &ProverState, start_info: TxnProofGenIR, - b_data: BlockLevelData, + other_data: OtherBlockData, ) -> ProofGenResult { let b_height = start_info.b_height; let txn_idx = start_info.txn_idx; @@ -46,7 +46,7 @@ pub fn generate_txn_proof( .prove_root( &AllStark::default(), &StarkConfig::standard_fast_config(), - start_info.into_generation_inputs(b_data), + start_info.into_generation_inputs(other_data), &mut TimingTree::default(), ) .map_err(|err| err.to_string())?; @@ -72,9 +72,9 @@ pub fn generate_agg_proof( p_state: &ProverState, lhs_child: &AggregatableProof, rhs_child: &AggregatableProof, - b_data: BlockLevelData, + other_data: OtherBlockData, ) -> ProofGenResult { - let expanded_agg_proofs = expand_aggregatable_proofs(lhs_child, rhs_child, b_data); + let expanded_agg_proofs = expand_aggregatable_proofs(lhs_child, rhs_child, other_data); let deltas = expanded_agg_proofs.p_vals.extra_block_data.clone().into(); let (agg_proof_intern, p_vals) = p_state @@ -118,23 +118,28 @@ struct ExpandedAggregatableProof<'a> { fn expand_aggregatable_proofs<'a>( lhs_child: &'a AggregatableProof, rhs_child: &'a AggregatableProof, - b_data: BlockLevelData, + other_data: OtherBlockData, ) -> ExpandedAggregatableProofs<'a> { let (expanded_lhs, lhs_common) = expand_aggregatable_proof(lhs_child); let (expanded_rhs, rhs_common) = 
expand_aggregatable_proof(rhs_child); - let txn_idxs = lhs_child + let p_underlying_txns = lhs_child .underlying_txns() .combine(&rhs_child.underlying_txns()); let deltas = merge_lhs_and_rhs_deltas(&lhs_common.deltas, &rhs_common.deltas); - let extra_block_data = - deltas.into_extra_block_data(txn_idxs.txn_idxs.start, txn_idxs.txn_idxs.end); + + let extra_block_data = create_extra_block_data( + deltas, + other_data.genesis_state_trie_root, + p_underlying_txns.txn_idxs.start, + p_underlying_txns.txn_idxs.end, + ); let p_vals = PublicValues { trie_roots_before: lhs_common.roots_before.clone(), trie_roots_after: rhs_common.roots_after.clone(), - block_metadata: b_data.b_meta, - block_hashes: b_data.b_hashes, + block_metadata: other_data.b_data.b_meta, + block_hashes: other_data.b_data.b_hashes, extra_block_data, }; @@ -176,21 +181,24 @@ pub fn generate_block_proof( p_state: &ProverState, prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, curr_block_agg_proof: &GeneratedAggProof, - b_data: BlockLevelData, + other_data: OtherBlockData, ) -> ProofGenResult { let b_height = curr_block_agg_proof.common.b_height; let parent_intern = prev_opt_parent_b_proof.map(|p| &p.intern); + let extra_block_data = create_extra_block_data( + curr_block_agg_proof.common.deltas.clone(), + other_data.genesis_state_trie_root, + curr_block_agg_proof.underlying_txns.txn_idxs.start, + curr_block_agg_proof.underlying_txns.txn_idxs.end, + ); + let p_vals = PublicValues { trie_roots_before: curr_block_agg_proof.common.roots_before.clone(), trie_roots_after: curr_block_agg_proof.common.roots_after.clone(), - block_metadata: b_data.b_meta, - block_hashes: b_data.b_hashes, - extra_block_data: curr_block_agg_proof - .common - .deltas - .clone() - .into_extra_block_data(0, curr_block_agg_proof.underlying_txns.txn_idxs.end), + block_metadata: other_data.b_data.b_meta, + block_hashes: other_data.b_data.b_hashes, + extra_block_data, }; let (b_proof_intern, _) = p_state diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index e9984f648..fac9e6ac6 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -15,6 +15,13 @@ const EMPTY_TRIE_HASH: H256 = H256([ 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, ]); +/// Other data that is needed for proof gen. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct OtherBlockData { + pub b_data: BlockLevelData, + pub genesis_state_trie_root: H256, +} + /// Data that is specific to a block and is constant for all txns in a given /// block. #[derive(Clone, Debug, Deserialize, Serialize)] @@ -65,13 +72,14 @@ impl TxnProofGenIR { self.txn_idx } - pub(crate) fn into_generation_inputs(self, b_data: BlockLevelData) -> GenerationInputs { + pub(crate) fn into_generation_inputs(self, other_data: OtherBlockData) -> GenerationInputs { let signed_txns = match self.signed_txn.is_empty() { false => vec![self.signed_txn], true => Vec::new(), }; GenerationInputs { + genesis_state_trie_root: other_data.genesis_state_trie_root, txn_number_before: self.txn_idx.into(), gas_used_before: self.deltas.gas_used_before, block_bloom_before: self.deltas.block_bloom_before, @@ -81,8 +89,8 @@ impl TxnProofGenIR { tries: self.tries, trie_roots_after: self.trie_roots_after, contract_code: self.contract_code, - block_metadata: b_data.b_meta, - block_hashes: b_data.b_hashes, + block_metadata: other_data.b_data.b_meta, + block_hashes: other_data.b_data.b_hashes, addresses: Vec::default(), // TODO! 
} } @@ -151,16 +159,20 @@ impl> From for ProofBeforeAndAfterDeltas { } } -impl ProofBeforeAndAfterDeltas { - pub fn into_extra_block_data(self, txn_start: TxnIdx, txn_end: TxnIdx) -> ExtraBlockData { - ExtraBlockData { - txn_number_before: txn_start.into(), - txn_number_after: txn_end.into(), - gas_used_before: self.gas_used_before, - gas_used_after: self.gas_used_after, - block_bloom_before: self.block_bloom_before, - block_bloom_after: self.block_bloom_after, - } +pub fn create_extra_block_data( + deltas: ProofBeforeAndAfterDeltas, + genesis_root: H256, + txn_start: TxnIdx, + txn_end: TxnIdx, +) -> ExtraBlockData { + ExtraBlockData { + genesis_state_trie_root: genesis_root, + txn_number_before: txn_start.into(), + txn_number_after: txn_end.into(), + gas_used_before: deltas.gas_used_before, + gas_used_after: deltas.gas_used_after, + block_bloom_before: deltas.block_bloom_before, + block_bloom_after: deltas.block_bloom_after, } } From 27578a789e36f43be1542a219e31132a60e8efdc Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 11 Oct 2023 15:29:13 -0600 Subject: [PATCH 028/208] Updated deps to use `0xPolygonZero` instead of `mir-protocol` - How this was building successfully on it's own is beyond me... --- plonky_block_proof_gen/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plonky_block_proof_gen/README.md b/plonky_block_proof_gen/README.md index 0facaf9c7..9ef43f7ec 100644 --- a/plonky_block_proof_gen/README.md +++ b/plonky_block_proof_gen/README.md @@ -2,17 +2,17 @@ Library for generating proofs from proof IR. -For the time being, the only library that produces proof IR is currently [plonky-edge-block-trace-parser](https://github.com/mir-protocol/plonky-edge-block-trace-parser). Down the road, the IR will be produced by decoding the proof gen protocol. +For the time being, the only library that produces proof IR is currently [plonky-edge-block-trace-parser](https://github.com/0xPolygonZero/plonky-edge-block-trace-parser). Down the road, the IR will be produced by decoding the proof gen protocol. # General Usage (Extremely rough, will change) -In [proof_gen.rs](https://github.com/mir-protocol/plonky-block-proof-gen/blob/main/src/proof_gen.rs), there are three core functions: +In [proof_gen.rs](https://github.com/0xPolygonZero/plonky-block-proof-gen/blob/main/src/proof_gen.rs), there are three core functions: - `generate_txn_proof` - `generate_agg_proof` - `generate_block_proof` -Both libraries are currently targeting the latest [plonky2](https://github.com/mir-protocol/plonky2). One noteworthy piece of data that all proofs need is this: +Both libraries are currently targeting the latest [plonky2](https://github.com/0xPolygonZero/plonky2). One noteworthy piece of data that all proofs need is this: ```rust #[derive(Debug, Clone, Serialize, Deserialize)] From 076d51df6589fe24a4bc82607223a06492ae6b8f Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 18 Oct 2023 11:09:26 -0600 Subject: [PATCH 029/208] Added simple usage diagrams --- README.md | 4 +++- docs/usage_seq_diagrams.md | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 docs/usage_seq_diagrams.md diff --git a/README.md b/README.md index a66ff4d64..bbbc4c4f6 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,9 @@ A flexible protocol that clients (eg. full nodes) can use to easily generate blo ## Specification -In progress... +Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. 
+
+TODO...
 
 ## License
 
diff --git a/docs/usage_seq_diagrams.md b/docs/usage_seq_diagrams.md
new file mode 100644
index 000000000..aff040465
--- /dev/null
+++ b/docs/usage_seq_diagrams.md
@@ -0,0 +1,36 @@
+# Usage Diagrams
+These are some hacked together diagrams showing how the protocol will (likely) be used. Also included is what the old Edge proof generation process looked like, as a reference.
+
+## Proof Protocol
+
+```mermaid
+sequenceDiagram
+    proof protocol client->>proof scheduler: protocol_payload
+    proof scheduler->>protocol decoder (lib): protolcol_payload
+    Note over proof scheduler,protocol decoder (lib): "txn_proof_gen_ir" are the payloads sent to Paladin for a txn
+    protocol decoder (lib)->>proof scheduler: [txn_proof_gen_ir]
+    proof scheduler->>paladin: [txn_proof_gen_ir]
+    Note over proof scheduler,paladin: Paladin schedules jobs on mulitple machines and returns a block proof
+    loop txn_proof_gen_ir
+        paladin->>worker machine: proof_gen_payload (txn, agg, block)
+        worker machine->>paladin: generated_proof (txn, agg, block)
+    end
+    paladin->>proof scheduler: block_proof
+    Note over proof scheduler,checkpoint contract: Note: Might send to an external service instead that compresses the proof
+    proof scheduler->>checkpoint contract: block_proof
+```
+
+## Edge Proof Generation
+
+```mermaid
+sequenceDiagram
+    edge->>zero provers (leader): block_trace
+    zero provers (leader)->>trace parsing lib: block_trace
+    Note over zero provers (leader),trace parsing lib: "txn_proof_gen_ir" are the payloads sent to each worker for a txn
+    trace parsing lib->>zero provers (leader): [txn_proof_gen_ir]
+    loop txn_proof_gen_ir
+        zero provers (leader)->>zero provers (worker): proof_gen_payload (txn, agg, block)
+        zero provers (worker)->>zero provers (leader): generated_proof (txn, agg, block)
+    end
+    zero provers (leader)->>checkpoint contract: block_proof
+```

From be8f38fa74230c11c8055abe1f2e9b93ae15d226 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Wed, 18 Oct 2023 15:20:22 -0600
Subject: [PATCH 030/208] Switch some deps to use `0xPolygonZero` instead of
 `mir-protocol`

- Our org changed names, and this was causing some crazy dep issues.
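For context, cargo identifies a git dependency by its source URL, so the old and new org URLs count as two entirely different sources; any crate in the dependency graph still pinning the old org pulls in a second copy of the same crates, which then fail to unify at the type level. The fix is just pointing every spec at the renamed org, with the pinned rev untouched, e.g.:

```toml
# Before (old org name; cargo sees this as a distinct source):
plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" }

# After (renamed org, same rev):
plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" }
```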
--- Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f9377e82e..f9b54b168 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,11 +5,11 @@ version = "0.1.0" edition = "2021" [dependencies] -eth_trie_utils = { git = "https://github.com/mir-protocol/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } +eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" hex = "0.4.3" keccak-hash = "0.10.0" -plonky2_evm = { git = "https://github.com/mir-protocol/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" From b69b1d4032d777699366706525d1979aa19ecb21 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 19 Oct 2023 13:01:11 -0600 Subject: [PATCH 031/208] Updated deps --- Cargo.toml | 2 +- src/decoding.rs | 15 +++++++++------ src/processed_block_trace.rs | 7 +++---- src/proof_gen_types.rs | 18 +++++++++--------- src/types.rs | 17 +++++++++++++++++ 5 files changed, 39 insertions(+), 20 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f9b54b168..93a33072b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,7 +9,7 @@ eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", ethereum-types = "0.14.1" hex = "0.4.3" keccak-hash = "0.10.0" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "0b5ac312c0f9efdcc6d85c10256d2843d42215a2" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "49976ea2a98dcb6052bd6cf3a65f730e55727330" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" diff --git a/src/decoding.rs b/src/decoding.rs index b899738ee..77170b58c 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -18,9 +18,9 @@ use thiserror::Error; use crate::{ processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites}, - proof_gen_types::BlockLevelData, types::{ - Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, TrieRootHash, TxnIdx, + BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, + OtherBlockData, TrieRootHash, TxnIdx, }, utils::update_val_if_some, }; @@ -81,7 +81,7 @@ struct PartialTrieState { impl ProcessedBlockTrace { pub(crate) fn into_generation_inputs( self, - b_data: BlockLevelData, + other_data: OtherBlockData, ) -> TraceParsingResult> { let mut curr_block_tries = PartialTrieState::default(); @@ -113,9 +113,10 @@ impl ProcessedBlockTrace { signed_txns: vec![txn_info.meta.txn_bytes], tries, trie_roots_after, + genesis_state_trie_root: other_data.genesis_state_trie_root, contract_code: txn_info.contract_code_accessed, - block_metadata: b_data.b_meta.clone(), - block_hashes: b_data.b_hashes.clone(), + block_metadata: other_data.b_data.b_meta.clone(), + block_hashes: other_data.b_data.b_hashes.clone(), addresses, }; @@ -131,7 +132,7 @@ impl ProcessedBlockTrace { }) .collect::>>()?; - Self::pad_gen_inputs_with_dummy_inputs_if_needed(&mut txn_gen_inputs, &b_data); + Self::pad_gen_inputs_with_dummy_inputs_if_needed(&mut txn_gen_inputs, &other_data.b_data); Ok(txn_gen_inputs) } @@ -351,6 +352,7 @@ fn create_dummy_txn_gen_input_single_dummy_txn( signed_txns: Vec::default(), tries, trie_roots_after: prev_real_gen_input.trie_roots_after.clone(), + genesis_state_trie_root: 
prev_real_gen_input.genesis_state_trie_root, contract_code: HashMap::default(), block_metadata: b_data.b_meta.clone(), block_hashes: b_data.b_hashes.clone(), @@ -382,6 +384,7 @@ fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> Generatio signed_txns: Vec::default(), tries: create_empty_trie_inputs(), trie_roots_after: create_trie_roots_for_empty_tries(), + genesis_state_trie_root: TrieRootHash::default(), contract_code: HashMap::default(), block_metadata: b_data.b_meta.clone(), block_hashes: b_data.b_hashes.clone(), diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 6697f0fb4..2a675ba66 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -7,13 +7,12 @@ use ethereum_types::U256; use plonky2_evm::generation::GenerationInputs; use crate::decoding::TraceParsingResult; -use crate::proof_gen_types::BlockLevelData; use crate::trace_protocol::{ BlockTrace, ContractCodeUsage, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo, }; use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, - HashedStorageAddrNibbles, StorageAddr, StorageVal, + HashedStorageAddrNibbles, OtherBlockData, StorageAddr, StorageVal, }; use crate::utils::hash; @@ -27,13 +26,13 @@ impl BlockTrace { pub fn into_proof_generation_inputs( self, p_meta: &ProcessingMeta, - b_data: BlockLevelData, + other_data: OtherBlockData, ) -> TraceParsingResult> where F: CodeHashResolveFunc, { let proced_block_trace = self.into_processed_block_trace(p_meta); - proced_block_trace.into_generation_inputs(b_data) + proced_block_trace.into_generation_inputs(other_data) } fn into_processed_block_trace(self, p_meta: &ProcessingMeta) -> ProcessedBlockTrace diff --git a/src/proof_gen_types.rs b/src/proof_gen_types.rs index 935c5c6a2..8b8145773 100644 --- a/src/proof_gen_types.rs +++ b/src/proof_gen_types.rs @@ -1,16 +1,10 @@ use std::borrow::Borrow; use ethereum_types::U256; -use plonky2_evm::proof::{BlockHashes, BlockMetadata, ExtraBlockData}; +use plonky2_evm::proof::ExtraBlockData; use serde::{Deserialize, Serialize}; -use crate::types::TxnIdx; - -#[derive(Debug)] -pub struct BlockLevelData { - pub b_meta: BlockMetadata, - pub b_hashes: BlockHashes, -} +use crate::types::{TrieRootHash, TxnIdx}; #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ProofBeforeAndAfterDeltas { @@ -34,8 +28,14 @@ impl> From for ProofBeforeAndAfterDeltas { } impl ProofBeforeAndAfterDeltas { - pub fn into_extra_block_data(self, txn_start: TxnIdx, txn_end: TxnIdx) -> ExtraBlockData { + pub fn into_extra_block_data( + self, + genesis_state_trie_root: TrieRootHash, + txn_start: TxnIdx, + txn_end: TxnIdx, + ) -> ExtraBlockData { ExtraBlockData { + genesis_state_trie_root, txn_number_before: txn_start.into(), txn_number_after: txn_end.into(), gas_used_before: self.gas_used_before, diff --git a/src/types.rs b/src/types.rs index e39947852..ffb007dd6 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,5 +1,7 @@ use eth_trie_utils::nibbles::Nibbles; use ethereum_types::{H256, U256}; +use plonky2_evm::proof::{BlockHashes, BlockMetadata}; +use serde::{Deserialize, Serialize}; pub type BlockHeight = u64; pub type Bloom = [U256; 8]; @@ -14,3 +16,18 @@ pub type TrieRootHash = H256; pub type TxnIdx = usize; pub trait CodeHashResolveFunc = Fn(&CodeHash) -> Vec; + +/// Other data that is needed for proof gen. 
+#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct OtherBlockData { + pub b_data: BlockLevelData, + pub genesis_state_trie_root: TrieRootHash, +} + +/// Data that is specific to a block and is constant for all txns in a given +/// block. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BlockLevelData { + pub b_meta: BlockMetadata, + pub b_hashes: BlockHashes, +} From bda84fa3c86626aa6a7282e3c0d787667fae77de Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 19 Oct 2023 13:04:13 -0600 Subject: [PATCH 032/208] Dep update --- plonky_block_proof_gen/src/proof_gen.rs | 5 +++-- plonky_block_proof_gen/src/proof_types.rs | 18 ++---------------- 2 files changed, 5 insertions(+), 18 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index e8eb2b29d..5f4673142 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,10 +1,11 @@ use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig, proof::PublicValues}; +use proof_protocol_decoder::types::OtherBlockData; use crate::{ proof_types::{ create_extra_block_data, AggregatableProof, GeneratedAggProof, GeneratedBlockProof, - GeneratedTxnProof, OtherBlockData, ProofBeforeAndAfterDeltas, ProofCommon, TxnProofGenIR, + GeneratedTxnProof, ProofBeforeAndAfterDeltas, ProofCommon, TxnProofGenIR, }, prover_state::ProverState, types::PlonkyProofIntern, @@ -127,7 +128,7 @@ fn expand_aggregatable_proofs<'a>( .underlying_txns() .combine(&rhs_child.underlying_txns()); let deltas = merge_lhs_and_rhs_deltas(&lhs_common.deltas, &rhs_common.deltas); - + let extra_block_data = create_extra_block_data( deltas, other_data.genesis_state_trie_root, diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index fac9e6ac6..1830b126a 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -3,8 +3,9 @@ use std::{borrow::Borrow, collections::HashMap}; use ethereum_types::{H256, U256}; use plonky2_evm::{ generation::{GenerationInputs, TrieInputs}, - proof::{BlockHashes, BlockMetadata, ExtraBlockData, TrieRoots}, + proof::{ExtraBlockData, TrieRoots}, }; +use proof_protocol_decoder::types::OtherBlockData; use serde::{Deserialize, Serialize}; use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; @@ -15,21 +16,6 @@ const EMPTY_TRIE_HASH: H256 = H256([ 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, ]); -/// Other data that is needed for proof gen. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct OtherBlockData { - pub b_data: BlockLevelData, - pub genesis_state_trie_root: H256, -} - -/// Data that is specific to a block and is constant for all txns in a given -/// block. 
-#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct BlockLevelData { - pub b_meta: BlockMetadata, - pub b_hashes: BlockHashes, -} - #[derive(Clone, Debug, Deserialize, Serialize)] pub struct ProofCommon { pub b_height: BlockHeight, From ddd9a3600a7159b5f7fde2330f00a403224e2161 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 19 Oct 2023 13:16:28 -0600 Subject: [PATCH 033/208] Made two modules pub --- src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 4b092e6a3..cf8d032ab 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,7 +1,7 @@ #![feature(trait_alias)] -mod decoding; -mod processed_block_trace; +pub mod decoding; +pub mod processed_block_trace; pub mod proof_gen_types; pub mod trace_protocol; pub mod types; From d188c627bef77d916e7847b89e77e050c421ac8b Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 24 Oct 2023 11:07:55 -0700 Subject: [PATCH 034/208] Refactored `TxnProofGenIR` - Now is just a wrapper for `GenerationInputs` that also contains the txn idx. --- plonky_block_proof_gen/src/proof_gen.rs | 7 +- plonky_block_proof_gen/src/proof_types.rs | 93 ++++++++--------------- 2 files changed, 35 insertions(+), 65 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 5f4673142..762f09567 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -36,18 +36,17 @@ impl From for ProofGenError { pub fn generate_txn_proof( p_state: &ProverState, start_info: TxnProofGenIR, - other_data: OtherBlockData, ) -> ProofGenResult { - let b_height = start_info.b_height; + let b_height = start_info.b_height(); let txn_idx = start_info.txn_idx; - let deltas = start_info.deltas.clone(); + let deltas = start_info.deltas(); let (txn_proof_intern, p_vals) = p_state .state .prove_root( &AllStark::default(), &StarkConfig::standard_fast_config(), - start_info.into_generation_inputs(other_data), + start_info.gen_inputs, &mut TimingTree::default(), ) .map_err(|err| err.to_string())?; diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 1830b126a..5ae37efe8 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -1,11 +1,10 @@ -use std::{borrow::Borrow, collections::HashMap}; +use std::borrow::Borrow; use ethereum_types::{H256, U256}; use plonky2_evm::{ - generation::{GenerationInputs, TrieInputs}, - proof::{ExtraBlockData, TrieRoots}, + generation::GenerationInputs, + proof::{BlockMetadata, ExtraBlockData, TrieRoots}, }; -use proof_protocol_decoder::types::OtherBlockData; use serde::{Deserialize, Serialize}; use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; @@ -28,56 +27,25 @@ pub struct ProofCommon { /// use to generate a proof for that txn. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct TxnProofGenIR { - /// Signed txn bytecode. - pub signed_txn: Vec, - - /// The partial trie states at the start of the txn. - pub tries: TrieInputs, - - /// The expected root hashes of all tries (except storage tries) after the - /// txn is executed. - pub trie_roots_after: TrieRoots, - - /// Additional info of state that changed before and after the txn executed. - pub deltas: ProofBeforeAndAfterDeltas, - - /// Mapping between smart contract code hashes and the contract byte code. - /// All account smart contracts that are invoked by this txn will have an - /// entry present. 
- pub contract_code: HashMap>, - - /// The height of the block. - pub b_height: BlockHeight, - - /// The index of the txn in the block. pub txn_idx: TxnIdx, + pub gen_inputs: GenerationInputs, } impl TxnProofGenIR { - pub fn get_txn_idx(&self) -> TxnIdx { - self.txn_idx + pub fn b_height(&self) -> BlockHeight { + self.gen_inputs.block_metadata.block_number.as_u64() } - pub(crate) fn into_generation_inputs(self, other_data: OtherBlockData) -> GenerationInputs { - let signed_txns = match self.signed_txn.is_empty() { - false => vec![self.signed_txn], - true => Vec::new(), - }; + pub fn txn_idx(&self) -> TxnIdx { + self.txn_idx + } - GenerationInputs { - genesis_state_trie_root: other_data.genesis_state_trie_root, - txn_number_before: self.txn_idx.into(), - gas_used_before: self.deltas.gas_used_before, - block_bloom_before: self.deltas.block_bloom_before, - gas_used_after: self.deltas.gas_used_after, - block_bloom_after: self.deltas.block_bloom_after, - signed_txns, - tries: self.tries, - trie_roots_after: self.trie_roots_after, - contract_code: self.contract_code, - block_metadata: other_data.b_data.b_meta, - block_hashes: other_data.b_data.b_hashes, - addresses: Vec::default(), // TODO! + pub fn deltas(&self) -> ProofBeforeAndAfterDeltas { + ProofBeforeAndAfterDeltas { + gas_used_before: self.gen_inputs.gas_used_before, + gas_used_after: self.gen_inputs.gas_used_after, + block_bloom_before: self.gen_inputs.block_bloom_before, + block_bloom_after: self.gen_inputs.block_bloom_after, } } @@ -92,14 +60,20 @@ impl TxnProofGenIR { receipts_root: EMPTY_TRIE_HASH, }; - Self { - signed_txn: Default::default(), - tries: Default::default(), + let block_metadata = BlockMetadata { + block_number: b_height.into(), + ..Default::default() + }; + + let gen_inputs = GenerationInputs { trie_roots_after, - deltas: Default::default(), - contract_code: Default::default(), - b_height, + block_metadata, + ..Default::default() + }; + + Self { txn_idx, + gen_inputs, } } @@ -111,15 +85,12 @@ impl TxnProofGenIR { pub fn dummy_with_at(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { let mut dummy = Self::create_dummy(b_height, txn_idx); - let deltas = ProofBeforeAndAfterDeltas { - gas_used_before: self.deltas.gas_used_after, - gas_used_after: self.deltas.gas_used_after, - block_bloom_before: self.deltas.block_bloom_after, - block_bloom_after: self.deltas.block_bloom_after, - }; + dummy.gen_inputs.gas_used_before = self.gen_inputs.gas_used_after; + dummy.gen_inputs.gas_used_after = self.gen_inputs.gas_used_after; + dummy.gen_inputs.block_bloom_before = self.gen_inputs.block_bloom_after; + dummy.gen_inputs.block_bloom_after = self.gen_inputs.block_bloom_after; - dummy.deltas = deltas; - dummy.trie_roots_after = self.trie_roots_after.clone(); + dummy.gen_inputs.trie_roots_after = self.gen_inputs.trie_roots_after.clone(); dummy } } From 2080f41b60fa5d4664f47f5527f70359a6471b6b Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 24 Oct 2023 11:27:43 -0700 Subject: [PATCH 035/208] Moved `TxnProofGenIR` into this project - It was starting to cause some cyclic dependency issues, and I think it might make more sense for it to live here anyways. 
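For reference, a minimal sketch of what a downstream caller looks like after the move (the scheduler code here is hypothetical; only the crate path and the `b_height()`/`txn_idx()` accessors come from the diff below):

```rust
// Hypothetical downstream scheduler; `proof_protocol_decoder` (this crate) now
// owns the IR type instead of the proof gen library.
use proof_protocol_decoder::types::TxnProofGenIR;

fn schedule_txn_proofs(ir: Vec<TxnProofGenIR>) {
    for txn_ir in ir {
        // Both accessors are defined on the moved type (see `src/types.rs` below).
        let _job_key = (txn_ir.b_height(), txn_ir.txn_idx());
        // ... hand `txn_ir` off to a prover worker here ...
    }
}
```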
--- src/decoding.rs | 44 +++++++++++++------ src/processed_block_trace.rs | 9 ++-- src/types.rs | 84 +++++++++++++++++++++++++++++++++++- 3 files changed, 117 insertions(+), 20 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 77170b58c..977758670 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -20,7 +20,7 @@ use crate::{ processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites}, types::{ BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, - OtherBlockData, TrieRootHash, TxnIdx, + OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, }, utils::update_val_if_some, }; @@ -79,10 +79,10 @@ struct PartialTrieState { } impl ProcessedBlockTrace { - pub(crate) fn into_generation_inputs( + pub(crate) fn into_txn_proof_gen_ir( self, other_data: OtherBlockData, - ) -> TraceParsingResult> { + ) -> TraceParsingResult> { let mut curr_block_tries = PartialTrieState::default(); let mut tot_gas_used = U256::zero(); @@ -104,7 +104,7 @@ impl ProcessedBlockTrace { let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; let new_bloom = txn_info.meta.block_bloom; - let proof_gen_input = GenerationInputs { + let gen_inputs = GenerationInputs { txn_number_before: txn_idx.saturating_sub(1).into(), gas_used_before: tot_gas_used, block_bloom_before: curr_bloom, @@ -120,6 +120,11 @@ impl ProcessedBlockTrace { addresses, }; + let txn_proof_gen_ir = TxnProofGenIR { + txn_idx, + gen_inputs, + }; + Self::apply_deltas_to_trie_state( &mut curr_block_tries, txn_info.nodes_used_by_txn, @@ -128,7 +133,7 @@ impl ProcessedBlockTrace { tot_gas_used = new_tot_gas_used; curr_bloom = new_bloom; - Ok(proof_gen_input) + Ok(txn_proof_gen_ir) }) .collect::>>()?; @@ -253,8 +258,8 @@ impl ProcessedBlockTrace { Ok(()) } - fn pad_gen_inputs_with_dummy_inputs_if_needed<'a>( - gen_inputs: &mut Vec, + fn pad_gen_inputs_with_dummy_inputs_if_needed( + gen_inputs: &mut Vec, b_data: &BlockLevelData, ) { match gen_inputs.len() { @@ -266,7 +271,7 @@ impl ProcessedBlockTrace { // Only need one dummy txn, but it needs info from the one real txn in the // block. 
gen_inputs.push(create_dummy_txn_gen_input_single_dummy_txn( - &gen_inputs[0], + &gen_inputs[0].gen_inputs, b_data, )) } @@ -319,7 +324,7 @@ fn calculate_trie_input_hashes(t_inputs: &TrieInputs) -> TrieRoots { fn create_dummy_txn_gen_input_single_dummy_txn( prev_real_gen_input: &GenerationInputs, b_data: &BlockLevelData, -) -> GenerationInputs { +) -> TxnProofGenIR { let partial_sub_storage_tries: Vec<_> = prev_real_gen_input .tries .storage_tries @@ -343,7 +348,7 @@ fn create_dummy_txn_gen_input_single_dummy_txn( storage_tries: partial_sub_storage_tries, }; - GenerationInputs { + let gen_inputs = GenerationInputs { txn_number_before: 0.into(), gas_used_before: prev_real_gen_input.gas_used_after, block_bloom_before: prev_real_gen_input.block_bloom_after, @@ -357,7 +362,9 @@ fn create_dummy_txn_gen_input_single_dummy_txn( block_metadata: b_data.b_meta.clone(), block_hashes: b_data.b_hashes.clone(), addresses: Vec::default(), - } + }; + + gen_inputs_to_ir(gen_inputs, 1) } // We really want to get a trie with just a hash node here, and this is an easy @@ -367,15 +374,15 @@ fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedP create_trie_subset(trie, empty::()).unwrap() } -fn create_dummy_txn_pair_for_empty_block(b_data: &BlockLevelData) -> [GenerationInputs; 2] { +fn create_dummy_txn_pair_for_empty_block(b_data: &BlockLevelData) -> [TxnProofGenIR; 2] { [ create_dummy_gen_input(b_data, 0), create_dummy_gen_input(b_data, 1), ] } -fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> GenerationInputs { - GenerationInputs { +fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> TxnProofGenIR { + let gen_inputs = GenerationInputs { txn_number_before: txn_idx.saturating_sub(1).into(), gas_used_before: 0.into(), block_bloom_before: Bloom::default(), @@ -389,6 +396,15 @@ fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> Generatio block_metadata: b_data.b_meta.clone(), block_hashes: b_data.b_hashes.clone(), addresses: Vec::default(), + }; + + gen_inputs_to_ir(gen_inputs, txn_idx) +} + +fn gen_inputs_to_ir(gen_inputs: GenerationInputs, txn_idx: TxnIdx) -> TxnProofGenIR { + TxnProofGenIR { + txn_idx, + gen_inputs, } } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 2a675ba66..bdf945723 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -4,7 +4,6 @@ use std::fmt::Debug; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::U256; -use plonky2_evm::generation::GenerationInputs; use crate::decoding::TraceParsingResult; use crate::trace_protocol::{ @@ -12,7 +11,7 @@ use crate::trace_protocol::{ }; use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, - HashedStorageAddrNibbles, OtherBlockData, StorageAddr, StorageVal, + HashedStorageAddrNibbles, OtherBlockData, StorageAddr, StorageVal, TxnProofGenIR, }; use crate::utils::hash; @@ -23,16 +22,16 @@ pub(crate) struct ProcessedBlockTrace { } impl BlockTrace { - pub fn into_proof_generation_inputs( + pub fn into_txn_proof_gen_ir( self, p_meta: &ProcessingMeta, other_data: OtherBlockData, - ) -> TraceParsingResult> + ) -> TraceParsingResult> where F: CodeHashResolveFunc, { let proced_block_trace = self.into_processed_block_trace(p_meta); - proced_block_trace.into_generation_inputs(other_data) + proced_block_trace.into_txn_proof_gen_ir(other_data) } fn into_processed_block_trace(self, p_meta: &ProcessingMeta) -> 
ProcessedBlockTrace diff --git a/src/types.rs b/src/types.rs index ffb007dd6..4dfd58a99 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,8 +1,13 @@ use eth_trie_utils::nibbles::Nibbles; use ethereum_types::{H256, U256}; -use plonky2_evm::proof::{BlockHashes, BlockMetadata}; +use plonky2_evm::{ + generation::GenerationInputs, + proof::{BlockHashes, BlockMetadata, TrieRoots}, +}; use serde::{Deserialize, Serialize}; +use crate::proof_gen_types::ProofBeforeAndAfterDeltas; + pub type BlockHeight = u64; pub type Bloom = [U256; 8]; pub type CodeHash = H256; @@ -17,6 +22,20 @@ pub type TxnIdx = usize; pub trait CodeHashResolveFunc = Fn(&CodeHash) -> Vec; +/// 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421 +const EMPTY_TRIE_HASH: H256 = H256([ + 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, + 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, +]); + +/// An `IR` (Intermediate Representation) for a given txn in a block that we can +/// use to generate a proof for that txn. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TxnProofGenIR { + pub txn_idx: TxnIdx, + pub gen_inputs: GenerationInputs, +} + /// Other data that is needed for proof gen. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct OtherBlockData { @@ -31,3 +50,66 @@ pub struct BlockLevelData { pub b_meta: BlockMetadata, pub b_hashes: BlockHashes, } +impl TxnProofGenIR { + pub fn b_height(&self) -> BlockHeight { + self.gen_inputs.block_metadata.block_number.as_u64() + } + + pub fn txn_idx(&self) -> TxnIdx { + self.txn_idx + } + + pub fn deltas(&self) -> ProofBeforeAndAfterDeltas { + ProofBeforeAndAfterDeltas { + gas_used_before: self.gen_inputs.gas_used_before, + gas_used_after: self.gen_inputs.gas_used_after, + block_bloom_before: self.gen_inputs.block_bloom_before, + block_bloom_after: self.gen_inputs.block_bloom_after, + } + } + + /// Creates a dummy transaction. + /// + /// These can be used to pad a block if the number of transactions in the + /// block is below `2`. + pub fn create_dummy(b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + let trie_roots_after = TrieRoots { + state_root: EMPTY_TRIE_HASH, + transactions_root: EMPTY_TRIE_HASH, + receipts_root: EMPTY_TRIE_HASH, + }; + + let block_metadata = BlockMetadata { + block_number: b_height.into(), + ..Default::default() + }; + + let gen_inputs = GenerationInputs { + trie_roots_after, + block_metadata, + ..Default::default() + }; + + Self { + txn_idx, + gen_inputs, + } + } + + /// Copy relevant fields of the `TxnProofGenIR` to a new `TxnProofGenIR` + /// with a different `b_height` and `txn_idx`. + /// + /// This can be used to pad a block if there is only one transaction in the + /// block. Block proofs need a minimum of two transactions. 
+ pub fn dummy_with_at(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + let mut dummy = Self::create_dummy(b_height, txn_idx); + + dummy.gen_inputs.gas_used_before = self.gen_inputs.gas_used_after; + dummy.gen_inputs.gas_used_after = self.gen_inputs.gas_used_after; + dummy.gen_inputs.block_bloom_before = self.gen_inputs.block_bloom_after; + dummy.gen_inputs.block_bloom_after = self.gen_inputs.block_bloom_after; + + dummy.gen_inputs.trie_roots_after = self.gen_inputs.trie_roots_after.clone(); + dummy + } +} From 80048d4a5316ced355b577da8b6329ddaf891469 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 24 Oct 2023 11:41:26 -0700 Subject: [PATCH 036/208] Moved some types to the protocol --- plonky_block_proof_gen/src/proof_gen.rs | 7 +- plonky_block_proof_gen/src/proof_types.rs | 109 +--------------------- 2 files changed, 8 insertions(+), 108 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 762f09567..3e34a2d99 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,11 +1,14 @@ use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig, proof::PublicValues}; -use proof_protocol_decoder::types::OtherBlockData; +use proof_protocol_decoder::{ + proof_gen_types::ProofBeforeAndAfterDeltas, + types::{OtherBlockData, TxnProofGenIR}, +}; use crate::{ proof_types::{ create_extra_block_data, AggregatableProof, GeneratedAggProof, GeneratedBlockProof, - GeneratedTxnProof, ProofBeforeAndAfterDeltas, ProofCommon, TxnProofGenIR, + GeneratedTxnProof, ProofCommon, }, prover_state::ProverState, types::PlonkyProofIntern, diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 5ae37efe8..c73d95742 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -1,20 +1,10 @@ -use std::borrow::Borrow; - -use ethereum_types::{H256, U256}; -use plonky2_evm::{ - generation::GenerationInputs, - proof::{BlockMetadata, ExtraBlockData, TrieRoots}, -}; +use ethereum_types::H256; +use plonky2_evm::proof::{ExtraBlockData, TrieRoots}; +use proof_protocol_decoder::proof_gen_types::ProofBeforeAndAfterDeltas; use serde::{Deserialize, Serialize}; use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; -/// 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421 -const EMPTY_TRIE_HASH: H256 = H256([ - 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, - 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, -]); - #[derive(Clone, Debug, Deserialize, Serialize)] pub struct ProofCommon { pub b_height: BlockHeight, @@ -23,99 +13,6 @@ pub struct ProofCommon { pub roots_after: TrieRoots, } -/// An `IR` (Intermediate Representation) for a given txn in a block that we can -/// use to generate a proof for that txn. 
-#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct TxnProofGenIR { - pub txn_idx: TxnIdx, - pub gen_inputs: GenerationInputs, -} - -impl TxnProofGenIR { - pub fn b_height(&self) -> BlockHeight { - self.gen_inputs.block_metadata.block_number.as_u64() - } - - pub fn txn_idx(&self) -> TxnIdx { - self.txn_idx - } - - pub fn deltas(&self) -> ProofBeforeAndAfterDeltas { - ProofBeforeAndAfterDeltas { - gas_used_before: self.gen_inputs.gas_used_before, - gas_used_after: self.gen_inputs.gas_used_after, - block_bloom_before: self.gen_inputs.block_bloom_before, - block_bloom_after: self.gen_inputs.block_bloom_after, - } - } - - /// Creates a dummy transaction. - /// - /// These can be used to pad a block if the number of transactions in the - /// block is below `2`. - pub fn create_dummy(b_height: BlockHeight, txn_idx: TxnIdx) -> Self { - let trie_roots_after = TrieRoots { - state_root: EMPTY_TRIE_HASH, - transactions_root: EMPTY_TRIE_HASH, - receipts_root: EMPTY_TRIE_HASH, - }; - - let block_metadata = BlockMetadata { - block_number: b_height.into(), - ..Default::default() - }; - - let gen_inputs = GenerationInputs { - trie_roots_after, - block_metadata, - ..Default::default() - }; - - Self { - txn_idx, - gen_inputs, - } - } - - /// Copy relevant fields of the `TxnProofGenIR` to a new `TxnProofGenIR` - /// with a different `b_height` and `txn_idx`. - /// - /// This can be used to pad a block if there is only one transaction in the - /// block. Block proofs need a minimum of two transactions. - pub fn dummy_with_at(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { - let mut dummy = Self::create_dummy(b_height, txn_idx); - - dummy.gen_inputs.gas_used_before = self.gen_inputs.gas_used_after; - dummy.gen_inputs.gas_used_after = self.gen_inputs.gas_used_after; - dummy.gen_inputs.block_bloom_before = self.gen_inputs.block_bloom_after; - dummy.gen_inputs.block_bloom_after = self.gen_inputs.block_bloom_after; - - dummy.gen_inputs.trie_roots_after = self.gen_inputs.trie_roots_after.clone(); - dummy - } -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct ProofBeforeAndAfterDeltas { - pub gas_used_before: U256, - pub gas_used_after: U256, - pub block_bloom_before: [U256; 8], - pub block_bloom_after: [U256; 8], -} - -impl> From for ProofBeforeAndAfterDeltas { - fn from(v: T) -> Self { - let b = v.borrow(); - - Self { - gas_used_before: b.gas_used_before, - gas_used_after: b.gas_used_after, - block_bloom_before: b.block_bloom_before, - block_bloom_after: b.block_bloom_after, - } - } -} - pub fn create_extra_block_data( deltas: ProofBeforeAndAfterDeltas, genesis_root: H256, From 87bfe7ac842103fd1eb5886e8be978b2f31c998a Mon Sep 17 00:00:00 2001 From: cpu Date: Thu, 26 Oct 2023 08:51:09 -0700 Subject: [PATCH 037/208] Make BlockTrace serializeable/deserializeable --- rust-toolchain.toml | 2 ++ src/trace_protocol.rs | 21 +++++++++++---------- 2 files changed, 13 insertions(+), 10 deletions(-) create mode 100644 rust-toolchain.toml diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 000000000..5d56faf9a --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly" diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index a013da1c0..3b892f535 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -25,6 +25,7 @@ use std::collections::HashMap; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::{Address, U256}; +use serde::{Deserialize, Serialize}; use crate::{ types::{Bloom, CodeHash, 
HashedAccountAddr, StorageAddr, StorageVal}, @@ -34,7 +35,7 @@ use crate::{ /// Core payload needed to generate a proof for a block. Note that the scheduler /// may need to request some additional data from the client along with this in /// order to generate a proof. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct BlockTrace { /// State trie pre-image. pub state_trie: TriePreImage, @@ -48,7 +49,7 @@ pub struct BlockTrace { } /// Minimal hashed out tries needed by all txns in the block. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub enum TriePreImage { Uncompressed(TrieUncompressed), Compact(TrieCompact), @@ -57,21 +58,21 @@ pub enum TriePreImage { // TODO /// Bulkier format that is quicker to process. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct TrieUncompressed {} // TODO /// Compact representation of a trie (will likely be very close to https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md) -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct TrieCompact {} // TODO /// Trie format that is in exactly the same format of our internal trie format. /// This is the fastest format for us to processes. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct TrieDirect(pub HashedPartialTrie); -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub enum StorageTriesPreImage { /// A single hash map that contains all node hashes from all storage tries /// involved in the block. We can reconstruct the individual storage tries @@ -84,7 +85,7 @@ pub enum StorageTriesPreImage { } /// Info specific to txns in the block. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct TxnInfo { /// Trace data for the txn. This is used by the protocol to: /// - Mutate it's own trie state between txns to arrive at the correct trie @@ -97,7 +98,7 @@ pub struct TxnInfo { pub meta: TxnMeta, } -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct TxnMeta { /// Txn byte code. pub byte_code: Vec, @@ -120,7 +121,7 @@ pub struct TxnMeta { /// /// Specifically, since we can not execute the txn before proof generation, we /// rely on a separate EVM to run the txn and supply this data for us. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct TxnTrace { /// If the balance changed, then the new balance will appear here. pub balance: Option, @@ -143,7 +144,7 @@ pub struct TxnTrace { } /// Contract code access type. Used by txn traces. -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub enum ContractCodeUsage { /// Contract was read. Read(CodeHash), From ad5f71eccc04e5d309cab7abe396b40cf63489da Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 13:27:18 -0600 Subject: [PATCH 038/208] Made txn/receipt nodes comments a bit more clear --- src/trace_protocol.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 3b892f535..ca40e8e9d 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -103,11 +103,14 @@ pub struct TxnMeta { /// Txn byte code. pub byte_code: Vec, - /// Rlped bytes of the new txn node inserted into the txn trie by this txn. + /// Rlped bytes of the new txn value inserted into the txn trie by + /// this txn. Note that the key is not included and this is only the rlped + /// value of the node! pub new_txn_trie_node_byte: Vec, - /// Rlped bytes of the new receipt node inserted into the receipt trie by - /// this txn. 
+ /// Rlped bytes of the new receipt value inserted into the receipt trie by + /// this txn. Note that the key is not included and this is only the rlped + /// value of the node! pub new_receipt_trie_node_byte: Vec, /// Gas used by this txn (Note: not cumulative gas used). From cff12f6d90785a5ae984eb767e1ad83c147fc705 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 14:04:01 -0600 Subject: [PATCH 039/208] Now can extract code from compact pre-images - Realized that the compact pre-image contains code used, so we should probably take advantage of that. - Not sure if the other formats will provide this (they probably shouldn't), so this might change a bit down the road. --- src/processed_block_trace.rs | 32 +++++++++++++++++++++++++++----- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index bdf945723..828bf1db4 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -38,13 +38,26 @@ impl BlockTrace { where F: CodeHashResolveFunc, { + let (storage_tries, provided_contract_code) = process_storage_tries(self.storage_tries); + + let code_hash_resolve_f = |c_hash: &_| { + let provided_contract_code_ref = provided_contract_code.as_ref(); + + provided_contract_code_ref.and_then(|included_c_hash_lookup| { + included_c_hash_lookup + .get(c_hash) + .cloned() + .or_else(|| Some((p_meta.resolve_code_hash_fn)(c_hash))) + }).expect("Code hash resolve function should always be able to resolve a code hash to it's byte code but failed to!") + }; + ProcessedBlockTrace { state_trie: process_state_trie(self.state_trie), - storage_tries: process_storage_tries(self.storage_tries), + storage_tries, txn_info: self .txn_info .into_iter() - .map(|t| t.into_processed_txn_info(&p_meta.resolve_code_hash_fn)) + .map(|t| t.into_processed_txn_info(&code_hash_resolve_f)) .collect(), } } @@ -60,7 +73,10 @@ fn process_state_trie(trie: TriePreImage) -> HashedPartialTrie { fn process_storage_tries( trie: StorageTriesPreImage, -) -> HashMap { +) -> ( + HashMap, + Option>>, +) { match trie { StorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t), StorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t), @@ -69,13 +85,19 @@ fn process_storage_tries( fn process_single_storage_trie( _trie: TriePreImage, -) -> HashMap { +) -> ( + HashMap, + Option>>, +) { todo!() } fn process_multiple_storage_tries( _tries: HashMap, -) -> HashMap { +) -> ( + HashMap, + Option>>, +) { todo!() } From 9abafeefa07760ebedef7c4ea5ef5d058e7dec4c Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 27 Oct 2023 17:23:46 -0400 Subject: [PATCH 040/208] Fix starting ranges --- plonky_block_proof_gen/src/prover_state.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 0c404447b..1389b5bc4 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -27,13 +27,13 @@ pub struct ProverStateBuilder { impl Default for ProverStateBuilder { fn default() -> Self { Self { - arithmetic_circuit_size: 9..22, - byte_packing_circuit_size: 9..22, - cpu_circuit_size: 9..22, - keccak_circuit_size: 9..22, + arithmetic_circuit_size: 16..22, + byte_packing_circuit_size: 10..22, + cpu_circuit_size: 15..22, + keccak_circuit_size: 14..22, keccak_sponge_circuit_size: 9..22, - logic_circuit_size: 9..22, - memory_circuit_size: 9..22, + logic_circuit_size: 12..22, + 
memory_circuit_size: 18..22, } } } From 4b2dd99679de3fc0ff88c354269310fa890f26d0 Mon Sep 17 00:00:00 2001 From: vuittont60 <81072379+vuittont60@users.noreply.github.com> Date: Tue, 31 Oct 2023 15:02:42 +0800 Subject: [PATCH 041/208] docs: fix typo --- docs/usage_seq_diagrams.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/usage_seq_diagrams.md b/docs/usage_seq_diagrams.md index aff040465..e0cf6bb43 100644 --- a/docs/usage_seq_diagrams.md +++ b/docs/usage_seq_diagrams.md @@ -10,7 +10,7 @@ sequenceDiagram Note over proof scheduler,protocol decoder (lib): "txn_proof_gen_ir" are the payloads sent to Paladin for a txn protocol decoder (lib)->>proof scheduler: [txn_proof_gen_ir] proof scheduler->>paladin: [txn_proof_gen_ir] - Note over proof scheduler,paladin: Paladin schedules jobs on mulitple machines and returns a block proof + Note over proof scheduler,paladin: Paladin schedules jobs on multiple machines and returns a block proof loop txn_proof_gen_ir paladin->>worker machine: proof_gen_payload (txn, agg, block) worker machine->>paladin: generated_proof (txn, agg, block) From 3ce695a8f4a1e98c4feebef1ee993a7b15a2c66b Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 31 Oct 2023 11:54:59 -0700 Subject: [PATCH 042/208] Updated trace spec to handle combined pre-images - Did not realize that the compact format has both tries combined into one payload, so we needed to adjust for this. --- src/processed_block_trace.rs | 75 +++++++++++++++++++++++------------- src/trace_protocol.rs | 42 ++++++++++++++------ 2 files changed, 78 insertions(+), 39 deletions(-) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 828bf1db4..4f8925c74 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -7,7 +7,9 @@ use ethereum_types::U256; use crate::decoding::TraceParsingResult; use crate::trace_protocol::{ - BlockTrace, ContractCodeUsage, StorageTriesPreImage, TrieCompact, TriePreImage, TxnInfo, + BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, + SeperateStorageTriesPreImage, SeperateTriePreImage, SeperateTriePreImages, TrieCompact, + TxnInfo, }; use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, @@ -38,10 +40,10 @@ impl BlockTrace { where F: CodeHashResolveFunc, { - let (storage_tries, provided_contract_code) = process_storage_tries(self.storage_tries); + let pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); let code_hash_resolve_f = |c_hash: &_| { - let provided_contract_code_ref = provided_contract_code.as_ref(); + let provided_contract_code_ref = pre_image_data.extra_code_hash_mappings.as_ref(); provided_contract_code_ref.and_then(|included_c_hash_lookup| { included_c_hash_lookup @@ -52,8 +54,8 @@ impl BlockTrace { }; ProcessedBlockTrace { - state_trie: process_state_trie(self.state_trie), - storage_tries, + state_trie: pre_image_data.state, + storage_tries: pre_image_data.storage, txn_info: self .txn_info .into_iter() @@ -63,45 +65,64 @@ impl BlockTrace { } } -fn process_state_trie(trie: TriePreImage) -> HashedPartialTrie { +struct ProcessedBlockTracePreImages { + state: HashedPartialTrie, + storage: HashMap, + extra_code_hash_mappings: Option>>, +} + +fn process_block_trace_trie_pre_images( + block_trace_pre_images: BlockTraceTriePreImages, +) -> ProcessedBlockTracePreImages { + match block_trace_pre_images { + BlockTraceTriePreImages::Seperate(t) => process_seperate_trie_pre_images(t), + BlockTraceTriePreImages::Combined(t) => 
process_combined_trie_pre_images(t),
    }
}

fn process_combined_trie_pre_images(tries: CombinedPreImages) -> ProcessedBlockTracePreImages {
    match tries {
        CombinedPreImages::Compact(t) => process_compact_trie(t),
    }
}

fn process_seperate_trie_pre_images(tries: SeperateTriePreImages) -> ProcessedBlockTracePreImages {
    ProcessedBlockTracePreImages {
        state: process_state_trie(tries.state),
        storage: process_storage_tries(tries.storage),
        extra_code_hash_mappings: None,
    }
}

fn process_state_trie(trie: SeperateTriePreImage) -> HashedPartialTrie {
    match trie {
        SeperateTriePreImage::Uncompressed(_) => todo!(),
        SeperateTriePreImage::Direct(t) => t.0,
    }
}

fn process_storage_tries(
    trie: SeperateStorageTriesPreImage,
) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
    match trie {
        SeperateStorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t),
        SeperateStorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t),
    }
}

fn process_single_storage_trie(
    _trie: SeperateTriePreImage,
) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
    todo!()
}

fn process_multiple_storage_tries(
    _tries: HashMap<HashedAccountAddr, SeperateTriePreImage>,
) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
    todo!()
}

fn process_compact_trie(_trie: TrieCompact) -> ProcessedBlockTracePreImages {
    todo!()
}
diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs
index ca40e8e9d..1b9001dd3 100644
--- a/src/trace_protocol.rs
+++ b/src/trace_protocol.rs
@@ -37,11 +37,8 @@ use crate::{
 /// order to generate a proof.
 #[derive(Debug, Serialize, Deserialize)]
 pub struct BlockTrace {
-    /// State trie pre-image.
-    pub state_trie: TriePreImage,
-
-    /// Map of hashed account addr --> storage trie pre-image.
-    pub storage_tries: StorageTriesPreImage,
+    /// The trie pre-images (state & storage) in multiple possible formats.
+    pub trie_pre_images: BlockTraceTriePreImages,
 
     /// Traces and other info per txn. The index of the txn corresponds to the
     /// slot in this vec.
@@ -50,12 +47,31 @@ pub struct BlockTrace {
 
 /// Minimal hashed out tries needed by all txns in the block.
 #[derive(Debug, Serialize, Deserialize)]
-pub enum TriePreImage {
+pub enum BlockTraceTriePreImages {
+    Seperate(SeperateTriePreImages),
+    Combined(CombinedPreImages),
+}
+
+/// State/Storage trie pre-images that are separate.
+#[derive(Debug, Serialize, Deserialize)]
+pub struct SeperateTriePreImages {
+    pub state: SeperateTriePreImage,
+    pub storage: SeperateStorageTriesPreImage,
+}
+
+/// A trie pre-image where state & storage are separate.
+#[derive(Debug, Serialize, Deserialize)]
+pub enum SeperateTriePreImage {
     Uncompressed(TrieUncompressed),
-    Compact(TrieCompact),
     Direct(TrieDirect),
 }
+
+/// A trie pre-image where both state & storage are combined into one payload.
+#[derive(Debug, Serialize, Deserialize)]
+pub enum CombinedPreImages {
+    Compact(TrieCompact),
+}
+
 // TODO
 /// Bulkier format that is quicker to process.
#[derive(Debug, Serialize, Deserialize)] @@ -73,15 +89,15 @@ pub struct TrieCompact {} pub struct TrieDirect(pub HashedPartialTrie); #[derive(Debug, Serialize, Deserialize)] -pub enum StorageTriesPreImage { +pub enum SeperateStorageTriesPreImage { /// A single hash map that contains all node hashes from all storage tries /// involved in the block. We can reconstruct the individual storage tries /// by the storage root hash in the state entries. - SingleTrie(TriePreImage), + SingleTrie(SeperateTriePreImage), /// Each storage trie is sent over in a hashmap with the hashed account /// address as a key. - MultipleTries(HashMap), + MultipleTries(HashMap), } /// Info specific to txns in the block. @@ -126,10 +142,12 @@ pub struct TxnMeta { /// rely on a separate EVM to run the txn and supply this data for us. #[derive(Debug, Serialize, Deserialize)] pub struct TxnTrace { - /// If the balance changed, then the new balance will appear here. + /// If the balance changed, then the new balance will appear here. Will be + /// `None` if no change. pub balance: Option, - /// If the nonce changed, then the new nonce will appear here. + /// If the nonce changed, then the new nonce will appear here. Will be + /// `None` if no change. pub nonce: Option, /// Account addresses that were only read by the txn. From a431f9f649629e36692c9f1edd702ff3f4bc12a9 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 10:16:58 -0600 Subject: [PATCH 043/208] Fixed incorrect project name in readme - Thanks to @npwardberkeley for pointing this out! --- plonky_block_proof_gen/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/README.md b/plonky_block_proof_gen/README.md index 9ef43f7ec..3919b0a8a 100644 --- a/plonky_block_proof_gen/README.md +++ b/plonky_block_proof_gen/README.md @@ -1,4 +1,4 @@ -# Plonky Edge block trace parser +# Plonky block proof generator Library for generating proofs from proof IR. From 9193d76333adab2f885ace129fdef5aa34659517 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 10:49:55 -0600 Subject: [PATCH 044/208] Added more to the readme --- README.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index bbbc4c4f6..a8ab7c55b 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,18 @@ -# Proof Protocol Decoder +# Proof protocol decoder A flexible protocol that clients (eg. full nodes) can use to easily generate block proofs for different chains. ## Specification -Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. +Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. The specification itself is in the repo [here](src/trace_protocol.rs). -TODO... +Because processing the incoming proof protocol payload is not a resource bottleneck, the design is not worrying too much about performance. Instead, the core focus is flexibility in clients creating their own implementation, where the protocol supports multiple ways to provide different pieces of data. For example, there are multiple different formats available to provide the trie pre-images in, and the implementor can choose whichever is closest to its own internal data structures. + +TODO + +## Adding more to the specification + +We want this to be as easy to write an implementation for as possible! 
If you are finding that you need to do heavy work on your end to adhere to this spec, it may also be the case that other clients have internal data structures similar to your own and are potentially doing the same work. Since it's probably best to only do the work once, please feel free to create a PR or open an issue to add support to the spec/decoder! ## License From e1a657a8662764a6a4f7a8bdbd5a84e5bb09c923 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 24 Oct 2023 11:27:43 -0700 Subject: [PATCH 045/208] Moved `TxnProofGenIR` into this project - It was starting to cause some cyclic dependency issues, and I think it might make more sense for it to live here anyways. --- src/compact_prestate_processing.rs | 89 ++++++++++++++++++++++++++++++ src/lib.rs | 1 + src/trace_protocol.rs | 4 +- 3 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 src/compact_prestate_processing.rs diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs new file mode 100644 index 000000000..f170901b2 --- /dev/null +++ b/src/compact_prestate_processing.rs @@ -0,0 +1,89 @@ +//! Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md + +use eth_trie_utils::partial_trie::HashedPartialTrie; +use ethereum_types::H256; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use crate::trace_protocol::TrieCompact; + +pub type CompactParsingResult = Result; + +type NodeHash = H256; +type Value = Vec; + +#[derive(Debug, Error)] +pub enum CompactParsingError {} + +#[derive(Debug)] +struct Header { + version: u8, +} + +#[derive(Debug, Deserialize, Serialize)] +struct Key { + is_even: bool, + bytes: Vec, +} + +#[derive(Debug)] +enum OperatorCode { + Leaf = 0x00, + Extension = 0x01, + Branch = 0x02, + Hash = 0x03, + Code = 0x04, + AccountLeaf = 0x05, + EmptyRoot = 0x06, +} + +#[derive(Debug)] +enum Operator { + Leaf(), + Extension, + Branch, + Hash, + Code, + AccountLeaf, + EmptyRoot, +} + +#[derive(Debug, Default)] +struct ParserState { + stack: Vec, +} + +impl ParserState { + fn process_operator( + &mut self, + _bytes: &mut impl Iterator, + ) -> CompactParsingResult { + todo!() + } +} + +#[derive(Debug)] +enum StackEntry { + AccountLeaf(AccountLeafData), + Code(Vec), + Empty, + Hash(NodeHash), + Leaf(Key, Value), + Extension(Key), +} + +#[derive(Debug)] +struct AccountLeafData {} + +pub(crate) fn process_compact_prestate(state: TrieCompact) -> HashedPartialTrie { + let _parser = ParserState::default(); + let _byte_iter = state.bytes.into_iter(); + + loop {} + + todo!() +} + +fn parse_header(_bytes: &mut impl Iterator) -> CompactParsingResult
{ + todo!() +} diff --git a/src/lib.rs b/src/lib.rs index cf8d032ab..47e376751 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,6 @@ #![feature(trait_alias)] +mod compact_prestate_processing; pub mod decoding; pub mod processed_block_trace; pub mod proof_gen_types; diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 1b9001dd3..0dccdee0d 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -80,7 +80,9 @@ pub struct TrieUncompressed {} // TODO /// Compact representation of a trie (will likely be very close to https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md) #[derive(Debug, Serialize, Deserialize)] -pub struct TrieCompact {} +pub struct TrieCompact { + pub bytes: Vec, +} // TODO /// Trie format that is in exactly the same format of our internal trie format. From ee2b18f1fe195350c78f5c82683f4f0cb326d7b5 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 13 Oct 2023 15:41:37 -0600 Subject: [PATCH 046/208] Impled more rough structure for compact decoding --- Cargo.toml | 1 + src/compact_prestate_processing.rs | 109 ++++++++++++++++++++++++++--- 2 files changed, 100 insertions(+), 10 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 93a33072b..72314ce09 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ version = "0.1.0" edition = "2021" [dependencies] +enumn = "0.1.12" eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" hex = "0.4.3" diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index f170901b2..868c28047 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -1,5 +1,7 @@ //! Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md +use std::fmt::{self, Display}; + use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::H256; use serde::{Deserialize, Serialize}; @@ -13,21 +15,36 @@ type NodeHash = H256; type Value = Vec; #[derive(Debug, Error)] -pub enum CompactParsingError {} +pub enum CompactParsingError { + #[error("Missing header")] + MissingHeader, + + #[error("Invalid opcode operator (\"{0:x}\"")] + InvalidOperator(u8), + + #[error("Reached the end of the byte stream when we still expected more data")] + UnexpectedEndOfStream, +} #[derive(Debug)] struct Header { version: u8, } +impl Display for Header { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Erigon block witness version {}", self.version) + } +} + #[derive(Debug, Deserialize, Serialize)] struct Key { is_even: bool, bytes: Vec, } -#[derive(Debug)] -enum OperatorCode { +#[derive(Debug, enumn::N)] +enum Opcode { Leaf = 0x00, Extension = 0x01, Branch = 0x02, @@ -56,8 +73,72 @@ struct ParserState { impl ParserState { fn process_operator( &mut self, - _bytes: &mut impl Iterator, + bytes: &mut impl Iterator, ) -> CompactParsingResult { + let opcode_byte = bytes + .next() + .ok_or(CompactParsingError::UnexpectedEndOfStream)?; + let opcode = + Opcode::n(opcode_byte).ok_or(CompactParsingError::InvalidOperator(opcode_byte))?; + + self.process_data_following_opcode(opcode, bytes)?; + + todo!() + } + + fn process_data_following_opcode( + &mut self, + opcode: Opcode, + bytes: &mut impl Iterator, + ) -> CompactParsingResult<()> { + match opcode { + Opcode::Leaf => self.process_leaf(bytes), + Opcode::Extension => self.process_extension(bytes), + 
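For intuition about what these opcode handlers will eventually build, here is a tiny self-contained sketch of the stack machine the Erigon witness spec describes (the `Node` type and helper below are illustrative, not types from this crate): instructions push nodes, and a `BRANCH` pops one child per set bit of its mask.

```rust
// Illustrative only; real node/key handling is much richer.
#[derive(Debug)]
enum Node {
    Hash([u8; 32]),
    Leaf(Vec<u8>, Vec<u8>), // (key nibbles, value)
    Branch(Vec<Node>),
}

fn fold_branch(stack: &mut Vec<Node>, mask: u16) {
    // One child is consumed for every set bit in the branch mask.
    let n_children = mask.count_ones() as usize;
    assert!(stack.len() >= n_children, "malformed witness");
    let children = stack.split_off(stack.len() - n_children);
    stack.push(Node::Branch(children));
}
```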
Opcode::Branch => self.process_branch(bytes), + Opcode::Hash => self.process_hash(bytes), + Opcode::Code => self.process_code(bytes), + Opcode::AccountLeaf => self.process_leaf(bytes), + Opcode::EmptyRoot => self.process_empty_root(bytes), + } + } + + fn process_leaf(&mut self, _bytes: &mut impl Iterator) -> CompactParsingResult<()> { + todo!() + } + + fn process_extension( + &mut self, + _bytes: &mut impl Iterator, + ) -> CompactParsingResult<()> { + todo!() + } + + fn process_branch( + &mut self, + _bytes: &mut impl Iterator, + ) -> CompactParsingResult<()> { + todo!() + } + + fn process_hash(&mut self, _bytes: &mut impl Iterator) -> CompactParsingResult<()> { + todo!() + } + + fn process_code(&mut self, _bytes: &mut impl Iterator) -> CompactParsingResult<()> { + todo!() + } + + fn process_account_leaf( + &mut self, + _bytes: &mut impl Iterator, + ) -> CompactParsingResult<()> { + todo!() + } + + fn process_empty_root( + &mut self, + _bytes: &mut impl Iterator, + ) -> CompactParsingResult<()> { todo!() } } @@ -75,15 +156,23 @@ enum StackEntry { #[derive(Debug)] struct AccountLeafData {} -pub(crate) fn process_compact_prestate(state: TrieCompact) -> HashedPartialTrie { - let _parser = ParserState::default(); - let _byte_iter = state.bytes.into_iter(); +pub(crate) fn process_compact_prestate( + state: TrieCompact, +) -> CompactParsingResult { + let mut parser = ParserState::default(); + let mut byte_iter = state.bytes.into_iter(); + + let _header = parse_header(&mut byte_iter)?; - loop {} + loop { + let _operator = parser.process_operator(&mut byte_iter)?; + } todo!() } -fn parse_header(_bytes: &mut impl Iterator) -> CompactParsingResult
{ - todo!() +fn parse_header(bytes: &mut impl Iterator<Item = u8>) -> CompactParsingResult<Header>

{ + let h_byte = bytes.next().ok_or(CompactParsingError::MissingHeader)?; + + Ok(Header { version: h_byte }) } From be9ebf2b21ce0a8f2caf32fe70068b0d1e674db6 Mon Sep 17 00:00:00 2001 From: BGluth Date: Sun, 15 Oct 2023 14:55:47 -0600 Subject: [PATCH 047/208] Impled basic processing logic for compact tries --- Cargo.toml | 1 + src/compact_prestate_processing.rs | 170 +++++++++++++++++++---------- 2 files changed, 114 insertions(+), 57 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 72314ce09..ad768b74a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ version = "0.1.0" edition = "2021" [dependencies] +ciborium = "0.2.1" enumn = "0.1.12" eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 868c28047..c2327854a 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -1,10 +1,13 @@ //! Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md -use std::fmt::{self, Display}; +use std::{ + fmt::{self, Display}, + io::{Cursor, Read}, +}; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::H256; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use thiserror::Error; use crate::trace_protocol::TrieCompact; @@ -24,6 +27,9 @@ pub enum CompactParsingError { #[error("Reached the end of the byte stream when we still expected more data")] UnexpectedEndOfStream, + + #[error("Unable to parse an expected byte vector (error: {0})")] + InvalidByteVector(String), } #[derive(Debug)] @@ -37,7 +43,7 @@ impl Display for Header { } } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize)] struct Key { is_even: bool, bytes: Vec, @@ -65,81 +71,118 @@ enum Operator { EmptyRoot, } -#[derive(Debug, Default)] +#[derive(Debug, Deserialize)] +struct LeafData { + key: Key, + value: Vec, +} + +#[derive(Debug)] struct ParserState { stack: Vec, + byte_cursor: CompactCursor, } impl ParserState { - fn process_operator( - &mut self, - bytes: &mut impl Iterator, - ) -> CompactParsingResult { - let opcode_byte = bytes - .next() - .ok_or(CompactParsingError::UnexpectedEndOfStream)?; + fn new(payload_bytes: Vec) -> Self { + let byte_cursor = CompactCursor { + intern: Cursor::new(payload_bytes), + }; + + Self { + byte_cursor, + stack: Vec::default(), + } + } + + fn process_stream(self) -> CompactParsingResult { + let (_, trie) = self.process_stream_and_get_header()?; + Ok(trie) + } + + fn process_stream_and_get_header( + mut self, + ) -> CompactParsingResult<(Header, HashedPartialTrie)> { + let header = self.parse_header()?; + + loop { + let _operator = self.process_operator()?; + + if self.byte_cursor.at_eof() { + break; + } + } + + // TODO + Ok((header, HashedPartialTrie::default())) + } + + fn parse_header(&mut self) -> CompactParsingResult
{ + let h_byte = self + .byte_cursor + .read_byte() + .map_err(|_| CompactParsingError::MissingHeader)?; + + Ok(Header { version: h_byte }) + } + + fn process_operator(&mut self) -> CompactParsingResult { + let opcode_byte = self.byte_cursor.read_byte()?; + let opcode = Opcode::n(opcode_byte).ok_or(CompactParsingError::InvalidOperator(opcode_byte))?; - self.process_data_following_opcode(opcode, bytes)?; + self.process_data_following_opcode(opcode)?; todo!() } - fn process_data_following_opcode( - &mut self, - opcode: Opcode, - bytes: &mut impl Iterator, - ) -> CompactParsingResult<()> { + fn process_data_following_opcode(&mut self, opcode: Opcode) -> CompactParsingResult<()> { match opcode { - Opcode::Leaf => self.process_leaf(bytes), - Opcode::Extension => self.process_extension(bytes), - Opcode::Branch => self.process_branch(bytes), - Opcode::Hash => self.process_hash(bytes), - Opcode::Code => self.process_code(bytes), - Opcode::AccountLeaf => self.process_leaf(bytes), - Opcode::EmptyRoot => self.process_empty_root(bytes), + Opcode::Leaf => self.process_leaf(), + Opcode::Extension => self.process_extension(), + Opcode::Branch => self.process_branch(), + Opcode::Hash => self.process_hash(), + Opcode::Code => self.process_code(), + Opcode::AccountLeaf => self.process_leaf(), + Opcode::EmptyRoot => self.process_empty_root(), } } - fn process_leaf(&mut self, _bytes: &mut impl Iterator) -> CompactParsingResult<()> { + fn process_leaf(&mut self) -> CompactParsingResult<()> { + let _key_raw = self.byte_cursor.read_byte_array()?; + let _value_raw = self.byte_cursor.read_byte_array()?; + todo!() } - fn process_extension( - &mut self, - _bytes: &mut impl Iterator, - ) -> CompactParsingResult<()> { + fn process_extension(&mut self) -> CompactParsingResult<()> { todo!() } - fn process_branch( - &mut self, - _bytes: &mut impl Iterator, - ) -> CompactParsingResult<()> { + fn process_branch(&mut self) -> CompactParsingResult<()> { todo!() } - fn process_hash(&mut self, _bytes: &mut impl Iterator) -> CompactParsingResult<()> { + fn process_hash(&mut self) -> CompactParsingResult<()> { todo!() } - fn process_code(&mut self, _bytes: &mut impl Iterator) -> CompactParsingResult<()> { + fn process_code(&mut self) -> CompactParsingResult<()> { todo!() } - fn process_account_leaf( - &mut self, - _bytes: &mut impl Iterator, - ) -> CompactParsingResult<()> { + fn process_account_leaf(&mut self) -> CompactParsingResult<()> { todo!() } - fn process_empty_root( - &mut self, - _bytes: &mut impl Iterator, - ) -> CompactParsingResult<()> { - todo!() + fn process_empty_root(&mut self) -> CompactParsingResult<()> { + self.push_to_stack(StackEntry::Empty); + Ok(()) + } + + fn push_to_stack(&mut self, entry: StackEntry) { + self.stack.push(entry) } } @@ -156,23 +199,36 @@ enum StackEntry { #[derive(Debug)] struct AccountLeafData {} -pub(crate) fn process_compact_prestate( - state: TrieCompact, -) -> CompactParsingResult { - let mut parser = ParserState::default(); - let mut byte_iter = state.bytes.into_iter(); +#[derive(Debug)] +struct CompactCursor { + intern: Cursor>, +} + +impl CompactCursor { + fn read_byte(&mut self) -> CompactParsingResult { + let mut single_byte_buf = [0]; - let _header = parse_header(&mut byte_iter)?; + // Assume this is always caused by hitting the end of the stream? 
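The cursor pattern being used here is easy to see in isolation; a minimal, runnable sketch using only the standard library, with EOF detected the same way `at_eof` below does (cursor position vs. backing buffer length):

```rust
use std::io::{Cursor, Read};

fn count_bytes(payload: Vec<u8>) -> usize {
    let mut cursor = Cursor::new(payload);
    let mut byte = [0u8; 1];
    let mut n = 0;
    while (cursor.position() as usize) < cursor.get_ref().len() {
        cursor
            .read_exact(&mut byte)
            .expect("bounds already checked, so a byte must be available");
        n += 1;
    }
    n
}
```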
+ self.intern + .read_exact(&mut single_byte_buf) + .map_err(|_err| CompactParsingError::UnexpectedEndOfStream)?; - loop { - let _operator = parser.process_operator(&mut byte_iter)?; + Ok(single_byte_buf[0]) } - todo!() -} + fn read_byte_array(&mut self) -> CompactParsingResult> { + ciborium::from_reader(&mut self.intern) + .map_err(|err| CompactParsingError::InvalidByteVector(err.to_string())) + } -fn parse_header(bytes: &mut impl Iterator) -> CompactParsingResult
{ - let h_byte = bytes.next().ok_or(CompactParsingError::MissingHeader)?; + fn at_eof(&self) -> bool { + self.intern.position() as usize == self.intern.get_ref().len() + } +} - Ok(Header { version: h_byte }) +pub(crate) fn process_compact_prestate( + state: TrieCompact, +) -> CompactParsingResult { + let parser = ParserState::new(state.bytes); + parser.process_stream() } From d876e4cbc99fcdf1e9cf29d2c1e2b03dac774cbc Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 10:33:14 -0600 Subject: [PATCH 048/208] Final overall compact parsing structure refactor --- src/compact_prestate_processing.rs | 167 +++++++++++++++++++---------- src/processed_block_trace.rs | 17 ++- 2 files changed, 125 insertions(+), 59 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index c2327854a..a983fd2f5 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -1,6 +1,7 @@ //! Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md use std::{ + collections::VecDeque, fmt::{self, Display}, io::{Cursor, Read}, }; @@ -32,17 +33,6 @@ pub enum CompactParsingError { InvalidByteVector(String), } -#[derive(Debug)] -struct Header { - version: u8, -} - -impl Display for Header { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "Erigon block witness version {}", self.version) - } -} - #[derive(Debug, Deserialize)] struct Key { is_even: bool, @@ -61,7 +51,13 @@ enum Opcode { } #[derive(Debug)] -enum Operator { +enum StackEntry { + Instruction(Instruction), + Node(NodeEntry), +} + +#[derive(Debug)] +enum Instruction { Leaf(), Extension, Branch, @@ -71,62 +67,121 @@ enum Operator { EmptyRoot, } +impl From for StackEntry { + fn from(v: Instruction) -> Self { + Self::Instruction(v) + } +} + +#[derive(Debug)] +enum NodeEntry { + AccountLeaf(AccountLeafData), + Code(Vec), + Empty, + Hash(NodeHash), + Leaf(Key, Value), + Extension(Key), +} + +#[derive(Debug)] +struct AccountLeafData {} + #[derive(Debug, Deserialize)] struct LeafData { key: Key, value: Vec, } +#[derive(Debug)] +pub(crate) struct Header { + version: u8, +} + +impl Display for Header { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Erigon block witness version {}", self.version) + } +} + +impl Header { + pub(crate) fn version_is_compatible(&self, target_ver: u8) -> bool { + self.version == target_ver + } +} + #[derive(Debug)] struct ParserState { - stack: Vec, - byte_cursor: CompactCursor, + stack: VecDeque, } impl ParserState { - fn new(payload_bytes: Vec) -> Self { - let byte_cursor = CompactCursor { - intern: Cursor::new(payload_bytes), - }; + fn create_and_extract_header( + witness_bytes_raw: Vec, + ) -> CompactParsingResult<(Header, Self)> { + let witness_bytes = WitnessBytes::new(witness_bytes_raw); + let (header, stack) = witness_bytes.process_into_instructions_and_header()?; - Self { - byte_cursor, - stack: Vec::default(), - } + let p_state = Self { stack }; + + Ok((header, p_state)) } - fn process_stream(self) -> CompactParsingResult { - let (_, trie) = self.process_stream_and_get_header()?; + fn parse(self) -> CompactParsingResult { + let trie = self.parse_into_trie()?; Ok(trie) } - fn process_stream_and_get_header( + fn parse_into_trie(mut self) -> CompactParsingResult { + loop { + let num_rules_applied = self.apply_rules_to_stack(); + + if num_rules_applied == 0 { + break; + } + } + + todo!() + } + + fn 
apply_rules_to_stack(&mut self) -> usize { + todo!() + } +} + +struct WitnessBytes { + byte_cursor: CompactCursor, + instrs: VecDeque, +} + +impl WitnessBytes { + fn new(witness_bytes: Vec) -> Self { + Self { + byte_cursor: CompactCursor { + intern: Cursor::new(witness_bytes), + }, + instrs: VecDeque::default(), + } + } + + fn process_into_instructions_and_header( mut self, - ) -> CompactParsingResult<(Header, HashedPartialTrie)> { + ) -> CompactParsingResult<(Header, VecDeque)> { let header = self.parse_header()?; + // TODO loop { - let _operator = self.process_operator()?; + let instr = self.process_operator()?; + self.instrs.push_front(instr.into()); if self.byte_cursor.at_eof() { break; } } - // TODO - Ok((header, HashedPartialTrie::default())) + Ok((header, self.instrs)) } - fn parse_header(&mut self) -> CompactParsingResult
{ - let h_byte = self - .byte_cursor - .read_byte() - .map_err(|_| CompactParsingError::MissingHeader)?; - - Ok(Header { version: h_byte }) - } - - fn process_operator(&mut self) -> CompactParsingResult { + fn process_operator(&mut self) -> CompactParsingResult { let opcode_byte = self.byte_cursor.read_byte()?; let opcode = @@ -177,27 +232,23 @@ impl ParserState { } fn process_empty_root(&mut self) -> CompactParsingResult<()> { - self.push_to_stack(StackEntry::Empty); + self.push_to_stack(Instruction::EmptyRoot); Ok(()) } - fn push_to_stack(&mut self, entry: StackEntry) { - self.stack.push(entry) + fn push_to_stack(&mut self, instr: Instruction) { + self.instrs.push_front(instr.into()) } -} -#[derive(Debug)] -enum StackEntry { - AccountLeaf(AccountLeafData), - Code(Vec), - Empty, - Hash(NodeHash), - Leaf(Key, Value), - Extension(Key), -} + fn parse_header(&mut self) -> CompactParsingResult
{ + let h_byte = self + .byte_cursor + .read_byte() + .map_err(|_| CompactParsingError::MissingHeader)?; -#[derive(Debug)] -struct AccountLeafData {} + Ok(Header { version: h_byte }) + } +} #[derive(Debug)] struct CompactCursor { @@ -228,7 +279,9 @@ impl CompactCursor { pub(crate) fn process_compact_prestate( state: TrieCompact, -) -> CompactParsingResult { - let parser = ParserState::new(state.bytes); - parser.process_stream() +) -> CompactParsingResult<(Header, HashedPartialTrie)> { + let (header, parser) = ParserState::create_and_extract_header(state.bytes)?; + let trie = parser.parse()?; + + Ok((header, trie)) } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 4f8925c74..4ecb3350a 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -5,6 +5,7 @@ use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::U256; +use crate::compact_prestate_processing::process_compact_prestate; use crate::decoding::TraceParsingResult; use crate::trace_protocol::{ BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, @@ -23,6 +24,8 @@ pub(crate) struct ProcessedBlockTrace { pub(crate) txn_info: Vec, } +const COMPATIBLE_HEADER_VERSION: u8 = 1; + impl BlockTrace { pub fn into_txn_proof_gen_ir( self, @@ -122,8 +125,18 @@ fn process_multiple_storage_tries( todo!() } -fn process_compact_trie(_trie: TrieCompact) -> ProcessedBlockTracePreImages { - todo!() +fn process_compact_trie(trie_compact: TrieCompact) -> ProcessedBlockTracePreImages { + // TODO: Wrap in proper result type... + let (header, trie) = process_compact_prestate(trie_compact).unwrap(); + + // TODO: Make this into a result... + assert!(header.version_is_compatible(COMPATIBLE_HEADER_VERSION)); + + ProcessedBlockTracePreImages { + state: trie, + storage: todo!(), + extra_code_hash_mappings: todo!(), + } } #[derive(Debug)] From 218022fd879aaf128b9da0f7cd59cb6acc8dea09 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 11:04:58 -0600 Subject: [PATCH 049/208] Added more parsing logic for compact witnesses --- src/compact_prestate_processing.rs | 75 ++++++++++++++++++++++-------- 1 file changed, 55 insertions(+), 20 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index a983fd2f5..147ae164a 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -1,13 +1,15 @@ //! 
Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md use std::{ + borrow::Borrow, collections::VecDeque, + error::Error, fmt::{self, Display}, io::{Cursor, Read}, }; use eth_trie_utils::partial_trie::HashedPartialTrie; -use ethereum_types::H256; +use ethereum_types::{H256, U256}; use serde::Deserialize; use thiserror::Error; @@ -15,8 +17,16 @@ use crate::trace_protocol::TrieCompact; pub type CompactParsingResult = Result; -type NodeHash = H256; -type Value = Vec; +type BranchMask = u16; + +type Balance = U256; +type Nonce = U256; +type HasCode = bool; +type HasStorage = bool; + +type HashValue = H256; +type RawValue = Vec; +type RawCode = Vec; #[derive(Debug, Error)] pub enum CompactParsingError { @@ -39,6 +49,12 @@ struct Key { bytes: Vec, } +impl> From for Key { + fn from(_value: K) -> Self { + todo!() + } +} + #[derive(Debug, enumn::N)] enum Opcode { Leaf = 0x00, @@ -58,12 +74,12 @@ enum StackEntry { #[derive(Debug)] enum Instruction { - Leaf(), - Extension, - Branch, - Hash, - Code, - AccountLeaf, + Leaf(Key, RawValue), + Extension(Key), + Branch(BranchMask), + Hash(HashValue), + Code(RawCode), + AccountLeaf(Key, Nonce, Balance, HasCode, HasStorage), EmptyRoot, } @@ -78,8 +94,8 @@ enum NodeEntry { AccountLeaf(AccountLeafData), Code(Vec), Empty, - Hash(NodeHash), - Leaf(Key, Value), + Hash(HashValue), + Leaf(Key, RawValue), Extension(Key), } @@ -156,9 +172,7 @@ struct WitnessBytes { impl WitnessBytes { fn new(witness_bytes: Vec) -> Self { Self { - byte_cursor: CompactCursor { - intern: Cursor::new(witness_bytes), - }, + byte_cursor: CompactCursor::new(witness_bytes), instrs: VecDeque::default(), } } @@ -205,10 +219,11 @@ impl WitnessBytes { } fn process_leaf(&mut self) -> CompactParsingResult<()> { - let _key_raw = self.byte_cursor.read_byte_array()?; - let _value_raw = self.byte_cursor.read_byte_array()?; + let key = self.byte_cursor.read_cbor_byte_array()?.into(); + let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?; - todo!() + self.push_to_stack(Instruction::Leaf(key, value_raw)); + Ok(()) } fn process_extension(&mut self) -> CompactParsingResult<()> { @@ -253,9 +268,17 @@ impl WitnessBytes { #[derive(Debug)] struct CompactCursor { intern: Cursor>, + temp_buf: Vec, } impl CompactCursor { + fn new(bytes: Vec) -> Self { + Self { + intern: Cursor::new(bytes), + temp_buf: Vec::default(), + } + } + fn read_byte(&mut self) -> CompactParsingResult { let mut single_byte_buf = [0]; @@ -267,9 +290,21 @@ impl CompactCursor { Ok(single_byte_buf[0]) } - fn read_byte_array(&mut self) -> CompactParsingResult> { - ciborium::from_reader(&mut self.intern) - .map_err(|err| CompactParsingError::InvalidByteVector(err.to_string())) + fn read_cbor_byte_array(&mut self) -> CompactParsingResult<&[u8]> { + self.temp_buf.clear(); + Self::ciborium_err_reader_res_to_parsing_res(self.intern.read_exact(&mut self.temp_buf)); + + Ok(&self.temp_buf) + } + + fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult> { + Self::ciborium_err_reader_res_to_parsing_res(ciborium::from_reader(&mut self.intern)) + } + + fn ciborium_err_reader_res_to_parsing_res( + res: Result, + ) -> CompactParsingResult { + res.map_err(|err| CompactParsingError::InvalidByteVector(err.to_string())) } fn at_eof(&self) -> bool { From 8748b8f4f8b6b6f22ccfc6b261f72a1d5c9460ad Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 12:11:31 -0600 Subject: [PATCH 050/208] Added a function to read cbor to a type --- Cargo.toml | 
1 + src/compact_prestate_processing.rs | 47 +++++++++++++++++++++++++----- 2 files changed, 41 insertions(+), 7 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index ad768b74a..dd38e9d87 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ edition = "2021" [dependencies] ciborium = "0.2.1" +ciborium-io = "0.2.1" enumn = "0.1.12" eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 147ae164a..352a0aea9 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -1,6 +1,7 @@ //! Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md use std::{ + any::type_name, borrow::Borrow, collections::VecDeque, error::Error, @@ -10,14 +11,14 @@ use std::{ use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::{H256, U256}; -use serde::Deserialize; +use serde::{de::DeserializeOwned, Deserialize}; use thiserror::Error; use crate::trace_protocol::TrieCompact; pub type CompactParsingResult = Result; -type BranchMask = u16; +type BranchMask = u32; type Balance = U256; type Nonce = U256; @@ -41,6 +42,9 @@ pub enum CompactParsingError { #[error("Unable to parse an expected byte vector (error: {0})")] InvalidByteVector(String), + + #[error("Unable to parse the type \"{0}\" from cbor bytes {1}")] + InvalidBytesForType(&'static str, String, String), } #[derive(Debug, Deserialize)] @@ -227,11 +231,17 @@ impl WitnessBytes { } fn process_extension(&mut self) -> CompactParsingResult<()> { - todo!() + let key = self.byte_cursor.read_cbor_byte_array()?.into(); + + self.push_to_stack(Instruction::Extension(key)); + Ok(()) } fn process_branch(&mut self) -> CompactParsingResult<()> { - todo!() + let mask = self.byte_cursor.read_t()?; + + self.push_to_stack(Instruction::Branch(mask)); + Ok(()) } fn process_hash(&mut self) -> CompactParsingResult<()> { @@ -279,6 +289,24 @@ impl CompactCursor { } } + fn read_t(&mut self) -> CompactParsingResult { + let starting_pos = self.intern.position(); + + ciborium::from_reader(&mut self.intern).map_err(move |err| { + let ending_pos = self.intern.position(); + let type_bytes = self.intern.clone().into_inner() + [starting_pos as usize..ending_pos as usize] + .to_vec(); + let type_bytes_hex = hex::encode(type_bytes); + + CompactParsingError::InvalidBytesForType( + type_name::(), + type_bytes_hex, + err.to_string(), + ) + }) + } + fn read_byte(&mut self) -> CompactParsingResult { let mut single_byte_buf = [0]; @@ -292,16 +320,21 @@ impl CompactCursor { fn read_cbor_byte_array(&mut self) -> CompactParsingResult<&[u8]> { self.temp_buf.clear(); - Self::ciborium_err_reader_res_to_parsing_res(self.intern.read_exact(&mut self.temp_buf)); + Self::ciborium_byte_vec_err_reader_res_to_parsing_res(ciborium_io::Read::read_exact( + &mut self.intern, + &mut self.temp_buf, + ))?; Ok(&self.temp_buf) } fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult> { - Self::ciborium_err_reader_res_to_parsing_res(ciborium::from_reader(&mut self.intern)) + Self::ciborium_byte_vec_err_reader_res_to_parsing_res(ciborium::from_reader( + &mut self.intern, + )) } - fn ciborium_err_reader_res_to_parsing_res( + fn ciborium_byte_vec_err_reader_res_to_parsing_res( res: Result, ) -> CompactParsingResult { res.map_err(|err| 
CompactParsingError::InvalidByteVector(err.to_string())) From cc24c915b5910564d2b68376c4077634e3badfdf Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 12:16:13 -0600 Subject: [PATCH 051/208] Filled in remaining instruction processing methods --- src/compact_prestate_processing.rs | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 352a0aea9..cf3008067 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -245,15 +245,34 @@ impl WitnessBytes { } fn process_hash(&mut self) -> CompactParsingResult<()> { - todo!() + let hash = self.byte_cursor.read_t()?; + + self.push_to_stack(Instruction::Hash(hash)); + Ok(()) } fn process_code(&mut self) -> CompactParsingResult<()> { - todo!() + let code = self.byte_cursor.read_t()?; + + self.push_to_stack(Instruction::Code(code)); + Ok(()) } fn process_account_leaf(&mut self) -> CompactParsingResult<()> { - todo!() + let key = self.byte_cursor.read_cbor_byte_array()?.into(); + let nonce = self.byte_cursor.read_t()?; + let balance = self.byte_cursor.read_t()?; + let has_code = self.byte_cursor.read_t()?; + let has_storage = self.byte_cursor.read_t()?; + + self.push_to_stack(Instruction::AccountLeaf( + key, + nonce, + balance, + has_code, + has_storage, + )); + Ok(()) } fn process_empty_root(&mut self) -> CompactParsingResult<()> { From 3b5756a519cffef86da7c54bd33fe3d77b497c10 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 12:59:37 -0600 Subject: [PATCH 052/208] Rough structure for traversing & collapsing instructions - I think we actually might really want a LL here. Yeah it will hurt cache performance, but we're going to be doing so many deletes & inserts mid-list that I think this is going to be the better choice for performance if the list gets very large. --- src/compact_prestate_processing.rs | 56 +++++++++++++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index cf3008067..f4dcdf9d2 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -3,10 +3,11 @@ use std::{ any::type_name, borrow::Borrow, - collections::VecDeque, + collections::{LinkedList, VecDeque}, error::Error, fmt::{self, Display}, io::{Cursor, Read}, + ops::Range, }; use eth_trie_utils::partial_trie::HashedPartialTrie; @@ -164,6 +165,8 @@ impl ParserState { } fn apply_rules_to_stack(&mut self) -> usize { + let _num_rules_applied = 0; + todo!() } } @@ -364,6 +367,57 @@ impl CompactCursor { } } +/// We kind of want a wrapper around the actual data structure I think since +/// there's a good chance this will change a few times in the future. +struct WitnessEntries { + // Yeah a LL is actually (unfortunately) a very good choice here. We will be doing a ton of + // inserts mid-list, and the list can get very large. There might be a better choice for a data + // structure, but for now, this will make performance not scale exponentially with list + // size. 
+ intern: LinkedList, +} + +impl WitnessEntries { + fn push_entry(&mut self, _entry: StackEntry) { + todo!() + } + + fn replace_entries_with_single_entry( + &mut self, + _idxs_to_replace: Range, + _entry_to_replace_with: StackEntry, + ) { + todo!() + } + + fn create_collapseable_iter(&mut self) -> CollapsableStackElementTraverser { + todo!() + } +} + +// It's not quite an iterator, so this is the next best name that I can come up +// with. +struct CollapsableStackElementTraverser {} + +impl CollapsableStackElementTraverser { + fn advance(&mut self) { + todo!() + } + + fn get_next_n_elems(&self, _n: usize) -> impl Iterator { + // TODO + std::iter::empty() + } + + fn get_next_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&StackEntry>) { + todo!() + } + + fn replace_next_n_entry_with_single_entry(&mut self, _n: usize, _entry: StackEntry) { + todo!() + } +} + pub(crate) fn process_compact_prestate( state: TrieCompact, ) -> CompactParsingResult<(Header, HashedPartialTrie)> { From 70157dc11449516bd3e0dead1f3da8ff969c3d29 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 13:31:22 -0600 Subject: [PATCH 053/208] A bit of API work on traverser - Going to first see if this API works well with what the callers need. --- src/compact_prestate_processing.rs | 20 ++++++++++++++------ src/lib.rs | 1 + 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index f4dcdf9d2..a59f81bf9 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -3,7 +3,7 @@ use std::{ any::type_name, borrow::Borrow, - collections::{LinkedList, VecDeque}, + collections::{linked_list::CursorMut, LinkedList, VecDeque}, error::Error, fmt::{self, Display}, io::{Cursor, Read}, @@ -275,6 +275,7 @@ impl WitnessBytes { has_code, has_storage, )); + Ok(()) } @@ -390,16 +391,19 @@ impl WitnessEntries { todo!() } - fn create_collapseable_iter(&mut self) -> CollapsableStackElementTraverser { + fn create_collapseable_traverser(&mut self) -> CollapsableStackElementTraverser { todo!() } } // It's not quite an iterator, so this is the next best name that I can come up // with. 
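The linked-list reasoning in the commit message is easier to see in a standalone sketch (nightly `linked_list_cursors`, the same feature flag this patch enables in `lib.rs` below): removing a run of entries and splicing in a replacement only touches the affected links, so the cost tracks the run length, not the total list length.

```rust
#![feature(linked_list_cursors)] // nightly-only cursor API

use std::collections::LinkedList;

// Sketch: collapse the first `n` entries into a single replacement.
fn collapse_front(list: &mut LinkedList<u32>, n: usize, replacement: u32) {
    let mut cursor = list.cursor_front_mut();
    for _ in 0..n {
        // Removes the current element and advances to the next one.
        cursor.remove_current();
    }
    // Insert where the removed run used to be.
    cursor.insert_before(replacement);
}
```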
-struct CollapsableStackElementTraverser {} +struct CollapsableStackElementTraverser<'a> { + entries: &'a mut WitnessEntries, + entry_cursor: CursorMut<'a, StackEntry>, +} -impl CollapsableStackElementTraverser { +impl<'a> CollapsableStackElementTraverser<'a> { fn advance(&mut self) { todo!() } @@ -413,8 +417,12 @@ impl CollapsableStackElementTraverser { todo!() } - fn replace_next_n_entry_with_single_entry(&mut self, _n: usize, _entry: StackEntry) { - todo!() + fn replace_next_n_entries_with_single_entry(&mut self, n: usize, entry: StackEntry) { + for _ in 0..n { + self.entry_cursor.remove_current(); + } + + self.entry_cursor.insert_after(entry) } } diff --git a/src/lib.rs b/src/lib.rs index 47e376751..221f12e5b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,4 @@ +#![feature(linked_list_cursors)] #![feature(trait_alias)] mod compact_prestate_processing; From 3827b149ba4c97d0bf6d979f604ccb4311d339dc Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 13:47:53 -0600 Subject: [PATCH 054/208] Naming and type refactoring --- src/compact_prestate_processing.rs | 65 ++++++++++++++++-------------- 1 file changed, 34 insertions(+), 31 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index a59f81bf9..dcf83795c 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -3,7 +3,7 @@ use std::{ any::type_name, borrow::Borrow, - collections::{linked_list::CursorMut, LinkedList, VecDeque}, + collections::{linked_list::CursorMut, LinkedList}, error::Error, fmt::{self, Display}, io::{Cursor, Read}, @@ -72,7 +72,7 @@ enum Opcode { } #[derive(Debug)] -enum StackEntry { +enum WitnessEntry { Instruction(Instruction), Node(NodeEntry), } @@ -88,7 +88,7 @@ enum Instruction { EmptyRoot, } -impl From for StackEntry { +impl From for WitnessEntry { fn from(v: Instruction) -> Self { Self::Instruction(v) } @@ -132,7 +132,7 @@ impl Header { #[derive(Debug)] struct ParserState { - stack: VecDeque, + entries: WitnessEntries, } impl ParserState { @@ -140,9 +140,9 @@ impl ParserState { witness_bytes_raw: Vec, ) -> CompactParsingResult<(Header, Self)> { let witness_bytes = WitnessBytes::new(witness_bytes_raw); - let (header, stack) = witness_bytes.process_into_instructions_and_header()?; + let (header, entries) = witness_bytes.process_into_instructions_and_header()?; - let p_state = Self { stack }; + let p_state = Self { entries }; Ok((header, p_state)) } @@ -154,7 +154,7 @@ impl ParserState { fn parse_into_trie(mut self) -> CompactParsingResult { loop { - let num_rules_applied = self.apply_rules_to_stack(); + let num_rules_applied = self.apply_rules_to_witness_entries(); if num_rules_applied == 0 { break; @@ -164,35 +164,37 @@ impl ParserState { todo!() } - fn apply_rules_to_stack(&mut self) -> usize { + fn apply_rules_to_witness_entries(&mut self) -> usize { let _num_rules_applied = 0; todo!() } + + fn try_apply_rules_to_curr_entry() {} } struct WitnessBytes { byte_cursor: CompactCursor, - instrs: VecDeque, + instrs: WitnessEntries, } impl WitnessBytes { fn new(witness_bytes: Vec) -> Self { Self { byte_cursor: CompactCursor::new(witness_bytes), - instrs: VecDeque::default(), + instrs: WitnessEntries::default(), } } fn process_into_instructions_and_header( mut self, - ) -> CompactParsingResult<(Header, VecDeque)> { + ) -> CompactParsingResult<(Header, WitnessEntries)> { let header = self.parse_header()?; // TODO loop { let instr = self.process_operator()?; - self.instrs.push_front(instr.into()); + 
self.instrs.push_entry(instr.into()); if self.byte_cursor.at_eof() { break; @@ -229,35 +231,35 @@ impl WitnessBytes { let key = self.byte_cursor.read_cbor_byte_array()?.into(); let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?; - self.push_to_stack(Instruction::Leaf(key, value_raw)); + self.push_entry(Instruction::Leaf(key, value_raw)); Ok(()) } fn process_extension(&mut self) -> CompactParsingResult<()> { let key = self.byte_cursor.read_cbor_byte_array()?.into(); - self.push_to_stack(Instruction::Extension(key)); + self.push_entry(Instruction::Extension(key)); Ok(()) } fn process_branch(&mut self) -> CompactParsingResult<()> { let mask = self.byte_cursor.read_t()?; - self.push_to_stack(Instruction::Branch(mask)); + self.push_entry(Instruction::Branch(mask)); Ok(()) } fn process_hash(&mut self) -> CompactParsingResult<()> { let hash = self.byte_cursor.read_t()?; - self.push_to_stack(Instruction::Hash(hash)); + self.push_entry(Instruction::Hash(hash)); Ok(()) } fn process_code(&mut self) -> CompactParsingResult<()> { let code = self.byte_cursor.read_t()?; - self.push_to_stack(Instruction::Code(code)); + self.push_entry(Instruction::Code(code)); Ok(()) } @@ -268,7 +270,7 @@ impl WitnessBytes { let has_code = self.byte_cursor.read_t()?; let has_storage = self.byte_cursor.read_t()?; - self.push_to_stack(Instruction::AccountLeaf( + self.push_entry(Instruction::AccountLeaf( key, nonce, balance, @@ -280,12 +282,12 @@ impl WitnessBytes { } fn process_empty_root(&mut self) -> CompactParsingResult<()> { - self.push_to_stack(Instruction::EmptyRoot); + self.push_entry(Instruction::EmptyRoot); Ok(()) } - fn push_to_stack(&mut self, instr: Instruction) { - self.instrs.push_front(instr.into()) + fn push_entry(&mut self, instr: Instruction) { + self.instrs.push_entry(instr.into()) } fn parse_header(&mut self) -> CompactParsingResult
{ @@ -370,54 +372,55 @@ impl CompactCursor { /// We kind of want a wrapper around the actual data structure I think since /// there's a good chance this will change a few times in the future. +#[derive(Debug, Default)] struct WitnessEntries { // Yeah a LL is actually (unfortunately) a very good choice here. We will be doing a ton of // inserts mid-list, and the list can get very large. There might be a better choice for a data // structure, but for now, this will make performance not scale exponentially with list // size. - intern: LinkedList, + intern: LinkedList, } impl WitnessEntries { - fn push_entry(&mut self, _entry: StackEntry) { + fn push_entry(&mut self, _entry: WitnessEntry) { todo!() } fn replace_entries_with_single_entry( &mut self, _idxs_to_replace: Range, - _entry_to_replace_with: StackEntry, + _entry_to_replace_with: WitnessEntry, ) { todo!() } - fn create_collapseable_traverser(&mut self) -> CollapsableStackElementTraverser { + fn create_collapseable_traverser(&mut self) -> CollapsableWitnessEntryTraverser { todo!() } } // It's not quite an iterator, so this is the next best name that I can come up // with. -struct CollapsableStackElementTraverser<'a> { +struct CollapsableWitnessEntryTraverser<'a> { entries: &'a mut WitnessEntries, - entry_cursor: CursorMut<'a, StackEntry>, + entry_cursor: CursorMut<'a, WitnessEntry>, } -impl<'a> CollapsableStackElementTraverser<'a> { +impl<'a> CollapsableWitnessEntryTraverser<'a> { fn advance(&mut self) { todo!() } - fn get_next_n_elems(&self, _n: usize) -> impl Iterator { + fn get_next_n_elems(&self, _n: usize) -> impl Iterator { // TODO std::iter::empty() } - fn get_next_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&StackEntry>) { + fn get_next_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&WitnessEntry>) { todo!() } - fn replace_next_n_entries_with_single_entry(&mut self, n: usize, entry: StackEntry) { + fn replace_next_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { for _ in 0..n { self.entry_cursor.remove_current(); } From 2550fd226593d148c02e0f80e5a234024451b6b5 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 17 Oct 2023 14:18:42 -0600 Subject: [PATCH 055/208] Initial logic for rule matching --- src/compact_prestate_processing.rs | 76 ++++++++++++++++++++++++++---- 1 file changed, 66 insertions(+), 10 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index dcf83795c..a1f6a01b0 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -30,6 +30,8 @@ type HashValue = H256; type RawValue = Vec; type RawCode = Vec; +const MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE: usize = 3; + #[derive(Debug, Error)] pub enum CompactParsingError { #[error("Missing header")] @@ -46,9 +48,12 @@ pub enum CompactParsingError { #[error("Unable to parse the type \"{0}\" from cbor bytes {1}")] InvalidBytesForType(&'static str, String, String), + + #[error("Invalid block witness entries: {0:?}")] + InvalidWitnessFormat(Vec), } -#[derive(Debug, Deserialize)] +#[derive(Clone, Debug, Deserialize)] struct Key { is_even: bool, bytes: Vec, @@ -71,13 +76,14 @@ enum Opcode { EmptyRoot = 0x06, } -#[derive(Debug)] +#[derive(Clone, Debug)] enum WitnessEntry { Instruction(Instruction), Node(NodeEntry), } -#[derive(Debug)] +// TODO: Ignore `NEW_TRIE` for now... 
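As a concrete picture of the input the rule matcher consumes, here is a hedged sketch of the instruction stream for a two-leaf trie. The key encoding is schematic (`Key`'s `From` impl is still a `todo!()` at this point); the branch mask sets bits 0 and 3 for the two occupied slots, and the leaves precede the `BRANCH` that consumes them, per the spec's stack discipline.

```rust
// Hypothetical stream; keys and values are arbitrary placeholders.
fn two_leaf_stream() -> Vec<Instruction> {
    vec![
        Instruction::Leaf(Key::from(vec![0x00]), vec![0x01]), // child at nibble 0
        Instruction::Leaf(Key::from(vec![0x03]), vec![0x02]), // child at nibble 3
        Instruction::Branch(0b1001),                          // mask bits 0 and 3
    ]
}
```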
+#[derive(Clone, Debug)] enum Instruction { Leaf(Key, RawValue), Extension(Key), @@ -94,7 +100,7 @@ impl From for WitnessEntry { } } -#[derive(Debug)] +#[derive(Clone, Debug)] enum NodeEntry { AccountLeaf(AccountLeafData), Code(Vec), @@ -104,7 +110,7 @@ enum NodeEntry { Extension(Key), } -#[derive(Debug)] +#[derive(Clone, Debug)] struct AccountLeafData {} #[derive(Debug, Deserialize)] @@ -153,8 +159,10 @@ impl ParserState { } fn parse_into_trie(mut self) -> CompactParsingResult { + let mut entry_buf = Vec::new(); + loop { - let num_rules_applied = self.apply_rules_to_witness_entries(); + let num_rules_applied = self.apply_rules_to_witness_entries(&mut entry_buf)?; if num_rules_applied == 0 { break; @@ -164,13 +172,57 @@ impl ParserState { todo!() } - fn apply_rules_to_witness_entries(&mut self) -> usize { - let _num_rules_applied = 0; + fn apply_rules_to_witness_entries( + &mut self, + entry_buf: &mut Vec<&WitnessEntry>, + ) -> CompactParsingResult { + let mut tot_rules_applied = 0; + + let mut traverser = self.entries.create_collapsable_traverser(); + + while !traverser.at_end() { + let num_rules_applied = Self::try_apply_rules_to_curr_entry(&mut traverser, entry_buf)?; + tot_rules_applied += num_rules_applied; + } todo!() } - fn try_apply_rules_to_curr_entry() {} + fn try_apply_rules_to_curr_entry( + traverser: &mut CollapsableWitnessEntryTraverser, + buf: &mut Vec<&WitnessEntry>, + ) -> CompactParsingResult { + traverser.get_next_n_elems_into_buf(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, buf); + + match buf[0] { + WitnessEntry::Instruction(Instruction::Hash(_h)) => { + todo!() + } + WitnessEntry::Instruction(Instruction::Leaf(_k, _v)) => { + todo!() + } + WitnessEntry::Instruction(Instruction::Extension(_k)) => { + todo!() + } + WitnessEntry::Instruction(Instruction::Code(_c)) => { + todo!() + } + WitnessEntry::Instruction(Instruction::AccountLeaf(_k, _n, _b, _h_c, _h_s)) => { + todo!() + } + WitnessEntry::Instruction(Instruction::Branch(_mask)) => { + todo!() + } + _ => { + // TODO: This needs to be cleaned up and put into a separate function... 
+ let invalid_entry_buf = traverser + .get_next_n_elems(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE) + .cloned() + .collect(); + Err(CompactParsingError::InvalidWitnessFormat(invalid_entry_buf)) + } + } + } } struct WitnessBytes { @@ -394,7 +446,7 @@ impl WitnessEntries { todo!() } - fn create_collapseable_traverser(&mut self) -> CollapsableWitnessEntryTraverser { + fn create_collapsable_traverser(&mut self) -> CollapsableWitnessEntryTraverser { todo!() } } @@ -427,6 +479,10 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { self.entry_cursor.insert_after(entry) } + + fn at_end(&self) -> bool { + self.entry_cursor.as_cursor().peek_next().is_none() + } } pub(crate) fn process_compact_prestate( From 1e55f20b2b0c050905337c5e215b521a0c13be01 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 20 Oct 2023 11:43:24 -0600 Subject: [PATCH 056/208] Finished first version of rule matching logic --- src/compact_prestate_processing.rs | 200 +++++++++++++++++++++++++---- 1 file changed, 178 insertions(+), 22 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index a1f6a01b0..01c132cc0 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -15,7 +15,7 @@ use ethereum_types::{H256, U256}; use serde::{de::DeserializeOwned, Deserialize}; use thiserror::Error; -use crate::trace_protocol::TrieCompact; +use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; pub type CompactParsingResult = Result; @@ -102,16 +102,59 @@ impl From for WitnessEntry { #[derive(Clone, Debug)] enum NodeEntry { - AccountLeaf(AccountLeafData), + Account(AccountNodeData), Code(Vec), Empty, Hash(HashValue), - Leaf(Key, RawValue), + Leaf(Key, LeafNodeData), Extension(Key), + Value(ValueNodeData), +} + +#[derive(Clone, Debug)] +struct ValueNodeData(Vec); + +impl From> for ValueNodeData { + fn from(v: Vec) -> Self { + Self(v) + } +} + +#[derive(Clone, Debug)] +enum LeafNodeData { + Value(ValueNodeData), + Account(AccountNodeData), +} + +#[derive(Clone, Debug)] +enum AccountNodeCode { + CodeNode(Vec), + HashNode(TrieRootHash), } #[derive(Clone, Debug)] -struct AccountLeafData {} +struct AccountNodeData { + nonce: Nonce, + balance: Balance, + storage_root: Option, + account_node_code: Option, +} + +impl AccountNodeData { + fn new( + nonce: Nonce, + balance: Balance, + storage_root: Option, + account_node_code: Option, + ) -> Self { + Self { + nonce, + balance, + storage_root, + account_node_code, + } + } +} #[derive(Debug, Deserialize)] struct LeafData { @@ -195,34 +238,123 @@ impl ParserState { traverser.get_next_n_elems_into_buf(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, buf); match buf[0] { - WitnessEntry::Instruction(Instruction::Hash(_h)) => { - todo!() + WitnessEntry::Instruction(Instruction::Hash(h)) => { + Self::replace_next_traverser_node_entry_helper(traverser, NodeEntry::Hash(*h)) } - WitnessEntry::Instruction(Instruction::Leaf(_k, _v)) => { - todo!() + WitnessEntry::Instruction(Instruction::Leaf(k, v)) => { + Self::replace_next_traverser_node_entry_helper( + traverser, + NodeEntry::Leaf(k.clone(), LeafNodeData::Value(v.clone().into())), + ) } - WitnessEntry::Instruction(Instruction::Extension(_k)) => { - todo!() + WitnessEntry::Instruction(Instruction::Extension(k)) => { + Self::replace_next_traverser_node_entry_helper( + traverser, + NodeEntry::Extension(k.clone()), + ) } - WitnessEntry::Instruction(Instruction::Code(_c)) => { - todo!() + WitnessEntry::Instruction(Instruction::Code(c)) => { + Self::replace_next_traverser_node_entry_helper( + 
traverser, + NodeEntry::Code(c.clone()), + ) } - WitnessEntry::Instruction(Instruction::AccountLeaf(_k, _n, _b, _h_c, _h_s)) => { - todo!() + WitnessEntry::Instruction(Instruction::AccountLeaf(k, n, b, has_code, has_storage)) => { + let (n_nodes_to_replace, account_node_code, s_root) = match (has_code, has_storage) + { + (false, false) => Ok((1, None, None)), + (false, true) => { + traverser.get_prev_n_elems_into_buf(1, buf); + + match buf[0] { + // TODO: Really match against branch, hash, and value nodes... + WitnessEntry::Node(_node) => Ok((1, None, None)), // TODO + _ => Self::invalid_witness_err( + 2, + TraverserDirection::Backwards, + traverser, + ), + } + } + (true, false) => { + traverser.get_prev_n_elems_into_buf(1, buf); + + match buf[0] { + WitnessEntry::Node(NodeEntry::Code(code)) => { + Ok((1, Some(AccountNodeCode::CodeNode(code.clone())), None)) + } + WitnessEntry::Node(NodeEntry::Hash(h)) => { + Ok((1, Some(AccountNodeCode::HashNode(*h)), None)) + } + _ => Self::invalid_witness_err( + 2, + TraverserDirection::Backwards, + traverser, + ), + } + } + (true, true) => { + traverser.get_prev_n_elems_into_buf(2, buf); + + match buf[0..=1] { + [WitnessEntry::Node(NodeEntry::Code(_c)), WitnessEntry::Node(_node)] => { + todo!() + } + [WitnessEntry::Node(NodeEntry::Hash(_h)), WitnessEntry::Node(_node)] => { + todo!() + } + _ => Self::invalid_witness_err( + 3, + TraverserDirection::Backwards, + traverser, + ), + } + } + }?; + + let account_leaf_data = AccountNodeData::new(*n, *b, s_root, account_node_code); + let leaf_node = WitnessEntry::Node(NodeEntry::Leaf( + k.clone(), + LeafNodeData::Account(account_leaf_data), + )); + traverser.replace_prev_n_entries_with_single_entry(n_nodes_to_replace, leaf_node); + + Ok(1) } WitnessEntry::Instruction(Instruction::Branch(_mask)) => { todo!() } - _ => { - // TODO: This needs to be cleaned up and put into a separate function... - let invalid_entry_buf = traverser - .get_next_n_elems(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE) - .cloned() - .collect(); - Err(CompactParsingError::InvalidWitnessFormat(invalid_entry_buf)) - } + _ => Self::invalid_witness_err( + MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, + TraverserDirection::Both, + traverser, + ), } } + + fn invalid_witness_err( + n: usize, + t_dir: TraverserDirection, + traverser: &mut CollapsableWitnessEntryTraverser, + ) -> CompactParsingResult { + let adjacent_elems_buf = match t_dir { + TraverserDirection::Forwards => traverser.get_next_n_elems(n).cloned().collect(), + TraverserDirection::Backwards => traverser.get_prev_n_elems(n).cloned().collect(), + TraverserDirection::Both => todo!(), + }; + + Err(CompactParsingError::InvalidWitnessFormat( + adjacent_elems_buf, + )) + } + + fn replace_next_traverser_node_entry_helper( + traverser: &mut CollapsableWitnessEntryTraverser, + entry: NodeEntry, + ) -> CompactParsingResult { + traverser.replace_next_n_entries_with_single_entry(1, WitnessEntry::Node(entry)); + Ok(1) + } } struct WitnessBytes { @@ -468,6 +600,19 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { std::iter::empty() } + fn get_prev_n_elems(&self, _n: usize) -> impl Iterator { + // TODO + std::iter::empty() + } + + /// Get the previous `n` elements into a buf. Note that this does not + /// include the element that we are currently pointing to. + fn get_prev_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&WitnessEntry>) { + todo!() + } + + /// Get the next `n` elements into a buf. Note that this includes the + /// element that we are currently pointing to. 
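The two accepted shapes for the code node popped by an account leaf (a full code blob, or just its hash) reduce to a small helper; a sketch distilled from the match arms above, not code from this patch:

```rust
// Sketch: resolve a popped node into the account's code field.
fn node_to_account_code(node: &NodeEntry) -> Option<AccountNodeCode> {
    match node {
        NodeEntry::Code(code) => Some(AccountNodeCode::CodeNode(code.clone())),
        NodeEntry::Hash(h) => Some(AccountNodeCode::HashNode(*h)),
        _ => None, // anything else means a malformed witness
    }
}
```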
fn get_next_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&WitnessEntry>) { todo!() } @@ -480,11 +625,22 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { self.entry_cursor.insert_after(entry) } + fn replace_prev_n_entries_with_single_entry(&mut self, _n: usize, _entry: WitnessEntry) { + todo!() + } + fn at_end(&self) -> bool { self.entry_cursor.as_cursor().peek_next().is_none() } } +#[derive(Debug)] +enum TraverserDirection { + Forwards, + Backwards, + Both, +} + pub(crate) fn process_compact_prestate( state: TrieCompact, ) -> CompactParsingResult<(Header, HashedPartialTrie)> { From fd88529cca7377e5fc612dac4884c5d9ab929da1 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 20 Oct 2023 14:35:00 -0600 Subject: [PATCH 057/208] Filled in one missing pattern match --- src/compact_prestate_processing.rs | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 01c132cc0..79acb0824 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -237,6 +237,8 @@ impl ParserState { ) -> CompactParsingResult { traverser.get_next_n_elems_into_buf(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, buf); + // TODO: There is a decent amount of code duplication with the matches and the + // calls to `invalid_witness_err`. We should condense this... match buf[0] { WitnessEntry::Instruction(Instruction::Hash(h)) => { Self::replace_next_traverser_node_entry_helper(traverser, NodeEntry::Hash(*h)) @@ -267,8 +269,16 @@ impl ParserState { traverser.get_prev_n_elems_into_buf(1, buf); match buf[0] { - // TODO: Really match against branch, hash, and value nodes... - WitnessEntry::Node(_node) => Ok((1, None, None)), // TODO + WitnessEntry::Node(node) => { + match Self::try_get_storage_hash_from_node(node) { + Some(s_hash) => Ok((1, None, Some(s_hash))), + None => Self::invalid_witness_err( + 1, + TraverserDirection::Backwards, + traverser, + ), + } + } _ => Self::invalid_witness_err( 2, TraverserDirection::Backwards, @@ -332,6 +342,10 @@ impl ParserState { } } + fn try_get_storage_hash_from_node(_node: &NodeEntry) -> Option { + todo!() + } + fn invalid_witness_err( n: usize, t_dir: TraverserDirection, From eaf436d5e0e5f52d77731086facd151a270a9f61 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 20 Oct 2023 14:43:10 -0600 Subject: [PATCH 058/208] Moved match logic to seperate functions - I think this helps with readability. 
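For reference, the `try_get_storage_hash_from_node` stub introduced in the previous patch has an obvious minimal shape; this is a hedged guess at it, not necessarily what the series ultimately lands on:

```rust
fn try_get_storage_hash_from_node(node: &NodeEntry) -> Option<HashValue> {
    match node {
        // Only an already-hashed node can directly supply a storage root.
        NodeEntry::Hash(h) => Some(*h),
        _ => None,
    }
}
```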
--- src/compact_prestate_processing.rs | 110 +++++++++++++++-------------- 1 file changed, 58 insertions(+), 52 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 79acb0824..123d2b32b 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -264,62 +264,14 @@ impl ParserState { WitnessEntry::Instruction(Instruction::AccountLeaf(k, n, b, has_code, has_storage)) => { let (n_nodes_to_replace, account_node_code, s_root) = match (has_code, has_storage) { - (false, false) => Ok((1, None, None)), + (false, false) => Self::match_account_leaf_no_code_and_no_storage(), (false, true) => { - traverser.get_prev_n_elems_into_buf(1, buf); - - match buf[0] { - WitnessEntry::Node(node) => { - match Self::try_get_storage_hash_from_node(node) { - Some(s_hash) => Ok((1, None, Some(s_hash))), - None => Self::invalid_witness_err( - 1, - TraverserDirection::Backwards, - traverser, - ), - } - } - _ => Self::invalid_witness_err( - 2, - TraverserDirection::Backwards, - traverser, - ), - } + Self::match_account_leaf_no_code_but_has_storage(traverser, buf) } (true, false) => { - traverser.get_prev_n_elems_into_buf(1, buf); - - match buf[0] { - WitnessEntry::Node(NodeEntry::Code(code)) => { - Ok((1, Some(AccountNodeCode::CodeNode(code.clone())), None)) - } - WitnessEntry::Node(NodeEntry::Hash(h)) => { - Ok((1, Some(AccountNodeCode::HashNode(*h)), None)) - } - _ => Self::invalid_witness_err( - 2, - TraverserDirection::Backwards, - traverser, - ), - } - } - (true, true) => { - traverser.get_prev_n_elems_into_buf(2, buf); - - match buf[0..=1] { - [WitnessEntry::Node(NodeEntry::Code(_c)), WitnessEntry::Node(_node)] => { - todo!() - } - [WitnessEntry::Node(NodeEntry::Hash(_h)), WitnessEntry::Node(_node)] => { - todo!() - } - _ => Self::invalid_witness_err( - 3, - TraverserDirection::Backwards, - traverser, - ), - } + Self::match_account_leaf_has_code_but_no_storage(traverser, buf) } + (true, true) => Self::match_account_leaf_has_code_and_storage(traverser, buf), }?; let account_leaf_data = AccountNodeData::new(*n, *b, s_root, account_node_code); @@ -342,6 +294,60 @@ impl ParserState { } } + fn match_account_leaf_no_code_and_no_storage( + ) -> CompactParsingResult<(usize, Option, Option)> { + Ok((0, None, None)) + } + + fn match_account_leaf_no_code_but_has_storage( + traverser: &mut CollapsableWitnessEntryTraverser, + buf: &mut Vec<&WitnessEntry>, + ) -> CompactParsingResult<(usize, Option, Option)> { + traverser.get_prev_n_elems_into_buf(1, buf); + + match buf[0] { + WitnessEntry::Node(node) => match Self::try_get_storage_hash_from_node(node) { + Some(s_hash) => Ok((1, None, Some(s_hash))), + None => Self::invalid_witness_err(1, TraverserDirection::Backwards, traverser), + }, + _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), + } + } + + fn match_account_leaf_has_code_but_no_storage( + traverser: &mut CollapsableWitnessEntryTraverser, + buf: &mut Vec<&WitnessEntry>, + ) -> CompactParsingResult<(usize, Option, Option)> { + traverser.get_prev_n_elems_into_buf(1, buf); + + match buf[0] { + WitnessEntry::Node(NodeEntry::Code(code)) => { + Ok((1, Some(AccountNodeCode::CodeNode(code.clone())), None)) + } + WitnessEntry::Node(NodeEntry::Hash(h)) => { + Ok((1, Some(AccountNodeCode::HashNode(*h)), None)) + } + _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), + } + } + + fn match_account_leaf_has_code_and_storage( + traverser: &mut CollapsableWitnessEntryTraverser, + buf: &mut 
Vec<&WitnessEntry>, + ) -> CompactParsingResult<(usize, Option, Option)> { + traverser.get_prev_n_elems_into_buf(2, buf); + + match buf[0..=1] { + [WitnessEntry::Node(NodeEntry::Code(_c)), WitnessEntry::Node(_node)] => { + todo!() + } + [WitnessEntry::Node(NodeEntry::Hash(_h)), WitnessEntry::Node(_node)] => { + todo!() + } + _ => Self::invalid_witness_err(3, TraverserDirection::Backwards, traverser), + } + } + fn try_get_storage_hash_from_node(_node: &NodeEntry) -> Option { todo!() } From 1de5d5b1f8b46287a7548e89a960ea09d9bd8804 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 20 Oct 2023 15:27:52 -0600 Subject: [PATCH 059/208] Fixed a few issues on how some instructions were processed --- src/compact_prestate_processing.rs | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 123d2b32b..c9ee450b3 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -107,7 +107,7 @@ enum NodeEntry { Empty, Hash(HashValue), Leaf(Key, LeafNodeData), - Extension(Key), + Extension(Key, Box), Value(ValueNodeData), } @@ -241,22 +241,30 @@ impl ParserState { // calls to `invalid_witness_err`. We should condense this... match buf[0] { WitnessEntry::Instruction(Instruction::Hash(h)) => { - Self::replace_next_traverser_node_entry_helper(traverser, NodeEntry::Hash(*h)) + Self::traverser_replace_prev_n_nodes_entry_helper(1, traverser, NodeEntry::Hash(*h)) } WitnessEntry::Instruction(Instruction::Leaf(k, v)) => { - Self::replace_next_traverser_node_entry_helper( + Self::traverser_replace_prev_n_nodes_entry_helper( + 1, traverser, NodeEntry::Leaf(k.clone(), LeafNodeData::Value(v.clone().into())), ) } WitnessEntry::Instruction(Instruction::Extension(k)) => { - Self::replace_next_traverser_node_entry_helper( - traverser, - NodeEntry::Extension(k.clone()), - ) + traverser.get_prev_n_elems_into_buf(1, buf); + + match buf[0] { + WitnessEntry::Node(node) => Self::traverser_replace_prev_n_nodes_entry_helper( + 2, + traverser, + NodeEntry::Extension(k.clone(), Box::new(node.clone())), + ), + _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), + } } WitnessEntry::Instruction(Instruction::Code(c)) => { - Self::replace_next_traverser_node_entry_helper( + Self::traverser_replace_prev_n_nodes_entry_helper( + 1, traverser, NodeEntry::Code(c.clone()), ) @@ -368,11 +376,12 @@ impl ParserState { )) } - fn replace_next_traverser_node_entry_helper( + fn traverser_replace_prev_n_nodes_entry_helper( + n: usize, traverser: &mut CollapsableWitnessEntryTraverser, entry: NodeEntry, ) -> CompactParsingResult { - traverser.replace_next_n_entries_with_single_entry(1, WitnessEntry::Node(entry)); + traverser.replace_prev_n_entries_with_single_entry(n, WitnessEntry::Node(entry)); Ok(1) } } From 0214b61b5ac6df23041d493da72720e9bdea962f Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 20 Oct 2023 15:46:39 -0600 Subject: [PATCH 060/208] Filled in logic to know when witness processing is done --- src/compact_prestate_processing.rs | 41 +++++++++++++++++++++++++----- 1 file changed, 34 insertions(+), 7 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index c9ee450b3..474f3ee26 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -51,6 +51,9 @@ pub enum CompactParsingError { #[error("Invalid block witness entries: {0:?}")] InvalidWitnessFormat(Vec), + + #[error("There were multiple 
entries remaining after the compact block witness was processed (Remaining entries: {0:?})")] + NonSingleEntryAfterProcessing(WitnessEntries), } #[derive(Clone, Debug, Deserialize)] @@ -212,23 +215,39 @@ impl ParserState { } } - todo!() + match self.entries.len() { + 1 => Self::create_partial_trie_from_remaining_witness_elem(self.entries.pop().unwrap()), + _ => Err(CompactParsingError::NonSingleEntryAfterProcessing( + self.entries, + )), + } + } + + fn create_partial_trie_from_remaining_witness_elem( + remaining_entry: WitnessEntry, + ) -> CompactParsingResult { + todo!(); } fn apply_rules_to_witness_entries( &mut self, entry_buf: &mut Vec<&WitnessEntry>, ) -> CompactParsingResult { - let mut tot_rules_applied = 0; - let mut traverser = self.entries.create_collapsable_traverser(); + let mut tot_rules_applied = 0; + while !traverser.at_end() { let num_rules_applied = Self::try_apply_rules_to_curr_entry(&mut traverser, entry_buf)?; tot_rules_applied += num_rules_applied; + + if num_rules_applied == 0 { + // Unable to apply rule at current position, so advance the traverser. + traverser.advance(); + } } - todo!() + Ok(tot_rules_applied) } fn try_apply_rules_to_curr_entry( @@ -407,7 +426,7 @@ impl WitnessBytes { // TODO loop { let instr = self.process_operator()?; - self.instrs.push_entry(instr.into()); + self.instrs.push(instr.into()); if self.byte_cursor.at_eof() { break; @@ -500,7 +519,7 @@ impl WitnessBytes { } fn push_entry(&mut self, instr: Instruction) { - self.instrs.push_entry(instr.into()) + self.instrs.push(instr.into()) } fn parse_header(&mut self) -> CompactParsingResult
{
@@ -595,7 +614,11 @@ struct WitnessEntries {
 }
 
 impl WitnessEntries {
-    fn push_entry(&mut self, _entry: WitnessEntry) {
+    fn push(&mut self, _entry: WitnessEntry) {
+        todo!()
+    }
+
+    fn pop(&mut self) -> Option<WitnessEntry> {
         todo!()
     }
 
@@ -610,6 +633,10 @@ impl WitnessEntries {
     fn create_collapsable_traverser(&mut self) -> CollapsableWitnessEntryTraverser {
         todo!()
     }
+
+    fn len(&self) -> usize {
+        self.intern.len()
+    }
 }
 
 // It's not quite an iterator, so this is the next best name that I can come up

From f66c21ffe98452b09fd8234d3b935ce895d784e6 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Fri, 20 Oct 2023 17:33:08 -0600
Subject: [PATCH 061/208] Implemented logic to process branch instrs

---
 src/compact_prestate_processing.rs | 140 ++++++++++++++++++++++-
 src/utils.rs                       |   4 +
 2 files changed, 140 insertions(+), 4 deletions(-)

diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs
index 474f3ee26..a7f4eb915 100644
--- a/src/compact_prestate_processing.rs
+++ b/src/compact_prestate_processing.rs
@@ -15,7 +15,7 @@ use ethereum_types::{H256, U256};
 use serde::{de::DeserializeOwned, Deserialize};
 use thiserror::Error;
 
-use crate::{trace_protocol::TrieCompact, types::TrieRootHash};
+use crate::{trace_protocol::TrieCompact, types::TrieRootHash, utils::clone_vec_and_remove_refs};
 
 pub type CompactParsingResult<T> = Result<T, CompactParsingError>;
 
@@ -31,6 +31,7 @@ type RawValue = Vec<u8>;
 type RawCode = Vec<u8>;
 
 const MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE: usize = 3;
+const BRANCH_MAX_CHILDREN: usize = 16;
 
 #[derive(Debug, Error)]
 pub enum CompactParsingError {
@@ -54,6 +55,17 @@ pub enum CompactParsingError {
 
     #[error("There were multiple entries remaining after the compact block witness was processed (Remaining entries: {0:?})")]
     NonSingleEntryAfterProcessing(WitnessEntries),
+
+    #[error("Branch mask {0:#b} stated there should be {1} preceding nodes but instead found {2} (nodes: {3:?})")]
+    IncorrectNumberOfNodesPrecedingBranch(BranchMask, usize, usize, Vec<WitnessEntry>),
+
+    #[error(
+        "Expected a branch to have {0} preceding nodes but only had {1} (mask: {2}, nodes: {3:?})"
+    )]
+    MissingExpectedNodesPrecedingBranch(usize, usize, BranchMask, Vec<WitnessEntry>),
+
+    #[error("Expected the entry preceding {0} positions behind a {1} entry to be a node but instead found a {2}. (nodes: {3:?})")]
+    PrecedingNonNodeEntryFoundWhenProcessingRule(usize, &'static str, String, Vec<WitnessEntry>),
 }
 
 #[derive(Clone, Debug, Deserialize)]
@@ -85,6 +97,15 @@ enum WitnessEntry {
     Node(NodeEntry),
 }
 
+impl Display for WitnessEntry {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            WitnessEntry::Instruction(i) => write!(f, "Instruction({})", i),
+            WitnessEntry::Node(n) => write!(f, "Node({})", n),
+        }
+    }
+}
+
 // TODO: Ignore `NEW_TRIE` for now...
#[derive(Clone, Debug)] enum Instruction { @@ -97,6 +118,26 @@ enum Instruction { EmptyRoot, } +impl Display for Instruction { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Instruction::Leaf(_, _) => write!(f, "Leaf"), + Instruction::Extension(_) => write!(f, "Extension"), + Instruction::Branch(_) => write!(f, "Branch"), + Instruction::Hash(_) => write!(f, "Hash"), + Instruction::Code(_) => write!(f, "Code"), + Instruction::AccountLeaf(_, _, _, _, _) => write!(f, "AccountLeaf"), + Instruction::EmptyRoot => write!(f, "EmptyRoot"), + } + } +} + +impl From for WitnessEntry { + fn from(v: NodeEntry) -> Self { + WitnessEntry::Node(v) + } +} + impl From for WitnessEntry { fn from(v: Instruction) -> Self { Self::Instruction(v) @@ -106,6 +147,7 @@ impl From for WitnessEntry { #[derive(Clone, Debug)] enum NodeEntry { Account(AccountNodeData), + Branch([Option>; 16]), Code(Vec), Empty, Hash(HashValue), @@ -114,6 +156,21 @@ enum NodeEntry { Value(ValueNodeData), } +impl Display for NodeEntry { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + NodeEntry::Account(_) => write!(f, "Account"), + NodeEntry::Branch(_) => write!(f, "Branch"), + NodeEntry::Code(_) => write!(f, "Code"), + NodeEntry::Empty => write!(f, "Empty"), + NodeEntry::Hash(_) => write!(f, "Hash"), + NodeEntry::Leaf(_, _) => write!(f, "Leaf"), + NodeEntry::Extension(_, _) => write!(f, "Extension"), + NodeEntry::Value(_) => write!(f, "Value"), + } + } +} + #[derive(Clone, Debug)] struct ValueNodeData(Vec); @@ -224,7 +281,7 @@ impl ParserState { } fn create_partial_trie_from_remaining_witness_elem( - remaining_entry: WitnessEntry, + _remaining_entry: WitnessEntry, ) -> CompactParsingResult { todo!(); } @@ -310,8 +367,8 @@ impl ParserState { Ok(1) } - WitnessEntry::Instruction(Instruction::Branch(_mask)) => { - todo!() + WitnessEntry::Instruction(Instruction::Branch(mask)) => { + Self::process_branch_instr(traverser, buf, *mask) } _ => Self::invalid_witness_err( MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, @@ -321,6 +378,74 @@ impl ParserState { } } + fn process_branch_instr( + traverser: &mut CollapsableWitnessEntryTraverser, + buf: &mut Vec<&WitnessEntry>, + mask: BranchMask, + ) -> CompactParsingResult { + let expected_number_of_preceding_nodes = mask.count_ones() as usize; + + traverser.get_prev_n_elems_into_buf(expected_number_of_preceding_nodes, buf); + let number_available_preceding_elems = buf.len(); + + if buf.len() != expected_number_of_preceding_nodes { + return Err(CompactParsingError::IncorrectNumberOfNodesPrecedingBranch( + mask, + expected_number_of_preceding_nodes, + number_available_preceding_elems, + clone_vec_and_remove_refs(buf), + )); + } + + let mut branch_nodes = Self::create_empty_branch_node_entry(); + let mut curr_traverser_node_idx = 0; + + for i in 0..BRANCH_MAX_CHILDREN { + if mask as usize & (i << 1) != 0 { + let entry_to_check = buf[curr_traverser_node_idx]; + let node_entry = try_get_node_entry_from_witness_entry(entry_to_check) + .ok_or_else(|| { + let n_entries_behind_cursor = number_available_preceding_elems - i; + + CompactParsingError::PrecedingNonNodeEntryFoundWhenProcessingRule( + n_entries_behind_cursor, + "Branch", + entry_to_check.to_string(), + clone_vec_and_remove_refs(buf), + ) + })? + .clone(); + + branch_nodes[i] = Some(Box::new(node_entry)); + curr_traverser_node_idx += 1; + } + } + + let number_of_nodes_traversed = curr_traverser_node_idx; // For readability. 
+ if curr_traverser_node_idx != buf.len() { + return Err(CompactParsingError::MissingExpectedNodesPrecedingBranch( + expected_number_of_preceding_nodes, + number_of_nodes_traversed, + mask, + clone_vec_and_remove_refs(buf), + )); + } + + traverser.replace_prev_n_entries_with_single_entry( + number_of_nodes_traversed + 1, + NodeEntry::Branch(branch_nodes).into(), + ); + Ok(1) + } + + // ... Because we can't do `[None; 16]` without implementing `Copy`. + fn create_empty_branch_node_entry() -> [Option>; 16] { + [ + None, None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, + ] + } + fn match_account_leaf_no_code_and_no_storage( ) -> CompactParsingResult<(usize, Option, Option)> { Ok((0, None, None)) @@ -690,6 +815,13 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { } } +fn try_get_node_entry_from_witness_entry(entry: &WitnessEntry) -> Option<&NodeEntry> { + match entry { + WitnessEntry::Node(n_entry) => Some(n_entry), + _ => None, + } +} + #[derive(Debug)] enum TraverserDirection { Forwards, diff --git a/src/utils.rs b/src/utils.rs index 4f42fbac3..866e088ef 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -10,3 +10,7 @@ pub(crate) fn update_val_if_some(target: &mut T, opt: Option) { *target = new_val; } } + +pub(crate) fn clone_vec_and_remove_refs(vec_of_refs: &Vec<&T>) -> Vec { + vec_of_refs.iter().map(|r| (*r).clone()).collect() +} From 5e1862f166e45487bfa57fc1557cbf4b322cb374 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 24 Oct 2023 16:42:27 -0700 Subject: [PATCH 062/208] Filled in more `todo!()`s - Temporarily added a bunch of clones, but will remove these later on. --- src/compact_prestate_processing.rs | 125 ++++++++++++++++++----------- 1 file changed, 76 insertions(+), 49 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index a7f4eb915..0d16ea49c 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -7,7 +7,7 @@ use std::{ error::Error, fmt::{self, Display}, io::{Cursor, Read}, - ops::Range, + iter, }; use eth_trie_utils::partial_trie::HashedPartialTrie; @@ -15,7 +15,7 @@ use ethereum_types::{H256, U256}; use serde::{de::DeserializeOwned, Deserialize}; use thiserror::Error; -use crate::{trace_protocol::TrieCompact, types::TrieRootHash, utils::clone_vec_and_remove_refs}; +use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; pub type CompactParsingResult = Result; @@ -288,7 +288,7 @@ impl ParserState { fn apply_rules_to_witness_entries( &mut self, - entry_buf: &mut Vec<&WitnessEntry>, + entry_buf: &mut Vec, ) -> CompactParsingResult { let mut traverser = self.entries.create_collapsable_traverser(); @@ -309,15 +309,17 @@ impl ParserState { fn try_apply_rules_to_curr_entry( traverser: &mut CollapsableWitnessEntryTraverser, - buf: &mut Vec<&WitnessEntry>, + buf: &mut Vec, ) -> CompactParsingResult { traverser.get_next_n_elems_into_buf(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, buf); // TODO: There is a decent amount of code duplication with the matches and the // calls to `invalid_witness_err`. We should condense this... - match buf[0] { + + // TODO: These clones are really bad, but we will clean this up once it works. 
+ match buf[0].clone() { WitnessEntry::Instruction(Instruction::Hash(h)) => { - Self::traverser_replace_prev_n_nodes_entry_helper(1, traverser, NodeEntry::Hash(*h)) + Self::traverser_replace_prev_n_nodes_entry_helper(1, traverser, NodeEntry::Hash(h)) } WitnessEntry::Instruction(Instruction::Leaf(k, v)) => { Self::traverser_replace_prev_n_nodes_entry_helper( @@ -329,7 +331,7 @@ impl ParserState { WitnessEntry::Instruction(Instruction::Extension(k)) => { traverser.get_prev_n_elems_into_buf(1, buf); - match buf[0] { + match buf[0].clone() { WitnessEntry::Node(node) => Self::traverser_replace_prev_n_nodes_entry_helper( 2, traverser, @@ -358,7 +360,7 @@ impl ParserState { (true, true) => Self::match_account_leaf_has_code_and_storage(traverser, buf), }?; - let account_leaf_data = AccountNodeData::new(*n, *b, s_root, account_node_code); + let account_leaf_data = AccountNodeData::new(n, b, s_root, account_node_code); let leaf_node = WitnessEntry::Node(NodeEntry::Leaf( k.clone(), LeafNodeData::Account(account_leaf_data), @@ -368,7 +370,7 @@ impl ParserState { Ok(1) } WitnessEntry::Instruction(Instruction::Branch(mask)) => { - Self::process_branch_instr(traverser, buf, *mask) + Self::process_branch_instr(traverser, buf, mask) } _ => Self::invalid_witness_err( MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, @@ -380,7 +382,7 @@ impl ParserState { fn process_branch_instr( traverser: &mut CollapsableWitnessEntryTraverser, - buf: &mut Vec<&WitnessEntry>, + buf: &mut Vec, mask: BranchMask, ) -> CompactParsingResult { let expected_number_of_preceding_nodes = mask.count_ones() as usize; @@ -393,7 +395,7 @@ impl ParserState { mask, expected_number_of_preceding_nodes, number_available_preceding_elems, - clone_vec_and_remove_refs(buf), + buf.clone(), )); } @@ -402,7 +404,7 @@ impl ParserState { for i in 0..BRANCH_MAX_CHILDREN { if mask as usize & (i << 1) != 0 { - let entry_to_check = buf[curr_traverser_node_idx]; + let entry_to_check = &buf[curr_traverser_node_idx]; let node_entry = try_get_node_entry_from_witness_entry(entry_to_check) .ok_or_else(|| { let n_entries_behind_cursor = number_available_preceding_elems - i; @@ -411,7 +413,7 @@ impl ParserState { n_entries_behind_cursor, "Branch", entry_to_check.to_string(), - clone_vec_and_remove_refs(buf), + buf.clone(), ) })? 
.clone(); @@ -427,7 +429,7 @@ impl ParserState { expected_number_of_preceding_nodes, number_of_nodes_traversed, mask, - clone_vec_and_remove_refs(buf), + buf.clone(), )); } @@ -453,12 +455,12 @@ impl ParserState { fn match_account_leaf_no_code_but_has_storage( traverser: &mut CollapsableWitnessEntryTraverser, - buf: &mut Vec<&WitnessEntry>, + buf: &mut Vec, ) -> CompactParsingResult<(usize, Option, Option)> { traverser.get_prev_n_elems_into_buf(1, buf); - match buf[0] { - WitnessEntry::Node(node) => match Self::try_get_storage_hash_from_node(node) { + match buf[0].clone() { + WitnessEntry::Node(node) => match Self::try_get_storage_hash_from_node(&node) { Some(s_hash) => Ok((1, None, Some(s_hash))), None => Self::invalid_witness_err(1, TraverserDirection::Backwards, traverser), }, @@ -468,16 +470,16 @@ impl ParserState { fn match_account_leaf_has_code_but_no_storage( traverser: &mut CollapsableWitnessEntryTraverser, - buf: &mut Vec<&WitnessEntry>, + buf: &mut Vec, ) -> CompactParsingResult<(usize, Option, Option)> { traverser.get_prev_n_elems_into_buf(1, buf); - match buf[0] { + match buf[0].clone() { WitnessEntry::Node(NodeEntry::Code(code)) => { Ok((1, Some(AccountNodeCode::CodeNode(code.clone())), None)) } WitnessEntry::Node(NodeEntry::Hash(h)) => { - Ok((1, Some(AccountNodeCode::HashNode(*h)), None)) + Ok((1, Some(AccountNodeCode::HashNode(h)), None)) } _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), } @@ -485,11 +487,11 @@ impl ParserState { fn match_account_leaf_has_code_and_storage( traverser: &mut CollapsableWitnessEntryTraverser, - buf: &mut Vec<&WitnessEntry>, + buf: &mut Vec, ) -> CompactParsingResult<(usize, Option, Option)> { traverser.get_prev_n_elems_into_buf(2, buf); - match buf[0..=1] { + match &buf[0..=1] { [WitnessEntry::Node(NodeEntry::Code(_c)), WitnessEntry::Node(_node)] => { todo!() } @@ -512,7 +514,17 @@ impl ParserState { let adjacent_elems_buf = match t_dir { TraverserDirection::Forwards => traverser.get_next_n_elems(n).cloned().collect(), TraverserDirection::Backwards => traverser.get_prev_n_elems(n).cloned().collect(), - TraverserDirection::Both => todo!(), + TraverserDirection::Both => { + let prev_elems = traverser.get_prev_n_elems(n); + let curr_elem = traverser.get_curr_elem(); + let next_elems = traverser.get_next_n_elems(n); + + prev_elems + .chain(curr_elem) + .chain(next_elems) + .cloned() + .collect() + } }; Err(CompactParsingError::InvalidWitnessFormat( @@ -739,24 +751,18 @@ struct WitnessEntries { } impl WitnessEntries { - fn push(&mut self, _entry: WitnessEntry) { - todo!() + fn push(&mut self, entry: WitnessEntry) { + self.intern.push_back(entry) } fn pop(&mut self) -> Option { - todo!() - } - - fn replace_entries_with_single_entry( - &mut self, - _idxs_to_replace: Range, - _entry_to_replace_with: WitnessEntry, - ) { - todo!() + self.intern.pop_back() } fn create_collapsable_traverser(&mut self) -> CollapsableWitnessEntryTraverser { - todo!() + let entry_cursor = self.intern.cursor_front_mut(); + + CollapsableWitnessEntryTraverser { entry_cursor } } fn len(&self) -> usize { @@ -767,35 +773,50 @@ impl WitnessEntries { // It's not quite an iterator, so this is the next best name that I can come up // with. struct CollapsableWitnessEntryTraverser<'a> { - entries: &'a mut WitnessEntries, entry_cursor: CursorMut<'a, WitnessEntry>, } +// TODO: For now, lets just use pure values in the buffer, but we probably want +// to switch over to references later... 
impl<'a> CollapsableWitnessEntryTraverser<'a> { fn advance(&mut self) { - todo!() + self.entry_cursor.move_next(); } - fn get_next_n_elems(&self, _n: usize) -> impl Iterator { - // TODO - std::iter::empty() + fn get_curr_elem(&self) -> Option<&WitnessEntry> { + self.entry_cursor.as_cursor().current() } - fn get_prev_n_elems(&self, _n: usize) -> impl Iterator { - // TODO - std::iter::empty() + fn get_next_n_elems(&self, n: usize) -> impl Iterator { + let mut read_only_cursor = self.entry_cursor.as_cursor(); + + iter::from_fn(move || { + read_only_cursor.move_next(); + read_only_cursor.current() + }) + .take(n) + } + + fn get_prev_n_elems(&self, n: usize) -> impl Iterator { + let mut read_only_cursor = self.entry_cursor.as_cursor(); + + iter::from_fn(move || { + read_only_cursor.move_prev(); + read_only_cursor.current() + }) + .take(n) } /// Get the previous `n` elements into a buf. Note that this does not /// include the element that we are currently pointing to. - fn get_prev_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&WitnessEntry>) { - todo!() + fn get_prev_n_elems_into_buf(&self, n: usize, buf: &mut Vec) { + buf.extend(self.get_next_n_elems(n).cloned()) } /// Get the next `n` elements into a buf. Note that this includes the /// element that we are currently pointing to. - fn get_next_n_elems_into_buf(&self, _n: usize, _buf: &mut Vec<&WitnessEntry>) { - todo!() + fn get_next_n_elems_into_buf(&self, n: usize, buf: &mut Vec) { + buf.extend(self.get_prev_n_elems(n).cloned()); } fn replace_next_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { @@ -806,8 +827,14 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { self.entry_cursor.insert_after(entry) } - fn replace_prev_n_entries_with_single_entry(&mut self, _n: usize, _entry: WitnessEntry) { - todo!() + fn replace_prev_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { + for _ in 0..n { + // ... Does this work? 
+ self.entry_cursor.move_prev(); + self.entry_cursor.remove_current(); + } + + self.entry_cursor.insert_after(entry) } fn at_end(&self) -> bool { From c7fe3804172edb5c9bea7dc58d4cbf0d1fbdbabc Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 25 Oct 2023 13:42:58 -0700 Subject: [PATCH 063/208] Created a simple test and cleaned up some warnings --- src/compact_prestate_processing.rs | 35 +++++++++++++++++++++--------- src/utils.rs | 2 +- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 0d16ea49c..c8c7d0734 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -92,7 +92,7 @@ enum Opcode { } #[derive(Clone, Debug)] -enum WitnessEntry { +pub enum WitnessEntry { Instruction(Instruction), Node(NodeEntry), } @@ -256,12 +256,7 @@ impl ParserState { Ok((header, p_state)) } - fn parse(self) -> CompactParsingResult { - let trie = self.parse_into_trie()?; - Ok(trie) - } - - fn parse_into_trie(mut self) -> CompactParsingResult { + fn parse(mut self) -> CompactParsingResult { let mut entry_buf = Vec::new(); loop { @@ -402,7 +397,11 @@ impl ParserState { let mut branch_nodes = Self::create_empty_branch_node_entry(); let mut curr_traverser_node_idx = 0; - for i in 0..BRANCH_MAX_CHILDREN { + for (i, branch_node) in branch_nodes + .iter_mut() + .enumerate() + .take(BRANCH_MAX_CHILDREN) + { if mask as usize & (i << 1) != 0 { let entry_to_check = &buf[curr_traverser_node_idx]; let node_entry = try_get_node_entry_from_witness_entry(entry_to_check) @@ -418,7 +417,7 @@ impl ParserState { })? .clone(); - branch_nodes[i] = Some(Box::new(node_entry)); + *branch_node = Some(Box::new(node_entry)); curr_traverser_node_idx += 1; } } @@ -742,7 +741,7 @@ impl CompactCursor { /// We kind of want a wrapper around the actual data structure I think since /// there's a good chance this will change a few times in the future. #[derive(Debug, Default)] -struct WitnessEntries { +pub struct WitnessEntries { // Yeah a LL is actually (unfortunately) a very good choice here. We will be doing a ton of // inserts mid-list, and the list can get very large. 
There might be a better choice for a data // structure, but for now, this will make performance not scale exponentially with list @@ -864,3 +863,19 @@ pub(crate) fn process_compact_prestate( Ok((header, trie)) } + +#[cfg(test)] +mod tests { + use crate::compact_prestate_processing::ParserState; + + #[test] + fn simple() { + const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012"; + + let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); + let (header, parser) = ParserState::create_and_extract_header(bytes).unwrap(); + + assert_eq!(header.version, 1); + let _trie = parser.parse().unwrap(); + } +} diff --git a/src/utils.rs b/src/utils.rs index 866e088ef..5f0b0016d 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -11,6 +11,6 @@ pub(crate) fn update_val_if_some(target: &mut T, opt: Option) { } } -pub(crate) fn clone_vec_and_remove_refs(vec_of_refs: &Vec<&T>) -> Vec { +pub(crate) fn clone_vec_and_remove_refs(vec_of_refs: &[&T]) -> Vec { vec_of_refs.iter().map(|r| (*r).clone()).collect() } From a546f2af932c87b341d5950ca753181522029a57 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 25 Oct 2023 14:00:45 -0700 Subject: [PATCH 064/208] Added some more debugging tools --- src/compact_prestate_processing.rs | 33 ++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index c8c7d0734..42e52d5e5 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -864,18 +864,47 @@ pub(crate) fn process_compact_prestate( Ok((header, trie)) } +// TODO: Move behind a feature flag just used for debugging (but probably not +// `debug`)... +fn parse_just_to_instructions(bytes: Vec) -> Vec { + let witness_bytes = WitnessBytes::new(bytes); + let (_header, entries) = witness_bytes + .process_into_instructions_and_header() + .unwrap(); + + entries + .intern + .into_iter() + .map(|entry| match entry { + WitnessEntry::Instruction(instr) => instr, + _ => unreachable!( + "Found a non-instruction at a stage when we should only have instructions!" + ), + }) + .collect() +} + #[cfg(test)] mod tests { + use super::parse_just_to_instructions; use crate::compact_prestate_processing::ParserState; + const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012"; + #[test] fn simple() { - const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012"; - let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); let (header, parser) = ParserState::create_and_extract_header(bytes).unwrap(); assert_eq!(header.version, 1); let _trie = parser.parse().unwrap(); } + + #[test] + fn simple_instructions_are_parsed_correctly() { + let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); + let instrs = parse_just_to_instructions(bytes); + + println!("{:?}", instrs); + } } From 032b35bd3ab4ae3c14be1a8a196b92236bf55ba2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 07:22:35 -0700 Subject: [PATCH 065/208] SQUASH!! 
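Among other cleanups, this squash settles the shape of the instruction decode
loop: read one opcode byte, map it to an opcode, consume that opcode's
operands, and repeat until the byte stream is exhausted. A minimal runnable
sketch of that loop under assumed simplifications; the two-variant `Op` enum,
the 0x00/0x03 byte values, and the `String` error are illustrative stand-ins,
not this crate's real `Opcode` table, operand parsing, or error enum.

use std::io::{Cursor, Read};

#[derive(Debug, PartialEq)]
enum Op {
    Leaf,
    Hash,
}

// Pull one opcode byte at a time and dispatch on it until EOF.
fn decode(bytes: &[u8]) -> Result<Vec<Op>, String> {
    let mut cursor = Cursor::new(bytes);
    let mut byte = [0u8; 1];
    let mut ops = Vec::new();

    while cursor.read_exact(&mut byte).is_ok() {
        let op = match byte[0] {
            0x00 => Op::Leaf,
            0x03 => Op::Hash,
            b => return Err(format!("invalid opcode operator (\"{b:x}\")")),
        };
        // The real `process_operator` also reads each opcode's operands
        // (keys, values, masks, ...) before pushing the decoded instruction.
        ops.push(op);
    }

    Ok(ops)
}

fn main() {
    assert_eq!(decode(&[0x00, 0x03]).unwrap(), vec![Op::Leaf, Op::Hash]);
}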
--- src/compact_prestate_processing.rs | 35 +++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 42e52d5e5..a711fb85c 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -38,7 +38,7 @@ pub enum CompactParsingError { #[error("Missing header")] MissingHeader, - #[error("Invalid opcode operator (\"{0:x}\"")] + #[error("Invalid opcode operator (\"{0:x}\")")] InvalidOperator(u8), #[error("Reached the end of the byte stream when we still expected more data")] @@ -75,8 +75,11 @@ struct Key { } impl> From for Key { - fn from(_value: K) -> Self { - todo!() + fn from(v: K) -> Self { + Self { + is_even: false, // TODO! + bytes: v.borrow().to_owned(), + } } } @@ -559,10 +562,8 @@ impl WitnessBytes { ) -> CompactParsingResult<(Header, WitnessEntries)> { let header = self.parse_header()?; - // TODO loop { - let instr = self.process_operator()?; - self.instrs.push(instr.into()); + self.process_operator()?; if self.byte_cursor.at_eof() { break; @@ -572,15 +573,15 @@ impl WitnessBytes { Ok((header, self.instrs)) } - fn process_operator(&mut self) -> CompactParsingResult { + fn process_operator(&mut self) -> CompactParsingResult<()> { let opcode_byte = self.byte_cursor.read_byte()?; let opcode = Opcode::n(opcode_byte).ok_or(CompactParsingError::InvalidOperator(opcode_byte))?; - self.process_data_following_opcode(opcode)?; + println!("Processed {:?}", opcode); - todo!() + self.process_data_following_opcode(opcode) } fn process_data_following_opcode(&mut self, opcode: Opcode) -> CompactParsingResult<()> { @@ -866,13 +867,12 @@ pub(crate) fn process_compact_prestate( // TODO: Move behind a feature flag just used for debugging (but probably not // `debug`)... -fn parse_just_to_instructions(bytes: Vec) -> Vec { +fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult> { let witness_bytes = WitnessBytes::new(bytes); - let (_header, entries) = witness_bytes - .process_into_instructions_and_header() - .unwrap(); + let (_, entries) = witness_bytes + .process_into_instructions_and_header()?; - entries + Ok(entries .intern .into_iter() .map(|entry| match entry { @@ -881,7 +881,7 @@ fn parse_just_to_instructions(bytes: Vec) -> Vec { "Found a non-instruction at a stage when we should only have instructions!" ), }) - .collect() + .collect()) } #[cfg(test)] @@ -905,6 +905,11 @@ mod tests { let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); let instrs = parse_just_to_instructions(bytes); + let instrs = match instrs { + Ok(x) => x, + Err(err) => panic!("{}", err), + }; + println!("{:?}", instrs); } } From 5cec46cc953fe70b274ee945b48e30ae6dc3287b Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 14:39:09 -0600 Subject: [PATCH 066/208] Now parses instructions properly! 
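The fix leans on `ciborium`'s reader-based API: decoding a CBOR item straight
off the cursor leaves the cursor positioned right after that item, so reads
can be chained. In isolation the pattern looks like this, as a minimal sketch
assuming `ciborium` 0.2 as a dependency, with the payload mirroring the
`4431323334` fragment of the test vector (a CBOR byte string holding the four
bytes `31 32 33 34`):

use std::io::Cursor;

fn main() {
    // 0x44 is the CBOR header for a definite-length 4-byte byte string.
    let payload = [0x44, 0x31, 0x32, 0x33, 0x34];
    let mut cursor = Cursor::new(payload.as_slice());

    let decoded: Vec<u8> = ciborium::from_reader(&mut cursor).unwrap();

    assert_eq!(decoded, vec![0x31, 0x32, 0x33, 0x34]);
    // The cursor now sits at the end of the decoded item.
    assert_eq!(cursor.position(), 5);
}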
---
 src/compact_prestate_processing.rs | 21 ++++++---------------
 1 file changed, 6 insertions(+), 15 deletions(-)

diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs
index a711fb85c..9e78383f8 100644
--- a/src/compact_prestate_processing.rs
+++ b/src/compact_prestate_processing.rs
@@ -597,7 +597,7 @@ impl WitnessBytes {
     }
 
     fn process_leaf(&mut self) -> CompactParsingResult<()> {
-        let key = self.byte_cursor.read_cbor_byte_array()?.into();
+        let key = self.byte_cursor.read_cbor_byte_array_to_vec()?.into();
         let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?;
 
         self.push_entry(Instruction::Leaf(key, value_raw));
@@ -605,7 +605,7 @@ impl WitnessBytes {
     }
 
     fn process_extension(&mut self) -> CompactParsingResult<()> {
-        let key = self.byte_cursor.read_cbor_byte_array()?.into();
+        let key = self.byte_cursor.read_cbor_byte_array_to_vec()?.into();
 
         self.push_entry(Instruction::Extension(key));
         Ok(())
@@ -633,7 +633,7 @@ impl WitnessBytes {
     }
 
     fn process_account_leaf(&mut self) -> CompactParsingResult<()> {
-        let key = self.byte_cursor.read_cbor_byte_array()?.into();
+        let key = self.byte_cursor.read_cbor_byte_array_to_vec()?.into();
         let nonce = self.byte_cursor.read_t()?;
         let balance = self.byte_cursor.read_t()?;
         let has_code = self.byte_cursor.read_t()?;
@@ -712,16 +712,8 @@ impl CompactCursor {
         Ok(single_byte_buf[0])
     }
 
-    fn read_cbor_byte_array(&mut self) -> CompactParsingResult<&[u8]> {
-        self.temp_buf.clear();
-        Self::ciborium_byte_vec_err_reader_res_to_parsing_res(ciborium_io::Read::read_exact(
-            &mut self.intern,
-            &mut self.temp_buf,
-        ))?;
-
-        Ok(&self.temp_buf)
-    }
-
+    // I don't think it's possible to avoid reading into a vec here with
+    // `ciborium`... In theory this should be doable, but given how the library
+    // is designed, I don't think we can.
     fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult<Vec<u8>> {
         Self::ciborium_byte_vec_err_reader_res_to_parsing_res(ciborium::from_reader(
             &mut self.intern,
@@ -869,8 +861,7 @@ pub(crate) fn process_compact_prestate(
 // `debug`)...
fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult> { let witness_bytes = WitnessBytes::new(bytes); - let (_, entries) = witness_bytes - .process_into_instructions_and_header()?; + let (_, entries) = witness_bytes.process_into_instructions_and_header()?; Ok(entries .intern From 286fcbff181e44edc7d0cebfcd29cef18c0ecfd7 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 16:22:31 -0600 Subject: [PATCH 067/208] Now parses instructions properly and fixed key parsing --- Cargo.toml | 1 + src/compact_prestate_processing.rs | 65 ++++++++++++++++++++++-------- 2 files changed, 49 insertions(+), 17 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index dd38e9d87..1cab6cbab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", ethereum-types = "0.14.1" hex = "0.4.3" keccak-hash = "0.10.0" +log = "0.4.20" plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "49976ea2a98dcb6052bd6cf3a65f730e55727330" } thiserror = "1.0.49" rlp = "0.5.2" diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 9e78383f8..150044729 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -10,8 +10,12 @@ use std::{ iter, }; -use eth_trie_utils::partial_trie::HashedPartialTrie; +use eth_trie_utils::{ + nibbles::{BytesToNibblesError, Nibbles}, + partial_trie::HashedPartialTrie, +}; use ethereum_types::{H256, U256}; +use log::trace; use serde::{de::DeserializeOwned, Deserialize}; use thiserror::Error; @@ -66,20 +70,25 @@ pub enum CompactParsingError { #[error("Expected the entry preceding {0} positions behind a {1} entry to be a node but instead found a {2}. (nodes: {3:?})")] PrecedingNonNodeEntryFoundWhenProcessingRule(usize, &'static str, String, Vec), + + #[error("Unable to create key nibbles from bytes {0}")] + KeyError(#[from] BytesToNibblesError), } -#[derive(Clone, Debug, Deserialize)] +#[derive(Clone, Debug, Deserialize, Eq, PartialEq)] struct Key { - is_even: bool, - bytes: Vec, + nibbles: Nibbles, } -impl> From for Key { - fn from(v: K) -> Self { - Self { - is_even: false, // TODO! - bytes: v.borrow().to_owned(), - } +impl Key { + fn new>(bytes: B) -> CompactParsingResult { + let bytes = bytes.borrow(); + let mut nibbles = Nibbles::from_bytes_be(bytes)?; + + // Drop flag bits. + nibbles.pop_next_nibble_front(); + + Ok(Self { nibbles }) } } @@ -110,7 +119,7 @@ impl Display for WitnessEntry { } // TODO: Ignore `NEW_TRIE` for now... 
-#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] enum Instruction { Leaf(Key, RawValue), Extension(Key), @@ -579,7 +588,7 @@ impl WitnessBytes { let opcode = Opcode::n(opcode_byte).ok_or(CompactParsingError::InvalidOperator(opcode_byte))?; - println!("Processed {:?}", opcode); + trace!("Processed {:?} opcode", opcode); self.process_data_following_opcode(opcode) } @@ -597,7 +606,7 @@ impl WitnessBytes { } fn process_leaf(&mut self) -> CompactParsingResult<()> { - let key = self.byte_cursor.read_cbor_byte_array_to_vec()?.into(); + let key = Key::new(self.byte_cursor.read_cbor_byte_array_to_vec()?)?; let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?; self.push_entry(Instruction::Leaf(key, value_raw)); @@ -605,7 +614,7 @@ impl WitnessBytes { } fn process_extension(&mut self) -> CompactParsingResult<()> { - let key = self.byte_cursor.read_cbor_byte_array_to_vec()?.into(); + let key = Key::new(self.byte_cursor.read_cbor_byte_array_to_vec()?)?; self.push_entry(Instruction::Extension(key)); Ok(()) @@ -633,7 +642,7 @@ impl WitnessBytes { } fn process_account_leaf(&mut self) -> CompactParsingResult<()> { - let key = self.byte_cursor.read_cbor_byte_array_to_vec()?.into(); + let key = Key::new(self.byte_cursor.read_cbor_byte_array_to_vec()?)?; let nonce = self.byte_cursor.read_t()?; let balance = self.byte_cursor.read_t()?; let has_code = self.byte_cursor.read_t()?; @@ -878,10 +887,19 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult Key { + let bytes = hex::decode(h_bytes).unwrap(); + Key::new(bytes).unwrap() + } + + fn h_decode(b_str: &str) -> Vec { + hex::decode(b_str).unwrap() + } + #[test] fn simple() { let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); @@ -901,6 +919,19 @@ mod tests { Err(err) => panic!("{}", err), }; - println!("{:?}", instrs); + let expected_instrs = vec![ + Instruction::Leaf(h_decode_key("10"), h_decode("31323334")), + Instruction::Leaf(h_decode_key("10"), h_decode("31323334")), + Instruction::Branch(0b00110000), + Instruction::Leaf(h_decode_key("0350"), h_decode("31323335")), + Instruction::Branch(0b00011000), + Instruction::Extension(h_decode_key( + "0000000000000000000000000000000000000000000000000000000000000012", + )), + ]; + + for (i, expected_instr) in expected_instrs.into_iter().enumerate() { + assert_eq!(expected_instr, instrs[i]) + } } } From 61972ae630983d9bbe3b16d50c4e72c9a4835f8c Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 19:18:38 -0600 Subject: [PATCH 068/208] Simple payloads now pattern match correctly --- Cargo.toml | 3 ++ src/compact_prestate_processing.rs | 75 ++++++++++++++++++++---------- 2 files changed, 53 insertions(+), 25 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 1cab6cbab..7d803d373 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,3 +18,6 @@ thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" serde = "1.0.166" + +[dev-dependencies] +pretty_env_logger = "0.5.0" diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index 150044729..fcff7e3d5 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -57,7 +57,7 @@ pub enum CompactParsingError { #[error("Invalid block witness entries: {0:?}")] InvalidWitnessFormat(Vec), - #[error("There were multiple entries remaining after the compact block witness was processed (Remaining entries: {0:?})")] + #[error("There were multiple entries remaining after the compact block witness was processed (Remaining entries: {0:#?})")] 
NonSingleEntryAfterProcessing(WitnessEntries), #[error("Branch mask {0:#b} stated there should be {1} preceding nodes but instead found {2} (nodes: {3:?})")] @@ -379,11 +379,7 @@ impl ParserState { WitnessEntry::Instruction(Instruction::Branch(mask)) => { Self::process_branch_instr(traverser, buf, mask) } - _ => Self::invalid_witness_err( - MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, - TraverserDirection::Both, - traverser, - ), + _ => Ok(0), } } @@ -414,11 +410,12 @@ impl ParserState { .enumerate() .take(BRANCH_MAX_CHILDREN) { - if mask as usize & (i << 1) != 0 { + if mask as usize & (1 << i) != 0 { let entry_to_check = &buf[curr_traverser_node_idx]; let node_entry = try_get_node_entry_from_witness_entry(entry_to_check) .ok_or_else(|| { - let n_entries_behind_cursor = number_available_preceding_elems - i; + let n_entries_behind_cursor = + number_available_preceding_elems - curr_traverser_node_idx; CompactParsingError::PrecedingNonNodeEntryFoundWhenProcessingRule( n_entries_behind_cursor, @@ -527,12 +524,12 @@ impl ParserState { TraverserDirection::Backwards => traverser.get_prev_n_elems(n).cloned().collect(), TraverserDirection::Both => { let prev_elems = traverser.get_prev_n_elems(n); - let curr_elem = traverser.get_curr_elem(); - let next_elems = traverser.get_next_n_elems(n); + let next_elems_including_curr = traverser.get_next_n_elems(n + 1); + let prev_elems_vec: Vec<_> = prev_elems.collect(); - prev_elems - .chain(curr_elem) - .chain(next_elems) + prev_elems_vec + .into_iter() + .chain(next_elems_including_curr) .cloned() .collect() } @@ -773,6 +770,7 @@ impl WitnessEntries { // It's not quite an iterator, so this is the next best name that I can come up // with. +#[derive(Debug)] struct CollapsableWitnessEntryTraverser<'a> { entry_cursor: CursorMut<'a, WitnessEntry>, } @@ -792,9 +790,14 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { let mut read_only_cursor = self.entry_cursor.as_cursor(); iter::from_fn(move || { - read_only_cursor.move_next(); - read_only_cursor.current() + // Index returns a `None` if we are at the end of the LL. + read_only_cursor.index().map(|_| { + let entry = read_only_cursor.current(); + read_only_cursor.move_next(); + entry + }) }) + .flatten() .take(n) } @@ -802,24 +805,30 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { let mut read_only_cursor = self.entry_cursor.as_cursor(); iter::from_fn(move || { - read_only_cursor.move_prev(); - read_only_cursor.current() + read_only_cursor.index().map(|_| { + read_only_cursor.move_prev(); + read_only_cursor.current() + }) }) + .flatten() .take(n) } /// Get the previous `n` elements into a buf. Note that this does not /// include the element that we are currently pointing to. fn get_prev_n_elems_into_buf(&self, n: usize, buf: &mut Vec) { - buf.extend(self.get_next_n_elems(n).cloned()) + buf.clear(); + buf.extend(self.get_prev_n_elems(n).cloned()) } /// Get the next `n` elements into a buf. Note that this includes the /// element that we are currently pointing to. fn get_next_n_elems_into_buf(&self, n: usize, buf: &mut Vec) { - buf.extend(self.get_prev_n_elems(n).cloned()); + buf.clear(); + buf.extend(self.get_next_n_elems(n).cloned()); } + // Inclusive. fn replace_next_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { for _ in 0..n { self.entry_cursor.remove_current(); @@ -828,18 +837,23 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { self.entry_cursor.insert_after(entry) } + // Inclusive. 
fn replace_prev_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { for _ in 0..n { - // ... Does this work? - self.entry_cursor.move_prev(); self.entry_cursor.remove_current(); + self.entry_cursor.move_prev(); + + if self.entry_cursor.index().is_none() { + break; + } } + self.entry_cursor.insert_after(entry); - self.entry_cursor.insert_after(entry) + self.entry_cursor.move_next(); } fn at_end(&self) -> bool { - self.entry_cursor.as_cursor().peek_next().is_none() + self.entry_cursor.as_cursor().current().is_none() } } @@ -891,6 +905,10 @@ mod tests { const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012"; + fn init() { + let _ = pretty_env_logger::try_init(); + } + fn h_decode_key(h_bytes: &str) -> Key { let bytes = hex::decode(h_bytes).unwrap(); Key::new(bytes).unwrap() @@ -901,16 +919,23 @@ mod tests { } #[test] - fn simple() { + fn simple_full() { + init(); + let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); let (header, parser) = ParserState::create_and_extract_header(bytes).unwrap(); assert_eq!(header.version, 1); - let _trie = parser.parse().unwrap(); + let _trie = match parser.parse() { + Ok(trie) => trie, + Err(err) => panic!("{}", err), + }; } #[test] fn simple_instructions_are_parsed_correctly() { + init(); + let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); let instrs = parse_just_to_instructions(bytes); From e996a9f4dc60927fc1231b224b5b66f3173bacdb Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 19:35:55 -0600 Subject: [PATCH 069/208] Preparing to impl logic to convert compact to partial tries --- Cargo.toml | 1 + src/compact_prestate_processing.rs | 48 ++++++++++++++++++++++++------ 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 7d803d373..f71c5d780 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" [dependencies] ciborium = "0.2.1" ciborium-io = "0.2.1" +enum-as-inner = "0.6.0" enumn = "0.1.12" eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" diff --git a/src/compact_prestate_processing.rs b/src/compact_prestate_processing.rs index fcff7e3d5..8938cdca4 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact_prestate_processing.rs @@ -10,6 +10,7 @@ use std::{ iter, }; +use enum_as_inner::EnumAsInner; use eth_trie_utils::{ nibbles::{BytesToNibblesError, Nibbles}, partial_trie::HashedPartialTrie, @@ -103,7 +104,7 @@ enum Opcode { EmptyRoot = 0x06, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, EnumAsInner)] pub enum WitnessEntry { Instruction(Instruction), Node(NodeEntry), @@ -157,7 +158,7 @@ impl From for WitnessEntry { } #[derive(Clone, Debug)] -enum NodeEntry { +pub(crate) enum NodeEntry { Account(AccountNodeData), Branch([Option>; 16]), Code(Vec), @@ -280,19 +281,13 @@ impl ParserState { } match self.entries.len() { - 1 => Self::create_partial_trie_from_remaining_witness_elem(self.entries.pop().unwrap()), + 1 => create_partial_trie_from_remaining_witness_elem(self.entries.pop().unwrap()), _ => Err(CompactParsingError::NonSingleEntryAfterProcessing( self.entries, )), } } - fn create_partial_trie_from_remaining_witness_elem( - _remaining_entry: WitnessEntry, - ) -> CompactParsingResult { - todo!(); - } - fn apply_rules_to_witness_entries( &mut self, entry_buf: &mut Vec, @@ -864,6 +859,41 @@ fn 
try_get_node_entry_from_witness_entry(entry: &WitnessEntry) -> Option<&NodeEn } } +// TODO: Consider moving this to a separate module... +pub(crate) fn create_partial_trie_from_remaining_witness_elem( + remaining_entry: WitnessEntry, +) -> CompactParsingResult { + let remaining_node = remaining_entry + .into_node() + .expect("Final node in compact entries was not a node! This is a bug!"); + let mut trie = HashedPartialTrie::default(); + + create_partial_trie_from_remaining_witness_elem_rec( + Nibbles::default(), + &remaining_node, + &mut trie, + )?; + + Ok(trie) +} + +pub(crate) fn create_partial_trie_from_remaining_witness_elem_rec( + _curr_key: Nibbles, + curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + match curr_node { + NodeEntry::Account(_) => todo!(), + NodeEntry::Branch(_) => todo!(), + NodeEntry::Code(_) => todo!(), + NodeEntry::Empty => todo!(), + NodeEntry::Hash(_) => todo!(), + NodeEntry::Leaf(_, _) => todo!(), + NodeEntry::Extension(_, _) => todo!(), + NodeEntry::Value(_) => todo!(), + } +} + #[derive(Debug)] enum TraverserDirection { Forwards, From 6d7467486767e0422ebba8f97500fb87630808fc Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 19:36:50 -0600 Subject: [PATCH 070/208] Moved compact logic to seperate directory - Preparing to split into multiple modules as it's getting too large. --- src/{ => compact}/compact_prestate_processing.rs | 4 ++-- src/compact/mod.rs | 1 + src/lib.rs | 2 +- src/processed_block_trace.rs | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) rename src/{ => compact}/compact_prestate_processing.rs (99%) create mode 100644 src/compact/mod.rs diff --git a/src/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs similarity index 99% rename from src/compact_prestate_processing.rs rename to src/compact/compact_prestate_processing.rs index 8938cdca4..264d612e8 100644 --- a/src/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -930,8 +930,8 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult Date: Thu, 26 Oct 2023 19:41:40 -0600 Subject: [PATCH 071/208] Started a seperate module for compact to partial trie logic --- src/compact/compact_prestate_processing.rs | 44 +++------------------- src/compact/compact_to_partial_trie.rs | 37 ++++++++++++++++++ src/compact/mod.rs | 1 + 3 files changed, 43 insertions(+), 39 deletions(-) create mode 100644 src/compact/compact_to_partial_trie.rs diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 264d612e8..b70c1b8a7 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -20,6 +20,7 @@ use log::trace; use serde::{de::DeserializeOwned, Deserialize}; use thiserror::Error; +use super::compact_to_partial_trie::create_partial_trie_from_remaining_witness_elem; use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; pub type CompactParsingResult = Result; @@ -77,7 +78,7 @@ pub enum CompactParsingError { } #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] -struct Key { +pub(super) struct Key { nibbles: Nibbles, } @@ -185,7 +186,7 @@ impl Display for NodeEntry { } #[derive(Clone, Debug)] -struct ValueNodeData(Vec); +pub(super) struct ValueNodeData(Vec); impl From> for ValueNodeData { fn from(v: Vec) -> Self { @@ -194,7 +195,7 @@ impl From> for ValueNodeData { } #[derive(Clone, Debug)] -enum LeafNodeData { +pub(super) enum LeafNodeData { Value(ValueNodeData), Account(AccountNodeData), } 
@@ -206,7 +207,7 @@ enum AccountNodeCode { } #[derive(Clone, Debug)] -struct AccountNodeData { +pub(super) struct AccountNodeData { nonce: Nonce, balance: Balance, storage_root: Option, @@ -859,41 +860,6 @@ fn try_get_node_entry_from_witness_entry(entry: &WitnessEntry) -> Option<&NodeEn } } -// TODO: Consider moving this to a separate module... -pub(crate) fn create_partial_trie_from_remaining_witness_elem( - remaining_entry: WitnessEntry, -) -> CompactParsingResult { - let remaining_node = remaining_entry - .into_node() - .expect("Final node in compact entries was not a node! This is a bug!"); - let mut trie = HashedPartialTrie::default(); - - create_partial_trie_from_remaining_witness_elem_rec( - Nibbles::default(), - &remaining_node, - &mut trie, - )?; - - Ok(trie) -} - -pub(crate) fn create_partial_trie_from_remaining_witness_elem_rec( - _curr_key: Nibbles, - curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, -) -> CompactParsingResult<()> { - match curr_node { - NodeEntry::Account(_) => todo!(), - NodeEntry::Branch(_) => todo!(), - NodeEntry::Code(_) => todo!(), - NodeEntry::Empty => todo!(), - NodeEntry::Hash(_) => todo!(), - NodeEntry::Leaf(_, _) => todo!(), - NodeEntry::Extension(_, _) => todo!(), - NodeEntry::Value(_) => todo!(), - } -} - #[derive(Debug)] enum TraverserDirection { Forwards, diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs new file mode 100644 index 000000000..f137934b0 --- /dev/null +++ b/src/compact/compact_to_partial_trie.rs @@ -0,0 +1,37 @@ +use eth_trie_utils::{nibbles::Nibbles, partial_trie::HashedPartialTrie}; + +use super::compact_prestate_processing::{CompactParsingResult, NodeEntry, WitnessEntry}; + +pub(super) fn create_partial_trie_from_remaining_witness_elem( + remaining_entry: WitnessEntry, +) -> CompactParsingResult { + let remaining_node = remaining_entry + .into_node() + .expect("Final node in compact entries was not a node! 
This is a bug!"); + let mut trie = HashedPartialTrie::default(); + + create_partial_trie_from_remaining_witness_elem_rec( + Nibbles::default(), + &remaining_node, + &mut trie, + )?; + + Ok(trie) +} + +pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( + _curr_key: Nibbles, + curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + match curr_node { + NodeEntry::Account(_) => todo!(), + NodeEntry::Branch(_) => todo!(), + NodeEntry::Code(_) => todo!(), + NodeEntry::Empty => todo!(), + NodeEntry::Hash(_) => todo!(), + NodeEntry::Leaf(_, _) => todo!(), + NodeEntry::Extension(_, _) => todo!(), + NodeEntry::Value(_) => todo!(), + } +} diff --git a/src/compact/mod.rs b/src/compact/mod.rs index 6ccfda67d..3343b1b0d 100644 --- a/src/compact/mod.rs +++ b/src/compact/mod.rs @@ -1 +1,2 @@ pub(crate) mod compact_prestate_processing; +mod compact_to_partial_trie; From 6d21046099729c3c4e7081caf9b5fd42cf706ad4 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 26 Oct 2023 19:46:11 -0600 Subject: [PATCH 072/208] Boilerplate for compact nodes --- src/compact/compact_to_partial_trie.rs | 84 +++++++++++++++++++++++--- 1 file changed, 74 insertions(+), 10 deletions(-) diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index f137934b0..74c7ae73a 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -20,18 +20,82 @@ pub(super) fn create_partial_trie_from_remaining_witness_elem( } pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( - _curr_key: Nibbles, + curr_key: Nibbles, curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, + p_trie: &mut HashedPartialTrie, ) -> CompactParsingResult<()> { match curr_node { - NodeEntry::Account(_) => todo!(), - NodeEntry::Branch(_) => todo!(), - NodeEntry::Code(_) => todo!(), - NodeEntry::Empty => todo!(), - NodeEntry::Hash(_) => todo!(), - NodeEntry::Leaf(_, _) => todo!(), - NodeEntry::Extension(_, _) => todo!(), - NodeEntry::Value(_) => todo!(), + NodeEntry::Account(_) => process_account(curr_key, curr_node, p_trie), + NodeEntry::Branch(_) => process_branch(curr_key, curr_node, p_trie), + NodeEntry::Code(_) => process_code(curr_key, curr_node, p_trie), + NodeEntry::Empty => process_empty(curr_key, curr_node, p_trie), + NodeEntry::Hash(_) => process_hash(curr_key, curr_node, p_trie), + NodeEntry::Leaf(_, _) => process_leaf(curr_key, curr_node, p_trie), + NodeEntry::Extension(_, _) => process_extension(curr_key, curr_node, p_trie), + NodeEntry::Value(_) => process_value(curr_key, curr_node, p_trie), } } + +fn process_account( + _curr_key: Nibbles, + _curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + todo!() +} + +fn process_branch( + _curr_key: Nibbles, + _curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + todo!() +} + +fn process_code( + _curr_key: Nibbles, + _curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + todo!() +} + +fn process_empty( + _curr_key: Nibbles, + _curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + todo!() +} + +fn process_hash( + _curr_key: Nibbles, + _curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + todo!() +} + +fn process_leaf( + _curr_key: Nibbles, + _curr_node: &NodeEntry, + _p_trie: &mut HashedPartialTrie, +) -> CompactParsingResult<()> { + todo!() +} + +fn process_extension( + _curr_key: 
Nibbles,
+    _curr_node: &NodeEntry,
+    _p_trie: &mut HashedPartialTrie,
+) -> CompactParsingResult<()> {
+    todo!()
+}
+
+fn process_value(
+    _curr_key: Nibbles,
+    _curr_node: &NodeEntry,
+    _p_trie: &mut HashedPartialTrie,
+) -> CompactParsingResult<()> {
+    todo!()
+}

From 43d83981ef84107eba532011953fb019c80010ca Mon Sep 17 00:00:00 2001
From: BGluth
Date: Mon, 30 Oct 2023 09:45:18 -0600
Subject: [PATCH 073/208] More work on conversion to partial trie

---
 src/compact/compact_prestate_processing.rs |  67 +++++-----
 src/compact/compact_to_partial_trie.rs     | 135 +++++++++++++++------
 src/decoding.rs                            |   9 +-
 src/processed_block_trace.rs               |  46 +++----
 src/trace_protocol.rs                      |  16 +--
 src/types.rs                               |   8 +-
 6 files changed, 181 insertions(+), 100 deletions(-)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index b70c1b8a7..0e60870e0 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -3,7 +3,7 @@
 use std::{
     any::type_name,
     borrow::Borrow,
-    collections::{linked_list::CursorMut, LinkedList},
+    collections::{linked_list::CursorMut, HashMap, LinkedList},
     error::Error,
     fmt::{self, Display},
     io::{Cursor, Read},
@@ -20,8 +20,13 @@ use log::trace;
 use serde::{de::DeserializeOwned, Deserialize};
 use thiserror::Error;
 
-use super::compact_to_partial_trie::create_partial_trie_from_remaining_witness_elem;
-use crate::{trace_protocol::TrieCompact, types::TrieRootHash};
+use super::compact_to_partial_trie::{
+    create_partial_trie_from_remaining_witness_elem, CompactToPartialOutput,
+};
+use crate::{
+    trace_protocol::TrieCompact,
+    types::{CodeHash, TrieRootHash},
+};
 
 pub type CompactParsingResult<T> = Result<T, CompactParsingError>;
 
@@ -123,12 +128,12 @@ impl Display for WitnessEntry {
 
 // TODO: Ignore `NEW_TRIE` for now...
#[derive(Clone, Debug, Eq, PartialEq)] enum Instruction { - Leaf(Key, RawValue), - Extension(Key), + Leaf(Nibbles, RawValue), + Extension(Nibbles), Branch(BranchMask), Hash(HashValue), Code(RawCode), - AccountLeaf(Key, Nonce, Balance, HasCode, HasStorage), + AccountLeaf(Nibbles, Nonce, Balance, HasCode, HasStorage), EmptyRoot, } @@ -165,8 +170,8 @@ pub(crate) enum NodeEntry { Code(Vec), Empty, Hash(HashValue), - Leaf(Key, LeafNodeData), - Extension(Key, Box), + Leaf(Nibbles, LeafNodeData), + Extension(Nibbles, Box), Value(ValueNodeData), } @@ -186,7 +191,7 @@ impl Display for NodeEntry { } #[derive(Clone, Debug)] -pub(super) struct ValueNodeData(Vec); +pub(super) struct ValueNodeData(pub(super) Vec); impl From> for ValueNodeData { fn from(v: Vec) -> Self { @@ -201,17 +206,17 @@ pub(super) enum LeafNodeData { } #[derive(Clone, Debug)] -enum AccountNodeCode { +pub(super) enum AccountNodeCode { CodeNode(Vec), HashNode(TrieRootHash), } #[derive(Clone, Debug)] pub(super) struct AccountNodeData { - nonce: Nonce, - balance: Balance, - storage_root: Option, - account_node_code: Option, + pub(super) nonce: Nonce, + pub(super) balance: Balance, + pub(super) storage_root: Option, + pub(super) account_node_code: Option, } impl AccountNodeData { @@ -270,7 +275,7 @@ impl ParserState { Ok((header, p_state)) } - fn parse(mut self) -> CompactParsingResult { + fn parse(mut self) -> CompactParsingResult { let mut entry_buf = Vec::new(); loop { @@ -328,7 +333,7 @@ impl ParserState { Self::traverser_replace_prev_n_nodes_entry_helper( 1, traverser, - NodeEntry::Leaf(k.clone(), LeafNodeData::Value(v.clone().into())), + NodeEntry::Leaf(k, LeafNodeData::Value(v.clone().into())), ) } WitnessEntry::Instruction(Instruction::Extension(k)) => { @@ -338,7 +343,7 @@ impl ParserState { WitnessEntry::Node(node) => Self::traverser_replace_prev_n_nodes_entry_helper( 2, traverser, - NodeEntry::Extension(k.clone(), Box::new(node.clone())), + NodeEntry::Extension(k, Box::new(node.clone())), ), _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), } @@ -365,7 +370,7 @@ impl ParserState { let account_leaf_data = AccountNodeData::new(n, b, s_root, account_node_code); let leaf_node = WitnessEntry::Node(NodeEntry::Leaf( - k.clone(), + k, LeafNodeData::Account(account_leaf_data), )); traverser.replace_prev_n_entries_with_single_entry(n_nodes_to_replace, leaf_node); @@ -599,7 +604,7 @@ impl WitnessBytes { } fn process_leaf(&mut self) -> CompactParsingResult<()> { - let key = Key::new(self.byte_cursor.read_cbor_byte_array_to_vec()?)?; + let key = Nibbles::from_bytes_be(&self.byte_cursor.read_cbor_byte_array_to_vec()?)?; let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?; self.push_entry(Instruction::Leaf(key, value_raw)); @@ -607,7 +612,7 @@ impl WitnessBytes { } fn process_extension(&mut self) -> CompactParsingResult<()> { - let key = Key::new(self.byte_cursor.read_cbor_byte_array_to_vec()?)?; + let key = Nibbles::from_bytes_be(&self.byte_cursor.read_cbor_byte_array_to_vec()?)?; self.push_entry(Instruction::Extension(key)); Ok(()) @@ -635,7 +640,7 @@ impl WitnessBytes { } fn process_account_leaf(&mut self) -> CompactParsingResult<()> { - let key = Key::new(self.byte_cursor.read_cbor_byte_array_to_vec()?)?; + let key = Nibbles::from_bytes_be(&self.byte_cursor.read_cbor_byte_array_to_vec()?)?; let nonce = self.byte_cursor.read_t()?; let balance = self.byte_cursor.read_t()?; let has_code = self.byte_cursor.read_t()?; @@ -869,11 +874,17 @@ enum TraverserDirection { pub(crate) fn 
process_compact_prestate( state: TrieCompact, -) -> CompactParsingResult<(Header, HashedPartialTrie)> { +) -> CompactParsingResult<( + Header, + HashedPartialTrie, + Option>>, +)> { let (header, parser) = ParserState::create_and_extract_header(state.bytes)?; - let trie = parser.parse()?; + let out = parser.parse()?; - Ok((header, trie)) + let extra_code_hash_mappings = (!out.code.is_empty()).then_some(out.code); + + Ok((header, out.trie, extra_code_hash_mappings)) } // TODO: Move behind a feature flag just used for debugging (but probably not @@ -896,7 +907,9 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult Key { + fn h_decode_key(h_bytes: &str) -> Nibbles { let bytes = hex::decode(h_bytes).unwrap(); - Key::new(bytes).unwrap() + Nibbles::from_bytes_be(&bytes).unwrap() } fn h_decode(b_str: &str) -> Vec { diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 74c7ae73a..06653b94d 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -1,70 +1,95 @@ -use eth_trie_utils::{nibbles::Nibbles, partial_trie::HashedPartialTrie}; +use std::collections::HashMap; -use super::compact_prestate_processing::{CompactParsingResult, NodeEntry, WitnessEntry}; +use eth_trie_utils::{ + nibbles::{Nibble, Nibbles}, + partial_trie::{HashedPartialTrie, PartialTrie}, +}; +use plonky2_evm::generation::mpt::AccountRlp; + +use super::compact_prestate_processing::{ + AccountNodeCode, AccountNodeData, CompactParsingResult, LeafNodeData, NodeEntry, WitnessEntry, +}; +use crate::{ + types::{CodeHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, + utils::hash, +}; + +#[derive(Debug, Default)] +pub(super) struct CompactToPartialOutput { + pub(super) trie: HashedPartialTrie, + + // TODO: `code` is ever only available for storage tries, so we should come up with a better + // API that represents this... + pub(super) code: HashMap>, +} pub(super) fn create_partial_trie_from_remaining_witness_elem( remaining_entry: WitnessEntry, -) -> CompactParsingResult { +) -> CompactParsingResult { let remaining_node = remaining_entry .into_node() .expect("Final node in compact entries was not a node! 
This is a bug!"); - let mut trie = HashedPartialTrie::default(); + let mut output = CompactToPartialOutput::default(); create_partial_trie_from_remaining_witness_elem_rec( Nibbles::default(), &remaining_node, - &mut trie, + &mut output, )?; - Ok(trie) + Ok(output) } pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( curr_key: Nibbles, curr_node: &NodeEntry, - p_trie: &mut HashedPartialTrie, + output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { match curr_node { - NodeEntry::Account(_) => process_account(curr_key, curr_node, p_trie), - NodeEntry::Branch(_) => process_branch(curr_key, curr_node, p_trie), - NodeEntry::Code(_) => process_code(curr_key, curr_node, p_trie), - NodeEntry::Empty => process_empty(curr_key, curr_node, p_trie), - NodeEntry::Hash(_) => process_hash(curr_key, curr_node, p_trie), - NodeEntry::Leaf(_, _) => process_leaf(curr_key, curr_node, p_trie), - NodeEntry::Extension(_, _) => process_extension(curr_key, curr_node, p_trie), - NodeEntry::Value(_) => process_value(curr_key, curr_node, p_trie), + NodeEntry::Account(_) => process_account(curr_key, curr_node, output), + NodeEntry::Branch(n) => process_branch(curr_key, n, output), + NodeEntry::Code(c_bytes) => process_code(c_bytes.clone(), output), + NodeEntry::Empty => process_empty(curr_key, curr_node), + NodeEntry::Hash(_) => process_hash(curr_key, curr_node, &mut output.trie), + NodeEntry::Leaf(k, v) => process_leaf(curr_key, k, v, output), + NodeEntry::Extension(_, _) => process_extension(curr_key, curr_node, output), + NodeEntry::Value(_) => process_value(curr_key, curr_node, &mut output.trie), } } fn process_account( _curr_key: Nibbles, _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, + _output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { todo!() } fn process_branch( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, + curr_key: Nibbles, + branch: &[Option>], + output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { - todo!() + for i in 0..16 { + if let Some(child) = &branch[i] { + // TODO: Seriously update `eth_trie_utils` to have a better API... 
+ let mut new_k = curr_key; + new_k.push_nibble_front(i as Nibble); + create_partial_trie_from_remaining_witness_elem_rec(new_k, child, output)?; + } + } + + Ok(()) } -fn process_code( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, -) -> CompactParsingResult<()> { - todo!() +fn process_code(c_bytes: Vec, output: &mut CompactToPartialOutput) -> CompactParsingResult<()> { + let c_hash = hash(&c_bytes); + output.code.insert(c_hash, c_bytes); + + Ok(()) } -fn process_empty( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, -) -> CompactParsingResult<()> { +fn process_empty(_curr_key: Nibbles, _curr_node: &NodeEntry) -> CompactParsingResult<()> { todo!() } @@ -77,17 +102,29 @@ fn process_hash( } fn process_leaf( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, + curr_key: Nibbles, + leaf_key: &Nibbles, + leaf_node_data: &LeafNodeData, + output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { - todo!() + let full_k = curr_key.merge_nibbles(leaf_key); + + let l_val = match leaf_node_data { + LeafNodeData::Value(v_bytes) => v_bytes.0.clone(), + LeafNodeData::Account(acc_data) => { + convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup(acc_data, output) + } + }; + + output.trie.insert(full_k, l_val); + + Ok(()) } fn process_extension( _curr_key: Nibbles, _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, + _output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { todo!() } @@ -99,3 +136,29 @@ fn process_value( ) -> CompactParsingResult<()> { todo!() } + +fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup( + acc_data: &AccountNodeData, + output: &mut CompactToPartialOutput, +) -> Vec { + let code_hash = match &acc_data.account_node_code { + Some(AccountNodeCode::CodeNode(c_bytes)) => { + let c_hash = hash(c_bytes); + output.code.insert(c_hash, c_bytes.clone()); + + c_hash + } + Some(AccountNodeCode::HashNode(c_hash)) => *c_hash, + None => EMPTY_CODE_HASH, + }; + + let account = AccountRlp { + nonce: acc_data.nonce, + balance: acc_data.balance, + storage_root: acc_data.storage_root.unwrap_or(EMPTY_TRIE_HASH), + code_hash, + }; + + // TODO: Avoid the unnecessary allocation... 
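+    // `account` above is the canonical four-field Ethereum account body
+    // (nonce, balance, storage root, code hash), so its RLP encoding is
+    // exactly the value stored in the state-trie leaf.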
+ rlp::encode(&account).into() +} diff --git a/src/decoding.rs b/src/decoding.rs index 977758670..eccae255b 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -9,7 +9,7 @@ use eth_trie_utils::{ partial_trie::{HashedPartialTrie, PartialTrie}, trie_subsets::create_trie_subset, }; -use ethereum_types::{Address, H256, U256}; +use ethereum_types::{Address, U256}; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -20,16 +20,11 @@ use crate::{ processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites}, types::{ BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, - OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, + OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_TRIE_HASH, }, utils::update_val_if_some, }; -const EMPTY_TRIE_HASH: H256 = H256([ - 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, - 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, -]); - pub type TraceParsingResult = Result; #[derive(Debug, Error)] diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index bf745d961..32e6a0dd2 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -9,7 +9,7 @@ use crate::compact::compact_prestate_processing::process_compact_prestate; use crate::decoding::TraceParsingResult; use crate::trace_protocol::{ BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, - SeperateStorageTriesPreImage, SeperateTriePreImage, SeperateTriePreImages, TrieCompact, + SeparateStorageTriesPreImage, SeparateTriePreImage, SeparateTriePreImages, TrieCompact, TxnInfo, }; use crate::types::{ @@ -43,17 +43,21 @@ impl BlockTrace { where F: CodeHashResolveFunc, { + // The compact format is able to provide actual code, so if it does, we should + // take advantage of it. 
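+        // Concretely: prefer byte code bundled in the witness itself, and only
+        // fall back to the caller-supplied `resolve_code_hash_fn` when the hash
+        // is missing from that map (see the closure below).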
let pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); - let code_hash_resolve_f = |c_hash: &_| { - let provided_contract_code_ref = pre_image_data.extra_code_hash_mappings.as_ref(); + let resolve_code_hash_fn = |c_hash: &_| { + let resolve_code_hash_fn_ref = &p_meta.resolve_code_hash_fn; + let extra_code_hash_mappings_ref = &pre_image_data.extra_code_hash_mappings; - provided_contract_code_ref.and_then(|included_c_hash_lookup| { - included_c_hash_lookup + match extra_code_hash_mappings_ref { + Some(m) => m .get(c_hash) .cloned() - .or_else(|| Some((p_meta.resolve_code_hash_fn)(c_hash))) - }).expect("Code hash resolve function should always be able to resolve a code hash to it's byte code but failed to!") + .unwrap_or_else(|| (resolve_code_hash_fn_ref)(c_hash)), + None => (resolve_code_hash_fn_ref)(c_hash), + } }; ProcessedBlockTrace { @@ -62,7 +66,7 @@ impl BlockTrace { txn_info: self .txn_info .into_iter() - .map(|t| t.into_processed_txn_info(&code_hash_resolve_f)) + .map(|t| t.into_processed_txn_info(&resolve_code_hash_fn)) .collect(), } } @@ -78,7 +82,7 @@ fn process_block_trace_trie_pre_images( block_trace_pre_images: BlockTraceTriePreImages, ) -> ProcessedBlockTracePreImages { match block_trace_pre_images { - BlockTraceTriePreImages::Seperate(t) => process_seperate_trie_pre_images(t), + BlockTraceTriePreImages::Separate(t) => process_separate_trie_pre_images(t), BlockTraceTriePreImages::Combined(t) => process_combined_trie_pre_images(t), } } @@ -89,7 +93,7 @@ fn process_combined_trie_pre_images(tries: CombinedPreImages) -> ProcessedBlockT } } -fn process_seperate_trie_pre_images(tries: SeperateTriePreImages) -> ProcessedBlockTracePreImages { +fn process_separate_trie_pre_images(tries: SeparateTriePreImages) -> ProcessedBlockTracePreImages { ProcessedBlockTracePreImages { state: process_state_trie(tries.state), storage: process_storage_tries(tries.storage), @@ -97,37 +101,37 @@ fn process_seperate_trie_pre_images(tries: SeperateTriePreImages) -> ProcessedBl } } -fn process_state_trie(trie: SeperateTriePreImage) -> HashedPartialTrie { +fn process_state_trie(trie: SeparateTriePreImage) -> HashedPartialTrie { match trie { - SeperateTriePreImage::Uncompressed(_) => todo!(), - SeperateTriePreImage::Direct(t) => t.0, + SeparateTriePreImage::Uncompressed(_) => todo!(), + SeparateTriePreImage::Direct(t) => t.0, } } fn process_storage_tries( - trie: SeperateStorageTriesPreImage, + trie: SeparateStorageTriesPreImage, ) -> HashMap { match trie { - SeperateStorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t), - SeperateStorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t), + SeparateStorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t), + SeparateStorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t), } } fn process_single_storage_trie( - _trie: SeperateTriePreImage, + _trie: SeparateTriePreImage, ) -> HashMap { todo!() } fn process_multiple_storage_tries( - _tries: HashMap, + _tries: HashMap, ) -> HashMap { todo!() } -fn process_compact_trie(trie_compact: TrieCompact) -> ProcessedBlockTracePreImages { +fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages { // TODO: Wrap in proper result type... - let (header, trie) = process_compact_prestate(trie_compact).unwrap(); + let (header, trie, extra_code_hash_mappings) = process_compact_prestate(trie).unwrap(); // TODO: Make this into a result... 
assert!(header.version_is_compatible(COMPATIBLE_HEADER_VERSION)); @@ -135,7 +139,7 @@ fn process_compact_trie(trie_compact: TrieCompact) -> ProcessedBlockTracePreImag ProcessedBlockTracePreImages { state: trie, storage: todo!(), - extra_code_hash_mappings: todo!(), + extra_code_hash_mappings, } } diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 0dccdee0d..9f0cb92c7 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -48,20 +48,20 @@ pub struct BlockTrace { /// Minimal hashed out tries needed by all txns in the block. #[derive(Debug, Serialize, Deserialize)] pub enum BlockTraceTriePreImages { - Seperate(SeperateTriePreImages), + Separate(SeparateTriePreImages), Combined(CombinedPreImages), } /// State/Storage trie pre-images that are seperate. #[derive(Debug, Serialize, Deserialize)] -pub struct SeperateTriePreImages { - pub state: SeperateTriePreImage, - pub storage: SeperateStorageTriesPreImage, +pub struct SeparateTriePreImages { + pub state: SeparateTriePreImage, + pub storage: SeparateStorageTriesPreImage, } /// A trie pre-image where state & storage are seperate. #[derive(Debug, Serialize, Deserialize)] -pub enum SeperateTriePreImage { +pub enum SeparateTriePreImage { Uncompressed(TrieUncompressed), Direct(TrieDirect), } @@ -91,15 +91,15 @@ pub struct TrieCompact { pub struct TrieDirect(pub HashedPartialTrie); #[derive(Debug, Serialize, Deserialize)] -pub enum SeperateStorageTriesPreImage { +pub enum SeparateStorageTriesPreImage { /// A single hash map that contains all node hashes from all storage tries /// involved in the block. We can reconstruct the individual storage tries /// by the storage root hash in the state entries. - SingleTrie(SeperateTriePreImage), + SingleTrie(SeparateTriePreImage), /// Each storage trie is sent over in a hashmap with the hashed account /// address as a key. - MultipleTries(HashMap), + MultipleTries(HashMap), } /// Info specific to txns in the block. 
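
The `types.rs` hunk below exposes two well-known keccak256 constants:
`EMPTY_CODE_HASH` is the hash of the empty byte string, and `EMPTY_TRIE_HASH`
is the hash of the RLP null value. A minimal sanity-check sketch, assuming
`crate::utils::hash` is keccak256 over a byte slice (as its call sites in
`compact_to_partial_trie.rs` suggest):

    #[test]
    fn empty_hash_constants_are_keccak_preimages() {
        use crate::{types::{EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, utils::hash};

        // keccak256 of zero bytes -> the hash of empty code.
        assert_eq!(hash(b""), EMPTY_CODE_HASH);
        // keccak256 of the RLP null value (a single 0x80 byte) -> the root of
        // the empty trie.
        assert_eq!(hash(&[0x80]), EMPTY_TRIE_HASH);
    }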
diff --git a/src/types.rs b/src/types.rs index 4dfd58a99..86e569f68 100644 --- a/src/types.rs +++ b/src/types.rs @@ -22,8 +22,14 @@ pub type TxnIdx = usize; pub trait CodeHashResolveFunc = Fn(&CodeHash) -> Vec; +// 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 +pub(crate) const EMPTY_CODE_HASH: H256 = H256([ + 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, + 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112, +]); + /// 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421 -const EMPTY_TRIE_HASH: H256 = H256([ +pub(crate) const EMPTY_TRIE_HASH: H256 = H256([ 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, ]); From 4f8879028cfaa38b3ab21b4db5b624a71278a0c7 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 30 Oct 2023 10:18:09 -0600 Subject: [PATCH 074/208] Ext node logic & removed `Value` nodes --- src/compact/compact_prestate_processing.rs | 2 -- src/compact/compact_to_partial_trie.rs | 15 +++++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 0e60870e0..bcd27301c 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -172,7 +172,6 @@ pub(crate) enum NodeEntry { Hash(HashValue), Leaf(Nibbles, LeafNodeData), Extension(Nibbles, Box), - Value(ValueNodeData), } impl Display for NodeEntry { @@ -185,7 +184,6 @@ impl Display for NodeEntry { NodeEntry::Hash(_) => write!(f, "Hash"), NodeEntry::Leaf(_, _) => write!(f, "Leaf"), NodeEntry::Extension(_, _) => write!(f, "Extension"), - NodeEntry::Value(_) => write!(f, "Value"), } } } diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 06653b94d..51e19ce13 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -52,8 +52,7 @@ pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( NodeEntry::Empty => process_empty(curr_key, curr_node), NodeEntry::Hash(_) => process_hash(curr_key, curr_node, &mut output.trie), NodeEntry::Leaf(k, v) => process_leaf(curr_key, k, v, output), - NodeEntry::Extension(_, _) => process_extension(curr_key, curr_node, output), - NodeEntry::Value(_) => process_value(curr_key, curr_node, &mut output.trie), + NodeEntry::Extension(k, c) => process_extension(curr_key, k, c, output), } } @@ -122,11 +121,15 @@ fn process_leaf( } fn process_extension( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _output: &mut CompactToPartialOutput, + curr_key: Nibbles, + ext_node_key: &Nibbles, + ext_child: &NodeEntry, + output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { - todo!() + let new_k = curr_key.merge_nibbles(ext_node_key); + create_partial_trie_from_remaining_witness_elem_rec(new_k, ext_child, output)?; + + Ok(()) } fn process_value( From 3afb3a00b1258cd4bb70d984d699a5738737366c Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 30 Oct 2023 10:29:04 -0600 Subject: [PATCH 075/208] Finished initial impl of partial trie conversion --- src/compact/compact_prestate_processing.rs | 2 -- src/compact/compact_to_partial_trie.rs | 42 +++++++++------------- 2 files changed, 16 insertions(+), 28 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index bcd27301c..c9942ef00 100644 --- a/src/compact/compact_prestate_processing.rs +++ 
b/src/compact/compact_prestate_processing.rs @@ -165,7 +165,6 @@ impl From for WitnessEntry { #[derive(Clone, Debug)] pub(crate) enum NodeEntry { - Account(AccountNodeData), Branch([Option>; 16]), Code(Vec), Empty, @@ -177,7 +176,6 @@ pub(crate) enum NodeEntry { impl Display for NodeEntry { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - NodeEntry::Account(_) => write!(f, "Account"), NodeEntry::Branch(_) => write!(f, "Branch"), NodeEntry::Code(_) => write!(f, "Code"), NodeEntry::Empty => write!(f, "Empty"), diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 51e19ce13..6dcab9d45 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -10,7 +10,7 @@ use super::compact_prestate_processing::{ AccountNodeCode, AccountNodeData, CompactParsingResult, LeafNodeData, NodeEntry, WitnessEntry, }; use crate::{ - types::{CodeHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, + types::{CodeHash, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, utils::hash, }; @@ -40,30 +40,23 @@ pub(super) fn create_partial_trie_from_remaining_witness_elem( Ok(output) } +// TODO: Consider putting in some asserts that invalid nodes are not appearing +// in the wrong trie type (eg. account ) pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( curr_key: Nibbles, curr_node: &NodeEntry, output: &mut CompactToPartialOutput, ) -> CompactParsingResult<()> { match curr_node { - NodeEntry::Account(_) => process_account(curr_key, curr_node, output), NodeEntry::Branch(n) => process_branch(curr_key, n, output), NodeEntry::Code(c_bytes) => process_code(c_bytes.clone(), output), - NodeEntry::Empty => process_empty(curr_key, curr_node), - NodeEntry::Hash(_) => process_hash(curr_key, curr_node, &mut output.trie), + NodeEntry::Empty => process_empty(), + NodeEntry::Hash(h) => process_hash(curr_key, *h, &mut output.trie), NodeEntry::Leaf(k, v) => process_leaf(curr_key, k, v, output), NodeEntry::Extension(k, c) => process_extension(curr_key, k, c, output), } } -fn process_account( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _output: &mut CompactToPartialOutput, -) -> CompactParsingResult<()> { - todo!() -} - fn process_branch( curr_key: Nibbles, branch: &[Option>], @@ -88,16 +81,21 @@ fn process_code(c_bytes: Vec, output: &mut CompactToPartialOutput) -> Compac Ok(()) } -fn process_empty(_curr_key: Nibbles, _curr_node: &NodeEntry) -> CompactParsingResult<()> { - todo!() +fn process_empty() -> CompactParsingResult<()> { + // Nothing to do. + Ok(()) } fn process_hash( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, + curr_key: Nibbles, + hash: TrieRootHash, + p_trie: &mut HashedPartialTrie, ) -> CompactParsingResult<()> { - todo!() + // If we see a hash node at this stage, it must be a hashed out node in the + // trie. + p_trie.insert(curr_key, hash); + + Ok(()) } fn process_leaf( @@ -132,14 +130,6 @@ fn process_extension( Ok(()) } -fn process_value( - _curr_key: Nibbles, - _curr_node: &NodeEntry, - _p_trie: &mut HashedPartialTrie, -) -> CompactParsingResult<()> { - todo!() -} - fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup( acc_data: &AccountNodeData, output: &mut CompactToPartialOutput, From eacf7a6ebeb513cbe846d080fea9d993ec57cf19 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 31 Oct 2023 10:00:53 -0600 Subject: [PATCH 076/208] Now parses the simple payload into our trie format! 
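
To make the end-to-end shape concrete, a minimal usage sketch (signatures as
introduced by the previous commits; `hash()` comes from the `PartialTrie`
trait):

    let (header, trie, code) = process_compact_prestate(TrieCompact { bytes })?;
    assert!(header.version_is_compatible(1));
    let root = trie.hash();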
--- src/compact/compact_prestate_processing.rs | 80 ++++++++++++++-------- 1 file changed, 53 insertions(+), 27 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index c9942ef00..eca26bdd6 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -2,7 +2,6 @@ use std::{ any::type_name, - borrow::Borrow, collections::{linked_list::CursorMut, HashMap, LinkedList}, error::Error, fmt::{self, Display}, @@ -12,7 +11,7 @@ use std::{ use enum_as_inner::EnumAsInner; use eth_trie_utils::{ - nibbles::{BytesToNibblesError, Nibbles}, + nibbles::{FromHexPrefixError, Nibbles}, partial_trie::HashedPartialTrie, }; use ethereum_types::{H256, U256}; @@ -79,24 +78,7 @@ pub enum CompactParsingError { PrecedingNonNodeEntryFoundWhenProcessingRule(usize, &'static str, String, Vec), #[error("Unable to create key nibbles from bytes {0}")] - KeyError(#[from] BytesToNibblesError), -} - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq)] -pub(super) struct Key { - nibbles: Nibbles, -} - -impl Key { - fn new>(bytes: B) -> CompactParsingResult { - let bytes = bytes.borrow(); - let mut nibbles = Nibbles::from_bytes_be(bytes)?; - - // Drop flag bits. - nibbles.pop_next_nibble_front(); - - Ok(Self { nibbles }) - } + KeyError(#[from] FromHexPrefixError), } #[derive(Debug, enumn::N)] @@ -233,7 +215,7 @@ impl AccountNodeData { #[derive(Debug, Deserialize)] struct LeafData { - key: Key, + key: Nibbles, value: Vec, } @@ -600,7 +582,7 @@ impl WitnessBytes { } fn process_leaf(&mut self) -> CompactParsingResult<()> { - let key = Nibbles::from_bytes_be(&self.byte_cursor.read_cbor_byte_array_to_vec()?)?; + let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?; self.push_entry(Instruction::Leaf(key, value_raw)); @@ -608,7 +590,7 @@ impl WitnessBytes { } fn process_extension(&mut self) -> CompactParsingResult<()> { - let key = Nibbles::from_bytes_be(&self.byte_cursor.read_cbor_byte_array_to_vec()?)?; + let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); self.push_entry(Instruction::Extension(key)); Ok(()) @@ -636,7 +618,7 @@ impl WitnessBytes { } fn process_account_leaf(&mut self) -> CompactParsingResult<()> { - let key = Nibbles::from_bytes_be(&self.byte_cursor.read_cbor_byte_array_to_vec()?)?; + let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); let nonce = self.byte_cursor.read_t()?; let balance = self.byte_cursor.read_t()?; let has_code = self.byte_cursor.read_t()?; @@ -901,11 +883,55 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult Nibbles { + let flags = bytes[0]; + let is_odd = (flags & 0b00000001) != 0; + let has_term = (flags & 0b00000010) != 0; + + let mut key = Nibbles::default(); + + let actual_key_bytes = match has_term { + false => &bytes[1..], + true => &bytes[1..(bytes.len() - 1)], + }; + + if actual_key_bytes.is_empty() { + // Key is just 0. + return key; + } + + let final_byte_idx = actual_key_bytes.len() - 1; + + // The compact key format is kind of weird. We need to read the nibbles + // backwards from how we expect it internally. + for byte in &actual_key_bytes[..(final_byte_idx)] { + let high_nib = (byte & 0b11110000) >> 4; + let low_nib = byte & 0b00001111; + + key.push_nibble_front(high_nib); + key.push_nibble_front(low_nib); + } + + // The final byte we might need to ignore the last nibble, so we need to do it + // separately. 
+    let final_byte = actual_key_bytes[final_byte_idx];
+    let high_nib = (final_byte & 0b11110000) >> 4;
+    key.push_nibble_front(high_nib);
+
+    if !is_odd {
+        let low_nib = final_byte & 0b00001111;
+        key.push_nibble_front(low_nib);
+    }
+
+    key
+}
+
 #[cfg(test)]
 mod tests {
     use eth_trie_utils::nibbles::Nibbles;
 
-    use super::{parse_just_to_instructions, Instruction};
+    use super::{key_bytes_to_nibbles, parse_just_to_instructions, Instruction};
     use crate::compact::compact_prestate_processing::ParserState;
 
     const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012";
@@ -916,7 +942,7 @@ mod tests {
 
     fn h_decode_key(h_bytes: &str) -> Nibbles {
         let bytes = hex::decode(h_bytes).unwrap();
-        Nibbles::from_bytes_be(&bytes).unwrap()
+        key_bytes_to_nibbles(&bytes)
     }
 
     fn h_decode(b_str: &str) -> Vec<u8> {
@@ -931,7 +957,7 @@ mod tests {
         let (header, parser) = ParserState::create_and_extract_header(bytes).unwrap();
         assert_eq!(header.version, 1);
 
-        let _trie = match parser.parse() {
+        let _output = match parser.parse() {
            Ok(trie) => trie,
            Err(err) => panic!("{}", err),
        };

From 059095ab94b053c46d9281e9aa24cc8b3378f753 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Tue, 31 Oct 2023 15:01:41 -0600
Subject: [PATCH 077/208] Fixed keys not being parsed correctly

- Also fixed branch nodes being processed backwards during pattern
  matching.
---
 src/compact/compact_prestate_processing.rs | 29 ++++++++++++++--------
 src/compact/compact_to_partial_trie.rs     |  2 +-
 2 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index eca26bdd6..af09b2937 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -390,7 +390,7 @@ impl ParserState {
             .take(BRANCH_MAX_CHILDREN)
         {
             if mask as usize & (1 << i) != 0 {
-                let entry_to_check = &buf[curr_traverser_node_idx];
+                let entry_to_check = &buf[buf.len() - 1 - curr_traverser_node_idx];
                 let node_entry = try_get_node_entry_from_witness_entry(entry_to_check)
                     .ok_or_else(|| {
                         let n_entries_behind_cursor =
@@ -885,19 +885,26 @@ fn parse_just_to_instructions(bytes: Vec<u8>) -> CompactParsingResult<Vec<Instruction>>
 
 // TODO: This could probably be made a bit faster...
 fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles {
+    let mut key = Nibbles::default();
+
+    // I have no idea why Erigon is doing this with their keys, as I don't think
+    // this is part of the yellow paper at all?
+    let is_just_term_byte = bytes.len() == 1 && bytes[0] == 0x10;
+    if is_just_term_byte {
+        return key;
+    }
+
     let flags = bytes[0];
     let is_odd = (flags & 0b00000001) != 0;
     let has_term = (flags & 0b00000010) != 0;
 
-    let mut key = Nibbles::default();
-
+    // ... Term bit seems to have no effect on the key?
     let actual_key_bytes = match has_term {
         false => &bytes[1..],
-        true => &bytes[1..(bytes.len() - 1)],
+        true => &bytes[1..],
     };
 
-    if actual_key_bytes.is_empty() {
-        // Key is just 0.
+    if actual_key_bytes.is_empty() || is_just_term_byte {
         return key;
     }
 
     let final_byte_idx = actual_key_bytes.len() - 1;
 
     // The compact key format is kind of weird. We need to read the nibbles
     // backwards from how we expect it internally.
     for byte in &actual_key_bytes[..(final_byte_idx)] {
         let high_nib = (byte & 0b11110000) >> 4;
         let low_nib = byte & 0b00001111;
 
-        key.push_nibble_front(high_nib);
-        key.push_nibble_front(low_nib);
+        key.push_nibble_back(high_nib);
+        key.push_nibble_back(low_nib);
     }
 
     // The final byte we might need to ignore the last nibble, so we need to do it
     // separately.
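     // For an even-length key the final byte carries two data nibbles; for an
     // odd-length key only its high nibble is data, so the low nibble gets
     // dropped.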
let final_byte = actual_key_bytes[final_byte_idx]; let high_nib = (final_byte & 0b11110000) >> 4; - key.push_nibble_front(high_nib); + key.push_nibble_back(high_nib); if !is_odd { let low_nib = final_byte & 0b00001111; - key.push_nibble_front(low_nib); + key.push_nibble_back(low_nib); } key @@ -929,7 +936,7 @@ fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles { #[cfg(test)] mod tests { - use eth_trie_utils::nibbles::Nibbles; + use eth_trie_utils::{nibbles::Nibbles, partial_trie::PartialTrie}; use super::{key_bytes_to_nibbles, parse_just_to_instructions, Instruction}; use crate::compact::compact_prestate_processing::ParserState; diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 6dcab9d45..7e41e214c 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -66,7 +66,7 @@ fn process_branch( if let Some(child) = &branch[i] { // TODO: Seriously update `eth_trie_utils` to have a better API... let mut new_k = curr_key; - new_k.push_nibble_front(i as Nibble); + new_k.push_nibble_back(i as Nibble); create_partial_trie_from_remaining_witness_elem_rec(new_k, child, output)?; } } From 130c224c488722d9ac69a6a57b7bf64b53f2024d Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 31 Oct 2023 15:41:41 -0600 Subject: [PATCH 078/208] Added tests for more complex payloads --- src/compact/compact_prestate_processing.rs | 20 ++++++++++++++- src/compact/complex_test_payloads.rs | 29 ++++++++++++++++++++++ src/compact/mod.rs | 3 +++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 src/compact/complex_test_payloads.rs diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index af09b2937..5e62a08d1 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -939,7 +939,10 @@ mod tests { use eth_trie_utils::{nibbles::Nibbles, partial_trie::PartialTrie}; use super::{key_bytes_to_nibbles, parse_just_to_instructions, Instruction}; - use crate::compact::compact_prestate_processing::ParserState; + use crate::compact::{ + compact_prestate_processing::ParserState, + complex_test_payloads::{TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3}, + }; const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012"; @@ -997,4 +1000,19 @@ mod tests { assert_eq!(expected_instr, instrs[i]) } } + + #[test] + fn complex_payload_1() { + TEST_PAYLOAD_1.parse_and_check_hash_matches(); + } + + #[test] + fn complex_payload_2() { + TEST_PAYLOAD_2.parse_and_check_hash_matches(); + } + + #[test] + fn complex_payload_3() { + TEST_PAYLOAD_3.parse_and_check_hash_matches(); + } } diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs new file mode 100644 index 000000000..c5821f1ab --- /dev/null +++ b/src/compact/complex_test_payloads.rs @@ -0,0 +1,29 @@ +use eth_trie_utils::partial_trie::PartialTrie; + +use super::compact_prestate_processing::process_compact_prestate; +use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; + +pub(crate) const TEST_PAYLOAD_1: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79084a021e19e0c9bab2400000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d190841010219102005582103876da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6100841010558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6a0673c691edfa4c4528323986bb43c579316f436ff6f8b4ac70854bbd95340b" }; +pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a021e0c000250c782fa00055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480de0b6b3a76400000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "e779761e7f0cf4bb2b5e5a2ebac65406d3a7516d46798040803488825a01c19c" }; +pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; + +pub(crate) struct TestProtocolInputAndRoot { + pub(crate) byte_str: &'static str, + pub(crate) root_str: &'static str, +} + +impl TestProtocolInputAndRoot { + pub(crate) fn parse_and_check_hash_matches(self) { + let protocol_bytes = hex::decode(self.byte_str).unwrap(); + let expected_hash = TrieRootHash::from_slice(&hex::decode(self.root_str).unwrap()); + + let (header, trie, _) = process_compact_prestate(TrieCompact { + bytes: protocol_bytes, + }) + .unwrap(); + let trie_hash = trie.hash(); + + assert!(header.version_is_compatible(1)); + assert_eq!(trie_hash, expected_hash); + } +} diff --git a/src/compact/mod.rs b/src/compact/mod.rs index 
3343b1b0d..4fe23ab33 100644 --- a/src/compact/mod.rs +++ b/src/compact/mod.rs @@ -1,2 +1,5 @@ pub(crate) mod compact_prestate_processing; mod compact_to_partial_trie; + +#[cfg(test)] +pub(crate) mod complex_test_payloads; From 6f40b549461b6ad1b226466d2bd303c136e20d65 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 10:35:59 -0700 Subject: [PATCH 079/208] Made a few errors print more info - A few errors really needed to give more output on where in the incoming bytes the parsing problem was. --- src/compact/compact_prestate_processing.rs | 63 +++++++++++++++++++--- src/compact/complex_test_payloads.rs | 8 +-- 2 files changed, 61 insertions(+), 10 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 5e62a08d1..d430b6daf 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -42,6 +42,7 @@ type RawCode = Vec; const MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE: usize = 3; const BRANCH_MAX_CHILDREN: usize = 16; +const CURSOR_ERROR_BYTES_MAX_LEN: usize = 10; #[derive(Debug, Error)] pub enum CompactParsingError { @@ -54,11 +55,13 @@ pub enum CompactParsingError { #[error("Reached the end of the byte stream when we still expected more data")] UnexpectedEndOfStream, - #[error("Unable to parse an expected byte vector (error: {0})")] - InvalidByteVector(String), + #[error("Unable to parse an expected byte vector (error: {0}). Cursor error info: {1}")] + InvalidByteVector(String, CursorBytesErrorInfo), - #[error("Unable to parse the type \"{0}\" from cbor bytes {1}")] - InvalidBytesForType(&'static str, String, String), + #[error( + "Unable to parse the type \"{0}\" from cbor bytes {1}. Cursor error info: {2} (err: {3})" + )] + InvalidBytesForType(&'static str, String, CursorBytesErrorInfo, String), #[error("Invalid block witness entries: {0:?}")] InvalidWitnessFormat(Vec), @@ -81,6 +84,40 @@ pub enum CompactParsingError { KeyError(#[from] FromHexPrefixError), } +#[derive(Debug)] +pub struct CursorBytesErrorInfo { + error_start_pos: usize, + bad_bytes_hex: String, +} + +impl CursorBytesErrorInfo { + fn new(cursor: &Cursor>, error_start_pos: u64) -> Self { + let mut cursor_cloned = cursor.clone(); + + cursor_cloned.set_position(error_start_pos); + let mut buf = vec![0; CURSOR_ERROR_BYTES_MAX_LEN]; + let num_bytes_read = cursor_cloned.read(&mut buf).unwrap(); + buf.truncate(num_bytes_read); + + let bad_bytes_hex = hex::encode(buf); + + Self { + error_start_pos: error_start_pos as usize, + bad_bytes_hex, + } + } +} + +impl Display for CursorBytesErrorInfo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error byte position: {}, bad bytes hex ({} bytes following bad bytes): {}", + self.error_start_pos, CURSOR_ERROR_BYTES_MAX_LEN, self.bad_bytes_hex + ) + } +} + #[derive(Debug, enumn::N)] enum Opcode { Leaf = 0x00, @@ -678,9 +715,12 @@ impl CompactCursor { .to_vec(); let type_bytes_hex = hex::encode(type_bytes); + let cursor_err_info = CursorBytesErrorInfo::new(&self.intern, starting_pos); + CompactParsingError::InvalidBytesForType( type_name::(), type_bytes_hex, + cursor_err_info, err.to_string(), ) }) @@ -700,15 +740,24 @@ impl CompactCursor { // I don't think it's possible to not read to a vec here with `ciborium`... In // theory this should be doable, but the way the library I don't think we can. 
fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult> { - Self::ciborium_byte_vec_err_reader_res_to_parsing_res(ciborium::from_reader( + let cursor_start_pos = self.intern.position(); + + Self::ciborium_byte_vec_err_reader_res_to_parsing_res( + ciborium::from_reader(&mut self.intern), + cursor_start_pos, &mut self.intern, - )) + ) } fn ciborium_byte_vec_err_reader_res_to_parsing_res( res: Result, + cursor_start_pos: u64, + cursor: &mut Cursor>, ) -> CompactParsingResult { - res.map_err(|err| CompactParsingError::InvalidByteVector(err.to_string())) + res.map_err(|err| { + let cursor_err_info = CursorBytesErrorInfo::new(cursor, cursor_start_pos); + CompactParsingError::InvalidByteVector(err.to_string(), cursor_err_info) + }) } fn at_eof(&self) -> bool { diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index c5821f1ab..c81595260 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -17,10 +17,12 @@ impl TestProtocolInputAndRoot { let protocol_bytes = hex::decode(self.byte_str).unwrap(); let expected_hash = TrieRootHash::from_slice(&hex::decode(self.root_str).unwrap()); - let (header, trie, _) = process_compact_prestate(TrieCompact { + let (header, trie, _) = match process_compact_prestate(TrieCompact { bytes: protocol_bytes, - }) - .unwrap(); + }) { + Ok(x) => x, + Err(err) => panic!("{}", err), + }; let trie_hash = trie.hash(); assert!(header.version_is_compatible(1)); From 586b99b9c357ecf0a258f431eecd5036a12bc250 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 11:10:58 -0700 Subject: [PATCH 080/208] Added a debug function for instr parsing --- src/compact/compact_prestate_processing.rs | 49 ++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index d430b6daf..8b4811187 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -595,6 +595,47 @@ impl WitnessBytes { Ok((header, self.instrs)) } + // TODO: Look at removing code duplication... + // TODO: Move behind a feature flag... + // TODO: Fairly hacky... + // TODO: Replace `unwrap()`s with `Result`s? + fn process_into_instructions_and_keep_bytes_parsed_to_instruction( + mut self, + ) -> CompactParsingResult)>> { + // Skip header. + self.byte_cursor.intern.set_position(1); + let mut instr_and_bytes = Vec::new(); + + loop { + let mut cloned_cursor = self.byte_cursor.intern.clone(); + + let op_start_pos = self.byte_cursor.intern.position(); + self.process_operator()?; + let op_byte_end_pos = self.byte_cursor.intern.position(); + let num_instr_bytes = (op_byte_end_pos - op_start_pos) as usize; + + let mut instr_bytes = vec![0; num_instr_bytes]; + cloned_cursor.read_exact(&mut instr_bytes).unwrap(); + + let instr_added = self + .instrs + .intern + .front() + .cloned() + .unwrap() + .into_instruction() + .unwrap(); + + instr_and_bytes.push((instr_added, instr_bytes)); + + if self.byte_cursor.at_eof() { + break; + } + } + + Ok(instr_and_bytes) + } + fn process_operator(&mut self) -> CompactParsingResult<()> { let opcode_byte = self.byte_cursor.read_byte()?; @@ -932,6 +973,14 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult, +) -> CompactParsingResult)>> { + let witness_bytes = WitnessBytes::new(bytes); + witness_bytes.process_into_instructions_and_keep_bytes_parsed_to_instruction() +} + // TODO: This could probably be made a bit faster... 
fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles { let mut key = Nibbles::default(); From f7936f61dd822c42cb524f548bda4a1a0f3e73c1 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 11:27:44 -0700 Subject: [PATCH 081/208] Made the instr debug function still give some output on errors --- src/compact/compact_prestate_processing.rs | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 8b4811187..41049a4a9 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -599,9 +599,19 @@ impl WitnessBytes { // TODO: Move behind a feature flag... // TODO: Fairly hacky... // TODO: Replace `unwrap()`s with `Result`s? - fn process_into_instructions_and_keep_bytes_parsed_to_instruction( + fn process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure( + self, + ) -> (Vec<(Instruction, Vec)>, CompactParsingResult<()>) { + let mut instr_and_bytes_buf = Vec::new(); + let res = self.process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure_intern(&mut instr_and_bytes_buf); + + (instr_and_bytes_buf, res) + } + + fn process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure_intern( mut self, - ) -> CompactParsingResult)>> { + _instr_and_bytes_buf: &mut Vec<(Instruction, Vec)>, + ) -> CompactParsingResult<()> { // Skip header. self.byte_cursor.intern.set_position(1); let mut instr_and_bytes = Vec::new(); @@ -633,7 +643,7 @@ impl WitnessBytes { } } - Ok(instr_and_bytes) + Ok(()) } fn process_operator(&mut self) -> CompactParsingResult<()> { @@ -976,9 +986,10 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult, -) -> CompactParsingResult)>> { +) -> (Vec<(Instruction, Vec)>, CompactParsingResult<()>) { let witness_bytes = WitnessBytes::new(bytes); - witness_bytes.process_into_instructions_and_keep_bytes_parsed_to_instruction() + witness_bytes + .process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure() } // TODO: This could probably be made a bit faster... From ad6312fa80f66ffd5f094f56f8834cd7e73c795d Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 11:33:49 -0700 Subject: [PATCH 082/208] `InvalidBytesForType` now provides the field name - Was not as useful without it. --- src/compact/compact_prestate_processing.rs | 27 ++++++++++++++-------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 41049a4a9..5428b5161 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -59,9 +59,15 @@ pub enum CompactParsingError { InvalidByteVector(String, CursorBytesErrorInfo), #[error( - "Unable to parse the type \"{0}\" from cbor bytes {1}. Cursor error info: {2} (err: {3})" + "Unable to parse the type \"{0}\" (field name: {1}) from cbor bytes {2}. 
Cursor error info: {3} (err: {4})" )] - InvalidBytesForType(&'static str, String, CursorBytesErrorInfo, String), + InvalidBytesForType( + &'static str, + &'static str, + String, + CursorBytesErrorInfo, + String, + ), #[error("Invalid block witness entries: {0:?}")] InvalidWitnessFormat(Vec), @@ -685,21 +691,21 @@ impl WitnessBytes { } fn process_branch(&mut self) -> CompactParsingResult<()> { - let mask = self.byte_cursor.read_t()?; + let mask = self.byte_cursor.read_t("mask")?; self.push_entry(Instruction::Branch(mask)); Ok(()) } fn process_hash(&mut self) -> CompactParsingResult<()> { - let hash = self.byte_cursor.read_t()?; + let hash = self.byte_cursor.read_t("hash")?; self.push_entry(Instruction::Hash(hash)); Ok(()) } fn process_code(&mut self) -> CompactParsingResult<()> { - let code = self.byte_cursor.read_t()?; + let code = self.byte_cursor.read_t("code")?; self.push_entry(Instruction::Code(code)); Ok(()) @@ -707,10 +713,10 @@ impl WitnessBytes { fn process_account_leaf(&mut self) -> CompactParsingResult<()> { let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); - let nonce = self.byte_cursor.read_t()?; - let balance = self.byte_cursor.read_t()?; - let has_code = self.byte_cursor.read_t()?; - let has_storage = self.byte_cursor.read_t()?; + let nonce = self.byte_cursor.read_t("nonce")?; + let balance = self.byte_cursor.read_t("balance")?; + let has_code = self.byte_cursor.read_t("has_code")?; + let has_storage = self.byte_cursor.read_t("has_storage")?; self.push_entry(Instruction::AccountLeaf( key, @@ -756,7 +762,7 @@ impl CompactCursor { } } - fn read_t(&mut self) -> CompactParsingResult { + fn read_t(&mut self, field_name: &'static str) -> CompactParsingResult { let starting_pos = self.intern.position(); ciborium::from_reader(&mut self.intern).map_err(move |err| { @@ -770,6 +776,7 @@ impl CompactCursor { CompactParsingError::InvalidBytesForType( type_name::(), + field_name, type_bytes_hex, cursor_err_info, err.to_string(), From 03166c188b4a162d293002b3002c498e11ef6a62 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 11:35:20 -0700 Subject: [PATCH 083/208] Fixed processing account leaf opcodes calling wrong function - Was calling the leaf function. --- src/compact/compact_prestate_processing.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 5428b5161..b189e35e8 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -670,7 +670,7 @@ impl WitnessBytes { Opcode::Branch => self.process_branch(), Opcode::Hash => self.process_hash(), Opcode::Code => self.process_code(), - Opcode::AccountLeaf => self.process_leaf(), + Opcode::AccountLeaf => self.process_account_leaf(), Opcode::EmptyRoot => self.process_empty_root(), } } From 7e95d6dc2c079f839c113c31972554ffe58df0d2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 12:32:02 -0700 Subject: [PATCH 084/208] Added a debug compact cursor type - Going to be really worthwhile for debugging. 
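- The cursor now sits behind a small `CompactCursor` trait with two impls:
  the original `CompactCursorFast`, plus a `DebugCompactCursor` decorator
  that delegates every read and `trace!`s what was parsed and where.
- `WitnessBytes` is generic over the cursor, so the fast path keeps static
  dispatch; opting into the logging cursor is just (sketch):

      let witness_bytes = WitnessBytes::<DebugCompactCursor>::new(bytes);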
--- src/compact/compact_prestate_processing.rs | 126 ++++++++++++++++++--- 1 file changed, 111 insertions(+), 15 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index b189e35e8..4ce05b17a 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -288,7 +288,19 @@ impl ParserState { fn create_and_extract_header( witness_bytes_raw: Vec, ) -> CompactParsingResult<(Header, Self)> { - let witness_bytes = WitnessBytes::new(witness_bytes_raw); + let witness_bytes = WitnessBytes::::new(witness_bytes_raw); + let (header, entries) = witness_bytes.process_into_instructions_and_header()?; + + let p_state = Self { entries }; + + Ok((header, p_state)) + } + + // TODO: Move behind a feature flag... + fn create_and_extract_header_debug( + witness_bytes_raw: Vec, + ) -> CompactParsingResult<(Header, Self)> { + let witness_bytes = WitnessBytes::::new(witness_bytes_raw); let (header, entries) = witness_bytes.process_into_instructions_and_header()?; let p_state = Self { entries }; @@ -572,15 +584,15 @@ impl ParserState { } } -struct WitnessBytes { - byte_cursor: CompactCursor, +struct WitnessBytes { + byte_cursor: C, instrs: WitnessEntries, } -impl WitnessBytes { +impl WitnessBytes { fn new(witness_bytes: Vec) -> Self { Self { - byte_cursor: CompactCursor::new(witness_bytes), + byte_cursor: C::new(witness_bytes), instrs: WitnessEntries::default(), } } @@ -619,15 +631,15 @@ impl WitnessBytes { _instr_and_bytes_buf: &mut Vec<(Instruction, Vec)>, ) -> CompactParsingResult<()> { // Skip header. - self.byte_cursor.intern.set_position(1); + self.byte_cursor.intern().set_position(1); let mut instr_and_bytes = Vec::new(); loop { - let mut cloned_cursor = self.byte_cursor.intern.clone(); + let mut cloned_cursor = self.byte_cursor.intern().clone(); - let op_start_pos = self.byte_cursor.intern.position(); + let op_start_pos = self.byte_cursor.intern().position(); self.process_operator()?; - let op_byte_end_pos = self.byte_cursor.intern.position(); + let op_byte_end_pos = self.byte_cursor.intern().position(); let num_instr_bytes = (op_byte_end_pos - op_start_pos) as usize; let mut instr_bytes = vec![0; num_instr_bytes]; @@ -658,7 +670,7 @@ impl WitnessBytes { let opcode = Opcode::n(opcode_byte).ok_or(CompactParsingError::InvalidOperator(opcode_byte))?; - trace!("Processed {:?} opcode", opcode); + trace!("Processed \"{:?}\" opcode", opcode); self.process_data_following_opcode(opcode) } @@ -748,13 +760,22 @@ impl WitnessBytes { } } +trait CompactCursor { + fn new(bytes: Vec) -> Self; + fn intern(&mut self) -> &mut Cursor>; + fn read_t(&mut self, field_name: &'static str) -> CompactParsingResult; + fn read_byte(&mut self) -> CompactParsingResult; + fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult>; + fn at_eof(&self) -> bool; +} + #[derive(Debug)] -struct CompactCursor { +struct CompactCursorFast { intern: Cursor>, temp_buf: Vec, } -impl CompactCursor { +impl CompactCursor for CompactCursorFast { fn new(bytes: Vec) -> Self { Self { intern: Cursor::new(bytes), @@ -762,6 +783,10 @@ impl CompactCursor { } } + fn intern(&mut self) -> &mut Cursor> { + &mut self.intern + } + fn read_t(&mut self, field_name: &'static str) -> CompactParsingResult { let starting_pos = self.intern.position(); @@ -807,6 +832,14 @@ impl CompactCursor { ) } + fn at_eof(&self) -> bool { + self.intern.position() as usize == self.intern.get_ref().len() + } +} + +impl CompactCursorFast { + // TODO: Keep around until we decide 
if we want to attempt the non-vec + // allocating route... fn ciborium_byte_vec_err_reader_res_to_parsing_res( res: Result, cursor_start_pos: u64, @@ -817,9 +850,72 @@ impl CompactCursor { CompactParsingError::InvalidByteVector(err.to_string(), cursor_err_info) }) } +} + +#[derive(Debug)] +struct DebugCompactCursor(CompactCursorFast); + +impl CompactCursor for DebugCompactCursor { + fn new(bytes: Vec) -> Self { + Self(CompactCursorFast::new(bytes)) + } + + fn intern(&mut self) -> &mut Cursor> { + self.0.intern() + } + + fn read_t(&mut self, field_name: &'static str) -> CompactParsingResult { + let cursor_start_pos = self.0.intern.position(); + let mut cloned_cursor = self.0.intern.clone(); + + let res = self.0.read_t(field_name); + let cursor_end_pos = self.0.intern.position(); + let num_bytes_read = (cursor_end_pos - cursor_start_pos) as usize; + + if res.is_ok() { + let mut t_bytes = vec![0; num_bytes_read]; + cloned_cursor.read_exact(&mut t_bytes).unwrap(); + + let hex_bytes = hex::encode(&t_bytes); + let hex_start_pos = cursor_start_pos * 2; + + trace!("`read_t` successfully parsed \"{}\" from bytes \"{}\" at byte position \"{}\" (hex start position: \"{}\")", field_name, hex_bytes, cursor_start_pos, hex_start_pos); + } + + res + } + + fn read_byte(&mut self) -> CompactParsingResult { + let res = self.0.read_byte(); + + if let Ok(byte) = res.as_ref() { + trace!("`read_byte` successfully parsed \"{}\"", byte); + } + + res + } + + fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult> { + let cursor_start_pos = self.0.intern.position(); + let res = self.0.read_cbor_byte_array_to_vec(); + + if let Ok(bytes) = res.as_ref() { + let hex_bytes = hex::encode(bytes); + let hex_start_pos = cursor_start_pos * 2; + trace!("`read_cbor_byte_array_to_vec` successfully parsed \"{}\" into a byte array at position \"{}\" (hex start position: \"{}\")", hex_bytes, cursor_start_pos, hex_start_pos); + } + + res + } fn at_eof(&self) -> bool { - self.intern.position() as usize == self.intern.get_ref().len() + let res = self.0.at_eof(); + + if res { + trace!("`at_eof` returned \"true\" for initial byte payload"); + } + + res } } @@ -975,7 +1071,7 @@ pub(crate) fn process_compact_prestate( // TODO: Move behind a feature flag just used for debugging (but probably not // `debug`)... 
fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult> { - let witness_bytes = WitnessBytes::new(bytes); + let witness_bytes = WitnessBytes::::new(bytes); let (_, entries) = witness_bytes.process_into_instructions_and_header()?; Ok(entries @@ -994,7 +1090,7 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult, ) -> (Vec<(Instruction, Vec)>, CompactParsingResult<()>) { - let witness_bytes = WitnessBytes::new(bytes); + let witness_bytes = WitnessBytes::::new(bytes); witness_bytes .process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure() } From 55f1dd978ef6773954971645cfe73bb7d7dc67c7 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 Nov 2023 22:23:04 -0600 Subject: [PATCH 085/208] Now parses most of a complex test payload --- src/compact/compact_prestate_processing.rs | 242 +++++++++++++++++---- 1 file changed, 197 insertions(+), 45 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 4ce05b17a..8ad2ebce8 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -55,11 +55,11 @@ pub enum CompactParsingError { #[error("Reached the end of the byte stream when we still expected more data")] UnexpectedEndOfStream, - #[error("Unable to parse an expected byte vector (error: {0}). Cursor error info: {1}")] - InvalidByteVector(String, CursorBytesErrorInfo), + #[error("Unable to parse an expected byte vector (field name: {0}) (error: {1}). Cursor error info: {2}")] + InvalidByteVector(&'static str, String, CursorBytesErrorInfo), #[error( - "Unable to parse the type \"{0}\" (field name: {1}) from cbor bytes {2}. Cursor error info: {3} (err: {4})" + "Unable to parse the type \"{0}\" (field name: {1}) from bytes {2}. Cursor error info: {3} (err: {4})" )] InvalidBytesForType( &'static str, @@ -619,31 +619,25 @@ impl WitnessBytes { // TODO: Replace `unwrap()`s with `Result`s? fn process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure( self, - ) -> (Vec<(Instruction, Vec)>, CompactParsingResult<()>) { + ) -> (InstructionAndBytesParsedFromBuf, CompactParsingResult<()>) { let mut instr_and_bytes_buf = Vec::new(); let res = self.process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure_intern(&mut instr_and_bytes_buf); - (instr_and_bytes_buf, res) + (instr_and_bytes_buf.into(), res) } fn process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure_intern( mut self, - _instr_and_bytes_buf: &mut Vec<(Instruction, Vec)>, + instr_and_bytes_buf: &mut Vec<(Instruction, Vec)>, ) -> CompactParsingResult<()> { // Skip header. 
self.byte_cursor.intern().set_position(1); - let mut instr_and_bytes = Vec::new(); loop { - let mut cloned_cursor = self.byte_cursor.intern().clone(); - let op_start_pos = self.byte_cursor.intern().position(); self.process_operator()?; - let op_byte_end_pos = self.byte_cursor.intern().position(); - let num_instr_bytes = (op_byte_end_pos - op_start_pos) as usize; - let mut instr_bytes = vec![0; num_instr_bytes]; - cloned_cursor.read_exact(&mut instr_bytes).unwrap(); + let instr_bytes = get_bytes_from_cursor(&mut self.byte_cursor, op_start_pos); let instr_added = self .instrs @@ -653,8 +647,8 @@ impl WitnessBytes { .unwrap() .into_instruction() .unwrap(); - - instr_and_bytes.push((instr_added, instr_bytes)); + + instr_and_bytes_buf.push((instr_added, instr_bytes)); if self.byte_cursor.at_eof() { break; @@ -688,15 +682,19 @@ impl WitnessBytes { } fn process_leaf(&mut self) -> CompactParsingResult<()> { - let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); - let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec()?; + let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec("leaf key")?); + let value_raw = self.byte_cursor.read_cbor_byte_array_to_vec("leaf value")?; self.push_entry(Instruction::Leaf(key, value_raw)); Ok(()) } fn process_extension(&mut self) -> CompactParsingResult<()> { - let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); + let key = key_bytes_to_nibbles( + &self + .byte_cursor + .read_cbor_byte_array_to_vec("extension key")?, + ); self.push_entry(Instruction::Extension(key)); Ok(()) @@ -710,7 +708,7 @@ impl WitnessBytes { } fn process_hash(&mut self) -> CompactParsingResult<()> { - let hash = self.byte_cursor.read_t("hash")?; + let hash = self.byte_cursor.read_h256("hash")?; self.push_entry(Instruction::Hash(hash)); Ok(()) @@ -724,23 +722,48 @@ impl WitnessBytes { } fn process_account_leaf(&mut self) -> CompactParsingResult<()> { - let key = key_bytes_to_nibbles(&self.byte_cursor.read_cbor_byte_array_to_vec()?); - let nonce = self.byte_cursor.read_t("nonce")?; - let balance = self.byte_cursor.read_t("balance")?; - let has_code = self.byte_cursor.read_t("has_code")?; - let has_storage = self.byte_cursor.read_t("has_storage")?; + let key = key_bytes_to_nibbles( + &self + .byte_cursor + .read_cbor_byte_array_to_vec("account leaf key")?, + ); + let flags: AccountLeafFlags = self.byte_cursor.read_byte()?.into(); + let nonce = Self::read_account_flag_field_if_present_or_default( + &mut self.byte_cursor, + "account leaf nonce", + flags.nonce_present, + )?; + let balance = Self::read_account_flag_field_if_present_or_default( + &mut self.byte_cursor, + "account leaf balance", + flags.balance_present, + )?; + + // TODO: process actual storage trie probably? Wait until we know what is going + // on here. 
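// An aside on the flags byte just read: each low bit gates whether the
// corresponding account field is present in the stream at all. A
// stand-alone sketch of the decode, mirroring the bit assignments in
// `AccountLeafFlags::from` further down (illustrative only):

fn decode_account_leaf_flags(v: u8) -> (bool, bool, bool, bool) {
    let code_present = v & 0b0001 != 0;
    let storage_present = v & 0b0010 != 0;
    let nonce_present = v & 0b0100 != 0;
    let balance_present = v & 0b1000 != 0;
    (code_present, storage_present, nonce_present, balance_present)
}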
self.push_entry(Instruction::AccountLeaf( key, nonce, balance, - has_code, - has_storage, + flags.code_present, + flags.storage_present, )); Ok(()) } + fn read_account_flag_field_if_present_or_default( + cursor: &mut C, + field_name: &'static str, + present_flag: bool, + ) -> CompactParsingResult { + Ok(match present_flag { + false => U256::default(), + true => deserialize_u256_from_compact(cursor, field_name)?, + }) + } + fn process_empty_root(&mut self) -> CompactParsingResult<()> { self.push_entry(Instruction::EmptyRoot); Ok(()) @@ -760,12 +783,35 @@ impl WitnessBytes { } } +#[derive(Debug)] +struct AccountLeafFlags { + code_present: bool, + storage_present: bool, + nonce_present: bool, + balance_present: bool, +} + +impl From for AccountLeafFlags { + fn from(v: u8) -> Self { + Self { + code_present: v & 0b0001 != 0, + storage_present: v & 0b0010 != 0, + nonce_present: v & 0b0100 != 0, + balance_present: v & 0b1000 != 0, + } + } +} + trait CompactCursor { fn new(bytes: Vec) -> Self; fn intern(&mut self) -> &mut Cursor>; fn read_t(&mut self, field_name: &'static str) -> CompactParsingResult; fn read_byte(&mut self) -> CompactParsingResult; - fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult>; + fn read_cbor_byte_array_to_vec( + &mut self, + field_name: &'static str, + ) -> CompactParsingResult>; + fn read_h256(&mut self, field_name: &'static str) -> CompactParsingResult; fn at_eof(&self) -> bool; } @@ -822,16 +868,39 @@ impl CompactCursor for CompactCursorFast { // I don't think it's possible to not read to a vec here with `ciborium`... In // theory this should be doable, but the way the library I don't think we can. - fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult> { + fn read_cbor_byte_array_to_vec( + &mut self, + field_name: &'static str, + ) -> CompactParsingResult> { let cursor_start_pos = self.intern.position(); Self::ciborium_byte_vec_err_reader_res_to_parsing_res( ciborium::from_reader(&mut self.intern), cursor_start_pos, &mut self.intern, + field_name, ) } + fn read_h256(&mut self, field_name: &'static str) -> CompactParsingResult { + let cursor_start_pos = self.intern().position(); + let mut h256_bytes = [0; 32]; + + self.intern.read_exact(&mut h256_bytes).map_err(|err| { + let hex_bytes = hex::encode(h256_bytes); + let cursor_err_info = CursorBytesErrorInfo::new(self.intern(), cursor_start_pos); + CompactParsingError::InvalidBytesForType( + type_name::(), + field_name, + hex_bytes, + cursor_err_info, + err.to_string(), + ) + })?; + + Ok(H256(h256_bytes)) + } + fn at_eof(&self) -> bool { self.intern.position() as usize == self.intern.get_ref().len() } @@ -844,10 +913,11 @@ impl CompactCursorFast { res: Result, cursor_start_pos: u64, cursor: &mut Cursor>, + field_name: &'static str, ) -> CompactParsingResult { res.map_err(|err| { let cursor_err_info = CursorBytesErrorInfo::new(cursor, cursor_start_pos); - CompactParsingError::InvalidByteVector(err.to_string(), cursor_err_info) + CompactParsingError::InvalidByteVector(field_name, err.to_string(), cursor_err_info) }) } } @@ -866,21 +936,12 @@ impl CompactCursor for DebugCompactCursor { fn read_t(&mut self, field_name: &'static str) -> CompactParsingResult { let cursor_start_pos = self.0.intern.position(); - let mut cloned_cursor = self.0.intern.clone(); - let res = self.0.read_t(field_name); - let cursor_end_pos = self.0.intern.position(); - let num_bytes_read = (cursor_end_pos - cursor_start_pos) as usize; if res.is_ok() { - let mut t_bytes = vec![0; num_bytes_read]; - 
cloned_cursor.read_exact(&mut t_bytes).unwrap(); - - let hex_bytes = hex::encode(&t_bytes); - let hex_start_pos = cursor_start_pos * 2; - - trace!("`read_t` successfully parsed \"{}\" from bytes \"{}\" at byte position \"{}\" (hex start position: \"{}\")", field_name, hex_bytes, cursor_start_pos, hex_start_pos); - } + let info_payload = get_bytes_and_debug_info_from_cursor(self, cursor_start_pos); + trace!("`read_t` successfully parsed \"{}\" from bytes \"{}\" at byte position \"{}\" (hex start position: \"{}\")", field_name, info_payload.bytes_hex, cursor_start_pos, info_payload.hex_start_pos); + } res } @@ -895,9 +956,12 @@ impl CompactCursor for DebugCompactCursor { res } - fn read_cbor_byte_array_to_vec(&mut self) -> CompactParsingResult> { + fn read_cbor_byte_array_to_vec( + &mut self, + field_name: &'static str, + ) -> CompactParsingResult> { let cursor_start_pos = self.0.intern.position(); - let res = self.0.read_cbor_byte_array_to_vec(); + let res = self.0.read_cbor_byte_array_to_vec(field_name); if let Ok(bytes) = res.as_ref() { let hex_bytes = hex::encode(bytes); @@ -908,12 +972,29 @@ impl CompactCursor for DebugCompactCursor { res } + fn read_h256(&mut self, field_name: &'static str) -> CompactParsingResult { + let cursor_start_pos = self.0.intern.position(); + let res = self.0.read_h256(field_name); + + if let Ok(v) = res.as_ref() { + // Need to use hex formatting otherwise the default display formatting truncates + // it. + let v_full_readable = format!("{:x}", v); + let hex_bytes = hex::encode(get_bytes_from_cursor(self, cursor_start_pos)); + let hex_start_pos = cursor_start_pos * 2; + + trace!("`read_h256` successfully parsed \"{}\" (hex bytes: {}) into an H256 at position \"{}\" (hex start position: \"{}\")", v_full_readable, hex_bytes, cursor_start_pos, hex_start_pos); + } + + res + } + fn at_eof(&self) -> bool { let res = self.0.at_eof(); if res { trace!("`at_eof` returned \"true\" for initial byte payload"); - } + } res } @@ -1086,10 +1167,37 @@ fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult)>); + +impl From)>> for InstructionAndBytesParsedFromBuf { + fn from(v: Vec<(Instruction, Vec)>) -> Self { + Self(v) + } +} + +impl Display for InstructionAndBytesParsedFromBuf { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "Instructions and bytes there were parsed from:")?; + + for (instr, parsed_from_bytes) in &self.0 { + writeln!( + f, + "Instruction: {}, Bytes: {}", + instr, + hex::encode(parsed_from_bytes) + )?; + } + + Ok(()) + } +} + // TODO: Also move behind a feature flag... 
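// The newtype above exists purely so that a `Display` impl can be hung on
// what is otherwise a plain `Vec` of pairs. A stripped-down sketch of the
// same pattern with a simplified element type (assumes the `hex` crate,
// which this module already uses):

use std::fmt::{self, Display};

struct LabeledBytes(Vec<(String, Vec<u8>)>);

impl Display for LabeledBytes {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for (label, bytes) in &self.0 {
            writeln!(f, "{}: {}", label, hex::encode(bytes))?;
        }
        Ok(())
    }
}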
fn parse_to_instructions_and_bytes_for_instruction( bytes: Vec, -) -> (Vec<(Instruction, Vec)>, CompactParsingResult<()>) { +) -> (InstructionAndBytesParsedFromBuf, CompactParsingResult<()>) { let witness_bytes = WitnessBytes::::new(bytes); witness_bytes .process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure() @@ -1146,6 +1254,50 @@ fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles { key } +fn deserialize_u256_from_compact( + cursor: &mut C, + field_name: &'static str, +) -> CompactParsingResult { + let b_array = cursor.read_cbor_byte_array_to_vec(field_name)?; + Ok(U256::from_big_endian(&b_array)) +} + +struct CursorBytesDebugInfo { + bytes: Vec, + bytes_hex: String, + hex_start_pos: usize, +} + +fn get_bytes_and_debug_info_from_cursor( + cursor: &mut C, + cursor_start_pos: u64, +) -> CursorBytesDebugInfo { + let bytes = get_bytes_from_cursor(cursor, cursor_start_pos); + + let bytes_hex = hex::encode(&bytes); + let hex_start_pos = cursor_start_pos as usize * 2; + + CursorBytesDebugInfo { + bytes, + bytes_hex, + hex_start_pos, + } +} + +fn get_bytes_from_cursor(cursor: &mut C, cursor_start_pos: u64) -> Vec { + let cursor_end_pos = cursor.intern().position(); + let mut cloned_cursor = cursor.intern().clone(); + + // Rewind the cursor. + cloned_cursor.set_position(cursor_start_pos); + + let num_bytes_read = (cursor_end_pos - cursor_start_pos) as usize; + let mut t_bytes = vec![0; num_bytes_read]; + cloned_cursor.read_exact(&mut t_bytes).unwrap(); + + t_bytes +} + #[cfg(test)] mod tests { use eth_trie_utils::{nibbles::Nibbles, partial_trie::PartialTrie}; From 95f323979c2531fa978809aad1082fad9d6f48f8 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 2 Nov 2023 10:50:04 -0600 Subject: [PATCH 086/208] Now parses test payload 1!!! --- src/compact/compact_prestate_processing.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 8ad2ebce8..bdbc82e71 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -492,7 +492,7 @@ impl ParserState { fn match_account_leaf_no_code_and_no_storage( ) -> CompactParsingResult<(usize, Option, Option)> { - Ok((0, None, None)) + Ok((1, None, None)) } fn match_account_leaf_no_code_but_has_storage( @@ -503,8 +503,8 @@ impl ParserState { match buf[0].clone() { WitnessEntry::Node(node) => match Self::try_get_storage_hash_from_node(&node) { - Some(s_hash) => Ok((1, None, Some(s_hash))), - None => Self::invalid_witness_err(1, TraverserDirection::Backwards, traverser), + Some(s_hash) => Ok((2, None, Some(s_hash))), + None => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), }, _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), } @@ -518,10 +518,10 @@ impl ParserState { match buf[0].clone() { WitnessEntry::Node(NodeEntry::Code(code)) => { - Ok((1, Some(AccountNodeCode::CodeNode(code.clone())), None)) + Ok((2, Some(AccountNodeCode::CodeNode(code.clone())), None)) } WitnessEntry::Node(NodeEntry::Hash(h)) => { - Ok((1, Some(AccountNodeCode::HashNode(h)), None)) + Ok((2, Some(AccountNodeCode::HashNode(h)), None)) } _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), } From 35931a3837abcdcdb2d67093059083c5a00a08d2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 2 Nov 2023 13:02:24 -0600 Subject: [PATCH 087/208] Test payload 2 now passes!!! 
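Two low-level reads are worth calling out in this change: scalar account
fields (nonce, balance) arrive as CBOR byte strings holding a big-endian
integer and are only read when their flag bit is set, while hashes are
consumed as raw, unframed 32-byte reads. A rough sketch of the two
decodings, with `String` errors standing in for this crate's error type:

    use ethereum_types::{H256, U256};
    use std::io::{Cursor, Read};

    // CBOR-framed scalar: the byte array was already length-decoded.
    fn u256_from_be_bytes(be_bytes: &[u8]) -> U256 {
        U256::from_big_endian(be_bytes)
    }

    // Raw hash: exactly 32 bytes, no CBOR framing.
    fn read_raw_h256(cur: &mut Cursor<Vec<u8>>) -> Result<H256, String> {
        let mut buf = [0u8; 32];
        cur.read_exact(&mut buf).map_err(|e| e.to_string())?;
        Ok(H256(buf))
    }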
--- src/compact/compact_prestate_processing.rs | 109 ++++++++++++++------- src/compact/complex_test_payloads.rs | 34 ++++++- 2 files changed, 104 insertions(+), 39 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index bdbc82e71..ee936488d 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -708,7 +708,7 @@ impl WitnessBytes { } fn process_hash(&mut self) -> CompactParsingResult<()> { - let hash = self.byte_cursor.read_h256("hash")?; + let hash = self.byte_cursor.read_non_cbor_h256("hash")?; self.push_entry(Instruction::Hash(hash)); Ok(()) @@ -728,16 +728,15 @@ impl WitnessBytes { .read_cbor_byte_array_to_vec("account leaf key")?, ); let flags: AccountLeafFlags = self.byte_cursor.read_byte()?.into(); - let nonce = Self::read_account_flag_field_if_present_or_default( - &mut self.byte_cursor, - "account leaf nonce", - flags.nonce_present, - )?; - let balance = Self::read_account_flag_field_if_present_or_default( - &mut self.byte_cursor, - "account leaf balance", - flags.balance_present, - )?; + let nonce: U256 = + Self::read_account_flag_field_if_present_or_default(flags.nonce_present, || { + self.byte_cursor.read_t::("account leaf nonce") + })? + .into(); + let balance = + Self::read_account_flag_field_if_present_or_default(flags.balance_present, || { + self.byte_cursor.read_cbor_u256("account leaf balance") + })?; // TODO: process actual storage trie probably? Wait until we know what is going // on here. @@ -753,14 +752,17 @@ impl WitnessBytes { Ok(()) } - fn read_account_flag_field_if_present_or_default( - cursor: &mut C, - field_name: &'static str, + fn read_account_flag_field_if_present_or_default( present_flag: bool, - ) -> CompactParsingResult { + mut read_f: F, + ) -> CompactParsingResult + where + F: FnMut() -> CompactParsingResult, + T: Default, + { Ok(match present_flag { - false => U256::default(), - true => deserialize_u256_from_compact(cursor, field_name)?, + false => T::default(), + true => (read_f)()?, }) } @@ -811,7 +813,8 @@ trait CompactCursor { &mut self, field_name: &'static str, ) -> CompactParsingResult>; - fn read_h256(&mut self, field_name: &'static str) -> CompactParsingResult; + fn read_cbor_u256(&mut self, field_name: &'static str) -> CompactParsingResult; + fn read_non_cbor_h256(&mut self, field_name: &'static str) -> CompactParsingResult; fn at_eof(&self) -> bool; } @@ -882,7 +885,13 @@ impl CompactCursor for CompactCursorFast { ) } - fn read_h256(&mut self, field_name: &'static str) -> CompactParsingResult { + // TODO: Clean up code duplication... + fn read_cbor_u256(&mut self, field_name: &'static str) -> CompactParsingResult { + let b_array = self.read_cbor_byte_array_to_vec(field_name)?; + Ok(U256::from_big_endian(&b_array)) + } + + fn read_non_cbor_h256(&mut self, field_name: &'static str) -> CompactParsingResult { let cursor_start_pos = self.intern().position(); let mut h256_bytes = [0; 32]; @@ -925,6 +934,7 @@ impl CompactCursorFast { #[derive(Debug)] struct DebugCompactCursor(CompactCursorFast); +// TODO: There are some decent opportunities to reduce code duplication here... 
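// Usage sketch for the flag-gated helper above: the reader closure runs
// only when the flag bit was set; otherwise the field defaults. Error
// type simplified to `String` for illustration:

fn read_if_present<T: Default>(
    present: bool,
    mut read_f: impl FnMut() -> Result<T, String>,
) -> Result<T, String> {
    match present {
        false => Ok(T::default()),
        true => read_f(),
    }
}

fn demo() -> Result<(), String> {
    let nonce: u64 = read_if_present(false, || Ok(7))?; // bit unset -> 0
    assert_eq!(nonce, 0);
    let balance: u64 = read_if_present(true, || Ok(7))?; // bit set -> reads 7
    assert_eq!(balance, 7);
    Ok(())
}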
impl CompactCursor for DebugCompactCursor { fn new(bytes: Vec) -> Self { Self(CompactCursorFast::new(bytes)) @@ -972,9 +982,22 @@ impl CompactCursor for DebugCompactCursor { res } - fn read_h256(&mut self, field_name: &'static str) -> CompactParsingResult { + fn read_cbor_u256(&mut self, field_name: &'static str) -> CompactParsingResult { + let cursor_start_pos = self.0.intern.position(); + let res = self.0.read_cbor_u256(field_name); + + if let Ok(v) = res.as_ref() { + let hex_bytes = format!("{:x}", v); + let hex_start_pos = cursor_start_pos * 2; + trace!("`read_cbor_u256` successfully parsed \"{}\" (hex bytes: {}) into an U256 at position \"{}\" (hex start position: \"{}\")", v, hex_bytes, cursor_start_pos, hex_start_pos); + } + + res + } + + fn read_non_cbor_h256(&mut self, field_name: &'static str) -> CompactParsingResult { let cursor_start_pos = self.0.intern.position(); - let res = self.0.read_h256(field_name); + let res = self.0.read_non_cbor_h256(field_name); if let Ok(v) = res.as_ref() { // Need to use hex formatting otherwise the default display formatting truncates @@ -983,7 +1006,7 @@ impl CompactCursor for DebugCompactCursor { let hex_bytes = hex::encode(get_bytes_from_cursor(self, cursor_start_pos)); let hex_start_pos = cursor_start_pos * 2; - trace!("`read_h256` successfully parsed \"{}\" (hex bytes: {}) into an H256 at position \"{}\" (hex start position: \"{}\")", v_full_readable, hex_bytes, cursor_start_pos, hex_start_pos); + trace!("`read_non_cbor_h256` successfully parsed \"{}\" (hex bytes: {}) into an H256 at position \"{}\" (hex start position: \"{}\")", v_full_readable, hex_bytes, cursor_start_pos, hex_start_pos); } res @@ -1141,9 +1164,30 @@ pub(crate) fn process_compact_prestate( HashedPartialTrie, Option>>, )> { - let (header, parser) = ParserState::create_and_extract_header(state.bytes)?; - let out = parser.parse()?; + process_compact_prestate_common(state, ParserState::create_and_extract_header) +} +// TODO: Move behind a feature flag... 
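// The `_common` split below keeps one body for both entry points and
// injects only the header-extraction step as a plain `fn` pointer. The
// shape, reduced to a toy (signatures simplified, not the real ones):

type ExtractHeaderFn = fn(Vec<u8>) -> Result<(u8, Vec<u8>), String>;

fn toy_extract_fast(bytes: Vec<u8>) -> Result<(u8, Vec<u8>), String> {
    let (&version, rest) = bytes.split_first().ok_or("empty payload")?;
    Ok((version, rest.to_vec()))
}

fn toy_process_common(bytes: Vec<u8>, extract: ExtractHeaderFn) -> Result<usize, String> {
    let (_version, body) = extract(bytes)?;
    Ok(body.len())
}

fn toy_process_fast(bytes: Vec<u8>) -> Result<usize, String> {
    toy_process_common(bytes, toy_extract_fast)
}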
+pub(crate) fn process_compact_prestate_debug( + state: TrieCompact, +) -> CompactParsingResult<( + Header, + HashedPartialTrie, + Option>>, +)> { + process_compact_prestate_common(state, ParserState::create_and_extract_header_debug) +} + +fn process_compact_prestate_common( + state: TrieCompact, + create_and_extract_header_f: fn(Vec) -> CompactParsingResult<(Header, ParserState)>, +) -> CompactParsingResult<( + Header, + HashedPartialTrie, + Option>>, +)> { + let (header, parser) = create_and_extract_header_f(state.bytes)?; + let out = parser.parse()?; let extra_code_hash_mappings = (!out.code.is_empty()).then_some(out.code); Ok((header, out.trie, extra_code_hash_mappings)) @@ -1254,14 +1298,6 @@ fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles { key } -fn deserialize_u256_from_compact( - cursor: &mut C, - field_name: &'static str, -) -> CompactParsingResult { - let b_array = cursor.read_cbor_byte_array_to_vec(field_name)?; - Ok(U256::from_big_endian(&b_array)) -} - struct CursorBytesDebugInfo { bytes: Vec, bytes_hex: String, @@ -1367,16 +1403,19 @@ mod tests { #[test] fn complex_payload_1() { - TEST_PAYLOAD_1.parse_and_check_hash_matches(); + init(); + TEST_PAYLOAD_1.parse_and_check_hash_matches_with_debug(); } #[test] fn complex_payload_2() { - TEST_PAYLOAD_2.parse_and_check_hash_matches(); + init(); + TEST_PAYLOAD_2.parse_and_check_hash_matches_with_debug(); } #[test] fn complex_payload_3() { - TEST_PAYLOAD_3.parse_and_check_hash_matches(); + init(); + TEST_PAYLOAD_3.parse_and_check_hash_matches_with_debug(); } } diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index c81595260..37b510db6 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -1,12 +1,27 @@ -use eth_trie_utils::partial_trie::PartialTrie; +use std::collections::HashMap; -use super::compact_prestate_processing::process_compact_prestate; -use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; +use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; + +use super::compact_prestate_processing::{ + process_compact_prestate, process_compact_prestate_debug, CompactParsingResult, Header, +}; +use crate::{ + trace_protocol::TrieCompact, + types::{CodeHash, TrieRootHash}, +}; pub(crate) const TEST_PAYLOAD_1: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79084a021e19e0c9bab2400000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d190841010219102005582103876da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6100841010558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6a0673c691edfa4c4528323986bb43c579316f436ff6f8b4ac70854bbd95340b" }; pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a021e0c000250c782fa00055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480de0b6b3a76400000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "e779761e7f0cf4bb2b5e5a2ebac65406d3a7516d46798040803488825a01c19c" }; pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; +type ProcessCompactPrestateFn = fn( + TrieCompact, +) -> CompactParsingResult<( + Header, + HashedPartialTrie, + Option>>, +)>; + pub(crate) struct TestProtocolInputAndRoot { pub(crate) byte_str: &'static str, pub(crate) root_str: &'static str, @@ -14,10 +29,21 @@ pub(crate) struct TestProtocolInputAndRoot { impl TestProtocolInputAndRoot { pub(crate) fn parse_and_check_hash_matches(self) { + self.parse_and_check_hash_matches_common(process_compact_prestate); + } + + pub(crate) fn parse_and_check_hash_matches_with_debug(self) { + self.parse_and_check_hash_matches_common(process_compact_prestate_debug); + } + + fn parse_and_check_hash_matches_common( + self, + process_compact_prestate_f: ProcessCompactPrestateFn, + ) { let protocol_bytes = hex::decode(self.byte_str).unwrap(); let expected_hash = TrieRootHash::from_slice(&hex::decode(self.root_str).unwrap()); - let (header, trie, _) = match process_compact_prestate(TrieCompact { + let (header, trie, _) = match process_compact_prestate_f(TrieCompact { bytes: protocol_bytes, }) { Ok(x) => x, From d5452c12bd4b9775e5ce43899238823aa6e37b13 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 2 Nov 2023 14:04:26 -0600 Subject: [PATCH 088/208] Fixed the third test payload being truncated --- src/compact/complex_test_payloads.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index 37b510db6..0ee2bd005 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -12,7 +12,7 @@ use crate::{ pub(crate) const TEST_PAYLOAD_1: TestProtocolInputAndRoot = 
TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79084a021e19e0c9bab2400000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d190841010219102005582103876da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6100841010558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6a0673c691edfa4c4528323986bb43c579316f436ff6f8b4ac70854bbd95340b" }; pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a021e0c000250c782fa00055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480de0b6b3a76400000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "e779761e7f0cf4bb2b5e5a2ebac65406d3a7516d46798040803488825a01c19c" }; -pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; +pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; type ProcessCompactPrestateFn = fn( TrieCompact, From aadbc5fdfde7307a2300a9bcc0f912e33e489bc4 Mon Sep 17 00:00:00 2001 From: cpu Date: Thu, 2 Nov 2023 14:36:43 -0700 Subject: [PATCH 089/208] serde rename enum variants to snake_case --- src/trace_protocol.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 9f0cb92c7..9f136378b 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -47,6 +47,7 @@ pub struct BlockTrace { /// Minimal hashed out tries needed by all txns in the block. #[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] pub enum BlockTraceTriePreImages { Separate(SeparateTriePreImages), Combined(CombinedPreImages), @@ -61,6 +62,7 @@ pub struct SeparateTriePreImages { /// A trie pre-image where state & storage are seperate. #[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] pub enum SeparateTriePreImage { Uncompressed(TrieUncompressed), Direct(TrieDirect), @@ -68,6 +70,7 @@ pub enum SeparateTriePreImage { /// A trie pre-image where both state & storage are combined into one payload. #[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] pub enum CombinedPreImages { Compact(TrieCompact), } @@ -91,6 +94,7 @@ pub struct TrieCompact { pub struct TrieDirect(pub HashedPartialTrie); #[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] pub enum SeparateStorageTriesPreImage { /// A single hash map that contains all node hashes from all storage tries /// involved in the block. We can reconstruct the individual storage tries @@ -168,6 +172,7 @@ pub struct TxnTrace { /// Contract code access type. Used by txn traces. #[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] pub enum ContractCodeUsage { /// Contract was read. 
Read(CodeHash), From e083413eb5232c2184265453a1f6ed6059250c84 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 2 Nov 2023 16:23:38 -0600 Subject: [PATCH 090/208] Updated API a bit and removed more `todo!()`s --- src/compact/compact_prestate_processing.rs | 41 +++++++++++++--------- src/compact/compact_to_partial_trie.rs | 9 ++--- src/compact/complex_test_payloads.rs | 26 +++++--------- src/processed_block_trace.rs | 36 ++++++++++--------- src/trace_protocol.rs | 6 ++-- 5 files changed, 59 insertions(+), 59 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index ee936488d..15cf7516b 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -24,7 +24,7 @@ use super::compact_to_partial_trie::{ }; use crate::{ trace_protocol::TrieCompact, - types::{CodeHash, TrieRootHash}, + types::{CodeHash, HashedAccountAddr, TrieRootHash}, }; pub type CompactParsingResult = Result; @@ -1157,40 +1157,47 @@ enum TraverserDirection { Both, } +#[derive(Debug, Default)] +pub(crate) struct PartialTriePreImages { + pub(crate) state: HashedPartialTrie, + pub(crate) storage: HashMap, +} + +#[derive(Debug)] +pub(crate) struct ProcessedCompactOutput { + pub(crate) header: Header, + pub(crate) tries: PartialTriePreImages, + pub(crate) code: Option>>, +} + pub(crate) fn process_compact_prestate( state: TrieCompact, -) -> CompactParsingResult<( - Header, - HashedPartialTrie, - Option>>, -)> { +) -> CompactParsingResult { process_compact_prestate_common(state, ParserState::create_and_extract_header) } // TODO: Move behind a feature flag... pub(crate) fn process_compact_prestate_debug( state: TrieCompact, -) -> CompactParsingResult<( - Header, - HashedPartialTrie, - Option>>, -)> { +) -> CompactParsingResult { process_compact_prestate_common(state, ParserState::create_and_extract_header_debug) } fn process_compact_prestate_common( state: TrieCompact, create_and_extract_header_f: fn(Vec) -> CompactParsingResult<(Header, ParserState)>, -) -> CompactParsingResult<( - Header, - HashedPartialTrie, - Option>>, -)> { +) -> CompactParsingResult { let (header, parser) = create_and_extract_header_f(state.bytes)?; let out = parser.parse()?; let extra_code_hash_mappings = (!out.code.is_empty()).then_some(out.code); - Ok((header, out.trie, extra_code_hash_mappings)) + let out = ProcessedCompactOutput { + header, + tries: out.tries, + code: extra_code_hash_mappings, + }; + + Ok(out) } // TODO: Move behind a feature flag just used for debugging (but probably not diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 7e41e214c..b6332214a 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -7,7 +7,8 @@ use eth_trie_utils::{ use plonky2_evm::generation::mpt::AccountRlp; use super::compact_prestate_processing::{ - AccountNodeCode, AccountNodeData, CompactParsingResult, LeafNodeData, NodeEntry, WitnessEntry, + AccountNodeCode, AccountNodeData, CompactParsingResult, LeafNodeData, NodeEntry, + PartialTriePreImages, WitnessEntry, }; use crate::{ types::{CodeHash, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, @@ -16,7 +17,7 @@ use crate::{ #[derive(Debug, Default)] pub(super) struct CompactToPartialOutput { - pub(super) trie: HashedPartialTrie, + pub(super) tries: PartialTriePreImages, // TODO: `code` is ever only available for storage tries, so we should come up with a better // API that represents this... 
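// Usage sketch for the reshaped output as of this change (it mirrors what
// the test harness does): check header compatibility, then hash the
// rebuilt state trie. Error handling elided; paths are this crate's own
// modules.

use crate::compact::compact_prestate_processing::process_compact_prestate;
use crate::trace_protocol::TrieCompact;
use eth_trie_utils::partial_trie::PartialTrie; // for `.hash()`

fn check_roundtrip(payload: TrieCompact) {
    let out = process_compact_prestate(payload).unwrap();
    assert!(out.header.version_is_compatible(1));
    let state_root = out.tries.state.hash();
    println!("reconstructed state root: {:x}", state_root);
}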
@@ -51,7 +52,7 @@ pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( NodeEntry::Branch(n) => process_branch(curr_key, n, output), NodeEntry::Code(c_bytes) => process_code(c_bytes.clone(), output), NodeEntry::Empty => process_empty(), - NodeEntry::Hash(h) => process_hash(curr_key, *h, &mut output.trie), + NodeEntry::Hash(h) => process_hash(curr_key, *h, &mut output.tries.state), NodeEntry::Leaf(k, v) => process_leaf(curr_key, k, v, output), NodeEntry::Extension(k, c) => process_extension(curr_key, k, c, output), } @@ -113,7 +114,7 @@ fn process_leaf( } }; - output.trie.insert(full_k, l_val); + output.tries.state.insert(full_k, l_val); Ok(()) } diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index 0ee2bd005..ecf67b99e 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -1,26 +1,16 @@ -use std::collections::HashMap; - -use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; +use eth_trie_utils::partial_trie::PartialTrie; use super::compact_prestate_processing::{ - process_compact_prestate, process_compact_prestate_debug, CompactParsingResult, Header, -}; -use crate::{ - trace_protocol::TrieCompact, - types::{CodeHash, TrieRootHash}, + process_compact_prestate, process_compact_prestate_debug, CompactParsingResult, + ProcessedCompactOutput, }; +use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; pub(crate) const TEST_PAYLOAD_1: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79084a021e19e0c9bab2400000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d190841010219102005582103876da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6100841010558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6a0673c691edfa4c4528323986bb43c579316f436ff6f8b4ac70854bbd95340b" }; pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a021e0c000250c782fa00055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480de0b6b3a76400000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "e779761e7f0cf4bb2b5e5a2ebac65406d3a7516d46798040803488825a01c19c" }; pub(crate) const 
TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; -type ProcessCompactPrestateFn = fn( - TrieCompact, -) -> CompactParsingResult<( - Header, - HashedPartialTrie, - Option>>, -)>; +type ProcessCompactPrestateFn = fn(TrieCompact) -> CompactParsingResult; pub(crate) struct TestProtocolInputAndRoot { pub(crate) byte_str: &'static str, @@ -43,15 +33,15 @@ impl TestProtocolInputAndRoot { let protocol_bytes = hex::decode(self.byte_str).unwrap(); let expected_hash = TrieRootHash::from_slice(&hex::decode(self.root_str).unwrap()); - let (header, trie, _) = match process_compact_prestate_f(TrieCompact { + let out = match process_compact_prestate_f(TrieCompact { bytes: protocol_bytes, }) { Ok(x) => x, Err(err) => panic!("{}", err), }; - let trie_hash = trie.hash(); + let trie_hash = out.tries.state.hash(); - assert!(header.version_is_compatible(1)); + assert!(out.header.version_is_compatible(1)); assert_eq!(trie_hash, expected_hash); } } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 32e6a0dd2..5b0ddf961 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -5,12 +5,12 @@ use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::U256; -use crate::compact::compact_prestate_processing::process_compact_prestate; +use crate::compact::compact_prestate_processing::{process_compact_prestate, PartialTriePreImages}; use crate::decoding::TraceParsingResult; use crate::trace_protocol::{ BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, SeparateStorageTriesPreImage, SeparateTriePreImage, SeparateTriePreImages, TrieCompact, - TxnInfo, + TrieUncompressed, TxnInfo, }; use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, @@ -18,9 +18,9 @@ use crate::types::{ }; use crate::utils::hash; +#[derive(Debug)] pub(crate) struct ProcessedBlockTrace { - pub(crate) state_trie: HashedPartialTrie, - pub(crate) storage_tries: HashMap, + pub(crate) tries: PartialTriePreImages, pub(crate) txn_info: Vec, } @@ -61,8 +61,7 @@ impl BlockTrace { }; ProcessedBlockTrace { - state_trie: pre_image_data.state, - storage_tries: pre_image_data.storage, + tries: pre_image_data.tries, txn_info: self .txn_info .into_iter() @@ -72,9 +71,9 @@ impl BlockTrace { } } +#[derive(Debug)] struct ProcessedBlockTracePreImages { - state: HashedPartialTrie, - storage: HashMap, + tries: PartialTriePreImages, extra_code_hash_mappings: Option>>, } @@ -94,9 +93,13 @@ fn 
process_combined_trie_pre_images(tries: CombinedPreImages) -> ProcessedBlockT } fn process_separate_trie_pre_images(tries: SeparateTriePreImages) -> ProcessedBlockTracePreImages { - ProcessedBlockTracePreImages { + let tries = PartialTriePreImages { state: process_state_trie(tries.state), storage: process_storage_tries(tries.storage), + }; + + ProcessedBlockTracePreImages { + tries, extra_code_hash_mappings: None, } } @@ -112,13 +115,13 @@ fn process_storage_tries( trie: SeparateStorageTriesPreImage, ) -> HashMap { match trie { - SeparateStorageTriesPreImage::SingleTrie(t) => process_single_storage_trie(t), + SeparateStorageTriesPreImage::SingleTrie(t) => process_single_combined_storage_tries(t), SeparateStorageTriesPreImage::MultipleTries(t) => process_multiple_storage_tries(t), } } -fn process_single_storage_trie( - _trie: SeparateTriePreImage, +fn process_single_combined_storage_tries( + _trie: TrieUncompressed, ) -> HashMap { todo!() } @@ -131,15 +134,14 @@ fn process_multiple_storage_tries( fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages { // TODO: Wrap in proper result type... - let (header, trie, extra_code_hash_mappings) = process_compact_prestate(trie).unwrap(); + let out = process_compact_prestate(trie).unwrap(); // TODO: Make this into a result... - assert!(header.version_is_compatible(COMPATIBLE_HEADER_VERSION)); + assert!(out.header.version_is_compatible(COMPATIBLE_HEADER_VERSION)); ProcessedBlockTracePreImages { - state: trie, - storage: todo!(), - extra_code_hash_mappings, + tries: out.tries, + extra_code_hash_mappings: out.code, } } diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 9f136378b..3d0b86201 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -53,14 +53,14 @@ pub enum BlockTraceTriePreImages { Combined(CombinedPreImages), } -/// State/Storage trie pre-images that are seperate. +/// State/Storage trie pre-images that are separate. #[derive(Debug, Serialize, Deserialize)] pub struct SeparateTriePreImages { pub state: SeparateTriePreImage, pub storage: SeparateStorageTriesPreImage, } -/// A trie pre-image where state & storage are seperate. +/// A trie pre-image where state & storage are separate. #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum SeparateTriePreImage { @@ -99,7 +99,7 @@ pub enum SeparateStorageTriesPreImage { /// A single hash map that contains all node hashes from all storage tries /// involved in the block. We can reconstruct the individual storage tries /// by the storage root hash in the state entries. - SingleTrie(SeparateTriePreImage), + SingleTrie(TrieUncompressed), /// Each storage trie is sent over in a hashmap with the hashed account /// address as a key. From ac9afbebab1eb4f95a6883b1bc58bd360ff8f8d9 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 2 Nov 2023 16:31:13 -0600 Subject: [PATCH 091/208] Filled in a critical function --- src/processed_block_trace.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 5b0ddf961..b5df0fe59 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -286,6 +286,11 @@ pub(crate) struct TxnMetaState { pub(crate) block_bloom: Bloom, } -fn storage_addr_to_nibbles_even_nibble_fixed_hashed(_addr: &StorageAddr) -> Nibbles { - todo!() +// TODO: Remove/rename function based on how complex this gets... 
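// A hedged restatement of the conversion below with its invariant spelled
// out: keccak output is always exactly 32 bytes, i.e. a full 64-nibble
// key, so no truncation case exists (`hash` is this crate's keccak helper
// from `utils`):

fn hashed_slot_key_sketch(slot: &StorageAddr) -> Nibbles {
    let hashed: H256 = hash(slot.as_bytes());
    debug_assert_eq!(hashed.as_bytes().len(), 32); // 64 nibbles, never truncated
    Nibbles::from_h256_be(hashed)
}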
+fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbles { + // I think this is all we need to do? Yell at me if this breaks things. + // H256's are never going to be truncated I think. + + let hashed_addr = hash(addr.as_bytes()); + Nibbles::from_h256_be(hashed_addr) } From 9651985cc38b60a09ca9c9d349f0acee801d1480 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 3 Nov 2023 10:37:56 -0600 Subject: [PATCH 092/208] Feature complete! - Now rebuilds embedded storage tries, although I'm sure it's buggy... --- src/compact/compact_prestate_processing.rs | 136 +++++++++++++++++---- src/compact/compact_to_partial_trie.rs | 75 ++++++++---- src/compact/complex_test_payloads.rs | 2 +- src/processed_block_trace.rs | 4 +- src/utils.rs | 5 + 5 files changed, 169 insertions(+), 53 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 15cf7516b..c6bf1497f 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -12,7 +12,7 @@ use std::{ use enum_as_inner::EnumAsInner; use eth_trie_utils::{ nibbles::{FromHexPrefixError, Nibbles}, - partial_trie::HashedPartialTrie, + partial_trie::{HashedPartialTrie, PartialTrie}, }; use ethereum_types::{H256, U256}; use log::trace; @@ -20,7 +20,8 @@ use serde::{de::DeserializeOwned, Deserialize}; use thiserror::Error; use super::compact_to_partial_trie::{ - create_partial_trie_from_remaining_witness_elem, CompactToPartialOutput, + convert_storage_trie_root_keyed_hashmap_to_account_addr_keyed, + create_partial_trie_from_compact_node, create_partial_trie_from_remaining_witness_elem, }; use crate::{ trace_protocol::TrieCompact, @@ -232,6 +233,18 @@ pub(super) enum AccountNodeCode { HashNode(TrieRootHash), } +impl From> for AccountNodeCode { + fn from(v: Vec) -> Self { + Self::CodeNode(v) + } +} + +impl From for AccountNodeCode { + fn from(v: TrieRootHash) -> Self { + Self::HashNode(v) + } +} + #[derive(Clone, Debug)] pub(super) struct AccountNodeData { pub(super) nonce: Nonce, @@ -279,6 +292,12 @@ impl Header { } } +#[derive(Debug)] +pub(crate) struct WitnessOutput { + pub(crate) tries: PartialTriePreImages, + pub(crate) code: Option>>, +} + #[derive(Debug)] struct ParserState { entries: WitnessEntries, @@ -308,27 +327,45 @@ impl ParserState { Ok((header, p_state)) } - fn parse(mut self) -> CompactParsingResult { + fn parse(mut self) -> CompactParsingResult { let mut entry_buf = Vec::new(); + // TODO: Consider moving this into the `Self`... + let mut storage_tries = HashMap::new(); + loop { - let num_rules_applied = self.apply_rules_to_witness_entries(&mut entry_buf)?; + let num_rules_applied = + self.apply_rules_to_witness_entries(&mut storage_tries, &mut entry_buf)?; if num_rules_applied == 0 { break; } } - match self.entries.len() { + let res = match self.entries.len() { 1 => create_partial_trie_from_remaining_witness_elem(self.entries.pop().unwrap()), _ => Err(CompactParsingError::NonSingleEntryAfterProcessing( self.entries, )), - } + }?; + + let storage = + convert_storage_trie_root_keyed_hashmap_to_account_addr_keyed(&res.trie, storage_tries); + + let tries = PartialTriePreImages { + state: res.trie, + storage, + }; + + // Replace with a none if there are no entries. 
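// (Aside: `bool::then_some` is what collapses the empty-map case to
// `None` below; a generic mini-example, illustrative only:)

fn none_if_empty<K, V>(
    m: std::collections::HashMap<K, V>,
) -> Option<std::collections::HashMap<K, V>> {
    (!m.is_empty()).then_some(m)
}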
+ let code = (!res.code.is_empty()).then_some(res.code); + + Ok(WitnessOutput { tries, code }) } fn apply_rules_to_witness_entries( &mut self, + storage_tries: &mut HashMap, entry_buf: &mut Vec, ) -> CompactParsingResult { let mut traverser = self.entries.create_collapsable_traverser(); @@ -336,7 +373,8 @@ impl ParserState { let mut tot_rules_applied = 0; while !traverser.at_end() { - let num_rules_applied = Self::try_apply_rules_to_curr_entry(&mut traverser, entry_buf)?; + let num_rules_applied = + Self::try_apply_rules_to_curr_entry(&mut traverser, storage_tries, entry_buf)?; tot_rules_applied += num_rules_applied; if num_rules_applied == 0 { @@ -350,6 +388,7 @@ impl ParserState { fn try_apply_rules_to_curr_entry( traverser: &mut CollapsableWitnessEntryTraverser, + storage_tries: &mut HashMap, buf: &mut Vec, ) -> CompactParsingResult { traverser.get_next_n_elems_into_buf(MAX_WITNESS_ENTRIES_NEEDED_TO_MATCH_A_RULE, buf); @@ -392,13 +431,17 @@ impl ParserState { let (n_nodes_to_replace, account_node_code, s_root) = match (has_code, has_storage) { (false, false) => Self::match_account_leaf_no_code_and_no_storage(), - (false, true) => { - Self::match_account_leaf_no_code_but_has_storage(traverser, buf) - } + (false, true) => Self::match_account_leaf_no_code_but_has_storage( + traverser, + storage_tries, + buf, + ), (true, false) => { Self::match_account_leaf_has_code_but_no_storage(traverser, buf) } - (true, true) => Self::match_account_leaf_has_code_and_storage(traverser, buf), + (true, true) => { + Self::match_account_leaf_has_code_and_storage(traverser, storage_tries, buf) + } }?; let account_leaf_data = AccountNodeData::new(n, b, s_root, account_node_code); @@ -497,15 +540,19 @@ impl ParserState { fn match_account_leaf_no_code_but_has_storage( traverser: &mut CollapsableWitnessEntryTraverser, + storage_tries: &mut HashMap, buf: &mut Vec, ) -> CompactParsingResult<(usize, Option, Option)> { traverser.get_prev_n_elems_into_buf(1, buf); match buf[0].clone() { - WitnessEntry::Node(node) => match Self::try_get_storage_hash_from_node(&node) { - Some(s_hash) => Ok((2, None, Some(s_hash))), - None => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), - }, + WitnessEntry::Node(node) => Self::try_create_and_insert_partial_trie_from_node( + &node, + None, + storage_tries, + 2, + traverser, + ), _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), } } @@ -529,23 +576,58 @@ impl ParserState { fn match_account_leaf_has_code_and_storage( traverser: &mut CollapsableWitnessEntryTraverser, + storage_tries: &mut HashMap, buf: &mut Vec, ) -> CompactParsingResult<(usize, Option, Option)> { traverser.get_prev_n_elems_into_buf(2, buf); match &buf[0..=1] { - [WitnessEntry::Node(NodeEntry::Code(_c)), WitnessEntry::Node(_node)] => { - todo!() + [WitnessEntry::Node(NodeEntry::Code(c_bytes)), WitnessEntry::Node(node)] => { + Self::try_create_and_insert_partial_trie_from_node( + node, + Some(c_bytes.clone().into()), + storage_tries, + 3, + traverser, + ) } - [WitnessEntry::Node(NodeEntry::Hash(_h)), WitnessEntry::Node(_node)] => { - todo!() + [WitnessEntry::Node(NodeEntry::Hash(c_hash)), WitnessEntry::Node(node)] => { + Self::try_create_and_insert_partial_trie_from_node( + node, + Some((*c_hash).into()), + storage_tries, + 3, + traverser, + ) } _ => Self::invalid_witness_err(3, TraverserDirection::Backwards, traverser), } } - fn try_get_storage_hash_from_node(_node: &NodeEntry) -> Option { - todo!() + fn try_create_and_insert_partial_trie_from_node( + node: &NodeEntry, + 
account_node_code: Option, + storage_tries: &mut HashMap, + n: usize, + traverser: &mut CollapsableWitnessEntryTraverser, + ) -> CompactParsingResult<(usize, Option, Option)> { + match Self::try_get_storage_root_node(node) { + Some(storage_root_node) => { + let s_trie_out = create_partial_trie_from_compact_node(storage_root_node)?; + let s_trie_hash = s_trie_out.trie.hash(); + storage_tries.insert(s_trie_hash, s_trie_out.trie); + + Ok((n, account_node_code, Some(s_trie_hash))) + } + None => Self::invalid_witness_err(n, TraverserDirection::Backwards, traverser), + } + } + + fn try_get_storage_root_node(node: &NodeEntry) -> Option { + match node { + NodeEntry::Code(_) => None, + _ => Some(node.clone()), + } } fn invalid_witness_err( @@ -1166,8 +1248,7 @@ pub(crate) struct PartialTriePreImages { #[derive(Debug)] pub(crate) struct ProcessedCompactOutput { pub(crate) header: Header, - pub(crate) tries: PartialTriePreImages, - pub(crate) code: Option>>, + pub(crate) witness_out: WitnessOutput, } pub(crate) fn process_compact_prestate( @@ -1188,13 +1269,11 @@ fn process_compact_prestate_common( create_and_extract_header_f: fn(Vec) -> CompactParsingResult<(Header, ParserState)>, ) -> CompactParsingResult { let (header, parser) = create_and_extract_header_f(state.bytes)?; - let out = parser.parse()?; - let extra_code_hash_mappings = (!out.code.is_empty()).then_some(out.code); + let witness_out = parser.parse()?; let out = ProcessedCompactOutput { header, - tries: out.tries, - code: extra_code_hash_mappings, + witness_out, }; Ok(out) @@ -1366,7 +1445,10 @@ mod tests { hex::decode(b_str).unwrap() } + // TODO: Refactor (or remove?) this test as it will crash when it tries to + // deserialize the trie leaves into `AccountRlp`... #[test] + #[ignore] fn simple_full() { init(); diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index b6332214a..7caeb2484 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -4,20 +4,20 @@ use eth_trie_utils::{ nibbles::{Nibble, Nibbles}, partial_trie::{HashedPartialTrie, PartialTrie}, }; +use ethereum_types::H256; use plonky2_evm::generation::mpt::AccountRlp; use super::compact_prestate_processing::{ - AccountNodeCode, AccountNodeData, CompactParsingResult, LeafNodeData, NodeEntry, - PartialTriePreImages, WitnessEntry, + AccountNodeCode, AccountNodeData, CompactParsingResult, LeafNodeData, NodeEntry, WitnessEntry, }; use crate::{ - types::{CodeHash, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, + types::{CodeHash, HashedAccountAddr, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, utils::hash, }; #[derive(Debug, Default)] -pub(super) struct CompactToPartialOutput { - pub(super) tries: PartialTriePreImages, +pub(super) struct CompactToPartialTrieExtractionOutput { + pub(super) trie: HashedPartialTrie, // TODO: `code` is ever only available for storage tries, so we should come up with a better // API that represents this... @@ -26,33 +26,36 @@ pub(super) struct CompactToPartialOutput { pub(super) fn create_partial_trie_from_remaining_witness_elem( remaining_entry: WitnessEntry, -) -> CompactParsingResult { +) -> CompactParsingResult { let remaining_node = remaining_entry .into_node() .expect("Final node in compact entries was not a node! 
This is a bug!"); - let mut output = CompactToPartialOutput::default(); - create_partial_trie_from_remaining_witness_elem_rec( - Nibbles::default(), - &remaining_node, - &mut output, - )?; + create_partial_trie_from_compact_node(remaining_node) +} + +pub(super) fn create_partial_trie_from_compact_node( + node: NodeEntry, +) -> CompactParsingResult { + let mut output = CompactToPartialTrieExtractionOutput::default(); + + create_partial_trie_from_compact_node_rec(Nibbles::default(), &node, &mut output)?; Ok(output) } // TODO: Consider putting in some asserts that invalid nodes are not appearing // in the wrong trie type (eg. account ) -pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( +pub(super) fn create_partial_trie_from_compact_node_rec( curr_key: Nibbles, curr_node: &NodeEntry, - output: &mut CompactToPartialOutput, + output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { match curr_node { NodeEntry::Branch(n) => process_branch(curr_key, n, output), NodeEntry::Code(c_bytes) => process_code(c_bytes.clone(), output), NodeEntry::Empty => process_empty(), - NodeEntry::Hash(h) => process_hash(curr_key, *h, &mut output.tries.state), + NodeEntry::Hash(h) => process_hash(curr_key, *h, &mut output.trie), NodeEntry::Leaf(k, v) => process_leaf(curr_key, k, v, output), NodeEntry::Extension(k, c) => process_extension(curr_key, k, c, output), } @@ -61,21 +64,24 @@ pub(super) fn create_partial_trie_from_remaining_witness_elem_rec( fn process_branch( curr_key: Nibbles, branch: &[Option>], - output: &mut CompactToPartialOutput, + output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { for i in 0..16 { if let Some(child) = &branch[i] { // TODO: Seriously update `eth_trie_utils` to have a better API... 
             let mut new_k = curr_key;
             new_k.push_nibble_back(i as Nibble);
-            create_partial_trie_from_remaining_witness_elem_rec(new_k, child, output)?;
+            create_partial_trie_from_compact_node_rec(new_k, child, output)?;
         }
     }

     Ok(())
 }

-fn process_code(c_bytes: Vec<u8>, output: &mut CompactToPartialOutput) -> CompactParsingResult<()> {
+fn process_code(
+    c_bytes: Vec<u8>,
+    output: &mut CompactToPartialTrieExtractionOutput,
+) -> CompactParsingResult<()> {
     let c_hash = hash(&c_bytes);
     output.code.insert(c_hash, c_bytes);
@@ -103,7 +109,7 @@
 fn process_leaf(
     curr_key: Nibbles,
     leaf_key: &Nibbles,
     leaf_node_data: &LeafNodeData,
-    output: &mut CompactToPartialOutput,
+    output: &mut CompactToPartialTrieExtractionOutput,
 ) -> CompactParsingResult<()> {
     let full_k = curr_key.merge_nibbles(leaf_key);
@@ -114,7 +120,7 @@
         }
     };

-    output.tries.state.insert(full_k, l_val);
+    output.trie.insert(full_k, l_val);

     Ok(())
 }
@@ -123,17 +129,17 @@
 fn process_extension(
     curr_key: Nibbles,
     ext_node_key: &Nibbles,
     ext_child: &NodeEntry,
-    output: &mut CompactToPartialOutput,
+    output: &mut CompactToPartialTrieExtractionOutput,
 ) -> CompactParsingResult<()> {
     let new_k = curr_key.merge_nibbles(ext_node_key);
-    create_partial_trie_from_remaining_witness_elem_rec(new_k, ext_child, output)?;
+    create_partial_trie_from_compact_node_rec(new_k, ext_child, output)?;

     Ok(())
 }

 fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup(
     acc_data: &AccountNodeData,
-    output: &mut CompactToPartialOutput,
+    output: &mut CompactToPartialTrieExtractionOutput,
 ) -> Vec<u8> {
     let code_hash = match &acc_data.account_node_code {
         Some(AccountNodeCode::CodeNode(c_bytes)) => {
@@ -156,3 +162,26 @@
     // TODO: Avoid the unnecessary allocation...
     rlp::encode(&account).into()
 }
+
+pub(crate) fn convert_storage_trie_root_keyed_hashmap_to_account_addr_keyed(
+    state_trie: &HashedPartialTrie,
+    storage_root_trie: HashMap<TrieRootHash, HashedPartialTrie>,
+) -> HashMap<HashedAccountAddr, HashedPartialTrie> {
+    let mut acc_addr_to_storage_trie_map = HashMap::new();
+
+    let account_addr_and_storage_root_iter = state_trie.items()
+        .filter_map(|(h_addr_nibs, acc_bytes)| acc_bytes.as_val().map(|acc_bytes| (H256::from_slice(&h_addr_nibs.bytes_be()), rlp::decode::<AccountRlp>(acc_bytes).expect("Encoder lib managed to improperly encode an account node in the state trie! This is a major bug in the encoder.").storage_root)));
+
+    // TODO: Replace with a map...
+    for (acc_addr, storage_root) in account_addr_and_storage_root_iter {
+        if let Some(s_trie) = storage_root_trie.get(&storage_root) {
+            let hashed_addr = hash(acc_addr.as_bytes());
+
+            // Possibility of identical tries between accounts, so we need to do a clone
+            // here.
+            acc_addr_to_storage_trie_map.insert(hashed_addr, s_trie.clone());
+        }
+    }
+
+    acc_addr_to_storage_trie_map
+}
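The helper above re-keys the storage tries collected during parsing (keyed by storage root) to hashed account address by walking the state trie's account leaves. A hedged sketch of the same re-keying idea over plain maps (types and names here are stand-ins, not this crate's API):

use std::collections::HashMap;

fn rekey_by_account(
    account_storage_roots: &HashMap<String, String>, // hashed account addr -> storage root
    tries_by_root: &HashMap<String, Vec<u8>>,        // storage root -> encoded storage trie
) -> HashMap<String, Vec<u8>> {
    let mut out = HashMap::new();

    for (addr, root) in account_storage_roots {
        // Two accounts may share an identical storage trie (and thus the same
        // root), so each account receives its own clone.
        if let Some(trie) = tries_by_root.get(root) {
            out.insert(addr.clone(), trie.clone());
        }
    }

    out
}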
diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs
index ecf67b99e..03521947d 100644
--- a/src/compact/complex_test_payloads.rs
+++ b/src/compact/complex_test_payloads.rs
@@ -39,7 +39,7 @@ impl TestProtocolInputAndRoot {
             Ok(x) => x,
             Err(err) => panic!("{}", err),
         };
-        let trie_hash = out.tries.state.hash();
+        let trie_hash = out.witness_out.tries.state.hash();

         assert!(out.header.version_is_compatible(1));
         assert_eq!(trie_hash, expected_hash);
diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs
index b5df0fe59..8a5502f1c 100644
--- a/src/processed_block_trace.rs
+++ b/src/processed_block_trace.rs
@@ -140,8 +140,8 @@ fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages {
     assert!(out.header.version_is_compatible(COMPATIBLE_HEADER_VERSION));

     ProcessedBlockTracePreImages {
-        tries: out.tries,
-        extra_code_hash_mappings: out.code,
+        tries: out.witness_out.tries,
+        extra_code_hash_mappings: out.witness_out.code,
     }
 }
diff --git a/src/utils.rs b/src/utils.rs
index 5f0b0016d..7a40f1434 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,3 +1,4 @@
+use eth_trie_utils::nibbles::Nibbles;
 use ethereum_types::H256;
 use keccak_hash::keccak;
@@ -14,3 +15,7 @@ pub(crate) fn update_val_if_some<T>(target: &mut T, opt: Option<T>) {
 pub(crate) fn clone_vec_and_remove_refs<T: Clone>(vec_of_refs: &[&T]) -> Vec<T> {
     vec_of_refs.iter().map(|r| (*r).clone()).collect()
 }
+
+pub(crate) fn h256_to_nibbles(v: H256) -> Nibbles {
+    Nibbles::from_h256_be(v)
+}
From acbfc9563e6aeda001da079f61c8f1ba72d77e1c Mon Sep 17 00:00:00 2001
From: BGluth
Date: Fri, 3 Nov 2023 11:12:01 -0600
Subject: [PATCH 093/208] Added a fourth very complex test

---
 src/compact/compact_prestate_processing.rs | 8 +++++++-
 src/compact/complex_test_payloads.rs       | 4 ++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index c6bf1497f..77cca6b2d 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -1427,7 +1427,7 @@ mod tests {
     use super::{key_bytes_to_nibbles, parse_just_to_instructions, Instruction};
     use crate::compact::{
         compact_prestate_processing::ParserState,
-        complex_test_payloads::{TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3},
+        complex_test_payloads::{TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3, TEST_PAYLOAD_4},
     };

     const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012";
@@ -1507,4 +1507,10 @@ mod tests {
         init();
         TEST_PAYLOAD_3.parse_and_check_hash_matches_with_debug();
     }
+
+    #[test]
+    fn complex_payload_4() {
+        init();
+        TEST_PAYLOAD_4.parse_and_check_hash_matches_with_debug();
+    }
 }
diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs
index 03521947d..47c8c7b87 100644
--- a/src/compact/complex_test_payloads.rs
+++ b/src/compact/complex_test_payloads.rs
@@ -7,9 +7,13 @@ use super::compact_prestate_processing::{

 use crate::{trace_protocol::TrieCompact, types::TrieRootHash};

 pub(crate) const TEST_PAYLOAD_1: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str:
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79084a021e19e0c9bab2400000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d190841010219102005582103876da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6100841010558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6a0673c691edfa4c4528323986bb43c579316f436ff6f8b4ac70854bbd95340b" }; + pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a021e0c000250c782fa00055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480de0b6b3a76400000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "e779761e7f0cf4bb2b5e5a2ebac65406d3a7516d46798040803488825a01c19c" }; + pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; +pub(crate) const TEST_PAYLOAD_4: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e14610ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d91461094557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a36004
80360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c565b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610
b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b604051611068919061264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b60405180910390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112
815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b5780601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f4576001820491506001810190506117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c5
65b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b600154905090565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b5050809150
50919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b600080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274
c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b95508019841693508086168417925050509392505050565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e5557600084
15612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73", root_str: "5d18708fa7f7c751cf1528a5dd7ce11911a4eaeaef2b06f0c3e6e0cbce303e19" }; + type ProcessCompactPrestateFn = fn(TrieCompact) -> CompactParsingResult; pub(crate) struct TestProtocolInputAndRoot { From e1e94d6247143ef6a7552ada454768f8b2ca67c1 Mon Sep 17 00:00:00 2001 From: cpu Date: Fri, 3 Nov 2023 11:44:25 -0700 Subject: [PATCH 094/208] Handle bytestrings and LegacyReceiptRlp decoding --- Cargo.toml | 1 + src/compact/compact_prestate_processing.rs | 2 +- src/compact/complex_test_payloads.rs | 4 +- src/deserializers.rs | 64 ++++++++++++++++++++++ src/lib.rs | 2 + src/processed_block_trace.rs | 22 +++++++- src/trace_protocol.rs | 47 ++++++++-------- 7 files changed, 115 insertions(+), 27 deletions(-) create mode 100644 src/deserializers.rs diff --git a/Cargo.toml b/Cargo.toml index f71c5d780..49acde180 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" serde = "1.0.166" 
+serde_with = "3.4.0" [dev-dependencies] pretty_env_logger = "0.5.0" diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 77cca6b2d..264a5edb1 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -1268,7 +1268,7 @@ fn process_compact_prestate_common( state: TrieCompact, create_and_extract_header_f: fn(Vec) -> CompactParsingResult<(Header, ParserState)>, ) -> CompactParsingResult { - let (header, parser) = create_and_extract_header_f(state.bytes)?; + let (header, parser) = create_and_extract_header_f(state.0)?; let witness_out = parser.parse()?; let out = ProcessedCompactOutput { diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index 47c8c7b87..1778d91e8 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -37,9 +37,7 @@ impl TestProtocolInputAndRoot { let protocol_bytes = hex::decode(self.byte_str).unwrap(); let expected_hash = TrieRootHash::from_slice(&hex::decode(self.root_str).unwrap()); - let out = match process_compact_prestate_f(TrieCompact { - bytes: protocol_bytes, - }) { + let out = match process_compact_prestate_f(TrieCompact(protocol_bytes)) { Ok(x) => x, Err(err) => panic!("{}", err), }; diff --git a/src/deserializers.rs b/src/deserializers.rs new file mode 100644 index 000000000..88af007d5 --- /dev/null +++ b/src/deserializers.rs @@ -0,0 +1,64 @@ +//! Custom deserializers for Serde. +use hex::FromHex; +use plonky2_evm::generation::mpt::LegacyReceiptRlp; +use rlp::DecoderError; +use serde::{ + de::{Error, Visitor}, + Deserialize, Deserializer, +}; + +#[derive(Clone, Debug, Default, Deserialize)] +pub(crate) struct ByteString(#[serde(with = "self")] pub(crate) Vec); + +impl From for Vec { + fn from(v: ByteString) -> Self { + v.0 + } +} + +impl TryFrom for LegacyReceiptRlp { + type Error = DecoderError; + + fn try_from(value: ByteString) -> Result { + rlp::decode(&value.0) + } +} + +fn remove_hex_prefix_if_present(data: &str) -> &str { + let prefix = &data[..2]; + + match matches!(prefix, "0x" | "0X") { + false => data, + true => &data[2..], + } +} + +// Gross, but there is no Serde crate that can both parse a hex string with a +// prefix and also deserialize from a `Vec`. 
+fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<u8>, D::Error> {
+    struct PrefixHexStrVisitor();
+
+    impl<'de> Visitor<'de> for PrefixHexStrVisitor {
+        type Value = Vec<u8>;
+
+        fn visit_str<E>(self, data: &str) -> Result<Self::Value, E>
+        where
+            E: Error,
+        {
+            FromHex::from_hex(remove_hex_prefix_if_present(data)).map_err(Error::custom)
+        }
+
+        fn visit_borrowed_str<E>(self, data: &'de str) -> Result<Self::Value, E>
+        where
+            E: Error,
+        {
+            FromHex::from_hex(remove_hex_prefix_if_present(data)).map_err(Error::custom)
+        }
+
+        fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+            write!(f, "a hex encoded string with a prefix")
+        }
+    }
+
+    deserializer.deserialize_string(PrefixHexStrVisitor())
+}
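A hedged usage sketch for the deserializer above (`serde_json` is assumed purely for illustration; it is not a dependency added by this patch). Prefixed and unprefixed hex strings should decode to the same bytes:

#[test]
fn prefixed_and_unprefixed_hex_decode_identically() {
    let a: ByteString = serde_json::from_str(r#""0xdeadbeef""#).unwrap();
    let b: ByteString = serde_json::from_str(r#""deadbeef""#).unwrap();

    assert_eq!(a.0, vec![0xde, 0xad, 0xbe, 0xef]);
    assert_eq!(a.0, b.0);
}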
diff --git a/src/lib.rs b/src/lib.rs
index c2a52631a..e1e004f09 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,8 +1,10 @@
 #![feature(linked_list_cursors)]
 #![feature(trait_alias)]
+#![feature(iter_array_chunks)]

 mod compact;
 pub mod decoding;
+mod deserializers;
 pub mod processed_block_trace;
 pub mod proof_gen_types;
 pub mod trace_protocol;
diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs
index 8a5502f1c..77d601176 100644
--- a/src/processed_block_trace.rs
+++ b/src/processed_block_trace.rs
@@ -178,6 +178,7 @@ impl TxnInfo {
     ) -> ProcessedTxnInfo {
         let mut nodes_used_by_txn = NodesUsedByTxn::default();
         let mut contract_code_accessed = HashMap::new();
+        let block_bloom = self.block_bloom();

         for (addr, trace) in self.traces {
             let hashed_addr = hash(addr.as_bytes());
@@ -248,7 +249,7 @@
         let new_meta_state = TxnMetaState {
             txn_bytes: self.meta.byte_code,
             gas_used: self.meta.gas_used,
-            block_bloom: self.meta.bloom,
+            block_bloom,
         };

         ProcessedTxnInfo {
@@ -257,6 +258,25 @@
             meta: new_meta_state,
         }
     }
+
+    fn block_bloom(&self) -> Bloom {
+        let mut bloom = [U256::zero(); 8];
+
+        // Note that bloom can be empty.
+        for (i, v) in self
+            .meta
+            .new_receipt_trie_node_byte
+            .bloom
+            .clone()
+            .into_iter()
+            .array_chunks::<32>()
+            .enumerate()
+        {
+            bloom[i] = U256::from_big_endian(v.as_slice());
+        }
+
+        bloom
+    }
 }

 /// Note that "*_accesses" includes writes.
diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs
index 3d0b86201..e6420db46 100644
--- a/src/trace_protocol.rs
+++ b/src/trace_protocol.rs
@@ -25,17 +25,20 @@
 use std::collections::HashMap;

 use eth_trie_utils::partial_trie::HashedPartialTrie;
 use ethereum_types::{Address, U256};
-use serde::{Deserialize, Serialize};
+use plonky2_evm::generation::mpt::LegacyReceiptRlp;
+use serde::Deserialize;
+use serde_with::{serde_as, FromInto, TryFromInto};

 use crate::{
-    types::{Bloom, CodeHash, HashedAccountAddr, StorageAddr, StorageVal},
+    deserializers::ByteString,
+    types::{CodeHash, HashedAccountAddr, StorageAddr, StorageVal},
     utils::hash,
 };

 /// Core payload needed to generate a proof for a block. Note that the scheduler
 /// may need to request some additional data from the client along with this in
 /// order to generate a proof.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct BlockTrace {
     /// The trie pre-images (state & storage) in multiple possible formats.
     pub trie_pre_images: BlockTraceTriePreImages,
@@ -46,7 +49,7 @@ pub struct BlockTrace {
 }

 /// Minimal hashed out tries needed by all txns in the block.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum BlockTraceTriePreImages {
     Separate(SeparateTriePreImages),
@@ -54,14 +57,14 @@ pub enum BlockTraceTriePreImages {
 }

 /// State/Storage trie pre-images that are separate.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct SeparateTriePreImages {
     pub state: SeparateTriePreImage,
     pub storage: SeparateStorageTriesPreImage,
 }

 /// A trie pre-image where state & storage are separate.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum SeparateTriePreImage {
     Uncompressed(TrieUncompressed),
@@ -69,7 +72,7 @@ pub enum SeparateTriePreImage {
 }

 /// A trie pre-image where both state & storage are combined into one payload.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum CombinedPreImages {
     Compact(TrieCompact),
@@ -77,23 +80,22 @@ pub enum CombinedPreImages {
 // TODO
 /// Bulkier format that is quicker to process.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct TrieUncompressed {}

 // TODO
+#[serde_as]
 /// Compact representation of a trie (will likely be very close to https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md)
-#[derive(Debug, Serialize, Deserialize)]
-pub struct TrieCompact {
-    pub bytes: Vec<u8>,
-}
+#[derive(Debug, Deserialize)]
+pub struct TrieCompact(#[serde_as(as = "FromInto<ByteString>")] pub Vec<u8>);

 // TODO
 /// Trie format that is in exactly the same format of our internal trie format.
 /// This is the fastest format for us to process.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct TrieDirect(pub HashedPartialTrie);

-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum SeparateStorageTriesPreImage {
     /// A single hash map that contains all node hashes from all storage tries
@@ -107,7 +109,7 @@ pub enum SeparateStorageTriesPreImage {
 }

 /// Info specific to txns in the block.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct TxnInfo {
     /// Trace data for the txn. This is used by the protocol to:
     /// - Mutate its own trie state between txns to arrive at the correct trie
@@ -120,33 +122,34 @@ pub struct TxnInfo {
     pub meta: TxnMeta,
 }

-#[derive(Debug, Serialize, Deserialize)]
+#[serde_as]
+#[derive(Debug, Deserialize)]
 pub struct TxnMeta {
     /// Txn byte code.
+    #[serde_as(as = "FromInto<ByteString>")]
     pub byte_code: Vec<u8>,

     /// Rlped bytes of the new txn value inserted into the txn trie by
     /// this txn. Note that the key is not included and this is only the rlped
     /// value of the node!
+    #[serde_as(as = "FromInto<ByteString>")]
     pub new_txn_trie_node_byte: Vec<u8>,

     /// Rlped bytes of the new receipt value inserted into the receipt trie by
     /// this txn. Note that the key is not included and this is only the rlped
     /// value of the node!
-    pub new_receipt_trie_node_byte: Vec<u8>,
+    #[serde_as(as = "TryFromInto<ByteString>")]
+    pub new_receipt_trie_node_byte: LegacyReceiptRlp,

     /// Gas used by this txn (Note: not cumulative gas used).
     pub gas_used: u64,
-
-    /// Bloom after txn execution.
-    pub bloom: Bloom,
 }
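Since `TxnMeta`'s byte fields now deserialize through `ByteString`, the same adapter composes for any byte field. A hedged sketch (the `Example` struct is hypothetical, assuming the `serde_as`/`FromInto` imports already added to this file above):

#[serde_as]
#[derive(Debug, Deserialize)]
struct Example {
    // Accepts both "0xdeadbeef" and "deadbeef" on the wire, storing raw bytes.
    #[serde_as(as = "FromInto<ByteString>")]
    payload: Vec<u8>,
}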
 /// A "trace" specific to an account for a txn.
 ///
 /// Specifically, since we can not execute the txn before proof generation, we
 /// rely on a separate EVM to run the txn and supply this data for us.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct TxnTrace {
     /// If the balance changed, then the new balance will appear here. Will be
     /// `None` if no change.
@@ -171,7 +174,7 @@ pub struct TxnTrace {
 }

 /// Contract code access type. Used by txn traces.
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum ContractCodeUsage {
     /// Contract was read.
From ab8be290d6136ffb931966c9008c5baa12d59acb Mon Sep 17 00:00:00 2001
From: BGluth
Date: Fri, 3 Nov 2023 15:36:51 -0600
Subject: [PATCH 095/208] Now parses the undocumented field of `code_size`

- This does not appear in the docs anywhere!

---
 src/compact/compact_prestate_processing.rs | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index 264a5edb1..245cb62b0 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -815,11 +815,17 @@ impl WitnessBytes {
             self.byte_cursor.read_t::<u64>("account leaf nonce")
         })?
         .into();
+
         let balance = Self::read_account_flag_field_if_present_or_default(flags.balance_present, || {
             self.byte_cursor.read_cbor_u256("account leaf balance")
         })?;

+        // I don't think we need code size?
+        let _ = Self::read_account_flag_field_if_present_or_default(flags.code_present, || {
+            self.byte_cursor.read_t::<u64>("code size")
+        })?;
+
         // TODO: process actual storage trie probably? Wait until we know what is going
         // on here.
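Both reads above go through `read_account_flag_field_if_present_or_default`; a minimal, hedged sketch of that flag-gated pattern (illustrative names, with `String` standing in for the parser's real error type):

fn read_flag_field_if_present_or_default<T: Default>(
    present: bool,
    read_f: impl FnOnce() -> Result<T, String>,
) -> Result<T, String> {
    match present {
        // Only consume bytes from the cursor when the flag bit was set;
        // otherwise the field simply was not serialized.
        true => read_f(),
        false => Ok(T::default()),
    }
}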
From f1fe3ce0f4dc9d6ed5eeba9bf37d6866bcebeba3 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Fri, 3 Nov 2023 15:58:50 -0600
Subject: [PATCH 096/208] Now fully parses test payload 4

- ... But the hashes don't match yet.

---
 src/compact/compact_prestate_processing.rs | 7 +++++--
 src/compact/complex_test_payloads.rs       | 2 +-
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index 245cb62b0..82b082108 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -398,6 +398,9 @@ impl ParserState {
         // TODO: These clones are really bad, but we will clean this up once it works.
         match buf[0].clone() {
+            WitnessEntry::Instruction(Instruction::EmptyRoot) => {
+                Self::traverser_replace_prev_n_nodes_entry_helper(1, traverser, NodeEntry::Empty)
+            }
             WitnessEntry::Instruction(Instruction::Hash(h)) => {
                 Self::traverser_replace_prev_n_nodes_entry_helper(1, traverser, NodeEntry::Hash(h))
             }
@@ -582,7 +585,7 @@ impl ParserState {
         traverser.get_prev_n_elems_into_buf(2, buf);

         match &buf[0..=1] {
-            [WitnessEntry::Node(NodeEntry::Code(c_bytes)), WitnessEntry::Node(node)] => {
+            [WitnessEntry::Node(node), WitnessEntry::Node(NodeEntry::Code(c_bytes))] => {
                 Self::try_create_and_insert_partial_trie_from_node(
                     node,
                     Some(c_bytes.clone().into()),
@@ -591,7 +594,7 @@ impl ParserState {
                     traverser,
                 )
             }
-            [WitnessEntry::Node(NodeEntry::Hash(c_hash)), WitnessEntry::Node(node)] => {
+            [WitnessEntry::Node(node), WitnessEntry::Node(NodeEntry::Hash(c_hash))] => {
                 Self::try_create_and_insert_partial_trie_from_node(
                     node,
                     Some((*c_hash).into()),
diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs
index 1778d91e8..2b3ab2fe0 100644
--- a/src/compact/complex_test_payloads.rs
+++ b/src/compact/complex_test_payloads.rs
@@ -39,7 +39,7 @@ impl TestProtocolInputAndRoot {

         let out = match process_compact_prestate_f(TrieCompact(protocol_bytes)) {
             Ok(x) => x,
-            Err(err) => panic!("{}", err),
+            Err(err) => panic!("{}", err.to_string()),
         };
         let trie_hash = out.witness_out.tries.state.hash();
From d28e3afa4a7852e68c3cc9e5ee509cc32a5be6c7 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Fri, 3 Nov 2023 16:54:05 -0600
Subject: [PATCH 097/208] Test payload 4 now uses correct hash

- Also now split the massive witness bytecode onto a separate line.

---
 src/compact/complex_test_payloads.rs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs
index 2b3ab2fe0..78b0a57a6 100644
--- a/src/compact/complex_test_payloads.rs
+++ b/src/compact/complex_test_payloads.rs
@@ -12,7 +12,8 @@ pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAnd

 pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" };

-pub(crate) const TEST_PAYLOAD_4: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str:
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e14610ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d91461094557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a36004
80360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c565b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610
b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b604051611068919061264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b60405180910390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112
815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b5780601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f4576001820491506001810190506117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c5
65b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b600154905090565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b5050809150
50919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b600080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274
c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b95508019841693508086168417925050509392505050565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e5557600084
15612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73", root_str: "5d18708fa7f7c751cf1528a5dd7ce11911a4eaeaef2b06f0c3e6e0cbce303e19" }; +pub(crate) const TEST_PAYLOAD_4: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e14610ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d91461094557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a36004
80360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c565b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610
b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b604051611068919061264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b60405180910390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112
815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b5780601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f4576001820491506001810190506117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c5
65b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b600154905090565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b5050809150
50919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b600080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274
c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b95508019841693508086168417925050509392505050565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e5557600084
15612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73", root_str: +"2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b" }; type ProcessCompactPrestateFn = fn(TrieCompact) -> CompactParsingResult; From 6330d7ac069f642bed3fac51be06327f3d77063c Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 6 Nov 2023 14:07:23 -0700 Subject: [PATCH 098/208] All tests now pass!! 
---
 src/compact/compact_prestate_processing.rs | 10 +++++-
 src/compact/compact_to_partial_trie.rs     |  2 +-
 src/utils.rs                               | 36 +++++++++++++++++++++-
 3 files changed, 45 insertions(+), 3 deletions(-)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index 82b082108..16ed82fe7 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -1436,7 +1436,9 @@ mod tests {
     use super::{key_bytes_to_nibbles, parse_just_to_instructions, Instruction};
     use crate::compact::{
         compact_prestate_processing::ParserState,
-        complex_test_payloads::{TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3, TEST_PAYLOAD_4},
+        complex_test_payloads::{
+            TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3, TEST_PAYLOAD_4, TEST_PAYLOAD_5,
+        },
     };
 
     const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012";
@@ -1522,4 +1524,10 @@
         init();
         TEST_PAYLOAD_4.parse_and_check_hash_matches_with_debug();
     }
+
+    #[test]
+    fn complex_payload_5() {
+        init();
+        TEST_PAYLOAD_5.parse_and_check_hash_matches_with_debug();
+    }
 }
diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs
index 7caeb2484..cc08962a2 100644
--- a/src/compact/compact_to_partial_trie.rs
+++ b/src/compact/compact_to_partial_trie.rs
@@ -114,7 +114,7 @@ fn process_leaf(
     let full_k = curr_key.merge_nibbles(leaf_key);
     let l_val = match leaf_node_data {
-        LeafNodeData::Value(v_bytes) => v_bytes.0.clone(),
+        LeafNodeData::Value(v_bytes) => rlp::encode(&v_bytes.0).to_vec(),
         LeafNodeData::Account(acc_data) => {
             convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup(acc_data, output)
         }
     };
diff --git a/src/utils.rs b/src/utils.rs
index 7a40f1434..cb7b12d13 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,7 +1,13 @@
-use eth_trie_utils::nibbles::Nibbles;
+use eth_trie_utils::{
+    nibbles::Nibbles,
+    partial_trie::{HashedPartialTrie, PartialTrie},
+    trie_ops::ValOrHash,
+};
 use ethereum_types::H256;
 use keccak_hash::keccak;
 
+use crate::types::HashedStorageAddr;
+
 pub(crate) fn hash(bytes: &[u8]) -> H256 {
     H256::from(keccak(bytes).0)
 }
@@ -19,3 +25,31 @@ pub(crate) fn clone_vec_and_remove_refs<T: Clone>(vec_of_refs: &[&T]) -> Vec<T>
 pub(crate) fn h256_to_nibbles(v: H256) -> Nibbles {
     Nibbles::from_h256_be(v)
 }
+
+// TODO: Move under a feature flag...
+pub(crate) fn print_value_and_hash_nodes_of_trie(trie: &HashedPartialTrie) {
+    let trie_elems = print_value_and_hash_nodes_of_trie_common(trie);
+    println!("State trie for {:#?}", trie_elems);
+}
+
+// TODO: Move under a feature flag...
+pub(crate) fn print_value_and_hash_nodes_of_storage_trie(
+    s_trie_addr: HashedStorageAddr,
+    trie: &HashedPartialTrie,
+) {
+    let trie_elems = print_value_and_hash_nodes_of_trie_common(trie);
+    println!("Storage trie for {:x}: {:#?}", s_trie_addr, trie_elems);
+}
+
+// TODO: Move under a feature flag...
+pub(crate) fn print_value_and_hash_nodes_of_trie_common(trie: &HashedPartialTrie) -> Vec<String> {
+    trie.items()
+        .map(|(k, v_or_h)| {
+            let v_or_h_char = match v_or_h {
+                ValOrHash::Val(_) => 'L',
+                ValOrHash::Hash(_) => 'H',
+            };
+            format!("{} - {:x}", v_or_h_char, k)
+        })
+        .collect()
+}

From 3d5ce9c31c9924bbcbd173f5804c6eb74a6c4f0b Mon Sep 17 00:00:00 2001
From: BGluth
Date: Mon, 6 Nov 2023 14:35:58 -0700
Subject: [PATCH 099/208] Fixed a few easy clippy warnings

---
 src/compact/compact_prestate_processing.rs | 18 ++----------------
 src/compact/compact_to_partial_trie.rs     |  4 ++--
 src/compact/complex_test_payloads.rs       | 18 +++++++++++++---
 3 files changed, 19 insertions(+), 21 deletions(-)

diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs
index 16ed82fe7..51133fdf0 100644
--- a/src/compact/compact_prestate_processing.rs
+++ b/src/compact/compact_prestate_processing.rs
@@ -16,7 +16,7 @@ use eth_trie_utils::{
 };
 use ethereum_types::{H256, U256};
 use log::trace;
-use serde::{de::DeserializeOwned, Deserialize};
+use serde::de::DeserializeOwned;
 use thiserror::Error;
 
 use super::compact_to_partial_trie::{
@@ -269,12 +269,6 @@ impl AccountNodeData {
     }
 }
 
-#[derive(Debug, Deserialize)]
-struct LeafData {
-    key: Nibbles,
-    value: Vec<u8>,
-}
-
 #[derive(Debug)]
 pub(crate) struct Header {
     version: u8,
@@ -912,14 +906,12 @@ trait CompactCursor {
 #[derive(Debug)]
 struct CompactCursorFast {
     intern: Cursor<Vec<u8>>,
-    temp_buf: Vec<u8>,
 }
 
 impl CompactCursor for CompactCursorFast {
     fn new(bytes: Vec<u8>) -> Self {
         Self {
             intern: Cursor::new(bytes),
-            temp_buf: Vec::default(),
         }
     }
 
@@ -1159,10 +1151,6 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> {
         self.entry_cursor.move_next();
     }
 
-    fn get_curr_elem(&self) -> Option<&WitnessEntry> {
-        self.entry_cursor.as_cursor().current()
-    }
-
     fn get_next_n_elems(&self, n: usize) -> impl Iterator<Item = &WitnessEntry> {
         let mut read_only_cursor = self.entry_cursor.as_cursor();
 
@@ -1394,7 +1382,6 @@ fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles {
 }
 
 struct CursorBytesDebugInfo {
-    bytes: Vec<u8>,
     bytes_hex: String,
     hex_start_pos: usize,
 }
@@ -1405,11 +1392,10 @@ fn get_bytes_and_debug_info_from_cursor(
 ) -> CursorBytesDebugInfo {
     let bytes = get_bytes_from_cursor(cursor, cursor_start_pos);
 
-    let bytes_hex = hex::encode(&bytes);
+    let bytes_hex = hex::encode(bytes);
     let hex_start_pos = cursor_start_pos as usize * 2;
 
     CursorBytesDebugInfo {
-        bytes,
         bytes_hex,
         hex_start_pos,
     }
diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs
index cc08962a2..fb8c8f87a 100644
--- a/src/compact/compact_to_partial_trie.rs
+++ b/src/compact/compact_to_partial_trie.rs
@@ -66,8 +66,8 @@ fn process_branch(
     branch: &[Option>],
     output: &mut CompactToPartialTrieExtractionOutput,
 ) -> CompactParsingResult<()> {
-    for i in 0..16 {
-        if let Some(child) = &branch[i] {
+    for (i, slot) in branch.iter().enumerate().take(16) {
+        if let Some(child) = slot {
             // TODO: Seriously update `eth_trie_utils` to have a better API...
let mut new_k = curr_key; new_k.push_nibble_back(i as Nibble); diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index 78b0a57a6..32a89d67e 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -4,7 +4,11 @@ use super::compact_prestate_processing::{ process_compact_prestate, process_compact_prestate_debug, CompactParsingResult, ProcessedCompactOutput, }; -use crate::{trace_protocol::TrieCompact, types::TrieRootHash}; +use crate::{ + trace_protocol::TrieCompact, + types::TrieRootHash, + utils::{print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie}, +}; pub(crate) const TEST_PAYLOAD_1: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79084a021e19e0c9bab2400000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d190841010219102005582103876da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6100841010558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6a0673c691edfa4c4528323986bb43c579316f436ff6f8b4ac70854bbd95340b" }; @@ -12,8 +16,10 @@ pub(crate) const TEST_PAYLOAD_2: TestProtocolInputAndRoot = TestProtocolInputAnd pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d0084101031a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a0405582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c024a021e0a9cae36fa8e4788055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d08480f43fc2c04ee00000558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca61084101021901200558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410102196573", root_str: "6978d65a3f2fc887408cc28dbb796836ff991af73c21ea74d03a11f6cdeb119c" }; -pub(crate) const TEST_PAYLOAD_4: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e14610ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d91461094557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a36004
80360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c565b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610
b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b604051611068919061264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b60405180910390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112
815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b5780601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f4576001820491506001810190506117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c5
65b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b600154905090565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b5050809150
50919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b600080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274
c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b95508019841693508086168417925050509392505050565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e5557600084
15612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73", root_str: -"2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b" }; +pub(crate) const TEST_PAYLOAD_4: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "0103a6885b3731702da62e8e4a8f584ac46a7f6822f4e2ba50fba902f67b1588d23b005821028015657e298d35290e69628be03d91f74d613caf3afdbe09138cfa415efe2f5044deadbeef0558210218b289936a0874cccee65712c88cdaa0a305b004d3fda2942b2b2dc54f14f6110b443b9aca0004", root_str: "69a5a7a8f99161a35e8b64975d8c6af10db4eee7bd956418839d8ff763aaf00c" }; + +pub(crate) const TEST_PAYLOAD_5: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: 
"01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e14610ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d91461094557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a36004
80360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c565b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610
b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b604051611068919061264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b60405180910390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112
815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b5780601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f4576001820491506001810190506117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c5
65b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b600154905090565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b5050809150
50919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b600080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274
c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b95508019841693508086168417925050509392505050565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e5557600084
15612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73",
+root_str: "2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b" };

 type ProcessCompactPrestateFn = fn(TrieCompact) -> CompactParsingResult<ProcessedCompactOutput>;

@@ -44,6 +50,12 @@ impl TestProtocolInputAndRoot {
         };
         let trie_hash = out.witness_out.tries.state.hash();

+        print_value_and_hash_nodes_of_trie(&out.witness_out.tries.state);
+
+        for (hashed_addr, s_trie) in out.witness_out.tries.storage {
+            print_value_and_hash_nodes_of_storage_trie(hashed_addr, &s_trie);
+        }
+
         assert!(out.header.version_is_compatible(1));
         assert_eq!(trie_hash, expected_hash);
     }

From 4def7a1b0caff60c487999268964c31ff64bc39a Mon Sep 17 00:00:00 2001
From: BGluth
Date: Mon, 6 Nov 2023 14:39:39 -0700
Subject: [PATCH 100/208] Moved the giant test payload to a separate file

- The massive line of text was causing an issue for the language server,
  and this is also just a bit cleaner I think.
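One plausible way to wire this up (a minimal sketch only; the diff below is truncated, so the exact mechanism this patch uses is not shown here) is to embed the new file at compile time with `include_str!`, which expands to a `&'static str` and therefore still works in a `const`:

// Hypothetical sketch, not necessarily the exact change in this patch: pull the
// hex payload out of complex_test_payloads.rs and embed it at compile time.
// The path is resolved relative to the invoking source file, so from
// src/compact/complex_test_payloads.rs only the subdirectory is needed. Note
// that the .txt file must hold the hex with no trailing newline, or the
// compact parser would see a stray character at the end of the string.
pub(crate) const TEST_PAYLOAD_5: TestProtocolInputAndRoot = TestProtocolInputAndRoot {
    byte_str: include_str!("large_test_payloads/test_payload_5.txt"),
    root_str: "2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b",
};

Reading the payload at runtime via `std::fs` would also work, but embedding it keeps the constant usable in tests without touching the filesystem.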
--- src/compact/complex_test_payloads.rs | 6 ++++-- src/compact/large_test_payloads/test_payload_5.txt | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 src/compact/large_test_payloads/test_payload_5.txt diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index 32a89d67e..a4ded7e54 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -18,8 +18,10 @@ pub(crate) const TEST_PAYLOAD_3: TestProtocolInputAndRoot = TestProtocolInputAnd pub(crate) const TEST_PAYLOAD_4: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "0103a6885b3731702da62e8e4a8f584ac46a7f6822f4e2ba50fba902f67b1588d23b005821028015657e298d35290e69628be03d91f74d613caf3afdbe09138cfa415efe2f5044deadbeef0558210218b289936a0874cccee65712c88cdaa0a305b004d3fda2942b2b2dc54f14f6110b443b9aca0004", root_str: "69a5a7a8f99161a35e8b64975d8c6af10db4eee7bd956418839d8ff763aaf00c" }; -pub(crate) const TEST_PAYLOAD_5: TestProtocolInputAndRoot = TestProtocolInputAndRoot { byte_str: "01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e1461
0ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d91461094557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a3600480360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910
390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c565b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b6040516110689190
61264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b60405180910390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b578
0601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f4576001820491506001810190506117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c565b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a00000000000000000000000000000000000000000000000000000000081
52600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b600154905090565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b505080915050919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565
b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b600080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7
565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b95508019841693508086168417925050509392505050565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e555760008415612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0
219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73", -root_str: "2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b" }; +pub(crate) const TEST_PAYLOAD_5: TestProtocolInputAndRoot = TestProtocolInputAndRoot { + byte_str: include_str!("large_test_payloads/test_payload_5.txt"), + root_str: "2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b", +}; type ProcessCompactPrestateFn = fn(TrieCompact) -> CompactParsingResult; diff --git a/src/compact/large_test_payloads/test_payload_5.txt b/src/compact/large_test_payloads/test_payload_5.txt new file mode 100644 index 000000000..0d9eb1188 --- /dev/null +++ b/src/compact/large_test_payloads/test_payload_5.txt @@ -0,0 +1 @@ +01055821033601462093b5945d1676df093446790fd31b20e7b12a2e8e5e09d068109616b0084a021e19e0c9bab240000005582103468288056310c82aa4c01a7e12a10f8111a0560e72b700555479031b86c357d00841010359458c01cf05df7b300bb6768f77e774f47e91b1d1dd358c98b2f2118466f37305582103b70e80538acdabd6137353b0f9d8d149f4dba91e8be2e7946e409bfdbe685b900841010558210389802d6ed1a28b049e9d4fe5334c5902fd9bc00c42821c82f82ee2da10be90800841010558200256274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a790c014a0218ae73977cea178000055820023ab0970b73895b8c9959bae685c3a19f45eb5ad89d42b52a340ec4ac204d1908410102191020055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c0e49056b5974e248d87b700558200276da518a393dbd067dc72abfa08d475ed6447fca96d92ec3f9e7eba503ca6108410102190120035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582103f417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb42724300701191c86037eea3a48563e7b938852aafc93d760d31a84ad520adf1128af576cdd65ee9a8e0605582103558c2c1ac06ad29eab5b631a2a76f7997030f5468deb7f384eb6e276208d04600701192b420558210352688a8f926c816ca1e079067caba944f158e764817b83fc43594370ca9cf62008410105582103690b239ba3aaf993e443ae14aeffc44cf8d9931a79baed9fa141d0e4506e131008410104592ef4608060405234801561001057600080fd5b50600436106104545760003560e01c806380947f8011610241578063bf529ca11161013b578063dd9bef60116100c3578063f279ca8111610087578063f279ca8114611161578063f4d1fc6114611191578063f58fc36a146111c1578063f6b0bbf7146111f1578063fde7721c1461122157610454565b8063dd9bef6014611071578063de97a363146110a1578063e9f9b3f2146110d1578063ea5141e614611101578063edf003cf1461113157610454565b8063ce3cf4ef1161010a578063ce3cf4ef14610f81578063d117320b14610fb1578063d51e7b5b14610fe1578063d53ff3fd14611011578063d93cd5581461104157610454565b8063bf529ca114610ec1578063c360aba614610ef1578063c420eb6114610f21578063c4bd65d514610f5157610454565b8063a18683cb116101c9578063b374012b1161018d578063b374012b14610dd1578063b3d847f214610e01578063b7b8620714610e31578063b81c148414610e61578063bdc875fc14610e9157610454565b8063a18683cb14610cf3578063a271b72114610d23578063a60a108714610d41578063a645c9c214610d71578063acaebdf614610da157610454565b8063962e4dc211610210578063962e4dc214610c0357806398456f3e14610c335780639a2b7c8114610c635780639cce7cf914610c93578063a040aec614610cc357610454565b806380947f8014610b43578063880eff3914610b73578063918a5fcd14610ba357806391e7b27714610bd357610454565b80633430ec061161035257806360e13cde116102da5780636f099c8d1161029e5780636f099c8d14610a5357806371d91d2814610a835780637b6e0b0e14610ab35780637c191d2014610ae35780637de8c6f814610b1357610454565b806360e13cde14610975578063613d0a82146109a557806363138d4f146109d5578063659bbb4f14610a055780636e7f1fe714610a2357610454565b806340fe26621161032157806340fe26621461088557806344cf3bc7146108b55780634a61af1f146108e55780634d2c74b3146109155780635590c2d9146109
4557610454565b80633430ec06146107d7578063371303c0146108075780633a411f12146108255780633a425dfc1461085557610454565b806318093b46116103e0578063219cddeb116103a4578063219cddeb146106e75780632294fc7f146107175780632871ef85146107475780632b21ef44146107775780632d34e798146107a757610454565b806318093b46146105f757806319b621d6146106275780631aba07ea146106575780631de2f343146106875780632007332e146106b757610454565b80630ba8a73b116104275780630ba8a73b146105195780631287a68c14610549578063135d52f7146105675780631581cf191461059757806316582150146105c757610454565b8063034aef7114610459578063050082f814610489578063087b4e84146104b95780630b3b996a146104e9575b600080fd5b610473600480360381019061046e9190612611565b611251565b604051610480919061264d565b60405180910390f35b6104a3600480360381019061049e9190612611565b61128c565b6040516104b0919061264d565b60405180910390f35b6104d360048036038101906104ce9190612611565b6112c7565b6040516104e0919061264d565b60405180910390f35b61050360048036038101906104fe91906127ae565b611301565b6040516105109190612876565b60405180910390f35b610533600480360381019061052e9190612611565b611328565b604051610540919061264d565b60405180910390f35b610551611364565b60405161055e919061264d565b60405180910390f35b610581600480360381019061057c9190612611565b61136d565b60405161058e919061264d565b60405180910390f35b6105b160048036038101906105ac9190612611565b6113a9565b6040516105be919061264d565b60405180910390f35b6105e160048036038101906105dc9190612611565b6113e4565b6040516105ee919061264d565b60405180910390f35b610611600480360381019061060c9190612611565b61143f565b60405161061e919061264d565b60405180910390f35b610641600480360381019061063c9190612611565b61147d565b60405161064e919061264d565b60405180910390f35b610671600480360381019061066c9190612611565b61150c565b60405161067e919061264d565b60405180910390f35b6106a1600480360381019061069c9190612611565b611552565b6040516106ae919061264d565b60405180910390f35b6106d160048036038101906106cc9190612611565b611590565b6040516106de919061264d565b60405180910390f35b61070160048036038101906106fc9190612611565b6115cc565b60405161070e919061264d565b60405180910390f35b610731600480360381019061072c9190612611565b611607565b60405161073e919061264d565b60405180910390f35b610761600480360381019061075c9190612611565b611646565b60405161076e919061264d565b60405180910390f35b610791600480360381019061078c9190612611565b611681565b60405161079e919061264d565b60405180910390f35b6107c160048036038101906107bc9190612611565b6116bc565b6040516107ce919061264d565b60405180910390f35b6107f160048036038101906107ec9190612611565b6116f7565b6040516107fe9190612876565b60405180910390f35b61080f6117a3565b60405161081c919061264d565b60405180910390f35b61083f600480360381019061083a9190612611565b6117c2565b60405161084c919061264d565b60405180910390f35b61086f600480360381019061086a9190612611565b6117fe565b60405161087c919061264d565b60405180910390f35b61089f600480360381019061089a9190612611565b61183a565b6040516108ac919061264d565b60405180910390f35b6108cf60048036038101906108ca9190612611565b611879565b6040516108dc919061264d565b60405180910390f35b6108ff60048036038101906108fa9190612611565b6118b4565b60405161090c919061264d565b60405180910390f35b61092f600480360381019061092a9190612611565b6118f2565b60405161093c919061264d565b60405180910390f35b61095f600480360381019061095a9190612611565b61192d565b60405161096c919061264d565b60405180910390f35b61098f600480360381019061098a9190612611565b611972565b60405161099c919061264d565b60405180910390f35b6109bf60048036038101906109ba91906127ae565b6119ae565b6040516109cc9190612876565b60405180910390f35b6109ef60048036038101906109ea91906127ae565b6119e0565b6040516109fc91906128b1565b60405180910390f35b610a0d611a0c5
65b604051610a1a919061264d565b60405180910390f35b610a3d6004803603810190610a389190612611565b611a48565b604051610a4a919061264d565b60405180910390f35b610a6d6004803603810190610a689190612611565b611a86565b604051610a7a919061264d565b60405180910390f35b610a9d6004803603810190610a989190612611565b611ac1565b604051610aaa919061264d565b60405180910390f35b610acd6004803603810190610ac89190612611565b611aff565b604051610ada919061264d565b60405180910390f35b610afd6004803603810190610af89190612611565b611b3b565b604051610b0a919061264d565b60405180910390f35b610b2d6004803603810190610b289190612611565b611b76565b604051610b3a919061264d565b60405180910390f35b610b5d6004803603810190610b589190612611565b611bb2565b604051610b6a919061264d565b60405180910390f35b610b8d6004803603810190610b889190612611565b611c0f565b604051610b9a919061264d565b60405180910390f35b610bbd6004803603810190610bb89190612611565b611c4e565b604051610bca919061264d565b60405180910390f35b610bed6004803603810190610be89190612611565b611c89565b604051610bfa919061264d565b60405180910390f35b610c1d6004803603810190610c1891906127ae565b611cd5565b604051610c2a9190612876565b60405180910390f35b610c4d6004803603810190610c489190612611565b611d43565b604051610c5a919061264d565b60405180910390f35b610c7d6004803603810190610c789190612611565b611d83565b604051610c8a919061264d565b60405180910390f35b610cad6004803603810190610ca891906127ae565b611dbe565b604051610cba9190612876565b60405180910390f35b610cdd6004803603810190610cd891906127ae565b611def565b604051610cea9190612876565b60405180910390f35b610d0d6004803603810190610d0891906127ae565b611e16565b604051610d1a919061290d565b60405180910390f35b610d2b611e98565b604051610d38919061264d565b60405180910390f35b610d5b6004803603810190610d569190612611565b611ee3565b604051610d68919061264d565b60405180910390f35b610d8b6004803603810190610d869190612611565b611f1e565b604051610d98919061264d565b60405180910390f35b610dbb6004803603810190610db69190612611565b611f5a565b604051610dc8919061264d565b60405180910390f35b610deb6004803603810190610de69190612988565b611f96565b604051610df8919061264d565b60405180910390f35b610e1b6004803603810190610e169190612611565b611fe4565b604051610e28919061264d565b60405180910390f35b610e4b6004803603810190610e469190612611565b61201f565b604051610e58919061264d565b60405180910390f35b610e7b6004803603810190610e769190612611565b61205a565b604051610e88919061264d565b60405180910390f35b610eab6004803603810190610ea69190612611565b612095565b604051610eb8919061264d565b60405180910390f35b610edb6004803603810190610ed69190612611565b6120d0565b604051610ee8919061264d565b60405180910390f35b610f0b6004803603810190610f069190612611565b612114565b604051610f18919061264d565b60405180910390f35b610f3b6004803603810190610f369190612611565b612150565b604051610f48919061264d565b60405180910390f35b610f6b6004803603810190610f669190612611565b61218b565b604051610f78919061264d565b60405180910390f35b610f9b6004803603810190610f969190612611565b6121c9565b604051610fa8919061264d565b60405180910390f35b610fcb6004803603810190610fc69190612611565b612206565b604051610fd8919061264d565b60405180910390f35b610ffb6004803603810190610ff69190612611565b612240565b604051611008919061264d565b60405180910390f35b61102b60048036038101906110269190612611565b61227c565b604051611038919061264d565b60405180910390f35b61105b60048036038101906110569190612611565b6122b8565b604051611068919061264d565b60405180910390f35b61108b60048036038101906110869190612611565b612313565b604051611098919061264d565b60405180910390f35b6110bb60048036038101906110b69190612611565b612355565b6040516110c8919061264d565b60405180910390f35b6110eb60048036038101906110e69190612611565b612391565b6040516110f8919061264d565b6040518091
0390f35b61111b60048036038101906111169190612611565b6123ce565b604051611128919061264d565b60405180910390f35b61114b600480360381019061114691906127ae565b612410565b6040516111589190612876565b60405180910390f35b61117b60048036038101906111769190612611565b61247f565b604051611188919061264d565b60405180910390f35b6111ab60048036038101906111a69190612611565b6124bb565b6040516111b8919061264d565b60405180910390f35b6111db60048036038101906111d69190612611565b6124f9565b6040516111e8919061264d565b60405180910390f35b61120b600480360381019061120691906127ae565b612538565b6040516112189190612a10565b60405180910390f35b61123b60048036038101906112369190612611565b61256a565b604051611248919061264d565b60405180910390f35b600061125b6117a3565b50600065deadbeef003690506000805b848110156112815736915060018101905061126b565b505080915050919050565b60006112966117a3565b50600065deadbeef003290506000805b848110156112bc573291506001810190506112a6565b505080915050919050565b60006112d16117a3565b50600065deadbeef0052905060005b838110156112f757816000526001810190506112e0565b5080915050919050565b60606000600890506040828451602086016000855af18061132157600080fd5b5050919050565b60006113326117a3565b50600065deadbeef0001905060005b8381101561135a57600082019150600181019050611341565b5080915050919050565b60008054905090565b60006113776117a3565b50600065deadbeef0017905060005b8381101561139f57600082179150600181019050611386565b5080915050919050565b60006113b36117a3565b50600065deadbeef003490506000805b848110156113d9573491506001810190506113c3565b505080915050919050565b60006113ee6117a3565b50600065deadbeef0006905060005b83811015611435577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820691506001810190506113fd565b5080915050919050565b60006114496117a3565b50600065deadbeef001390506000805b8481101561147257600183139150600181019050611459565b505080915050919050565b60006114876117a3565b50600065deadbeef002090507fffffffff000000000000000000000000000000000000000000000000000000006000526000805b848110156114d557600460002091506001810190506114bb565b507f29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238811461150257600091505b5080915050919050565b60006115166117a3565b50600065deadbeef00a490508060105260005b83811015611548576004600360028360066010a4600181019050611529565b5080915050919050565b600061155c6117a3565b50600065deadbeef001a90506000805b84811015611585578260001a915060018101905061156c565b505080915050919050565b600061159a6117a3565b50600065deadbeef001b905060005b838110156115c2578160001b91506001810190506115a9565b5080915050919050565b60006115d66117a3565b50600065deadbeef004290506000805b848110156115fc574291506001810190506115e6565b505080915050919050565b60006116116117a3565b50600065deadbeef0031905060003060005b8581101561163a5781319250600181019050611623565b50505080915050919050565b60006116506117a3565b50600065deadbeef004890506000805b8481101561167657489150600181019050611660565b505080915050919050565b600061168b6117a3565b50600065deadbeef003d90506000805b848110156116b1573d915060018101905061169b565b505080915050919050565b60006116c66117a3565b50600065deadbeef004390506000805b848110156116ec574391506001810190506116d6565b505080915050919050565b6002818154811061170757600080fd5b90600052602060002001600091509050805461172290612a5a565b80601f016020809104026020016040519081016040528092919081815260200182805461174e90612a5a565b801561179b5780601f106117705761010080835404028352916020019161179b565b820191906000526020600020905b81548152906001019060200180831161177e57829003601f168201915b505050505081565b600060016000546117b49190612aba565b600081905550600054905090565b60006117cc6117a3565b50600065deadbeef0004905060005b838110156117f45760018204915060018101905
06117db565b5080915050919050565b60006118086117a3565b50600065deadbeef0037905060005b8381101561183057602060008037600181019050611817565b5080915050919050565b60006118446117a3565b50600065deadbeef00a090508060105260005b8381101561186f5760066010a0600181019050611857565b5080915050919050565b60006118836117a3565b50600065deadbeef003390506000805b848110156118a957339150600181019050611893565b505080915050919050565b60006118be6117a3565b50600065deadbeef0053905060005b838110156118e85763deadbeef6000526001810190506118cd565b5080915050919050565b60006118fc6117a3565b50600065deadbeef003a90506000805b84811015611922573a915060018101905061190c565b505080915050919050565b60006119376117a3565b50600065deadbeef0051905060008160005260005b8481101561196457600051915060018101905061194c565b508091505080915050919050565b600061197c6117a3565b50600065deadbeef001d905060005b838110156119a4578160001d915060018101905061198b565b5080915050919050565b606060006005905060208301835160405160208183856000885af1806119d357600080fd5b8195505050505050919050565b600080600290506020830183518360208183856000885af180611a0257600080fd5b5050505050919050565b6000611a166117a3565b505b6103e85a1115611a40576001806000828254611a349190612aba565b92505081905550611a18565b600154905090565b6000611a526117a3565b50600065deadbeef001090506000805b84811015611a7b57826001109150600181019050611a62565b505080915050919050565b6000611a906117a3565b50600065deadbeef004490506000805b84811015611ab657449150600181019050611aa0565b505080915050919050565b6000611acb6117a3565b50600065deadbeef001190506000805b84811015611af457600183119150600181019050611adb565b505080915050919050565b6000611b096117a3565b50600065deadbeef003e905060005b83811015611b315760206000803e600181019050611b18565b5080915050919050565b6000611b456117a3565b50600065deadbeef004590506000805b84811015611b6b57459150600181019050611b55565b505080915050919050565b6000611b806117a3565b50600065deadbeef0002905060005b83811015611ba857600182029150600181019050611b8f565b5080915050919050565b6000611bbc6117a3565b50600065deadbeef0008905060005b83811015611c05577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600083089150600181019050611bcb565b5080915050919050565b6000611c196117a3565b50600065deadbeef005490508060005560005b83811015611c44576000549150600181019050611c2c565b5080915050919050565b6000611c586117a3565b50600065deadbeef005a90506000805b84811015611c7e575a9150600181019050611c68565b505080915050919050565b6000611c936117a3565b50600065deadbeef0019905060005b83811015611cb95781199150600181019050611ca2565b5065deadbeef00198114611ccc57801990505b80915050919050565b606080825114611d1a576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611d1190612b4b565b60405180910390fd5b60006007905060208301835160408482846000875af180611d3a57600080fd5b50505050919050565b6000611d4d6117a3565b50600065deadbeef00a190508060105260005b83811015611d79578060066010a1600181019050611d60565b5080915050919050565b6000611d8d6117a3565b50600065deadbeef0016905060005b83811015611db4578182169150600181019050611d9c565b5080915050919050565b6060600060049050602083018351604051818183856000885af180611de257600080fd5b8195505050505050919050565b60606000600890506040828451602086016000855af180611e0f57600080fd5b5050919050565b60006080825114611e5c576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600401611e5390612bb7565b60405180910390fd5b600060019050602083016020810151601f1a602082015260206040516080836000865af180611e8a57600080fd5b604051519350505050919050565b6000611ea26117a3565b505b6103e85a1115611edb576001806000828254611ec09190612aba565b9250508190555043600154611ed59190612c06565b50611ea4565b6001549050
90565b6000611eed6117a3565b50600065deadbeef004690506000805b84811015611f1357469150600181019050611efd565b505080915050919050565b6000611f286117a3565b50600065deadbeef0005905060005b83811015611f5057600182059150600181019050611f37565b5080915050919050565b6000611f646117a3565b50600065deadbeef0039905060005b83811015611f8c57602060008039600181019050611f73565b5080915050919050565b60006002838390918060018154018082558091505060019003906000526020600020016000909192909192909192909192509182611fd5929190612dee565b50600280549050905092915050565b6000611fee6117a3565b50600065deadbeef005990506000805b8481101561201457599150600181019050611ffe565b505080915050919050565b60006120296117a3565b50600065deadbeef003890506000805b8481101561204f57389150600181019050612039565b505080915050919050565b60006120646117a3565b50600065deadbeef004190506000805b8481101561208a57419150600181019050612074565b505080915050919050565b600061209f6117a3565b50600065deadbeef003090506000805b848110156120c5573091506001810190506120af565b505080915050919050565b60006120da6117a3565b50600065deadbeef00a390508060105260005b8381101561210a57600360028260066010a36001810190506120ed565b5080915050919050565b600061211e6117a3565b50600065deadbeef000b905060005b83811015612146578160200b915060018101905061212d565b5080915050919050565b600061215a6117a3565b50600065deadbeef004790506000805b848110156121805747915060018101905061216a565b505080915050919050565b60006121956117a3565b50600065deadbeef001c90506000805b848110156121be578260001c92506001810190506121a5565b505080915050919050565b60006121d36117a3565b50600065deadbeef003590506000805b848110156121fb5760003591506001810190506121e3565b505080915050919050565b60006122106117a3565b50600065deadbeef0055905060005b83811015612236578160005560018101905061221f565b5080915050919050565b600061224a6117a3565b50600065deadbeef0018905060005b8381101561227257600082189150600181019050612259565b5080915050919050565b60006122866117a3565b50600065deadbeef0003905060005b838110156122ae57600082039150600181019050612295565b5080915050919050565b60006122c26117a3565b50600065deadbeef0007905060005b83811015612309577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820791506001810190506122d1565b5080915050919050565b600061231d6117a3565b50600065deadbeef00a290508060105260005b8381101561234b5760028160066010a2600181019050612330565b5080915050919050565b600061235f6117a3565b50600065deadbeef000a905060005b83811015612387576001820a915060018101905061236e565b5080915050919050565b600061239b6117a3565b50600065deadbeef001490506000805b848110156123c35782831491506001810190506123ab565b505080915050919050565b60006123d86117a3565b50600065deadbeef0040905060006001430360005b8581101561240457814092506001810190506123ed565b50505080915050919050565b60606080825114612456576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161244d90612b4b565b60405180910390fd5b60006006905060208301835160408482846000875af18061247657600080fd5b50505050919050565b60006124896117a3565b50600065deadbeef001590506000805b848110156124b05782159150600181019050612499565b505080915050919050565b60006124c56117a3565b50600065deadbeef001290506000805b848110156124ee578260011291506001810190506124d5565b505080915050919050565b60006125036117a3565b50600065deadbeef003b905060003060005b8581101561252c57813b9250600181019050612515565b50505080915050919050565b6000806003905060208301835160405160148183856000885af18061255c57600080fd5b815195505050505050919050565b60006125746117a3565b50600065deadbeef0009905060005b838110156125bd577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600183099150600181019050612583565b5080915050919050565b6000604051905090565b6
00080fd5b600080fd5b6000819050919050565b6125ee816125db565b81146125f957600080fd5b50565b60008135905061260b816125e5565b92915050565b600060208284031215612627576126266125d1565b5b6000612635848285016125fc565b91505092915050565b612647816125db565b82525050565b6000602082019050612662600083018461263e565b92915050565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6126bb82612672565b810181811067ffffffffffffffff821117156126da576126d9612683565b5b80604052505050565b60006126ed6125c7565b90506126f982826126b2565b919050565b600067ffffffffffffffff82111561271957612718612683565b5b61272282612672565b9050602081019050919050565b82818337600083830152505050565b600061275161274c846126fe565b6126e3565b90508281526020810184848401111561276d5761276c61266d565b5b61277884828561272f565b509392505050565b600082601f83011261279557612794612668565b5b81356127a584826020860161273e565b91505092915050565b6000602082840312156127c4576127c36125d1565b5b600082013567ffffffffffffffff8111156127e2576127e16125d6565b5b6127ee84828501612780565b91505092915050565b600081519050919050565b600082825260208201905092915050565b60005b83811015612831578082015181840152602081019050612816565b60008484015250505050565b6000612848826127f7565b6128528185612802565b9350612862818560208601612813565b61286b81612672565b840191505092915050565b60006020820190508181036000830152612890818461283d565b905092915050565b6000819050919050565b6128ab81612898565b82525050565b60006020820190506128c660008301846128a2565b92915050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006128f7826128cc565b9050919050565b612907816128ec565b82525050565b600060208201905061292260008301846128fe565b92915050565b600080fd5b600080fd5b60008083601f84011261294857612947612668565b5b8235905067ffffffffffffffff81111561296557612964612928565b5b6020830191508360018202830111156129815761298061292d565b5b9250929050565b6000806020838503121561299f5761299e6125d1565b5b600083013567ffffffffffffffff8111156129bd576129bc6125d6565b5b6129c985828601612932565b92509250509250929050565b60007fffffffffffffffffffffffffffffffffffffffff00000000000000000000000082169050919050565b612a0a816129d5565b82525050565b6000602082019050612a256000830184612a01565b92915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b60006002820490506001821680612a7257607f821691505b602082108103612a8557612a84612a2b565b5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000612ac5826125db565b9150612ad0836125db565b9250828201905080821115612ae857612ae7612a8b565b5b92915050565b600082825260208201905092915050565b7f496e76616c696420696e707574206c656e677468000000000000000000000000600082015250565b6000612b35601483612aee565b9150612b4082612aff565b602082019050919050565b60006020820190508181036000830152612b6481612b28565b9050919050565b7f496e76616c696420696e7075742064617461206c656e6774682e000000000000600082015250565b6000612ba1601a83612aee565b9150612bac82612b6b565b602082019050919050565b60006020820190508181036000830152612bd081612b94565b9050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b6000612c11826125db565b9150612c1c836125db565b925082612c2c57612c2b612bd7565b5b828206905092915050565b600082905092915050565b60008190508160005260206000209050919050565b60006020601f8301049050919050565b600082821b905092915050565b600060088302612ca47fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82612c67565b612cae8683612c67565b955080198416935080861684179250505093925050
50565b6000819050919050565b6000612ceb612ce6612ce1846125db565b612cc6565b6125db565b9050919050565b6000819050919050565b612d0583612cd0565b612d19612d1182612cf2565b848454612c74565b825550505050565b600090565b612d2e612d21565b612d39818484612cfc565b505050565b5b81811015612d5d57612d52600082612d26565b600181019050612d3f565b5050565b601f821115612da257612d7381612c42565b612d7c84612c57565b81016020851015612d8b578190505b612d9f612d9785612c57565b830182612d3e565b50505b505050565b600082821c905092915050565b6000612dc560001984600802612da7565b1980831691505092915050565b6000612dde8383612db4565b9150826002028217905092915050565b612df88383612c37565b67ffffffffffffffff811115612e1157612e10612683565b5b612e1b8254612a5a565b612e26828285612d61565b6000601f831160018114612e555760008415612e43578287013590505b612e4d8582612dd2565b865550612eb5565b601f198416612e6386612c42565b60005b82811015612e8b57848901358255600182019150602085019450602081019050612e66565b86831015612ea85784890135612ea4601f891682612db4565b8355505b6001600288020188555050505b5050505050505056fea26469706673582212203124213488c2f1fca5968787f0c3e96fba8469129a80798e11ee752903b4bfdc64736f6c634300081300330058200252130bf561a2ad9468cb2919d5ff2cda5c508338aaa5a12ee06e43acf1fa335820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff005820020b976be9384d1bb7a9ba3c6f92f3dffbefb6aaa4a07626c32489cd66e20473581f0ff1ce00bab10c1badb0028badf00dabadbabeb105f00db16b00b50b00b1350219080400582002b0c6948a275349ae45a06aad66a8bd65ac18074615d53676c09b67809099e0410200582002c72455231bf4548b418278aebda259695706344fedffefb40d8218532f72125820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed02190c00005820027eff41a0dce30a6e5bdeb23d1bbb96709facaf0abff8949749f89c697a7edd5820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe034d6a690768a0ea387b759e0bef01ee064b5d04cf830ff8fa74104e5dbeafab090219a000005820025787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace410900582002a69471df6e569a3d0da24943b5a847e21da73a0d58b0a25836633793cbf2dc5820deadbeafdeadbeefdeadc0dedeaddeaddeadd00ddeadfa11dead10ccdeadfeed00582002ee6d38ad948303a0117a3e3deee4d912b62481681bd892442a7d720eee5d2c581f0ff1ce000000000000000000000000000000000000000000000000000000080219044100582103780bd76754cd8bdf6ebbcf526b1e9c300885e157b72e09c4f68214c616f7bd30418100582103700f56bdfffe5f336e60cc5d9ad093591a43a048d8c82013fa9eb71ae98739905820baaaaaadbaadf00dbad22222baddcafecafeb0bab0bababebeefbabec00010ff00582103f64f60661322b36af17ffae1d83bdb731d45dce1596efffa3ccfc42c4aa182a05820b105f00db16b00b50b00b135baaaaaadbaadf00dbad22222baddcafecafeb0ba0334f927d8cb7dd37b23b0e1760984f38c0654cade533e23af873c94318811099903f399c14a1aca218d9f65fde0fede5584dd350446a9b85edb2531cd8ca793008f00582002b7834d611e25670b584f73a3e810d0a47c773fe173fc6975449e876b0a6a70581f0ff1ce00bab10c00000000000000000000000000000000000000000000001003eea55a2063723ec5f83b1bc2fd4a14edd99b58afad68631b87dc0ac06cf12a3500582002ca152920095f2fe7984b9ce1a725c3bc9436952ed17113f5fc7b7b613c401d420201021902c003316c463a8777740576aedfdd3d859851b8cc455ec2c3c2fe2b235e102e59eeb6005821035126a4d711f2dd98aa7df46b100c291503dddb43ad8180ae07f600704524a9d0414100582103605e486497dbb470ce04bc6cd8d6aa1cc0fa707511d6bcc61d0dbc85551736605820cafebabecafed00dcefaedfe0d15ea5edabbad00dead2baddeadbaaddeadbabe0219df770558210336d6fadc19b5ec9189ae65683241081f7c772ec596ea1facb9daef2a139663700701192ef40219fd73 \ No newline at end of file From af14be1c89599678a0b35d4ce60296e8c19477da Mon Sep 17 00:00:00 2001 From: cpu Date: Mon, 6 Nov 2023 15:34:25 -0800 Subject: [PATCH 101/208] Make prover state public --- 
 plonky_block_proof_gen/src/prover_state.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs
index 0c404447b..fcef11e06 100644
--- a/plonky_block_proof_gen/src/prover_state.rs
+++ b/plonky_block_proof_gen/src/prover_state.rs
@@ -9,7 +9,7 @@ use crate::types::AllRecursiveCircuits;
 /// Plonky2 proving state. Note that this is generally going to be massive in
 /// terms of memory and has a long spin-up time.
 pub struct ProverState {
-    pub(crate) state: AllRecursiveCircuits,
+    pub state: AllRecursiveCircuits,
 }
 
 /// Builder for the prover state.
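Aside: what the one-line visibility change above buys. A minimal sketch with a stub standing in for AllRecursiveCircuits; the stub and the consumer function are illustrative only, not code from this repo:

    // Illustrative stub; the real AllRecursiveCircuits lives in
    // plonky_block_proof_gen::types and is expensive to construct.
    struct AllRecursiveCircuits;

    pub struct ProverState {
        // `pub` after this patch; the previous `pub(crate)` kept the field
        // unreachable from outside the crate.
        pub state: AllRecursiveCircuits,
    }

    // A hypothetical downstream consumer can now borrow the circuits directly
    // instead of going through a crate-provided wrapper.
    fn circuits_of(p_state: &ProverState) -> &AllRecursiveCircuits {
        &p_state.state
    }

    fn main() {
        let p_state = ProverState {
            state: AllRecursiveCircuits,
        };
        let _circuits = circuits_of(&p_state);
    }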
- let pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); + let mut pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); + + add_empty_storage_tries_that_appear_in_trace_but_not_pre_image(&mut pre_image_data.tries.storage, &self.txn_info); + + print_value_and_hash_nodes_of_trie(&pre_image_data.tries.state); + + for (h_addr, s_trie) in pre_image_data.tries.storage.iter() { + print_value_and_hash_nodes_of_storage_trie(h_addr, s_trie); + } let resolve_code_hash_fn = |c_hash: &_| { let resolve_code_hash_fn_ref = &p_meta.resolve_code_hash_fn; @@ -71,6 +79,14 @@ impl BlockTrace { } } +// It's not clear to me if the client should have an empty storage trie for when a txn performs the accounts first storage access, but we're going to assume they won't for now and deal with that case here. +fn add_empty_storage_tries_that_appear_in_trace_but_not_pre_image(s_tries: &mut HashMap, txn_traces: &[TxnInfo]) { + let all_addrs_that_access_storage_iter = txn_traces.iter().flat_map(|x| x.traces.keys().map(|addr| hash(addr.as_bytes()))); + let addrs_with_storage_access_without_s_tries_iter: Vec<_> = all_addrs_that_access_storage_iter.filter(|addr| !s_tries.contains_key(addr)).collect(); + + s_tries.extend(addrs_with_storage_access_without_s_tries_iter.into_iter().map(|k| (k, HashedPartialTrie::default()))); +} + #[derive(Debug)] struct ProcessedBlockTracePreImages { tries: PartialTriePreImages, diff --git a/src/utils.rs b/src/utils.rs index cb7b12d13..663e81924 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -29,12 +29,12 @@ pub(crate) fn h256_to_nibbles(v: H256) -> Nibbles { // TODO: Move under a feature flag... pub(crate) fn print_value_and_hash_nodes_of_trie(trie: &HashedPartialTrie) { let trie_elems = print_value_and_hash_nodes_of_trie_common(trie); - println!("State trie for {:#?}", trie_elems); + println!("State trie {:#?}", trie_elems); } // TODO: Move under a feature flag... pub(crate) fn print_value_and_hash_nodes_of_storage_trie( - s_trie_addr: HashedStorageAddr, + s_trie_addr: &HashedStorageAddr, trie: &HashedPartialTrie, ) { let trie_elems = print_value_and_hash_nodes_of_trie_common(trie); @@ -42,7 +42,7 @@ pub(crate) fn print_value_and_hash_nodes_of_storage_trie( } // TODO: Move under a feature flag... -pub(crate) fn print_value_and_hash_nodes_of_trie_common(trie: &HashedPartialTrie) -> Vec { +fn print_value_and_hash_nodes_of_trie_common(trie: &HashedPartialTrie) -> Vec { trie.items() .map(|(k, v_or_h)| { let v_or_h_char = match v_or_h { From 8a27e8cf860fe71c84e1d052dad4ecfe78bc75dd Mon Sep 17 00:00:00 2001 From: cpu Date: Tue, 7 Nov 2023 11:38:09 -0800 Subject: [PATCH 103/208] Implement serialization for trace protocol format --- src/deserializers.rs | 29 ++++++++++++++++++++++++++--- src/trace_protocol.rs | 33 +++++++++++++++++++-------------- 2 files changed, 45 insertions(+), 17 deletions(-) diff --git a/src/deserializers.rs b/src/deserializers.rs index 88af007d5..f5da56341 100644 --- a/src/deserializers.rs +++ b/src/deserializers.rs @@ -1,10 +1,10 @@ -//! Custom deserializers for Serde. -use hex::FromHex; +//! Custom deserializers / serializers for Serde. 
+use hex::{FromHex, ToHex}; use plonky2_evm::generation::mpt::LegacyReceiptRlp; use rlp::DecoderError; use serde::{ de::{Error, Visitor}, - Deserialize, Deserializer, + Deserialize, Deserializer, Serialize, Serializer, }; #[derive(Clone, Debug, Default, Deserialize)] @@ -16,6 +16,12 @@ impl From for Vec { } } +impl From> for ByteString { + fn from(v: Vec) -> Self { + Self(v) + } +} + impl TryFrom for LegacyReceiptRlp { type Error = DecoderError; @@ -24,6 +30,12 @@ impl TryFrom for LegacyReceiptRlp { } } +impl From for ByteString { + fn from(value: LegacyReceiptRlp) -> Self { + Self(rlp::encode(&value).into()) + } +} + fn remove_hex_prefix_if_present(data: &str) -> &str { let prefix = &data[..2]; @@ -62,3 +74,14 @@ fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result, D: deserializer.deserialize_string(PrefixHexStrVisitor()) } + +impl Serialize for ByteString { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let hex_string = format!("0x{}", self.0.encode_hex::()); + + serializer.serialize_str(&hex_string) + } +} diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index e6420db46..d1bff6ac0 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -26,7 +26,7 @@ use std::collections::HashMap; use eth_trie_utils::partial_trie::HashedPartialTrie; use ethereum_types::{Address, U256}; use plonky2_evm::generation::mpt::LegacyReceiptRlp; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use serde_with::{serde_as, FromInto, TryFromInto}; use crate::{ @@ -38,7 +38,7 @@ use crate::{ /// Core payload needed to generate a proof for a block. Note that the scheduler /// may need to request some additional data from the client along with this in /// order to generate a proof. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct BlockTrace { /// The trie pre-images (state & storage) in multiple possible formats. pub trie_pre_images: BlockTraceTriePreImages, @@ -49,7 +49,7 @@ pub struct BlockTrace { } /// Minimal hashed out tries needed by all txns in the block. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum BlockTraceTriePreImages { Separate(SeparateTriePreImages), @@ -57,14 +57,14 @@ pub enum BlockTraceTriePreImages { } /// State/Storage trie pre-images that are separate. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct SeparateTriePreImages { pub state: SeparateTriePreImage, pub storage: SeparateStorageTriesPreImage, } /// A trie pre-image where state & storage are separate. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum SeparateTriePreImage { Uncompressed(TrieUncompressed), @@ -72,7 +72,7 @@ pub enum SeparateTriePreImage { } /// A trie pre-image where both state & storage are combined into one payload. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum CombinedPreImages { Compact(TrieCompact), @@ -80,22 +80,22 @@ pub enum CombinedPreImages { // TODO /// Bulkier format that is quicker to process. 
-#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TrieUncompressed {} // TODO #[serde_as] /// Compact representation of a trie (will likely be very close to https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md) -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TrieCompact(#[serde_as(as = "FromInto")] pub Vec); // TODO /// Trie format that is in exactly the same format of our internal trie format. /// This is the fastest format for us to processes. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TrieDirect(pub HashedPartialTrie); -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum SeparateStorageTriesPreImage { /// A single hash map that contains all node hashes from all storage tries @@ -109,7 +109,7 @@ pub enum SeparateStorageTriesPreImage { } /// Info specific to txns in the block. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TxnInfo { /// Trace data for the txn. This is used by the protocol to: /// - Mutate it's own trie state between txns to arrive at the correct trie @@ -123,7 +123,7 @@ pub struct TxnInfo { } #[serde_as] -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TxnMeta { /// Txn byte code. #[serde_as(as = "FromInto")] @@ -149,32 +149,37 @@ pub struct TxnMeta { /// /// Specifically, since we can not execute the txn before proof generation, we /// rely on a separate EVM to run the txn and supply this data for us. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TxnTrace { /// If the balance changed, then the new balance will appear here. Will be /// `None` if no change. + #[serde(skip_serializing_if = "Option::is_none")] pub balance: Option, /// If the nonce changed, then the new nonce will appear here. Will be /// `None` if no change. + #[serde(skip_serializing_if = "Option::is_none")] pub nonce: Option, /// Account addresses that were only read by the txn. /// /// Note that if storage is written to, then it does not need to appear in /// this list (but is also fine if it does). + #[serde(skip_serializing_if = "Option::is_none")] pub storage_read: Option>, /// Account storage addresses that were mutated by the txn along with their /// new value. + #[serde(skip_serializing_if = "Option::is_none")] pub storage_written: Option>, /// Contract code that this address accessed. + #[serde(skip_serializing_if = "Option::is_none")] pub code_usage: Option, } /// Contract code access type. Used by txn traces. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum ContractCodeUsage { /// Contract was read. 
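With `Serialize` now implemented across the trace-protocol types, real payloads can be captured and replayed as test fixtures. A hypothetical round-trip sketch for the hex-encoded `ByteString` (assuming `serde_json` as the carrier; no such test ships in this patch):

    #[test]
    fn byte_string_round_trips_as_prefixed_hex() {
        let bytes: ByteString = vec![0xde, 0xad, 0xbe, 0xef].into();

        // The custom `Serialize` impl renders a 0x-prefixed hex string...
        let json = serde_json::to_string(&bytes).unwrap();
        assert_eq!(json, "\"0xdeadbeef\"");

        // ...and `PrefixHexStrVisitor` strips the prefix on the way back in.
        let decoded: ByteString = serde_json::from_str(&json).unwrap();
        assert_eq!(decoded.0, vec![0xde, 0xad, 0xbe, 0xef]);
    }

The same impls make `BlockTrace` as a whole serializable, so entire incoming payloads can be dumped to JSON and reused as regression fixtures.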
From 1d1f5ccfde137400b5fe97184667b371ac5e8b12 Mon Sep 17 00:00:00 2001 From: cpu Date: Tue, 7 Nov 2023 12:00:09 -0800 Subject: [PATCH 104/208] Fix se/deserialization of contract write --- src/deserializers.rs | 10 +++++++++- src/processed_block_trace.rs | 2 +- src/trace_protocol.rs | 3 ++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/deserializers.rs b/src/deserializers.rs index f5da56341..e64461ca4 100644 --- a/src/deserializers.rs +++ b/src/deserializers.rs @@ -8,7 +8,7 @@ use serde::{ }; #[derive(Clone, Debug, Default, Deserialize)] -pub(crate) struct ByteString(#[serde(with = "self")] pub(crate) Vec); +pub struct ByteString(#[serde(with = "self")] pub Vec); impl From for Vec { fn from(v: ByteString) -> Self { @@ -16,6 +16,14 @@ impl From for Vec { } } +impl std::ops::Deref for ByteString { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + impl From> for ByteString { fn from(v: Vec) -> Self { Self(v) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 74d8cb16f..e11f359be 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -256,7 +256,7 @@ impl TxnInfo { } ContractCodeUsage::Write(c_bytes) => { let c_hash = hash(&c_bytes); - contract_code_accessed.insert(c_hash, c_bytes); + contract_code_accessed.insert(c_hash, c_bytes.0); } } } diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index d1bff6ac0..6599c09f4 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -179,6 +179,7 @@ pub struct TxnTrace { } /// Contract code access type. Used by txn traces. +#[serde_as] #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum ContractCodeUsage { @@ -187,7 +188,7 @@ pub enum ContractCodeUsage { /// Contract was created (and these are the bytes). Note that this new /// contract code will not appear in the [`BlockTrace`] map. - Write(Vec), + Write(#[serde_as(as = "FromInto")] ByteString), } impl ContractCodeUsage { From 4a356f49da1da9c46edea0d89826b367f72ee21b Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 8 Nov 2023 13:40:11 -0700 Subject: [PATCH 105/208] Fixed double hashing storage account addrs --- src/compact/compact_to_partial_trie.rs | 4 +-- src/compact/complex_test_payloads.rs | 39 +++++++++++++++++++++++--- 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index fb8c8f87a..966e7b183 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -175,11 +175,9 @@ pub(crate) fn convert_storage_trie_root_keyed_hashmap_to_account_addr_keyed( // TODO: Replace with a map... for (acc_addr, storage_root) in account_addr_and_storage_root_iter { if let Some(s_trie) = storage_root_trie.get(&storage_root) { - let hashed_addr = hash(acc_addr.as_bytes()); - // Possibility of identical tries between accounts, so we need to do a clone // here. 
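            // (Two accounts can legitimately share a storage root, e.g. two
            // deployments of the same contract with identical storage, so the
            // same trie may need to be handed out more than once.)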
- acc_addr_to_storage_trie_map.insert(hashed_addr, s_trie.clone()); + acc_addr_to_storage_trie_map.insert(acc_addr, s_trie.clone()); } } diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index b8e793a26..3907987ec 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -1,12 +1,13 @@ use eth_trie_utils::partial_trie::PartialTrie; +use plonky2_evm::generation::mpt::AccountRlp; use super::compact_prestate_processing::{ process_compact_prestate, process_compact_prestate_debug, CompactParsingResult, - ProcessedCompactOutput, + PartialTriePreImages, ProcessedCompactOutput, }; use crate::{ trace_protocol::TrieCompact, - types::TrieRootHash, + types::{HashedAccountAddr, TrieRootHash, EMPTY_TRIE_HASH}, utils::{print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie}, }; @@ -54,11 +55,41 @@ impl TestProtocolInputAndRoot { print_value_and_hash_nodes_of_trie(&out.witness_out.tries.state); - for (hashed_addr, s_trie) in out.witness_out.tries.storage { - print_value_and_hash_nodes_of_storage_trie(&hashed_addr, &s_trie); + for (hashed_addr, s_trie) in out.witness_out.tries.storage.iter() { + print_value_and_hash_nodes_of_storage_trie(hashed_addr, s_trie); } assert!(out.header.version_is_compatible(1)); assert_eq!(trie_hash, expected_hash); + + Self::assert_non_all_storage_roots_exist_in_storage_trie_map(&out.witness_out.tries); + } + + fn assert_non_all_storage_roots_exist_in_storage_trie_map(images: &PartialTriePreImages) { + let non_empty_account_s_roots = images + .state + .items() + .filter_map(|(addr, data)| { + data.as_val().map(|data| { + ( + HashedAccountAddr::from_slice(&addr.bytes_be()), + rlp::decode::(data).unwrap().storage_root, + ) + }) + }) + .filter(|(_, s_root)| *s_root != EMPTY_TRIE_HASH) + .map(|(addr, _)| addr); + + let x: Vec<_> = non_empty_account_s_roots.collect(); + println!("non empty account s_roots: {:#?}", x); + + println!( + "All keys for storage tries: {:#?}", + images.storage.keys().collect::>() + ); + + for account_with_non_empty_root in x.into_iter() { + assert!(images.storage.contains_key(&account_with_non_empty_root)); + } } } From 7c1ecdc9ccfa85f333fa083aba7421eb9f1e3ffd Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 9 Nov 2023 11:46:40 -0700 Subject: [PATCH 106/208] Some fixes --- src/decoding.rs | 93 +++++++++++++++++++++++++++++------- src/processed_block_trace.rs | 75 +++++++++++++++++++++-------- 2 files changed, 132 insertions(+), 36 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index bfd879c05..2ada52801 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,12 +1,12 @@ use std::{ - collections::HashMap, + collections::{HashMap, HashSet}, fmt::{self, Display, Formatter}, iter::{empty, once}, }; use eth_trie_utils::{ nibbles::Nibbles, - partial_trie::{HashedPartialTrie, PartialTrie}, + partial_trie::{HashedPartialTrie, Node, PartialTrie}, trie_subsets::create_trie_subset, }; use ethereum_types::{Address, U256}; @@ -18,11 +18,12 @@ use thiserror::Error; use crate::{ processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites}, + trace_protocol::TxnInfo, types::{ BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_TRIE_HASH, }, - utils::update_val_if_some, + utils::{hash, update_val_if_some}, }; pub type TraceParsingResult = Result; @@ -93,16 +94,28 @@ impl ProcessedBlockTrace { .enumerate() .map(|(txn_idx, txn_info)| { let tries = 
Self::create_minimal_partial_tries_needed_by_txn( - &curr_block_tries, + &mut curr_block_tries, &txn_info.nodes_used_by_txn, txn_idx, )?; - let trie_roots_after = calculate_trie_input_hashes(&tries); + let addresses = Self::get_known_addresses_if_enabled(); let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; let new_bloom = txn_info.meta.block_bloom; + Self::apply_deltas_to_trie_state( + &mut curr_block_tries, + txn_info.nodes_used_by_txn, + )?; + + // TODO: Clean up if this works... + let trie_roots_after = TrieRoots { + state_root: curr_block_tries.state.hash(), + transactions_root: curr_block_tries.txn.hash(), + receipts_root: curr_block_tries.receipt.hash(), + }; + let gen_inputs = GenerationInputs { txn_number_before: txn_idx.saturating_sub(1).into(), gas_used_before: tot_gas_used, @@ -124,11 +137,6 @@ impl ProcessedBlockTrace { gen_inputs, }; - Self::apply_deltas_to_trie_state( - &mut curr_block_tries, - txn_info.nodes_used_by_txn, - )?; - tot_gas_used = new_tot_gas_used; curr_bloom = new_bloom; @@ -141,7 +149,7 @@ impl ProcessedBlockTrace { } fn create_minimal_partial_tries_needed_by_txn( - curr_block_tries: &PartialTrieState, + curr_block_tries: &mut PartialTrieState, nodes_used_by_txn: &NodesUsedByTxn, txn_idx: TxnIdx, ) -> TraceParsingResult { @@ -164,7 +172,8 @@ impl ProcessedBlockTrace { )?; let storage_tries = Self::create_minimal_storage_partial_tries( - &curr_block_tries.storage, + &mut curr_block_tries.storage, + &nodes_used_by_txn.state_accounts_with_no_accesses_but_storage_tries, nodes_used_by_txn.storage_accesses.iter(), )?; @@ -176,6 +185,19 @@ impl ProcessedBlockTrace { }) } + fn get_accounts_with_no_storage_access_that_have_entries_in_state_trie( + storage_accesses: &[(HashedAccountAddr, Vec)], + state_accesses: &[HashedNodeAddr], + ) -> Vec<(HashedAccountAddr, Vec)> { + let storage_accesses_set: HashSet = + HashSet::from_iter(storage_accesses.iter().map(|(k, _)| k).cloned()); + state_accesses + .iter() + .filter(|h_addr| !storage_accesses_set.contains(h_addr)) + .map(|h_addr| (*h_addr, Vec::default())) + .collect() + } + fn create_minimal_state_partial_trie( state_trie: &HashedPartialTrie, state_accesses: impl Iterator, @@ -187,17 +209,33 @@ impl ProcessedBlockTrace { ) } + // TODO!!!: We really need to be appending the empty storage tries to the base + // trie somewhere else! This is a big hack! fn create_minimal_storage_partial_tries<'a>( - storage_tries: &HashMap, + storage_tries: &mut HashMap, + state_accounts_with_no_accesses_but_storage_tries: &HashMap< + HashedAccountAddr, + TrieRootHash, + >, accesses_per_account: impl Iterator< Item = &'a (HashedAccountAddr, Vec), >, ) -> TraceParsingResult> { accesses_per_account .map(|(h_addr, mem_accesses)| { - let base_storage_trie = storage_tries - .get(h_addr) - .ok_or(TraceParsingError::MissingAccountStorageTrie(*h_addr))?; + // TODO: Clean up... + let base_storage_trie = match storage_tries.get(h_addr) { + Some(s_trie) => s_trie, + None => { + let trie = state_accounts_with_no_accesses_but_storage_tries + .get(h_addr) + .map(|s_root| HashedPartialTrie::new(Node::Hash(*s_root))) + .unwrap_or_default(); + storage_tries.insert(*h_addr, trie); // TODO: Really change this... 
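                        // Insert first, then re-borrow from the map so the
                        // returned reference outlives this `match` arm.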
+ storage_tries.get(h_addr).unwrap() + } + }; + let partial_storage_trie = Self::create_trie_subset_wrapped( base_storage_trie, mem_accesses.iter().cloned(), @@ -218,6 +256,29 @@ impl ProcessedBlockTrace { .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(trie_type)) } + // It's not clear to me if the client should have an empty storage trie for when + // a txn performs the accounts first storage access, but we're going to assume + // they won't for now and deal with that case here. + fn add_empty_storage_tries_that_appear_in_trace_but_not_pre_image( + s_tries: &mut Vec<(HashedAccountAddr, HashedPartialTrie)>, + txn_traces: &[TxnInfo], + ) { + // TODO: Make a bit more efficient... + let all_addrs_that_access_storage_iter = txn_traces + .iter() + .flat_map(|x| x.traces.keys().map(|addr| hash(addr.as_bytes()))); + let addrs_with_storage_access_without_s_tries_iter: Vec<_> = + all_addrs_that_access_storage_iter + .filter(|addr| !s_tries.iter().any(|(a, _)| addr == a)) + .collect(); + + s_tries.extend( + addrs_with_storage_access_without_s_tries_iter + .into_iter() + .map(|k| (k, HashedPartialTrie::default())), + ); + } + fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: NodesUsedByTxn, diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index e11f359be..f9bcc4f12 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -1,9 +1,10 @@ -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::fmt::Debug; use eth_trie_utils::nibbles::Nibbles; -use eth_trie_utils::partial_trie::HashedPartialTrie; +use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; use ethereum_types::U256; +use plonky2_evm::generation::mpt::AccountRlp; use crate::compact::compact_prestate_processing::{process_compact_prestate, PartialTriePreImages}; use crate::decoding::TraceParsingResult; @@ -14,9 +15,12 @@ use crate::trace_protocol::{ }; use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, - HashedStorageAddrNibbles, OtherBlockData, StorageAddr, StorageVal, TxnProofGenIR, + HashedStorageAddrNibbles, OtherBlockData, StorageAddr, StorageVal, TrieRootHash, TxnProofGenIR, + EMPTY_TRIE_HASH, +}; +use crate::utils::{ + hash, print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie, }; -use crate::utils::{hash, print_value_and_hash_nodes_of_trie, print_value_and_hash_nodes_of_storage_trie}; #[derive(Debug)] pub(crate) struct ProcessedBlockTrace { @@ -45,9 +49,7 @@ impl BlockTrace { { // The compact format is able to provide actual code, so if it does, we should // take advantage of it. 
- let mut pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); - - add_empty_storage_tries_that_appear_in_trace_but_not_pre_image(&mut pre_image_data.tries.storage, &self.txn_info); + let pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); print_value_and_hash_nodes_of_trie(&pre_image_data.tries.state); @@ -68,25 +70,33 @@ impl BlockTrace { } }; + let all_accounts_in_pre_image: Vec<_> = pre_image_data + .tries + .state + .items() + .filter_map(|(addr, data)| { + data.as_val().map(|data| { + ( + HashedAccountAddr::from_slice(&addr.bytes_be()), + rlp::decode::(data).unwrap(), + ) + }) + }) + .collect(); + + let txn_info = self + .txn_info + .into_iter() + .map(|t| t.into_processed_txn_info(&all_accounts_in_pre_image, &resolve_code_hash_fn)) + .collect::>(); + ProcessedBlockTrace { tries: pre_image_data.tries, - txn_info: self - .txn_info - .into_iter() - .map(|t| t.into_processed_txn_info(&resolve_code_hash_fn)) - .collect(), + txn_info, } } } -// It's not clear to me if the client should have an empty storage trie for when a txn performs the accounts first storage access, but we're going to assume they won't for now and deal with that case here. -fn add_empty_storage_tries_that_appear_in_trace_but_not_pre_image(s_tries: &mut HashMap, txn_traces: &[TxnInfo]) { - let all_addrs_that_access_storage_iter = txn_traces.iter().flat_map(|x| x.traces.keys().map(|addr| hash(addr.as_bytes()))); - let addrs_with_storage_access_without_s_tries_iter: Vec<_> = all_addrs_that_access_storage_iter.filter(|addr| !s_tries.contains_key(addr)).collect(); - - s_tries.extend(addrs_with_storage_access_without_s_tries_iter.into_iter().map(|k| (k, HashedPartialTrie::default()))); -} - #[derive(Debug)] struct ProcessedBlockTracePreImages { tries: PartialTriePreImages, @@ -190,6 +200,7 @@ pub(crate) struct ProcessedTxnInfo { impl TxnInfo { fn into_processed_txn_info( self, + all_accounts_in_pre_image: &[(HashedAccountAddr, AccountRlp)], code_hash_resolve_f: &F, ) -> ProcessedTxnInfo { let mut nodes_used_by_txn = NodesUsedByTxn::default(); @@ -262,6 +273,26 @@ impl TxnInfo { } } + let accounts_with_storage_accesses: HashSet<_> = HashSet::from_iter( + nodes_used_by_txn + .storage_accesses + .iter() + .filter(|(_, slots)| !slots.is_empty()) + .map(|(addr, _)| *addr), + ); + + let all_accounts_with_non_empty_storage = all_accounts_in_pre_image + .iter() + .filter(|(_, data)| data.storage_root != EMPTY_TRIE_HASH); + + let accounts_with_storage_but_no_storage_accesses = all_accounts_with_non_empty_storage + .filter(|&(addr, _data)| (!accounts_with_storage_accesses.contains(addr))) + .map(|(addr, data)| (*addr, data.storage_root)); + + nodes_used_by_txn + .state_accounts_with_no_accesses_but_storage_tries + .extend(accounts_with_storage_but_no_storage_accesses); + let new_meta_state = TxnMetaState { txn_bytes: self.meta.byte_code, gas_used: self.meta.gas_used, @@ -300,11 +331,15 @@ impl TxnInfo { pub(crate) struct NodesUsedByTxn { pub(crate) state_accesses: Vec, pub(crate) state_writes: Vec<(HashedAccountAddr, StateTrieWrites)>, + + // Note: All entries in `storage_writes` also appear in `storage_accesses`. 
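    // This invariant matters when building the minimal partial tries: the
    // access list determines which nodes survive in the trie subset, so a
    // write that was never recorded as an access would target a hashed-out
    // node.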
pub(crate) storage_accesses: Vec<(HashedAccountAddr, Vec)>, pub(crate) storage_writes: Vec<( HashedAccountAddr, Vec<(HashedStorageAddrNibbles, StorageVal)>, )>, + pub(crate) state_accounts_with_no_accesses_but_storage_tries: + HashMap, } #[derive(Debug)] From 25c51abfccd66171053fe7094000a3ebe78aa14b Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 9 Nov 2023 11:29:51 -0700 Subject: [PATCH 107/208] Now handles account creation --- src/decoding.rs | 12 ++++-------- src/types.rs | 2 ++ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 2ada52801..dd995a6c6 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -21,7 +21,7 @@ use crate::{ trace_protocol::TxnInfo, types::{ BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, - OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_TRIE_HASH, + OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_TRIE_HASH, EMPTY_ACCOUNT_BYTES_RLPED, }, utils::{hash, update_val_if_some}, }; @@ -292,13 +292,9 @@ impl ProcessedBlockTrace { for (hashed_acc_addr, s_trie_writes) in deltas.state_writes { let val_k = Nibbles::from_h256_be(hashed_acc_addr); - let val_bytes = trie_state.state.get(val_k).ok_or_else(|| { - TraceParsingError::NonExistentTrieEntry( - TrieType::State, - val_k, - trie_state.state.hash(), - ) - })?; + + // If the account was created, then it will not exist in the trie. + let val_bytes = trie_state.state.get(val_k).unwrap_or(&EMPTY_ACCOUNT_BYTES_RLPED); let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) diff --git a/src/types.rs b/src/types.rs index 86e569f68..577d692db 100644 --- a/src/types.rs +++ b/src/types.rs @@ -34,6 +34,8 @@ pub(crate) const EMPTY_TRIE_HASH: H256 = H256([ 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, ]); +pub(crate) const EMPTY_ACCOUNT_BYTES_RLPED: [u8; 70] = [248, 68, 128, 128, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112]; + /// An `IR` (Intermediate Representation) for a given txn in a block that we can /// use to generate a proof for that txn. 
#[derive(Clone, Debug, Deserialize, Serialize)] From 15ae1a1e5b8e60a056a36926746cb563507073c2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 9 Nov 2023 12:58:36 -0700 Subject: [PATCH 108/208] First attempt at txn & receipt tries --- src/decoding.rs | 22 +++++++++++++++++++--- src/processed_block_trace.rs | 2 ++ src/types.rs | 7 ++++++- 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index dd995a6c6..3379ca91a 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -17,11 +17,12 @@ use plonky2_evm::{ use thiserror::Error; use crate::{ - processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites}, + processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites, TxnMetaState}, trace_protocol::TxnInfo, types::{ BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, - OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_TRIE_HASH, EMPTY_ACCOUNT_BYTES_RLPED, + OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, + EMPTY_TRIE_HASH, }, utils::{hash, update_val_if_some}, }; @@ -107,6 +108,8 @@ impl ProcessedBlockTrace { Self::apply_deltas_to_trie_state( &mut curr_block_tries, txn_info.nodes_used_by_txn, + &txn_info.meta, + txn_idx, )?; // TODO: Clean up if this works... @@ -137,6 +140,8 @@ impl ProcessedBlockTrace { gen_inputs, }; + println!("IR: {:#?}", txn_proof_gen_ir); + tot_gas_used = new_tot_gas_used; curr_bloom = new_bloom; @@ -282,6 +287,8 @@ impl ProcessedBlockTrace { fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: NodesUsedByTxn, + meta: &TxnMetaState, + txn_idx: TxnIdx, ) -> TraceParsingResult<()> { for (hashed_acc_addr, storage_writes) in deltas.storage_writes { let storage_trie = trie_state.storage.get_mut(&hashed_acc_addr).ok_or( @@ -294,7 +301,10 @@ impl ProcessedBlockTrace { let val_k = Nibbles::from_h256_be(hashed_acc_addr); // If the account was created, then it will not exist in the trie. 
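            // In that case we fall back to the RLP of an empty account: nonce 0,
            // balance 0, the empty-trie root for storage, and the hash of the
            // empty byte string for code (exactly what
            // `EMPTY_ACCOUNT_BYTES_RLPED` encodes).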
- let val_bytes = trie_state.state.get(val_k).unwrap_or(&EMPTY_ACCOUNT_BYTES_RLPED); + let val_bytes = trie_state + .state + .get(val_k) + .unwrap_or(&EMPTY_ACCOUNT_BYTES_RLPED); let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) @@ -311,6 +321,12 @@ impl ProcessedBlockTrace { .insert(val_k, updated_account_bytes.to_vec()); } + let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); + trie_state.txn.insert(txn_k, meta.txn_bytes.clone()); + trie_state + .receipt + .insert(txn_k, meta.receipt_node_bytes.clone()); + Ok(()) } diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index f9bcc4f12..fefb778d9 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -295,6 +295,7 @@ impl TxnInfo { let new_meta_state = TxnMetaState { txn_bytes: self.meta.byte_code, + receipt_node_bytes: rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(), gas_used: self.meta.gas_used, block_bloom, }; @@ -353,6 +354,7 @@ pub(crate) struct StateTrieWrites { #[derive(Debug, Default)] pub(crate) struct TxnMetaState { pub(crate) txn_bytes: Vec, + pub(crate) receipt_node_bytes: Vec, pub(crate) gas_used: u64, pub(crate) block_bloom: Bloom, } diff --git a/src/types.rs b/src/types.rs index 577d692db..3bf36063c 100644 --- a/src/types.rs +++ b/src/types.rs @@ -34,7 +34,12 @@ pub(crate) const EMPTY_TRIE_HASH: H256 = H256([ 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, ]); -pub(crate) const EMPTY_ACCOUNT_BYTES_RLPED: [u8; 70] = [248, 68, 128, 128, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112]; +pub(crate) const EMPTY_ACCOUNT_BYTES_RLPED: [u8; 70] = [ + 248, 68, 128, 128, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, + 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, + 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, + 123, 250, 216, 4, 93, 133, 164, 112, +]; /// An `IR` (Intermediate Representation) for a given txn in a block that we can /// use to generate a proof for that txn. From ab951e367ffa2b8e59878113ceda2a5f0b13ff46 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 13 Nov 2023 14:40:07 -0700 Subject: [PATCH 109/208] DNM!! Debug prints and fixes --- src/compact/compact_debug_tools.rs | 28 +++++ src/compact/compact_prestate_processing.rs | 1 + src/compact/compact_to_partial_trie.rs | 2 + src/decoding.rs | 138 +++++++++++++++++++-- src/processed_block_trace.rs | 89 +++++++++---- src/types.rs | 1 + 6 files changed, 224 insertions(+), 35 deletions(-) create mode 100644 src/compact/compact_debug_tools.rs diff --git a/src/compact/compact_debug_tools.rs b/src/compact/compact_debug_tools.rs new file mode 100644 index 000000000..5d3063d71 --- /dev/null +++ b/src/compact/compact_debug_tools.rs @@ -0,0 +1,28 @@ +// Using struct to make printing this nicer easier. +#[derive(Debug)] +pub struct InstructionAndBytesParsedFromBuf(Vec<(Instruction, Vec)>); + +impl From)>> for InstructionAndBytesParsedFromBuf { + fn from(v: Vec<(Instruction, Vec)>) -> Self { + Self(v) + } +} + +// TODO: Move behind a feature flag just used for debugging (but probably not +// `debug`)... 
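// Parses the witness bytes only as far as the instruction stream; enough to
// inspect what a payload contains without building any tries.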
+pub fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult> { + let witness_bytes = WitnessBytes::::new(bytes); + let (_, entries) = witness_bytes.process_into_instructions_and_header()?; + + Ok(entries + .intern + .into_iter() + .map(|entry| match entry { + WitnessEntry::Instruction(instr) => instr, + _ => unreachable!( + "Found a non-instruction at a stage when we should only have instructions!" + ), + }) + .collect()) +} + diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 51133fdf0..0868eb3de 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -589,6 +589,7 @@ impl ParserState { ) } [WitnessEntry::Node(node), WitnessEntry::Node(NodeEntry::Hash(c_hash))] => { + println!("CREATING STORAGE TRIE FROM COMPACT!!"); Self::try_create_and_insert_partial_trie_from_node( node, Some((*c_hash).into()), diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 966e7b183..0c671d52f 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -120,6 +120,8 @@ fn process_leaf( } }; + println!("INSERTING KEY {:x}!", full_k); + output.trie.insert(full_k, l_val); Ok(()) diff --git a/src/decoding.rs b/src/decoding.rs index 3379ca91a..f83a87a17 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -9,7 +9,7 @@ use eth_trie_utils::{ partial_trie::{HashedPartialTrie, Node, PartialTrie}, trie_subsets::create_trie_subset, }; -use ethereum_types::{Address, U256}; +use ethereum_types::{Address, H256, U256}; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -94,6 +94,15 @@ impl ProcessedBlockTrace { .into_iter() .enumerate() .map(|(txn_idx, txn_info)| { + let all_storage_roots = curr_block_tries + .state + .items() + .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) + .map(|(a, v)| (a, rlp::decode::(&v).unwrap().storage_root)) + .collect::>(); + + println!("All storage roots (before): {:#?}", all_storage_roots); + let tries = Self::create_minimal_partial_tries_needed_by_txn( &mut curr_block_tries, &txn_info.nodes_used_by_txn, @@ -102,6 +111,14 @@ impl ProcessedBlockTrace { let addresses = Self::get_known_addresses_if_enabled(); + let account_and_storage_hashes = curr_block_tries + .state + .items() + .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) + .map(|(a, v)| (a, rlp::decode::(&v).unwrap().storage_root)) + .collect::>(); + println!("{:#?}", account_and_storage_hashes); + let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; let new_bloom = txn_info.meta.block_bloom; @@ -119,6 +136,12 @@ impl ProcessedBlockTrace { receipts_root: curr_block_tries.receipt.hash(), }; + println!("PARTIAL TRIES BEFORE: {:?}", tries); + + println!("TRIE ROOTS AFTER: {:?}", trie_roots_after); + + println!("SIGNED BYTES: {}", hex::encode(&txn_info.meta.txn_bytes)); + let gen_inputs = GenerationInputs { txn_number_before: txn_idx.saturating_sub(1).into(), gas_used_before: tot_gas_used, @@ -140,11 +163,37 @@ impl ProcessedBlockTrace { gen_inputs, }; - println!("IR: {:#?}", txn_proof_gen_ir); + // println!("IR: {:#?}", txn_proof_gen_ir); tot_gas_used = new_tot_gas_used; curr_bloom = new_bloom; + let all_storage_roots = curr_block_tries + .state + .items() + .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) + .map(|(a, v)| (a, rlp::decode::(&v).unwrap().storage_root)) + .collect::>(); + println!("All storage roots: {:#?}", all_storage_roots); + + println!( + 
"All state nodes: {:#?}", + curr_block_tries + .state + .keys() + .map(|k| format!("{:x}, {:x}", k, hash(&k.bytes_be()))) + .collect::>() + ); + + for (addr, trie) in curr_block_tries.storage.iter() { + println!("Storage slots for hashed addr {:x}:", addr); + + let slots = trie.keys().map(|s| format!("{:x}", s)).collect::>(); + println!("----------"); + println!("{:#?}", slots); + println!("----------"); + } + Ok(txn_proof_gen_ir) }) .collect::>>()?; @@ -176,12 +225,25 @@ impl ProcessedBlockTrace { TrieType::Receipt, )?; + let x = nodes_used_by_txn + .storage_accesses + .iter() + .map(|(k, v)| (H256::from_slice(&k.bytes_be()), v.clone())) + .collect::>(); + let storage_tries = Self::create_minimal_storage_partial_tries( &mut curr_block_tries.storage, &nodes_used_by_txn.state_accounts_with_no_accesses_but_storage_tries, - nodes_used_by_txn.storage_accesses.iter(), + x.iter(), )?; + println!( + "{:#?}", + storage_tries + .iter() + .map(|(a, t)| format!("hashed account addr: {:x}: {}", a, t.keys().count())) + .collect::>() + ); Ok(TrieInputs { state_trie, transactions_trie, @@ -226,10 +288,15 @@ impl ProcessedBlockTrace { Item = &'a (HashedAccountAddr, Vec), >, ) -> TraceParsingResult> { + println!( + "BASE TRIES KEYS: {:#?}", + storage_tries.keys().collect::>() + ); + accesses_per_account .map(|(h_addr, mem_accesses)| { // TODO: Clean up... - let base_storage_trie = match storage_tries.get(h_addr) { + let base_storage_trie = match storage_tries.get(&H256(h_addr.0)) { Some(s_trie) => s_trie, None => { let trie = state_accounts_with_no_accesses_but_storage_tries @@ -290,21 +357,61 @@ impl ProcessedBlockTrace { meta: &TxnMetaState, txn_idx: TxnIdx, ) -> TraceParsingResult<()> { + println!("Applying deltas!"); + for (hashed_acc_addr, storage_writes) in deltas.storage_writes { - let storage_trie = trie_state.storage.get_mut(&hashed_acc_addr).ok_or( - TraceParsingError::MissingAccountStorageTrie(hashed_acc_addr), - )?; - storage_trie.extend(storage_writes); + let storage_trie = trie_state + .storage + .get_mut(&H256::from_slice(&hashed_acc_addr.bytes_be())) + .ok_or( + TraceParsingError::MissingAccountStorageTrie(H256::zero()), // TODO!!! FIX + )?; + + println!("Applying storage writes of {:?}", storage_writes); + + println!( + "All storage slots before write apply: {:#?}", + storage_trie + .keys() + .map(|k| format!("{:x}", k)) + .collect::>() + ); + + for (addr, write) in storage_writes.iter() { + if storage_trie.get(*addr).is_none() { + println!( + "STORAGE SLOT CREATED! (h_account: {:x}) {:x} --> {}", + hashed_acc_addr, + addr, + hex::encode(write) + ); + } + } + + // // TODO: Move hash of slot addr to block trace processing... + // storage_trie.extend(storage_writes.into_iter().map(|(slot, v)| + // (Nibbles::from_h256_be(hash(&slot.bytes_be())), v))); + + storage_trie.extend( + storage_writes + .into_iter() + .map(|(k, v)| (Nibbles::from_h256_be(hash(&k.bytes_be())), v)), + ); } for (hashed_acc_addr, s_trie_writes) in deltas.state_writes { let val_k = Nibbles::from_h256_be(hashed_acc_addr); // If the account was created, then it will not exist in the trie. - let val_bytes = trie_state - .state - .get(val_k) - .unwrap_or(&EMPTY_ACCOUNT_BYTES_RLPED); + let val_bytes = trie_state.state.get(val_k).unwrap_or_else(|| { + println!("ACCOUNT CREATED DURING DELTA APPLY! 
{}", hashed_acc_addr); + &EMPTY_ACCOUNT_BYTES_RLPED + }); + + println!( + "Empty RLP account: {:?}", + rlp::decode::(&EMPTY_ACCOUNT_BYTES_RLPED).unwrap() + ); let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) @@ -366,16 +473,23 @@ impl StateTrieWrites { h_addr: &HashedAccountAddr, acc_storage_tries: &HashMap, ) -> TraceParsingResult<()> { + println!("Applying writes!"); + let storage_root_hash_change = match self.storage_trie_change { false => None, true => { let storage_trie = acc_storage_tries .get(h_addr) .ok_or(TraceParsingError::MissingAccountStorageTrie(*h_addr))?; + Some(storage_trie.hash()) } }; + if let Some(new_t) = storage_root_hash_change { + println!("NEW STORAGE ROOT BEING APPLIED: {:x}", new_t); + } + update_val_if_some(&mut state_node.balance, self.balance); update_val_if_some(&mut state_node.nonce, self.nonce); update_val_if_some(&mut state_node.storage_root, storage_root_hash_change); diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index fefb778d9..8e83333f5 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -1,5 +1,6 @@ use std::collections::{HashMap, HashSet}; use std::fmt::Debug; +use std::str::FromStr; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; @@ -15,7 +16,7 @@ use crate::trace_protocol::{ }; use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, - HashedStorageAddrNibbles, OtherBlockData, StorageAddr, StorageVal, TrieRootHash, TxnProofGenIR, + HashedStorageAddrNibbles, OtherBlockData, StorageAddr, TrieRootHash, TxnProofGenIR, EMPTY_TRIE_HASH, }; use crate::utils::{ @@ -53,6 +54,14 @@ impl BlockTrace { print_value_and_hash_nodes_of_trie(&pre_image_data.tries.state); + println!("SPECIAL QUERY"); + let res = pre_image_data.tries.state.get( + Nibbles::from_str("F36D6FADC19B5EC9189AE65683241081F7C772EC596EA1FACB9DAEF2A1396637") + .unwrap(), + ); + + println!("SPECIAL QUERY RES: {:?}", res); + for (h_addr, s_trie) in pre_image_data.tries.storage.iter() { print_value_and_hash_nodes_of_storage_trie(h_addr, s_trie); } @@ -212,20 +221,20 @@ impl TxnInfo { let storage_writes = trace.storage_written.unwrap_or_default(); - let storage_read_keys = trace.storage_read.into_iter().flat_map(|reads| { - reads - .into_iter() - .map(|addr| storage_addr_to_nibbles_even_nibble_fixed_hashed(&addr)) - }); + let storage_read_keys = trace + .storage_read + .into_iter() + .flat_map(|reads| reads.into_iter()); - let storage_write_keys = storage_writes - .keys() - .map(storage_addr_to_nibbles_even_nibble_fixed_hashed); - let storage_access_keys = storage_read_keys.chain(storage_write_keys); + let storage_write_keys = storage_writes.keys(); + let storage_access_keys = storage_read_keys.chain(storage_write_keys.copied()); - nodes_used_by_txn - .storage_accesses - .push((hashed_addr, storage_access_keys.collect())); + nodes_used_by_txn.storage_accesses.push(( + Nibbles::from_h256_be(hashed_addr), + storage_access_keys + .map(|k| storage_addr_to_nibbles_even_nibble_fixed_hashed(&k)) + .collect(), + )); let storage_trie_change = !storage_writes.is_empty(); let code_change = trace.code_usage.is_some(); @@ -249,12 +258,12 @@ impl TxnInfo { let storage_writes_vec = storage_writes .into_iter() - .map(|(k, v)| (storage_addr_to_nibbles_even_nibble_fixed_hashed(&k), v)) + .map(|(k, v)| (Nibbles::from_h256_be(k), rlp::encode(&v).to_vec())) .collect(); 
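        // Values are RLP-encoded up front; the write keys are hashed later,
        // when the deltas are applied to the storage tries in `decoding.rs`.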
nodes_used_by_txn .storage_writes - .push((hashed_addr, storage_writes_vec)); + .push((Nibbles::from_h256_be(hashed_addr), storage_writes_vec)); nodes_used_by_txn.state_accesses.push(hashed_addr); @@ -273,6 +282,11 @@ impl TxnInfo { } } + // println!( + // "Storage accesses for {:x} (hashed: {:x}): {:#?}", + // addr, hashed_addr, nodes_used_by_txn + // ); + let accounts_with_storage_accesses: HashSet<_> = HashSet::from_iter( nodes_used_by_txn .storage_accesses @@ -280,13 +294,19 @@ impl TxnInfo { .filter(|(_, slots)| !slots.is_empty()) .map(|(addr, _)| *addr), ); + println!( + "Account with storage accesses: {:#?}", + accounts_with_storage_accesses + ); let all_accounts_with_non_empty_storage = all_accounts_in_pre_image .iter() .filter(|(_, data)| data.storage_root != EMPTY_TRIE_HASH); let accounts_with_storage_but_no_storage_accesses = all_accounts_with_non_empty_storage - .filter(|&(addr, _data)| (!accounts_with_storage_accesses.contains(addr))) + .filter(|&(addr, _data)| { + !accounts_with_storage_accesses.contains(&Nibbles::from_h256_be(*addr)) + }) .map(|(addr, data)| (*addr, data.storage_root)); nodes_used_by_txn @@ -334,11 +354,8 @@ pub(crate) struct NodesUsedByTxn { pub(crate) state_writes: Vec<(HashedAccountAddr, StateTrieWrites)>, // Note: All entries in `storage_writes` also appear in `storage_accesses`. - pub(crate) storage_accesses: Vec<(HashedAccountAddr, Vec)>, - pub(crate) storage_writes: Vec<( - HashedAccountAddr, - Vec<(HashedStorageAddrNibbles, StorageVal)>, - )>, + pub(crate) storage_accesses: Vec<(Nibbles, Vec)>, + pub(crate) storage_writes: Vec<(Nibbles, Vec<(HashedStorageAddrNibbles, Vec)>)>, pub(crate) state_accounts_with_no_accesses_but_storage_tries: HashMap, } @@ -364,6 +381,32 @@ fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbl // I think this is all we need to do? Yell at me if this breaks things. // H256's are never going to be truncated I think. - let hashed_addr = hash(addr.as_bytes()); - Nibbles::from_h256_be(hashed_addr) + // // TODO: Disgusting hack! Remove if this works... + // let s = hex::encode(addr.as_bytes()); + + // let mut n = Nibbles::from_str(&s).unwrap(); + // let odd_count = (n.count & 1) == 1; + + // if odd_count { + // n.push_nibble_front(0); + // } + + // n + + // let hashed_addr = hash(addr.as_bytes()); + // Nibbles::from_h256_be(hashed_addr) + + Nibbles::from_h256_be(hash(&addr.0)) +} + +// TODO: Extreme hack! Please don't keep... +fn string_to_nibbles_even_nibble_fixed(s: &str) -> Nibbles { + let mut n = Nibbles::from_str(s).unwrap(); + let odd_count = (n.count & 1) == 1; + + if odd_count { + n.push_nibble_front(0); + } + + n } diff --git a/src/types.rs b/src/types.rs index 3bf36063c..faa1a976b 100644 --- a/src/types.rs +++ b/src/types.rs @@ -16,6 +16,7 @@ pub type HashedNodeAddr = H256; pub type HashedStorageAddr = H256; pub type HashedStorageAddrNibbles = Nibbles; pub type StorageAddr = H256; +pub type StorageAddrNibbles = H256; pub type StorageVal = U256; pub type TrieRootHash = H256; pub type TxnIdx = usize; From 76d99da587f9613afefa97d27346f588f3d7f1e5 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 13 Nov 2023 16:42:54 -0700 Subject: [PATCH 110/208] DNM! 
More prints and fixes --- Cargo.toml | 1 + src/processed_block_trace.rs | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 49acde180..32748da5f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ version = "0.1.0" edition = "2021" [dependencies] +bytes = "1.5.0" ciborium = "0.2.1" ciborium-io = "0.2.1" enum-as-inner = "0.6.0" diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 8e83333f5..6a54b844d 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -313,9 +313,13 @@ impl TxnInfo { .state_accounts_with_no_accesses_but_storage_tries .extend(accounts_with_storage_but_no_storage_accesses); + // TODO: Make more efficient... + let mut receipt_node_bytes = rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(); + receipt_node_bytes.insert(0, 2); + let new_meta_state = TxnMetaState { txn_bytes: self.meta.byte_code, - receipt_node_bytes: rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(), + receipt_node_bytes, gas_used: self.meta.gas_used, block_bloom, }; @@ -396,7 +400,7 @@ fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbl // let hashed_addr = hash(addr.as_bytes()); // Nibbles::from_h256_be(hashed_addr) - Nibbles::from_h256_be(hash(&addr.0)) + Nibbles::from_h256_be(hash(&addr.0)) // TODO CHeck endianness... } // TODO: Extreme hack! Please don't keep... From 76e0eaa03defae2412944ecdbcce6f9f2f26aa06 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 14 Nov 2023 13:30:21 -0700 Subject: [PATCH 111/208] Fixed dummy txns not producing the right trie hashes --- src/decoding.rs | 51 ++++++++++++++++++++++++++++++++++++------------- 1 file changed, 38 insertions(+), 13 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index f83a87a17..f0b1e0eb0 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,7 +1,7 @@ use std::{ collections::{HashMap, HashSet}, fmt::{self, Display, Formatter}, - iter::{empty, once}, + iter::once, }; use eth_trie_utils::{ @@ -198,7 +198,11 @@ impl ProcessedBlockTrace { }) .collect::>>()?; - Self::pad_gen_inputs_with_dummy_inputs_if_needed(&mut txn_gen_inputs, &other_data.b_data); + Self::pad_gen_inputs_with_dummy_inputs_if_needed( + &mut txn_gen_inputs, + &other_data.b_data, + &curr_block_tries, + ); Ok(txn_gen_inputs) } @@ -440,7 +444,10 @@ impl ProcessedBlockTrace { fn pad_gen_inputs_with_dummy_inputs_if_needed( gen_inputs: &mut Vec, b_data: &BlockLevelData, + final_trie_state: &PartialTrieState, ) { + println!("Padding len: {}", gen_inputs.len()); + match gen_inputs.len() { 0 => { // Need to pad with two dummy txns. @@ -451,6 +458,7 @@ impl ProcessedBlockTrace { // block. 
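                // (Aggregation consumes child proofs strictly in pairs, so a
                // block must yield at least two txn-level proofs; the dummies
                // are no-op fillers that leave the trie roots unchanged.)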
gen_inputs.push(create_dummy_txn_gen_input_single_dummy_txn( &gen_inputs[0].gen_inputs, + final_trie_state, b_data, )) } @@ -509,11 +517,11 @@ fn calculate_trie_input_hashes(t_inputs: &TrieInputs) -> TrieRoots { fn create_dummy_txn_gen_input_single_dummy_txn( prev_real_gen_input: &GenerationInputs, + final_trie_state: &PartialTrieState, b_data: &BlockLevelData, ) -> TxnProofGenIR { - let partial_sub_storage_tries: Vec<_> = prev_real_gen_input - .tries - .storage_tries + let partial_sub_storage_tries: Vec<_> = final_trie_state + .storage .iter() .map(|(hashed_acc_addr, s_trie)| { ( @@ -524,16 +532,33 @@ fn create_dummy_txn_gen_input_single_dummy_txn( .collect(); let tries = TrieInputs { - state_trie: create_fully_hashed_out_sub_partial_trie(&prev_real_gen_input.tries.state_trie), - transactions_trie: create_fully_hashed_out_sub_partial_trie( - &prev_real_gen_input.tries.transactions_trie, - ), - receipts_trie: create_fully_hashed_out_sub_partial_trie( - &prev_real_gen_input.tries.receipts_trie, - ), + state_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.state), + transactions_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.txn), + receipts_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.receipt), storage_tries: partial_sub_storage_tries, }; + println!( + "Orig trie hash: {:x}", + prev_real_gen_input.tries.state_trie.hash() + ); + println!("State sub trie: {:#?}", tries.state_trie); + + assert_eq!( + tries.state_trie.hash(), + prev_real_gen_input.trie_roots_after.state_root + ); + println!( + "{} == {}", + tries.state_trie.hash(), + prev_real_gen_input.trie_roots_after.state_root + ); + + println!( + "Fully hashed out dummy state trie: {:x}", + tries.state_trie.hash() + ); + let gen_inputs = GenerationInputs { txn_number_before: 0.into(), gas_used_before: prev_real_gen_input.gas_used_after, @@ -557,7 +582,7 @@ fn create_dummy_txn_gen_input_single_dummy_txn( // way to do it. fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedPartialTrie { // Impossible to actually fail with an empty iter. - create_trie_subset(trie, empty::()).unwrap() + create_trie_subset(trie, once(0_u64)).unwrap() } fn create_dummy_txn_pair_for_empty_block(b_data: &BlockLevelData) -> [TxnProofGenIR; 2] { From 44310f49f849d19282fcb30360d74ef14f52cce4 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 14 Nov 2023 18:12:26 -0700 Subject: [PATCH 112/208] Now adds a mapping for the empty code hash --- src/processed_block_trace.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 6a54b844d..7f395372d 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -213,7 +213,8 @@ impl TxnInfo { code_hash_resolve_f: &F, ) -> ProcessedTxnInfo { let mut nodes_used_by_txn = NodesUsedByTxn::default(); - let mut contract_code_accessed = HashMap::new(); + let mut contract_code_accessed = create_empty_code_access_map(); + let block_bloom = self.block_bloom(); for (addr, trace) in self.traces { @@ -351,6 +352,10 @@ impl TxnInfo { } } +fn create_empty_code_access_map() -> HashMap> { + HashMap::from_iter(once((EMPTY_CODE_HASH, Vec::new()))) +} + /// Note that "*_accesses" includes writes. 
#[derive(Debug, Default)] pub(crate) struct NodesUsedByTxn { From b95fbc6fe7371a5a8ba17799991d8c2e37d80b5c Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 15 Nov 2023 09:39:54 -0700 Subject: [PATCH 113/208] More debug prints (remove later) --- src/compact/compact_to_partial_trie.rs | 10 ++++++++++ src/decoding.rs | 7 +++++++ src/processed_block_trace.rs | 3 ++- src/trace_protocol.rs | 5 ++++- 4 files changed, 23 insertions(+), 2 deletions(-) diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index 0c671d52f..bb6844853 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -5,6 +5,7 @@ use eth_trie_utils::{ partial_trie::{HashedPartialTrie, PartialTrie}, }; use ethereum_types::H256; +use log::trace; use plonky2_evm::generation::mpt::AccountRlp; use super::compact_prestate_processing::{ @@ -51,6 +52,8 @@ pub(super) fn create_partial_trie_from_compact_node_rec( curr_node: &NodeEntry, output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { + trace!("Processing node {} into `PartialTrie` node...", curr_node); + match curr_node { NodeEntry::Branch(n) => process_branch(curr_key, n, output), NodeEntry::Code(c_bytes) => process_code(c_bytes.clone(), output), @@ -82,6 +85,7 @@ fn process_code( c_bytes: Vec, output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { + println!("PROCESSING CODE NODE!!"); let c_hash = hash(&c_bytes); output.code.insert(c_hash, c_bytes); @@ -146,6 +150,12 @@ fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup( let code_hash = match &acc_data.account_node_code { Some(AccountNodeCode::CodeNode(c_bytes)) => { let c_hash = hash(c_bytes); + println!( + "Adding code hash mapping ({:x} --> {})", + c_hash, + hex::encode(c_bytes) + ); + output.code.insert(c_hash, c_bytes.clone()); c_hash diff --git a/src/decoding.rs b/src/decoding.rs index f0b1e0eb0..1c85e7529 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -158,6 +158,8 @@ impl ProcessedBlockTrace { addresses, }; + println!("Code mapping: {:?}", gen_inputs.contract_code); + let txn_proof_gen_ir = TxnProofGenIR { txn_idx, gen_inputs, @@ -420,12 +422,17 @@ impl ProcessedBlockTrace { let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) })?; + + println!("Account for (before) {:x}: {:#?}", hashed_acc_addr, account); + s_trie_writes.apply_writes_to_state_node( &mut account, &hashed_acc_addr, &trie_state.storage, )?; + println!("Account for {:x}: {:#?}", hashed_acc_addr, account); + let updated_account_bytes = rlp::encode(&account); trie_state .state diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 7f395372d..45364e662 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -1,5 +1,6 @@ use std::collections::{HashMap, HashSet}; use std::fmt::Debug; +use std::iter::once; use std::str::FromStr; use eth_trie_utils::nibbles::Nibbles; @@ -17,7 +18,7 @@ use crate::trace_protocol::{ use crate::types::{ Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData, StorageAddr, TrieRootHash, TxnProofGenIR, - EMPTY_TRIE_HASH, + EMPTY_CODE_HASH, EMPTY_TRIE_HASH, }; use crate::utils::{ hash, print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie, diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 6599c09f4..2cf38d96c 100644 --- a/src/trace_protocol.rs +++ 
b/src/trace_protocol.rs @@ -195,7 +195,10 @@ impl ContractCodeUsage { pub(crate) fn get_code_hash(&self) -> CodeHash { match self { ContractCodeUsage::Read(hash) => *hash, - ContractCodeUsage::Write(bytes) => hash(bytes), + ContractCodeUsage::Write(bytes) => { + println!("Hashing code {} ...", hex::encode(&bytes.0)); + hash(bytes) + } } } } From 46353b3b470f1ac21363d870c0f6bb239d191e39 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 15 Nov 2023 10:18:56 -0700 Subject: [PATCH 114/208] Updated to the latest plonky2 --- Cargo.toml | 2 +- src/decoding.rs | 23 +++++++++++++++++------ src/processed_block_trace.rs | 9 +++++++-- 3 files changed, 25 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 32748da5f..86b3816c8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,7 +15,7 @@ ethereum-types = "0.14.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "49976ea2a98dcb6052bd6cf3a65f730e55727330" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "0e63e66196fba06aa496a5249a0c9d997a549829" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" diff --git a/src/decoding.rs b/src/decoding.rs index 1c85e7529..faa2cb187 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -140,15 +140,15 @@ impl ProcessedBlockTrace { println!("TRIE ROOTS AFTER: {:?}", trie_roots_after); - println!("SIGNED BYTES: {}", hex::encode(&txn_info.meta.txn_bytes)); - let gen_inputs = GenerationInputs { txn_number_before: txn_idx.saturating_sub(1).into(), gas_used_before: tot_gas_used, block_bloom_before: curr_bloom, gas_used_after: new_tot_gas_used, block_bloom_after: new_bloom, - signed_txns: vec![txn_info.meta.txn_bytes], + signed_txn: txn_info.meta.txn_bytes, + withdrawals: Vec::new(), /* TODO: Once this is added to the trace spec, add + * it here... 
*/ tries, trie_roots_after, genesis_state_trie_root: other_data.genesis_state_trie_root, @@ -440,7 +440,7 @@ impl ProcessedBlockTrace { } let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); - trie_state.txn.insert(txn_k, meta.txn_bytes.clone()); + trie_state.txn.insert(txn_k, meta.txn_bytes()); trie_state .receipt .insert(txn_k, meta.receipt_node_bytes.clone()); @@ -572,7 +572,8 @@ fn create_dummy_txn_gen_input_single_dummy_txn( block_bloom_before: prev_real_gen_input.block_bloom_after, gas_used_after: prev_real_gen_input.gas_used_after, block_bloom_after: prev_real_gen_input.block_bloom_after, - signed_txns: Vec::default(), + signed_txn: None, + withdrawals: Vec::default(), tries, trie_roots_after: prev_real_gen_input.trie_roots_after.clone(), genesis_state_trie_root: prev_real_gen_input.genesis_state_trie_root, @@ -606,7 +607,8 @@ fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> TxnProofG block_bloom_before: Bloom::default(), gas_used_after: 0.into(), block_bloom_after: Bloom::default(), - signed_txns: Vec::default(), + signed_txn: None, + withdrawals: Vec::default(), tries: create_empty_trie_inputs(), trie_roots_after: create_trie_roots_for_empty_tries(), genesis_state_trie_root: TrieRootHash::default(), @@ -619,6 +621,15 @@ fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> TxnProofG gen_inputs_to_ir(gen_inputs, txn_idx) } +impl TxnMetaState { + fn txn_bytes(&self) -> Vec { + match self.txn_bytes.as_ref() { + Some(v) => v.clone(), + None => Vec::default(), + } + } +} + fn gen_inputs_to_ir(gen_inputs: GenerationInputs, txn_idx: TxnIdx) -> TxnProofGenIR { TxnProofGenIR { txn_idx, diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 45364e662..a12c86c7d 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -319,8 +319,13 @@ impl TxnInfo { let mut receipt_node_bytes = rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(); receipt_node_bytes.insert(0, 2); + let txn_bytes = match self.meta.byte_code.is_empty() { + false => Some(self.meta.byte_code), + true => None, + }; + let new_meta_state = TxnMetaState { - txn_bytes: self.meta.byte_code, + txn_bytes, receipt_node_bytes, gas_used: self.meta.gas_used, block_bloom, @@ -380,7 +385,7 @@ pub(crate) struct StateTrieWrites { #[derive(Debug, Default)] pub(crate) struct TxnMetaState { - pub(crate) txn_bytes: Vec, + pub(crate) txn_bytes: Option>, pub(crate) receipt_node_bytes: Vec, pub(crate) gas_used: u64, pub(crate) block_bloom: Bloom, From 96c27e8030a74ca20bf964ad685b1a0adcc069a8 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 15 Nov 2023 11:29:55 -0700 Subject: [PATCH 115/208] Fixed dummy txns - If we need to add exactly one dummy txn, we now add the dummy txn to the first slot instead of the second. --- src/decoding.rs | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index faa2cb187..da4fde097 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -458,16 +458,10 @@ impl ProcessedBlockTrace { match gen_inputs.len() { 0 => { // Need to pad with two dummy txns. - gen_inputs.extend(create_dummy_txn_pair_for_empty_block(b_data)) + gen_inputs.extend(create_dummy_txn_pair_for_empty_block(b_data)); } 1 => { - // Only need one dummy txn, but it needs info from the one real txn in the - // block. 
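
[Note — a sketch, not part of the patch: the `Option<Vec<u8>>` switch above lets a padding entry carry no txn at all. Assuming that field type, the new `txn_bytes` accessor can be written with the std helper:]

    impl TxnMetaState {
        /// Empty bytes stand in for "no real txn" (dummy padding entries).
        fn txn_bytes(&self) -> Vec<u8> {
            self.txn_bytes.clone().unwrap_or_default()
        }
    }
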
- gen_inputs.push(create_dummy_txn_gen_input_single_dummy_txn( - &gen_inputs[0].gen_inputs, - final_trie_state, - b_data, - )) + gen_inputs.insert(0, create_dummy_gen_input(b_data, 0)); } _ => (), } From 6ca88359ff11a8333a4e13102daabccca27f9023 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 15 Nov 2023 12:10:43 -0700 Subject: [PATCH 116/208] Fixed dummy txns again --- src/decoding.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index da4fde097..63c8c30a5 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -461,7 +461,12 @@ impl ProcessedBlockTrace { gen_inputs.extend(create_dummy_txn_pair_for_empty_block(b_data)); } 1 => { - gen_inputs.insert(0, create_dummy_gen_input(b_data, 0)); + let dummy_txn = create_dummy_txn_gen_input_single_dummy_txn( + &gen_inputs[0].gen_inputs, + final_trie_state, + b_data, + ); + gen_inputs.insert(0, dummy_txn); } _ => (), } @@ -534,8 +539,8 @@ fn create_dummy_txn_gen_input_single_dummy_txn( let tries = TrieInputs { state_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.state), - transactions_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.txn), - receipts_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.receipt), + transactions_trie: HashedPartialTrie::default(), + receipts_trie: HashedPartialTrie::default(), storage_tries: partial_sub_storage_tries, }; @@ -577,7 +582,7 @@ fn create_dummy_txn_gen_input_single_dummy_txn( addresses: Vec::default(), }; - gen_inputs_to_ir(gen_inputs, 1) + gen_inputs_to_ir(gen_inputs, 0) } // We really want to get a trie with just a hash node here, and this is an easy From 25b69bf93df51824887bd182094a53da224f7959 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 15 Nov 2023 13:19:00 -0700 Subject: [PATCH 117/208] Really really fixed dummy txns --- src/decoding.rs | 29 +++++++++++------------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 63c8c30a5..2c029cbaa 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -464,7 +464,6 @@ impl ProcessedBlockTrace { let dummy_txn = create_dummy_txn_gen_input_single_dummy_txn( &gen_inputs[0].gen_inputs, final_trie_state, - b_data, ); gen_inputs.insert(0, dummy_txn); } @@ -522,9 +521,8 @@ fn calculate_trie_input_hashes(t_inputs: &TrieInputs) -> TrieRoots { } fn create_dummy_txn_gen_input_single_dummy_txn( - prev_real_gen_input: &GenerationInputs, + next_real_gen_input: &GenerationInputs, final_trie_state: &PartialTrieState, - b_data: &BlockLevelData, ) -> TxnProofGenIR { let partial_sub_storage_tries: Vec<_> = final_trie_state .storage @@ -546,18 +544,18 @@ fn create_dummy_txn_gen_input_single_dummy_txn( println!( "Orig trie hash: {:x}", - prev_real_gen_input.tries.state_trie.hash() + next_real_gen_input.tries.state_trie.hash() ); println!("State sub trie: {:#?}", tries.state_trie); assert_eq!( tries.state_trie.hash(), - prev_real_gen_input.trie_roots_after.state_root + next_real_gen_input.trie_roots_after.state_root ); println!( "{} == {}", tries.state_trie.hash(), - prev_real_gen_input.trie_roots_after.state_root + next_real_gen_input.trie_roots_after.state_root ); println!( @@ -567,19 +565,14 @@ fn create_dummy_txn_gen_input_single_dummy_txn( let gen_inputs = GenerationInputs { txn_number_before: 0.into(), - gas_used_before: prev_real_gen_input.gas_used_after, - block_bloom_before: prev_real_gen_input.block_bloom_after, - gas_used_after: prev_real_gen_input.gas_used_after, - block_bloom_after: 
prev_real_gen_input.block_bloom_after, + gas_used_before: 0.into(), + gas_used_after: 0.into(), + block_bloom_before: [0.into(); 8], + block_bloom_after: [0.into(); 8], signed_txn: None, - withdrawals: Vec::default(), - tries, - trie_roots_after: prev_real_gen_input.trie_roots_after.clone(), - genesis_state_trie_root: prev_real_gen_input.genesis_state_trie_root, - contract_code: HashMap::default(), - block_metadata: b_data.b_meta.clone(), - block_hashes: b_data.b_hashes.clone(), - addresses: Vec::default(), + withdrawals: vec![], + trie_roots_after: next_real_gen_input.trie_roots_after.clone(), + ..(next_real_gen_input.clone()) }; gen_inputs_to_ir(gen_inputs, 0) From b9b36e3153b09b0688f6242173dff427bb176c7c Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 15 Nov 2023 13:55:40 -0700 Subject: [PATCH 118/208] Fixed dummy txns post roots --- src/decoding.rs | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 2c029cbaa..cbcbb21ba 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -563,6 +563,12 @@ fn create_dummy_txn_gen_input_single_dummy_txn( tries.state_trie.hash() ); + let trie_roots_after = TrieRoots { + state_root: next_real_gen_input.tries.state_trie.hash(), + transactions_root: EMPTY_TRIE_HASH, + receipts_root: EMPTY_TRIE_HASH, + }; + let gen_inputs = GenerationInputs { txn_number_before: 0.into(), gas_used_before: 0.into(), @@ -571,7 +577,7 @@ fn create_dummy_txn_gen_input_single_dummy_txn( block_bloom_after: [0.into(); 8], signed_txn: None, withdrawals: vec![], - trie_roots_after: next_real_gen_input.trie_roots_after.clone(), + trie_roots_after, ..(next_real_gen_input.clone()) }; @@ -593,6 +599,12 @@ fn create_dummy_txn_pair_for_empty_block(b_data: &BlockLevelData) -> [TxnProofGe } fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> TxnProofGenIR { + let trie_roots_after = TrieRoots { + state_root: b_data.b_hashes.prev_hashes[255], + transactions_root: EMPTY_TRIE_HASH, + receipts_root: EMPTY_TRIE_HASH, + }; + let gen_inputs = GenerationInputs { txn_number_before: txn_idx.saturating_sub(1).into(), gas_used_before: 0.into(), @@ -602,7 +614,7 @@ fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> TxnProofG signed_txn: None, withdrawals: Vec::default(), tries: create_empty_trie_inputs(), - trie_roots_after: create_trie_roots_for_empty_tries(), + trie_roots_after, genesis_state_trie_root: TrieRootHash::default(), contract_code: HashMap::default(), block_metadata: b_data.b_meta.clone(), @@ -637,11 +649,3 @@ fn create_empty_trie_inputs() -> TrieInputs { storage_tries: Vec::default(), } } - -const fn create_trie_roots_for_empty_tries() -> TrieRoots { - TrieRoots { - state_root: EMPTY_TRIE_HASH, - transactions_root: EMPTY_TRIE_HASH, - receipts_root: EMPTY_TRIE_HASH, - } -} From 51f8ac91d21990a2f8243a72a3716ce6f4317910 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Wed, 15 Nov 2023 16:31:26 -0500 Subject: [PATCH 119/208] Fix empty block with dummy pair --- src/decoding.rs | 52 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index cbcbb21ba..5b74d4539 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -458,7 +458,10 @@ impl ProcessedBlockTrace { match gen_inputs.len() { 0 => { // Need to pad with two dummy txns. 
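
[Note — the padding invariant this series of fixes is converging on: aggregation combines exactly two child proofs, so a block needs at least two txn-level proofs. A sketch, with `dummy_pair` and `single_dummy` as placeholder values:]

    match gen_inputs.len() {
        0 => gen_inputs.extend(dummy_pair),      // empty block: two dummies
        1 => gen_inputs.insert(0, single_dummy), // prepend, so the real txn's
        _ => (),                                 // post-state stays final
    }
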
- gen_inputs.extend(create_dummy_txn_pair_for_empty_block(b_data)); + gen_inputs.extend(create_dummy_txn_pair_for_empty_block( + b_data, + final_trie_state, + )); } 1 => { let dummy_txn = create_dummy_txn_gen_input_single_dummy_txn( @@ -591,35 +594,37 @@ fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedP create_trie_subset(trie, once(0_u64)).unwrap() } -fn create_dummy_txn_pair_for_empty_block(b_data: &BlockLevelData) -> [TxnProofGenIR; 2] { +fn create_dummy_txn_pair_for_empty_block( + b_data: &BlockLevelData, + final_trie_state: &PartialTrieState, +) -> [TxnProofGenIR; 2] { [ - create_dummy_gen_input(b_data, 0), - create_dummy_gen_input(b_data, 1), + create_dummy_gen_input(b_data, final_trie_state, 0), + create_dummy_gen_input(b_data, final_trie_state, 1), ] } -fn create_dummy_gen_input(b_data: &BlockLevelData, txn_idx: TxnIdx) -> TxnProofGenIR { +fn create_dummy_gen_input( + b_data: &BlockLevelData, + final_trie_state: &PartialTrieState, + txn_idx: TxnIdx, +) -> TxnProofGenIR { + let tries = create_dummy_proof_trie_inputs(final_trie_state); + let trie_roots_after = TrieRoots { - state_root: b_data.b_hashes.prev_hashes[255], + state_root: tries.state_trie.hash(), transactions_root: EMPTY_TRIE_HASH, receipts_root: EMPTY_TRIE_HASH, }; let gen_inputs = GenerationInputs { - txn_number_before: txn_idx.saturating_sub(1).into(), - gas_used_before: 0.into(), - block_bloom_before: Bloom::default(), - gas_used_after: 0.into(), - block_bloom_after: Bloom::default(), signed_txn: None, - withdrawals: Vec::default(), - tries: create_empty_trie_inputs(), + tries, trie_roots_after, genesis_state_trie_root: TrieRootHash::default(), - contract_code: HashMap::default(), block_metadata: b_data.b_meta.clone(), block_hashes: b_data.b_hashes.clone(), - addresses: Vec::default(), + ..GenerationInputs::default() }; gen_inputs_to_ir(gen_inputs, txn_idx) @@ -641,11 +646,22 @@ fn gen_inputs_to_ir(gen_inputs: GenerationInputs, txn_idx: TxnIdx) -> TxnProofGe } } -fn create_empty_trie_inputs() -> TrieInputs { +fn create_dummy_proof_trie_inputs(final_trie_state: &PartialTrieState) -> TrieInputs { + let partial_sub_storage_tries: Vec<_> = final_trie_state + .storage + .iter() + .map(|(hashed_acc_addr, s_trie)| { + ( + *hashed_acc_addr, + create_fully_hashed_out_sub_partial_trie(s_trie), + ) + }) + .collect(); + TrieInputs { - state_trie: HashedPartialTrie::default(), + state_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.state), transactions_trie: HashedPartialTrie::default(), receipts_trie: HashedPartialTrie::default(), - storage_tries: Vec::default(), + storage_tries: partial_sub_storage_tries, } } From 40aa848085461e4c0d3f3e624d0f24c033f6cf98 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Wed, 15 Nov 2023 16:42:08 -0500 Subject: [PATCH 120/208] Reduce overhead in preprocessing --- plonky_block_proof_gen/src/prover_state.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 5b1c02423..15305fd5e 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -27,12 +27,12 @@ pub struct ProverStateBuilder { impl Default for ProverStateBuilder { fn default() -> Self { Self { - arithmetic_circuit_size: 16..22, - byte_packing_circuit_size: 10..22, + arithmetic_circuit_size: 16..17, + byte_packing_circuit_size: 10..20, cpu_circuit_size: 15..22, - keccak_circuit_size: 14..22, - keccak_sponge_circuit_size: 9..22, - 
logic_circuit_size: 12..22,
+            keccak_circuit_size: 14..17,
+            keccak_sponge_circuit_size: 9..16,
+            logic_circuit_size: 12..16,
             memory_circuit_size: 18..22,
         }
     }

From c7b9e8914e4bc2dbfefc2110a2b0be6fd38587c8 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Fri, 17 Nov 2023 09:23:31 -0700
Subject: [PATCH 121/208] Fixed & traces

---
 src/decoding.rs              | 176 +++++++++++++++++++++--------------
 src/processed_block_trace.rs |   8 --
 2 files changed, 107 insertions(+), 77 deletions(-)

diff --git a/src/decoding.rs b/src/decoding.rs
index 5b74d4539..226742576 100644
--- a/src/decoding.rs
+++ b/src/decoding.rs
@@ -1,7 +1,8 @@
 use std::{
     collections::{HashMap, HashSet},
     fmt::{self, Display, Formatter},
-    iter::once,
+    iter::{empty, once},
+    str::FromStr,
 };

 use eth_trie_utils::{
@@ -101,8 +102,31 @@ impl ProcessedBlockTrace {
             .map(|(a, v)| (a, rlp::decode::<AccountRlp>(&v).unwrap().storage_root))
             .collect::<Vec<_>>();

+        let all_state_nodes = curr_block_tries
+            .state
+            .items()
+            .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone())))
+            .map(|(a, v)| (a, rlp::decode::<AccountRlp>(&v).unwrap()))
+            .map(|(a, d)| format!("{:x} --> {:#?}", a, d))
+            .collect::<Vec<_>>();
+
+        println!("All state nodes: {:#?}", all_state_nodes);
+
+        println!("All storage roots (before): {:#?}", all_storage_roots);
+
+        println!("Full storage trie (before):");
+        for (addr, trie) in curr_block_tries.storage.iter() {
+            println!("ALL (before) Storage slots for hashed addr {:x}:", addr);
+
+            let slots = trie
+                .items()
+                .map(|(k, v)| format!("{:x}: {:?}", k, v))
+                .collect::<Vec<_>>();
+            println!("----------");
+            println!("{:#?}", slots);
+            println!("----------\n");
+        }
+
         let tries = Self::create_minimal_partial_tries_needed_by_txn(
             &mut curr_block_tries,
             &txn_info.nodes_used_by_txn,
             txn_idx,
         )?;
@@ -129,6 +153,8 @@ impl ProcessedBlockTrace {
             txn_idx,
         )?;

+        // hacky_rpc_call_to_update_new_coinbase_balance(&mut curr_block_tries.state);
+
         // TODO: Clean up if this works...
         let trie_roots_after = TrieRoots {
             state_root: curr_block_tries.state.hash(),
@@ -213,19 +239,27 @@ impl ProcessedBlockTrace {
         nodes_used_by_txn: &NodesUsedByTxn,
         txn_idx: TxnIdx,
     ) -> TraceParsingResult<TrieInputs> {
-        let state_trie = Self::create_minimal_state_partial_trie(
+        let state_trie = create_minimal_state_partial_trie(
             &curr_block_tries.state,
             nodes_used_by_txn.state_accesses.iter().cloned(),
         )?;

+        println!("SPECIAL QUERY ON PARTIAL");
+        let res = state_trie.get(
+            Nibbles::from_str("8556274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79")
+                .unwrap(),
+        );
+
+        println!("SPECIAL QUERY ON PARTIAL RES: {:?}", res.map(hex::encode));
+
         // TODO: Replace cast once `eth_trie_utils` supports `into` for `usize...
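
[Aside — sketch, not part of the patch: the debug dumps above use `eth_trie_utils`' iteration API, where `items()` yields `(Nibbles, ValOrHash)` pairs and `as_val()` keeps only the value leaves actually present in the partial trie (hashed-out subtries are skipped):]

    let leaves: Vec<_> = trie
        .items()
        .filter_map(|(k, v)| v.as_val().map(|bytes| (k, bytes.clone())))
        .collect();
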
- let transactions_trie = Self::create_trie_subset_wrapped( + let transactions_trie = create_trie_subset_wrapped( &curr_block_tries.txn, once((txn_idx as u32).into()), TrieType::Txn, )?; - let receipts_trie = Self::create_trie_subset_wrapped( + let receipts_trie = create_trie_subset_wrapped( &curr_block_tries.receipt, once((txn_idx as u32).into()), TrieType::Receipt, @@ -237,7 +271,7 @@ impl ProcessedBlockTrace { .map(|(k, v)| (H256::from_slice(&k.bytes_be()), v.clone())) .collect::>(); - let storage_tries = Self::create_minimal_storage_partial_tries( + let storage_tries = create_minimal_storage_partial_tries( &mut curr_block_tries.storage, &nodes_used_by_txn.state_accounts_with_no_accesses_but_storage_tries, x.iter(), @@ -271,69 +305,6 @@ impl ProcessedBlockTrace { .collect() } - fn create_minimal_state_partial_trie( - state_trie: &HashedPartialTrie, - state_accesses: impl Iterator, - ) -> TraceParsingResult { - Self::create_trie_subset_wrapped( - state_trie, - state_accesses.map(Nibbles::from_h256_be), - TrieType::State, - ) - } - - // TODO!!!: We really need to be appending the empty storage tries to the base - // trie somewhere else! This is a big hack! - fn create_minimal_storage_partial_tries<'a>( - storage_tries: &mut HashMap, - state_accounts_with_no_accesses_but_storage_tries: &HashMap< - HashedAccountAddr, - TrieRootHash, - >, - accesses_per_account: impl Iterator< - Item = &'a (HashedAccountAddr, Vec), - >, - ) -> TraceParsingResult> { - println!( - "BASE TRIES KEYS: {:#?}", - storage_tries.keys().collect::>() - ); - - accesses_per_account - .map(|(h_addr, mem_accesses)| { - // TODO: Clean up... - let base_storage_trie = match storage_tries.get(&H256(h_addr.0)) { - Some(s_trie) => s_trie, - None => { - let trie = state_accounts_with_no_accesses_but_storage_tries - .get(h_addr) - .map(|s_root| HashedPartialTrie::new(Node::Hash(*s_root))) - .unwrap_or_default(); - storage_tries.insert(*h_addr, trie); // TODO: Really change this... - storage_tries.get(h_addr).unwrap() - } - }; - - let partial_storage_trie = Self::create_trie_subset_wrapped( - base_storage_trie, - mem_accesses.iter().cloned(), - TrieType::Storage, - )?; - - Ok((*h_addr, partial_storage_trie)) - }) - .collect::>() - } - - fn create_trie_subset_wrapped( - trie: &HashedPartialTrie, - accesses: impl Iterator, - trie_type: TrieType, - ) -> TraceParsingResult { - create_trie_subset(trie, accesses) - .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(trie_type)) - } - // It's not clear to me if the client should have an empty storage trie for when // a txn performs the accounts first storage access, but we're going to assume // they won't for now and deal with that case here. @@ -538,8 +509,11 @@ fn create_dummy_txn_gen_input_single_dummy_txn( }) .collect(); + let state_trie_with_coinbase = + create_minimal_state_partial_trie(&final_trie_state.state, empty()).unwrap(); + let tries = TrieInputs { - state_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.state), + state_trie: state_trie_with_coinbase, transactions_trie: HashedPartialTrie::default(), receipts_trie: HashedPartialTrie::default(), storage_tries: partial_sub_storage_tries, @@ -665,3 +639,67 @@ fn create_dummy_proof_trie_inputs(final_trie_state: &PartialTrieState) -> TrieIn storage_tries: partial_sub_storage_tries, } } + +fn create_minimal_state_partial_trie( + state_trie: &HashedPartialTrie, + state_accesses: impl Iterator, +) -> TraceParsingResult { + // TODO: Remove once coinbase issue is fixed... 
+ + let state_accesses_plus_coinbase = state_accesses.chain(once( + H256::from_str("8556274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79").unwrap(), + )); + + create_trie_subset_wrapped( + state_trie, + state_accesses_plus_coinbase.map(Nibbles::from_h256_be), + TrieType::State, + ) +} + +// TODO!!!: We really need to be appending the empty storage tries to the base +// trie somewhere else! This is a big hack! +fn create_minimal_storage_partial_tries<'a>( + storage_tries: &mut HashMap, + state_accounts_with_no_accesses_but_storage_tries: &HashMap, + accesses_per_account: impl Iterator)>, +) -> TraceParsingResult> { + println!( + "BASE TRIES KEYS: {:#?}", + storage_tries.keys().collect::>() + ); + + accesses_per_account + .map(|(h_addr, mem_accesses)| { + // TODO: Clean up... + let base_storage_trie = match storage_tries.get(&H256(h_addr.0)) { + Some(s_trie) => s_trie, + None => { + let trie = state_accounts_with_no_accesses_but_storage_tries + .get(h_addr) + .map(|s_root| HashedPartialTrie::new(Node::Hash(*s_root))) + .unwrap_or_default(); + storage_tries.insert(*h_addr, trie); // TODO: Really change this... + storage_tries.get(h_addr).unwrap() + } + }; + + let partial_storage_trie = create_trie_subset_wrapped( + base_storage_trie, + mem_accesses.iter().cloned(), + TrieType::Storage, + )?; + + Ok((*h_addr, partial_storage_trie)) + }) + .collect::>() +} + +fn create_trie_subset_wrapped( + trie: &HashedPartialTrie, + accesses: impl Iterator, + trie_type: TrieType, +) -> TraceParsingResult { + create_trie_subset(trie, accesses) + .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(trie_type)) +} diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index a12c86c7d..47b704d9e 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -55,14 +55,6 @@ impl BlockTrace { print_value_and_hash_nodes_of_trie(&pre_image_data.tries.state); - println!("SPECIAL QUERY"); - let res = pre_image_data.tries.state.get( - Nibbles::from_str("F36D6FADC19B5EC9189AE65683241081F7C772EC596EA1FACB9DAEF2A1396637") - .unwrap(), - ); - - println!("SPECIAL QUERY RES: {:?}", res); - for (h_addr, s_trie) in pre_image_data.tries.storage.iter() { print_value_and_hash_nodes_of_storage_trie(h_addr, s_trie); } From 4fee6c3e6d50489fe31c0e004cc11daf3f06d41b Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 17 Nov 2023 11:59:52 -0500 Subject: [PATCH 122/208] Fix ranges --- plonky_block_proof_gen/src/prover_state.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 15305fd5e..2d47ac016 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -27,13 +27,13 @@ pub struct ProverStateBuilder { impl Default for ProverStateBuilder { fn default() -> Self { Self { - arithmetic_circuit_size: 16..17, + arithmetic_circuit_size: 16..19, byte_packing_circuit_size: 10..20, cpu_circuit_size: 15..22, keccak_circuit_size: 14..17, keccak_sponge_circuit_size: 9..16, logic_circuit_size: 12..16, - memory_circuit_size: 18..22, + memory_circuit_size: 18..24, } } } From f2c9da855ed3234d90bfd726c990322bdd08ee68 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 17 Nov 2023 15:51:24 -0700 Subject: [PATCH 123/208] Plonky2 bump --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 86b3816c8..3f1dda664 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,7 +15,7 @@ ethereum-types = 
"0.14.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "0e63e66196fba06aa496a5249a0c9d997a549829" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "b9328815e666981d6485b0d8dc04160e93797993" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" From d74e63721d7e1eb079536020682fd9768d2264f6 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 17 Nov 2023 16:00:10 -0700 Subject: [PATCH 124/208] Removed hack coinbase in partial trie hack --- src/decoding.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 226742576..4de44c5ab 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -646,13 +646,9 @@ fn create_minimal_state_partial_trie( ) -> TraceParsingResult { // TODO: Remove once coinbase issue is fixed... - let state_accesses_plus_coinbase = state_accesses.chain(once( - H256::from_str("8556274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79").unwrap(), - )); - create_trie_subset_wrapped( state_trie, - state_accesses_plus_coinbase.map(Nibbles::from_h256_be), + state_accesses.map(Nibbles::from_h256_be), TrieType::State, ) } From 2bfc3f3f07a7c3a51f9f31f8958d4ffbd3e6908c Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 17 Nov 2023 15:52:41 -0700 Subject: [PATCH 125/208] Plonky2 bump & API update --- plonky_block_proof_gen/src/proof_gen.rs | 69 ++++++++++++++++++------- 1 file changed, 49 insertions(+), 20 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 3e34a2d99..426fe91b9 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,5 +1,9 @@ use plonky2::util::timing::TimingTree; -use plonky2_evm::{all_stark::AllStark, config::StarkConfig, proof::PublicValues}; +use plonky2_evm::{ + all_stark::AllStark, + config::StarkConfig, + proof::{ExtraBlockData, PublicValues}, +}; use proof_protocol_decoder::{ proof_gen_types::ProofBeforeAndAfterDeltas, types::{OtherBlockData, TxnProofGenIR}, @@ -11,7 +15,7 @@ use crate::{ GeneratedTxnProof, ProofCommon, }, prover_state::ProverState, - types::PlonkyProofIntern, + types::{PlonkyProofIntern, ProofUnderlyingTxns}, }; pub type ProofGenResult = Result; @@ -44,6 +48,8 @@ pub fn generate_txn_proof( let txn_idx = start_info.txn_idx; let deltas = start_info.deltas(); + println!("BLOCK PROOF INPUTS: {:#?}", start_info); + let (txn_proof_intern, p_vals) = p_state .state .prove_root( @@ -78,22 +84,22 @@ pub fn generate_agg_proof( other_data: OtherBlockData, ) -> ProofGenResult { let expanded_agg_proofs = expand_aggregatable_proofs(lhs_child, rhs_child, other_data); - let deltas = expanded_agg_proofs.p_vals.extra_block_data.clone().into(); let (agg_proof_intern, p_vals) = p_state .state .prove_aggregation( expanded_agg_proofs.lhs.is_agg, expanded_agg_proofs.lhs.intern, + expanded_agg_proofs.p_vals_lhs, expanded_agg_proofs.rhs.is_agg, expanded_agg_proofs.rhs.intern, - expanded_agg_proofs.p_vals, + expanded_agg_proofs.p_vals_rhs, ) .map_err(|err| err.to_string())?; let common = ProofCommon { b_height: lhs_child.b_height(), - deltas, + deltas: expanded_agg_proofs.combined_deltas, roots_before: p_vals.trie_roots_before, roots_after: p_vals.trie_roots_after, }; @@ -108,9 +114,11 @@ pub fn generate_agg_proof( } struct ExpandedAggregatableProofs<'a> { - p_vals: PublicValues, + p_vals_lhs: PublicValues, + p_vals_rhs: PublicValues, lhs: ExpandedAggregatableProof<'a>, rhs: 
ExpandedAggregatableProof<'a>, + combined_deltas: ProofBeforeAndAfterDeltas, } struct ExpandedAggregatableProof<'a> { @@ -118,6 +126,8 @@ struct ExpandedAggregatableProof<'a> { is_agg: bool, } +// TODO: Remove of simplify, as most of this work is occurring inside plonky2 +// now. fn expand_aggregatable_proofs<'a>( lhs_child: &'a AggregatableProof, rhs_child: &'a AggregatableProof, @@ -126,33 +136,52 @@ fn expand_aggregatable_proofs<'a>( let (expanded_lhs, lhs_common) = expand_aggregatable_proof(lhs_child); let (expanded_rhs, rhs_common) = expand_aggregatable_proof(rhs_child); - let p_underlying_txns = lhs_child - .underlying_txns() - .combine(&rhs_child.underlying_txns()); - let deltas = merge_lhs_and_rhs_deltas(&lhs_common.deltas, &rhs_common.deltas); + let lhs_extra_data = + create_extra_block_data_for_child(lhs_common, &other_data, &lhs_child.underlying_txns()); - let extra_block_data = create_extra_block_data( - deltas, - other_data.genesis_state_trie_root, - p_underlying_txns.txn_idxs.start, - p_underlying_txns.txn_idxs.end, - ); - - let p_vals = PublicValues { + let p_vals_lhs = PublicValues { trie_roots_before: lhs_common.roots_before.clone(), + trie_roots_after: lhs_common.roots_after.clone(), + block_metadata: other_data.b_data.b_meta.clone(), + block_hashes: other_data.b_data.b_hashes.clone(), + extra_block_data: lhs_extra_data, + }; + + let rhs_extra_data = + create_extra_block_data_for_child(rhs_common, &other_data, &rhs_child.underlying_txns()); + + let p_vals_rhs = PublicValues { + trie_roots_before: rhs_common.roots_before.clone(), trie_roots_after: rhs_common.roots_after.clone(), block_metadata: other_data.b_data.b_meta, block_hashes: other_data.b_data.b_hashes, - extra_block_data, + extra_block_data: rhs_extra_data, }; + let combined_deltas = merge_lhs_and_rhs_deltas(&lhs_common.deltas, &rhs_common.deltas); + ExpandedAggregatableProofs { - p_vals, + p_vals_lhs, + p_vals_rhs, lhs: expanded_lhs, rhs: expanded_rhs, + combined_deltas, } } +fn create_extra_block_data_for_child( + common: &ProofCommon, + other_data: &OtherBlockData, + txn_range: &ProofUnderlyingTxns, +) -> ExtraBlockData { + create_extra_block_data( + common.deltas.clone(), + other_data.genesis_state_trie_root, + txn_range.txn_idxs.start, + txn_range.txn_idxs.end, + ) +} + fn merge_lhs_and_rhs_deltas( lhs: &ProofBeforeAndAfterDeltas, rhs: &ProofBeforeAndAfterDeltas, From 1544467ae57efb53eec50d391c44149c455aa0e6 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 20 Nov 2023 13:08:13 -0700 Subject: [PATCH 126/208] Fixed receipt tries being incorrectly created - Receipt trie hashes should once again be correct. --- src/decoding.rs | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 4de44c5ab..c10cd354f 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -365,10 +365,6 @@ impl ProcessedBlockTrace { } } - // // TODO: Move hash of slot addr to block trace processing... - // storage_trie.extend(storage_writes.into_iter().map(|(slot, v)| - // (Nibbles::from_h256_be(hash(&slot.bytes_be())), v))); - storage_trie.extend( storage_writes .into_iter() @@ -412,9 +408,14 @@ impl ProcessedBlockTrace { let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); trie_state.txn.insert(txn_k, meta.txn_bytes()); + + // TODO: Re-evaluate if we can do this a bit nicer... Plonky2 needs this byte + // but we don't want it for the receipt trie. 
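
[Note — background for this hunk: EIP-2718 typed receipts are `type_byte || payload` (0x01 for EIP-2930, 0x02 for EIP-1559), while legacy receipts are bare RLP with no prefix. A sketch of the distinction, assuming `bytes` holds the raw envelope:]

    let payload: &[u8] = match bytes[0] {
        1 | 2 => &bytes[1..], // typed: strip the envelope byte
        _ => bytes,           // legacy: already bare RLP
    };
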
+ let receipt_node_without_txn_type_byte = &meta.receipt_node_bytes[1..]; + trie_state .receipt - .insert(txn_k, meta.receipt_node_bytes.clone()); + .insert(txn_k, receipt_node_without_txn_type_byte); Ok(()) } @@ -509,11 +510,10 @@ fn create_dummy_txn_gen_input_single_dummy_txn( }) .collect(); - let state_trie_with_coinbase = - create_minimal_state_partial_trie(&final_trie_state.state, empty()).unwrap(); + let state_trie = create_minimal_state_partial_trie(&final_trie_state.state, empty()).unwrap(); let tries = TrieInputs { - state_trie: state_trie_with_coinbase, + state_trie, transactions_trie: HashedPartialTrie::default(), receipts_trie: HashedPartialTrie::default(), storage_tries: partial_sub_storage_tries, @@ -644,8 +644,6 @@ fn create_minimal_state_partial_trie( state_trie: &HashedPartialTrie, state_accesses: impl Iterator, ) -> TraceParsingResult { - // TODO: Remove once coinbase issue is fixed... - create_trie_subset_wrapped( state_trie, state_accesses.map(Nibbles::from_h256_be), From 2a6a657d2df8ef808773fa37ce9afba55397fb2b Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 20 Nov 2023 14:31:34 -0700 Subject: [PATCH 127/208] Fixed a few issues --- src/decoding.rs | 32 +++++++++++++++++++++++++------- src/types.rs | 3 +++ 2 files changed, 28 insertions(+), 7 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index c10cd354f..4cf3abd87 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -23,7 +23,7 @@ use crate::{ types::{ BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, - EMPTY_TRIE_HASH, + EMPTY_TRIE_HASH, ZERO_STORAGE_SLOT_VAL_RLPED, }, utils::{hash, update_val_if_some}, }; @@ -131,6 +131,7 @@ impl ProcessedBlockTrace { &mut curr_block_tries, &txn_info.nodes_used_by_txn, txn_idx, + &other_data.b_data.b_meta.block_beneficiary, )?; let addresses = Self::get_known_addresses_if_enabled(); @@ -238,10 +239,20 @@ impl ProcessedBlockTrace { curr_block_tries: &mut PartialTrieState, nodes_used_by_txn: &NodesUsedByTxn, txn_idx: TxnIdx, + coin_base_addr: &Address, ) -> TraceParsingResult { + let hashed_coinbase = hash(coin_base_addr.as_bytes()); + + // TODO: Remove once the full node adds this to the trace... + let node_accesses_plus_coinbase = nodes_used_by_txn + .state_accesses + .iter() + .cloned() + .chain(once(hashed_coinbase)); + let state_trie = create_minimal_state_partial_trie( &curr_block_tries.state, - nodes_used_by_txn.state_accesses.iter().cloned(), + node_accesses_plus_coinbase, )?; println!("SPECIAL QUERY ON PARTIAL"); @@ -365,11 +376,18 @@ impl ProcessedBlockTrace { } } - storage_trie.extend( - storage_writes - .into_iter() - .map(|(k, v)| (Nibbles::from_h256_be(hash(&k.bytes_be())), v)), - ); + for (slot, val) in storage_writes + .into_iter() + .map(|(k, v)| (Nibbles::from_h256_be(hash(&k.bytes_be())), v)) + { + // If we are writing a zero, then we actually need to perform a delete. + match val == ZERO_STORAGE_SLOT_VAL_RLPED { + false => storage_trie.insert(slot, val), + true => { + storage_trie.delete(slot); + } + }; + } } for (hashed_acc_addr, s_trie_writes) in deltas.state_writes { diff --git a/src/types.rs b/src/types.rs index faa1a976b..ef7ff058f 100644 --- a/src/types.rs +++ b/src/types.rs @@ -42,6 +42,9 @@ pub(crate) const EMPTY_ACCOUNT_BYTES_RLPED: [u8; 70] = [ 123, 250, 216, 4, 93, 133, 164, 112, ]; +// This is just `rlp(0)`. 
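// [Aside — sketch, not part of the patch: RLP encodes the integer zero as
// the empty byte string, i.e. the single byte 0x80, which is what the
// constant below spells out:
//
//     assert_eq!(rlp::encode(&0u64).to_vec(), vec![0x80]);
//
// a slot holding zero is thus equivalent to an absent slot, which is why
// the delta logic above deletes on a zero write instead of inserting.]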
+pub(crate) const ZERO_STORAGE_SLOT_VAL_RLPED: [u8; 1] = [128];
+
 /// An `IR` (Intermediate Representation) for a given txn in a block that we can
 /// use to generate a proof for that txn.
 #[derive(Clone, Debug, Deserialize, Serialize)]

From c4a7ad9b0b988fadfa7e0562dd0ae9e4681ee99c Mon Sep 17 00:00:00 2001
From: BGluth
Date: Tue, 21 Nov 2023 14:17:39 -0700
Subject: [PATCH 128/208] Fixed incorrect txn/receipt

- Any block with more than `1` txn was always generating incorrect roots
  for every txn after the first. This was caused by a bug that subtracted
  `1` from the txn idx before giving it to `GenerationInputs`.
---
 src/decoding.rs | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/decoding.rs b/src/decoding.rs
index 4cf3abd87..bc041889f 100644
--- a/src/decoding.rs
+++ b/src/decoding.rs
@@ -168,7 +168,7 @@ impl ProcessedBlockTrace {
         println!("TRIE ROOTS AFTER: {:?}", trie_roots_after);

         let gen_inputs = GenerationInputs {
-            txn_number_before: txn_idx.saturating_sub(1).into(),
+            txn_number_before: txn_idx.into(),
             gas_used_before: tot_gas_used,
             block_bloom_before: curr_bloom,
             gas_used_after: new_tot_gas_used,
             block_bloom_after: new_bloom,
@@ -263,16 +263,17 @@ impl ProcessedBlockTrace {

         println!("SPECIAL QUERY ON PARTIAL RES: {:?}", res.map(hex::encode));

+        let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap();
         // TODO: Replace cast once `eth_trie_utils` supports `into` for `usize...
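
[Note — why `txn_k` is built this way: the txn and receipt tries are keyed by the RLP of the txn index, not the raw integer. Sketch: index 0 keys as 0x80, 1 as 0x01, and 0x80 as the two bytes 0x81 0x80:]

    let txn_k = Nibbles::from_bytes_be(&rlp::encode(&0_usize)).unwrap();
    assert_eq!(txn_k, Nibbles::from_bytes_be(&[0x80]).unwrap());
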
- let transactions_trie = create_trie_subset_wrapped( - &curr_block_tries.txn, - once(txn_k), - TrieType::Txn, - )?; + let transactions_trie = + create_trie_subset_wrapped(&curr_block_tries.txn, once(txn_k), TrieType::Txn)?; - let receipts_trie = create_trie_subset_wrapped( - &curr_block_tries.receipt, - once(txn_k), - TrieType::Receipt, - )?; + let receipts_trie = + create_trie_subset_wrapped(&curr_block_tries.receipt, once(txn_k), TrieType::Receipt)?; let x = nodes_used_by_txn .storage_accesses diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 47b704d9e..a0ac1425a 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -115,9 +115,7 @@ fn process_block_trace_trie_pre_images( } fn process_combined_trie_pre_images(tries: CombinedPreImages) -> ProcessedBlockTracePreImages { - match tries { - CombinedPreImages::Compact(t) => process_compact_trie(t), - } + process_compact_trie(tries.compact.unwrap_or(TrieCompact(Vec::default()))) } fn process_separate_trie_pre_images(tries: SeparateTriePreImages) -> ProcessedBlockTracePreImages { @@ -161,6 +159,14 @@ fn process_multiple_storage_tries( } fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages { + // TODO!!! HACK! REMOVE WHEN FIXED! + if trie.0.is_empty() { + return ProcessedBlockTracePreImages { + tries: PartialTriePreImages::default(), + extra_code_hash_mappings: None, + }; + } + // TODO: Wrap in proper result type... let out = process_compact_prestate(trie).unwrap(); @@ -403,7 +409,7 @@ fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbl // let hashed_addr = hash(addr.as_bytes()); // Nibbles::from_h256_be(hashed_addr) - Nibbles::from_h256_be(hash(&addr.0)) // TODO CHeck endianness... + Nibbles::from_h256_be(hash(&addr.0)) } // TODO: Extreme hack! Please don't keep... diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 2cf38d96c..5ba8e487d 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -74,8 +74,8 @@ pub enum SeparateTriePreImage { /// A trie pre-image where both state & storage are combined into one payload. #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] -pub enum CombinedPreImages { - Compact(TrieCompact), +pub struct CombinedPreImages { + pub compact: Option, } // TODO From 52fc30f3320618089e1bb4a90e127cc00be55456 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 22 Nov 2023 16:12:57 -0700 Subject: [PATCH 130/208] Fixes related to empty compact payloads --- src/compact/compact_prestate_processing.rs | 8 ++++++-- src/processed_block_trace.rs | 10 +--------- src/trace_protocol.rs | 2 +- 3 files changed, 8 insertions(+), 12 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index 0868eb3de..d9a1f8806 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -22,6 +22,7 @@ use thiserror::Error; use super::compact_to_partial_trie::{ convert_storage_trie_root_keyed_hashmap_to_account_addr_keyed, create_partial_trie_from_compact_node, create_partial_trie_from_remaining_witness_elem, + CompactToPartialTrieExtractionOutput, }; use crate::{ trace_protocol::TrieCompact, @@ -338,6 +339,9 @@ impl ParserState { let res = match self.entries.len() { 1 => create_partial_trie_from_remaining_witness_elem(self.entries.pop().unwrap()), + 0 => Ok(CompactToPartialTrieExtractionOutput::default()), /* Case for when nothing */ + // except the header is + // passed in. 
_ => Err(CompactParsingError::NonSingleEntryAfterProcessing( self.entries, )), @@ -683,11 +687,11 @@ impl WitnessBytes { let header = self.parse_header()?; loop { - self.process_operator()?; - if self.byte_cursor.at_eof() { break; } + + self.process_operator()?; } Ok((header, self.instrs)) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index a0ac1425a..88e76e55f 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -115,7 +115,7 @@ fn process_block_trace_trie_pre_images( } fn process_combined_trie_pre_images(tries: CombinedPreImages) -> ProcessedBlockTracePreImages { - process_compact_trie(tries.compact.unwrap_or(TrieCompact(Vec::default()))) + process_compact_trie(tries.compact) } fn process_separate_trie_pre_images(tries: SeparateTriePreImages) -> ProcessedBlockTracePreImages { @@ -159,14 +159,6 @@ fn process_multiple_storage_tries( } fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages { - // TODO!!! HACK! REMOVE WHEN FIXED! - if trie.0.is_empty() { - return ProcessedBlockTracePreImages { - tries: PartialTriePreImages::default(), - extra_code_hash_mappings: None, - }; - } - // TODO: Wrap in proper result type... let out = process_compact_prestate(trie).unwrap(); diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs index 5ba8e487d..659a5ef9f 100644 --- a/src/trace_protocol.rs +++ b/src/trace_protocol.rs @@ -75,7 +75,7 @@ pub enum SeparateTriePreImage { #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub struct CombinedPreImages { - pub compact: Option, + pub compact: TrieCompact, } // TODO From cc19b5a0d551ea066c99b7aec6222e77dd21b0b2 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Thu, 23 Nov 2023 18:22:43 -0500 Subject: [PATCH 131/208] Fix TxnIdx for dummy proofs --- src/types.rs | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/types.rs b/src/types.rs index ef7ff058f..94e23d5cf 100644 --- a/src/types.rs +++ b/src/types.rs @@ -85,11 +85,12 @@ impl TxnProofGenIR { } } - /// Creates a dummy transaction. + /// Creates a dummy proof, corresponding to no actual transaction. /// /// These can be used to pad a block if the number of transactions in the - /// block is below `2`. - pub fn create_dummy(b_height: BlockHeight, txn_idx: TxnIdx) -> Self { + /// block is below `2`. Dummy proofs will always be executed at the start + /// of a block. + pub fn create_dummy(b_height: BlockHeight) -> Self { let trie_roots_after = TrieRoots { state_root: EMPTY_TRIE_HASH, transactions_root: EMPTY_TRIE_HASH, @@ -108,18 +109,19 @@ impl TxnProofGenIR { }; Self { - txn_idx, + txn_idx: 0, gen_inputs, } } /// Copy relevant fields of the `TxnProofGenIR` to a new `TxnProofGenIR` - /// with a different `b_height` and `txn_idx`. + /// with a different `b_height`. /// /// This can be used to pad a block if there is only one transaction in the - /// block. Block proofs need a minimum of two transactions. - pub fn dummy_with_at(&self, b_height: BlockHeight, txn_idx: TxnIdx) -> Self { - let mut dummy = Self::create_dummy(b_height, txn_idx); + /// block. Block proofs need a minimum of two transactions. Dummy proofs + /// will always be executed at the start of a block. 
+ pub fn dummy_with_at(&self, b_height: BlockHeight) -> Self { + let mut dummy = Self::create_dummy(b_height); dummy.gen_inputs.gas_used_before = self.gen_inputs.gas_used_after; dummy.gen_inputs.gas_used_after = self.gen_inputs.gas_used_after; From 5644b74d4eb6d81a4026bdd1b35d36c11946c218 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Thu, 23 Nov 2023 18:59:31 -0500 Subject: [PATCH 132/208] Fix index for dummy block --- src/decoding.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/decoding.rs b/src/decoding.rs index ceeea01df..a6be2edfa 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -587,7 +587,7 @@ fn create_dummy_txn_pair_for_empty_block( ) -> [TxnProofGenIR; 2] { [ create_dummy_gen_input(b_data, final_trie_state, 0), - create_dummy_gen_input(b_data, final_trie_state, 1), + create_dummy_gen_input(b_data, final_trie_state, 0), ] } From ea37d824ee993fb966f6c16a9eedb22408440912 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Thu, 23 Nov 2023 22:43:56 -0500 Subject: [PATCH 133/208] Fix genesis root for dummy txns --- src/decoding.rs | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index a6be2edfa..9a8313f7a 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -229,7 +229,7 @@ impl ProcessedBlockTrace { Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, - &other_data.b_data, + &other_data, &curr_block_tries, ); Ok(txn_gen_inputs) @@ -435,7 +435,7 @@ impl ProcessedBlockTrace { fn pad_gen_inputs_with_dummy_inputs_if_needed( gen_inputs: &mut Vec, - b_data: &BlockLevelData, + other_data: &OtherBlockData, final_trie_state: &PartialTrieState, ) { println!("Padding len: {}", gen_inputs.len()); @@ -444,15 +444,12 @@ impl ProcessedBlockTrace { 0 => { // Need to pad with two dummy txns. 
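
[Sketch — the proof-tree shape this padding guarantees for an empty block; aggregation nodes are binary, and the block proof sits on the aggregation root:]

    //        block proof
    //             |
    //        agg(lhs, rhs)
    //        /           \
    //   dummy txn 0   dummy txn 1
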
gen_inputs.extend(create_dummy_txn_pair_for_empty_block( - b_data, + other_data, final_trie_state, )); } 1 => { - let dummy_txn = create_dummy_txn_gen_input_single_dummy_txn( - &gen_inputs[0].gen_inputs, - final_trie_state, - ); + let dummy_txn = create_dummy_gen_input(other_data, final_trie_state, 0); gen_inputs.insert(0, dummy_txn); } _ => (), @@ -582,17 +579,17 @@ fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedP } fn create_dummy_txn_pair_for_empty_block( - b_data: &BlockLevelData, + other_data: &OtherBlockData, final_trie_state: &PartialTrieState, ) -> [TxnProofGenIR; 2] { [ - create_dummy_gen_input(b_data, final_trie_state, 0), - create_dummy_gen_input(b_data, final_trie_state, 0), + create_dummy_gen_input(other_data, final_trie_state, 0), + create_dummy_gen_input(other_data, final_trie_state, 0), ] } fn create_dummy_gen_input( - b_data: &BlockLevelData, + other_data: &OtherBlockData, final_trie_state: &PartialTrieState, txn_idx: TxnIdx, ) -> TxnProofGenIR { @@ -608,9 +605,9 @@ fn create_dummy_gen_input( signed_txn: None, tries, trie_roots_after, - genesis_state_trie_root: TrieRootHash::default(), - block_metadata: b_data.b_meta.clone(), - block_hashes: b_data.b_hashes.clone(), + genesis_state_trie_root: other_data.genesis_state_trie_root, + block_metadata: other_data.b_data.b_meta.clone(), + block_hashes: other_data.b_data.b_hashes.clone(), ..GenerationInputs::default() }; From f25ca5a6188e1709b50206a03bc9cd7a103f1d0c Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 24 Nov 2023 06:13:40 -0500 Subject: [PATCH 134/208] Fix genesis root for dummy txns --- Cargo.toml | 1 + src/decoding.rs | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 3f1dda664..1056acf3c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ enum-as-inner = "0.6.0" enumn = "0.1.12" eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } ethereum-types = "0.14.1" +hex-literal = "0.4.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" diff --git a/src/decoding.rs b/src/decoding.rs index 9a8313f7a..423d137b5 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -11,6 +11,7 @@ use eth_trie_utils::{ trie_subsets::create_trie_subset, }; use ethereum_types::{Address, H256, U256}; +use hex_literal::hex; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -178,7 +179,7 @@ impl ProcessedBlockTrace { * it here... 
*/ tries, trie_roots_after, - genesis_state_trie_root: other_data.genesis_state_trie_root, + genesis_state_trie_root: EMPTY_TRIE_HASH, // TODO: fetch this on Jerigon side contract_code: txn_info.contract_code_accessed, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), @@ -605,7 +606,10 @@ fn create_dummy_gen_input( signed_txn: None, tries, trie_roots_after, - genesis_state_trie_root: other_data.genesis_state_trie_root, + // TODO: fetch this on Jerigon side + genesis_state_trie_root: H256(hex!( + "c12c57a1ecc38176fa8016fed174a23264e71d2166ea7e18cb954f0f3231e36a" + )), block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), ..GenerationInputs::default() From 831b823afacd863a297f237155c1b2894d9ccc92 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Sat, 25 Nov 2023 08:21:09 -0500 Subject: [PATCH 135/208] Use initial tries for dummy transaction padding --- src/decoding.rs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 423d137b5..fdb510c31 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -83,6 +83,13 @@ impl ProcessedBlockTrace { other_data: OtherBlockData, ) -> TraceParsingResult> { let mut curr_block_tries = PartialTrieState { + state: self.tries.state.clone(), + storage: self.tries.storage.clone(), + ..Default::default() + }; + + // This is just a copy of `curr_block_tries`. + let initial_tries_for_dummies = PartialTrieState { state: self.tries.state, storage: self.tries.storage, ..Default::default() @@ -231,7 +238,7 @@ impl ProcessedBlockTrace { Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, &other_data, - &curr_block_tries, + &initial_tries_for_dummies, ); Ok(txn_gen_inputs) } @@ -437,7 +444,7 @@ impl ProcessedBlockTrace { fn pad_gen_inputs_with_dummy_inputs_if_needed( gen_inputs: &mut Vec, other_data: &OtherBlockData, - final_trie_state: &PartialTrieState, + initial_trie_state: &PartialTrieState, ) { println!("Padding len: {}", gen_inputs.len()); @@ -446,11 +453,11 @@ impl ProcessedBlockTrace { // Need to pad with two dummy txns. gen_inputs.extend(create_dummy_txn_pair_for_empty_block( other_data, - final_trie_state, + initial_trie_state, )); } 1 => { - let dummy_txn = create_dummy_gen_input(other_data, final_trie_state, 0); + let dummy_txn = create_dummy_gen_input(other_data, initial_trie_state, 0); gen_inputs.insert(0, dummy_txn); } _ => (), From 3731b168a30e35ae4d760337ee4929791a632686 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Mon, 27 Nov 2023 09:16:03 -0500 Subject: [PATCH 136/208] Fix genesis for non-dummy --- src/decoding.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/decoding.rs b/src/decoding.rs index fdb510c31..1c5743f4c 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -186,7 +186,9 @@ impl ProcessedBlockTrace { * it here... 
*/ tries, trie_roots_after, - genesis_state_trie_root: EMPTY_TRIE_HASH, // TODO: fetch this on Jerigon side + genesis_state_trie_root: H256(hex!( + "c12c57a1ecc38176fa8016fed174a23264e71d2166ea7e18cb954f0f3231e36a" + )), // TODO: fetch this on Jerigon side contract_code: txn_info.contract_code_accessed, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), From ce1c10978564e80e3afbd666ba086e7c20c5b996 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Mon, 27 Nov 2023 11:38:52 -0500 Subject: [PATCH 137/208] Bump plonky2 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 1056acf3c..8df03ba9b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,7 +16,7 @@ hex-literal = "0.4.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "b9328815e666981d6485b0d8dc04160e93797993" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "2039e18f29c83510e96fd985359c549344d6d601" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" From f23af20a674caf83a0136c1779619fe5dca63cc4 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 28 Nov 2023 13:40:49 -0700 Subject: [PATCH 138/208] Removed hardcoded genesis state root hash --- src/decoding.rs | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 1c5743f4c..0a5614bf7 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -11,7 +11,6 @@ use eth_trie_utils::{ trie_subsets::create_trie_subset, }; use ethereum_types::{Address, H256, U256}; -use hex_literal::hex; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -22,9 +21,9 @@ use crate::{ processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites, TxnMetaState}, trace_protocol::TxnInfo, types::{ - BlockLevelData, Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, - OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, - EMPTY_TRIE_HASH, ZERO_STORAGE_SLOT_VAL_RLPED, + Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData, + TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, EMPTY_TRIE_HASH, + ZERO_STORAGE_SLOT_VAL_RLPED, }, utils::{hash, update_val_if_some}, }; @@ -186,9 +185,7 @@ impl ProcessedBlockTrace { * it here... 
*/ tries, trie_roots_after, - genesis_state_trie_root: H256(hex!( - "c12c57a1ecc38176fa8016fed174a23264e71d2166ea7e18cb954f0f3231e36a" - )), // TODO: fetch this on Jerigon side + genesis_state_trie_root: other_data.genesis_state_trie_root, contract_code: txn_info.contract_code_accessed, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), @@ -615,10 +612,7 @@ fn create_dummy_gen_input( signed_txn: None, tries, trie_roots_after, - // TODO: fetch this on Jerigon side - genesis_state_trie_root: H256(hex!( - "c12c57a1ecc38176fa8016fed174a23264e71d2166ea7e18cb954f0f3231e36a" - )), + genesis_state_trie_root: other_data.genesis_state_trie_root, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), ..GenerationInputs::default() From 855133a4a3dcef26c6eb9f08717fad016ab2f9a4 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 28 Nov 2023 15:35:28 -0700 Subject: [PATCH 139/208] Plonky2 bump --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 8df03ba9b..ab736d688 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,7 +16,7 @@ hex-literal = "0.4.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "2039e18f29c83510e96fd985359c549344d6d601" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "64cc1000e7dbe83d88ca9fa6811d41adfa7796c5" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" From 9bfb0c7f8768112e2cf2101ba72cf062ddb40ce7 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Tue, 28 Nov 2023 18:45:04 -0500 Subject: [PATCH 140/208] Fix txn indexing for dummy proofs --- plonky_block_proof_gen/src/proof_types.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index c73d95742..59b6fd25f 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -39,7 +39,12 @@ pub struct GeneratedTxnProof { impl GeneratedTxnProof { pub fn underlying_txns(&self) -> ProofUnderlyingTxns { - (self.txn_idx..=self.txn_idx).into() + if self.common.roots_before.transactions_root == self.common.roots_after.transactions_root { + // This is a dummy proof no transaction was executed. + (self.txn_idx..self.txn_idx).into() + } else { + (self.txn_idx..=self.txn_idx).into() + } } } From 396e9213d6d3c7724124cee5013b3ace4dfe07fe Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Wed, 29 Nov 2023 10:15:04 -0500 Subject: [PATCH 141/208] Do not prune first byte of receipt --- src/decoding.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 0a5614bf7..0f237b36b 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -429,13 +429,9 @@ impl ProcessedBlockTrace { let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); trie_state.txn.insert(txn_k, meta.txn_bytes()); - // TODO: Re-evaluate if we can do this a bit nicer... Plonky2 needs this byte - // but we don't want it for the receipt trie. 
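
[Note on the revert below: an earlier patch in this series stripped the leading type byte before inserting receipts into the trie; this change stores the full envelope again, so the trie value is the bare RLP for legacy receipts and the EIP-2718 form for typed ones — presumably matching what the updated plonky2 now hashes:]

    // value stored at key rlp(txn_idx):
    //   legacy: rlp(receipt)
    //   typed:  type_byte || rlp(receipt), e.g. 0x02 || rlp(receipt)
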
- let receipt_node_without_txn_type_byte = &meta.receipt_node_bytes[1..]; - trie_state .receipt - .insert(txn_k, receipt_node_without_txn_type_byte); + .insert(txn_k, meta.receipt_node_bytes.as_ref()); Ok(()) } From f856dba0e6e96e6be29fcf215ebcea8252be9a5c Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 29 Nov 2023 10:06:59 -0700 Subject: [PATCH 142/208] Removed logic for appending txn bytes to receipt nodes - Doesn't seem to be needed anymore. --- src/processed_block_trace.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 88e76e55f..1b0c2a3ff 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -305,9 +305,7 @@ impl TxnInfo { .state_accounts_with_no_accesses_but_storage_tries .extend(accounts_with_storage_but_no_storage_accesses); - // TODO: Make more efficient... - let mut receipt_node_bytes = rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(); - receipt_node_bytes.insert(0, 2); + let receipt_node_bytes = rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(); let txn_bytes = match self.meta.byte_code.is_empty() { false => Some(self.meta.byte_code), From f75c29a93c8302ffff08cb47a64fa600fdca4774 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 29 Nov 2023 14:06:11 -0700 Subject: [PATCH 143/208] Fixed receipt bytes being decoded wrong --- src/processed_block_trace.rs | 30 +++++++++++++++++------------- src/trace_protocol.rs | 3 +-- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 1b0c2a3ff..060ff72e0 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -6,7 +6,7 @@ use std::str::FromStr; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; use ethereum_types::U256; -use plonky2_evm::generation::mpt::AccountRlp; +use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use crate::compact::compact_prestate_processing::{process_compact_prestate, PartialTriePreImages}; use crate::decoding::TraceParsingResult; @@ -305,8 +305,6 @@ impl TxnInfo { .state_accounts_with_no_accesses_but_storage_tries .extend(accounts_with_storage_but_no_storage_accesses); - let receipt_node_bytes = rlp::encode(&self.meta.new_receipt_trie_node_byte).to_vec(); - let txn_bytes = match self.meta.byte_code.is_empty() { false => Some(self.meta.byte_code), true => None, @@ -314,7 +312,7 @@ impl TxnInfo { let new_meta_state = TxnMetaState { txn_bytes, - receipt_node_bytes, + receipt_node_bytes: self.meta.new_receipt_trie_node_byte, gas_used: self.meta.gas_used, block_bloom, }; @@ -328,17 +326,11 @@ impl TxnInfo { fn block_bloom(&self) -> Bloom { let mut bloom = [U256::zero(); 8]; + let bloom_bytes = + extract_bloom_from_receipt_node_bytes(&self.meta.new_receipt_trie_node_byte); // Note that bloom can be empty. 
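
[Note — sketch of the conversion this loop performs: the logs bloom is 256 bytes, consumed by the EVM circuit as 8 big-endian U256 limbs of 32 bytes each. A stable-Rust equivalent of the nightly `array_chunks` used here:]

    let mut bloom = [U256::zero(); 8];
    for (i, chunk) in bloom_bytes.chunks_exact(32).enumerate() {
        bloom[i] = U256::from_big_endian(chunk);
    }
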
-        for (i, v) in self
-            .meta
-            .new_receipt_trie_node_byte
-            .bloom
-            .clone()
-            .into_iter()
-            .array_chunks::<32>()
-            .enumerate()
-        {
+        for (i, v) in bloom_bytes.into_iter().array_chunks::<32>().enumerate() {
             bloom[i] = U256::from_big_endian(v.as_slice());
         }
@@ -413,3 +405,15 @@ fn string_to_nibbles_even_nibble_fixed(s: &str) -> Nibbles {

     n
 }
+
+fn extract_bloom_from_receipt_node_bytes(r_bytes: &[u8]) -> Vec<u8> {
+    let legacy_payload = match r_bytes[0] {
+        1 | 2 => &r_bytes[1..],
+        _ => r_bytes,
+    };
+
+    rlp::decode::<LegacyReceiptRlp>(legacy_payload)
+        .unwrap()
+        .bloom
+        .to_vec()
+}
diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs
index 659a5ef9f..49f92c5fc 100644
--- a/src/trace_protocol.rs
+++ b/src/trace_protocol.rs
@@ -25,7 +25,6 @@ use std::collections::HashMap;

 use eth_trie_utils::partial_trie::HashedPartialTrie;
 use ethereum_types::{Address, U256};
-use plonky2_evm::generation::mpt::LegacyReceiptRlp;
 use serde::{Deserialize, Serialize};
 use serde_with::{serde_as, FromInto, TryFromInto};

@@ -139,7 +138,7 @@ pub struct TxnMeta {
     /// this txn. Note that the key is not included and this is only the rlped
     /// value of the node!
     #[serde_as(as = "TryFromInto<ByteString>")]
-    pub new_receipt_trie_node_byte: LegacyReceiptRlp,
+    pub new_receipt_trie_node_byte: Vec<u8>,

     /// Gas used by this txn (Note: not cumulative gas used).
     pub gas_used: u64,

From 3242e9562633461c43cd2d234481c120d27163c4 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Wed, 29 Nov 2023 15:07:34 -0700
Subject: [PATCH 144/208] Fixed receipt rlp some more

---
 src/processed_block_trace.rs | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs
index 060ff72e0..eee0d9ba1 100644
--- a/src/processed_block_trace.rs
+++ b/src/processed_block_trace.rs
@@ -310,9 +310,11 @@ impl TxnInfo {
             true => None,
         };

+        let receipt_node_bytes = rlp::decode::<Vec<u8>>(&self.meta.new_receipt_trie_node_byte).unwrap();
+
         let new_meta_state = TxnMetaState {
             txn_bytes,
-            receipt_node_bytes: self.meta.new_receipt_trie_node_byte,
+            receipt_node_bytes,
             gas_used: self.meta.gas_used,
             block_bloom,
         };
@@ -407,9 +409,11 @@ fn string_to_nibbles_even_nibble_fixed(s: &str) -> Nibbles {
 }

 fn extract_bloom_from_receipt_node_bytes(r_bytes: &[u8]) -> Vec<u8> {
-    let legacy_payload = match r_bytes[0] {
-        1 | 2 => &r_bytes[1..],
-        _ => r_bytes,
+    let decoded = rlp::decode::<Vec<u8>>(r_bytes).unwrap();
+
+    let legacy_payload = match decoded[0] {
+        1 | 2 => &decoded[1..],
+        _ => &decoded,
     };

     rlp::decode::<LegacyReceiptRlp>(legacy_payload)
         .unwrap()
         .bloom
         .to_vec()

From b213eb2508dd2c7797af6298e975e32902a43775 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Wed, 29 Nov 2023 17:37:40 -0700
Subject: [PATCH 145/208] Now processes legacy & non-legacy receipts

- Might not be the best solution, but this is the only way I could get it
  to work.
---
 src/decoding.rs              |  2 +-
 src/processed_block_trace.rs | 51 ++++++++++++++++++++++++------------
 2 files changed, 35 insertions(+), 18 deletions(-)

diff --git a/src/decoding.rs b/src/decoding.rs
index 0f237b36b..c8a8f5fc8 100644
--- a/src/decoding.rs
+++ b/src/decoding.rs
@@ -152,7 +152,7 @@ impl ProcessedBlockTrace {
                 println!("{:#?}", account_and_storage_hashes);

                 let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used;
-                let new_bloom = txn_info.meta.block_bloom;
+                let new_bloom = txn_info.meta.bloom;

diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs
index eee0d9ba1..f3f8d24c5 100644
--- a/src/processed_block_trace.rs
+++ b/src/processed_block_trace.rs
@@ -206,7 +206,7 @@ impl TxnInfo {
         let mut nodes_used_by_txn = NodesUsedByTxn::default();
         let mut contract_code_accessed = create_empty_code_access_map();

-        let block_bloom = self.block_bloom();
+        let bloom = self.bloom();

         for (addr, trace) in self.traces {
             let hashed_addr = hash(addr.as_bytes());
@@ -310,13 +310,14 @@ impl TxnInfo {
             true => None,
         };

-        let receipt_node_bytes = rlp::decode::<Vec<u8>>(&self.meta.new_receipt_trie_node_byte).unwrap();
+        let receipt_node_bytes =
+            process_rlped_receipt_node_bytes(self.meta.new_receipt_trie_node_byte);

         let new_meta_state = TxnMetaState {
             txn_bytes,
             receipt_node_bytes,
             gas_used: self.meta.gas_used,
-            block_bloom,
+            bloom,
         };

         ProcessedTxnInfo {
@@ -326,7 +327,7 @@ impl TxnInfo {
         }
     }

-    fn block_bloom(&self) -> Bloom {
+    fn bloom(&self) -> Bloom {
         let mut bloom = [U256::zero(); 8];
         let bloom_bytes =
             extract_bloom_from_receipt_node_bytes(&self.meta.new_receipt_trie_node_byte);
@@ -340,6 +341,21 @@ impl TxnInfo {
     }
 }

+fn process_rlped_receipt_node_bytes(raw_bytes: Vec<u8>) -> Vec<u8> {
+    println!("PROC RAW: {}", hex::encode(&raw_bytes));
+
+    match rlp::decode::<LegacyReceiptRlp>(&raw_bytes) {
+        Ok(_) => raw_bytes,
+        Err(_) => {
+            // Must be non-legacy.
+
+            let decoded = rlp::decode::<Vec<u8>>(&raw_bytes).unwrap();
+            println!("PROC Non-legacy: {}", hex::encode(&decoded));
+            decoded
+        }
+    }
+}
+
 fn create_empty_code_access_map() -> HashMap<CodeHash, Vec<u8>> {
     HashMap::from_iter(once((EMPTY_CODE_HASH, Vec::new())))
 }
@@ -370,7 +386,7 @@ pub(crate) struct TxnMetaState {
     pub(crate) txn_bytes: Option<Vec<u8>>,
     pub(crate) receipt_node_bytes: Vec<u8>,
     pub(crate) gas_used: u64,
-    pub(crate) block_bloom: Bloom,
+    pub(crate) bloom: Bloom,
 }

 // TODO: Remove/rename function based on how complex this gets...
@@ -408,16 +424,17 @@ fn string_to_nibbles_even_nibble_fixed(s: &str) -> Nibbles {

     n
 }

-fn extract_bloom_from_receipt_node_bytes(r_bytes: &[u8]) -> Vec<u8> {
-    let decoded = rlp::decode::<Vec<u8>>(r_bytes).unwrap();
-
-    let legacy_payload = match decoded[0] {
-        1 | 2 => &decoded[1..],
-        _ => &decoded,
-    };
-
-    rlp::decode::<LegacyReceiptRlp>(legacy_payload)
-        .unwrap()
-        .bloom
-        .to_vec()
+fn extract_bloom_from_receipt_node_bytes(raw_bytes: &[u8]) -> Vec<u8> {
+    match rlp::decode::<LegacyReceiptRlp>(raw_bytes) {
+        Ok(v) => v.bloom.to_vec(),
+        Err(_) => {
+            // Must be non-legacy.
+
+            let decoded = rlp::decode::<Vec<u8>>(raw_bytes).unwrap();
+            rlp::decode::<LegacyReceiptRlp>(&decoded[1..])
+                .unwrap()
+                .bloom
+                .to_vec()
+        }
+    }
 }

From 2952c5cce4e66211e89db933741038ad4ca60098 Mon Sep 17 00:00:00 2001
From: Robin Salen
Date: Thu, 30 Nov 2023 11:37:03 -0500
Subject: [PATCH 146/208] Remove intermediary bloom filters

---
 src/decoding.rs              | 77 ++----------------------------------
 src/processed_block_trace.rs | 37 +----------------
 src/proof_gen_types.rs       |  6 ---
 src/types.rs                 |  4 --
 4 files changed, 5 insertions(+), 119 deletions(-)

diff --git a/src/decoding.rs b/src/decoding.rs
index c8a8f5fc8..6d258c3ff 100644
--- a/src/decoding.rs
+++ b/src/decoding.rs
@@ -1,7 +1,7 @@
 use std::{
     collections::{HashMap, HashSet},
     fmt::{self, Display, Formatter},
-    iter::{empty, once},
+    iter::once,
     str::FromStr,
 };

@@ -21,8 +21,8 @@ use crate::{
     processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites, TxnMetaState},
     trace_protocol::TxnInfo,
     types::{
-        Bloom, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData,
-        TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, EMPTY_TRIE_HASH,
+        HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData, TrieRootHash,
+        TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, EMPTY_TRIE_HASH,
         ZERO_STORAGE_SLOT_VAL_RLPED,
     },
     utils::{hash, update_val_if_some},
@@ -95,7 +95,6 @@ impl ProcessedBlockTrace {
         };

         let mut tot_gas_used = U256::zero();
-        let mut curr_bloom = Bloom::default();

         let mut txn_gen_inputs = self
             .txn_info
@@ -177,9 +175,7 @@ impl ProcessedBlockTrace {
                 let gen_inputs = GenerationInputs {
                     txn_number_before: txn_idx.into(),
                     gas_used_before: tot_gas_used,
-                    block_bloom_before: curr_bloom,
                     gas_used_after: new_tot_gas_used,
-                    block_bloom_after: new_bloom,
                     signed_txn: txn_info.meta.txn_bytes,
                     withdrawals: Vec::new(), /* TODO: Once this is added to the trace spec, add
                                               * it here...
*/ @@ -202,7 +198,6 @@ impl ProcessedBlockTrace { // println!("IR: {:#?}", txn_proof_gen_ir); tot_gas_used = new_tot_gas_used; - curr_bloom = new_bloom; let all_storage_roots = curr_block_tries .state @@ -508,72 +503,6 @@ fn calculate_trie_input_hashes(t_inputs: &TrieInputs) -> TrieRoots { } } -fn create_dummy_txn_gen_input_single_dummy_txn( - next_real_gen_input: &GenerationInputs, - final_trie_state: &PartialTrieState, -) -> TxnProofGenIR { - let partial_sub_storage_tries: Vec<_> = final_trie_state - .storage - .iter() - .map(|(hashed_acc_addr, s_trie)| { - ( - *hashed_acc_addr, - create_fully_hashed_out_sub_partial_trie(s_trie), - ) - }) - .collect(); - - let state_trie = create_minimal_state_partial_trie(&final_trie_state.state, empty()).unwrap(); - - let tries = TrieInputs { - state_trie, - transactions_trie: HashedPartialTrie::default(), - receipts_trie: HashedPartialTrie::default(), - storage_tries: partial_sub_storage_tries, - }; - - println!( - "Orig trie hash: {:x}", - next_real_gen_input.tries.state_trie.hash() - ); - println!("State sub trie: {:#?}", tries.state_trie); - - assert_eq!( - tries.state_trie.hash(), - next_real_gen_input.trie_roots_after.state_root - ); - println!( - "{} == {}", - tries.state_trie.hash(), - next_real_gen_input.trie_roots_after.state_root - ); - - println!( - "Fully hashed out dummy state trie: {:x}", - tries.state_trie.hash() - ); - - let trie_roots_after = TrieRoots { - state_root: next_real_gen_input.tries.state_trie.hash(), - transactions_root: EMPTY_TRIE_HASH, - receipts_root: EMPTY_TRIE_HASH, - }; - - let gen_inputs = GenerationInputs { - txn_number_before: 0.into(), - gas_used_before: 0.into(), - gas_used_after: 0.into(), - block_bloom_before: [0.into(); 8], - block_bloom_after: [0.into(); 8], - signed_txn: None, - withdrawals: vec![], - trie_roots_after, - ..(next_real_gen_input.clone()) - }; - - gen_inputs_to_ir(gen_inputs, 0) -} - // We really want to get a trie with just a hash node here, and this is an easy // way to do it. fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedPartialTrie { diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index f3f8d24c5..aa81001d6 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -16,9 +16,8 @@ use crate::trace_protocol::{ TrieUncompressed, TxnInfo, }; use crate::types::{ - Bloom, CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, - HashedStorageAddrNibbles, OtherBlockData, StorageAddr, TrieRootHash, TxnProofGenIR, - EMPTY_CODE_HASH, EMPTY_TRIE_HASH, + CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, + OtherBlockData, StorageAddr, TrieRootHash, TxnProofGenIR, EMPTY_CODE_HASH, EMPTY_TRIE_HASH, }; use crate::utils::{ hash, print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie, @@ -206,8 +205,6 @@ impl TxnInfo { let mut nodes_used_by_txn = NodesUsedByTxn::default(); let mut contract_code_accessed = create_empty_code_access_map(); - let bloom = self.bloom(); - for (addr, trace) in self.traces { let hashed_addr = hash(addr.as_bytes()); @@ -317,7 +314,6 @@ impl TxnInfo { txn_bytes, receipt_node_bytes, gas_used: self.meta.gas_used, - bloom, }; ProcessedTxnInfo { @@ -326,19 +322,6 @@ impl TxnInfo { meta: new_meta_state, } } - - fn bloom(&self) -> Bloom { - let mut bloom = [U256::zero(); 8]; - let bloom_bytes = - extract_bloom_from_receipt_node_bytes(&self.meta.new_receipt_trie_node_byte); - - // Note that bloom can be empty. 
- for (i, v) in bloom_bytes.into_iter().array_chunks::<32>().enumerate() { - bloom[i] = U256::from_big_endian(v.as_slice()); - } - - bloom - } } fn process_rlped_receipt_node_bytes(raw_bytes: Vec) -> Vec { @@ -386,7 +369,6 @@ pub(crate) struct TxnMetaState { pub(crate) txn_bytes: Option>, pub(crate) receipt_node_bytes: Vec, pub(crate) gas_used: u64, - pub(crate) bloom: Bloom, } // TODO: Remove/rename function based on how complex this gets... @@ -423,18 +405,3 @@ fn string_to_nibbles_even_nibble_fixed(s: &str) -> Nibbles { n } - -fn extract_bloom_from_receipt_node_bytes(raw_bytes: &[u8]) -> Vec { - match rlp::decode::(raw_bytes) { - Ok(v) => v.bloom.to_vec(), - Err(_) => { - // Must be non-legacy. - - let decoded = rlp::decode::>(raw_bytes).unwrap(); - rlp::decode::(&decoded[1..]) - .unwrap() - .bloom - .to_vec() - } - } -} diff --git a/src/proof_gen_types.rs b/src/proof_gen_types.rs index 8b8145773..c3a91ab81 100644 --- a/src/proof_gen_types.rs +++ b/src/proof_gen_types.rs @@ -10,8 +10,6 @@ use crate::types::{TrieRootHash, TxnIdx}; pub struct ProofBeforeAndAfterDeltas { pub gas_used_before: U256, pub gas_used_after: U256, - pub block_bloom_before: [U256; 8], - pub block_bloom_after: [U256; 8], } impl> From for ProofBeforeAndAfterDeltas { @@ -21,8 +19,6 @@ impl> From for ProofBeforeAndAfterDeltas { Self { gas_used_before: b.gas_used_before, gas_used_after: b.gas_used_after, - block_bloom_before: b.block_bloom_before, - block_bloom_after: b.block_bloom_after, } } } @@ -40,8 +36,6 @@ impl ProofBeforeAndAfterDeltas { txn_number_after: txn_end.into(), gas_used_before: self.gas_used_before, gas_used_after: self.gas_used_after, - block_bloom_before: self.block_bloom_before, - block_bloom_after: self.block_bloom_after, } } } diff --git a/src/types.rs b/src/types.rs index 94e23d5cf..5a4334989 100644 --- a/src/types.rs +++ b/src/types.rs @@ -80,8 +80,6 @@ impl TxnProofGenIR { ProofBeforeAndAfterDeltas { gas_used_before: self.gen_inputs.gas_used_before, gas_used_after: self.gen_inputs.gas_used_after, - block_bloom_before: self.gen_inputs.block_bloom_before, - block_bloom_after: self.gen_inputs.block_bloom_after, } } @@ -125,8 +123,6 @@ impl TxnProofGenIR { dummy.gen_inputs.gas_used_before = self.gen_inputs.gas_used_after; dummy.gen_inputs.gas_used_after = self.gen_inputs.gas_used_after; - dummy.gen_inputs.block_bloom_before = self.gen_inputs.block_bloom_after; - dummy.gen_inputs.block_bloom_after = self.gen_inputs.block_bloom_after; dummy.gen_inputs.trie_roots_after = self.gen_inputs.trie_roots_after.clone(); dummy From e3fd08887cfed6bbdd986a1f826a66f4c9070046 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Thu, 30 Nov 2023 11:38:48 -0500 Subject: [PATCH 147/208] Remove intermediary block bloom filters --- plonky_block_proof_gen/src/proof_gen.rs | 2 -- plonky_block_proof_gen/src/proof_types.rs | 2 -- 2 files changed, 4 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 426fe91b9..db883e67a 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -189,8 +189,6 @@ fn merge_lhs_and_rhs_deltas( ProofBeforeAndAfterDeltas { gas_used_before: lhs.gas_used_before, gas_used_after: rhs.gas_used_after, - block_bloom_before: lhs.block_bloom_before, - block_bloom_after: rhs.block_bloom_after, } } diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 59b6fd25f..55706079c 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ 
b/plonky_block_proof_gen/src/proof_types.rs @@ -25,8 +25,6 @@ pub fn create_extra_block_data( txn_number_after: txn_end.into(), gas_used_before: deltas.gas_used_before, gas_used_after: deltas.gas_used_after, - block_bloom_before: deltas.block_bloom_before, - block_bloom_after: deltas.block_bloom_after, } } From 65326cfebc2dd4078eafdce2479d8f2484642b19 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Thu, 30 Nov 2023 13:12:17 -0500 Subject: [PATCH 148/208] Bump plonky2 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index ab736d688..1109707d6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,7 +16,7 @@ hex-literal = "0.4.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "64cc1000e7dbe83d88ca9fa6811d41adfa7796c5" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "5572da30d7ab818594cf8659839fa832dfcf1d3d" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" From ca8fef9923255efb87958a7a2dc3e33deb517c69 Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 29 Nov 2023 09:39:33 -0700 Subject: [PATCH 149/208] Removed some traces and unused code --- src/compact/complex_test_payloads.rs | 10 +- src/decoding.rs | 222 +++------------------------ src/deserializers.rs | 9 +- src/processed_block_trace.rs | 48 +----- src/types.rs | 60 +++++++- src/utils.rs | 9 -- 6 files changed, 94 insertions(+), 264 deletions(-) diff --git a/src/compact/complex_test_payloads.rs b/src/compact/complex_test_payloads.rs index 3907987ec..db5196add 100644 --- a/src/compact/complex_test_payloads.rs +++ b/src/compact/complex_test_payloads.rs @@ -80,15 +80,7 @@ impl TestProtocolInputAndRoot { .filter(|(_, s_root)| *s_root != EMPTY_TRIE_HASH) .map(|(addr, _)| addr); - let x: Vec<_> = non_empty_account_s_roots.collect(); - println!("non empty account s_roots: {:#?}", x); - - println!( - "All keys for storage tries: {:#?}", - images.storage.keys().collect::>() - ); - - for account_with_non_empty_root in x.into_iter() { + for account_with_non_empty_root in non_empty_account_s_roots { assert!(images.storage.contains_key(&account_with_non_empty_root)); } } diff --git a/src/decoding.rs b/src/decoding.rs index 6d258c3ff..8b299f50f 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -1,8 +1,7 @@ use std::{ - collections::{HashMap, HashSet}, + collections::HashMap, fmt::{self, Display, Formatter}, iter::once, - str::FromStr, }; use eth_trie_utils::{ @@ -11,6 +10,7 @@ use eth_trie_utils::{ trie_subsets::create_trie_subset, }; use ethereum_types::{Address, H256, U256}; +use log::trace; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -19,7 +19,6 @@ use thiserror::Error; use crate::{ processed_block_trace::{NodesUsedByTxn, ProcessedBlockTrace, StateTrieWrites, TxnMetaState}, - trace_protocol::TxnInfo, types::{ HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, OtherBlockData, TrieRootHash, TxnIdx, TxnProofGenIR, EMPTY_ACCOUNT_BYTES_RLPED, EMPTY_TRIE_HASH, @@ -101,38 +100,6 @@ impl ProcessedBlockTrace { .into_iter() .enumerate() .map(|(txn_idx, txn_info)| { - let all_storage_roots = curr_block_tries - .state - .items() - .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) - .map(|(a, v)| (a, rlp::decode::(&v).unwrap().storage_root)) - .collect::>(); - - let all_state_nodes = curr_block_tries - .state - .items() - .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) - .map(|(a, v)| (a, 
rlp::decode::(&v).unwrap())) - .map(|(a, d)| format!("{:x} --> {:#?}", a, d)) - .collect::>(); - - println!("All state nodes: {:#?}", all_state_nodes); - - println!("All storage roots (before): {:#?}", all_storage_roots); - - println!("Full storage trie (before):"); - for (addr, trie) in curr_block_tries.storage.iter() { - println!("ALL (before) Storage slots for hashed addr {:x}:", addr); - - let slots = trie - .items() - .map(|(k, v)| format!("{:x}: {:?}", k, v)) - .collect::>(); - println!("----------"); - println!("{:#?}", slots); - println!("----------\n"); - } - let tries = Self::create_minimal_partial_tries_needed_by_txn( &mut curr_block_tries, &txn_info.nodes_used_by_txn, @@ -159,18 +126,12 @@ impl ProcessedBlockTrace { txn_idx, )?; - // hacky_rpc_call_to_update_new_coinbase_balance(&mut curr_block_tries.state); - - // TODO: Clean up if this works... - let trie_roots_after = TrieRoots { - state_root: curr_block_tries.state.hash(), - transactions_root: curr_block_tries.txn.hash(), - receipts_root: curr_block_tries.receipt.hash(), - }; - - println!("PARTIAL TRIES BEFORE: {:?}", tries); - - println!("TRIE ROOTS AFTER: {:?}", trie_roots_after); + let trie_roots_after = calculate_trie_input_hashes(&curr_block_tries); + trace!( + "Protocol expected trie roots after txn {}: {:?}", + txn_idx, + trie_roots_after + ); let gen_inputs = GenerationInputs { txn_number_before: txn_idx.into(), @@ -188,43 +149,13 @@ impl ProcessedBlockTrace { addresses, }; - println!("Code mapping: {:?}", gen_inputs.contract_code); - let txn_proof_gen_ir = TxnProofGenIR { txn_idx, gen_inputs, }; - // println!("IR: {:#?}", txn_proof_gen_ir); - tot_gas_used = new_tot_gas_used; - let all_storage_roots = curr_block_tries - .state - .items() - .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) - .map(|(a, v)| (a, rlp::decode::(&v).unwrap().storage_root)) - .collect::>(); - println!("All storage roots: {:#?}", all_storage_roots); - - println!( - "All state nodes: {:#?}", - curr_block_tries - .state - .keys() - .map(|k| format!("{:x}, {:x}", k, hash(&k.bytes_be()))) - .collect::>() - ); - - for (addr, trie) in curr_block_tries.storage.iter() { - println!("Storage slots for hashed addr {:x}:", addr); - - let slots = trie.keys().map(|s| format!("{:x}", s)).collect::>(); - println!("----------"); - println!("{:#?}", slots); - println!("----------"); - } - Ok(txn_proof_gen_ir) }) .collect::>>()?; @@ -241,30 +172,13 @@ impl ProcessedBlockTrace { curr_block_tries: &mut PartialTrieState, nodes_used_by_txn: &NodesUsedByTxn, txn_idx: TxnIdx, - coin_base_addr: &Address, + _coin_base_addr: &Address, ) -> TraceParsingResult { - let hashed_coinbase = hash(coin_base_addr.as_bytes()); - - // TODO: Remove once the full node adds this to the trace... - let node_accesses_plus_coinbase = nodes_used_by_txn - .state_accesses - .iter() - .cloned() - .chain(once(hashed_coinbase)); - let state_trie = create_minimal_state_partial_trie( &curr_block_tries.state, - node_accesses_plus_coinbase, + nodes_used_by_txn.state_accesses.iter().cloned(), )?; - println!("SPECIAL QUERY ON PARTIAL"); - let res = state_trie.get( - Nibbles::from_str("8556274a27dd7524955417c11ecd917251cc7c4c8310f4c7e4bd3c304d3d9a79") - .unwrap(), - ); - - println!("SPECIAL QUERY ON PARTIAL RES: {:?}", res.map(hex::encode)); - let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); // TODO: Replace cast once `eth_trie_utils` supports `into` for `usize... 
let transactions_trie = @@ -273,7 +187,8 @@ impl ProcessedBlockTrace { let receipts_trie = create_trie_subset_wrapped(&curr_block_tries.receipt, once(txn_k), TrieType::Receipt)?; - let x = nodes_used_by_txn + // TODO: Refactor so we can remove this vec alloc... + let storage_access_vec = nodes_used_by_txn .storage_accesses .iter() .map(|(k, v)| (H256::from_slice(&k.bytes_be()), v.clone())) @@ -282,16 +197,9 @@ impl ProcessedBlockTrace { let storage_tries = create_minimal_storage_partial_tries( &mut curr_block_tries.storage, &nodes_used_by_txn.state_accounts_with_no_accesses_but_storage_tries, - x.iter(), + storage_access_vec.iter(), )?; - println!( - "{:#?}", - storage_tries - .iter() - .map(|(a, t)| format!("hashed account addr: {:x}: {}", a, t.keys().count())) - .collect::>() - ); Ok(TrieInputs { state_trie, transactions_trie, @@ -299,79 +207,19 @@ impl ProcessedBlockTrace { storage_tries, }) } - - fn get_accounts_with_no_storage_access_that_have_entries_in_state_trie( - storage_accesses: &[(HashedAccountAddr, Vec)], - state_accesses: &[HashedNodeAddr], - ) -> Vec<(HashedAccountAddr, Vec)> { - let storage_accesses_set: HashSet = - HashSet::from_iter(storage_accesses.iter().map(|(k, _)| k).cloned()); - state_accesses - .iter() - .filter(|h_addr| !storage_accesses_set.contains(h_addr)) - .map(|h_addr| (*h_addr, Vec::default())) - .collect() - } - - // It's not clear to me if the client should have an empty storage trie for when - // a txn performs the accounts first storage access, but we're going to assume - // they won't for now and deal with that case here. - fn add_empty_storage_tries_that_appear_in_trace_but_not_pre_image( - s_tries: &mut Vec<(HashedAccountAddr, HashedPartialTrie)>, - txn_traces: &[TxnInfo], - ) { - // TODO: Make a bit more efficient... - let all_addrs_that_access_storage_iter = txn_traces - .iter() - .flat_map(|x| x.traces.keys().map(|addr| hash(addr.as_bytes()))); - let addrs_with_storage_access_without_s_tries_iter: Vec<_> = - all_addrs_that_access_storage_iter - .filter(|addr| !s_tries.iter().any(|(a, _)| addr == a)) - .collect(); - - s_tries.extend( - addrs_with_storage_access_without_s_tries_iter - .into_iter() - .map(|k| (k, HashedPartialTrie::default())), - ); - } - fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: NodesUsedByTxn, meta: &TxnMetaState, txn_idx: TxnIdx, ) -> TraceParsingResult<()> { - println!("Applying deltas!"); - for (hashed_acc_addr, storage_writes) in deltas.storage_writes { let storage_trie = trie_state .storage .get_mut(&H256::from_slice(&hashed_acc_addr.bytes_be())) - .ok_or( - TraceParsingError::MissingAccountStorageTrie(H256::zero()), // TODO!!! FIX - )?; - - println!("Applying storage writes of {:?}", storage_writes); - - println!( - "All storage slots before write apply: {:#?}", - storage_trie - .keys() - .map(|k| format!("{:x}", k)) - .collect::>() - ); - - for (addr, write) in storage_writes.iter() { - if storage_trie.get(*addr).is_none() { - println!( - "STORAGE SLOT CREATED! (h_account: {:x}) {:x} --> {}", - hashed_acc_addr, - addr, - hex::encode(write) - ); - } - } + .ok_or(TraceParsingError::MissingAccountStorageTrie( + H256::from_slice(&hashed_acc_addr.bytes_be()), + ))?; for (slot, val) in storage_writes .into_iter() @@ -391,30 +239,21 @@ impl ProcessedBlockTrace { let val_k = Nibbles::from_h256_be(hashed_acc_addr); // If the account was created, then it will not exist in the trie. - let val_bytes = trie_state.state.get(val_k).unwrap_or_else(|| { - println!("ACCOUNT CREATED DURING DELTA APPLY! 
{}", hashed_acc_addr); - &EMPTY_ACCOUNT_BYTES_RLPED - }); - - println!( - "Empty RLP account: {:?}", - rlp::decode::(&EMPTY_ACCOUNT_BYTES_RLPED).unwrap() - ); + let val_bytes = trie_state + .state + .get(val_k) + .unwrap_or(&EMPTY_ACCOUNT_BYTES_RLPED); let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) })?; - println!("Account for (before) {:x}: {:#?}", hashed_acc_addr, account); - s_trie_writes.apply_writes_to_state_node( &mut account, &hashed_acc_addr, &trie_state.storage, )?; - println!("Account for {:x}: {:#?}", hashed_acc_addr, account); - let updated_account_bytes = rlp::encode(&account); trie_state .state @@ -436,8 +275,6 @@ impl ProcessedBlockTrace { other_data: &OtherBlockData, initial_trie_state: &PartialTrieState, ) { - println!("Padding len: {}", gen_inputs.len()); - match gen_inputs.len() { 0 => { // Need to pad with two dummy txns. @@ -469,8 +306,6 @@ impl StateTrieWrites { h_addr: &HashedAccountAddr, acc_storage_tries: &HashMap, ) -> TraceParsingResult<()> { - println!("Applying writes!"); - let storage_root_hash_change = match self.storage_trie_change { false => None, true => { @@ -482,10 +317,6 @@ impl StateTrieWrites { } }; - if let Some(new_t) = storage_root_hash_change { - println!("NEW STORAGE ROOT BEING APPLIED: {:x}", new_t); - } - update_val_if_some(&mut state_node.balance, self.balance); update_val_if_some(&mut state_node.nonce, self.nonce); update_val_if_some(&mut state_node.storage_root, storage_root_hash_change); @@ -495,11 +326,11 @@ impl StateTrieWrites { } } -fn calculate_trie_input_hashes(t_inputs: &TrieInputs) -> TrieRoots { +fn calculate_trie_input_hashes(t_inputs: &PartialTrieState) -> TrieRoots { TrieRoots { - state_root: t_inputs.state_trie.hash(), - transactions_root: t_inputs.transactions_trie.hash(), - receipts_root: t_inputs.receipts_trie.hash(), + state_root: t_inputs.state.hash(), + transactions_root: t_inputs.txn.hash(), + receipts_root: t_inputs.receipt.hash(), } } @@ -600,11 +431,6 @@ fn create_minimal_storage_partial_tries<'a>( state_accounts_with_no_accesses_but_storage_tries: &HashMap, accesses_per_account: impl Iterator)>, ) -> TraceParsingResult> { - println!( - "BASE TRIES KEYS: {:#?}", - storage_tries.keys().collect::>() - ); - accesses_per_account .map(|(h_addr, mem_accesses)| { // TODO: Clean up... diff --git a/src/deserializers.rs b/src/deserializers.rs index e64461ca4..7e0b4054e 100644 --- a/src/deserializers.rs +++ b/src/deserializers.rs @@ -1,12 +1,13 @@ //! Custom deserializers / serializers for Serde. 
use hex::{FromHex, ToHex};
-use plonky2_evm::generation::mpt::LegacyReceiptRlp;
 use rlp::DecoderError;
 use serde::{
     de::{Error, Visitor},
     Deserialize, Deserializer, Serialize, Serializer,
 };

+use crate::types::ReceiptRlp;
+
 #[derive(Clone, Debug, Default, Deserialize)]
 pub struct ByteString(#[serde(with = "self")] pub Vec<u8>);

@@ -30,7 +31,7 @@ impl From<Vec<u8>> for ByteString {
     }
 }

-impl TryFrom<ByteString> for LegacyReceiptRlp {
+impl TryFrom<ByteString> for ReceiptRlp {
     type Error = DecoderError;

     fn try_from(value: ByteString) -> Result<Self, Self::Error> {
@@ -38,8 +39,8 @@ impl TryFrom<ByteString> for LegacyReceiptRlp {
     }
 }

-impl From<LegacyReceiptRlp> for ByteString {
-    fn from(value: LegacyReceiptRlp) -> Self {
+impl From<ReceiptRlp> for ByteString {
+    fn from(value: ReceiptRlp) -> Self {
         Self(rlp::encode(&value).into())
     }
 }
diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs
index aa81001d6..1a5074d93 100644
--- a/src/processed_block_trace.rs
+++ b/src/processed_block_trace.rs
@@ -1,7 +1,6 @@
 use std::collections::{HashMap, HashSet};
 use std::fmt::Debug;
 use std::iter::once;
-use std::str::FromStr;

 use eth_trie_utils::nibbles::Nibbles;
 use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
 use ethereum_types::U256;
@@ -16,9 +16,8 @@ use crate::trace_protocol::{
     TrieUncompressed, TxnInfo,
 };
 use crate::types::{
-    CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr,
-    HashedStorageAddrNibbles, OtherBlockData, StorageAddr, TrieRootHash, TxnProofGenIR,
-    EMPTY_CODE_HASH, EMPTY_TRIE_HASH,
+    CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles,
+    OtherBlockData, TrieRootHash, TxnProofGenIR, EMPTY_CODE_HASH, EMPTY_TRIE_HASH,
 };
 use crate::utils::{
     hash, print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie,
@@ -221,7 +220,7 @@ impl TxnInfo {
             nodes_used_by_txn.storage_accesses.push((
                 Nibbles::from_h256_be(hashed_addr),
                 storage_access_keys
-                    .map(|k| storage_addr_to_nibbles_even_nibble_fixed_hashed(&k))
+                    .map(|k| Nibbles::from_h256_be(hash(&k.0)))
                     .collect(),
             ));

@@ -271,11 +270,6 @@ impl TxnInfo {
             }
         }

-        // println!(
-        //     "Storage accesses for {:x} (hashed: {:x}): {:#?}",
-        //     addr, hashed_addr, nodes_used_by_txn
-        // );
-
         let accounts_with_storage_accesses: HashSet<_> = HashSet::from_iter(
             nodes_used_by_txn
                 .storage_accesses
@@ -302,6 +296,9 @@ impl TxnInfo {
             .state_accounts_with_no_accesses_but_storage_tries
             .extend(accounts_with_storage_but_no_storage_accesses);

+        println!("META TXN BYTES: {}", hex::encode(&self.meta.byte_code));
+        println!("META RECEIPT BYTES: {}", hex::encode(&self.meta.byte_code));
+
         let txn_bytes = match self.meta.byte_code.is_empty() {
             false => Some(self.meta.byte_code),
             true => None,
         };
@@ -370,38 +367,3 @@ pub(crate) struct TxnMetaState {
     pub(crate) receipt_node_bytes: Vec<u8>,
     pub(crate) gas_used: u64,
 }
-
-// TODO: Remove/rename function based on how complex this gets...
-fn storage_addr_to_nibbles_even_nibble_fixed_hashed(addr: &StorageAddr) -> Nibbles {
-    // I think this is all we need to do? Yell at me if this breaks things.
-    // H256's are never going to be truncated I think.
-
-    // // TODO: Disgusting hack! Remove if this works...
-    // let s = hex::encode(addr.as_bytes());
-
-    // let mut n = Nibbles::from_str(&s).unwrap();
-    // let odd_count = (n.count & 1) == 1;
-
-    // if odd_count {
-    //     n.push_nibble_front(0);
-    // }
-
-    // n
-
-    // let hashed_addr = hash(addr.as_bytes());
-    // Nibbles::from_h256_be(hashed_addr)
-
-    Nibbles::from_h256_be(hash(&addr.0))
-}
-
-// TODO: Extreme hack! Please don't keep...
-fn string_to_nibbles_even_nibble_fixed(s: &str) -> Nibbles {
-    let mut n = Nibbles::from_str(s).unwrap();
-    let odd_count = (n.count & 1) == 1;
-
-    if odd_count {
-        n.push_nibble_front(0);
-    }
-
-    n
-}
diff --git a/src/types.rs b/src/types.rs
index 5a4334989..f49b6548e 100644
--- a/src/types.rs
+++ b/src/types.rs
@@ -1,9 +1,12 @@
+use bytes::Bytes;
 use eth_trie_utils::nibbles::Nibbles;
 use ethereum_types::{H256, U256};
 use plonky2_evm::{
-    generation::GenerationInputs,
+    generation::{mpt::LogRlp, GenerationInputs},
     proof::{BlockHashes, BlockMetadata, TrieRoots},
 };
+use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream};
+use rlp_derive::{RlpDecodable, RlpEncodable};
 use serde::{Deserialize, Serialize};

 use crate::proof_gen_types::ProofBeforeAndAfterDeltas;
@@ -128,3 +131,58 @@ impl TxnProofGenIR {
         dummy
     }
 }
+
+// TODO: Replace with enum...
+pub type TxnType = u8;
+
+#[derive(Clone, Debug)]
+pub enum ReceiptRlp {
+    Legacy(ReceiptRlpCommon),
+    Other(TxnType, ReceiptRlpCommon),
+}
+
+impl ReceiptRlp {
+    pub fn bloom(&self) -> &Bytes {
+        match self {
+            ReceiptRlp::Legacy(c) => &c.bloom,
+            ReceiptRlp::Other(_, c) => &c.bloom,
+        }
+    }
+}
+
+impl Encodable for ReceiptRlp {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        let common = match self {
+            ReceiptRlp::Legacy(c) => c,
+            ReceiptRlp::Other(t_byte, c) => {
+                s.append(t_byte);
+                c
+            }
+        };
+
+        s.append(common);
+    }
+}
+
+// TODO: Make a bit nicer...
+impl Decodable for ReceiptRlp {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        println!("-RLP- {}", rlp);
+
+        let list_rlp = rlp.as_list()?;
+
+        Ok(match list_rlp.len() {
+            4 => Self::Legacy(rlp::decode(rlp.as_raw())?),
+            5 => Self::Other(list_rlp[0], rlp::decode(&list_rlp[1..])?),
+            _ => panic!("Malformed receipt rlp!"),
+        })
+    }
+}
+
+#[derive(Clone, Debug, RlpDecodable, RlpEncodable)]
+pub struct ReceiptRlpCommon {
+    pub status: bool,
+    pub cum_gas_used: U256,
+    pub bloom: Bytes,
+    pub logs: Vec<LogRlp>,
+}
diff --git a/src/utils.rs b/src/utils.rs
index 663e81924..78b1a89e8 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,5 +1,4 @@
 use eth_trie_utils::{
-    nibbles::Nibbles,
     partial_trie::{HashedPartialTrie, PartialTrie},
     trie_ops::ValOrHash,
 };
@@ -18,14 +17,6 @@ pub(crate) fn update_val_if_some<T>(target: &mut T, opt: Option<T>) {
     }
 }

-pub(crate) fn clone_vec_and_remove_refs<T: Clone>(vec_of_refs: &[&T]) -> Vec<T> {
-    vec_of_refs.iter().map(|r| (*r).clone()).collect()
-}
-
-pub(crate) fn h256_to_nibbles(v: H256) -> Nibbles {
-    Nibbles::from_h256_be(v)
-}
-
 // TODO: Move under a feature flag...
pub(crate) fn print_value_and_hash_nodes_of_trie(trie: &HashedPartialTrie) { let trie_elems = print_value_and_hash_nodes_of_trie_common(trie); From cc2f35a51a31db67967395937f866b9b888b94ff Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 30 Nov 2023 13:39:25 -0700 Subject: [PATCH 150/208] Requested changes for PR #3 --- src/decoding.rs | 10 ++-- src/deserializers.rs | 17 ------- src/types.rs | 107 +------------------------------------------ 3 files changed, 7 insertions(+), 127 deletions(-) diff --git a/src/decoding.rs b/src/decoding.rs index 8b299f50f..9b77a5d06 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -343,20 +343,20 @@ fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedP fn create_dummy_txn_pair_for_empty_block( other_data: &OtherBlockData, - final_trie_state: &PartialTrieState, + initial_trie_state: &PartialTrieState, ) -> [TxnProofGenIR; 2] { [ - create_dummy_gen_input(other_data, final_trie_state, 0), - create_dummy_gen_input(other_data, final_trie_state, 0), + create_dummy_gen_input(other_data, initial_trie_state, 0), + create_dummy_gen_input(other_data, initial_trie_state, 0), ] } fn create_dummy_gen_input( other_data: &OtherBlockData, - final_trie_state: &PartialTrieState, + initial_trie_state: &PartialTrieState, txn_idx: TxnIdx, ) -> TxnProofGenIR { - let tries = create_dummy_proof_trie_inputs(final_trie_state); + let tries = create_dummy_proof_trie_inputs(initial_trie_state); let trie_roots_after = TrieRoots { state_root: tries.state_trie.hash(), diff --git a/src/deserializers.rs b/src/deserializers.rs index 7e0b4054e..5bcacea3d 100644 --- a/src/deserializers.rs +++ b/src/deserializers.rs @@ -1,13 +1,10 @@ //! Custom deserializers / serializers for Serde. use hex::{FromHex, ToHex}; -use rlp::DecoderError; use serde::{ de::{Error, Visitor}, Deserialize, Deserializer, Serialize, Serializer, }; -use crate::types::ReceiptRlp; - #[derive(Clone, Debug, Default, Deserialize)] pub struct ByteString(#[serde(with = "self")] pub Vec); @@ -31,20 +28,6 @@ impl From> for ByteString { } } -impl TryFrom for ReceiptRlp { - type Error = DecoderError; - - fn try_from(value: ByteString) -> Result { - rlp::decode(&value.0) - } -} - -impl From for ByteString { - fn from(value: ReceiptRlp) -> Self { - Self(rlp::encode(&value).into()) - } -} - fn remove_hex_prefix_if_present(data: &str) -> &str { let prefix = &data[..2]; diff --git a/src/types.rs b/src/types.rs index f49b6548e..c9a50a4e6 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,12 +1,9 @@ -use bytes::Bytes; use eth_trie_utils::nibbles::Nibbles; use ethereum_types::{H256, U256}; use plonky2_evm::{ - generation::{mpt::LogRlp, GenerationInputs}, - proof::{BlockHashes, BlockMetadata, TrieRoots}, + generation::GenerationInputs, + proof::{BlockHashes, BlockMetadata}, }; -use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; -use rlp_derive::{RlpDecodable, RlpEncodable}; use serde::{Deserialize, Serialize}; use crate::proof_gen_types::ProofBeforeAndAfterDeltas; @@ -85,104 +82,4 @@ impl TxnProofGenIR { gas_used_after: self.gen_inputs.gas_used_after, } } - - /// Creates a dummy proof, corresponding to no actual transaction. - /// - /// These can be used to pad a block if the number of transactions in the - /// block is below `2`. Dummy proofs will always be executed at the start - /// of a block. 
- pub fn create_dummy(b_height: BlockHeight) -> Self { - let trie_roots_after = TrieRoots { - state_root: EMPTY_TRIE_HASH, - transactions_root: EMPTY_TRIE_HASH, - receipts_root: EMPTY_TRIE_HASH, - }; - - let block_metadata = BlockMetadata { - block_number: b_height.into(), - ..Default::default() - }; - - let gen_inputs = GenerationInputs { - trie_roots_after, - block_metadata, - ..Default::default() - }; - - Self { - txn_idx: 0, - gen_inputs, - } - } - - /// Copy relevant fields of the `TxnProofGenIR` to a new `TxnProofGenIR` - /// with a different `b_height`. - /// - /// This can be used to pad a block if there is only one transaction in the - /// block. Block proofs need a minimum of two transactions. Dummy proofs - /// will always be executed at the start of a block. - pub fn dummy_with_at(&self, b_height: BlockHeight) -> Self { - let mut dummy = Self::create_dummy(b_height); - - dummy.gen_inputs.gas_used_before = self.gen_inputs.gas_used_after; - dummy.gen_inputs.gas_used_after = self.gen_inputs.gas_used_after; - - dummy.gen_inputs.trie_roots_after = self.gen_inputs.trie_roots_after.clone(); - dummy - } -} - -// TODO: Replace with enum... -pub type TxnType = u8; - -#[derive(Clone, Debug)] -pub enum ReceiptRlp { - Legacy(ReceiptRlpCommon), - Other(TxnType, ReceiptRlpCommon), -} - -impl ReceiptRlp { - pub fn bloom(&self) -> &Bytes { - match self { - ReceiptRlp::Legacy(c) => &c.bloom, - ReceiptRlp::Other(_, c) => &c.bloom, - } - } -} - -impl Encodable for ReceiptRlp { - fn rlp_append(&self, s: &mut RlpStream) { - let common = match self { - ReceiptRlp::Legacy(c) => c, - ReceiptRlp::Other(t_byte, c) => { - s.append(t_byte); - c - } - }; - - s.append(common); - } -} - -// TODO: Make a bit nicer... -impl Decodable for ReceiptRlp { - fn decode(rlp: &Rlp) -> Result { - println!("-RLP- {}", rlp); - - let list_rlp = rlp.as_list()?; - - Ok(match list_rlp.len() { - 4 => Self::Legacy(rlp::decode(rlp.as_raw())?), - 5 => Self::Other(list_rlp[0], rlp::decode(&list_rlp[1..])?), - _ => panic!("Malformed receipt rlp!"), - }) - } -} - -#[derive(Clone, Debug, RlpDecodable, RlpEncodable)] -pub struct ReceiptRlpCommon { - pub status: bool, - pub cum_gas_used: U256, - pub bloom: Bytes, - pub logs: Vec, } From 1f0fb9ad434f4109b7288f84c6ca82b694e79849 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 30 Nov 2023 13:42:44 -0700 Subject: [PATCH 151/208] Removed a few more traces --- src/compact/compact_prestate_processing.rs | 1 - src/compact/compact_to_partial_trie.rs | 10 ---------- src/decoding.rs | 9 --------- src/processed_block_trace.rs | 14 +------------- src/trace_protocol.rs | 5 +---- 5 files changed, 2 insertions(+), 37 deletions(-) diff --git a/src/compact/compact_prestate_processing.rs b/src/compact/compact_prestate_processing.rs index d9a1f8806..4b455c401 100644 --- a/src/compact/compact_prestate_processing.rs +++ b/src/compact/compact_prestate_processing.rs @@ -593,7 +593,6 @@ impl ParserState { ) } [WitnessEntry::Node(node), WitnessEntry::Node(NodeEntry::Hash(c_hash))] => { - println!("CREATING STORAGE TRIE FROM COMPACT!!"); Self::try_create_and_insert_partial_trie_from_node( node, Some((*c_hash).into()), diff --git a/src/compact/compact_to_partial_trie.rs b/src/compact/compact_to_partial_trie.rs index bb6844853..741eb5aa3 100644 --- a/src/compact/compact_to_partial_trie.rs +++ b/src/compact/compact_to_partial_trie.rs @@ -85,7 +85,6 @@ fn process_code( c_bytes: Vec, output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { - println!("PROCESSING CODE NODE!!"); let c_hash = 
hash(&c_bytes); output.code.insert(c_hash, c_bytes); @@ -124,10 +123,7 @@ fn process_leaf( } }; - println!("INSERTING KEY {:x}!", full_k); - output.trie.insert(full_k, l_val); - Ok(()) } @@ -150,12 +146,6 @@ fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup( let code_hash = match &acc_data.account_node_code { Some(AccountNodeCode::CodeNode(c_bytes)) => { let c_hash = hash(c_bytes); - println!( - "Adding code hash mapping ({:x} --> {})", - c_hash, - hex::encode(c_bytes) - ); - output.code.insert(c_hash, c_bytes.clone()); c_hash diff --git a/src/decoding.rs b/src/decoding.rs index 9b77a5d06..1f7efa3f9 100644 --- a/src/decoding.rs +++ b/src/decoding.rs @@ -108,15 +108,6 @@ impl ProcessedBlockTrace { )?; let addresses = Self::get_known_addresses_if_enabled(); - - let account_and_storage_hashes = curr_block_tries - .state - .items() - .filter_map(|(a, v)| v.as_val().map(|v| (a, v.clone()))) - .map(|(a, v)| (a, rlp::decode::(&v).unwrap().storage_root)) - .collect::>(); - println!("{:#?}", account_and_storage_hashes); - let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; Self::apply_deltas_to_trie_state( diff --git a/src/processed_block_trace.rs b/src/processed_block_trace.rs index 1a5074d93..f311e39c5 100644 --- a/src/processed_block_trace.rs +++ b/src/processed_block_trace.rs @@ -277,10 +277,6 @@ impl TxnInfo { .filter(|(_, slots)| !slots.is_empty()) .map(|(addr, _)| *addr), ); - println!( - "Account with storage accesses: {:#?}", - accounts_with_storage_accesses - ); let all_accounts_with_non_empty_storage = all_accounts_in_pre_image .iter() @@ -296,9 +292,6 @@ impl TxnInfo { .state_accounts_with_no_accesses_but_storage_tries .extend(accounts_with_storage_but_no_storage_accesses); - println!("META TXN BYTES: {}", hex::encode(&self.meta.byte_code)); - println!("META RECEIPT BYTES: {}", hex::encode(&self.meta.byte_code)); - let txn_bytes = match self.meta.byte_code.is_empty() { false => Some(self.meta.byte_code), true => None, @@ -322,16 +315,11 @@ impl TxnInfo { } fn process_rlped_receipt_node_bytes(raw_bytes: Vec) -> Vec { - println!("PROC RAW: {}", hex::encode(&raw_bytes)); - match rlp::decode::(&raw_bytes) { Ok(_) => raw_bytes, Err(_) => { // Must be non-legacy. 
-
-            let decoded = rlp::decode::<Vec<u8>>(&raw_bytes).unwrap();
-            println!("PROC Non-legacy: {}", hex::encode(&decoded));
-            decoded
+            rlp::decode::<Vec<u8>>(&raw_bytes).unwrap()
         }
     }
 }
diff --git a/src/trace_protocol.rs b/src/trace_protocol.rs
index 49f92c5fc..e89289798 100644
--- a/src/trace_protocol.rs
+++ b/src/trace_protocol.rs
@@ -194,10 +194,7 @@ impl ContractCodeUsage {
     pub(crate) fn get_code_hash(&self) -> CodeHash {
         match self {
             ContractCodeUsage::Read(hash) => *hash,
-            ContractCodeUsage::Write(bytes) => {
-                println!("Hashing code {} ...", hex::encode(&bytes.0));
-                hash(bytes)
-            }
+            ContractCodeUsage::Write(bytes) => hash(bytes),
         }
     }
 }

From 35e8a1601f30acd3c956d2d63274e0f83e3c9771 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Thu, 30 Nov 2023 14:43:30 -0700
Subject: [PATCH 152/208] Small cleanup

---
 plonky_block_proof_gen/src/proof_gen.rs | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs
index db883e67a..f4c0d50d6 100644
--- a/plonky_block_proof_gen/src/proof_gen.rs
+++ b/plonky_block_proof_gen/src/proof_gen.rs
@@ -48,8 +48,6 @@ pub fn generate_txn_proof(
     let txn_idx = start_info.txn_idx;
     let deltas = start_info.deltas();

-    println!("BLOCK PROOF INPUTS: {:#?}", start_info);
-
     let (txn_proof_intern, p_vals) = p_state
         .state
         .prove_root(
@@ -126,7 +124,7 @@ struct ExpandedAggregatableProof<'a> {
     is_agg: bool,
 }

-// TODO: Remove of simplify, as most of this work is occurring inside plonky2
+// TODO: Remove or simplify, as most of this work is occurring inside plonky2
 // now.
 fn expand_aggregatable_proofs<'a>(
     lhs_child: &'a AggregatableProof,

From c84396b056672fbfe7469183a70612bd2b7ef3a1 Mon Sep 17 00:00:00 2001
From: Robin Salen
Date: Thu, 30 Nov 2023 16:51:30 -0500
Subject: [PATCH 153/208] Update default ranges and add comment

---
 plonky_block_proof_gen/src/prover_state.rs | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs
index 2d47ac016..aabb2b167 100644
--- a/plonky_block_proof_gen/src/prover_state.rs
+++ b/plonky_block_proof_gen/src/prover_state.rs
@@ -26,14 +26,17 @@ pub struct ProverStateBuilder {

 impl Default for ProverStateBuilder {
     fn default() -> Self {
+        // These ranges are somewhat arbitrary, but should be enough for testing
+        // purposes against most transactions.
+        // Some heavy contract deployments may require bumping these ranges though.
Self { - arithmetic_circuit_size: 16..19, + arithmetic_circuit_size: 16..20, byte_packing_circuit_size: 10..20, - cpu_circuit_size: 15..22, + cpu_circuit_size: 12..22, keccak_circuit_size: 14..17, - keccak_sponge_circuit_size: 9..16, + keccak_sponge_circuit_size: 9..14, logic_circuit_size: 12..16, - memory_circuit_size: 18..24, + memory_circuit_size: 17..25, } } } From 982c215c36b6fd7acc82c5098939ff31875bcc47 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 1 Dec 2023 13:09:57 -0700 Subject: [PATCH 154/208] Plonky2 bump --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 1109707d6..acdff58a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,7 +16,7 @@ hex-literal = "0.4.1" hex = "0.4.3" keccak-hash = "0.10.0" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "5572da30d7ab818594cf8659839fa832dfcf1d3d" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "32d009671a1af86312807ba2dc90e9bb4f4a94da" } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" From 867cec48c066506dc81312a0f033f9a845a4c78d Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 1 Dec 2023 18:05:41 -0500 Subject: [PATCH 155/208] Do some cleanup and refactoring --- plonky_block_proof_gen/src/proof_gen.rs | 186 +++------------------- plonky_block_proof_gen/src/proof_types.rs | 65 ++------ plonky_block_proof_gen/src/types.rs | 91 ----------- 3 files changed, 40 insertions(+), 302 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index f4c0d50d6..f91583ffe 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,21 +1,10 @@ use plonky2::util::timing::TimingTree; -use plonky2_evm::{ - all_stark::AllStark, - config::StarkConfig, - proof::{ExtraBlockData, PublicValues}, -}; -use proof_protocol_decoder::{ - proof_gen_types::ProofBeforeAndAfterDeltas, - types::{OtherBlockData, TxnProofGenIR}, -}; +use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; +use proof_protocol_decoder::types::TxnProofGenIR; use crate::{ - proof_types::{ - create_extra_block_data, AggregatableProof, GeneratedAggProof, GeneratedBlockProof, - GeneratedTxnProof, ProofCommon, - }, + proof_types::{AggregatableProof, GeneratedAggProof, GeneratedBlockProof, GeneratedTxnProof}, prover_state::ProverState, - types::{PlonkyProofIntern, ProofUnderlyingTxns}, }; pub type ProofGenResult = Result; @@ -44,11 +33,7 @@ pub fn generate_txn_proof( p_state: &ProverState, start_info: TxnProofGenIR, ) -> ProofGenResult { - let b_height = start_info.b_height(); - let txn_idx = start_info.txn_idx; - let deltas = start_info.deltas(); - - let (txn_proof_intern, p_vals) = p_state + let (intern, p_vals) = p_state .state .prove_root( &AllStark::default(), @@ -58,18 +43,7 @@ pub fn generate_txn_proof( ) .map_err(|err| err.to_string())?; - let common = ProofCommon { - b_height, - deltas, - roots_before: p_vals.trie_roots_before, - roots_after: p_vals.trie_roots_after, - }; - - Ok(GeneratedTxnProof { - txn_idx, - common, - intern: txn_proof_intern, - }) + Ok(GeneratedTxnProof { p_vals, intern }) } /// Generate a agg proof from two child proofs. 
@@ -79,126 +53,20 @@ pub fn generate_agg_proof( p_state: &ProverState, lhs_child: &AggregatableProof, rhs_child: &AggregatableProof, - other_data: OtherBlockData, ) -> ProofGenResult { - let expanded_agg_proofs = expand_aggregatable_proofs(lhs_child, rhs_child, other_data); - - let (agg_proof_intern, p_vals) = p_state + let (intern, p_vals) = p_state .state .prove_aggregation( - expanded_agg_proofs.lhs.is_agg, - expanded_agg_proofs.lhs.intern, - expanded_agg_proofs.p_vals_lhs, - expanded_agg_proofs.rhs.is_agg, - expanded_agg_proofs.rhs.intern, - expanded_agg_proofs.p_vals_rhs, + lhs_child.is_agg(), + lhs_child.intern(), + lhs_child.public_values(), + rhs_child.is_agg(), + rhs_child.intern(), + rhs_child.public_values(), ) .map_err(|err| err.to_string())?; - let common = ProofCommon { - b_height: lhs_child.b_height(), - deltas: expanded_agg_proofs.combined_deltas, - roots_before: p_vals.trie_roots_before, - roots_after: p_vals.trie_roots_after, - }; - - Ok(GeneratedAggProof { - common, - underlying_txns: lhs_child - .underlying_txns() - .combine(&rhs_child.underlying_txns()), - intern: agg_proof_intern, - }) -} - -struct ExpandedAggregatableProofs<'a> { - p_vals_lhs: PublicValues, - p_vals_rhs: PublicValues, - lhs: ExpandedAggregatableProof<'a>, - rhs: ExpandedAggregatableProof<'a>, - combined_deltas: ProofBeforeAndAfterDeltas, -} - -struct ExpandedAggregatableProof<'a> { - intern: &'a PlonkyProofIntern, - is_agg: bool, -} - -// TODO: Remove or simplify, as most of this work is occurring inside plonky2 -// now. -fn expand_aggregatable_proofs<'a>( - lhs_child: &'a AggregatableProof, - rhs_child: &'a AggregatableProof, - other_data: OtherBlockData, -) -> ExpandedAggregatableProofs<'a> { - let (expanded_lhs, lhs_common) = expand_aggregatable_proof(lhs_child); - let (expanded_rhs, rhs_common) = expand_aggregatable_proof(rhs_child); - - let lhs_extra_data = - create_extra_block_data_for_child(lhs_common, &other_data, &lhs_child.underlying_txns()); - - let p_vals_lhs = PublicValues { - trie_roots_before: lhs_common.roots_before.clone(), - trie_roots_after: lhs_common.roots_after.clone(), - block_metadata: other_data.b_data.b_meta.clone(), - block_hashes: other_data.b_data.b_hashes.clone(), - extra_block_data: lhs_extra_data, - }; - - let rhs_extra_data = - create_extra_block_data_for_child(rhs_common, &other_data, &rhs_child.underlying_txns()); - - let p_vals_rhs = PublicValues { - trie_roots_before: rhs_common.roots_before.clone(), - trie_roots_after: rhs_common.roots_after.clone(), - block_metadata: other_data.b_data.b_meta, - block_hashes: other_data.b_data.b_hashes, - extra_block_data: rhs_extra_data, - }; - - let combined_deltas = merge_lhs_and_rhs_deltas(&lhs_common.deltas, &rhs_common.deltas); - - ExpandedAggregatableProofs { - p_vals_lhs, - p_vals_rhs, - lhs: expanded_lhs, - rhs: expanded_rhs, - combined_deltas, - } -} - -fn create_extra_block_data_for_child( - common: &ProofCommon, - other_data: &OtherBlockData, - txn_range: &ProofUnderlyingTxns, -) -> ExtraBlockData { - create_extra_block_data( - common.deltas.clone(), - other_data.genesis_state_trie_root, - txn_range.txn_idxs.start, - txn_range.txn_idxs.end, - ) -} - -fn merge_lhs_and_rhs_deltas( - lhs: &ProofBeforeAndAfterDeltas, - rhs: &ProofBeforeAndAfterDeltas, -) -> ProofBeforeAndAfterDeltas { - ProofBeforeAndAfterDeltas { - gas_used_before: lhs.gas_used_before, - gas_used_after: rhs.gas_used_after, - } -} - -fn expand_aggregatable_proof(p: &AggregatableProof) -> (ExpandedAggregatableProof, &ProofCommon) { - let (intern, is_agg, 
common) = match p { - AggregatableProof::Txn(txn_intern) => (&txn_intern.intern, false, &txn_intern.common), - AggregatableProof::Agg(agg_intern) => (&agg_intern.intern, true, &agg_intern.common), - }; - - let expanded = ExpandedAggregatableProof { intern, is_agg }; - - (expanded, common) + Ok(GeneratedAggProof { p_vals, intern }) } /// Generate a block proof. @@ -209,29 +77,21 @@ pub fn generate_block_proof( p_state: &ProverState, prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, curr_block_agg_proof: &GeneratedAggProof, - other_data: OtherBlockData, ) -> ProofGenResult { - let b_height = curr_block_agg_proof.common.b_height; + let b_height = curr_block_agg_proof + .p_vals + .block_metadata + .block_number + .low_u64(); let parent_intern = prev_opt_parent_b_proof.map(|p| &p.intern); - let extra_block_data = create_extra_block_data( - curr_block_agg_proof.common.deltas.clone(), - other_data.genesis_state_trie_root, - curr_block_agg_proof.underlying_txns.txn_idxs.start, - curr_block_agg_proof.underlying_txns.txn_idxs.end, - ); - - let p_vals = PublicValues { - trie_roots_before: curr_block_agg_proof.common.roots_before.clone(), - trie_roots_after: curr_block_agg_proof.common.roots_after.clone(), - block_metadata: other_data.b_data.b_meta, - block_hashes: other_data.b_data.b_hashes, - extra_block_data, - }; - let (b_proof_intern, _) = p_state .state - .prove_block(parent_intern, &curr_block_agg_proof.intern, p_vals) + .prove_block( + parent_intern, + &curr_block_agg_proof.intern, + curr_block_agg_proof.p_vals.clone(), + ) .map_err(|err| err.to_string())?; Ok(GeneratedBlockProof { diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 55706079c..cdac21fc4 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -1,55 +1,17 @@ -use ethereum_types::H256; -use plonky2_evm::proof::{ExtraBlockData, TrieRoots}; -use proof_protocol_decoder::proof_gen_types::ProofBeforeAndAfterDeltas; +use plonky2_evm::proof::PublicValues; use serde::{Deserialize, Serialize}; -use crate::types::{BlockHeight, PlonkyProofIntern, ProofUnderlyingTxns, TxnIdx}; - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ProofCommon { - pub b_height: BlockHeight, - pub deltas: ProofBeforeAndAfterDeltas, - pub roots_before: TrieRoots, - pub roots_after: TrieRoots, -} - -pub fn create_extra_block_data( - deltas: ProofBeforeAndAfterDeltas, - genesis_root: H256, - txn_start: TxnIdx, - txn_end: TxnIdx, -) -> ExtraBlockData { - ExtraBlockData { - genesis_state_trie_root: genesis_root, - txn_number_before: txn_start.into(), - txn_number_after: txn_end.into(), - gas_used_before: deltas.gas_used_before, - gas_used_after: deltas.gas_used_after, - } -} +use crate::types::{BlockHeight, PlonkyProofIntern}; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedTxnProof { - pub txn_idx: TxnIdx, - pub common: ProofCommon, + pub p_vals: PublicValues, pub intern: PlonkyProofIntern, } -impl GeneratedTxnProof { - pub fn underlying_txns(&self) -> ProofUnderlyingTxns { - if self.common.roots_before.transactions_root == self.common.roots_after.transactions_root { - // This is a dummy proof no transaction was executed. 
- (self.txn_idx..self.txn_idx).into() - } else { - (self.txn_idx..=self.txn_idx).into() - } - } -} - #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedAggProof { - pub underlying_txns: ProofUnderlyingTxns, - pub common: ProofCommon, + pub p_vals: PublicValues, pub intern: PlonkyProofIntern, } @@ -69,17 +31,24 @@ pub enum AggregatableProof { } impl AggregatableProof { - pub fn underlying_txns(&self) -> ProofUnderlyingTxns { + pub(crate) fn public_values(&self) -> PublicValues { + match self { + AggregatableProof::Txn(info) => info.p_vals.clone(), + AggregatableProof::Agg(info) => info.p_vals.clone(), + } + } + + pub(crate) fn is_agg(&self) -> bool { match self { - AggregatableProof::Txn(info) => info.underlying_txns(), - AggregatableProof::Agg(info) => info.underlying_txns.clone(), + AggregatableProof::Txn(_) => false, + AggregatableProof::Agg(_) => true, } } - pub fn b_height(&self) -> BlockHeight { + pub(crate) fn intern(&self) -> &PlonkyProofIntern { match self { - AggregatableProof::Txn(info) => info.common.b_height, - AggregatableProof::Agg(info) => info.common.b_height, + AggregatableProof::Txn(info) => &info.intern, + AggregatableProof::Agg(info) => &info.intern, } } } diff --git a/plonky_block_proof_gen/src/types.rs b/plonky_block_proof_gen/src/types.rs index 60ccc46fd..71fdf6d6b 100644 --- a/plonky_block_proof_gen/src/types.rs +++ b/plonky_block_proof_gen/src/types.rs @@ -1,19 +1,10 @@ -use std::{ - cmp::Ordering, - fmt::{self, Display, Formatter}, - ops::{Range, RangeInclusive}, -}; - use plonky2::{ field::goldilocks_field::GoldilocksField, plonk::{config::PoseidonGoldilocksConfig, proof::ProofWithPublicInputs}, }; -use serde::{Deserialize, Serialize}; pub type BlockHeight = u64; -pub type TxnIdx = usize; - pub type PlonkyProofIntern = ProofWithPublicInputs; pub type AllRecursiveCircuits = plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits< @@ -21,85 +12,3 @@ pub type AllRecursiveCircuits = plonky2_evm::fixed_recursive_verifier::AllRecurs PoseidonGoldilocksConfig, 2, >; - -/// Underlying txns idxs associated with a proof. -/// Note that the range for a single txn for index `n` is `n..n+1`. -#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] -pub struct ProofUnderlyingTxns { - pub txn_idxs: Range, -} - -impl Display for ProofUnderlyingTxns { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self.num_txns() { - 0 => write!(f, "EMPTY_TXN"), - _ => write!(f, "{}-{}", self.txn_idxs.start, self.txn_idxs.end - 1), - } - } -} - -impl ProofUnderlyingTxns { - pub fn combine(&self, other: &Self) -> ProofUnderlyingTxns { - let combined_range = (self.txn_idxs.start.min(other.txn_idxs.start)) - ..(self.txn_idxs.end.max(other.txn_idxs.end)); - - combined_range.into() - } - - pub fn num_txns(&self) -> usize { - self.txn_idxs.end - self.txn_idxs.start - } - - pub fn contains_all_txns_in_block(&self, num_txns_in_block: usize) -> bool { - self.num_txns() == num_txns_in_block - } -} - -impl From> for ProofUnderlyingTxns { - fn from(txn_idxs: Range) -> Self { - Self { txn_idxs } - } -} - -impl From> for ProofUnderlyingTxns { - fn from(txn_idxs: RangeInclusive) -> Self { - Self { - txn_idxs: Range { - start: *txn_idxs.start(), - end: *txn_idxs.end() + 1, - }, - } - } -} - -impl From for Range { - fn from(underlying_txns: ProofUnderlyingTxns) -> Self { - underlying_txns.txn_idxs - } -} - -impl Ord for ProofUnderlyingTxns { - /// Compare two txn ranges. - /// - /// Assumes that empty txns (eg. `1..1`) will never be compared. 
- fn cmp(&self, other: &Self) -> Ordering { - match self == other { - true => Ordering::Equal, - false => match (self.txn_idxs.end - 1).cmp(&other.txn_idxs.start) { - Ordering::Less => Ordering::Less, - Ordering::Greater => Ordering::Greater, - Ordering::Equal => match self.txn_idxs.start.cmp(&(other.txn_idxs.end - 1)) { - Ordering::Less => Ordering::Greater, - Ordering::Equal => Ordering::Equal, - Ordering::Greater => Ordering::Less, - }, - }, - } - } -} - -impl PartialOrd for ProofUnderlyingTxns { - fn partial_cmp(&self, other: &Self) -> Option<Ordering> { - Some(self.cmp(other)) - } -} From d77e0c4cb9658fd65875e0e0a24edbbde3481cd4 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 1 Dec 2023 18:41:45 -0500 Subject: [PATCH 156/208] Reuse type from proof-protocol-decoder --- plonky_block_proof_gen/src/proof_types.rs | 3 ++- plonky_block_proof_gen/src/types.rs | 2 -- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index cdac21fc4..84a249ed1 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -1,7 +1,8 @@ use plonky2_evm::proof::PublicValues; +use proof_protocol_decoder::types::BlockHeight; use serde::{Deserialize, Serialize}; -use crate::types::{BlockHeight, PlonkyProofIntern}; +use crate::types::PlonkyProofIntern; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedTxnProof { diff --git a/plonky_block_proof_gen/src/types.rs b/plonky_block_proof_gen/src/types.rs index 71fdf6d6b..c46471feb 100644 --- a/plonky_block_proof_gen/src/types.rs +++ b/plonky_block_proof_gen/src/types.rs @@ -3,8 +3,6 @@ use plonky2::{ plonk::{config::PoseidonGoldilocksConfig, proof::ProofWithPublicInputs}, }; -pub type BlockHeight = u64; - pub type PlonkyProofIntern = ProofWithPublicInputs<GoldilocksField, PoseidonGoldilocksConfig, 2>; pub type AllRecursiveCircuits = plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits< From 39c89a9691353251fb0988376fa8960d76cc7d30 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 1 Dec 2023 21:05:13 -0500 Subject: [PATCH 157/208] Update README --- plonky_block_proof_gen/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plonky_block_proof_gen/README.md b/plonky_block_proof_gen/README.md index 3919b0a8a..d7ef9e363 100644 --- a/plonky_block_proof_gen/README.md +++ b/plonky_block_proof_gen/README.md @@ -23,9 +23,9 @@ pub struct BlockHashes { ``` Note that `prev_hashes` is going to be `256` elements long (!) most of the time. -`generate_txn_proof` takes in the output from the parser lib (`TxnProofGenIR`) along with some constant block data. +`generate_txn_proof` takes in the output from the parser lib (`TxnProofGenIR`). -`generate_agg_proof` takes in the two child proofs (wrapped in `AggregatableProof`` to support txn or agg proofs) & constant block data. +`generate_agg_proof` takes in the two child proofs (wrapped in `AggregatableProof` to support txn or agg proofs). `generate_block_proof` is a bit less obvious. You give it an agg proof that contains all txns in the entire block, but also pass in an optional previous block proof. The previous block proof may be `None` on checkpoint heights.
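To make the interplay of these three methods concrete, here is a minimal sketch of proving a two-transaction block at a checkpoint height. This is not part of the patches above: `p_state`, `ir_0` and `ir_1` are hypothetical names for an already-built `ProverState` and two `TxnProofGenIR` payloads obtained from the parser lib, and error handling is elided.

```rust
// Minimal sketch under the assumptions stated above.
let txn_0 = generate_txn_proof(&p_state, ir_0)?;
let txn_1 = generate_txn_proof(&p_state, ir_1)?;

// Child proofs are wrapped in `AggregatableProof` so that transaction and
// aggregation proofs can be combined interchangeably.
let agg = generate_agg_proof(
    &p_state,
    &AggregatableProof::Txn(txn_0),
    &AggregatableProof::Txn(txn_1),
)?;

// The parent block proof may be `None` since this block sits at a
// checkpoint height.
let block_proof = generate_block_proof(&p_state, None, &agg)?;
```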
From 91fc7d11e258a18088ecc4debb499d4311281431 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Sun, 3 Dec 2023 18:09:23 -0500 Subject: [PATCH 158/208] Add documentation --- plonky_block_proof_gen/src/lib.rs | 95 ++++++++++++++++++++++ plonky_block_proof_gen/src/proof_gen.rs | 17 ++-- plonky_block_proof_gen/src/proof_types.rs | 20 +++++ plonky_block_proof_gen/src/prover_state.rs | 8 ++ plonky_block_proof_gen/src/types.rs | 6 ++ 5 files changed, 140 insertions(+), 6 deletions(-) diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index 849159860..585be4dd9 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -1,3 +1,98 @@ +//! This library is intended to generate proofs with the [plonky2 zkEVM](https://github.com/0xPolygonZero/plonky2/evm), given +//! transactions provided in Intermediate Representation (IR) format. +//! +//! The exact format of this IR is defined by the [GenerationInputs](https://github.com/0xPolygonZero/plonky2/evm/src/generation/mod.rs) +//! used by the zkEVM prover, containing an RLP-encoded transaction along with +//! state metadata prior to and after execution of this transaction. +//! +//! # Usage +//! +//! First, a prover needs to initialize its `ProverState`. For this, one can +//! use the `ProverStateBuilder`, which contains the ranges to be used by all +//! internal STARK tables of the zkEVM. +//! +//! The default method contains an initial set of ranges for each table, which +//! can be overridden at will by calling +//! `ProverStateBuilder::set_foo_circuit_size`, where `foo` is the name of the +//! targeted table. At the moment, the plonky2 zkEVM contains seven tables: +//! `arithmetic`, `byte_packing`, `cpu`, `keccak`, `keccak_sponge`, `logic` and +//! `memory`. +//! +//! ```rust +//! let mut builder = ProverStateBuilder::default(); +//! +//! // Change the supported ranges of the Cpu and Memory tables. +//! builder = builder.set_cpu_circuit_size(12..25); +//! builder = builder.set_memory_circuit_size(18..28); +//! +//! // Generate a `ProverState` from the builder. +//! let prover_state = builder.build(); +//! ``` +//! +//! ***NOTE***: All the circuits to generate the different kinds of proofs, from +//! transaction proofs to block proofs, are specific to the initial set of +//! ranges selected for each table. Changing one of them will require building a +//! new `ProverState`, and will make all previously generated proofs +//! incompatible with the new state. Make sure you select sufficiently large +//! ranges for your application! +//! +//! Once all circuits have been pre-processed, a prover can generate proofs +//! from inputs passed in Intermediate Representation. +//! +//! This library handles the three kinds of proof generation necessary for the +//! zkEVM: +//! +//! ### Transaction proofs +//! +//! From a `ProverState` and a transaction processed with some metadata in +//! Intermediate Representation, one can obtain a transaction proof by calling +//! the method below: +//! +//! ```rust +//! pub fn generate_txn_proof( +//! p_state: &ProverState, +//! start_info: TxnProofGenIR, +//! ) -> ProofGenResult<GeneratedTxnProof> { ... } +//! ``` +//! +//! The obtained `GeneratedTxnProof` contains the actual proof and some +//! additional data to be used when aggregating this transaction with others. +//! +//! ### Aggregation proofs +//! +//! Two proofs can be aggregated together with a `ProverState`. These `child` +//! proofs can either be transaction proofs or aggregation proofs themselves.
+//! This library abstracts their type behind an `AggregatableProof` enum. +//! +//! ```rust +//! pub fn generate_agg_proof( +//! p_state: &ProverState, +//! lhs_child: &AggregatableProof, +//! rhs_child: &AggregatableProof, +//! ) -> ProofGenResult<GeneratedAggProof> { ... } +//! ``` +//! +//! ### Block proofs +//! +//! Once the prover has obtained a `GeneratedAggProof` corresponding to the +//! entire set of transactions within a block, they can then wrap it into a +//! final `GeneratedBlockProof`. The prover can pass an optional previous +//! block proof as argument to the `generate_block_proof` method, to combine +//! both statements into one, effectively proving an entire chain from genesis +//! through a single final proof. +//! +//! ```rust +//! pub fn generate_block_proof( +//! p_state: &ProverState, +//! prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, +//! curr_block_agg_proof: &GeneratedAggProof, +//! ) -> ProofGenResult<GeneratedBlockProof> { ... } +//! ``` + +#![cfg_attr(docsrs, feature(doc_cfg))] +#![deny(rustdoc::broken_intra_doc_links)] +#![deny(missing_docs)] + pub mod proof_gen; pub mod proof_types; pub mod prover_state; diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index f91583ffe..2ae989bdc 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,3 +1,6 @@ +//! This module defines the proof generation methods corresponding to the three +//! types of proofs the zkEVM internally handles. + use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; use proof_protocol_decoder::types::TxnProofGenIR; @@ -7,8 +10,10 @@ use crate::{ prover_state::ProverState, }; +/// A type alias for `Result<T, ProofGenError>`. pub type ProofGenResult<T> = Result<T, ProofGenError>; +/// A custom error type to handle failure cases during proof generation. // Plonky2 is still using `anyhow` for proof gen, and since this is a library, // it's probably best if we at least convert it to a `String`. #[derive(Debug)] @@ -28,7 +33,7 @@ impl From<String> for ProofGenError } } -/// Generate a txn proof from proof IR data. +/// Generates a transaction proof from some IR data. pub fn generate_txn_proof( p_state: &ProverState, start_info: TxnProofGenIR, @@ -46,9 +51,9 @@ pub fn generate_txn_proof( Ok(GeneratedTxnProof { p_vals, intern }) } -/// Generate a agg proof from two child proofs. +/// Generates an aggregation proof from two child proofs. /// -/// Note that the child proofs may be either txn or agg proofs. +/// Note that the child proofs may be either transaction or aggregation proofs. pub fn generate_agg_proof( p_state: &ProverState, lhs_child: &AggregatableProof, @@ -69,10 +74,10 @@ pub fn generate_agg_proof( Ok(GeneratedAggProof { p_vals, intern }) } -/// Generate a block proof. +/// Generates a block proof. /// -/// Note that `prev_opt_parent_b_proof` is able to be `None` on checkpoint -/// heights. +/// It takes an optional argument, `prev_opt_parent_b_proof`, which can be set to +/// `None` on checkpoint heights. pub fn generate_block_proof( p_state: &ProverState, prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 84a249ed1..20a7d71d5 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -1,24 +1,42 @@ +//! This module defines the various proof types used throughout the block proof +//! generation process.
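+//!
+//! As an illustrative sketch (the variable names here are hypothetical), a
+//! generated proof is wrapped into the `AggregatableProof` enum defined below
+//! before being handed to `generate_agg_proof`:
+//!
+//! ```rust,ignore
+//! let lhs = AggregatableProof::Txn(some_txn_proof);
+//! let rhs = AggregatableProof::Agg(some_agg_proof);
+//! let combined = generate_agg_proof(&p_state, &lhs, &rhs)?;
+//! ```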
+ use plonky2_evm::proof::PublicValues; use proof_protocol_decoder::types::BlockHeight; use serde::{Deserialize, Serialize}; use crate::types::PlonkyProofIntern; +/// A transaction proof along with its public values, for proper connection with +/// contiguous proofs. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedTxnProof { + /// Public values of this transaction proof. pub p_vals: PublicValues, + /// Underlying plonky2 proof. pub intern: PlonkyProofIntern, } +/// An aggregation proof along with its public values, for proper connection +/// with contiguous proofs. +/// +/// Aggregation proofs can represent any contiguous range of two or more +/// transactions, up to an entire block. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedAggProof { + /// Public values of this aggregation proof. pub p_vals: PublicValues, + /// Underlying plonky2 proof. pub intern: PlonkyProofIntern, } +/// A block proof along with its associated block height, attesting to the +/// chain's validity since the last proof checkpoint. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedBlockProof { + /// Associated block height. pub b_height: BlockHeight, + /// Underlying plonky2 proof. pub intern: PlonkyProofIntern, } @@ -27,7 +45,9 @@ /// away whether or not the proof was a txn or agg proof. #[derive(Clone, Debug, Deserialize, Serialize)] pub enum AggregatableProof { + /// The underlying proof is a transaction proof. Txn(GeneratedTxnProof), + /// The underlying proof is an aggregation proof. Agg(GeneratedAggProof), } diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index aabb2b167..7cb601ac1 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -1,3 +1,8 @@ +//! This module defines the `ProverState`, which contains all pre-processed +//! circuits necessary to handle arbitrary transaction proving and proof +//! aggregation to generate succinct block proofs attesting to the validity of +//! an entire EVM-based chain. + use std::ops::Range; use log::info; @@ -9,6 +14,7 @@ use crate::types::AllRecursiveCircuits; /// Plonky2 proving state. Note that this is generally going to be massive in /// terms of memory and has a long spin-up time. pub struct ProverState { + /// The set of pre-processed circuits to recursively prove transactions. pub state: AllRecursiveCircuits, } @@ -44,6 +50,8 @@ impl Default for ProverStateBuilder { macro_rules! define_set_circuit_size_method { ($name:ident) => { paste! { + /// Specifies a range of degrees to be supported for this STARK + /// table's associated recursive circuits. pub fn [<set_ $name _circuit_size>](mut self, size: Range<usize>) -> Self { self.[<$name _circuit_size>] = size; self diff --git a/plonky_block_proof_gen/src/types.rs b/plonky_block_proof_gen/src/types.rs index c46471feb..13449845c 100644 --- a/plonky_block_proof_gen/src/types.rs +++ b/plonky_block_proof_gen/src/types.rs @@ -1,10 +1,16 @@ +//! This module contains type aliases and a custom `Error` definition for +//! convenient proof generation. + use plonky2::{ field::goldilocks_field::GoldilocksField, plonk::{config::PoseidonGoldilocksConfig, proof::ProofWithPublicInputs}, }; +/// A type alias for proofs generated by the zkEVM. pub type PlonkyProofIntern = ProofWithPublicInputs<GoldilocksField, PoseidonGoldilocksConfig, 2>; +/// A type alias for the set of preprocessed circuits necessary to generate +/// succinct block proofs.
pub type AllRecursiveCircuits = plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits< GoldilocksField, PoseidonGoldilocksConfig, From b1c29cf5050c4c563ed5f67bdcf0743be86ec993 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 11 Dec 2023 15:10:54 -0700 Subject: [PATCH 159/208] Moved the protocol src into its own sub-crate --- Cargo.toml | 30 +- README.md | 2 +- protocol_decoder/Cargo.lock | 1778 +++++++++++++++++ protocol_decoder/Cargo.toml | 27 + .../src}/compact/compact_debug_tools.rs | 0 .../compact/compact_prestate_processing.rs | 0 .../src}/compact/compact_to_partial_trie.rs | 0 .../src}/compact/complex_test_payloads.rs | 0 .../large_test_payloads/test_payload_5.txt | 0 {src => protocol_decoder/src}/compact/mod.rs | 0 {src => protocol_decoder/src}/decoding.rs | 0 .../src}/deserializers.rs | 0 {src => protocol_decoder/src}/lib.rs | 0 .../src}/processed_block_trace.rs | 0 .../src}/proof_gen_types.rs | 0 .../src}/trace_protocol.rs | 0 {src => protocol_decoder/src}/types.rs | 0 {src => protocol_decoder/src}/utils.rs | 0 18 files changed, 1809 insertions(+), 28 deletions(-) create mode 100644 protocol_decoder/Cargo.lock create mode 100644 protocol_decoder/Cargo.toml rename {src => protocol_decoder/src}/compact/compact_debug_tools.rs (100%) rename {src => protocol_decoder/src}/compact/compact_prestate_processing.rs (100%) rename {src => protocol_decoder/src}/compact/compact_to_partial_trie.rs (100%) rename {src => protocol_decoder/src}/compact/complex_test_payloads.rs (100%) rename {src => protocol_decoder/src}/compact/large_test_payloads/test_payload_5.txt (100%) rename {src => protocol_decoder/src}/compact/mod.rs (100%) rename {src => protocol_decoder/src}/decoding.rs (100%) rename {src => protocol_decoder/src}/deserializers.rs (100%) rename {src => protocol_decoder/src}/lib.rs (100%) rename {src => protocol_decoder/src}/processed_block_trace.rs (100%) rename {src => protocol_decoder/src}/proof_gen_types.rs (100%) rename {src => protocol_decoder/src}/trace_protocol.rs (100%) rename {src => protocol_decoder/src}/types.rs (100%) rename {src => protocol_decoder/src}/utils.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index acdff58a2..d64036269 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,27 +1,3 @@ -[package] -name = "proof_protocol_decoder" -authors = ["Polygon Zero "] -version = "0.1.0" -edition = "2021" - -[dependencies] -bytes = "1.5.0" -ciborium = "0.2.1" -ciborium-io = "0.2.1" -enum-as-inner = "0.6.0" -enumn = "0.1.12" -eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } -ethereum-types = "0.14.1" -hex-literal = "0.4.1" -hex = "0.4.3" -keccak-hash = "0.10.0" -log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "32d009671a1af86312807ba2dc90e9bb4f4a94da" } -thiserror = "1.0.49" -rlp = "0.5.2" -rlp-derive = "0.1.0" -serde = "1.0.166" -serde_with = "3.4.0" - -[dev-dependencies] -pretty_env_logger = "0.5.0" +[workspace] +members = ["protocol_decoder"] +resolver = "2" diff --git a/README.md b/README.md index a8ab7c55b..cfa23072d 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ A flexible protocol that clients (eg. full nodes) can use to easily generate blo ## Specification -Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. The specification itself is in the repo [here](src/trace_protocol.rs). 
+Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. The specification itself is in the repo [here](protocol_decoder/src/trace_protocol.rs). Because processing the incoming proof protocol payload is not a resource bottleneck, the design is not worrying too much about performance. Instead, the core focus is flexibility in clients creating their own implementation, where the protocol supports multiple ways to provide different pieces of data. For example, there are multiple different formats available to provide the trie pre-images in, and the implementor can choose whichever is closest to its own internal data structures. diff --git a/protocol_decoder/Cargo.lock b/protocol_decoder/Cargo.lock new file mode 100644 index 000000000..9d9646e96 --- /dev/null +++ b/protocol_decoder/Cargo.lock @@ -0,0 +1,1778 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "ahash" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +dependencies = [ + "cfg-if", + "const-random", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "base64" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" 
+dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.48.5", +] + +[[package]] +name = "ciborium" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "const-random" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aaf16c9c2c612020bcfd042e170f6e32de9b9d75adb5277cdbbd2e2c8c8299a" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +dependencies = [ + "libc", +] + +[[package]] +name = 
"crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.40", +] + +[[package]] +name = "darling_macro" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "deranged" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "enum-as-inner" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "enum-as-inner" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "enumn" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2ad8cef1d801a4686bfd8919f0b30eac4c8e48968c437a6405ded4fb5272d2b" +dependencies = [ + 
"proc-macro2", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "env_logger" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "eth_trie_utils" +version = "0.6.0" +source = "git+https://github.com/0xPolygonZero/eth_trie_utils.git?rev=e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5#e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" +dependencies = [ + "bytes", + "enum-as-inner 0.5.1", + "ethereum-types", + "hex", + "keccak-hash 0.10.0", + "log", + "num-traits", + "parking_lot", + "rlp", + "serde", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" +dependencies = [ + "crunchy", + "fixed-hash 0.8.0", + "impl-rlp", + "impl-serde", + "tiny-keccak", +] + +[[package]] +name = "ethereum-types" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" +dependencies = [ + "ethbloom", + "fixed-hash 0.8.0", + "impl-rlp", + "impl-serde", + "primitive-types 0.12.2", + "uint", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "static_assertions", +] + +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +dependencies = [ + "ahash", + "allocator-api2", + "rayon", + "serde", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "iana-time-zone" +version = "0.1.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = "impl-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", + "serde", +] + +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "jemalloc-sys" +version = "0.5.4+5.3.0-patched" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac6c1946e1cea1788cbfde01c993b52a10e2da07f4bac608228d1bed20bfebf2" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "jemallocator" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0de374a9f8e63150e6f5e8a60cc14c668226d7a347d8aee1a45766e3c4dd3bc" +dependencies = [ + "jemalloc-sys", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "keccak-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce2bd4c29270e724d3eaadf7bdc8700af4221fc0ed771b855eadcd1b98d52851" +dependencies = [ + "primitive-types 0.10.1", + "tiny-keccak", +] + +[[package]] +name = "keccak-hash" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b286e6b663fb926e1eeb68528e69cb70ed46c6d65871a21b2215ae8154c6d3c" +dependencies = [ + "primitive-types 0.12.2", + "tiny-keccak", +] + +[[package]] +name = "libc" +version = "0.2.151" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" + +[[package]] +name = "linux-raw-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" + +[[package]] +name = "lock_api" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +dependencies = [ + "autocfg", + "scopeguard", + "serde", +] + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", + "rand", +] + +[[package]] +name = "num-complex" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" +dependencies = [ + "num-traits", + "rand", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg", + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "parity-scale-codec" +version = "3.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "881331e34fa842a2fb61cc2db9643a8fedc615e47cfcc52597d1af0db9a7e8fe" +dependencies = [ + "arrayvec", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.5", +] + +[[package]] +name = "pest" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.5" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81d78524685f5ef2a3b3bd1cafbc9fcabb036253d9b1463e726a91cd16e2dfc2" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68bd1206e71118b5356dae5ddc61c8b11e28b09ef6a31acbd15ea48a28e0c227" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "pest_meta" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c747191d4ad9e4a4ab9c8798f1e82a39affe7ef9648390b7e5548d18e099de6" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + +[[package]] +name = "plonky2" +version = "0.1.4" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=32d009671a1af86312807ba2dc90e9bb4f4a94da#32d009671a1af86312807ba2dc90e9bb4f4a94da" +dependencies = [ + "ahash", + "anyhow", + "getrandom", + "hashbrown 0.14.3", + "itertools", + "keccak-hash 0.8.0", + "log", + "num", + "plonky2_field", + "plonky2_maybe_rayon", + "plonky2_util", + "rand", + "serde", + "serde_json", + "static_assertions", + "unroll", +] + +[[package]] +name = "plonky2_evm" +version = "0.1.1" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=32d009671a1af86312807ba2dc90e9bb4f4a94da#32d009671a1af86312807ba2dc90e9bb4f4a94da" +dependencies = [ + "anyhow", + "bytes", + "env_logger", + "eth_trie_utils", + "ethereum-types", + "hashbrown 0.14.3", + "hex-literal", + "itertools", + "jemallocator", + "keccak-hash 0.10.0", + "log", + "num", + "num-bigint", + "once_cell", + "pest", + "pest_derive", + "plonky2", + "plonky2_maybe_rayon", + "plonky2_util", + "rand", + "rand_chacha", + "rlp", + "rlp-derive", + "serde", + "serde_json", + "static_assertions", + "tiny-keccak", +] + +[[package]] +name = "plonky2_field" +version = "0.1.1" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=32d009671a1af86312807ba2dc90e9bb4f4a94da#32d009671a1af86312807ba2dc90e9bb4f4a94da" +dependencies = [ + "anyhow", + "itertools", + "num", + "plonky2_util", + "rand", + "serde", + "static_assertions", + "unroll", +] + +[[package]] +name = "plonky2_maybe_rayon" +version = "0.1.1" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=32d009671a1af86312807ba2dc90e9bb4f4a94da#32d009671a1af86312807ba2dc90e9bb4f4a94da" +dependencies = [ + "rayon", +] + +[[package]] +name = "plonky2_util" +version = "0.1.1" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=32d009671a1af86312807ba2dc90e9bb4f4a94da#32d009671a1af86312807ba2dc90e9bb4f4a94da" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "pretty_env_logger" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "865724d4dbe39d9f3dd3b52b88d859d66bcb2d6a0acfd5ea68a65fb66d4bdc1c" +dependencies = [ + "env_logger", + "log", +] + +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash 0.7.0", + "uint", +] + 
+[[package]] +name = "primitive-types" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash 0.8.0", + "impl-codec", + "impl-rlp", + "impl-serde", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97dc5fea232fc28d2f597b37c4876b348a40e33f3b02cc975c8d006d78d94b1a" +dependencies = [ + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proof_protocol_decoder" +version = "0.1.0" +dependencies = [ + "bytes", + "ciborium", + "ciborium-io", + "enum-as-inner 0.6.0", + "enumn", + "eth_trie_utils", + "ethereum-types", + "hex", + "hex-literal", + "keccak-hash 0.10.0", + "log", + "plonky2_evm", + "pretty_env_logger", + "rlp", + "rlp-derive", + "serde", + "serde_with", + "thiserror", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes", + "rustc-hex", +] + +[[package]] +name = "rlp-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustix" +version = "0.38.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "ryu" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.193" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.193" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "serde_json" +version = "1.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +dependencies = [ + "base64", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.1.0", + "serde", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "smallvec" +version = "1.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "termcolor" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] + +[[package]] +name = "time" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +dependencies = [ + "deranged", + "itoa", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +dependencies = [ + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "toml_datetime" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" + +[[package]] +name = "toml_edit" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" +dependencies = [ + "indexmap 2.1.0", + "toml_datetime", + "winnow", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unroll" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad948c1cb799b1a70f836077721a92a35ac177d4daddf4c20a633786d4cf618" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.40", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", 
+] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "winnow" +version = "0.5.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb877ca3232bec99a6472ed63f7241de2a250165260908b2d24c09d867907a85" +dependencies = [ + "memchr", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "zerocopy" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] diff --git a/protocol_decoder/Cargo.toml b/protocol_decoder/Cargo.toml new file mode 100644 index 000000000..550c7d781 --- /dev/null +++ b/protocol_decoder/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "protocol_decoder" +authors = ["Polygon Zero "] +version = "0.1.0" +edition = "2021" + +[dependencies] +bytes = "1.5.0" +ciborium = "0.2.1" +ciborium-io = "0.2.1" +enum-as-inner = "0.6.0" +enumn = "0.1.12" +eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } +ethereum-types = "0.14.1" +hex-literal = "0.4.1" +hex = "0.4.3" +keccak-hash = "0.10.0" +log = "0.4.20" +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "32d009671a1af86312807ba2dc90e9bb4f4a94da" } +thiserror = "1.0.49" +rlp = "0.5.2" +rlp-derive = "0.1.0" +serde = "1.0.166" +serde_with = "3.4.0" + +[dev-dependencies] +pretty_env_logger = "0.5.0" diff --git a/src/compact/compact_debug_tools.rs b/protocol_decoder/src/compact/compact_debug_tools.rs similarity index 100% rename from src/compact/compact_debug_tools.rs rename to protocol_decoder/src/compact/compact_debug_tools.rs diff --git a/src/compact/compact_prestate_processing.rs 
b/protocol_decoder/src/compact/compact_prestate_processing.rs similarity index 100% rename from src/compact/compact_prestate_processing.rs rename to protocol_decoder/src/compact/compact_prestate_processing.rs diff --git a/src/compact/compact_to_partial_trie.rs b/protocol_decoder/src/compact/compact_to_partial_trie.rs similarity index 100% rename from src/compact/compact_to_partial_trie.rs rename to protocol_decoder/src/compact/compact_to_partial_trie.rs diff --git a/src/compact/complex_test_payloads.rs b/protocol_decoder/src/compact/complex_test_payloads.rs similarity index 100% rename from src/compact/complex_test_payloads.rs rename to protocol_decoder/src/compact/complex_test_payloads.rs diff --git a/src/compact/large_test_payloads/test_payload_5.txt b/protocol_decoder/src/compact/large_test_payloads/test_payload_5.txt similarity index 100% rename from src/compact/large_test_payloads/test_payload_5.txt rename to protocol_decoder/src/compact/large_test_payloads/test_payload_5.txt diff --git a/src/compact/mod.rs b/protocol_decoder/src/compact/mod.rs similarity index 100% rename from src/compact/mod.rs rename to protocol_decoder/src/compact/mod.rs diff --git a/src/decoding.rs b/protocol_decoder/src/decoding.rs similarity index 100% rename from src/decoding.rs rename to protocol_decoder/src/decoding.rs diff --git a/src/deserializers.rs b/protocol_decoder/src/deserializers.rs similarity index 100% rename from src/deserializers.rs rename to protocol_decoder/src/deserializers.rs diff --git a/src/lib.rs b/protocol_decoder/src/lib.rs similarity index 100% rename from src/lib.rs rename to protocol_decoder/src/lib.rs diff --git a/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs similarity index 100% rename from src/processed_block_trace.rs rename to protocol_decoder/src/processed_block_trace.rs diff --git a/src/proof_gen_types.rs b/protocol_decoder/src/proof_gen_types.rs similarity index 100% rename from src/proof_gen_types.rs rename to protocol_decoder/src/proof_gen_types.rs diff --git a/src/trace_protocol.rs b/protocol_decoder/src/trace_protocol.rs similarity index 100% rename from src/trace_protocol.rs rename to protocol_decoder/src/trace_protocol.rs diff --git a/src/types.rs b/protocol_decoder/src/types.rs similarity index 100% rename from src/types.rs rename to protocol_decoder/src/types.rs diff --git a/src/utils.rs b/protocol_decoder/src/utils.rs similarity index 100% rename from src/utils.rs rename to protocol_decoder/src/utils.rs From fe44ef78336c7d54b1ae5abe0b6605c48ecac698 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 12 Dec 2023 10:06:54 -0700 Subject: [PATCH 160/208] Finished integration of `plonky_block_proof_gen` --- Cargo.toml | 8 +++++++- plonky_block_proof_gen/Cargo.toml | 16 ++++++++++++++++ plonky_block_proof_gen/src/proof_gen.rs | 2 +- plonky_block_proof_gen/src/proof_types.rs | 2 +- protocol_decoder/Cargo.lock | 2 +- protocol_decoder/Cargo.toml | 8 ++++---- protocol_decoder/src/decoding.rs | 9 --------- 7 files changed, 30 insertions(+), 17 deletions(-) create mode 100644 plonky_block_proof_gen/Cargo.toml diff --git a/Cargo.toml b/Cargo.toml index d64036269..74ce4d4c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,9 @@ [workspace] -members = ["protocol_decoder"] +members = ["plonky_block_proof_gen", "protocol_decoder"] resolver = "2" + +[workspace.dependencies] +ethereum-types = "0.14.1" +log = "0.4.20" +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "7efd147e0888c5c6754a4d7ee2691a2ff5c82072" } +serde = "1.0.166" 
diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml new file mode 100644 index 000000000..266152458 --- /dev/null +++ b/plonky_block_proof_gen/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "plonky_block_proof_gen" +description = "Generates block proofs from zero proof IR." +version = "0.1.0" +authors = ["Polygon Zero "] +edition = "2021" +license = "MIT OR Apache-2.0" + +[dependencies] +ethereum-types = { workspace = true } +log = { workspace = true } +paste = "1.0.14" +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "7efd147e0888c5c6754a4d7ee2691a2ff5c82072" } +plonky2_evm = { workspace = true } +protocol_decoder = { path = "../protocol_decoder" } +serde = { workspace = true } diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 2ae989bdc..952ca6eb6 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -3,7 +3,7 @@ use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; -use proof_protocol_decoder::types::TxnProofGenIR; +use protocol_decoder::types::TxnProofGenIR; use crate::{ proof_types::{AggregatableProof, GeneratedAggProof, GeneratedBlockProof, GeneratedTxnProof}, diff --git a/plonky_block_proof_gen/src/proof_types.rs b/plonky_block_proof_gen/src/proof_types.rs index 20a7d71d5..ce2a58252 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/plonky_block_proof_gen/src/proof_types.rs @@ -2,7 +2,7 @@ //! generation process. use plonky2_evm::proof::PublicValues; -use proof_protocol_decoder::types::BlockHeight; +use protocol_decoder::types::BlockHeight; use serde::{Deserialize, Serialize}; use crate::types::PlonkyProofIntern; diff --git a/protocol_decoder/Cargo.lock b/protocol_decoder/Cargo.lock index 9d9646e96..6f4974587 100644 --- a/protocol_decoder/Cargo.lock +++ b/protocol_decoder/Cargo.lock @@ -1082,7 +1082,7 @@ dependencies = [ ] [[package]] -name = "proof_protocol_decoder" +name = "protocol_decoder" version = "0.1.0" dependencies = [ "bytes", diff --git a/protocol_decoder/Cargo.toml b/protocol_decoder/Cargo.toml index 550c7d781..abda6c014 100644 --- a/protocol_decoder/Cargo.toml +++ b/protocol_decoder/Cargo.toml @@ -11,16 +11,16 @@ ciborium-io = "0.2.1" enum-as-inner = "0.6.0" enumn = "0.1.12" eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } -ethereum-types = "0.14.1" +ethereum-types = { workspace = true } hex-literal = "0.4.1" hex = "0.4.3" keccak-hash = "0.10.0" -log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "32d009671a1af86312807ba2dc90e9bb4f4a94da" } +log = { workspace = true } +plonky2_evm = { workspace = true } thiserror = "1.0.49" rlp = "0.5.2" rlp-derive = "0.1.0" -serde = "1.0.166" +serde = { workspace = true } serde_with = "3.4.0" [dev-dependencies] diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 1f7efa3f9..cb317d9f3 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -107,7 +107,6 @@ impl ProcessedBlockTrace { &other_data.b_data.b_meta.block_beneficiary, )?; - let addresses = Self::get_known_addresses_if_enabled(); let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; Self::apply_deltas_to_trie_state( @@ -137,7 +136,6 @@ impl ProcessedBlockTrace { contract_code: txn_info.contract_code_accessed, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: 
other_data.b_data.b_hashes.clone(), - addresses, }; let txn_proof_gen_ir = TxnProofGenIR { @@ -281,13 +279,6 @@ impl ProcessedBlockTrace { _ => (), } } - - // TODO: No idea how to implement this, so I'll come back to later... - /// If there are known addresses, return them here. - /// Only needed for debugging purposes. - fn get_known_addresses_if_enabled() -> Vec
{ - Vec::new() // TODO - } } impl StateTrieWrites { From 14da93176475d4f1ee6ade08adae6e00f66333e2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 12 Dec 2023 12:15:32 -0700 Subject: [PATCH 161/208] Fixed failing doc tests --- plonky_block_proof_gen/src/lib.rs | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index 585be4dd9..26c7d55ab 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -12,18 +12,20 @@ //! internal STARK tables of the zkEVM. //! //! The default method contains an initial set of ranges for each table, that -//! can be overriden at will by calling +//! can be overridden at will by calling //! `ProverStateBuilder::set_foo_circuit_size` where `foo` is the name of the //! targeted table. At the moment, plonky2 zkEVM contains seven tables: //! `arithmetic`, `byte_packing`, `cpu`, `keccak`, `keccak_sponge`, `logic` and //! `memory`. //! -//! ```rust +//! ```no_run +//! # use plonky_block_proof_gen::prover_state::ProverStateBuilder; //! let mut builder = ProverStateBuilder::default(); //! //! // Change Cpu and Memory tables supported ranges. -//! builder.set_cpu_circuit_size(12..25); -//! builder.set_cpu_circuit_size(18..28); +//! let builder = builder +//! .set_cpu_circuit_size(12..25) +//! .set_memory_circuit_size(18..28); //! //! // Generate a `ProverState` from the builder. //! let prover_state = builder.build(); @@ -48,7 +50,7 @@ //! Intermediate Representation, one can obtain a transaction proof by calling //! the method below: //! -//! ```rust +//! ```compile_fail //! pub fn generate_txn_proof( //! p_state: &ProverState, //! start_info: TxnProofGenIR, @@ -64,8 +66,8 @@ //! proofs can either be transaction proofs, or aggregated proofs themselves. //! This library abstracts their type behind an `AggregatableProof` enum. //! -//! ```rust -//! pub fn generate_agg_proof( +//! ```compile_fail +//! pub fn generate_agg_proof( //! p_state: &ProverState, //! lhs_child: &AggregatableProof, //! rhs_child: &AggregatableProof, @@ -81,7 +83,7 @@ //! both statement into one, effectively proving an entire chain from genesis //! through a single final proof. //! -//! ```rust +//! ```compile_fail //! pub fn generate_block_proof( //! p_state: &ProverState, //! prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, @@ -89,10 +91,6 @@ //! ) -> ProofGenResult { ... } //! ``` -#![cfg_attr(docsrs, feature(doc_cfg))] -#![deny(rustdoc::broken_intra_doc_links)] -#![deny(missing_docs)] - pub mod proof_gen; pub mod proof_types; pub mod prover_state; From 1e91c4b10c48f09208e175f7796cfb244b607a39 Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 12 Dec 2023 14:20:17 -0700 Subject: [PATCH 162/208] Requested PR changes for #9 --- plonky_block_proof_gen/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index 26c7d55ab..7786e59b7 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -51,7 +51,7 @@ //! the method below: //! //! ```compile_fail -//! pub fn generate_txn_proof( +//! pub fn generate_txn_proof( //! p_state: &ProverState, //! start_info: TxnProofGenIR, //! ) -> ProofGenResult { ... } @@ -84,7 +84,7 @@ //! through a single final proof. //! //! ```compile_fail -//! pub fn generate_block_proof( +//! pub fn generate_block_proof( //! p_state: &ProverState, //! prev_opt_parent_b_proof: Option<&GeneratedBlockProof>, //! 
curr_block_agg_proof: &GeneratedAggProof,

From ca8dde2fd65929c9ae7ba5242b1f2af2d940c67a Mon Sep 17 00:00:00 2001
From: Robin Salen
Date: Thu, 14 Dec 2023 15:54:21 +0100
Subject: [PATCH 163/208] Bump plonky2 and replace genesis with checkpoints

---
 Cargo.toml                              | 2 +-
 plonky_block_proof_gen/Cargo.toml       | 2 +-
 protocol_decoder/src/decoding.rs        | 4 ++--
 protocol_decoder/src/proof_gen_types.rs | 4 ++--
 protocol_decoder/src/types.rs           | 2 +-
 5 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 74ce4d4c5..aeb35bba6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,5 +5,5 @@ resolver = "2"
 [workspace.dependencies]
 ethereum-types = "0.14.1"
 log = "0.4.20"
-plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "7efd147e0888c5c6754a4d7ee2691a2ff5c82072" }
+plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "71dff6e9827f501bc59416dc25ce06c4aec030ab" }
 serde = "1.0.166"

diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml
index 266152458..181ad4345 100644
--- a/plonky_block_proof_gen/Cargo.toml
+++ b/plonky_block_proof_gen/Cargo.toml
@@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0"
 ethereum-types = { workspace = true }
 log = { workspace = true }
 paste = "1.0.14"
-plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "7efd147e0888c5c6754a4d7ee2691a2ff5c82072" }
+plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "71dff6e9827f501bc59416dc25ce06c4aec030ab" }
 plonky2_evm = { workspace = true }
 protocol_decoder = { path = "../protocol_decoder" }
 serde = { workspace = true }

diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs
index cb317d9f3..5c6285fd1 100644
--- a/protocol_decoder/src/decoding.rs
+++ b/protocol_decoder/src/decoding.rs
@@ -132,7 +132,7 @@ impl ProcessedBlockTrace {
              * it here...
*/ tries, trie_roots_after, - genesis_state_trie_root: other_data.genesis_state_trie_root, + checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, contract_code: txn_info.contract_code_accessed, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), @@ -350,7 +350,7 @@ fn create_dummy_gen_input( signed_txn: None, tries, trie_roots_after, - genesis_state_trie_root: other_data.genesis_state_trie_root, + checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), ..GenerationInputs::default() diff --git a/protocol_decoder/src/proof_gen_types.rs b/protocol_decoder/src/proof_gen_types.rs index c3a91ab81..7fb02287b 100644 --- a/protocol_decoder/src/proof_gen_types.rs +++ b/protocol_decoder/src/proof_gen_types.rs @@ -26,12 +26,12 @@ impl> From for ProofBeforeAndAfterDeltas { impl ProofBeforeAndAfterDeltas { pub fn into_extra_block_data( self, - genesis_state_trie_root: TrieRootHash, + checkpoint_state_trie_root: TrieRootHash, txn_start: TxnIdx, txn_end: TxnIdx, ) -> ExtraBlockData { ExtraBlockData { - genesis_state_trie_root, + checkpoint_state_trie_root, txn_number_before: txn_start.into(), txn_number_after: txn_end.into(), gas_used_before: self.gas_used_before, diff --git a/protocol_decoder/src/types.rs b/protocol_decoder/src/types.rs index c9a50a4e6..6d89c862e 100644 --- a/protocol_decoder/src/types.rs +++ b/protocol_decoder/src/types.rs @@ -57,7 +57,7 @@ pub struct TxnProofGenIR { #[derive(Clone, Debug, Deserialize, Serialize)] pub struct OtherBlockData { pub b_data: BlockLevelData, - pub genesis_state_trie_root: TrieRootHash, + pub checkpoint_state_trie_root: TrieRootHash, } /// Data that is specific to a block and is constant for all txns in a given From fc6b4c18225e23745b9ec42f9f5c33a89c4e665b Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 5 Dec 2023 11:51:29 -0700 Subject: [PATCH 164/208] Added a way to indicate that an account self-destructed to traces --- protocol_decoder/src/decoding.rs | 35 +++++++++++++++++-- protocol_decoder/src/processed_block_trace.rs | 8 +++++ protocol_decoder/src/trace_protocol.rs | 5 +++ 3 files changed, 45 insertions(+), 3 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index cb317d9f3..86392d1ae 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -202,6 +202,9 @@ impl ProcessedBlockTrace { meta: &TxnMetaState, txn_idx: TxnIdx, ) -> TraceParsingResult<()> { + // Used for some errors. Note that the clone is very cheap. + let state_trie_initial = trie_state.state.clone(); + for (hashed_acc_addr, storage_writes) in deltas.storage_writes { let storage_trie = trie_state .storage @@ -233,9 +236,7 @@ impl ProcessedBlockTrace { .get(val_k) .unwrap_or(&EMPTY_ACCOUNT_BYTES_RLPED); - let mut account: AccountRlp = rlp::decode(val_bytes).map_err(|err| { - TraceParsingError::AccountDecode(hex::encode(val_bytes), err.to_string()) - })?; + let mut account = account_from_rlped_bytes(val_bytes)?; s_trie_writes.apply_writes_to_state_node( &mut account, @@ -249,6 +250,29 @@ impl ProcessedBlockTrace { .insert(val_k, updated_account_bytes.to_vec()); } + // Remove any accounts that self-destructed. 
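+        // For each such account we first read and sanity-decode its state leaf (so a
+        // missing entry surfaces as a `NonExistentTrieEntry` error against the initial
+        // state root), then drop its storage trie, and finally delete the leaf itself.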
+ for hashed_addr in deltas.self_destructed_accounts { + let k = Nibbles::from_h256_be(hashed_addr); + + let account_data = trie_state.state.get(k).ok_or_else(|| { + TraceParsingError::NonExistentTrieEntry( + TrieType::State, + k, + state_trie_initial.hash(), + ) + })?; + let _account = account_from_rlped_bytes(account_data)?; + + trie_state + .storage + .remove(&hashed_addr) + .ok_or(TraceParsingError::MissingAccountStorageTrie(hashed_addr))?; + // TODO: Once the mechanism for resolving code hashes settles, we probably want + // to also delete the code hash mapping here as well... + + trie_state.state.delete(k); + } + let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); trie_state.txn.insert(txn_k, meta.txn_bytes()); @@ -447,3 +471,8 @@ fn create_trie_subset_wrapped( create_trie_subset(trie, accesses) .map_err(|_| TraceParsingError::MissingKeysCreatingSubPartialTrie(trie_type)) } + +fn account_from_rlped_bytes(bytes: &[u8]) -> TraceParsingResult { + rlp::decode(bytes) + .map_err(|err| TraceParsingError::AccountDecode(hex::encode(bytes), err.to_string())) +} diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index f311e39c5..a616af558 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -268,6 +268,13 @@ impl TxnInfo { } } } + + if trace + .self_destructed + .map_or(false, |self_destructed| self_destructed) + { + nodes_used_by_txn.self_destructed_accounts.push(hashed_addr); + } } let accounts_with_storage_accesses: HashSet<_> = HashSet::from_iter( @@ -339,6 +346,7 @@ pub(crate) struct NodesUsedByTxn { pub(crate) storage_writes: Vec<(Nibbles, Vec<(HashedStorageAddrNibbles, Vec)>)>, pub(crate) state_accounts_with_no_accesses_but_storage_tries: HashMap, + pub(crate) self_destructed_accounts: Vec, } #[derive(Debug)] diff --git a/protocol_decoder/src/trace_protocol.rs b/protocol_decoder/src/trace_protocol.rs index e89289798..a062a2cd2 100644 --- a/protocol_decoder/src/trace_protocol.rs +++ b/protocol_decoder/src/trace_protocol.rs @@ -175,6 +175,11 @@ pub struct TxnTrace { /// Contract code that this address accessed. #[serde(skip_serializing_if = "Option::is_none")] pub code_usage: Option, + + /// True if the account existed before this txn but self-destructed at the + /// end of this txn. + #[serde(skip_serializing_if = "Option::is_none")] + pub self_destructed: Option, } /// Contract code access type. Used by txn traces. 
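A minimal sketch (not part of the patch above) of how the new `self_destructed` flag is meant to be consumed, with a stand-in `Trace` struct in place of the full `TxnTrace`; it mirrors the `map_or` check added to `processed_block_trace.rs` in this patch:

```rust
// Stand-in for `TxnTrace`; only the field introduced by this patch is mirrored.
struct Trace {
    self_destructed: Option<bool>,
}

/// Collects the hashed addresses of accounts whose traces flag a self-destruct.
fn collect_self_destructed(traces: &[([u8; 32], Trace)]) -> Vec<[u8; 32]> {
    traces
        .iter()
        // A missing flag means "the account survived", which is what lets the
        // wire format use `skip_serializing_if = "Option::is_none"` above.
        .filter(|(_, trace)| trace.self_destructed.map_or(false, |sd| sd))
        .map(|(hashed_addr, _)| *hashed_addr)
        .collect()
}
```

The collected addresses correspond to `NodesUsedByTxn::self_destructed_accounts`, which `apply_deltas_to_trie_state` then uses to prune the state and storage tries as shown in the `decoding.rs` hunk above.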
From 700cc52e8206cd6f299b330f80e3aadebc959ae1 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Sat, 16 Dec 2023 18:49:49 +0100 Subject: [PATCH 165/208] Bump and add early-abort signaling --- Cargo.toml | 2 +- plonky_block_proof_gen/Cargo.toml | 2 +- plonky_block_proof_gen/src/proof_gen.rs | 4 ++++ 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index aeb35bba6..5dc71fd0a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,5 +5,5 @@ resolver = "2" [workspace.dependencies] ethereum-types = "0.14.1" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "71dff6e9827f501bc59416dc25ce06c4aec030ab" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "f8f6b07a3905185af302d58fb6b97c55d12e57be" } serde = "1.0.166" diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml index 181ad4345..6a8ca48d4 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/plonky_block_proof_gen/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0" ethereum-types = { workspace = true } log = { workspace = true } paste = "1.0.14" -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "71dff6e9827f501bc59416dc25ce06c4aec030ab" } +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "f8f6b07a3905185af302d58fb6b97c55d12e57be" } plonky2_evm = { workspace = true } protocol_decoder = { path = "../protocol_decoder" } serde = { workspace = true } diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 952ca6eb6..31424b144 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,6 +1,8 @@ //! This module defines the proof generation methods corresponding to the three //! types of proofs the zkEVM internally handles. +use std::sync::atomic::AtomicBool; + use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; use protocol_decoder::types::TxnProofGenIR; @@ -37,6 +39,7 @@ impl From for ProofGenError { pub fn generate_txn_proof( p_state: &ProverState, start_info: TxnProofGenIR, + abort_signal: Option>, ) -> ProofGenResult { let (intern, p_vals) = p_state .state @@ -45,6 +48,7 @@ pub fn generate_txn_proof( &StarkConfig::standard_fast_config(), start_info.gen_inputs, &mut TimingTree::default(), + abort_signal, ) .map_err(|err| err.to_string())?; From eb1e0443da76184e4f3a399cfb441e21ddfc20c7 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Sat, 16 Dec 2023 18:51:49 +0100 Subject: [PATCH 166/208] Missing import --- plonky_block_proof_gen/src/proof_gen.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 31424b144..7b403b657 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -1,7 +1,7 @@ //! This module defines the proof generation methods corresponding to the three //! types of proofs the zkEVM internally handles. 
-use std::sync::atomic::AtomicBool; +use std::sync::{atomic::AtomicBool, Arc}; use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; From 6672d35526dafe50ed0a2e726276b3f2bb41a51c Mon Sep 17 00:00:00 2001 From: BGluth Date: Tue, 5 Dec 2023 15:29:44 -0700 Subject: [PATCH 167/208] Assorted fixes --- .../compact/compact_prestate_processing.rs | 2 +- .../src/compact/compact_to_partial_trie.rs | 17 ++++ protocol_decoder/src/decoding.rs | 86 +++++++++++++++++-- protocol_decoder/src/processed_block_trace.rs | 8 +- 4 files changed, 104 insertions(+), 9 deletions(-) diff --git a/protocol_decoder/src/compact/compact_prestate_processing.rs b/protocol_decoder/src/compact/compact_prestate_processing.rs index 4b455c401..4363a8946 100644 --- a/protocol_decoder/src/compact/compact_prestate_processing.rs +++ b/protocol_decoder/src/compact/compact_prestate_processing.rs @@ -1419,7 +1419,7 @@ fn get_bytes_from_cursor(cursor: &mut C, cursor_start_pos: u64 t_bytes } -#[cfg(test)] +#[cfg(all(debug_tools, test))] mod tests { use eth_trie_utils::{nibbles::Nibbles, partial_trie::PartialTrie}; diff --git a/protocol_decoder/src/compact/compact_to_partial_trie.rs b/protocol_decoder/src/compact/compact_to_partial_trie.rs index 741eb5aa3..12f16d0fc 100644 --- a/protocol_decoder/src/compact/compact_to_partial_trie.rs +++ b/protocol_decoder/src/compact/compact_to_partial_trie.rs @@ -69,12 +69,19 @@ fn process_branch( branch: &[Option>], output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { + println!("Full node at {:x}", curr_key); + for (i, slot) in branch.iter().enumerate().take(16) { if let Some(child) = slot { // TODO: Seriously update `eth_trie_utils` to have a better API... let mut new_k = curr_key; new_k.push_nibble_back(i as Nibble); create_partial_trie_from_compact_node_rec(new_k, child, output)?; + } else { + println!( + "Full node child at {} is nil.", + curr_key.clone().merge_nibble(i as Nibble) + ); } } @@ -105,6 +112,8 @@ fn process_hash( // trie. 
p_trie.insert(curr_key, hash); + println!("Inserting hash node at {:x}", curr_key); + Ok(()) } @@ -116,6 +125,8 @@ fn process_leaf( ) -> CompactParsingResult<()> { let full_k = curr_key.merge_nibbles(leaf_key); + println!("Inserting {:x} as a leaf", full_k); + let l_val = match leaf_node_data { LeafNodeData::Value(v_bytes) => rlp::encode(&v_bytes.0).to_vec(), LeafNodeData::Account(acc_data) => { @@ -148,6 +159,12 @@ fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup( let c_hash = hash(c_bytes); output.code.insert(c_hash, c_bytes.clone()); + println!( + "ADDING ACCOUNT CODE HASH {:x} -- {}", + c_hash, + hex::encode(c_bytes) + ); + c_hash } Some(AccountNodeCode::HashNode(c_hash)) => *c_hash, diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 14c1cf78c..a14d6ca14 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -7,10 +7,10 @@ use std::{ use eth_trie_utils::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, Node, PartialTrie}, + trie_ops::ValOrHash, trie_subsets::create_trie_subset, }; -use ethereum_types::{Address, H256, U256}; -use log::trace; +use ethereum_types::{Address, H160, H256, U256}; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -93,6 +93,22 @@ impl ProcessedBlockTrace { ..Default::default() }; + println!("State trie initial contents:"); + for (k, v) in curr_block_tries.state.items() { + let v_str = match v { + ValOrHash::Val(v) => hex::encode(&v), + ValOrHash::Hash(h) => format!("{:x}", h), + }; + + println!("k: {} --> {}", k, v_str); + } + + println!("Initial state Root: {:x}", curr_block_tries.state.hash()); + + // let state_trie_json = + // serde_json::to_string_pretty(&curr_block_tries.state).unwrap(); + // println!("Initial state trie: {}", state_trie_json); + let mut tot_gas_used = U256::zero(); let mut txn_gen_inputs = self @@ -117,10 +133,9 @@ impl ProcessedBlockTrace { )?; let trie_roots_after = calculate_trie_input_hashes(&curr_block_tries); - trace!( + println!( "Protocol expected trie roots after txn {}: {:?}", - txn_idx, - trie_roots_after + txn_idx, trie_roots_after ); let gen_inputs = GenerationInputs { @@ -149,6 +164,13 @@ impl ProcessedBlockTrace { }) .collect::>>()?; + for (k, v) in curr_block_tries.state.items() { + if let Some(v) = v.as_val() { + let acc_data = rlp::decode::(v).unwrap(); + println!("(FULL) account data: {:x} --> {:#?}", k, acc_data); + } + } + Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, &other_data, @@ -168,6 +190,31 @@ impl ProcessedBlockTrace { nodes_used_by_txn.state_accesses.iter().cloned(), )?; + let s: Vec<_> = state_trie + .items() + .map(|(_k, v)| match v { + ValOrHash::Val(v) => format!("V - {}", hex::encode(v)), + ValOrHash::Hash(h) => format!("H - {:x}", h), + }) + .collect(); + + println!("Actual final sub state trie: {:#?}", s); + + println!( + "Querying the hash(H160::zero()) ({}):", + hash(H160::zero().as_bytes()) + ); + state_trie.get( + Nibbles::from_bytes_be( + &hex::decode("5380c7b7ae81a58eb98d9c78de4a1fd7fd9535fc953ed2be602daaa41767312a") + .unwrap(), + ) + .unwrap(), + ); + println!("DONE QUERY"); + + // println!("State partial trie: {}", s); + let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); // TODO: Replace cast once `eth_trie_utils` supports `into` for `usize... 
let transactions_trie = @@ -227,6 +274,9 @@ impl ProcessedBlockTrace { } } + // trie_state.state.insert(Nibbles::from_h256_be(hash(H160::zero().as_bytes())), + // ValOrHash::Val(EMPTY_ACCOUNT_BYTES_RLPED.to_vec())); + for (hashed_acc_addr, s_trie_writes) in deltas.state_writes { let val_k = Nibbles::from_h256_be(hashed_acc_addr); @@ -323,6 +373,30 @@ impl StateTrieWrites { } }; + if self.balance.is_some() + || self.nonce.is_some() + || self.code_hash.is_some() + || storage_root_hash_change.is_some() + { + println!("DELTA FOR {:x}", h_addr); + + if let Some(v) = self.balance { + println!("---- balance: {:x}", v); + } + + if let Some(v) = self.nonce { + println!("---- nonce: {:x}", v); + } + + if let Some(v) = self.code_hash { + println!("---- c_hash: {:x}", v); + } + + if let Some(v) = storage_root_hash_change { + println!("---- storage change: {:x}", v); + } + } + update_val_if_some(&mut state_node.balance, self.balance); update_val_if_some(&mut state_node.nonce, self.nonce); update_val_if_some(&mut state_node.storage_root, storage_root_hash_change); @@ -425,7 +499,7 @@ fn create_minimal_state_partial_trie( ) -> TraceParsingResult { create_trie_subset_wrapped( state_trie, - state_accesses.map(Nibbles::from_h256_be), + state_accesses.into_iter().map(Nibbles::from_h256_be), TrieType::State, ) } diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index a616af558..04d07857e 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -7,7 +7,9 @@ use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; use ethereum_types::U256; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; -use crate::compact::compact_prestate_processing::{process_compact_prestate, PartialTriePreImages}; +use crate::compact::compact_prestate_processing::{ + process_compact_prestate_debug, PartialTriePreImages, +}; use crate::decoding::TraceParsingResult; use crate::trace_protocol::{ BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, @@ -158,7 +160,7 @@ fn process_multiple_storage_tries( fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages { // TODO: Wrap in proper result type... - let out = process_compact_prestate(trie).unwrap(); + let out = process_compact_prestate_debug(trie).unwrap(); // TODO: Make this into a result... 
assert!(out.header.version_is_compatible(COMPATIBLE_HEADER_VERSION));
@@ -205,6 +207,8 @@ impl TxnInfo {
         let mut contract_code_accessed = create_empty_code_access_map();
 
         for (addr, trace) in self.traces {
+            println!("Addr {} --> {}", addr, hash(addr.as_bytes()));
+
             let hashed_addr = hash(addr.as_bytes());
 
             let storage_writes = trace.storage_written.unwrap_or_default();

From f9b7921fbc78ba6949a83284da822fb5fa800132 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Thu, 7 Dec 2023 13:56:24 -0700
Subject: [PATCH 168/208] Fixed some keys not parsing correctly sometimes

---
 .../compact/compact_prestate_processing.rs | 21 ++++++++++++++-----
 .../src/compact/complex_test_payloads.rs   |  5 +++++
 .../large_test_payloads/test_payload_6.txt |  1 +
 3 files changed, 22 insertions(+), 5 deletions(-)
 create mode 100644 protocol_decoder/src/compact/large_test_payloads/test_payload_6.txt

diff --git a/protocol_decoder/src/compact/compact_prestate_processing.rs b/protocol_decoder/src/compact/compact_prestate_processing.rs
index 4363a8946..e1b961b62 100644
--- a/protocol_decoder/src/compact/compact_prestate_processing.rs
+++ b/protocol_decoder/src/compact/compact_prestate_processing.rs
@@ -1338,11 +1338,15 @@ fn parse_to_instructions_and_bytes_for_instruction(
 fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles {
     let mut key = Nibbles::default();
 
+    if bytes.is_empty() {
+        return key;
+    }
+
     // I have no idea why Erigon is doing this with their keys, as I don't think
     // this is part of the yellow paper at all?
-    let is_just_term_byte = bytes.len() == 1 && bytes[0] == 0x10;
-    if is_just_term_byte {
-        return key;
+    if bytes.len() == 1 {
+        let low_nib = bytes[0] & 0b00001111;
+        key.push_nibble_back(low_nib);
     }
 
     let flags = bytes[0];
@@ -1355,7 +1359,7 @@ fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles {
         true => &bytes[1..],
     };
 
-    if actual_key_bytes.is_empty() || is_just_term_byte {
+    if actual_key_bytes.is_empty() {
         return key;
     }
 
@@ -1419,7 +1423,7 @@ fn get_bytes_from_cursor(cursor: &mut C, cursor_start_pos: u64
     t_bytes
 }
 
-#[cfg(all(debug_tools, test))]
+#[cfg(test)]
 mod tests {
     use eth_trie_utils::{nibbles::Nibbles, partial_trie::PartialTrie};
 
@@ -1428,6 +1432,7 @@
         compact_prestate_processing::ParserState,
         complex_test_payloads::{
             TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3, TEST_PAYLOAD_4, TEST_PAYLOAD_5,
+            TEST_PAYLOAD_6,
         },
     };
 
@@ -1520,4 +1525,10 @@ mod tests {
         init();
         TEST_PAYLOAD_5.parse_and_check_hash_matches_with_debug();
     }
+
+    #[test]
+    fn complex_payload_6() {
+        init();
+        TEST_PAYLOAD_6.parse_and_check_hash_matches_with_debug();
+    }
 }

diff --git a/protocol_decoder/src/compact/complex_test_payloads.rs b/protocol_decoder/src/compact/complex_test_payloads.rs
index db5196add..32d9fb56e 100644
--- a/protocol_decoder/src/compact/complex_test_payloads.rs
+++ b/protocol_decoder/src/compact/complex_test_payloads.rs
@@ -24,6 +24,11 @@ pub(crate) const TEST_PAYLOAD_5: TestProtocolInputAndRoot = TestProtocolInputAnd
     root_str: "2b5a703bdec53099c42d7575f8cd6db85d6f2226a04e98e966fcaef87868869b",
 };
 
+pub(crate) const TEST_PAYLOAD_6: TestProtocolInputAndRoot = TestProtocolInputAndRoot {
+    byte_str: include_str!("large_test_payloads/test_payload_6.txt"),
+    root_str: "135a0c66146c60d7f78049b3a3486aae3e155015db041a4650966e001f9ba301",
+};
+
 type ProcessCompactPrestateFn = fn(TrieCompact) -> CompactParsingResult;
 
 pub(crate) struct TestProtocolInputAndRoot {
diff --git a/protocol_decoder/src/compact/large_test_payloads/test_payload_6.txt b/protocol_decoder/src/compact/large_test_payloads/test_payload_6.txt
new file mode
100644 index 000000000..649cba743 --- /dev/null +++ b/protocol_decoder/src/compact/large_test_payloads/test_payload_6.txt @@ -0,0 +1 @@ +0103aa2f42ac8eed7b6fce9828f68d29f3734387c3508e9a71fa83068cdff475cee80605582103788cf291b5b859fa19c00375139aec4555453a73d38f743f3d355998748e3530070119056705582103f3cdd54b57d220a4f2ffeeeb77210e356f4699b920f51eb0ab65eb4703e1a130084a021e19e0c9bab2400000055821030d044c7d2a9f32182dfd85cb1fec836cf6bf2efb3036525cb40a18c30ab49450084a021e19e0c9bab240000003933962bff17c6b99fda5ad295fea19eb308d1bd22a38a2c555f9f2caa7f1595401410203b603564c85581d9f3165facdbd3edebd05417b132ec760ce26eba226ca210458039c8326a77a252c21dc92b0686e88e6d66ea220755656e6c78064cb005a71ba510558200223b1161ff8fc8050c76354ad4b4bf09a508ceee08982734f74864218cfa3c50701190cd8021901200383720048f7b2ebb31a126961df4d4098568afdfdb9c21dc60b11859c92dca712034cf46d1149e552c13d2faf68f2f2312c24b69745981cdd74b5d05cfdf655d5db03f8157fe9d80aacb95a2acacfb5367c05b749c46d1a4afc95d7dffcd68a8024df045914bf608060405234801561001057600080fd5b50600436106100725760003560e01c8063a18246e211610050578063a18246e2146100d5578063b7af3cdc146100dd578063e9dc6375146100f257610072565b80634aa4a4fc146100775780637e5af771146100955780639d7b0ea8146100b5575b600080fd5b61007f610105565b60405161008c9190611311565b60405180910390f35b6100a86100a3366004611017565b610129565b60405161008c9190611325565b6100c86100c3366004611057565b610148565b60405161008c9190611330565b6100c861028d565b6100e56102b1565b60405161008c9190611339565b6100e5610100366004611057565b6103d7565b7f000000000000000000000000d15f63c56aeceb772022eb0f3fec874b5157a36a81565b60006101358383610148565b61013f8584610148565b13949350505050565b60007f000000000000000000000000d15f63c56aeceb772022eb0f3fec874b5157a36a6001600160a01b0316836001600160a01b0316141561018d5750606319610287565b8160011415610283576001600160a01b03831673a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4814156101c4575061012c610287565b6001600160a01b03831673dac17f958d2ee523a2206206994597c13d831ec714156101f1575060c8610287565b6001600160a01b038316736b175474e89094c44da98b954eedeac495271d0f141561021e57506064610287565b6001600160a01b038316738daebade922df735c38c80c7ebd708af50815faa141561024c575060c719610287565b6001600160a01b038316732260fac5e5542a773aa44fbcfedf7c193bc2c599141561027b575061012b19610287565b506000610287565b5060005b92915050565b7f455448000000000000000000000000000000000000000000000000000000000081565b606060005b60208110801561031657507f455448000000000000000000000000000000000000000000000000000000000081602081106102ed57fe5b1a60f81b7fff000000000000000000000000000000000000000000000000000000000000001615155b15610323576001016102b6565b60008167ffffffffffffffff8111801561033c57600080fd5b506040519080825280601f01601f191660200182016040528015610367576020820181803683370190505b50905060005b828110156103d0577f455448000000000000000000000000000000000000000000000000000000000081602081106103a157fe5b1a60f81b8282815181106103b157fe5b60200101906001600160f81b031916908160001a90535060010161036d565b5091505090565b60606000806000806000876001600160a01b03166399fbab88886040518263ffffffff1660e01b815260040161040d9190611330565b6101806040518083038186803b15801561042657600080fd5b505afa15801561043a573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061045e91906111dc565b5050505050965096509650965096505050600061051c896001600160a01b031663c45a01556040518163ffffffff1660e01b815260040160206040518083038186803b1580156104ad57600080fd5b505afa1580156104c1573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906104e59190610ff4565b6040518060600160405280896001600160a01b03168152602001886001600160a01b
031681526020018762ffffff168152506108e7565b9050600061052d87876100a36109e3565b90506000811561053d578761053f565b865b90506000821561054f5787610551565b885b90506000846001600160a01b0316633850c7bd6040518163ffffffff1660e01b815260040160e06040518083038186803b15801561058e57600080fd5b505afa1580156105a2573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906105c69190611133565b505050505091505073f7012159bf761b312153e8c8d176932fe9aaa7ea63c49917d7604051806101c001604052808f8152602001866001600160a01b03168152602001856001600160a01b031681526020017f000000000000000000000000d15f63c56aeceb772022eb0f3fec874b5157a36a6001600160a01b0316876001600160a01b03161461065f5761065a876109e7565b610667565b6106676102b1565b81526020017f000000000000000000000000d15f63c56aeceb772022eb0f3fec874b5157a36a6001600160a01b0316866001600160a01b0316146106b3576106ae866109e7565b6106bb565b6106bb6102b1565b8152602001866001600160a01b031663313ce5676040518163ffffffff1660e01b815260040160206040518083038186803b1580156106f957600080fd5b505afa15801561070d573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061073191906111c2565b60ff168152602001856001600160a01b031663313ce5676040518163ffffffff1660e01b815260040160206040518083038186803b15801561077257600080fd5b505afa158015610786573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906107aa91906111c2565b60ff16815260200187151581526020018a60020b81526020018960020b81526020018460020b8152602001886001600160a01b031663d0c93a7c6040518163ffffffff1660e01b815260040160206040518083038186803b15801561080e57600080fd5b505afa158015610822573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906108469190611082565b60020b81526020018b62ffffff168152602001886001600160a01b03168152506040518263ffffffff1660e01b8152600401610882919061134c565b60006040518083038186803b15801561089a57600080fd5b505af41580156108ae573d6000803e3d6000fd5b505050506040513d6000823e601f3d908101601f191682016040526108d6919081019061109c565b9d9c50505050505050505050505050565b600081602001516001600160a01b031682600001516001600160a01b03161061090f57600080fd5b50805160208083015160409384015184516001600160a01b0394851681850152939091168385015262ffffff166060808401919091528351808403820181526080840185528051908301207fff0000000000000000000000000000000000000000000000000000000000000060a085015294901b6bffffffffffffffffffffffff191660a183015260b58201939093527fe34f199b19b2b4f47f68442619d555527d244f78a3297ea89325f843f87b8b5460d5808301919091528251808303909101815260f5909101909152805191012090565b4690565b60606000610a15837f95d89b4100000000000000000000000000000000000000000000000000000000610a3a565b9050805160001415610a3257610a2a83610c8f565b915050610a35565b90505b919050565b60408051600481526024810182526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fffffffff0000000000000000000000000000000000000000000000000000000085161781529151815160609360009384936001600160a01b03891693919290918291908083835b60208310610ad35780518252601f199092019160209182019101610ab4565b6001836020036101000a038019825116818451168082178552505050505050905001915050600060405180830381855afa9150503d8060008114610b33576040519150601f19603f3d011682016040523d82523d6000602084013e610b38565b606091505b5091509150811580610b4957508051155b15610b67576040518060200160405280600081525092505050610287565b805160201415610b9f576000818060200190516020811015610b8857600080fd5b50519050610b9581610c9c565b9350505050610287565b604081511115610c7757808060200190516020811015610bbe57600080fd5b8101908080516040519392919084640100000000821115610bde57600080fd5b908301906020820185811115610bf357600080fd5b82516401000
00000811182820188101715610c0d57600080fd5b82525081516020918201929091019080838360005b83811015610c3a578181015183820152602001610c22565b50505050905090810190601f168015610c675780820380516001836020036101000a031916815260200191505b5060405250505092505050610287565b50506040805160208101909152600081529392505050565b6060610a32826006610ddc565b604080516020808252818301909252606091600091906020820181803683370190505090506000805b6020811015610d3e576000858260208110610cdc57fe5b1a60f81b90507fff00000000000000000000000000000000000000000000000000000000000000811615610d355780848481518110610d1757fe5b60200101906001600160f81b031916908160001a9053506001909201915b50600101610cc5565b5060008167ffffffffffffffff81118015610d5857600080fd5b506040519080825280601f01601f191660200182016040528015610d83576020820181803683370190505b50905060005b82811015610dd357838181518110610d9d57fe5b602001015160f81c60f81b828281518110610db457fe5b60200101906001600160f81b031916908160001a905350600101610d89565b50949350505050565b606060028206158015610def5750600082115b8015610dfc575060288211155b610e6757604080517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601e60248201527f41646472657373537472696e675574696c3a20494e56414c49445f4c454e0000604482015290519081900360640190fd5b60008267ffffffffffffffff81118015610e8057600080fd5b506040519080825280601f01601f191660200182016040528015610eab576020820181803683370190505b5090506001600160a01b03841660005b60028504811015610f4f57600860138290030282901c600f600482901c1660f082168203610ee882610f59565b868560020281518110610ef757fe5b60200101906001600160f81b031916908160001a905350610f1781610f59565b868560020260010181518110610f2957fe5b60200101906001600160f81b031916908160001a9053505060019092019150610ebb9050565b5090949350505050565b6000600a8260ff161015610f7457506030810160f81b610a35565b506037810160f81b610a35565b8051610a358161149a565b8051600281900b8114610a3557600080fd5b80516fffffffffffffffffffffffffffffffff81168114610a3557600080fd5b805161ffff81168114610a3557600080fd5b805162ffffff81168114610a3557600080fd5b805160ff81168114610a3557600080fd5b600060208284031215611005578081fd5b81516110108161149a565b9392505050565b60008060006060848603121561102b578182fd5b83356110368161149a565b925060208401356110468161149a565b929592945050506040919091013590565b60008060408385031215611069578182fd5b82356110748161149a565b946020939093013593505050565b600060208284031215611093578081fd5b61101082610f8c565b6000602082840312156110ad578081fd5b815167ffffffffffffffff808211156110c4578283fd5b818401915084601f8301126110d7578283fd5b8151818111156110e357fe5b604051601f8201601f19168101602001838111828210171561110157fe5b604052818152838201602001871015611118578485fd5b61112982602083016020870161146a565b9695505050505050565b600080600080600080600060e0888a03121561114d578283fd5b87516111588161149a565b965061116660208901610f8c565b955061117460408901610fbe565b945061118260608901610fbe565b935061119060808901610fbe565b925061119e60a08901610fe3565b915060c088015180151581146111b2578182fd5b8091505092959891949750929550565b6000602082840312156111d3578081fd5b61101082610fe3565b6000806000806000806000806000806000806101808d8f0312156111fe578485fd5b8c516bffffffffffffffffffffffff81168114611219578586fd5b9b5061122760208e01610f81565b9a5061123560408e01610f81565b995061124360608e01610f81565b985061125160808e01610fd0565b975061125f60a08e01610f8c565b965061126d60c08e01610f8c565b955061127b60e08e01610f9e565b94506101008d015193506101208d0151925061129a6101408e01610f9e565b91506112a96101608e01610f9e565b90509295989b509295989b509295989b565b6001600160a01b03169052565b15159052565b60020b9052565b600081518084526112ed8160208601602086016114
6a565b601f01601f19169290920160200192915050565b62ffffff169052565b60ff169052565b6001600160a01b0391909116815260200190565b901515815260200190565b90815260200190565b60006020825261101060208301846112d5565b60006020825282516020830152602083015161136b60408401826112bb565b50604083015161137e60608401826112bb565b5060608301516101c080608085015261139b6101e08501836112d5565b91506080850151601f198584030160a08601526113b883826112d5565b92505060a08501516113cd60c086018261130a565b5060c08501516113e060e086018261130a565b5060e08501516101006113f5818701836112c8565b8601519050610120611409868201836112ce565b860151905061014061141d868201836112ce565b8601519050610160611431868201836112ce565b8601519050610180611445868201836112ce565b86015190506101a061145986820183611301565b8601519050610f4f858301826112bb565b60005b8381101561148557818101518382015260200161146d565b83811115611494576000848401525b50505050565b6001600160a01b03811681146114af57600080fd5b5056fea164736f6c6343000706000a06055821032a62a10d2b8a745c3d7263697d83842a787a23eb2afed33fd82228f39bcab04007011914bf03d285ca3f5bf71d4b10f8d17ef027b43851ab46e11c866b070a1c2caf9233448d055820021eec2b84f0ba344fd4b4d2f022469febe7a772c4789acfc119eb558ab1da3d0c114a021e19c798975968c5be055820021c32804e80f8eb1caba9250d655cc8fb0944322e45ab5060aa356b5803acfd084a021e19e0c9bab24000000219102005582002eb406e4787251ea4f45aca503809c48d756e62c88c9da568d3d5266255f923084a021e19e0c9bab2400000055820020a45a2e16569b92f8a0af38f2ac2fce62c4e0332ed2c47b1af1041344990ca084a021e19e0c9bab2400000035deadf02dd8344275283fee394945c5e15787054e0eef21f50c960fd913232970605582002417f50fc699ebb817e23468e114836fb4578b6281ced73df8cbbfefb4272430701191c86021982400325fb5c4b4424cf5ba456fc96196c8c0b565ac286019f34e8d44282d2bd51f133035fe2a8fd8e3f6b5ffc5fb25860a2bf32f6a32730b3edec6b63f779cb047e1e7a02199dff \ No newline at end of file From 85bbf4c515707f1e0706744b2f38c2408f19b87f Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 14 Dec 2023 10:32:25 -0700 Subject: [PATCH 169/208] Added a hack to handle Nibbles --> H256 conv - Handles an edge case where the first two bits are `0`. - Actual fix needs to be done `eth_trie_utils`. --- .../src/compact/compact_to_partial_trie.rs | 9 ++++++--- protocol_decoder/src/processed_block_trace.rs | 5 +++-- protocol_decoder/src/utils.rs | 13 +++++++++++++ 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/protocol_decoder/src/compact/compact_to_partial_trie.rs b/protocol_decoder/src/compact/compact_to_partial_trie.rs index 12f16d0fc..f4e7e0be6 100644 --- a/protocol_decoder/src/compact/compact_to_partial_trie.rs +++ b/protocol_decoder/src/compact/compact_to_partial_trie.rs @@ -4,7 +4,6 @@ use eth_trie_utils::{ nibbles::{Nibble, Nibbles}, partial_trie::{HashedPartialTrie, PartialTrie}, }; -use ethereum_types::H256; use log::trace; use plonky2_evm::generation::mpt::AccountRlp; @@ -13,7 +12,7 @@ use super::compact_prestate_processing::{ }; use crate::{ types::{CodeHash, HashedAccountAddr, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH}, - utils::hash, + utils::{h_addr_nibs_to_h256, hash}, }; #[derive(Debug, Default)] @@ -189,7 +188,11 @@ pub(crate) fn convert_storage_trie_root_keyed_hashmap_to_account_addr_keyed( let mut acc_addr_to_storage_trie_map = HashMap::new(); let account_addr_and_storage_root_iter = state_trie.items() - .filter_map(|(h_addr_nibs, acc_bytes)| acc_bytes.as_val().map(|acc_bytes| (H256::from_slice(&h_addr_nibs.bytes_be()), rlp::decode::(acc_bytes).expect("Encoder lib managed to improperly encode an account node in the state trie! 
This is a major bug in the encoder.").storage_root))); + .filter_map(|(h_addr_nibs, acc_bytes)| { + acc_bytes.as_val().map(|acc_bytes| { + (h_addr_nibs_to_h256(&h_addr_nibs), rlp::decode::(acc_bytes).expect("Encoder lib managed to improperly encode an account node in the state trie! This is a major bug in the encoder.").storage_root) + }) + }); // TODO: Replace with a map... for (acc_addr, storage_root) in account_addr_and_storage_root_iter { diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index 04d07857e..6f93b79c2 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -21,7 +21,8 @@ use crate::types::{ OtherBlockData, TrieRootHash, TxnProofGenIR, EMPTY_CODE_HASH, EMPTY_TRIE_HASH, }; use crate::utils::{ - hash, print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie, + h_addr_nibs_to_h256, hash, print_value_and_hash_nodes_of_storage_trie, + print_value_and_hash_nodes_of_trie, }; #[derive(Debug)] @@ -79,7 +80,7 @@ impl BlockTrace { .filter_map(|(addr, data)| { data.as_val().map(|data| { ( - HashedAccountAddr::from_slice(&addr.bytes_be()), + h_addr_nibs_to_h256(&addr), rlp::decode::(data).unwrap(), ) }) diff --git a/protocol_decoder/src/utils.rs b/protocol_decoder/src/utils.rs index 78b1a89e8..ffb482084 100644 --- a/protocol_decoder/src/utils.rs +++ b/protocol_decoder/src/utils.rs @@ -1,4 +1,5 @@ use eth_trie_utils::{ + nibbles::Nibbles, partial_trie::{HashedPartialTrie, PartialTrie}, trie_ops::ValOrHash, }; @@ -44,3 +45,15 @@ fn print_value_and_hash_nodes_of_trie_common(trie: &HashedPartialTrie) -> Vec H256 { + // TODO: HACK! This fix really needs to be in `eth_trie_utils`... + let mut nib_bytes = h_addr_nibs.bytes_be(); + if nib_bytes.len() < 32 { + for _ in nib_bytes.len()..32 { + nib_bytes.insert(0, 0); + } + } + + H256::from_slice(&nib_bytes) +} From dfd849f6b11321a91de654d8bf0871fa4ea5a372 Mon Sep 17 00:00:00 2001 From: BGluth Date: Sat, 23 Dec 2023 10:50:24 -0700 Subject: [PATCH 170/208] Dep bump --- Cargo.toml | 2 +- plonky_block_proof_gen/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 5dc71fd0a..dad11e512 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,5 +5,5 @@ resolver = "2" [workspace.dependencies] ethereum-types = "0.14.1" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "f8f6b07a3905185af302d58fb6b97c55d12e57be" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "ae3003a9d7eec22384328079fd8b413ce7acb153" } serde = "1.0.166" diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml index 6a8ca48d4..5a5247e0c 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/plonky_block_proof_gen/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0" ethereum-types = { workspace = true } log = { workspace = true } paste = "1.0.14" -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "f8f6b07a3905185af302d58fb6b97c55d12e57be" } +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "ae3003a9d7eec22384328079fd8b413ce7acb153" } plonky2_evm = { workspace = true } protocol_decoder = { path = "../protocol_decoder" } serde = { workspace = true } From 999baaed7f1bff22520c618a4c9c50062ededa61 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 11 Jan 2024 10:38:11 -0700 Subject: [PATCH 171/208] Updated `eth_trie_utils` - Contains important fixes. 
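As background, a small illustration (plain bytes standing in for `eth_trie_utils::Nibbles`; not part of this patch) of the conversion edge case that the `h_addr_nibs_to_h256` hack from the earlier patch guards against: a 256-bit key whose leading nibbles are zero comes back from a big-endian byte conversion shorter than 32 bytes, so it must be left-padded before a 32-byte hash can be rebuilt from it.

```rust
// Illustration only: restore the leading zero bytes that a big-endian
// conversion drops from a 256-bit key whose top nibbles are zero.
fn left_pad_to_32(mut bytes: Vec<u8>) -> Vec<u8> {
    while bytes.len() < 32 {
        bytes.insert(0, 0);
    }
    bytes
}

fn main() {
    // A "hashed address" that lost its leading zero byte: only 31 bytes long.
    let padded = left_pad_to_32(vec![0xab; 31]);
    assert_eq!(padded.len(), 32);
    assert_eq!(padded[0], 0); // The dropped zero byte is back in front.
}
```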
--- Cargo.toml | 4 ++++ protocol_decoder/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index dad11e512..c5a685c37 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,3 +7,7 @@ ethereum-types = "0.14.1" log = "0.4.20" plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "ae3003a9d7eec22384328079fd8b413ce7acb153" } serde = "1.0.166" + +# TODO: Remove once we update to the latest plonky2! +[patch."https://github.com/0xPolygonZero/eth_trie_utils.git"] +eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "7fc3c3f54b3cec9c6fc5ffc5230910bd1cb77f76" } diff --git a/protocol_decoder/Cargo.toml b/protocol_decoder/Cargo.toml index abda6c014..d774ec4da 100644 --- a/protocol_decoder/Cargo.toml +++ b/protocol_decoder/Cargo.toml @@ -10,7 +10,7 @@ ciborium = "0.2.1" ciborium-io = "0.2.1" enum-as-inner = "0.6.0" enumn = "0.1.12" -eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "e9ec4ec2aa2ae976b7c699ef40c1ffc716d87ed5" } +eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "7fc3c3f54b3cec9c6fc5ffc5230910bd1cb77f76" } ethereum-types = { workspace = true } hex-literal = "0.4.1" hex = "0.4.3" From 77de9b7e3b5b8bd2db0879c2c7832d6e216bdafa Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 11 Jan 2024 11:32:28 -0700 Subject: [PATCH 172/208] Bumped plonky2 --- Cargo.toml | 6 +----- plonky_block_proof_gen/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index c5a685c37..a35fb376f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,9 +5,5 @@ resolver = "2" [workspace.dependencies] ethereum-types = "0.14.1" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "ae3003a9d7eec22384328079fd8b413ce7acb153" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "b119e96f7f8dee57ebdbc6738e96891d2e628520" } serde = "1.0.166" - -# TODO: Remove once we update to the latest plonky2!
-[patch."https://github.com/0xPolygonZero/eth_trie_utils.git"] -eth_trie_utils = { git = "https://github.com/0xPolygonZero/eth_trie_utils.git", rev = "7fc3c3f54b3cec9c6fc5ffc5230910bd1cb77f76" } diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml index 5a5247e0c..d610f88ca 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/plonky_block_proof_gen/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0" ethereum-types = { workspace = true } log = { workspace = true } paste = "1.0.14" -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "ae3003a9d7eec22384328079fd8b413ce7acb153" } +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "b119e96f7f8dee57ebdbc6738e96891d2e628520" } plonky2_evm = { workspace = true } protocol_decoder = { path = "../protocol_decoder" } serde = { workspace = true } From efbc82ad39febc8f43ed9149f3f1253fbc323aa2 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 15 Jan 2024 11:19:54 -0700 Subject: [PATCH 173/208] Important plonky2 bump --- Cargo.toml | 2 +- plonky_block_proof_gen/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index a35fb376f..a111d6817 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,5 +5,5 @@ resolver = "2" [workspace.dependencies] ethereum-types = "0.14.1" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "b119e96f7f8dee57ebdbc6738e96891d2e628520" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "30b47998262642be54da5acf03dfca31af4d93f7" } serde = "1.0.166" diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml index d610f88ca..2849d7340 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/plonky_block_proof_gen/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0" ethereum-types = { workspace = true } log = { workspace = true } paste = "1.0.14" -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "b119e96f7f8dee57ebdbc6738e96891d2e628520" } +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "30b47998262642be54da5acf03dfca31af4d93f7" } plonky2_evm = { workspace = true } protocol_decoder = { path = "../protocol_decoder" } serde = { workspace = true } From 886ef7fd5cab8d7347b54a4735a2763adbbb7e64 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Wed, 17 Jan 2024 13:16:16 -0500 Subject: [PATCH 174/208] Bump plonky2 deps to 39a2d62d6d025631380da88aa78c2f8b929852a2 --- Cargo.toml | 2 +- plonky_block_proof_gen/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index a111d6817..92f8adb20 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,5 +5,5 @@ resolver = "2" [workspace.dependencies] ethereum-types = "0.14.1" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "30b47998262642be54da5acf03dfca31af4d93f7" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "39a2d62d6d025631380da88aa78c2f8b929852a2" } serde = "1.0.166" diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml index 2849d7340..82b9f1749 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/plonky_block_proof_gen/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0" ethereum-types = { workspace = true } log = { workspace = true } paste = "1.0.14" -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "30b47998262642be54da5acf03dfca31af4d93f7" } +plonky2 = { git = 
"https://github.com/0xPolygonZero/plonky2.git", rev = "39a2d62d6d025631380da88aa78c2f8b929852a2" } plonky2_evm = { workspace = true } protocol_decoder = { path = "../protocol_decoder" } serde = { workspace = true } From 33b1a17db40085a9417f0d57a512e7dd69861823 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 18 Jan 2024 14:35:00 -0700 Subject: [PATCH 175/208] Cleanup - Removed a lot of `println!`'s used for debugging. - Cleaned up a few warnings. --- .../compact/compact_prestate_processing.rs | 2 +- .../src/compact/compact_to_partial_trie.rs | 17 ---- protocol_decoder/src/decoding.rs | 92 +------------------ protocol_decoder/src/processed_block_trace.rs | 2 - 4 files changed, 2 insertions(+), 111 deletions(-) diff --git a/protocol_decoder/src/compact/compact_prestate_processing.rs b/protocol_decoder/src/compact/compact_prestate_processing.rs index e1b961b62..28dada01e 100644 --- a/protocol_decoder/src/compact/compact_prestate_processing.rs +++ b/protocol_decoder/src/compact/compact_prestate_processing.rs @@ -1462,7 +1462,7 @@ mod tests { let (header, parser) = ParserState::create_and_extract_header(bytes).unwrap(); assert_eq!(header.version, 1); - let _output = match parser.parse() { + let _ = match parser.parse() { Ok(trie) => trie, Err(err) => panic!("{}", err), }; diff --git a/protocol_decoder/src/compact/compact_to_partial_trie.rs b/protocol_decoder/src/compact/compact_to_partial_trie.rs index f4e7e0be6..e19c85999 100644 --- a/protocol_decoder/src/compact/compact_to_partial_trie.rs +++ b/protocol_decoder/src/compact/compact_to_partial_trie.rs @@ -68,19 +68,12 @@ fn process_branch( branch: &[Option>], output: &mut CompactToPartialTrieExtractionOutput, ) -> CompactParsingResult<()> { - println!("Full node at {:x}", curr_key); - for (i, slot) in branch.iter().enumerate().take(16) { if let Some(child) = slot { // TODO: Seriously update `eth_trie_utils` to have a better API... let mut new_k = curr_key; new_k.push_nibble_back(i as Nibble); create_partial_trie_from_compact_node_rec(new_k, child, output)?; - } else { - println!( - "Full node child at {} is nil.", - curr_key.clone().merge_nibble(i as Nibble) - ); } } @@ -111,8 +104,6 @@ fn process_hash( // trie. 
p_trie.insert(curr_key, hash); - println!("Inserting hash node at {:x}", curr_key); - Ok(()) } @@ -124,8 +115,6 @@ fn process_leaf( ) -> CompactParsingResult<()> { let full_k = curr_key.merge_nibbles(leaf_key); - println!("Inserting {:x} as a leaf", full_k); - let l_val = match leaf_node_data { LeafNodeData::Value(v_bytes) => rlp::encode(&v_bytes.0).to_vec(), LeafNodeData::Account(acc_data) => { @@ -158,12 +147,6 @@ fn convert_account_node_data_to_rlp_bytes_and_add_any_code_to_lookup( let c_hash = hash(c_bytes); output.code.insert(c_hash, c_bytes.clone()); - println!( - "ADDING ACCOUNT CODE HASH {:x} -- {}", - c_hash, - hex::encode(c_bytes) - ); - c_hash } Some(AccountNodeCode::HashNode(c_hash)) => *c_hash, diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index a14d6ca14..65b86356f 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -7,10 +7,9 @@ use std::{ use eth_trie_utils::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, Node, PartialTrie}, - trie_ops::ValOrHash, trie_subsets::create_trie_subset, }; -use ethereum_types::{Address, H160, H256, U256}; +use ethereum_types::{Address, H256, U256}; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, proof::TrieRoots, @@ -93,22 +92,6 @@ impl ProcessedBlockTrace { ..Default::default() }; - println!("State trie initial contents:"); - for (k, v) in curr_block_tries.state.items() { - let v_str = match v { - ValOrHash::Val(v) => hex::encode(&v), - ValOrHash::Hash(h) => format!("{:x}", h), - }; - - println!("k: {} --> {}", k, v_str); - } - - println!("Initial state Root: {:x}", curr_block_tries.state.hash()); - - // let state_trie_json = - // serde_json::to_string_pretty(&curr_block_tries.state).unwrap(); - // println!("Initial state trie: {}", state_trie_json); - let mut tot_gas_used = U256::zero(); let mut txn_gen_inputs = self @@ -133,11 +116,6 @@ )?; let trie_roots_after = calculate_trie_input_hashes(&curr_block_tries); - println!( - "Protocol expected trie roots after txn {}: {:?}", - txn_idx, trie_roots_after - ); - let gen_inputs = GenerationInputs { txn_number_before: txn_idx.into(), gas_used_before: tot_gas_used, @@ -164,13 +142,6 @@ }) .collect::<TraceParsingResult<Vec<_>>>()?; - for (k, v) in curr_block_tries.state.items() { - if let Some(v) = v.as_val() { - let acc_data = rlp::decode::<AccountRlp>(v).unwrap(); - println!("(FULL) account data: {:x} --> {:#?}", k, acc_data); - } - } - Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, &other_data, @@ -190,31 +161,6 @@ nodes_used_by_txn.state_accesses.iter().cloned(), )?; - let s: Vec<_> = state_trie - .items() - .map(|(_k, v)| match v { - ValOrHash::Val(v) => format!("V - {}", hex::encode(v)), - ValOrHash::Hash(h) => format!("H - {:x}", h), - }) - .collect(); - - println!("Actual final sub state trie: {:#?}", s); - - println!( - "Querying the hash(H160::zero()) ({}):", - hash(H160::zero().as_bytes()) - ); - state_trie.get( - Nibbles::from_bytes_be( - &hex::decode("5380c7b7ae81a58eb98d9c78de4a1fd7fd9535fc953ed2be602daaa41767312a") - .unwrap(), - ) - .unwrap(), - ); - println!("DONE QUERY"); - - // println!("State partial trie: {}", s); - let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); // TODO: Replace with cast once `eth_trie_utils` supports `into` for `usize...
let transactions_trie = @@ -249,9 +195,6 @@ impl ProcessedBlockTrace { meta: &TxnMetaState, txn_idx: TxnIdx, ) -> TraceParsingResult<()> { - // Used for some errors. Note that the clone is very cheap. - let state_trie_initial = trie_state.state.clone(); - for (hashed_acc_addr, storage_writes) in deltas.storage_writes { let storage_trie = trie_state .storage @@ -304,15 +247,6 @@ impl ProcessedBlockTrace { for hashed_addr in deltas.self_destructed_accounts { let k = Nibbles::from_h256_be(hashed_addr); - let account_data = trie_state.state.get(k).ok_or_else(|| { - TraceParsingError::NonExistentTrieEntry( - TrieType::State, - k, - state_trie_initial.hash(), - ) - })?; - let _account = account_from_rlped_bytes(account_data)?; - trie_state .storage .remove(&hashed_addr) @@ -373,30 +307,6 @@ impl StateTrieWrites { } }; - if self.balance.is_some() - || self.nonce.is_some() - || self.code_hash.is_some() - || storage_root_hash_change.is_some() - { - println!("DELTA FOR {:x}", h_addr); - - if let Some(v) = self.balance { - println!("---- balance: {:x}", v); - } - - if let Some(v) = self.nonce { - println!("---- nonce: {:x}", v); - } - - if let Some(v) = self.code_hash { - println!("---- c_hash: {:x}", v); - } - - if let Some(v) = storage_root_hash_change { - println!("---- storage change: {:x}", v); - } - } - update_val_if_some(&mut state_node.balance, self.balance); update_val_if_some(&mut state_node.nonce, self.nonce); update_val_if_some(&mut state_node.storage_root, storage_root_hash_change); diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index 6f93b79c2..e9728a8a7 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -208,8 +208,6 @@ impl TxnInfo { let mut contract_code_accessed = create_empty_code_access_map(); for (addr, trace) in self.traces { - println!("Addr {} --> {}", addr, hash(addr.as_bytes())); - let hashed_addr = hash(addr.as_bytes()); let storage_writes = trace.storage_written.unwrap_or_default(); From 729b4ea2ab3cfdf6d62af137772361dda5b505ca Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 18 Jan 2024 14:41:40 -0700 Subject: [PATCH 176/208] One more `plonky2` bump --- Cargo.toml | 2 +- plonky_block_proof_gen/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 92f8adb20..3d1073222 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,5 +5,5 @@ resolver = "2" [workspace.dependencies] ethereum-types = "0.14.1" log = "0.4.20" -plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "39a2d62d6d025631380da88aa78c2f8b929852a2" } +plonky2_evm = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "265d46a96ecfec49a32973f66f8aa811586c5d4a" } serde = "1.0.166" diff --git a/plonky_block_proof_gen/Cargo.toml b/plonky_block_proof_gen/Cargo.toml index 82b9f1749..898cca66c 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/plonky_block_proof_gen/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT OR Apache-2.0" ethereum-types = { workspace = true } log = { workspace = true } paste = "1.0.14" -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "39a2d62d6d025631380da88aa78c2f8b929852a2" } +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "265d46a96ecfec49a32973f66f8aa811586c5d4a" } plonky2_evm = { workspace = true } protocol_decoder = { path = "../protocol_decoder" } serde = { workspace = true } From b1cce71c92d1782ada4cebb52f133270fa347dc8 Mon Sep 17 00:00:00 2001 
From: BGluth Date: Thu, 18 Jan 2024 16:59:32 -0700 Subject: [PATCH 177/208] Requested PR changes for #12 --- protocol_decoder/src/decoding.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 65b86356f..fb1057d09 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -217,9 +217,6 @@ } } - // trie_state.state.insert(Nibbles::from_h256_be(hash(H160::zero().as_bytes())), - // ValOrHash::Val(EMPTY_ACCOUNT_BYTES_RLPED.to_vec())); - for (hashed_acc_addr, s_trie_writes) in deltas.state_writes { let val_k = Nibbles::from_h256_be(hashed_acc_addr); From 2e33ca244f82a44d41bb8cbf3f30fab4d283a046 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 1 Dec 2023 10:18:58 -0700 Subject: [PATCH 178/208] Added support for withdrawals --- protocol_decoder/src/decoding.rs | 3 +-- protocol_decoder/src/processed_block_trace.rs | 13 ++++++++++--- protocol_decoder/src/types.rs | 3 ++- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index fb1057d09..7fef9dff5 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -121,8 +121,7 @@ impl ProcessedBlockTrace { gas_used_before: tot_gas_used, gas_used_after: new_tot_gas_used, signed_txn: txn_info.meta.txn_bytes, - withdrawals: Vec::new(), /* TODO: Once this is added to the trace spec, add - * it here... */ + withdrawals: self.withdrawals.clone(), tries, trie_roots_after, checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index e9728a8a7..de0dd3d98 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -4,7 +4,7 @@ use std::iter::once; use eth_trie_utils::nibbles::Nibbles; use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie}; -use ethereum_types::U256; +use ethereum_types::{Address, U256}; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use crate::compact::compact_prestate_processing::{ @@ -29,6 +29,7 @@ pub(crate) struct ProcessedBlockTrace { pub(crate) tries: PartialTriePreImages, pub(crate) txn_info: Vec<TxnInfo>, pub(crate) withdrawals: Vec<(Address, U256)>, } const COMPATIBLE_HEADER_VERSION: u8 = 1; @@ -42,11 +43,16 @@ impl BlockTrace { where F: CodeHashResolveFunc, { - let proced_block_trace = self.into_processed_block_trace(p_meta); + let proced_block_trace = + self.into_processed_block_trace(p_meta, other_data.b_data.withdrawals.clone()); proced_block_trace.into_txn_proof_gen_ir(other_data) } - fn into_processed_block_trace<F>(self, p_meta: &ProcessingMeta<F>) -> ProcessedBlockTrace + fn into_processed_block_trace<F>( + self, + p_meta: &ProcessingMeta<F>, + withdrawals: Vec<(Address, U256)>, + ) -> ProcessedBlockTrace where F: CodeHashResolveFunc, { @@ -96,6 +102,7 @@ ProcessedBlockTrace { tries: pre_image_data.tries, txn_info, withdrawals, } } } diff --git a/protocol_decoder/src/types.rs b/protocol_decoder/src/types.rs index 6d89c862e..2cc7af73a 100644 --- a/protocol_decoder/src/types.rs +++ b/protocol_decoder/src/types.rs @@ -1,5 +1,5 @@ use eth_trie_utils::nibbles::Nibbles; -use ethereum_types::{H256, U256}; +use ethereum_types::{Address, H256, U256}; use plonky2_evm::{ generation::GenerationInputs, proof::{BlockHashes, BlockMetadata}, @@ -66,6 +66,7 @@ pub struct OtherBlockData { pub
struct BlockLevelData { pub b_meta: BlockMetadata, pub b_hashes: BlockHashes, + pub withdrawals: Vec<(Address, U256)>, } impl TxnProofGenIR { pub fn b_height(&self) -> BlockHeight { From 5d1b884641cebc4ddfa60a9be012a91cb1b5b086 Mon Sep 17 00:00:00 2001 From: BGluth Date: Thu, 18 Jan 2024 17:57:12 -0700 Subject: [PATCH 179/208] Implemented the new suggested way of handling withdrawals - See [this PR post](https://github.com/0xPolygonZero/proof-protocol-decoder/pull/4#issuecomment-1841843807) for more info. --- protocol_decoder/src/decoding.rs | 59 +++++++++++++++++++++++++++--- 1 file changed, 55 insertions(+), 4 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 7fef9dff5..201a699b8 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -121,7 +121,9 @@ impl ProcessedBlockTrace { gas_used_before: tot_gas_used, gas_used_after: new_tot_gas_used, signed_txn: txn_info.meta.txn_bytes, - withdrawals: self.withdrawals.clone(), + withdrawals: Vec::default(), /* Only ever set in a dummy txn at the end of + * the block (see `[add_withdrawls_to_txns]` for + * more info). */ tries, trie_roots_after, checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, @@ -141,11 +143,20 @@ ... }) .collect::<TraceParsingResult<Vec<_>>>()?; - Self::pad_gen_inputs_with_dummy_inputs_if_needed( + let dummys_added = Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, &other_data, &initial_tries_for_dummies, ); + + Self::add_withdrawls_to_txns( + &mut txn_gen_inputs, + &other_data, + &curr_block_tries, + self.withdrawals, + dummys_added, + ); + Ok(txn_gen_inputs) } @@ -267,7 +278,9 @@ gen_inputs: &mut Vec<TxnProofGenIR>, other_data: &OtherBlockData, initial_trie_state: &PartialTrieState, - ) { + ) -> bool { + let mut dummys_added = true; + match gen_inputs.len() { 0 => { // Need to pad with two dummy txns. ... let dummy_txn = create_dummy_gen_input(other_data, initial_trie_state, 0); gen_inputs.insert(0, dummy_txn); } - _ => (), + _ => dummys_added = false, } + + dummys_added } + + /// The withdrawls are always represented as a single "dummy" txn at the end + /// of the block. However, if no dummies have already been added, then + /// we need to append one to the end. If dummies have been added, then + /// add it to the last one. + fn add_withdrawls_to_txns( + txn_ir: &mut Vec<TxnProofGenIR>, + other_data: &OtherBlockData, + final_trie_state: &PartialTrieState, + withdrawals: Vec<(Address, U256)>, + dummies_already_added: bool, + ) { + if withdrawals.is_empty() { + return; + } + + match dummies_already_added { + false => { + // Guarenteed to have a real txn. + let final_ir = txn_ir.last().unwrap(); + + // Dummy state will be the state after the final txn. + let withdrawl_dummy = + create_dummy_gen_input(other_data, final_trie_state, final_ir.txn_idx + 1); + + // If we have no actual dummy txns, then we create one and append it to the end + // of the block. + txn_ir.push(withdrawl_dummy); + } + true => { + // If we have dummy txns (note: `txn_ir[1]` is always a dummy txn in this case), + // then this dummy will get the withdrawls.
+ txn_ir[1].gen_inputs.withdrawals = withdrawals; + } + } + } } From 6a93b66421cb713463596904b0aa4960e32191a9 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 08:53:13 -0500 Subject: [PATCH 180/208] Add VerifierState type --- plonky_block_proof_gen/src/lib.rs | 1 + plonky_block_proof_gen/src/types.rs | 8 ++ plonky_block_proof_gen/src/verifier_state.rs | 109 +++++++++++++++++++ 3 files changed, 118 insertions(+) create mode 100644 plonky_block_proof_gen/src/verifier_state.rs diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index 7786e59b7..b70035ca4 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -95,3 +95,4 @@ pub mod proof_gen; pub mod proof_types; pub mod prover_state; pub mod types; +pub mod verifier_state; diff --git a/plonky_block_proof_gen/src/types.rs b/plonky_block_proof_gen/src/types.rs index 13449845c..52f5d2d43 100644 --- a/plonky_block_proof_gen/src/types.rs +++ b/plonky_block_proof_gen/src/types.rs @@ -16,3 +16,11 @@ PoseidonGoldilocksConfig, 2, >; + +/// A type alias for the verifier data necessary to verify succinct block +/// proofs. +/// While the prover state [`AllRecursiveCircuits`] can also verify proofs, this +/// [`VerifierData`] is much lighter, allowing anyone to verify block proofs, +/// regardless of the underlying hardware. +pub type VerifierData = + plonky2::plonk::circuit_data::VerifierCircuitData<GoldilocksField, PoseidonGoldilocksConfig, 2>; diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs new file mode 100644 index 000000000..feeb9c1dc --- /dev/null +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -0,0 +1,109 @@ +//! This module defines the `VerifierState`, that contains the necessary data to +//! handle succinct block proofs verification. + +use std::ops::Range; + +use log::info; +use paste::paste; +use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; + +use crate::{ + prover_state::ProverState, + types::{AllRecursiveCircuits, VerifierData}, +}; + +/// Plonky2 verifier state. +/// +/// The default generation requires generating all the verifier data before +/// extracting the verifier-related data, which can take a long time and require +/// a large amount of memory. +pub struct VerifierState { + /// + pub state: VerifierData, +} + +/// Builder for the verifier state. +#[derive(Debug)] +pub struct VerifierStateBuilder { + arithmetic_circuit_size: Range<usize>, + byte_packing_circuit_size: Range<usize>, + cpu_circuit_size: Range<usize>, + keccak_circuit_size: Range<usize>, + keccak_sponge_circuit_size: Range<usize>, + logic_circuit_size: Range<usize>, + memory_circuit_size: Range<usize>, +} + +impl Default for VerifierStateBuilder { + fn default() -> Self { + // These ranges are somewhat arbitrary, but should be enough for testing + // purposes against most transactions. + // Some heavy contract deployments may require bumping these ranges though. + Self { + arithmetic_circuit_size: 16..20, + byte_packing_circuit_size: 10..20, + cpu_circuit_size: 12..22, + keccak_circuit_size: 14..17, + keccak_sponge_circuit_size: 9..14, + logic_circuit_size: 12..16, + memory_circuit_size: 17..25, + } + } +} + +macro_rules! define_set_circuit_size_method { + ($name:ident) => { + paste! { + /// Specifies a range of degrees to be supported for this STARK + /// table's associated recursive circuits.
+ pub fn [<set_ $name _circuit_size>](mut self, size: Range<usize>) -> Self { + self.[<$name _circuit_size>] = size; + self + } + } + }; +} + +impl VerifierStateBuilder { + define_set_circuit_size_method!(arithmetic); + define_set_circuit_size_method!(byte_packing); + define_set_circuit_size_method!(cpu); + define_set_circuit_size_method!(keccak); + define_set_circuit_size_method!(keccak_sponge); + define_set_circuit_size_method!(logic); + define_set_circuit_size_method!(memory); + + // TODO: Consider adding async version? + /// Instantiate the verifier state from the builder. Note that this is a + /// very expensive call! + pub fn build(self) -> VerifierState { + info!("Initializing Plonky2 aggregation verifier state (This may take a while)..."); + + let state = AllRecursiveCircuits::new( + &AllStark::default(), + &[ + self.arithmetic_circuit_size, + self.byte_packing_circuit_size, + self.cpu_circuit_size, + self.keccak_circuit_size, + self.keccak_sponge_circuit_size, + self.logic_circuit_size, + self.memory_circuit_size, + ], + &StarkConfig::standard_fast_config(), + ); + + info!("Finished initializing Plonky2 aggregation verifier state!"); + + VerifierState { state } + } +} + +/// Extracts the verifier state from the entire prover state. +impl From<ProverState> for VerifierState { + fn from(prover_state: ProverState) -> Self { + VerifierState { + state: prover_state.state.final_verifier_data(), + } + } +} From afabcef858a861a2aa9c3f54cd2de182415c8a35 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 09:11:19 -0500 Subject: [PATCH 181/208] Add quick comment and re-exports --- plonky_block_proof_gen/src/lib.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index b70035ca4..c86fef1da 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -90,9 +90,22 @@ //! curr_block_agg_proof: &GeneratedAggProof, //! ) -> ProofGenResult<GeneratedBlockProof> { ... } //! ``` +//! +//! ## Verifying block proofs +//! +//! The `ProverState` can be used to verify any block proofs emitted with the +//! same set of circuits. +//! However, because the prover state can be quite heavy, the necessary verifier +//! data to verify block proofs can be saved independently into a +//! `VerifierState`, to allow anyone to easily verify block proofs. pub mod proof_gen; pub mod proof_types; pub mod prover_state; pub mod types; pub mod verifier_state; + +// Re-exports + +pub use prover_state::ProverState; pub use verifier_state::VerifierState; From 2a50edc6468eeef3209ce142e0ff0944b0011213 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 09:21:19 -0500 Subject: [PATCH 182/208] Unify hardcoded ranges --- plonky_block_proof_gen/src/constants.rs | 16 ++++++++++++++++ plonky_block_proof_gen/src/lib.rs | 1 + plonky_block_proof_gen/src/prover_state.rs | 17 +++++++++-------- plonky_block_proof_gen/src/verifier_state.rs | 17 +++++++++-------- 4 files changed, 35 insertions(+), 16 deletions(-) create mode 100644 plonky_block_proof_gen/src/constants.rs diff --git a/plonky_block_proof_gen/src/constants.rs b/plonky_block_proof_gen/src/constants.rs new file mode 100644 index 000000000..dfa8556fa --- /dev/null +++ b/plonky_block_proof_gen/src/constants.rs @@ -0,0 +1,16 @@ +//! Hardcoded circuit constants to be used when generating the prover circuits. + +/// Default range to be used for the `ArithmeticStark` table. +pub(crate) const DEFAULT_ARITHMETIC_RANGE: Range<usize> = 16..20; +/// Default range to be used for the `BytePackingStark` table.
+pub(crate) const DEFAULT_BYTE_PACKING_RANGE: Range<usize> = 10..20; +/// Default range to be used for the `CpuStark` table. +pub(crate) const DEFAULT_CPU_RANGE: Range<usize> = 12..22; +/// Default range to be used for the `KeccakStark` table. +pub(crate) const DEFAULT_KECCAK_RANGE: Range<usize> = 14..17; +/// Default range to be used for the `KeccakSpongeStark` table. +pub(crate) const DEFAULT_KECCAK_SPONGE_RANGE: Range<usize> = 9..14; +/// Default range to be used for the `LogicStark` table. +pub(crate) const DEFAULT_LOGIC_RANGE: Range<usize> = 12..16; +/// Default range to be used for the `MemoryStark` table. +pub(crate) const DEFAULT_MEMORY_RANGE: Range<usize> = 17..25; diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index c86fef1da..cda80667c 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -99,6 +99,7 @@ //! data to verify block proofs can be saved independently into a //! `VerifierState`, to allow anyone to easily verify block proofs. +pub(crate) mod constants; pub mod proof_gen; pub mod proof_types; pub mod prover_state; diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index 7cb601ac1..d7775e329 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -9,6 +9,7 @@ use log::info; use paste::paste; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; +use crate::constants::*; use crate::types::AllRecursiveCircuits; /// Plonky2 proving state. Note that this is generally going to be massive in @@ -32,17 +33,17 @@ pub struct ProverStateBuilder { impl Default for ProverStateBuilder { fn default() -> Self { - // These ranges are somewhat arbitrary, but should be enough for testing + // The default ranges are somewhat arbitrary, but should be enough for testing // purposes against most transactions. // Some heavy contract deployments may require bumping these ranges though.
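The `DEFAULT_*` constants above are exactly what the generated `set_*` methods override. A sketch of bumping a single table's degree range for a heavy contract deployment, as the comment above suggests; the `17..28` range is an arbitrary example, not a recommendation:

```rust
use plonky_block_proof_gen::prover_state::ProverStateBuilder;

fn main() {
    // Every table keeps its DEFAULT_* range except memory, whose supported
    // degree range is widened here via the setter the `paste!` macro emits.
    let _prover_state = ProverStateBuilder::default()
        .set_memory_circuit_size(17..28)
        .build();
}
```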
Self { - arithmetic_circuit_size: 16..20, - byte_packing_circuit_size: 10..20, - cpu_circuit_size: 12..22, - keccak_circuit_size: 14..17, - keccak_sponge_circuit_size: 9..14, - logic_circuit_size: 12..16, - memory_circuit_size: 17..25, + arithmetic_circuit_size: DEFAULT_ARITHMETIC_RANGE, + byte_packing_circuit_size: DEFAULT_BYTE_PACKING_RANGE, + cpu_circuit_size: DEFAULT_CPU_RANGE, + keccak_circuit_size: DEFAULT_KECCAK_RANGE, + keccak_sponge_circuit_size: DEFAULT_KECCAK_SPONGE_RANGE, + logic_circuit_size: DEFAULT_LOGIC_RANGE, + memory_circuit_size: DEFAULT_MEMORY_RANGE, } } } From 2e89add0d7ac7bd855f114da70bf1664f9d973e3 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 16:39:11 -0500 Subject: [PATCH 183/208] Fix --- plonky_block_proof_gen/src/constants.rs | 2 ++ plonky_block_proof_gen/src/verifier_state.rs | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/constants.rs b/plonky_block_proof_gen/src/constants.rs index dfa8556fa..4ec011592 100644 --- a/plonky_block_proof_gen/src/constants.rs +++ b/plonky_block_proof_gen/src/constants.rs @@ -1,5 +1,7 @@ //! Hardcoded circuit constants to be used when generating the prover circuits. +use core::ops::Range; + /// Default range to be used for the `ArithmeticStark` table. pub(crate) const DEFAULT_ARITHMETIC_RANGE: Range<usize> = 16..20; /// Default range to be used for the `BytePackingStark` table. diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs index cd73f32cf..75bf9ab4e 100644 --- a/plonky_block_proof_gen/src/verifier_state.rs +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -96,7 +96,9 @@ impl VerifierStateBuilder { info!("Finished initializing Plonky2 aggregation verifier state!"); - VerifierState { state } + VerifierState { + state: state.final_verifier_data(), + } } } From 10896d9c9f4ae8ff676ce198b177a278285cf709 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 16:41:34 -0500 Subject: [PATCH 184/208] Add CI jobs --- .github/workflows/ci.yml | 85 ++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..1af134c94 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,85 @@ +name: Continuous Integration + +on: + push: + branches: [main] + pull_request: + branches: + - "**" + workflow_dispatch: + branches: + - "**" + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + +jobs: + test: + name: Test Suite + runs-on: ubuntu-latest + timeout-minutes: 30 + if: "!
contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Install nightly toolchain + uses: dtolnay/rust-toolchain@nightly + + - name: Set up rust cache + uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + + - name: Check in protocol_decoder subdirectory + run: cargo check --manifest-path protocol_decoder/Cargo.toml + env: + RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 + RUST_LOG: 1 + CARGO_INCREMENTAL: 1 + RUST_BACKTRACE: 1 + + - name: Check in plonky_block_proof_gen subdirectory + run: cargo check --manifest-path plonky_block_proof_gen/Cargo.toml + env: + RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 + RUST_LOG: 1 + CARGO_INCREMENTAL: 1 + RUST_BACKTRACE: 1 + + - name: Run cargo test + run: cargo test --workspace + env: + RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 + RUST_LOG: 1 + CARGO_INCREMENTAL: 1 + RUST_BACKTRACE: 1 + + lints: + name: Formatting and Clippy + runs-on: ubuntu-latest + timeout-minutes: 10 + if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Install nightly toolchain + uses: dtolnay/rust-toolchain@nightly + with: + components: rustfmt, clippy + + - name: Set up rust cache + uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + + - name: Run cargo fmt + run: cargo fmt --all --check + + - name: Run cargo clippy + run: cargo clippy --all-features --all-targets -- -D warnings -A incomplete-features From c1993f0000df9a662de83a0b8faef975798a48a0 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 16:49:08 -0500 Subject: [PATCH 185/208] Reduce code duplication --- plonky_block_proof_gen/src/prover_state.rs | 25 +++--- plonky_block_proof_gen/src/verifier_state.rs | 82 ++------------------ 2 files changed, 22 insertions(+), 85 deletions(-) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index d7775e329..e1d0449d1 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -22,13 +22,13 @@ pub struct ProverState { /// Builder for the prover state. #[derive(Debug)] pub struct ProverStateBuilder { - arithmetic_circuit_size: Range<usize>, - byte_packing_circuit_size: Range<usize>, - cpu_circuit_size: Range<usize>, - keccak_circuit_size: Range<usize>, - keccak_sponge_circuit_size: Range<usize>, - logic_circuit_size: Range<usize>, - memory_circuit_size: Range<usize>, + pub(crate) arithmetic_circuit_size: Range<usize>, + pub(crate) byte_packing_circuit_size: Range<usize>, + pub(crate) cpu_circuit_size: Range<usize>, + pub(crate) keccak_circuit_size: Range<usize>, + pub(crate) keccak_sponge_circuit_size: Range<usize>, + pub(crate) logic_circuit_size: Range<usize>, + pub(crate) memory_circuit_size: Range<usize>, } impl Default for ProverStateBuilder { @@ -73,10 +73,11 @@ impl ProverStateBuilder { // TODO: Consider adding async version? /// Instantiate the prover state from the builder. Note that this is a very /// expensive call! - pub fn build(self) -> ProverState { - info!("Initializing Plonky2 aggregation prover state (This may take a while)..."); + pub fn build(self, verbose: bool) -> ProverState { + if verbose { + info!("Initializing Plonky2 aggregation prover state (This may take a while)..."); + } - // ...
Yeah I don't understand the mysterious ranges either :) let state = AllRecursiveCircuits::new( &AllStark::default(), &[ @@ -91,7 +92,9 @@ impl ProverStateBuilder { &StarkConfig::standard_fast_config(), ); - info!("Finished initializing Plonky2 aggregation prover state!"); + if verbose { + info!("Finished initializing Plonky2 aggregation prover state!"); + } ProverState { state } } diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs index 75bf9ab4e..3552f285b 100644 --- a/plonky_block_proof_gen/src/verifier_state.rs +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -1,17 +1,10 @@ //! This module defines the `VerifierState`, that contains the necessary data to //! handle succinct block proofs verification. -use std::ops::Range; - use log::info; -use paste::paste; -use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; -use crate::constants::*; -use crate::{ - prover_state::ProverState, - types::{AllRecursiveCircuits, VerifierData}, -}; +use crate::prover_state::ProverStateBuilder; +use crate::{prover_state::ProverState, types::VerifierData}; /// Plonky2 verifier state. /// @@ -24,76 +17,17 @@ pub struct VerifierState { } /// Builder for the verifier state. -#[derive(Debug)] -pub struct VerifierStateBuilder { - arithmetic_circuit_size: Range<usize>, - byte_packing_circuit_size: Range<usize>, - cpu_circuit_size: Range<usize>, - keccak_circuit_size: Range<usize>, - keccak_sponge_circuit_size: Range<usize>, - logic_circuit_size: Range<usize>, - memory_circuit_size: Range<usize>, -} - -impl Default for VerifierStateBuilder { - fn default() -> Self { - // The default ranges are somewhat arbitrary, but should be enough for testing - // purposes against most transactions. - // Some heavy contract deployments may require bumping these ranges though. - Self { - arithmetic_circuit_size: DEFAULT_ARITHMETIC_RANGE, - byte_packing_circuit_size: DEFAULT_BYTE_PACKING_RANGE, - cpu_circuit_size: DEFAULT_CPU_RANGE, - keccak_circuit_size: DEFAULT_KECCAK_RANGE, - keccak_sponge_circuit_size: DEFAULT_KECCAK_SPONGE_RANGE, - logic_circuit_size: DEFAULT_LOGIC_RANGE, - memory_circuit_size: DEFAULT_MEMORY_RANGE, - } - } -} - -macro_rules! define_set_circuit_size_method { - ($name:ident) => { - paste! { - /// Specifies a range of degrees to be supported for this STARK - /// table's associated recursive circuits. - pub fn [<set_ $name _circuit_size>](mut self, size: Range<usize>) -> Self { - self.[<$name _circuit_size>] = size; - self - } - } - }; -} +/// This is essentially the same as the [`ProverStateBuilder`], in that we need +/// to first generate the entire prover state before extracting the verifier +/// data. +pub type VerifierStateBuilder = ProverStateBuilder; impl VerifierStateBuilder { - define_set_circuit_size_method!(arithmetic); - define_set_circuit_size_method!(byte_packing); - define_set_circuit_size_method!(cpu); - define_set_circuit_size_method!(keccak); - define_set_circuit_size_method!(keccak_sponge); - define_set_circuit_size_method!(logic); - define_set_circuit_size_method!(memory); - - // TODO: Consider adding async version? /// Instantiate the verifier state from the builder. Note that this is a /// very expensive call!
- pub fn build(self) -> VerifierState { + pub fn build_verifier(self) -> VerifierState { info!("Initializing Plonky2 aggregation verifier state (This may take a while)..."); - - let state = AllRecursiveCircuits::new( - &AllStark::default(), - &[ - self.arithmetic_circuit_size, - self.byte_packing_circuit_size, - self.cpu_circuit_size, - self.keccak_circuit_size, - self.keccak_sponge_circuit_size, - self.logic_circuit_size, - self.memory_circuit_size, - ], - &StarkConfig::standard_fast_config(), - ); - + let ProverState { state } = self.build(false); info!("Finished initializing Plonky2 aggregation verifier state!"); VerifierState { From 95fc8a4f271159c1eb8c854dd06f50ea83da295b Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 16:52:30 -0500 Subject: [PATCH 186/208] Silence clippy --- protocol_decoder/src/lib.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/protocol_decoder/src/lib.rs b/protocol_decoder/src/lib.rs index e1e004f09..2436c9abb 100644 --- a/protocol_decoder/src/lib.rs +++ b/protocol_decoder/src/lib.rs @@ -1,6 +1,10 @@ #![feature(linked_list_cursors)] #![feature(trait_alias)] #![feature(iter_array_chunks)] +// TODO: address these lints +#![allow(unused)] +#![allow(clippy::type_complexity)] +#![allow(private_interfaces)] mod compact; pub mod decoding; From 035f29e9d87b11946f42b50f9ee64107df1f3b61 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 17:00:08 -0500 Subject: [PATCH 187/208] Missing comment --- plonky_block_proof_gen/src/verifier_state.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs index 3552f285b..96082a1ec 100644 --- a/plonky_block_proof_gen/src/verifier_state.rs +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -12,7 +12,8 @@ use crate::{prover_state::ProverState, types::VerifierData}; /// extracting the verifier-related data, which can take a long time and require /// a large amount of memory. pub struct VerifierState { - /// + /// The verification circuit data associated to the block proof layer of the + /// plonky2 prover state. pub state: VerifierData, } From e548d30e92a0fcff784591d8d2722f1f32fe1263 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 17:13:10 -0500 Subject: [PATCH 188/208] Add some more documentation --- plonky_block_proof_gen/src/lib.rs | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/plonky_block_proof_gen/src/lib.rs b/plonky_block_proof_gen/src/lib.rs index cda80667c..17c23c58e 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/plonky_block_proof_gen/src/lib.rs @@ -98,6 +98,24 @@ //! However, because the prover state can be quite heavy, the necessary verifier //! data to verify block proofs can be saved independently into a //! `VerifierState`, to allow anyone to easily verify block proofs. +//! +//! ```compile_fail +//! # use plonky_block_proof_gen::prover_state::ProverStateBuilder; +//! # use plonky_block_proof_gen::verifier_state::VerifierState; +//! let mut builder = ProverStateBuilder::default(); +//! +//! // Generate a `ProverState` from the builder. +//! let prover_state = builder.build(); +//! +//! // Derive a `VerifierState` from the `ProverState`. +//! let verifier_state: VerifierState = prover_state.into(); +//! +//! // The prover generates some block proof. +//! let block_proof = prover_state.generate_block_proof(...); +//! +//! // Have the verifier attest validity of the proof. +//! 
assert!(verifier_state.verify(block_proof.intern).is_ok()); +//! ``` pub(crate) mod constants; pub mod proof_gen; From 9efafbaa4c54d48e516ef9ac3bfe12fe46f7cacf Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 18:23:53 -0500 Subject: [PATCH 189/208] Typo --- plonky_block_proof_gen/src/verifier_state.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs index 96082a1ec..a9fc1ddb3 100644 --- a/plonky_block_proof_gen/src/verifier_state.rs +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -8,7 +8,7 @@ use crate::{prover_state::ProverState, types::VerifierData}; /// Plonky2 verifier state. /// -/// The default generation requires generating all the verifier data before +/// The default generation requires generating all the prover data before /// extracting the verifier-related data, which can take a long time and require /// a large amount of memory. pub struct VerifierState { From de824419704afa7e740798a4bb661e4f13e636a3 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 18:57:13 -0500 Subject: [PATCH 190/208] Add verification method --- plonky_block_proof_gen/src/verifier_state.rs | 23 ++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs index a9fc1ddb3..a1ff91cfc 100644 --- a/plonky_block_proof_gen/src/verifier_state.rs +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -2,8 +2,11 @@ //! handle succinct block proofs verification. use log::info; +use plonky2::recursion::cyclic_recursion::check_cyclic_proof_verifier_data; +use crate::proof_gen::ProofGenResult; use crate::prover_state::ProverStateBuilder; +use crate::types::PlonkyProofIntern; use crate::{prover_state::ProverState, types::VerifierData}; /// Plonky2 verifier state. @@ -45,3 +48,23 @@ impl From for VerifierState { } } } + +impl VerifierState { + /// Verifies a `block_proof`. + pub fn verify(&self, block_proof: &PlonkyProofIntern) -> ProofGenResult<()> { + // Proof verification + self.state + .verify(block_proof.clone()) + .map_err(|err| err.to_string())?; + + // Verifier data verification + check_cyclic_proof_verifier_data( + block_proof, + &self.state.verifier_only, + &self.state.common, + ) + .map_err(|err| err.to_string())?; + + Ok(()) + } +} From 0e5c0b820151003097026fd7ccc6de178368b000 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 19 Jan 2024 21:07:12 -0500 Subject: [PATCH 191/208] Comments --- plonky_block_proof_gen/src/prover_state.rs | 10 +++------- plonky_block_proof_gen/src/verifier_state.rs | 10 ++++++---- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs index e1d0449d1..6f7e30ef4 100644 --- a/plonky_block_proof_gen/src/prover_state.rs +++ b/plonky_block_proof_gen/src/prover_state.rs @@ -73,10 +73,8 @@ impl ProverStateBuilder { // TODO: Consider adding async version? /// Instantiate the prover state from the builder. Note that this is a very /// expensive call! 
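With the `verify` method from PATCH 190 in place, checking a block proof against a saved `VerifierState` is a one-liner. A sketch, assuming the proof was produced by a prover state built from the same circuit ranges:

```rust
use plonky_block_proof_gen::types::PlonkyProofIntern;
use plonky_block_proof_gen::verifier_state::VerifierState;

fn check(verifier: &VerifierState, block_proof: &PlonkyProofIntern) {
    // `verify` checks both the proof itself and the embedded cyclic verifier
    // data, so a proof emitted by a different set of circuits is rejected.
    assert!(verifier.verify(block_proof).is_ok());
}
```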
- pub fn build(self, verbose: bool) -> ProverState { - if verbose { - info!("Initializing Plonky2 aggregation prover state (This may take a while)..."); - } + pub fn build(self) -> ProverState { + info!("Initializing Plonky2 aggregation prover state (This may take a while)..."); let state = AllRecursiveCircuits::new( &AllStark::default(), &[ @@ -92,9 +90,7 @@ impl ProverStateBuilder { &StarkConfig::standard_fast_config(), ); - if verbose { - info!("Finished initializing Plonky2 aggregation prover state!"); - } + info!("Finished initializing Plonky2 aggregation prover state!"); ProverState { state } } diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/plonky_block_proof_gen/src/verifier_state.rs index a1ff91cfc..b6a3ceb32 100644 --- a/plonky_block_proof_gen/src/verifier_state.rs +++ b/plonky_block_proof_gen/src/verifier_state.rs @@ -1,6 +1,8 @@ //! This module defines the `VerifierState`, that contains the necessary data to //! handle succinct block proofs verification. +use core::borrow::Borrow; + use log::info; use plonky2::recursion::cyclic_recursion::check_cyclic_proof_verifier_data; @@ -31,7 +33,7 @@ impl VerifierStateBuilder { /// very expensive call! pub fn build_verifier(self) -> VerifierState { info!("Initializing Plonky2 aggregation verifier state (This may take a while)..."); - let ProverState { state } = self.build(false); + let ProverState { state } = self.build(); info!("Finished initializing Plonky2 aggregation verifier state!"); VerifierState { @@ -41,10 +43,10 @@ } /// Extracts the verifier state from the entire prover state. -impl From<ProverState> for VerifierState { - fn from(prover_state: ProverState) -> Self { +impl<T: Borrow<ProverState>> From<T> for VerifierState { + fn from(prover_state: T) -> Self { VerifierState { - state: prover_state.state.final_verifier_data(), + state: prover_state.borrow().state.final_verifier_data(), } } } From ffac326b3d54bc12894c82f8be45bdd378ecc681 Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 19 Jan 2024 20:33:41 -0700 Subject: [PATCH 192/208] Apply suggestions from code review Robin's spelling corrections. Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com> --- protocol_decoder/src/decoding.rs | 6 +++--- protocol_decoder/src/processed_block_trace.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 201a699b8..da8bf0614 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -299,7 +299,7 @@ impl ProcessedBlockTrace { dummys_added } - /// The withdrawls are always represented as a single "dummy" txn at the end + /// The withdrawals are always represented as a single "dummy" txn at the end /// of the block. However, if no dummies have already been added, then /// we need to append one to the end. If dummies have been added, then /// add it to the last one. @@ -316,11 +316,11 @@ match dummies_already_added { false => { - // Guarenteed to have a real txn. + // Guaranteed to have a real txn. let final_ir = txn_ir.last().unwrap(); // Dummy state will be the state after the final txn.
- let withdrawl_dummy = + let withdrawal_dummy = create_dummy_gen_input(other_data, final_trie_state, final_ir.txn_idx + 1); // If we have no actual dummy txns, then we create one and append it to the end diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index de0dd3d98..1c83679f8 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -43,7 +43,7 @@ impl BlockTrace { where F: CodeHashResolveFunc, { - let proced_block_trace = + let processed_block_trace = self.into_processed_block_trace(p_meta, other_data.b_data.withdrawals.clone()); proced_block_trace.into_txn_proof_gen_ir(other_data) } From d6794389bd6633668ffa210923a8b7f18663c2ee Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 19 Jan 2024 20:51:25 -0700 Subject: [PATCH 193/208] Suggested PR changes for #4 --- protocol_decoder/src/decoding.rs | 55 ++++++++++--------- protocol_decoder/src/processed_block_trace.rs | 3 +- 2 files changed, 30 insertions(+), 28 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index da8bf0614..1d5b249d0 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -122,8 +122,8 @@ impl ProcessedBlockTrace { gas_used_after: new_tot_gas_used, signed_txn: txn_info.meta.txn_bytes, withdrawals: Vec::default(), /* Only ever set in a dummy txn at the end of - * the block (see `[add_withdrawls_to_txns]` for - * more info). */ + * the block (see `[add_withdrawals_to_txns]` + * for more info). */ tries, trie_roots_after, checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, @@ -143,19 +143,21 @@ ... }) .collect::<TraceParsingResult<Vec<_>>>()?; - let dummys_added = Self::pad_gen_inputs_with_dummy_inputs_if_needed( + let dummies_added = Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, &other_data, &initial_tries_for_dummies, ); - Self::add_withdrawls_to_txns( - &mut txn_gen_inputs, - &other_data, - &curr_block_tries, - self.withdrawals, - dummys_added, - ); + if !self.withdrawals.is_empty() { + Self::add_withdrawals_to_txns( + &mut txn_gen_inputs, + &other_data, + &curr_block_tries, + self.withdrawals, + dummies_added, + ); + } Ok(txn_gen_inputs) } @@ -279,7 +281,7 @@ other_data: &OtherBlockData, initial_trie_state: &PartialTrieState, ) -> bool { - let mut dummys_added = true; + let mut dummies_added = true; match gen_inputs.len() { 0 => { // Need to pad with two dummy txns. @@ -293,27 +295,26 @@ let dummy_txn = create_dummy_gen_input(other_data, initial_trie_state, 0); gen_inputs.insert(0, dummy_txn); } - _ => dummys_added = false, + _ => dummies_added = false, } - dummys_added + dummies_added } - /// The withdrawals are always represented as a single "dummy" txn at the end - /// of the block. However, if no dummies have already been added, then - /// we need to append one to the end. If dummies have been added, then - /// add it to the last one. + /// The withdrawals are always in the final ir payload. How they are placed + /// differs based on whether or not there are already dummy proofs present + /// in the IR. The rules for adding withdrawals to the IR list are: + /// - If dummy proofs are already present, then the withdrawals are added to + /// the last dummy proof (always index `1`). + /// - If no dummy proofs are already present, then a dummy proof that just + /// contains the withdrawals is appended to the end of the IR vec.
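The two rules in the doc comment above amount to very little control flow. A self-contained sketch over a simplified IR type, where `Ir` and the `(u8, u64)` pairs are stand-ins for `TxnProofGenIR` and the real `(Address, U256)` withdrawals:

```rust
#[derive(Default)]
struct Ir {
    withdrawals: Vec<(u8, u64)>, // stand-in for Vec<(Address, U256)>
}

fn add_withdrawals(ir: &mut Vec<Ir>, withdrawals: Vec<(u8, u64)>, dummies_added: bool) {
    match dummies_added {
        // Rule 1: padding is present, so slot `1` is a dummy and receives them.
        true => ir[1].withdrawals = withdrawals,
        // Rule 2: no padding, so a withdrawals-only dummy is appended.
        false => ir.push(Ir { withdrawals }),
    }
}

fn main() {
    // An empty block was padded with two dummies: withdrawals land in slot 1.
    let mut padded = vec![Ir::default(), Ir::default()];
    add_withdrawals(&mut padded, vec![(0xaa, 100)], true);
    assert_eq!(padded[1].withdrawals, vec![(0xaa, 100)]);

    // A block with real txns instead gets a fresh trailing dummy.
    let mut unpadded = vec![Ir::default(), Ir::default(), Ir::default()];
    add_withdrawals(&mut unpadded, vec![(0xbb, 7)], false);
    assert_eq!(unpadded.len(), 4);
}
```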
+ fn add_withdrawals_to_txns( txn_ir: &mut Vec<TxnProofGenIR>, other_data: &OtherBlockData, final_trie_state: &PartialTrieState, withdrawals: Vec<(Address, U256)>, dummies_already_added: bool, ) { - if withdrawals.is_empty() { - return; - } - match dummies_already_added { false => { // Guaranteed to have a real txn. @@ -323,13 +324,13 @@ let final_ir = txn_ir.last().unwrap(); // Dummy state will be the state after the final txn. - let withdrawal_dummy = create_dummy_gen_input(other_data, final_trie_state, final_ir.txn_idx + 1); - // If we have no actual dummy txns, then we create one and append it to the end - // of the block. - txn_ir.push(withdrawl_dummy); + // If we have no actual dummy proofs, then we create one and append it to the + // end of the block. + txn_ir.push(withdrawal_dummy); } true => { - // If we have dummy txns (note: `txn_ir[1]` is always a dummy txn in this - // case), then this dummy will get the withdrawls. + // If we have dummy proofs (note: `txn_ir[1]` is always a dummy txn in this + // case), then this dummy will get the withdrawals. txn_ir[1].gen_inputs.withdrawals = withdrawals; } } diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index 1c83679f8..bc8fdf1ee 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -45,7 +45,8 @@ impl BlockTrace { { let processed_block_trace = self.into_processed_block_trace(p_meta, other_data.b_data.withdrawals.clone()); - proced_block_trace.into_txn_proof_gen_ir(other_data) + + processed_block_trace.into_txn_proof_gen_ir(other_data) } From d3a1b835f1a9074e72362582d80221c9b81e515c Mon Sep 17 00:00:00 2001 From: BGluth Date: Fri, 19 Jan 2024 21:19:42 -0700 Subject: [PATCH 194/208] Withdrawals now adjust account balances --- protocol_decoder/src/decoding.rs | 40 ++++++++++++++++++++++---- 1 file changed, 36 insertions(+), 4 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 1d5b249d0..8dc7d6361 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -43,6 +43,9 @@ pub enum TraceParsingError { // placeholder. #[error("Missing keys when creating sub-partial tries (Trie type: {0})")] MissingKeysCreatingSubPartialTrie(TrieType), + + #[error("No account present at {0:x} (hashed: {1:x}) to withdrawal {2} Gwei from!")] + MissingWithdrawalAccount(Address, HashedAccountAddr, U256), } #[derive(Debug)] @@ -153,10 +156,10 @@ impl ProcessedBlockTrace { Self::add_withdrawals_to_txns( &mut txn_gen_inputs, &other_data, - &curr_block_tries, + &mut curr_block_tries, self.withdrawals, dummies_added, - ); + )?; } Ok(txn_gen_inputs) } @@ -311,10 +314,13 @@ fn add_withdrawals_to_txns( txn_ir: &mut Vec<TxnProofGenIR>, other_data: &OtherBlockData, - final_trie_state: &PartialTrieState, + final_trie_state: &mut PartialTrieState, withdrawals: Vec<(Address, U256)>, dummies_already_added: bool, - ) { + ) -> TraceParsingResult<()> { + // Withdrawals update balances in the account trie, so we need to do that here. + Self::update_trie_state_from_withdrawals(withdrawals.iter(), &mut final_trie_state.state)?; + match dummies_already_added { false => { // Guaranteed to have a real txn.
@@ -334,6 +340,32 @@ impl ProcessedBlockTrace {
             txn_ir[1].gen_inputs.withdrawals = withdrawals;
         }
     }
+
+        Ok(())
+    }
+
+    fn update_trie_state_from_withdrawals<'a>(
+        withdrawals: impl Iterator<Item = &'a (Address, U256)> + 'a,
+        state: &mut HashedPartialTrie,
+    ) -> TraceParsingResult<()> {
+        for (addr, amt) in withdrawals {
+            let h_addr = hash(addr.as_bytes());
+            let h_addr_nibs = Nibbles::from_h256_be(h_addr);
+
+            let acc_bytes =
+                state
+                    .get(h_addr_nibs)
+                    .ok_or(TraceParsingError::MissingWithdrawalAccount(
+                        *addr, h_addr, *amt,
+                    ))?;
+            let mut acc_data = account_from_rlped_bytes(acc_bytes)?;
+
+            acc_data.balance += *amt;
+
+            state.insert(h_addr_nibs, rlp::encode(&acc_data).to_vec());
+        }
+
+        Ok(())
+    }
 }

From 0792e3a313e74f930a66abe9ce01f45724c89539 Mon Sep 17 00:00:00 2001
From: BGluth
Date: Mon, 22 Jan 2024 10:11:55 -0700
Subject: [PATCH 195/208] Suggested PR changes for #4 (2)

---
 protocol_decoder/src/decoding.rs | 31 ++++++++++++++++++++-----------
 1 file changed, 22 insertions(+), 9 deletions(-)

diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs
index 8dc7d6361..9b976d7f5 100644
--- a/protocol_decoder/src/decoding.rs
+++ b/protocol_decoder/src/decoding.rs
@@ -44,7 +44,7 @@ pub enum TraceParsingError {
     #[error("Missing keys when creating sub-partial tries (Trie type: {0})")]
     MissingKeysCreatingSubPartialTrie(TrieType),
 
-    #[error("No account present at {0:x} (hashed: {1:x}) to withdrawal {2} Gwei from!")]
+    #[error("No account present at {0:x} (hashed: {1:x}) to withdraw {2} Gwei from!")]
     MissingWithdrawalAccount(Address, HashedAccountAddr, U256),
 }
 
@@ -279,29 +279,40 @@ impl ProcessedBlockTrace {
         Ok(())
     }
 
+    /// Pads a generated IR vec with additional "dummy" entries if needed.
+    /// We need to ensure that generated IR always has at least `2` elements,
+    /// and if there are only `0` or `1` elements, then we need to pad so
+    /// that we have two entries in total. These dummy entries serve only to
+    /// allow the proof generation process to finish. Specifically, we need
+    /// at least two entries to generate an agg proof, and we need an agg
+    /// proof to generate a block proof. These entries do not mutate state,
+    /// unless there are withdrawals in the block (see
+    /// `[add_withdrawals_to_txns]`), in which case the final one will mutate
+    /// the state trie.
     fn pad_gen_inputs_with_dummy_inputs_if_needed(
         gen_inputs: &mut Vec<TxnProofGenIR>,
         other_data: &OtherBlockData,
         initial_trie_state: &PartialTrieState,
     ) -> bool {
-        let mut dummies_added = true;
-
         match gen_inputs.len() {
             0 => {
-                // Need to pad with two dummy txns.
+                // Need to pad with two dummy entries.
                 gen_inputs.extend(create_dummy_txn_pair_for_empty_block(
                     other_data,
                     initial_trie_state,
                 ));
+
+                true
             }
             1 => {
+                // Just need one.
                 let dummy_txn = create_dummy_gen_input(other_data, initial_trie_state, 0);
                 gen_inputs.insert(0, dummy_txn);
+
+                true
             }
-            _ => dummies_added = false,
+            _ => false,
         }
-
-        dummies_added
     }
 
     /// The withdrawals are always in the final ir payload. How they are placed
@@ -319,7 +330,7 @@ impl ProcessedBlockTrace {
         dummies_already_added: bool,
     ) -> TraceParsingResult<()> {
         // Withdrawals update balances in the account trie, so we need to do that here.
- Self::update_trie_state_from_withdrawals(withdrawals.iter(), &mut final_trie_state.state)?; + Self::update_trie_state_from_withdrawals(&withdrawals, &mut final_trie_state.state)?; match dummies_already_added { false => { @@ -338,6 +349,8 @@ impl ProcessedBlockTrace { // If we have dummy proofs (note: `txn_ir[1]` is always a dummy txn in this // case), then this dummy will get the withdrawals. txn_ir[1].gen_inputs.withdrawals = withdrawals; + txn_ir[1].gen_inputs.tries.state_trie = final_trie_state.state.clone(); + txn_ir[1].gen_inputs.trie_roots_after.state_root = final_trie_state.state.hash(); } } @@ -345,7 +358,7 @@ impl ProcessedBlockTrace { } fn update_trie_state_from_withdrawals<'a>( - withdrawals: impl Iterator + 'a, + withdrawals: impl IntoIterator + 'a, state: &mut HashedPartialTrie, ) -> TraceParsingResult<()> { for (addr, amt) in withdrawals { From 875fe425381b1d7b9989b0f1a4a40544316d7ece Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 22 Jan 2024 10:40:57 -0700 Subject: [PATCH 196/208] Suggested PR changes for #4 (3) --- protocol_decoder/src/decoding.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 9b976d7f5..540acf310 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -349,7 +349,6 @@ impl ProcessedBlockTrace { // If we have dummy proofs (note: `txn_ir[1]` is always a dummy txn in this // case), then this dummy will get the withdrawals. txn_ir[1].gen_inputs.withdrawals = withdrawals; - txn_ir[1].gen_inputs.tries.state_trie = final_trie_state.state.clone(); txn_ir[1].gen_inputs.trie_roots_after.state_root = final_trie_state.state.hash(); } } From 71ea720fe0b056b0e17d7f79030d223ae3c6ee68 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 22 Jan 2024 10:48:54 -0700 Subject: [PATCH 197/208] Suggested PR changes for #4 (4) --- protocol_decoder/src/decoding.rs | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 540acf310..1d775660d 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -329,23 +329,34 @@ impl ProcessedBlockTrace { withdrawals: Vec<(Address, U256)>, dummies_already_added: bool, ) -> TraceParsingResult<()> { - // Withdrawals update balances in the account trie, so we need to do that here. - Self::update_trie_state_from_withdrawals(&withdrawals, &mut final_trie_state.state)?; - match dummies_already_added { false => { // Guaranteed to have a real txn. - let final_ir = txn_ir.last().unwrap(); + let txn_idx_of_dummy_entry = txn_ir.last().unwrap().txn_idx + 1; // Dummy state will be the state after the final txn. - let withdrawal_dummy = - create_dummy_gen_input(other_data, final_trie_state, final_ir.txn_idx + 1); + let mut withdrawal_dummy = + create_dummy_gen_input(other_data, final_trie_state, txn_idx_of_dummy_entry); + + Self::update_trie_state_from_withdrawals( + &withdrawals, + &mut final_trie_state.state, + )?; + + // Only the state root hash needs to be updated from the withdrawals. + withdrawal_dummy.gen_inputs.trie_roots_after.state_root = + final_trie_state.state.hash(); // If we have no actual dummy proofs, then we create one and append it to the // end of the block. 
txn_ir.push(withdrawal_dummy); } true => { + Self::update_trie_state_from_withdrawals( + &withdrawals, + &mut final_trie_state.state, + )?; + // If we have dummy proofs (note: `txn_ir[1]` is always a dummy txn in this // case), then this dummy will get the withdrawals. txn_ir[1].gen_inputs.withdrawals = withdrawals; @@ -356,6 +367,8 @@ impl ProcessedBlockTrace { Ok(()) } + /// Withdrawals update balances in the account trie, so we need to update + /// our local trie state. fn update_trie_state_from_withdrawals<'a>( withdrawals: impl IntoIterator + 'a, state: &mut HashedPartialTrie, From 20e7bb4123afbe578f16f3f85223ca37a84ef8da Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 22 Jan 2024 11:01:48 -0700 Subject: [PATCH 198/208] Suggested PR changes for #4 (5) --- protocol_decoder/src/decoding.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 1d775660d..b8ce3f61d 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -330,6 +330,8 @@ impl ProcessedBlockTrace { dummies_already_added: bool, ) -> TraceParsingResult<()> { match dummies_already_added { + // If we have no actual dummy proofs, then we create one and append it to the + // end of the block. false => { // Guaranteed to have a real txn. let txn_idx_of_dummy_entry = txn_ir.last().unwrap().txn_idx + 1; @@ -343,12 +345,12 @@ impl ProcessedBlockTrace { &mut final_trie_state.state, )?; + withdrawal_dummy.gen_inputs.withdrawals = withdrawals; + // Only the state root hash needs to be updated from the withdrawals. withdrawal_dummy.gen_inputs.trie_roots_after.state_root = final_trie_state.state.hash(); - // If we have no actual dummy proofs, then we create one and append it to the - // end of the block. txn_ir.push(withdrawal_dummy); } true => { From 3ff6be2f8ddcd7fc5e3c3f62a817588d6800e7ee Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Mon, 22 Jan 2024 13:39:14 -0500 Subject: [PATCH 199/208] Do some minor cleanup --- plonky_block_proof_gen/src/proof_gen.rs | 4 +- .../compact/compact_prestate_processing.rs | 2 +- protocol_decoder/src/decoding.rs | 32 ++++----------- protocol_decoder/src/lib.rs | 2 - protocol_decoder/src/processed_block_trace.rs | 7 +++- protocol_decoder/src/proof_gen_types.rs | 41 ------------------- protocol_decoder/src/trace_protocol.rs | 2 +- protocol_decoder/src/types.rs | 24 +---------- 8 files changed, 19 insertions(+), 95 deletions(-) delete mode 100644 protocol_decoder/src/proof_gen_types.rs diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/plonky_block_proof_gen/src/proof_gen.rs index 7b403b657..e63b4181f 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/plonky_block_proof_gen/src/proof_gen.rs @@ -38,7 +38,7 @@ impl From for ProofGenError { /// Generates a transaction proof from some IR data. 
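/// A typical call, assuming an already-built `ProverState` and leaving error
/// handling to the caller (sketch):
///
/// ```ignore
/// let txn_proof = generate_txn_proof(&p_state, gen_inputs, None)?;
/// ```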
pub fn generate_txn_proof( p_state: &ProverState, - start_info: TxnProofGenIR, + gen_inputs: TxnProofGenIR, abort_signal: Option>, ) -> ProofGenResult { let (intern, p_vals) = p_state @@ -46,7 +46,7 @@ pub fn generate_txn_proof( .prove_root( &AllStark::default(), &StarkConfig::standard_fast_config(), - start_info.gen_inputs, + gen_inputs, &mut TimingTree::default(), abort_signal, ) diff --git a/protocol_decoder/src/compact/compact_prestate_processing.rs b/protocol_decoder/src/compact/compact_prestate_processing.rs index 28dada01e..6af7f356a 100644 --- a/protocol_decoder/src/compact/compact_prestate_processing.rs +++ b/protocol_decoder/src/compact/compact_prestate_processing.rs @@ -1,4 +1,4 @@ -//! Processing for the compact format as specified here: https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md +//! Processing for the compact format as specified here: use std::{ any::type_name, diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index b8ce3f61d..83e8038ac 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -135,14 +135,9 @@ impl ProcessedBlockTrace { block_hashes: other_data.b_data.b_hashes.clone(), }; - let txn_proof_gen_ir = TxnProofGenIR { - txn_idx, - gen_inputs, - }; - tot_gas_used = new_tot_gas_used; - Ok(txn_proof_gen_ir) + Ok(gen_inputs) }) .collect::>>()?; @@ -334,7 +329,8 @@ impl ProcessedBlockTrace { // end of the block. false => { // Guaranteed to have a real txn. - let txn_idx_of_dummy_entry = txn_ir.last().unwrap().txn_idx + 1; + let txn_idx_of_dummy_entry = + txn_ir.last().unwrap().txn_number_before.low_u64() as usize + 1; // Dummy state will be the state after the final txn. let mut withdrawal_dummy = @@ -345,11 +341,10 @@ impl ProcessedBlockTrace { &mut final_trie_state.state, )?; - withdrawal_dummy.gen_inputs.withdrawals = withdrawals; + withdrawal_dummy.withdrawals = withdrawals; // Only the state root hash needs to be updated from the withdrawals. - withdrawal_dummy.gen_inputs.trie_roots_after.state_root = - final_trie_state.state.hash(); + withdrawal_dummy.trie_roots_after.state_root = final_trie_state.state.hash(); txn_ir.push(withdrawal_dummy); } @@ -361,8 +356,8 @@ impl ProcessedBlockTrace { // If we have dummy proofs (note: `txn_ir[1]` is always a dummy txn in this // case), then this dummy will get the withdrawals. 
- txn_ir[1].gen_inputs.withdrawals = withdrawals; - txn_ir[1].gen_inputs.trie_roots_after.state_root = final_trie_state.state.hash(); + txn_ir[1].withdrawals = withdrawals; + txn_ir[1].trie_roots_after.state_root = final_trie_state.state.hash(); } } @@ -461,7 +456,7 @@ fn create_dummy_gen_input( receipts_root: EMPTY_TRIE_HASH, }; - let gen_inputs = GenerationInputs { + GenerationInputs { signed_txn: None, tries, trie_roots_after, @@ -469,9 +464,7 @@ fn create_dummy_gen_input( block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), ..GenerationInputs::default() - }; - - gen_inputs_to_ir(gen_inputs, txn_idx) + } } impl TxnMetaState { @@ -483,13 +476,6 @@ impl TxnMetaState { } } -fn gen_inputs_to_ir(gen_inputs: GenerationInputs, txn_idx: TxnIdx) -> TxnProofGenIR { - TxnProofGenIR { - txn_idx, - gen_inputs, - } -} - fn create_dummy_proof_trie_inputs(final_trie_state: &PartialTrieState) -> TrieInputs { let partial_sub_storage_tries: Vec<_> = final_trie_state .storage diff --git a/protocol_decoder/src/lib.rs b/protocol_decoder/src/lib.rs index 2436c9abb..314b433d6 100644 --- a/protocol_decoder/src/lib.rs +++ b/protocol_decoder/src/lib.rs @@ -3,14 +3,12 @@ #![feature(iter_array_chunks)] // TODO: address these lints #![allow(unused)] -#![allow(clippy::type_complexity)] #![allow(private_interfaces)] mod compact; pub mod decoding; mod deserializers; pub mod processed_block_trace; -pub mod proof_gen_types; pub mod trace_protocol; pub mod types; pub mod utils; diff --git a/protocol_decoder/src/processed_block_trace.rs b/protocol_decoder/src/processed_block_trace.rs index bc8fdf1ee..1570bfdd0 100644 --- a/protocol_decoder/src/processed_block_trace.rs +++ b/protocol_decoder/src/processed_block_trace.rs @@ -346,6 +346,9 @@ fn create_empty_code_access_map() -> HashMap> { HashMap::from_iter(once((EMPTY_CODE_HASH, Vec::new()))) } +pub(crate) type StorageAccess = Vec; +pub(crate) type StorageWrite = Vec<(HashedStorageAddrNibbles, Vec)>; + /// Note that "*_accesses" includes writes. #[derive(Debug, Default)] pub(crate) struct NodesUsedByTxn { @@ -353,8 +356,8 @@ pub(crate) struct NodesUsedByTxn { pub(crate) state_writes: Vec<(HashedAccountAddr, StateTrieWrites)>, // Note: All entries in `storage_writes` also appear in `storage_accesses`. 
- pub(crate) storage_accesses: Vec<(Nibbles, Vec)>, - pub(crate) storage_writes: Vec<(Nibbles, Vec<(HashedStorageAddrNibbles, Vec)>)>, + pub(crate) storage_accesses: Vec<(Nibbles, StorageAccess)>, + pub(crate) storage_writes: Vec<(Nibbles, StorageWrite)>, pub(crate) state_accounts_with_no_accesses_but_storage_tries: HashMap, pub(crate) self_destructed_accounts: Vec, diff --git a/protocol_decoder/src/proof_gen_types.rs b/protocol_decoder/src/proof_gen_types.rs deleted file mode 100644 index 7fb02287b..000000000 --- a/protocol_decoder/src/proof_gen_types.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::borrow::Borrow; - -use ethereum_types::U256; -use plonky2_evm::proof::ExtraBlockData; -use serde::{Deserialize, Serialize}; - -use crate::types::{TrieRootHash, TxnIdx}; - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct ProofBeforeAndAfterDeltas { - pub gas_used_before: U256, - pub gas_used_after: U256, -} - -impl> From for ProofBeforeAndAfterDeltas { - fn from(v: T) -> Self { - let b = v.borrow(); - - Self { - gas_used_before: b.gas_used_before, - gas_used_after: b.gas_used_after, - } - } -} - -impl ProofBeforeAndAfterDeltas { - pub fn into_extra_block_data( - self, - checkpoint_state_trie_root: TrieRootHash, - txn_start: TxnIdx, - txn_end: TxnIdx, - ) -> ExtraBlockData { - ExtraBlockData { - checkpoint_state_trie_root, - txn_number_before: txn_start.into(), - txn_number_after: txn_end.into(), - gas_used_before: self.gas_used_before, - gas_used_after: self.gas_used_after, - } - } -} diff --git a/protocol_decoder/src/trace_protocol.rs b/protocol_decoder/src/trace_protocol.rs index a062a2cd2..323dff5e5 100644 --- a/protocol_decoder/src/trace_protocol.rs +++ b/protocol_decoder/src/trace_protocol.rs @@ -84,7 +84,7 @@ pub struct TrieUncompressed {} // TODO #[serde_as] -/// Compact representation of a trie (will likely be very close to https://github.com/ledgerwatch/erigon/blob/devel/docs/programmers_guide/witness_formal_spec.md) +/// Compact representation of a trie (will likely be very close to ) #[derive(Debug, Deserialize, Serialize)] pub struct TrieCompact(#[serde_as(as = "FromInto")] pub Vec); diff --git a/protocol_decoder/src/types.rs b/protocol_decoder/src/types.rs index 2cc7af73a..a138abaf2 100644 --- a/protocol_decoder/src/types.rs +++ b/protocol_decoder/src/types.rs @@ -6,8 +6,6 @@ use plonky2_evm::{ }; use serde::{Deserialize, Serialize}; -use crate::proof_gen_types::ProofBeforeAndAfterDeltas; - pub type BlockHeight = u64; pub type Bloom = [U256; 8]; pub type CodeHash = H256; @@ -47,11 +45,7 @@ pub(crate) const ZERO_STORAGE_SLOT_VAL_RLPED: [u8; 1] = [128]; /// An `IR` (Intermediate Representation) for a given txn in a block that we can /// use to generate a proof for that txn. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct TxnProofGenIR { - pub txn_idx: TxnIdx, - pub gen_inputs: GenerationInputs, -} +pub type TxnProofGenIR = GenerationInputs; /// Other data that is needed for proof gen. 
#[derive(Clone, Debug, Deserialize, Serialize)] @@ -68,19 +62,3 @@ pub struct BlockLevelData { pub b_hashes: BlockHashes, pub withdrawals: Vec<(Address, U256)>, } -impl TxnProofGenIR { - pub fn b_height(&self) -> BlockHeight { - self.gen_inputs.block_metadata.block_number.as_u64() - } - - pub fn txn_idx(&self) -> TxnIdx { - self.txn_idx - } - - pub fn deltas(&self) -> ProofBeforeAndAfterDeltas { - ProofBeforeAndAfterDeltas { - gas_used_before: self.gen_inputs.gas_used_before, - gas_used_after: self.gen_inputs.gas_used_after, - } - } -} From 96c859a0b581b01e4ecd7db7f9dd345f3f7ead45 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Mon, 22 Jan 2024 14:44:01 -0500 Subject: [PATCH 200/208] Fix dummy proofs for withdrawal --- protocol_decoder/src/decoding.rs | 81 ++++++++++++++++++++++---------- 1 file changed, 57 insertions(+), 24 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index 83e8038ac..ed7c5ad4c 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -12,7 +12,7 @@ use eth_trie_utils::{ use ethereum_types::{Address, H256, U256}; use plonky2_evm::{ generation::{mpt::AccountRlp, GenerationInputs, TrieInputs}, - proof::TrieRoots, + proof::{ExtraBlockData, TrieRoots}, }; use thiserror::Error; @@ -95,7 +95,13 @@ impl ProcessedBlockTrace { ..Default::default() }; - let mut tot_gas_used = U256::zero(); + let mut extra_data = ExtraBlockData { + checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, + txn_number_before: U256::zero(), + txn_number_after: U256::zero(), + gas_used_before: U256::zero(), + gas_used_after: U256::zero(), + }; let mut txn_gen_inputs = self .txn_info @@ -109,7 +115,10 @@ impl ProcessedBlockTrace { &other_data.b_data.b_meta.block_beneficiary, )?; - let new_tot_gas_used = tot_gas_used + txn_info.meta.gas_used; + // For each non-dummy txn, we increment `txn_number_after` by 1, and + // update `gas_used_after` accordingly. + extra_data.txn_number_after += U256::one(); + extra_data.gas_used_after = txn_info.meta.gas_used.into(); Self::apply_deltas_to_trie_state( &mut curr_block_tries, @@ -120,22 +129,25 @@ impl ProcessedBlockTrace { let trie_roots_after = calculate_trie_input_hashes(&curr_block_tries); let gen_inputs = GenerationInputs { - txn_number_before: txn_idx.into(), - gas_used_before: tot_gas_used, - gas_used_after: new_tot_gas_used, + txn_number_before: extra_data.txn_number_before, + gas_used_before: extra_data.gas_used_before, + gas_used_after: extra_data.gas_used_after, signed_txn: txn_info.meta.txn_bytes, withdrawals: Vec::default(), /* Only ever set in a dummy txn at the end of * the block (see `[add_withdrawals_to_txns]` * for more info). */ tries, trie_roots_after, - checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, + checkpoint_state_trie_root: extra_data.checkpoint_state_trie_root, contract_code: txn_info.contract_code_accessed, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), }; - tot_gas_used = new_tot_gas_used; + // After processing a transaction, we update the remaining accumulators + // for the next transaction. 
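// Worked example (hypothetical gas amounts): for a block whose two txns use
// 21_000 and 50_000 gas respectively, the per-txn windows come out as:
//
//     txn 0: txn_number (before, after) = (0, 1), gas_used = (0, 21_000)
//     txn 1: txn_number (before, after) = (1, 2), gas_used = (21_000, 71_000)
//
// Each iteration ends by rolling the "after" values into the "before" slots.
// (The running gas sum assumes the `+=` accumulation from the later
// "Fix gas update" patch.)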
+ extra_data.txn_number_before += U256::one(); + extra_data.gas_used_before = extra_data.gas_used_after; Ok(gen_inputs) }) @@ -144,6 +156,7 @@ impl ProcessedBlockTrace { let dummies_added = Self::pad_gen_inputs_with_dummy_inputs_if_needed( &mut txn_gen_inputs, &other_data, + &extra_data, &initial_tries_for_dummies, ); @@ -151,6 +164,7 @@ impl ProcessedBlockTrace { Self::add_withdrawals_to_txns( &mut txn_gen_inputs, &other_data, + &extra_data, &mut curr_block_tries, self.withdrawals, dummies_added, @@ -199,6 +213,7 @@ impl ProcessedBlockTrace { storage_tries, }) } + fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: NodesUsedByTxn, @@ -287,21 +302,23 @@ impl ProcessedBlockTrace { fn pad_gen_inputs_with_dummy_inputs_if_needed( gen_inputs: &mut Vec, other_data: &OtherBlockData, - initial_trie_state: &PartialTrieState, + extra_data: &ExtraBlockData, + initial_tries: &PartialTrieState, ) -> bool { match gen_inputs.len() { 0 => { // Need to pad with two dummy entries. gen_inputs.extend(create_dummy_txn_pair_for_empty_block( other_data, - initial_trie_state, + extra_data, + initial_tries, )); true } 1 => { // Just need one. - let dummy_txn = create_dummy_gen_input(other_data, initial_trie_state, 0); + let dummy_txn = create_dummy_gen_input(other_data, extra_data, initial_tries); gen_inputs.insert(0, dummy_txn); true @@ -320,6 +337,7 @@ impl ProcessedBlockTrace { fn add_withdrawals_to_txns( txn_ir: &mut Vec, other_data: &OtherBlockData, + extra_data: &ExtraBlockData, final_trie_state: &mut PartialTrieState, withdrawals: Vec<(Address, U256)>, dummies_already_added: bool, @@ -334,7 +352,7 @@ impl ProcessedBlockTrace { // Dummy state will be the state after the final txn. let mut withdrawal_dummy = - create_dummy_gen_input(other_data, final_trie_state, txn_idx_of_dummy_entry); + create_dummy_gen_input(other_data, extra_data, final_trie_state); Self::update_trie_state_from_withdrawals( &withdrawals, @@ -435,35 +453,50 @@ fn create_fully_hashed_out_sub_partial_trie(trie: &HashedPartialTrie) -> HashedP fn create_dummy_txn_pair_for_empty_block( other_data: &OtherBlockData, - initial_trie_state: &PartialTrieState, + extra_data: &ExtraBlockData, + final_tries: &PartialTrieState, ) -> [TxnProofGenIR; 2] { [ - create_dummy_gen_input(other_data, initial_trie_state, 0), - create_dummy_gen_input(other_data, initial_trie_state, 0), + create_dummy_gen_input(other_data, extra_data, final_tries), + create_dummy_gen_input(other_data, extra_data, final_tries), ] } fn create_dummy_gen_input( other_data: &OtherBlockData, - initial_trie_state: &PartialTrieState, - txn_idx: TxnIdx, + extra_data: &ExtraBlockData, + final_tries: &PartialTrieState, ) -> TxnProofGenIR { - let tries = create_dummy_proof_trie_inputs(initial_trie_state); + let tries = create_dummy_proof_trie_inputs(final_tries); let trie_roots_after = TrieRoots { state_root: tries.state_trie.hash(), - transactions_root: EMPTY_TRIE_HASH, - receipts_root: EMPTY_TRIE_HASH, + transactions_root: tries.transactions_trie.hash(), + receipts_root: tries.receipts_trie.hash(), }; + // Sanity checks + assert_eq!( + extra_data.txn_number_before, extra_data.txn_number_after, + "Txn numbers before/after differ in a dummy payload with no txn!" + ); + assert_eq!( + extra_data.gas_used_before, extra_data.gas_used_after, + "Gas used before/after differ in a dummy payload with no txn!" 
+ ); + GenerationInputs { signed_txn: None, tries, trie_roots_after, - checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, + checkpoint_state_trie_root: extra_data.checkpoint_state_trie_root, block_metadata: other_data.b_data.b_meta.clone(), block_hashes: other_data.b_data.b_hashes.clone(), - ..GenerationInputs::default() + txn_number_before: extra_data.txn_number_before, + gas_used_before: extra_data.gas_used_before, + gas_used_after: extra_data.gas_used_after, + contract_code: HashMap::default(), + withdrawals: vec![], // this is set after creating dummy payloads } } @@ -490,8 +523,8 @@ fn create_dummy_proof_trie_inputs(final_trie_state: &PartialTrieState) -> TrieIn TrieInputs { state_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.state), - transactions_trie: HashedPartialTrie::default(), - receipts_trie: HashedPartialTrie::default(), + transactions_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.txn), + receipts_trie: create_fully_hashed_out_sub_partial_trie(&final_trie_state.receipt), storage_tries: partial_sub_storage_tries, } } From 572bbd8489c9dab39aac1822756c53311b3eec74 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Fri, 26 Jan 2024 11:52:45 -0500 Subject: [PATCH 201/208] Fix gas update --- protocol_decoder/src/decoding.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index ed7c5ad4c..ba98ab2ee 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -118,7 +118,7 @@ impl ProcessedBlockTrace { // For each non-dummy txn, we increment `txn_number_after` by 1, and // update `gas_used_after` accordingly. extra_data.txn_number_after += U256::one(); - extra_data.gas_used_after = txn_info.meta.gas_used.into(); + extra_data.gas_used_after += txn_info.meta.gas_used.into(); Self::apply_deltas_to_trie_state( &mut curr_block_tries, From ee02225bcfa0094470a5eba04d0679b6a7645faf Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Tue, 30 Jan 2024 12:53:04 -0500 Subject: [PATCH 202/208] Fix padding for single txn blocks --- protocol_decoder/src/decoding.rs | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/protocol_decoder/src/decoding.rs b/protocol_decoder/src/decoding.rs index ba98ab2ee..1e401fd32 100644 --- a/protocol_decoder/src/decoding.rs +++ b/protocol_decoder/src/decoding.rs @@ -158,6 +158,8 @@ impl ProcessedBlockTrace { &other_data, &extra_data, &initial_tries_for_dummies, + &curr_block_tries, + !self.withdrawals.is_empty(), ); if !self.withdrawals.is_empty() { @@ -304,10 +306,13 @@ impl ProcessedBlockTrace { other_data: &OtherBlockData, extra_data: &ExtraBlockData, initial_tries: &PartialTrieState, + final_tries: &PartialTrieState, + has_withdrawals: bool, ) -> bool { match gen_inputs.len() { 0 => { - // Need to pad with two dummy entries. + debug_assert!(initial_tries.state == final_tries.state); + // We need to pad with two dummy entries. gen_inputs.extend(create_dummy_txn_pair_for_empty_block( other_data, extra_data, @@ -317,9 +322,23 @@ impl ProcessedBlockTrace { true } 1 => { - // Just need one. - let dummy_txn = create_dummy_gen_input(other_data, extra_data, initial_tries); - gen_inputs.insert(0, dummy_txn); + // We just need one dummy entry. + // If there are withdrawals, we will need to append them at the end of the block + // execution, in which case we directly append the dummy proof + // after the only txn of this block. 
+                    // If there are no withdrawals, then the dummy proof will be prepended to the
+                    // actual txn.
+                    match has_withdrawals {
+                        false => {
+                            let dummy_txn =
+                                create_dummy_gen_input(other_data, extra_data, initial_tries);
+                            gen_inputs.insert(0, dummy_txn)
+                        }
+                        true => {
+                            let dummy_txn = create_dummy_gen_input(other_data, extra_data, final_tries);
+                            gen_inputs.push(dummy_txn)
+                        }
+                    };
 
                 true
             }

From 6264580d1cd9510cbafb3349d82a3a22c70563d6 Mon Sep 17 00:00:00 2001
From: Robin Salen
Date: Tue, 6 Feb 2024 15:58:01 -0500
Subject: [PATCH 203/208] Change constants and update
 ProverStateBuilder::default() description

---
 plonky_block_proof_gen/src/constants.rs    | 14 +++++++-------
 plonky_block_proof_gen/src/prover_state.rs | 10 +++++++---
 2 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/plonky_block_proof_gen/src/constants.rs b/plonky_block_proof_gen/src/constants.rs
index 4ec011592..808f9f2b7 100644
--- a/plonky_block_proof_gen/src/constants.rs
+++ b/plonky_block_proof_gen/src/constants.rs
@@ -3,16 +3,16 @@
 use core::ops::Range;
 
 /// Default range to be used for the `ArithmeticStark` table.
-pub(crate) const DEFAULT_ARITHMETIC_RANGE: Range<usize> = 16..20;
+pub(crate) const DEFAULT_ARITHMETIC_RANGE: Range<usize> = 16..28;
 /// Default range to be used for the `BytePackingStark` table.
-pub(crate) const DEFAULT_BYTE_PACKING_RANGE: Range<usize> = 10..20;
+pub(crate) const DEFAULT_BYTE_PACKING_RANGE: Range<usize> = 9..28;
 /// Default range to be used for the `CpuStark` table.
-pub(crate) const DEFAULT_CPU_RANGE: Range<usize> = 12..22;
+pub(crate) const DEFAULT_CPU_RANGE: Range<usize> = 12..28;
 /// Default range to be used for the `KeccakStark` table.
-pub(crate) const DEFAULT_KECCAK_RANGE: Range<usize> = 14..17;
+pub(crate) const DEFAULT_KECCAK_RANGE: Range<usize> = 14..25;
 /// Default range to be used for the `KeccakSpongeStark` table.
-pub(crate) const DEFAULT_KECCAK_SPONGE_RANGE: Range<usize> = 9..14;
+pub(crate) const DEFAULT_KECCAK_SPONGE_RANGE: Range<usize> = 9..25;
 /// Default range to be used for the `LogicStark` table.
-pub(crate) const DEFAULT_LOGIC_RANGE: Range<usize> = 12..16;
+pub(crate) const DEFAULT_LOGIC_RANGE: Range<usize> = 12..28;
 /// Default range to be used for the `MemoryStark` table.
-pub(crate) const DEFAULT_MEMORY_RANGE: Range<usize> = 17..25;
+pub(crate) const DEFAULT_MEMORY_RANGE: Range<usize> = 17..30;
diff --git a/plonky_block_proof_gen/src/prover_state.rs b/plonky_block_proof_gen/src/prover_state.rs
index 6f7e30ef4..0f3cb99aa 100644
--- a/plonky_block_proof_gen/src/prover_state.rs
+++ b/plonky_block_proof_gen/src/prover_state.rs
@@ -32,10 +32,14 @@ pub struct ProverStateBuilder {
 }
 
 impl Default for ProverStateBuilder {
+    /// Generates a new builder from a set of default ranges.
+    /// These ranges should be sufficient to prove any transaction,
+    /// but will require a significant amount of RAM (around 30GB).
+    ///
+    /// Specifying shorter ranges will allow for a lower memory
+    /// consumption, with the drawback of possibly not being sufficient
+    /// for some transactions.
     fn default() -> Self {
-        // The default ranges are somewhat arbitrary, but should be enough for testing
-        // purposes against most transactions.
-        // Some heavy contract deployments may require bumping these ranges though.
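// Illustration of the trade-off described in the new doc comment (sketch; the
// per-table setter names on `ProverStateBuilder` are assumed here, not shown
// in this diff):
//
//     // Narrower ranges mean smaller circuits and less RAM than the ~30GB
//     // default, at the risk of some heavy txns no longer fitting.
//     let p_state = ProverStateBuilder::default()
//         .set_arithmetic_circuit_size(16..22)
//         .set_memory_circuit_size(17..26)
//         .build();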
Self { arithmetic_circuit_size: DEFAULT_ARITHMETIC_RANGE, byte_packing_circuit_size: DEFAULT_BYTE_PACKING_RANGE, From 8875dc4aebb480f364d0b0327ec6c8135081d3f0 Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Tue, 6 Feb 2024 16:06:10 -0500 Subject: [PATCH 204/208] Pin nightly to working version --- .github/workflows/ci.yml | 7 +++++-- rust-toolchain.toml | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1af134c94..03b6bfbed 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,9 @@ jobs: uses: actions/checkout@v4 - name: Install nightly toolchain - uses: dtolnay/rust-toolchain@nightly + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2024-02-01 - name: Set up rust cache uses: Swatinem/rust-cache@v2 @@ -69,8 +71,9 @@ jobs: uses: actions/checkout@v4 - name: Install nightly toolchain - uses: dtolnay/rust-toolchain@nightly + uses: dtolnay/rust-toolchain@master with: + toolchain: nightly-2024-02-01 components: rustfmt, clippy - name: Set up rust cache diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 5d56faf9a..78c155af2 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly" +channel = "nightly-2024-02-01" From b56dbd4faf4b4d7070f0f130c8a5337e4c5496e6 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 12 Feb 2024 12:04:38 -0700 Subject: [PATCH 205/208] Prepared `README.md` to merge into `zk_evm` --- README.md => protocol_decoder/README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename README.md => protocol_decoder/README.md (100%) diff --git a/README.md b/protocol_decoder/README.md similarity index 100% rename from README.md rename to protocol_decoder/README.md From 24501f4b60fe98c80c96eae083cd7111869a7427 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 12 Feb 2024 12:54:48 -0700 Subject: [PATCH 206/208] Renamed sub-repos - Renamed `protocol_decoder` --> `trace_decoder`. - Renamed `plonky_block_proof_gen` --> `proof_gen`. 
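Downstream users track this rename with a mechanical import update, e.g.:

    // Before:
    // use protocol_decoder::types::TxnProofGenIR;
    // use plonky_block_proof_gen::proof_gen::generate_txn_proof;
    //
    // After:
    use trace_decoder::types::TxnProofGenIR;
    use proof_gen::proof_gen::generate_txn_proof;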
--- .github/workflows/ci.yml | 8 ++++---- Cargo.toml | 2 +- {plonky_block_proof_gen => proof_gen}/Cargo.toml | 4 ++-- {plonky_block_proof_gen => proof_gen}/README.md | 1 - {plonky_block_proof_gen => proof_gen}/src/constants.rs | 0 {plonky_block_proof_gen => proof_gen}/src/lib.rs | 6 +++--- {plonky_block_proof_gen => proof_gen}/src/proof_gen.rs | 2 +- {plonky_block_proof_gen => proof_gen}/src/proof_types.rs | 2 +- {plonky_block_proof_gen => proof_gen}/src/prover_state.rs | 0 {plonky_block_proof_gen => proof_gen}/src/types.rs | 0 .../src/verifier_state.rs | 0 {protocol_decoder => trace_decoder}/Cargo.toml | 2 +- {protocol_decoder => trace_decoder}/README.md | 2 +- .../src/compact/compact_debug_tools.rs | 0 .../src/compact/compact_prestate_processing.rs | 0 .../src/compact/compact_to_partial_trie.rs | 0 .../src/compact/complex_test_payloads.rs | 0 .../src/compact/large_test_payloads/test_payload_5.txt | 0 .../src/compact/large_test_payloads/test_payload_6.txt | 0 {protocol_decoder => trace_decoder}/src/compact/mod.rs | 0 {protocol_decoder => trace_decoder}/src/decoding.rs | 0 {protocol_decoder => trace_decoder}/src/deserializers.rs | 0 {protocol_decoder => trace_decoder}/src/lib.rs | 0 .../src/processed_block_trace.rs | 0 {protocol_decoder => trace_decoder}/src/trace_protocol.rs | 0 {protocol_decoder => trace_decoder}/src/types.rs | 0 {protocol_decoder => trace_decoder}/src/utils.rs | 0 27 files changed, 14 insertions(+), 15 deletions(-) rename {plonky_block_proof_gen => proof_gen}/Cargo.toml (84%) rename {plonky_block_proof_gen => proof_gen}/README.md (92%) rename {plonky_block_proof_gen => proof_gen}/src/constants.rs (100%) rename {plonky_block_proof_gen => proof_gen}/src/lib.rs (95%) rename {plonky_block_proof_gen => proof_gen}/src/proof_gen.rs (98%) rename {plonky_block_proof_gen => proof_gen}/src/proof_types.rs (98%) rename {plonky_block_proof_gen => proof_gen}/src/prover_state.rs (100%) rename {plonky_block_proof_gen => proof_gen}/src/types.rs (100%) rename {plonky_block_proof_gen => proof_gen}/src/verifier_state.rs (100%) rename {protocol_decoder => trace_decoder}/Cargo.toml (96%) rename {protocol_decoder => trace_decoder}/README.md (95%) rename {protocol_decoder => trace_decoder}/src/compact/compact_debug_tools.rs (100%) rename {protocol_decoder => trace_decoder}/src/compact/compact_prestate_processing.rs (100%) rename {protocol_decoder => trace_decoder}/src/compact/compact_to_partial_trie.rs (100%) rename {protocol_decoder => trace_decoder}/src/compact/complex_test_payloads.rs (100%) rename {protocol_decoder => trace_decoder}/src/compact/large_test_payloads/test_payload_5.txt (100%) rename {protocol_decoder => trace_decoder}/src/compact/large_test_payloads/test_payload_6.txt (100%) rename {protocol_decoder => trace_decoder}/src/compact/mod.rs (100%) rename {protocol_decoder => trace_decoder}/src/decoding.rs (100%) rename {protocol_decoder => trace_decoder}/src/deserializers.rs (100%) rename {protocol_decoder => trace_decoder}/src/lib.rs (100%) rename {protocol_decoder => trace_decoder}/src/processed_block_trace.rs (100%) rename {protocol_decoder => trace_decoder}/src/trace_protocol.rs (100%) rename {protocol_decoder => trace_decoder}/src/types.rs (100%) rename {protocol_decoder => trace_decoder}/src/utils.rs (100%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 03b6bfbed..e363b5249 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,16 +37,16 @@ jobs: with: cache-on-failure: true - - name: Check in protocol_decoder subdirectory - run: 
cargo check --manifest-path protocol_decoder/Cargo.toml + - name: Check in trace_decoder subdirectory + run: cargo check --manifest-path trace_decoder/Cargo.toml env: RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 RUST_LOG: 1 CARGO_INCREMENTAL: 1 RUST_BACKTRACE: 1 - - name: Check in plonky_block_proof_gen subdirectory - run: cargo check --manifest-path plonky_block_proof_gen/Cargo.toml + - name: Check in proof_gen subdirectory + run: cargo check --manifest-path proof_gen/Cargo.toml env: RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 RUST_LOG: 1 diff --git a/Cargo.toml b/Cargo.toml index d80f93ec2..585d5764d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["mpt_trie", "plonky_block_proof_gen", "protocol_decoder"] +members = ["mpt_trie", "proof_gen", "trace_decoder"] resolver = "2" [workspace.dependencies] diff --git a/plonky_block_proof_gen/Cargo.toml b/proof_gen/Cargo.toml similarity index 84% rename from plonky_block_proof_gen/Cargo.toml rename to proof_gen/Cargo.toml index 898cca66c..2e4f8202a 100644 --- a/plonky_block_proof_gen/Cargo.toml +++ b/proof_gen/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "plonky_block_proof_gen" +name = "proof_gen" description = "Generates block proofs from zero proof IR." version = "0.1.0" authors = ["Polygon Zero "] @@ -12,5 +12,5 @@ log = { workspace = true } paste = "1.0.14" plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "265d46a96ecfec49a32973f66f8aa811586c5d4a" } plonky2_evm = { workspace = true } -protocol_decoder = { path = "../protocol_decoder" } +trace_decoder = { path = "../trace_decoder" } serde = { workspace = true } diff --git a/plonky_block_proof_gen/README.md b/proof_gen/README.md similarity index 92% rename from plonky_block_proof_gen/README.md rename to proof_gen/README.md index d7ef9e363..9b8ab599f 100644 --- a/plonky_block_proof_gen/README.md +++ b/proof_gen/README.md @@ -6,7 +6,6 @@ For the time being, the only library that produces proof IR is currently [plonky # General Usage (Extremely rough, will change) -In [proof_gen.rs](https://github.com/0xPolygonZero/plonky-block-proof-gen/blob/main/src/proof_gen.rs), there are three core functions: - `generate_txn_proof` - `generate_agg_proof` diff --git a/plonky_block_proof_gen/src/constants.rs b/proof_gen/src/constants.rs similarity index 100% rename from plonky_block_proof_gen/src/constants.rs rename to proof_gen/src/constants.rs diff --git a/plonky_block_proof_gen/src/lib.rs b/proof_gen/src/lib.rs similarity index 95% rename from plonky_block_proof_gen/src/lib.rs rename to proof_gen/src/lib.rs index 17c23c58e..7907ae3b9 100644 --- a/plonky_block_proof_gen/src/lib.rs +++ b/proof_gen/src/lib.rs @@ -19,7 +19,7 @@ //! `memory`. //! //! ```no_run -//! # use plonky_block_proof_gen::prover_state::ProverStateBuilder; +//! # use proof_gen::prover_state::ProverStateBuilder; //! let mut builder = ProverStateBuilder::default(); //! //! // Change Cpu and Memory tables supported ranges. @@ -100,8 +100,8 @@ //! `VerifierState`, to allow anyone to easily verify block proofs. //! //! ```compile_fail -//! # use plonky_block_proof_gen::prover_state::ProverStateBuilder; -//! # use plonky_block_proof_gen::verifier_state::VerifierState; +//! # use proof_gen::prover_state::ProverStateBuilder; +//! # use proof_gen::verifier_state::VerifierState; //! let mut builder = ProverStateBuilder::default(); //! //! // Generate a `ProverState` from the builder. 
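A sketch of the prover/verifier round trip that the `lib.rs` docs above
describe (the `build()`, `.into()` conversion, and `verify()` calls are
assumed from those docs rather than shown in this diff):

    let prover_state = ProverStateBuilder::default().build();

    // Derive a `VerifierState` so that anyone can check block proofs without
    // holding the (much larger) prover state.
    let verifier_state: VerifierState = prover_state.into();
    verifier_state.verify(&block_proof.intern)?;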
diff --git a/plonky_block_proof_gen/src/proof_gen.rs b/proof_gen/src/proof_gen.rs similarity index 98% rename from plonky_block_proof_gen/src/proof_gen.rs rename to proof_gen/src/proof_gen.rs index e63b4181f..5175d78fe 100644 --- a/plonky_block_proof_gen/src/proof_gen.rs +++ b/proof_gen/src/proof_gen.rs @@ -5,7 +5,7 @@ use std::sync::{atomic::AtomicBool, Arc}; use plonky2::util::timing::TimingTree; use plonky2_evm::{all_stark::AllStark, config::StarkConfig}; -use protocol_decoder::types::TxnProofGenIR; +use trace_decoder::types::TxnProofGenIR; use crate::{ proof_types::{AggregatableProof, GeneratedAggProof, GeneratedBlockProof, GeneratedTxnProof}, diff --git a/plonky_block_proof_gen/src/proof_types.rs b/proof_gen/src/proof_types.rs similarity index 98% rename from plonky_block_proof_gen/src/proof_types.rs rename to proof_gen/src/proof_types.rs index ce2a58252..f64fc9dcd 100644 --- a/plonky_block_proof_gen/src/proof_types.rs +++ b/proof_gen/src/proof_types.rs @@ -2,8 +2,8 @@ //! generation process. use plonky2_evm::proof::PublicValues; -use protocol_decoder::types::BlockHeight; use serde::{Deserialize, Serialize}; +use trace_decoder::types::BlockHeight; use crate::types::PlonkyProofIntern; diff --git a/plonky_block_proof_gen/src/prover_state.rs b/proof_gen/src/prover_state.rs similarity index 100% rename from plonky_block_proof_gen/src/prover_state.rs rename to proof_gen/src/prover_state.rs diff --git a/plonky_block_proof_gen/src/types.rs b/proof_gen/src/types.rs similarity index 100% rename from plonky_block_proof_gen/src/types.rs rename to proof_gen/src/types.rs diff --git a/plonky_block_proof_gen/src/verifier_state.rs b/proof_gen/src/verifier_state.rs similarity index 100% rename from plonky_block_proof_gen/src/verifier_state.rs rename to proof_gen/src/verifier_state.rs diff --git a/protocol_decoder/Cargo.toml b/trace_decoder/Cargo.toml similarity index 96% rename from protocol_decoder/Cargo.toml rename to trace_decoder/Cargo.toml index d774ec4da..7d0f3f5e1 100644 --- a/protocol_decoder/Cargo.toml +++ b/trace_decoder/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "protocol_decoder" +name = "trace_decoder" authors = ["Polygon Zero "] version = "0.1.0" edition = "2021" diff --git a/protocol_decoder/README.md b/trace_decoder/README.md similarity index 95% rename from protocol_decoder/README.md rename to trace_decoder/README.md index cfa23072d..e83296a55 100644 --- a/protocol_decoder/README.md +++ b/trace_decoder/README.md @@ -4,7 +4,7 @@ A flexible protocol that clients (eg. full nodes) can use to easily generate blo ## Specification -Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. The specification itself is in the repo [here](protocol_decoder/src/trace_protocol.rs). +Temporary [high-level overview and comparison](docs/usage_seq_diagrams.md) to what the old Edge setup used to look like. The specification itself is in the repo [here](trace_decoder/src/trace_protocol.rs). Because processing the incoming proof protocol payload is not a resource bottleneck, the design is not worrying too much about performance. Instead, the core focus is flexibility in clients creating their own implementation, where the protocol supports multiple ways to provide different pieces of data. For example, there are multiple different formats available to provide the trie pre-images in, and the implementor can choose whichever is closest to its own internal data structures. 
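Taken together, the renamed crates split the pipeline cleanly: `trace_decoder`
turns a block trace into IR, and `proof_gen` proves it. A minimal sketch of
the glue, using the signatures visible in these diffs (the `build()` name is
assumed; error handling elided):

    use proof_gen::proof_gen::generate_txn_proof;
    use proof_gen::prover_state::ProverStateBuilder;
    use trace_decoder::types::TxnProofGenIR;

    fn prove_txns(ir: Vec<TxnProofGenIR>) {
        let p_state = ProverStateBuilder::default().build();

        for gen_inputs in ir {
            // `None`: no abort signal for this run.
            generate_txn_proof(&p_state, gen_inputs, None).expect("txn proof failed");
        }
    }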
diff --git a/protocol_decoder/src/compact/compact_debug_tools.rs b/trace_decoder/src/compact/compact_debug_tools.rs similarity index 100% rename from protocol_decoder/src/compact/compact_debug_tools.rs rename to trace_decoder/src/compact/compact_debug_tools.rs diff --git a/protocol_decoder/src/compact/compact_prestate_processing.rs b/trace_decoder/src/compact/compact_prestate_processing.rs similarity index 100% rename from protocol_decoder/src/compact/compact_prestate_processing.rs rename to trace_decoder/src/compact/compact_prestate_processing.rs diff --git a/protocol_decoder/src/compact/compact_to_partial_trie.rs b/trace_decoder/src/compact/compact_to_partial_trie.rs similarity index 100% rename from protocol_decoder/src/compact/compact_to_partial_trie.rs rename to trace_decoder/src/compact/compact_to_partial_trie.rs diff --git a/protocol_decoder/src/compact/complex_test_payloads.rs b/trace_decoder/src/compact/complex_test_payloads.rs similarity index 100% rename from protocol_decoder/src/compact/complex_test_payloads.rs rename to trace_decoder/src/compact/complex_test_payloads.rs diff --git a/protocol_decoder/src/compact/large_test_payloads/test_payload_5.txt b/trace_decoder/src/compact/large_test_payloads/test_payload_5.txt similarity index 100% rename from protocol_decoder/src/compact/large_test_payloads/test_payload_5.txt rename to trace_decoder/src/compact/large_test_payloads/test_payload_5.txt diff --git a/protocol_decoder/src/compact/large_test_payloads/test_payload_6.txt b/trace_decoder/src/compact/large_test_payloads/test_payload_6.txt similarity index 100% rename from protocol_decoder/src/compact/large_test_payloads/test_payload_6.txt rename to trace_decoder/src/compact/large_test_payloads/test_payload_6.txt diff --git a/protocol_decoder/src/compact/mod.rs b/trace_decoder/src/compact/mod.rs similarity index 100% rename from protocol_decoder/src/compact/mod.rs rename to trace_decoder/src/compact/mod.rs diff --git a/protocol_decoder/src/decoding.rs b/trace_decoder/src/decoding.rs similarity index 100% rename from protocol_decoder/src/decoding.rs rename to trace_decoder/src/decoding.rs diff --git a/protocol_decoder/src/deserializers.rs b/trace_decoder/src/deserializers.rs similarity index 100% rename from protocol_decoder/src/deserializers.rs rename to trace_decoder/src/deserializers.rs diff --git a/protocol_decoder/src/lib.rs b/trace_decoder/src/lib.rs similarity index 100% rename from protocol_decoder/src/lib.rs rename to trace_decoder/src/lib.rs diff --git a/protocol_decoder/src/processed_block_trace.rs b/trace_decoder/src/processed_block_trace.rs similarity index 100% rename from protocol_decoder/src/processed_block_trace.rs rename to trace_decoder/src/processed_block_trace.rs diff --git a/protocol_decoder/src/trace_protocol.rs b/trace_decoder/src/trace_protocol.rs similarity index 100% rename from protocol_decoder/src/trace_protocol.rs rename to trace_decoder/src/trace_protocol.rs diff --git a/protocol_decoder/src/types.rs b/trace_decoder/src/types.rs similarity index 100% rename from protocol_decoder/src/types.rs rename to trace_decoder/src/types.rs diff --git a/protocol_decoder/src/utils.rs b/trace_decoder/src/utils.rs similarity index 100% rename from protocol_decoder/src/utils.rs rename to trace_decoder/src/utils.rs From a867e03f3c01f845365f844a56b78445280d2fda Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 12 Feb 2024 12:56:24 -0700 Subject: [PATCH 207/208] Fixed an `https` link + cleanup --- proof_gen/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/proof_gen/README.md b/proof_gen/README.md index 9b8ab599f..b16e55589 100644 --- a/proof_gen/README.md +++ b/proof_gen/README.md @@ -4,8 +4,10 @@ Library for generating proofs from proof IR. For the time being, the only library that produces proof IR is currently [plonky-edge-block-trace-parser](https://github.com/0xPolygonZero/plonky-edge-block-trace-parser). Down the road, the IR will be produced by decoding the proof gen protocol. + # General Usage (Extremely rough, will change) +In [proof_gen.rs](https://github.com/0xPolygonZero/zk_evm/proof-gen/blob/main/src/proof_gen.rs), there are three core functions: - `generate_txn_proof` - `generate_agg_proof` From 9516245ce1d298067c0da825315eddccb1a49740 Mon Sep 17 00:00:00 2001 From: BGluth Date: Mon, 12 Feb 2024 12:56:58 -0700 Subject: [PATCH 208/208] Added `license` section to root `README.md` - I don't know how we actually want to organize this. The sub-repos also have their own license sections. --- README.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/README.md b/README.md index 07d9e00c8..ef89cd023 100644 --- a/README.md +++ b/README.md @@ -1 +1,14 @@ # zk_evm + +## License + +Licensed under either of + +* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.