Refactor: trace (#1341)
* done

* wip trace refactor

* remove code hashing for create case in collect_codes

* done

* better log

* clean collect_codes

* xx

* extcodecopy

* xx

* x

* x

* xx

* spelling

* x

* fix

* x

* m

* less assert

* build ChunkInfo from traces

* ChunkInfo partial eq

* cleanup
lispc authored Jun 17, 2024
1 parent 2f178fa commit 91ea9f6
Showing 20 changed files with 304 additions and 202 deletions.
20 changes: 20 additions & 0 deletions aggregator/src/batch.rs
@@ -41,6 +41,26 @@ pub struct BatchHash<const N_SNARKS: usize> {
}

impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
/// Build Batch hash from an ordered list of chunks. Will pad if needed
pub fn construct_with_unpadded(chunks: &[ChunkInfo]) -> Self {
assert_ne!(chunks.len(), 0);
assert!(chunks.len() <= N_SNARKS);
let mut chunks_with_padding = chunks.to_vec();
if chunks.len() < N_SNARKS {
log::warn!(
"chunk len({}) < N_SNARKS({}), padding...",
chunks.len(),
N_SNARKS
);
let last_chunk = chunks.last().unwrap();
let mut padding_chunk = last_chunk.clone();
padding_chunk.is_padding = true;
chunks_with_padding
.extend(std::iter::repeat(padding_chunk).take(N_SNARKS - chunks.len()));
}
Self::construct(&chunks_with_padding)
}

/// Build Batch hash from an ordered list of #N_SNARKS of chunks.
pub fn construct(chunks_with_padding: &[ChunkInfo]) -> Self {
assert_eq!(
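The padding rule in `construct_with_unpadded` is simple: clone the last real chunk, flag it as padding, and repeat it until there are exactly `N_SNARKS` entries, then delegate to `construct`. Below is a minimal, self-contained sketch of that rule; `MiniChunk` and `pad_to_n_snarks` are simplified stand-ins for illustration, not the crate's real `ChunkInfo` type or API.

```rust
// Sketch of the padding rule behind `construct_with_unpadded`.
// `MiniChunk` is a simplified stand-in for the crate's `ChunkInfo`.
#[derive(Clone, Debug)]
struct MiniChunk {
    data_hash: [u8; 32],
    is_padding: bool,
}

fn pad_to_n_snarks<const N_SNARKS: usize>(chunks: &[MiniChunk]) -> Vec<MiniChunk> {
    assert!(!chunks.is_empty() && chunks.len() <= N_SNARKS);
    let mut padded = chunks.to_vec();
    if chunks.len() < N_SNARKS {
        // Clone the last real chunk, mark it as padding, and repeat it.
        let mut padding = chunks.last().unwrap().clone();
        padding.is_padding = true;
        padded.extend(std::iter::repeat(padding).take(N_SNARKS - chunks.len()));
    }
    padded
}

fn main() {
    let real = vec![MiniChunk { data_hash: [7u8; 32], is_padding: false }];
    let padded = pad_to_n_snarks::<4>(&real);
    assert_eq!(padded.len(), 4);
    assert!(!padded[0].is_padding && padded[1].is_padding && padded[3].is_padding);
}
```

In the actual code, the padded list is then handed to `BatchHash::construct`, which keeps the invariant that it always sees exactly `N_SNARKS` chunks.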
24 changes: 20 additions & 4 deletions aggregator/src/blob.rs
@@ -173,7 +173,8 @@ impl<const N_SNARKS: usize> BatchData<N_SNARKS> {
N_BATCH_BYTES + Self::n_rows_digest()
}

pub(crate) fn new(num_valid_chunks: usize, chunks_with_padding: &[ChunkInfo]) -> Self {
/// Construct BatchData from chunks
pub fn new(num_valid_chunks: usize, chunks_with_padding: &[ChunkInfo]) -> Self {
assert!(num_valid_chunks > 0);
assert!(num_valid_chunks <= N_SNARKS);

@@ -191,7 +192,14 @@ impl<const N_SNARKS: usize> BatchData<N_SNARKS> {
.collect::<Vec<u32>>()
.try_into()
.unwrap();
assert!(chunk_sizes.iter().sum::<u32>() <= Self::n_rows_data() as u32);

if chunk_sizes.iter().sum::<u32>() > Self::n_rows_data() as u32 {
panic!(
"invalid chunk_sizes {}, n_rows_data {}",
chunk_sizes.iter().sum::<u32>(),
Self::n_rows_data()
)
}

// chunk data of the "last valid chunk" is repeated over the padded chunks for simplicity
// in calculating chunk_data_digest for those padded chunks. However, for the "chunk data"
@@ -269,14 +277,22 @@ impl<const N_SNARKS: usize> BatchData<N_SNARKS> {
}

/// Get the zstd encoded batch data bytes.
pub(crate) fn get_encoded_batch_data_bytes(&self) -> Vec<u8> {
pub fn get_encoded_batch_data_bytes(&self) -> Vec<u8> {
let batch_data_bytes = self.get_batch_data_bytes();
let mut encoder = init_zstd_encoder(None);
encoder
.set_pledged_src_size(Some(batch_data_bytes.len() as u64))
.expect("infallible");
encoder.write_all(&batch_data_bytes).expect("infallible");
encoder.finish().expect("infallible")
let encoded_bytes = encoder.finish().expect("infallible");
log::info!(
"compress batch data from {} to {}, compression ratio {:.2}, blob usage {:.3}",
batch_data_bytes.len(),
encoded_bytes.len(),
batch_data_bytes.len() as f32 / encoded_bytes.len() as f32,
encoded_bytes.len() as f32 / N_BLOB_BYTES as f32
);
encoded_bytes
}

/// Get the BLOB_WIDTH number of scalar field elements, as 32-bytes unsigned integers.
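The new logging in `get_encoded_batch_data_bytes` is pure arithmetic over the raw and encoded byte lengths. A small sketch of that arithmetic follows; `N_BLOB_BYTES` here is an assumed stand-in value (31 usable bytes per field element times 4096 elements), not necessarily the crate's exact constant.

```rust
// Sketch of the compression-ratio / blob-usage figures logged by
// `get_encoded_batch_data_bytes`. `N_BLOB_BYTES` is an assumed stand-in.
const N_BLOB_BYTES: usize = 31 * 4096;

fn compression_report(raw_len: usize, encoded_len: usize) -> (f32, f32) {
    let ratio = raw_len as f32 / encoded_len as f32; // > 1.0 means the batch shrank
    let blob_usage = encoded_len as f32 / N_BLOB_BYTES as f32; // fraction of one blob consumed
    (ratio, blob_usage)
}

fn main() {
    let (ratio, usage) = compression_report(120_000, 40_000);
    println!("compression ratio {ratio:.2}, blob usage {usage:.3}"); // 3.00 and 0.315
}
```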
59 changes: 56 additions & 3 deletions aggregator/src/chunk.rs
@@ -1,12 +1,12 @@
//! This module implements `Chunk` related data types.
//! A chunk is a list of blocks.
use eth_types::{base64, ToBigEndian, H256};
use eth_types::{base64, l2_types::BlockTrace, ToBigEndian, H256};
use ethers_core::utils::keccak256;
use serde::{Deserialize, Serialize};
use std::iter;
use zkevm_circuits::witness::Block;

#[derive(Default, Debug, Clone, Deserialize, Serialize)]
#[derive(Default, Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
/// A chunk is a set of continuous blocks.
/// ChunkInfo is metadata of chunk, with following fields:
/// - state root before this chunk
@@ -35,14 +35,66 @@ pub struct ChunkInfo {
}

impl ChunkInfo {
/// Construct by block traces
pub fn from_block_traces(traces: &[BlockTrace]) -> Self {
let data_bytes = iter::empty()
.chain(
// header part
traces.iter().flat_map(|b| b.da_encode_header()),
)
.chain(
// l1 msg hashes
traces.iter().flat_map(|b| {
b.transactions
.iter()
.filter(|tx| tx.is_l1_tx())
.flat_map(|tx| tx.tx_hash.to_fixed_bytes())
}),
)
.collect::<Vec<u8>>();

let data_hash = H256(keccak256(data_bytes));
log::debug!(
"chunk-hash: data hash = {}",
hex::encode(data_hash.to_fixed_bytes())
);

let tx_bytes = traces
.iter()
.flat_map(|b| {
b.transactions
.iter()
.filter(|tx| !tx.is_l1_tx())
.flat_map(|tx| tx.to_eth_tx(None, None, None, None).rlp().to_vec())
})
.collect::<Vec<u8>>();

let post_state_root = traces
.last()
.expect("at least 1 block needed")
.header
.state_root;
let withdraw_root = traces.last().unwrap().withdraw_trie_root;
let chain_id = traces.first().unwrap().chain_id;
let prev_state_root = traces.first().unwrap().storage_trace.root_before;

Self {
chain_id,
prev_state_root,
post_state_root,
withdraw_root,
data_hash,
tx_bytes,
is_padding: false,
}
}
/// Construct by a witness block.
pub fn from_witness_block(block: &Block, is_padding: bool) -> Self {
// <https://github.com/scroll-tech/zkevm-circuits/blob/25dd32aa316ec842ffe79bb8efe9f05f86edc33e/bus-mapping/src/circuit_input_builder.rs#L690>

let mut total_l1_popped = block.start_l1_queue_index;
log::debug!("chunk-hash: start_l1_queue_index = {}", total_l1_popped);
let data_bytes = iter::empty()
// .chain(block_headers.iter().flat_map(|(&block_num, block)| {
.chain(block.context.ctxs.iter().flat_map(|(b_num, b_ctx)| {
let num_l2_txs = block
.txs
@@ -69,6 +121,7 @@ impl ChunkInfo {
num_txs,
);

// https://github.com/scroll-tech/da-codec/blob/b842a0f961ad9180e16b50121ef667e15e071a26/encoding/codecv2/codecv2.go#L97
iter::empty()
// Block Values
.chain(b_ctx.number.as_u64().to_be_bytes())
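`ChunkInfo::from_block_traces` builds two byte streams before hashing: `data_bytes` (per-block DA-encoded headers followed by the L1 message hashes, then keccak-hashed into `data_hash`) and `tx_bytes` (the RLP of all L2 transactions). The sketch below mirrors that assembly with illustrative `MiniBlock`/`MiniTx` stand-ins instead of the real `BlockTrace` types, and omits the hashing step.

```rust
// Sketch of the byte-stream assembly in `from_block_traces`, with simplified
// stand-ins for `BlockTrace` and its transactions. Field and type names here
// are illustrative assumptions, not the crate's real API.
struct MiniTx {
    is_l1: bool,
    hash: [u8; 32], // tx hash, used for L1 messages
    rlp: Vec<u8>,   // signed-tx RLP, used for L2 transactions
}

struct MiniBlock {
    da_header: Vec<u8>, // per-block DA-encoded header
    txs: Vec<MiniTx>,
}

/// Returns (data_bytes, tx_bytes): the first is the keccak preimage of the
/// chunk's data_hash, the second feeds the batch/blob data.
fn chunk_byte_streams(blocks: &[MiniBlock]) -> (Vec<u8>, Vec<u8>) {
    let data_bytes: Vec<u8> = std::iter::empty()
        // header part
        .chain(blocks.iter().flat_map(|b| b.da_header.clone()))
        // l1 msg hashes
        .chain(blocks.iter().flat_map(|b| {
            b.txs
                .iter()
                .filter(|tx| tx.is_l1)
                .flat_map(|tx| tx.hash.to_vec())
        }))
        .collect();

    let tx_bytes: Vec<u8> = blocks
        .iter()
        .flat_map(|b| {
            b.txs
                .iter()
                .filter(|tx| !tx.is_l1)
                .flat_map(|tx| tx.rlp.clone())
        })
        .collect();

    (data_bytes, tx_bytes)
}

fn main() {
    let blocks = vec![MiniBlock {
        da_header: vec![1, 2, 3],
        txs: vec![
            MiniTx { is_l1: true, hash: [0xaa; 32], rlp: vec![] },
            MiniTx { is_l1: false, hash: [0xbb; 32], rlp: vec![0xc0] },
        ],
    }];
    let (data_bytes, tx_bytes) = chunk_byte_streams(&blocks);
    assert_eq!(data_bytes.len(), 3 + 32); // header bytes + one L1 tx hash
    assert_eq!(tx_bytes, vec![0xc0]);     // one L2 tx's RLP
}
```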
1 change: 1 addition & 0 deletions aggregator/src/lib.rs
@@ -27,6 +27,7 @@ mod tests;
pub use self::core::extract_proof_and_instances_with_pairing_check;
pub use aggregation::*;
pub use batch::BatchHash;
pub use blob::BatchData;
pub use chunk::ChunkInfo;
pub use compression::*;
pub use constants::MAX_AGG_SNARKS;
76 changes: 32 additions & 44 deletions bus-mapping/src/circuit_input_builder.rs
@@ -342,44 +342,35 @@ impl<'a> CircuitInputBuilder {
Ok(())
}

fn check_post_state(&self, post_states: &[eth_types::l2_types::AccountProofWrapper]) {
fn check_post_state(&self, post_states: &[eth_types::l2_types::AccountTrace]) {
for account_post_state in post_states {
let account_post_state = account_post_state.clone();
if let Some(address) = account_post_state.address {
let local_acc = self.sdb.get_account(&address).1;
log::trace!("local acc {local_acc:?}, trace acc {account_post_state:?}");
if local_acc.balance != account_post_state.balance.unwrap() {
log::error!("incorrect balance")
}
if local_acc.nonce != account_post_state.nonce.unwrap().into() {
log::error!("incorrect nonce")
let address = account_post_state.address;
let local_acc = self.sdb.get_account(&address).1;
log::trace!("local acc {local_acc:?}, trace acc {account_post_state:?}");
if local_acc.balance != account_post_state.balance {
log::error!("incorrect balance")
}
if local_acc.nonce != account_post_state.nonce.into() {
log::error!("incorrect nonce")
}
let p_hash = account_post_state.poseidon_code_hash;
if p_hash.is_zero() {
if !local_acc.is_empty() {
log::error!("incorrect poseidon_code_hash")
}
let p_hash = account_post_state.poseidon_code_hash.unwrap();
if p_hash.is_zero() {
if !local_acc.is_empty() {
log::error!("incorrect poseidon_code_hash")
}
} else {
if local_acc.code_hash != p_hash {
log::error!("incorrect poseidon_code_hash")
}
} else {
if local_acc.code_hash != p_hash {
log::error!("incorrect poseidon_code_hash")
}
let k_hash = account_post_state.keccak_code_hash.unwrap();
if k_hash.is_zero() {
if !local_acc.is_empty() {
log::error!("incorrect keccak_code_hash")
}
} else {
if local_acc.keccak_code_hash != k_hash {
log::error!("incorrect keccak_code_hash")
}
}
let k_hash = account_post_state.keccak_code_hash;
if k_hash.is_zero() {
if !local_acc.is_empty() {
log::error!("incorrect keccak_code_hash")
}
if let Some(storage) = account_post_state.storage {
let k = storage.key.unwrap();
let local_v = self.sdb.get_storage(&address, &k).1;
if *local_v != storage.value.unwrap() {
log::error!("incorrect storage for k = {k}");
}
} else {
if local_acc.keccak_code_hash != k_hash {
log::error!("incorrect keccak_code_hash")
}
}
}
@@ -591,16 +582,13 @@ impl<'a> CircuitInputBuilder {
} else {
GasCost(tx.gas - geth_trace.struct_logs[0].gas.0)
};
// EIP2930 not implemented
if tx.access_list.is_none() {
debug_assert_eq!(
steps_gas_cost,
real_gas_cost.as_u64(),
"begin step cost {:?}, precompile step cost {:?}",
begin_tx_steps[0].gas_cost,
begin_tx_steps.get(1).map(|st| st.gas_cost),
);
}
debug_assert_eq!(
steps_gas_cost,
real_gas_cost.as_u64(),
"begin step cost {:?}, next step cost {:?}",
begin_tx_steps[0].gas_cost,
begin_tx_steps.get(1).map(|st| st.gas_cost),
);
}

tx.steps_mut().extend(begin_tx_steps);
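With the trace type switched from `AccountProofWrapper` to `AccountTrace`, the post-state fields are no longer `Option`-wrapped, so `check_post_state` compares each field directly and only logs mismatches instead of unwrapping. A minimal sketch of that log-and-continue check pattern, using a hypothetical `MiniAccount` in place of the real state/trace account types:

```rust
// Sketch of the direct field-by-field post-state check. `MiniAccount` is a
// hypothetical stand-in for both the locally tracked account and the traced
// account; mismatches are only reported, mirroring the log-and-continue style.
// (The real check additionally treats a zero code hash as "the account must be empty".)
#[derive(Debug, Clone, PartialEq)]
struct MiniAccount {
    balance: u128,
    nonce: u64,
    keccak_code_hash: [u8; 32],
}

fn check_post_account(local: &MiniAccount, traced: &MiniAccount) -> bool {
    let mut ok = true;
    if local.balance != traced.balance {
        eprintln!("incorrect balance: local {} vs trace {}", local.balance, traced.balance);
        ok = false;
    }
    if local.nonce != traced.nonce {
        eprintln!("incorrect nonce: local {} vs trace {}", local.nonce, traced.nonce);
        ok = false;
    }
    if local.keccak_code_hash != traced.keccak_code_hash {
        eprintln!("incorrect keccak_code_hash");
        ok = false;
    }
    ok
}

fn main() {
    let local = MiniAccount { balance: 100, nonce: 1, keccak_code_hash: [0; 32] };
    let traced = MiniAccount { balance: 100, nonce: 2, keccak_code_hash: [0; 32] };
    assert!(!check_post_account(&local, &traced)); // nonce mismatch is reported
}
```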
8 changes: 4 additions & 4 deletions bus-mapping/src/circuit_input_builder/block.rs
@@ -208,22 +208,22 @@ impl Blocks {
eth_block: &eth_types::Block<eth_types::Transaction>,
circuits_params: CircuitsParams,
) -> Result<Self, Error> {
let mut block = Self {
let mut blocks = Self {
block_steps: BlockSteps::default(),
exp_events: Vec::new(),
chain_id,
start_l1_queue_index,
circuits_params,
..Default::default()
};
let info = Block::new_with_l1_queue_index(
let block = Block::new_with_l1_queue_index(
chain_id,
start_l1_queue_index,
history_hashes,
eth_block,
)?;
block.blocks.insert(info.number.as_u64(), info);
Ok(block)
blocks.add_block(block);
Ok(blocks)
}

/// Return the list of transactions of this block.
4 changes: 1 addition & 3 deletions bus-mapping/src/circuit_input_builder/l2.rs
@@ -48,9 +48,7 @@ impl CircuitInputBuilder {
&eth_block,
)?;
// override zeroed miner field with additional "coinbase" field in blocktrace
if let Some(address) = block_trace.coinbase.address {
block.coinbase = address;
}
block.coinbase = block_trace.coinbase.address;
let block_num = block.number.as_u64();
// TODO: should we check that the block number is in sequence?
self.block.add_block(block);
15 changes: 8 additions & 7 deletions bus-mapping/src/evm/opcodes/begin_end_tx.rs
@@ -283,20 +283,21 @@ pub fn gen_begin_tx_steps(state: &mut CircuitInputStateRef) -> Result<Vec<ExecSt
// Keccak table and verify the contract address.
if state.tx.is_create() {
// 1. add RLP-bytes for contract address to keccak circuit.
state.block.sha3_inputs.push({
let address_preimage = {
let mut stream = ethers_core::utils::rlp::RlpStream::new();
stream.begin_list(2);
stream.append(&caller_address);
stream.append(&nonce_prev);
stream.out().to_vec()
});
};
state.block.sha3_inputs.push(address_preimage);
// 2. add init code to keccak circuit.
let init_code = state.tx.input.as_slice();
let length = init_code.len();
state.block.sha3_inputs.push(init_code.to_vec());
let initcode = state.tx.input.clone();
let length = initcode.len();
state.block.sha3_inputs.push(initcode.clone());
// 3. add init code to copy circuit.
let code_hash = CodeDB::hash(init_code);
let bytes = Bytecode::from(init_code.to_vec())
let code_hash = state.code_db.insert(initcode.clone());
let bytes = Bytecode::from(initcode)
.code
.iter()
.map(|element| (element.value, element.is_code, false))
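The create path pushes the RLP of `(caller_address, nonce_prev)` into the keccak inputs because that preimage determines the new contract's address, which the Keccak table then verifies. A sketch of that derivation under stated assumptions: the `tiny_keccak` crate for keccak-256, a hand-rolled RLP encoding that is only valid for a 20-byte sender and a nonce below 128, and illustrative names throughout.

```rust
// Sketch of CREATE-address derivation: keccak256(rlp([sender, nonce]))[12..].
// Assumes the `tiny_keccak` crate; the RLP below is hand-rolled and only valid
// for a 20-byte sender and a nonce in 0..=127 (kept deliberately simple).
use tiny_keccak::{Hasher, Keccak};

fn keccak256(data: &[u8]) -> [u8; 32] {
    let mut out = [0u8; 32];
    let mut hasher = Keccak::v256();
    hasher.update(data);
    hasher.finalize(&mut out);
    out
}

fn create_address(sender: [u8; 20], nonce: u8) -> [u8; 20] {
    assert!(nonce < 0x80, "simplified RLP only handles single-byte nonces");
    // rlp([sender, nonce]) for this restricted case:
    //   list prefix 0xc0 + payload_len, then 0x94 + 20 address bytes,
    //   then the nonce (0x80 encodes nonce == 0, otherwise the byte itself).
    let nonce_byte = if nonce == 0 { 0x80 } else { nonce };
    let mut preimage = Vec::with_capacity(23);
    preimage.push(0xc0 + 22); // payload = 21 (address item) + 1 (nonce item)
    preimage.push(0x80 + 20); // 20-byte string prefix
    preimage.extend_from_slice(&sender);
    preimage.push(nonce_byte);

    let hash = keccak256(&preimage);
    let mut address = [0u8; 20];
    address.copy_from_slice(&hash[12..]); // low 20 bytes of the hash
    address
}

fn main() {
    let deployer = [0x11u8; 20];
    let addr = create_address(deployer, 0);
    let hex: String = addr.iter().map(|b| format!("{b:02x}")).collect();
    println!("contract address: 0x{hex}");
}
```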
