Commit: fix building
lispc committed Nov 7, 2024
1 parent 3414b2e commit 5640663
Showing 14 changed files with 55 additions and 183 deletions.
4 changes: 2 additions & 2 deletions aggregator/src/blob_consistency.rs
@@ -19,9 +19,9 @@ mod eip4844;
 cfg_if! {
     if #[cfg(feature = "da-avail")] {
         // const DATA_AVAILABILITY: DataAvailability = DataAvailability::Avail;
-        pub use avail::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH};
+        pub use avail::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH, get_blob_bytes};
     } else {
         // const DATA_AVAILABILITY: DatayAvailability = DataAvailability::Eip4844;
-        pub use eip4844::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH};
+        pub use eip4844::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH, get_blob_bytes};
     }
 }
5 changes: 5 additions & 0 deletions aggregator/src/blob_consistency/avail.rs
@@ -98,3 +98,8 @@ pub struct AssignedBarycentricEvaluationConfig {
     /// 32 Assigned cells representing the LE-bytes of evaluation y.
     pub(crate) y_le: Vec<AssignedValue<Fr>>,
 }
+
+/// Get the blob data bytes that will be populated in BlobDataConfig.
+pub fn get_blob_bytes(_batch_bytes: &[u8]) -> Vec<u8> {
+    unimplemented!("trick for linting");
+}
1 change: 0 additions & 1 deletion aggregator/src/blob_consistency/eip4844.rs
@@ -84,7 +84,6 @@ fn kzg_to_versioned_hash(commitment: &c_kzg::KzgCommitment) -> H256 {
     H256::from_slice(&res[..])
 }
 
-#[cfg(test)]
 /// Get the blob data bytes that will be populated in BlobDataConfig.
 pub fn get_blob_bytes(batch_bytes: &[u8]) -> Vec<u8> {
     let mut blob_bytes = crate::witgen::zstd_encode(batch_bytes);
1 change: 1 addition & 0 deletions aggregator/src/lib.rs
@@ -27,6 +27,7 @@ mod tests;
 pub use self::core::extract_proof_and_instances_with_pairing_check;
 pub use aggregation::*;
 pub use batch::{BatchHash, BatchHeader};
+pub use blob_consistency::get_blob_bytes;
 pub use chunk::ChunkInfo;
 pub use compression::*;
 pub use constants::MAX_AGG_SNARKS;
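With the #[cfg(test)] gate removed in eip4844.rs and the re-export added above, get_blob_bytes becomes part of the aggregator crate's public API under either DA feature. A minimal usage sketch for a downstream caller (the function below is illustrative, not code from this commit):

// Illustrative sketch: derive the blob payload from raw batch bytes via the
// re-exported helper; which backend runs depends on the "da-avail" feature.
use aggregator::get_blob_bytes;

fn blob_payload(batch_bytes: &[u8]) -> Vec<u8> {
    // Note: the avail backend in this commit is only a linting stub and
    // panics via unimplemented!(), so this path is eip4844-only in practice.
    get_blob_bytes(batch_bytes)
}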
2 changes: 1 addition & 1 deletion prover/src/aggregator/mod.rs
@@ -11,7 +11,7 @@ mod verifier;
 pub use verifier::Verifier;
 
 /// Re-export some types from the [`aggregator`] crate.
-pub use aggregator::{BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS};
+pub use aggregator::{get_blob_bytes, BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS};
 
 /// Alias for convenience.
 pub type BatchProver<'a> = Prover<'a>;
25 changes: 15 additions & 10 deletions prover/src/aggregator/prover.rs
@@ -16,7 +16,7 @@ use crate::{
     types::BundleProvingTask,
     utils::{force_to_read, try_to_read},
     BatchProofV2, BatchProofV2Metadata, BatchProvingTask, BundleProofV2, ChunkKind, ChunkProof,
-    ParamsMap, ProverError,
+    ChunkProofV2, ParamsMap, ProverError,
 };
 
 /// Prover capable of generating [`BatchProof`] and [`BundleProof`].
@@ -197,8 +197,8 @@ impl<'params> Prover<'params> {
         let bundle_snarks = bundle
             .batch_proofs
             .iter()
-            .map(|proof| proof.into())
-            .collect::<Vec<_>>();
+            .map(Snark::try_from)
+            .collect::<Result<Vec<Snark>, _>>()?;
 
         // Load from disk or generate a layer-5 Recursive Circuit SNARK.
         let layer5_snark = self
@@ -266,11 +266,16 @@ impl<'params> Prover<'params> {
         self.check_protocol_of_chunks(&batch.chunk_proofs)?;
 
         // Split chunk info and snarks from the batch proving task.
-        let (mut chunk_infos, mut layer2_snarks): (Vec<_>, Vec<_>) = batch
+        let mut chunk_infos = batch
             .chunk_proofs
             .iter()
+            .map(|proof| proof.inner.chunk_info().clone())
+            .collect::<Vec<_>>();
+        let mut layer2_snarks = batch
+            .chunk_proofs
+            .iter()
-            .map(|proof| (proof.chunk_info.clone(), proof.to_snark()))
-            .unzip();
+            .map(Snark::try_from)
+            .collect::<Result<Vec<Snark>, ProverError>>()?;
 
         // Pad the SNARKs with the last SNARK until we have MAX_AGG_SNARKS number of SNARKs.
         if num_chunks < MAX_AGG_SNARKS {
@@ -372,15 +377,15 @@
     /// Sanity check: validate that the SNARK [`protocol`][snark_verifier::Protocol] for the SNARKs
     /// being aggregated by the [`BatchCircuit`][aggregator::BatchCircuit] match the expected SNARK
     /// protocols conditional to the chunk proof generation route utilised, i.e. halo2 or sp1.
-    fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProof]) -> Result<(), ProverError> {
+    fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProofV2]) -> Result<(), ProverError> {
         for (i, proof) in chunk_proofs.iter().enumerate() {
-            let expected = match proof.chunk_kind {
+            let expected = match proof.inner.chunk_kind() {
                 ChunkKind::Halo2 => &self.halo2_protocol,
                 ChunkKind::Sp1 => &self.sp1_protocol,
             };
-            if proof.protocol.ne(expected) {
+            if proof.inner.protocol().ne(expected) {
                 let expected_digest = format!("{:x}", Sha256::digest(expected));
-                let found_digest = format!("{:x}", Sha256::digest(&proof.protocol));
+                let found_digest = format!("{:x}", Sha256::digest(proof.inner.protocol()));
                 log::error!(
                     "BatchProver: SNARK protocol mismatch! index={}, expected={}, found={}",
                     i,
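The bundle and batch assembly above now uses Snark::try_from instead of an infallible .into(), so conversion failures surface as Err values collected into a Result and propagated with ?. A self-contained sketch of that collect-into-Result idiom with stand-in types (not the actual ChunkProofV2/Snark definitions):

use std::convert::TryFrom;

struct Proof(Option<u32>);
struct Snark(u32);

impl TryFrom<&Proof> for Snark {
    type Error = String;

    fn try_from(p: &Proof) -> Result<Self, Self::Error> {
        // Return an error instead of panicking when conversion is impossible.
        p.0.map(Snark).ok_or_else(|| "missing snark payload".to_string())
    }
}

fn to_snarks(proofs: &[Proof]) -> Result<Vec<Snark>, String> {
    // Collecting Iterator<Item = Result<T, E>> into Result<Vec<T>, E>
    // short-circuits at the first Err, matching the `?` in the prover code.
    proofs.iter().map(Snark::try_from).collect()
}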
5 changes: 0 additions & 5 deletions prover/src/inner/mod.rs

This file was deleted.

43 changes: 0 additions & 43 deletions prover/src/inner/prover/mock.rs

This file was deleted.

70 changes: 0 additions & 70 deletions prover/src/inner/prover/mod.rs

This file was deleted.

42 changes: 0 additions & 42 deletions prover/src/inner/verifier.rs

This file was deleted.

10 changes: 7 additions & 3 deletions prover/src/lib.rs
@@ -39,8 +39,8 @@
 mod aggregator;
 pub use aggregator::{
-    check_chunk_hashes, BatchData, BatchHash, BatchHeader, BatchProver, BatchProverError,
-    BatchVerifier, RecursionTask, MAX_AGG_SNARKS,
+    check_chunk_hashes, get_blob_bytes, BatchData, BatchHash, BatchHeader, BatchProver,
+    BatchProverError, BatchVerifier, RecursionTask, MAX_AGG_SNARKS,
 };
 
 mod common;
@@ -68,7 +68,11 @@ mod utils;
 pub use utils::*;
 
 mod zkevm;
-pub use zkevm::{ChunkProver, ChunkProverError, ChunkVerifier, CircuitCapacityChecker};
+pub use zkevm::{
+    circuit::calculate_row_usage_of_witness_block, circuit::chunk_trace_to_witness_block,
+    ChunkProver, ChunkProverError, ChunkVerifier, CircuitCapacityChecker, RowUsage,
+    SubCircuitRowUsage,
+};
 
 /// Re-export the eth-types crate.
 pub use eth_types;
2 changes: 1 addition & 1 deletion prover/src/proof/chunk.rs
@@ -12,7 +12,7 @@ use crate::{utils::read_json_deep, zkevm::SubCircuitRowUsage};
 use super::{dump_as_json, dump_data, dump_vk, InnerProof};
 
 /// The innermost SNARK belongs to the following variants.
-#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
+#[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq)]
 pub enum ChunkKind {
     /// halo2-based SuperCircuit.
     Halo2,
12 changes: 12 additions & 0 deletions prover/src/proof/proof_v2.rs
@@ -265,6 +265,18 @@ impl ChunkProofV2Metadata {
             row_usage,
         })
     }
+    /// Get the chunk info embedded
+    pub fn chunk_info(&self) -> &ChunkInfo {
+        &self.chunk_info
+    }
+    /// Get the chunk kind
+    pub fn chunk_kind(&self) -> ChunkKind {
+        self.chunk_kind
+    }
+    /// Get the chunk protocol
+    pub fn protocol(&self) -> &Vec<u8> {
+        &self.protocol
+    }
 }
 
 impl Proof for ChunkProofV2Metadata {
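The prover-side changes earlier in this commit (proof.inner.chunk_info(), .chunk_kind(), .protocol()) rely on these new accessors instead of reaching into private fields. A brief sketch of a caller (the function below is illustrative only, not part of the commit):

// Illustrative only: read chunk proof metadata through the new accessors.
fn describe(meta: &ChunkProofV2Metadata) -> String {
    let kind = meta.chunk_kind();          // ChunkKind is Copy after this commit
    let protocol_len = meta.protocol().len();
    let _info = meta.chunk_info();         // borrowed reference to the embedded ChunkInfo
    format!("kind={kind:?}, protocol={protocol_len} bytes")
}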
16 changes: 11 additions & 5 deletions prover/src/types.rs
@@ -3,7 +3,7 @@ use eth_types::{base64, l2_types::BlockTrace};
 use serde::{Deserialize, Serialize};
 use zkevm_circuits::evm_circuit::witness::Block;
 
-use crate::{BatchProof, ChunkProof};
+use crate::{BatchProofV2, ChunkProofV2};
 
 /// Alias for convenience.
 pub type WitnessBlock = Block;
@@ -63,7 +63,7 @@ impl ChunkProvingTask {
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct BatchProvingTask {
     /// Chunk proofs for the contiguous list of chunks within the batch.
-    pub chunk_proofs: Vec<ChunkProof>,
+    pub chunk_proofs: Vec<ChunkProofV2>,
     /// The [`BatchHeader`], as computed on-chain for this batch.
     ///
     /// Ref: https://github.com/scroll-tech/scroll-contracts/blob/2ac4f3f7e090d7127db4b13b3627cb3ce2d762bc/src/libraries/codec/BatchHeaderV3Codec.sol
@@ -83,7 +83,8 @@ impl BatchProvingTask {
         self.chunk_proofs
             .last()
             .unwrap()
-            .chunk_info
+            .inner
+            .chunk_info()
             .public_input_hash()
             .to_low_u64_le()
             .to_string()
@@ -94,7 +95,7 @@
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct BundleProvingTask {
     /// The [`BatchProofs`][BatchProof] for the contiguous list of batches to be bundled together.
-    pub batch_proofs: Vec<BatchProof>,
+    pub batch_proofs: Vec<BatchProofV2>,
 }
 
 impl BundleProvingTask {
@@ -103,6 +104,11 @@ impl BundleProvingTask {
     /// This is used as a file descriptor to save to (load from) disk in order to avoid proof
     /// generation if the same proof/SNARK is already found on disk.
     pub fn identifier(&self) -> String {
-        self.batch_proofs.last().unwrap().batch_hash.to_string()
+        self.batch_proofs
+            .last()
+            .unwrap()
+            .inner
+            .batch_hash
+            .to_string()
     }
 }
