halo2 + sp1 chunks #352

Open: wants to merge 37 commits into base: main

Changes from 36 commits

Commits (37)
48e3de4
sp1 chunk test
noel2004 Oct 3, 2024
2f637c5
renaming snark
noel2004 Oct 4, 2024
ae9cc80
fix naming
noel2004 Oct 4, 2024
7c7c458
refactoring sp1 test
noel2004 Oct 6, 2024
6a6c467
resume path extracting
noel2004 Oct 6, 2024
a4f9490
test for integrating sp1 to batch
noel2004 Oct 7, 2024
3e92fa2
purge uncessary dumping
noel2004 Oct 7, 2024
37e41f6
e2e support sp1 path
noel2004 Oct 7, 2024
6cf60cf
lints
noel2004 Oct 7, 2024
1491df4
add testing stuff for sp1
noel2004 Oct 8, 2024
592156c
add PI digest log
noel2004 Oct 8, 2024
b1e121b
update testing stuff
lispc Oct 8, 2024
595b742
fix an issue in restore results
noel2004 Oct 8, 2024
c6ab340
more fixing
noel2004 Oct 8, 2024
332fc3b
also fix issue for zkevm-chunk-test
noel2004 Oct 8, 2024
520609c
add post batch test
noel2004 Oct 9, 2024
cee1921
update deps
roynalnaruto Oct 9, 2024
0ce4fa7
add verifier deployment test
roynalnaruto Oct 9, 2024
df43565
more verifier deploy test (snark-verifier, not revm)
roynalnaruto Oct 9, 2024
90f3f71
tmp: add evm verifier bin
roynalnaruto Oct 9, 2024
a0e6c66
bump zkevm-circuits:prover
roynalnaruto Oct 9, 2024
bf73b34
poseidon-circuit dep correct as per rust-toolchain
roynalnaruto Oct 9, 2024
7be636d
use prover's export instead of snark-verifier-sdk
roynalnaruto Oct 9, 2024
714f6b7
e2e hybrid test setup
roynalnaruto Oct 16, 2024
c8211e8
fix: hybrid batch dir
roynalnaruto Oct 16, 2024
dbb72a7
variants for chunk protocol in batch prover
roynalnaruto Oct 16, 2024
d0ef2f7
bump prover and snark-verifier
roynalnaruto Oct 17, 2024
db65938
bump prover
roynalnaruto Oct 17, 2024
19c6328
bump prover
roynalnaruto Oct 18, 2024
0b60e64
bum prover
roynalnaruto Oct 18, 2024
ad2c90e
bump prover
roynalnaruto Oct 18, 2024
00a0052
bump prover
roynalnaruto Oct 18, 2024
38baee7
bump prover
roynalnaruto Oct 18, 2024
42eb985
bump prover
roynalnaruto Oct 22, 2024
0b78006
Merge branch 'main' into feat/hybrid-snark-agg
roynalnaruto Nov 8, 2024
7bb2795
compile + clippy
roynalnaruto Nov 8, 2024
1a7da0e
minor fix
lispc Nov 8, 2024
3 changes: 3 additions & 0 deletions Makefile
@@ -50,6 +50,9 @@ test-bundle-prove:
test-e2e-prove:
@SCROLL_PROVER_DUMP_YUL=true cargo test --release -p integration --test e2e_tests test_e2e_prove_verify -- --exact --nocapture

test-e2e-prove-hybrid:
@SCROLL_PROVER_DUMP_YUL=true cargo test --release -p integration --test e2e_tests test_e2e_prove_verify_hybrid -- --exact --nocapture

test-batch-bundle-prove:
@SCROLL_PROVER_DUMP_YUL=true cargo test --release -p integration --test e2e_tests test_batch_bundle_verify -- --exact --nocapture

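Context note (not part of the diff): test_e2e_prove_verify_hybrid itself lives in integration/tests/e2e_tests.rs, outside this excerpt. Judging from the helpers added in integration/src/prove.rs below, the hybrid flow proves some chunks on the halo2 path and others on the sp1 path before aggregating them into one batch; a minimal sketch, with the chunk tasks, provers and directory names assumed:

    // sketch only: chunk_a / chunk_b are ChunkProvingTask values, and an
    // sp1_snark_<id>.json for chunk_b is assumed to exist under sp1_dir
    let halo2_proof = prove_and_verify_chunk(
        &params_map, &output_dir, chunk_a, &mut chunk_prover, Some("0"), true,
    );
    let sp1_proof = prove_and_verify_sp1_chunk(
        &params_map, &output_dir, Some(sp1_dir), chunk_b, &mut sp1_prover, Some("1"),
    );
    let mut batch_prover = new_batch_prover(&params_map, &output_dir);
    prove_and_verify_batch(&params_map, &output_dir, &mut batch_prover, batch_task);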
11 changes: 7 additions & 4 deletions bin/src/trace_prover.rs
@@ -1,6 +1,6 @@
use clap::Parser;
use integration::{prove::prove_and_verify_chunk, test_util::load_chunk};
use prover::{init_env_and_log, ChunkProvingTask};
use prover::{init_env_and_log, ChunkProver, ChunkProvingTask};
use std::env;

#[derive(Parser, Debug)]
@@ -34,12 +34,15 @@ fn main() {
let chunk = ChunkProvingTask::new(traces);
let params_map =
prover::Prover::load_params_map(&args.params_path, &prover::CHUNK_PROVER_DEGREES);
let mut prover = ChunkProver::from_params_and_assets(&params_map, &args.assets_path);
log::info!("Constructed chunk prover");
prove_and_verify_chunk(
chunk,
Some("0"), // same with `make test-chunk-prove`, to load vk
&params_map,
&args.assets_path,
&output_dir,
chunk,
&mut prover,
Some("0"), // same with `make test-chunk-prove`, to load vk
true,
);
log::info!("chunk prove done");
}
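Since the unified diff above interleaves the removed and added argument lists, the updated call reads in full as follows (argument order per the new prove_and_verify_chunk signature introduced in integration/src/prove.rs below):

    prove_and_verify_chunk(
        &params_map,
        &output_dir,
        chunk,
        &mut prover,
        Some("0"), // same with `make test-chunk-prove`, to load vk
        true,
    );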
132 changes: 121 additions & 11 deletions integration/src/prove.rs
@@ -1,35 +1,136 @@
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover::{
get_blob_bytes, BatchData, BatchProofV2, BatchProver, BatchProvingTask, BatchVerifier,
BundleProvingTask, ChunkInfo, ChunkProver, ChunkProvingTask, ChunkVerifier, MAX_AGG_SNARKS,
chunk_trace_to_witness_block, get_blob_bytes, BatchData, BatchProofV2, BatchProver,
BatchProvingTask, BatchVerifier, BundleProvingTask, ChunkInfo, ChunkProofV2,
ChunkProofV2Metadata, ChunkProver, ChunkProvingTask, ChunkVerifier, Snark, MAX_AGG_SNARKS,
};
use std::{collections::BTreeMap, env, time::Instant};

use crate::verifier::EVMVerifier;
use crate::verifier::{new_chunk_verifier, EVMVerifier};

/// The `output_dir` is assumed to be the output_dir of chunk proving.
pub fn new_batch_prover<'a>(
params_map: &'a BTreeMap<u32, ParamsKZG<Bn256>>,
output_dir: &str,
) -> BatchProver<'a> {
env::set_var("HALO2_CHUNK_PROTOCOL", "protocol_chunk_0.protocol");
env::set_var("SP1_CHUNK_PROTOCOL", "protocol_chunk_0.protocol");
env::set_var("HALO2_CHUNK_PROTOCOL", "protocol_chunk_halo2.protocol");
env::set_var("SP1_CHUNK_PROTOCOL", "protocol_chunk_sp1.protocol");
env::set_var("SCROLL_PROVER_ASSETS_DIR", output_dir);
let prover = BatchProver::from_params_and_assets(params_map, output_dir);
log::info!("Constructed batch prover");

prover
}

pub fn prove_and_verify_chunk(
/// SP1Prover simply compresses a snark from sp1, so we have the
/// same snark (differing only in preprocess bytes) as zkevm's chunk proof
pub struct SP1Prover<'p>(ChunkProver<'p>);

impl<'params> SP1Prover<'params> {
pub fn from_params_and_assets(
params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>,
assets_dir: &str,
) -> Self {
Self(ChunkProver::from_params_and_assets(params_map, assets_dir))
}

pub fn get_vk(&self) -> Option<Vec<u8>> {
self.0.get_vk()
}

pub fn gen_chunk_proof(
&mut self,
chunk: ChunkProvingTask,
chunk_identifier: &str,
sp1_snark: Snark,
output_dir: Option<&str>,
) -> Result<ChunkProofV2> {
use prover::LayerId::Layer2;

let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?;
let chunk_info = if let Some(chunk_info_input) = chunk.chunk_info {
chunk_info_input
} else {
log::info!("gen chunk_info {chunk_identifier:?}");
ChunkInfo::from_witness_block(&witness_block, false)
};

let comp_snark = self.0.prover_impl.load_or_gen_comp_snark(
chunk_identifier,
Layer2.id(),
false,
Layer2.degree(),
sp1_snark,
output_dir,
)?;

let pk = self.0.prover_impl.pk(Layer2.id());
let proof_metadata =
ChunkProofV2Metadata::new(&comp_snark, prover::ChunkKind::Sp1, chunk_info, None)?;
let proof = ChunkProofV2::new(comp_snark, pk, proof_metadata)?;

// in case we read the snark directly from a previous calculation,
// the pk is not available and we skip dumping the proof
if pk.is_some() {
if let Some(output_dir) = output_dir {
proof.dump(output_dir, chunk_identifier)?;
}
} else {
log::info!("skip dumping vk since snark is restored from disk")
}

Ok(proof)
}
}

/// prove_and_verify_sp1_chunk expects an sp1 snark named "sp1_snark_<chunk_id>.json"
pub fn prove_and_verify_sp1_chunk(
params_map: &BTreeMap<u32, ParamsKZG<Bn256>>,
output_dir: &str,
sp1_dir: Option<&str>,
chunk: ChunkProvingTask,
prover: &mut SP1Prover,
chunk_identifier: Option<&str>,
) -> ChunkProofV2 {
let chunk_identifier =
chunk_identifier.map_or_else(|| chunk.identifier(), |name| name.to_string());

let sp1_dir = sp1_dir.unwrap_or(output_dir);
let sp1_snark_name = format!("sp1_snark_{}.json", chunk_identifier);

let now = Instant::now();
let snark_path = std::path::Path::new(sp1_dir).join(&sp1_snark_name);
let sp1_snark = prover::read_json_deep(snark_path).expect("failed to load SNARK");
let chunk_proof = prover
.gen_chunk_proof(chunk, &chunk_identifier, sp1_snark, Some(output_dir))
.expect("cannot generate sp1 chunk snark");
log::info!(
"finish generating sp1 chunk snark, elapsed: {:?}",
now.elapsed()
);

// output_dir is used to load chunk vk
env::set_var(
"CHUNK_VK_FILENAME",
&format!("vk_chunk_{chunk_identifier}.vkey"),
);
let verifier = new_chunk_verifier(params_map, output_dir);
let snark = Snark::try_from(&chunk_proof).expect("should be ok");
assert!(verifier.verify_snark(snark));
log::info!("Verified sp1 chunk proof");

chunk_proof
}

pub fn prove_and_verify_chunk(
params_map: &BTreeMap<u32, ParamsKZG<Bn256>>,
assets_path: &str,
output_dir: &str,
) {
let mut prover = ChunkProver::from_params_and_assets(params_map, assets_path);
log::info!("Constructed chunk prover");

chunk: ChunkProvingTask,
prover: &mut ChunkProver,
chunk_identifier: Option<&str>,
skip_verify: bool,
) -> ChunkProofV2 {
let chunk_identifier =
chunk_identifier.map_or_else(|| chunk.identifier(), |name| name.to_string());

@@ -42,6 +143,13 @@ pub fn prove_and_verify_chunk(
now.elapsed()
);

// there is an issue: if the snark is restored from disk, the pk is not generated,
// and the proof dumping process would overwrite the existing vk with 0 bytes,
// causing verification to fail.
// the work-around is to skip verification in the e2e test
if skip_verify {
return chunk_proof;
}
// output_dir is used to load chunk vk
env::set_var(
"CHUNK_VK_FILENAME",
@@ -52,6 +160,8 @@
.verify_chunk_proof(&chunk_proof)
.expect("should verify");
log::info!("Verified chunk proof");

chunk_proof
}

pub fn prove_and_verify_batch(
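A hedged usage sketch of the new SP1 path (the corresponding chunk test is not part of this excerpt; assets_dir, trace_dir, sp1_dir and output_dir are assumed &str paths):

    // a pre-generated sp1_snark_<chunk_id>.json is expected under sp1_dir
    let params_map = prover::Prover::load_params_map(PARAMS_DIR, &prover::CHUNK_PROVER_DEGREES);
    let mut sp1_prover = SP1Prover::from_params_and_assets(&params_map, assets_dir);
    let chunk = ChunkProvingTask::new(load_chunk(trace_dir).1);
    let chunk_proof = prove_and_verify_sp1_chunk(
        &params_map,
        output_dir,
        Some(sp1_dir),
        chunk,
        &mut sp1_prover,
        Some("0"),
    );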
44 changes: 41 additions & 3 deletions integration/src/verifier.rs
@@ -1,7 +1,47 @@
use std::path::PathBuf;

use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover::{batch_vk_filename, CompressionCircuit, Verifier};
use std::{collections::BTreeMap, env};

use prover::{force_read, DEPLOYMENT_CODE_FILENAME};

type SnarkVerifier<'a> = Verifier<'a, CompressionCircuit>;

// FIXME: why do we use common::Verifier instead of ChunkVerifier here?
pub fn new_chunk_verifier<'a>(
params_map: &'a BTreeMap<u32, ParamsKZG<Bn256>>,
assets_dir: &str,
) -> SnarkVerifier<'a> {
let path = std::path::PathBuf::from(assets_dir).join(prover::chunk_vk_filename());
let raw_vk = force_read(&path);
if raw_vk.is_empty() {
panic!("empty vk read from {path:?}");
}
env::set_var("COMPRESSION_CONFIG", &*prover::LAYER2_CONFIG_PATH);
let params = params_map
.get(&prover::LAYER2_DEGREE)
.expect("should be loaded");
SnarkVerifier::from_params(params, &raw_vk)
}

#[allow(dead_code)]
pub fn new_batch_verifier<'a>(
params_map: &'a BTreeMap<u32, ParamsKZG<Bn256>>,
assets_dir: &str,
) -> SnarkVerifier<'a> {
let path = PathBuf::from(assets_dir).join(batch_vk_filename());
let raw_vk = force_read(&path);
if raw_vk.is_empty() {
panic!("empty vk read from {path:?}");
}
env::set_var("COMPRESSION_CONFIG", &*prover::LAYER4_CONFIG_PATH);
let params = params_map
.get(&prover::LAYER4_DEGREE)
.expect("should be loaded");
SnarkVerifier::from_params(params, &raw_vk)
}

#[derive(Debug)]
pub struct EVMVerifier(Vec<u8>);

@@ -16,8 +56,6 @@ impl EVMVerifier {
}

pub fn verify_evm_proof(&self, call_data: Vec<u8>) -> bool {
let res = prover::deploy_and_call(self.0.clone(), call_data);
log::debug!("verify_evm_proof result {:?}", res);
res.is_ok()
prover::deploy_and_call(self.0.clone(), call_data).is_ok()
}
}
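For completeness, a hedged sketch of driving new_chunk_verifier directly against a chunk proof previously dumped to output_dir (this mirrors the verification step inside prove_and_verify_sp1_chunk; the identifier "0" and vk filename are assumptions):

    env::set_var("CHUNK_VK_FILENAME", "vk_chunk_0.vkey");
    let verifier = new_chunk_verifier(&params_map, &output_dir);
    let chunk_proof = ChunkProofV2::from_json(&output_dir, "0").expect("proof dumped earlier");
    let snark = Snark::try_from(&chunk_proof).expect("conversion to snark");
    assert!(verifier.verify_snark(snark));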
87 changes: 86 additions & 1 deletion integration/tests/batch_tests.rs
@@ -52,7 +52,7 @@ fn test_batches_with_each_chunk_num_prove_verify() {
use itertools::Itertools;

let output_dir = init_env_and_log("batches_with_each_chunk_num_tests");
log::info!("Initialized ENV and created output-dir {output_dir}");
log::info!("Initialized ENV and using output-dir {output_dir}");

let params_map = prover::Prover::load_params_map(
PARAMS_DIR,
@@ -83,6 +83,91 @@ fn test_batches_with_each_chunk_num_prove_verify() {
}
}

#[cfg(feature = "prove_verify")]
#[ignore = "to be run only after the chunk tests"]
#[test]
fn test_batch_prove_verify_after_chunk_tests() {
use integration::{
prove::get_blob_from_chunks,
test_util::{load_chunk, trace_path_for_test, PARAMS_DIR},
};
use itertools::Itertools;
use prover::{
eth_types::H256, BatchHeader, ChunkProofV2, ChunkProvingTask, BATCH_PROVER_DEGREES,
};

let output_dir = init_env_and_log("batch_tests");
log::info!("Initialized ENV and created output-dir {output_dir}");

let params_map = prover::Prover::load_params_map(
PARAMS_DIR,
&BATCH_PROVER_DEGREES.iter().copied().collect_vec(),
);

let trace_paths_env = trace_path_for_test();
let trace_paths: Vec<_> = trace_paths_env.split(';').collect();
log::info!("Use traces paths {trace_paths:?}");

let mut l1_message_popped = 0;
let mut last_block_timestamp = 0;

// like gen_batch_proving_task in e2e, collect proofs for every chunk
let chunk_proofs = trace_paths
.iter()
.map(|chunk_dir| load_chunk(chunk_dir).1)
.map(|traces| {
// make use of the traces before they are consumed by chunk proving
l1_message_popped += traces.iter().map(|tr| tr.num_l1_txs()).sum::<u64>();
last_block_timestamp = traces
.last()
.map_or(last_block_timestamp, |tr| tr.header.timestamp.as_u64());

let task = ChunkProvingTask::new(traces);
let loaded_proof = ChunkProofV2::from_json(&output_dir, &task.identifier());
if let Ok(proof) = loaded_proof.as_ref() {
log::info!(
"expected PI of {} is {:#x?}",
task.identifier(),
proof.inner.chunk_info().public_input_hash(),
);
}
loaded_proof
})
.collect::<Result<Vec<_>, _>>()
.unwrap();

let chunk_infos = chunk_proofs
.iter()
.map(|proof| proof.inner.chunk_info().clone())
.collect::<Vec<_>>();

let blob_bytes = get_blob_from_chunks(&chunk_infos);

let batch_header = BatchHeader::construct_from_chunks(
4,
123,
l1_message_popped,
l1_message_popped,
H256([
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
]),
last_block_timestamp,
&chunk_infos,
&blob_bytes,
);

let batch = BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
};

// dump_chunk_protocol(&batch, &output_dir);
let mut batch_prover = new_batch_prover(&params_map, &output_dir);
prove_and_verify_batch(&params_map, &output_dir, &mut batch_prover, batch);
}

fn load_batch_proving_task(batch_task_file: &str) -> BatchProvingTask {
let batch: BatchProvingTask = read_json_deep(batch_task_file).unwrap();
let tx_bytes_total_len: usize = batch
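Workflow note (an assumption about the intended usage, not shown in this excerpt): the ignored test above relies on earlier chunk tests having dumped their proofs into the same output directory, roughly:

    // in a preceding chunk test (sketch): generate the proof with dumping enabled,
    // so that test_batch_prove_verify_after_chunk_tests can later reload it via
    // ChunkProofV2::from_json(&output_dir, &task.identifier())
    let task = ChunkProvingTask::new(load_chunk(trace_dir).1);
    let _proof = prove_and_verify_chunk(&params_map, &output_dir, task, &mut chunk_prover, None, true);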