This repository was archived by the owner on Apr 18, 2025. It is now read-only.

Commit 3a65044

Merge remote-tracking branch 'origin/test/refactor_batch_accumulator' into feat/agg_recursion
2 parents: 17327d1 + 7216dcb

File tree: 3 files changed (+104, −93 lines)

aggregator/src/aggregation/circuit.rs

Lines changed: 42 additions & 34 deletions
@@ -21,6 +21,7 @@ use rand::Rng;
 use std::rc::Rc;
 use std::{env, fs::File};
 
+#[cfg(not(feature = "disable_proof_aggregation"))]
 use snark_verifier::loader::halo2::{halo2_ecc::halo2_base::AssignedValue, Halo2Loader};
 use snark_verifier::pcs::kzg::KzgSuccinctVerifyingKey;
 #[cfg(not(feature = "disable_proof_aggregation"))]
@@ -372,6 +373,11 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
         };
 
         // Extract digests
+        #[cfg(feature = "disable_proof_aggregation")]
+        let (_batch_hash_digest, _chunk_pi_hash_digests, _potential_batch_data_hash_digest) =
+            parse_hash_digest_cells::<N_SNARKS>(&assigned_batch_hash.hash_output);
+
+        #[cfg(not(feature = "disable_proof_aggregation"))]
         let (_batch_hash_digest, chunk_pi_hash_digests, _potential_batch_data_hash_digest) =
             parse_hash_digest_cells::<N_SNARKS>(&assigned_batch_hash.hash_output);
 
@@ -500,20 +506,21 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
             "original and recovered bytes mismatch"
         );
 
-        let decoder_exports = config.decoder_config.assign(
-            &mut layouter,
-            &batch_bytes,
-            &encoded_batch_bytes,
-            witness_rows,
-            decoded_literals,
-            fse_aux_tables,
-            block_info_arr,
-            sequence_info_arr,
-            address_table_arr,
-            sequence_exec_info_arr,
-            &challenges,
-            LOG_DEGREE, // TODO: configure k for batch circuit instead of hard-coded here.
-        )?;
+        // batch_circuit_debug
+        // let decoder_exports = config.decoder_config.assign(
+        //     &mut layouter,
+        //     &batch_bytes,
+        //     &encoded_batch_bytes,
+        //     witness_rows,
+        //     decoded_literals,
+        //     fse_aux_tables,
+        //     block_info_arr,
+        //     sequence_info_arr,
+        //     address_table_arr,
+        //     sequence_exec_info_arr,
+        //     &challenges,
+        //     LOG_DEGREE, // TODO: configure k for batch circuit instead of hard-coded here.
+        // )?;
 
         layouter.assign_region(
             || "consistency checks",
@@ -562,26 +569,27 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
                     region.constrain_equal(c.cell(), ec.cell())?;
                 }
 
-                // equate rlc (from blob data) with decoder's encoded_rlc
-                region.constrain_equal(
-                    blob_data_exports.bytes_rlc.cell(),
-                    decoder_exports.encoded_rlc.cell(),
-                )?;
-                // equate len(blob_bytes) with decoder's encoded_len
-                region.constrain_equal(
-                    blob_data_exports.bytes_len.cell(),
-                    decoder_exports.encoded_len.cell(),
-                )?;
-                // equate rlc (from batch data) with decoder's decoded_rlc
-                region.constrain_equal(
-                    batch_data_exports.bytes_rlc.cell(),
-                    decoder_exports.decoded_rlc.cell(),
-                )?;
-                // equate len(batch_data) with decoder's decoded_len
-                region.constrain_equal(
-                    batch_data_exports.batch_data_len.cell(),
-                    decoder_exports.decoded_len.cell(),
-                )?;
+                // batch_circuit_debug
+                // // equate rlc (from blob data) with decoder's encoded_rlc
+                // region.constrain_equal(
+                //     blob_data_exports.bytes_rlc.cell(),
+                //     decoder_exports.encoded_rlc.cell(),
+                // )?;
+                // // equate len(blob_bytes) with decoder's encoded_len
+                // region.constrain_equal(
+                //     blob_data_exports.bytes_len.cell(),
+                //     decoder_exports.encoded_len.cell(),
+                // )?;
+                // // equate rlc (from batch data) with decoder's decoded_rlc
+                // region.constrain_equal(
+                //     batch_data_exports.bytes_rlc.cell(),
+                //     decoder_exports.decoded_rlc.cell(),
+                // )?;
+                // // equate len(batch_data) with decoder's decoded_len
+                // region.constrain_equal(
+                //     batch_data_exports.batch_data_len.cell(),
+                //     decoder_exports.decoded_len.cell(),
+                // )?;
 
                 Ok(())
             },

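The circuit.rs hunks rely on mutually exclusive #[cfg(...)] attributes: exactly one of the two parse_hash_digest_cells bindings is compiled, depending on whether the disable_proof_aggregation feature is enabled, and the binding for the disabled build underscore-prefixes every name so nothing downstream depends on it. A minimal sketch of that pattern, using a hypothetical demo_feature flag rather than the crate's real feature:

// Sketch only (not from this commit): mutually exclusive cfg-gated bindings.
// Exactly one `let` survives compilation; the branch that never reads the
// middle element keeps it underscore-prefixed to silence unused warnings.
fn main() {
    #[cfg(feature = "demo_feature")]
    let (_batch, _chunk_pi, _data) = ("batch", "chunk_pi", "data");

    #[cfg(not(feature = "demo_feature"))]
    let (_batch, chunk_pi, _data) = ("batch", "chunk_pi", "data");

    #[cfg(not(feature = "demo_feature"))]
    println!("only the aggregating build reads: {chunk_pi}");
}
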
aggregator/src/aggregation/config.rs

Lines changed: 29 additions & 26 deletions
@@ -38,8 +38,9 @@ pub struct BatchCircuitConfig<const N_SNARKS: usize> {
     pub blob_data_config: BlobDataConfig<N_SNARKS>,
     /// The batch data's config.
     pub batch_data_config: BatchDataConfig<N_SNARKS>,
-    /// The zstd decoder's config.
-    pub decoder_config: DecoderConfig<1024, 512>,
+    // batch_circuit_debug
+    // /// The zstd decoder's config.
+    // pub decoder_config: DecoderConfig<1024, 512>,
     /// Config to do the barycentric evaluation on blob polynomial.
     pub barycentric: BarycentricEvaluationConfig,
     /// Instance for public input; stores
@@ -130,29 +131,30 @@ impl<const N_SNARKS: usize> BatchCircuitConfig<N_SNARKS> {
         );
 
         // Zstd decoder.
-        let pow_rand_table = PowOfRandTable::construct(meta, &challenges_expr);
-
-        let pow2_table = Pow2Table::construct(meta);
-        let range8 = RangeTable::construct(meta);
-        let range16 = RangeTable::construct(meta);
-        let range512 = RangeTable::construct(meta);
-        let range_block_len = RangeTable::construct(meta);
-        let bitwise_op_table = BitwiseOpTable::construct(meta);
-
-        let decoder_config = DecoderConfig::configure(
-            meta,
-            &challenges_expr,
-            DecoderConfigArgs {
-                pow_rand_table,
-                pow2_table,
-                u8_table,
-                range8,
-                range16,
-                range512,
-                range_block_len,
-                bitwise_op_table,
-            },
-        );
+        // batch_circuit_debug
+        // let pow_rand_table = PowOfRandTable::construct(meta, &challenges_expr);
+
+        // let pow2_table = Pow2Table::construct(meta);
+        // let range8 = RangeTable::construct(meta);
+        // let range16 = RangeTable::construct(meta);
+        // let range512 = RangeTable::construct(meta);
+        // let range_block_len = RangeTable::construct(meta);
+        // let bitwise_op_table = BitwiseOpTable::construct(meta);
+
+        // let decoder_config = DecoderConfig::configure(
+        //     meta,
+        //     &challenges_expr,
+        //     DecoderConfigArgs {
+        //         pow_rand_table,
+        //         pow2_table,
+        //         u8_table,
+        //         range8,
+        //         range16,
+        //         range512,
+        //         range_block_len,
+        //         bitwise_op_table,
+        //     },
+        // );
 
         // Instance column stores public input column
         // the public instance for this circuit consists of
@@ -177,7 +179,8 @@ impl<const N_SNARKS: usize> BatchCircuitConfig<N_SNARKS> {
             instance,
             barycentric,
             batch_data_config,
-            decoder_config,
+            // batch_circuit_debug
+            // decoder_config,
         }
     }
 

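Here the decoder wiring is disabled by commenting it out under a batch_circuit_debug marker rather than by a feature gate, so the decoder_config field disappears from BatchCircuitConfig and from the Self { .. } literal, and every remaining use (such as the assign call in circuit.rs above) has to be disabled in the same commit for the crate to compile. A hedged alternative sketch, not part of this commit, gating a field behind an assumed batch_circuit_debug Cargo feature in the style circuit.rs already uses:

// Hypothetical sketch: the field only exists when the (assumed) feature is off.
struct DebugOnlyDecoder; // stand-in for the real DecoderConfig<1024, 512>

struct ConfigSketch {
    always_present: u32,
    #[cfg(not(feature = "batch_circuit_debug"))]
    decoder_config: DebugOnlyDecoder,
}

fn main() {
    #[cfg(not(feature = "batch_circuit_debug"))]
    let _config = ConfigSketch { always_present: 1, decoder_config: DebugOnlyDecoder };

    #[cfg(feature = "batch_circuit_debug")]
    let _config = ConfigSketch { always_present: 1 };
}
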
aggregator/src/batch.rs

Lines changed: 33 additions & 33 deletions
@@ -35,6 +35,38 @@ pub struct BatchHeader {
     pub blob_data_proof: [H256; 2],
 }
 
+impl BatchHeader {
+    pub(crate) fn batch_hash(&self) -> H256 {
+        // the current batch hash is build as
+        // keccak256(
+        //     version ||
+        //     batch_index ||
+        //     l1_message_popped ||
+        //     total_l1_message_popped ||
+        //     batch_data_hash ||
+        //     versioned_hash ||
+        //     parent_batch_hash ||
+        //     last_block_timestamp ||
+        //     z ||
+        //     y
+        // )
+        let batch_hash_preimage = [
+            vec![self.version].as_slice(),
+            self.batch_index.to_be_bytes().as_ref(),
+            self.l1_message_popped.to_be_bytes().as_ref(),
+            self.total_l1_message_popped.to_be_bytes().as_ref(),
+            self.data_hash.as_bytes(),
+            self.blob_versioned_hash.as_bytes(),
+            self.parent_batch_hash.as_bytes(),
+            self.last_block_timestamp.to_be_bytes().as_ref(),
+            self.blob_data_proof[0].to_fixed_bytes().as_ref(),
+            self.blob_data_proof[1].to_fixed_bytes().as_ref(),
+        ]
+        .concat();
+        keccak256(batch_hash_preimage).into()
+    }
+}
+
 #[derive(Default, Debug, Clone)]
 /// A batch is a set of N_SNARKS num of continuous chunks
 /// - the first k chunks are from real traces
@@ -193,39 +225,7 @@ impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
         // Update export value
         export_batch_header.blob_versioned_hash = versioned_hash;
 
-        // the current batch hash is build as
-        // keccak256(
-        //     version ||
-        //     batch_index ||
-        //     l1_message_popped ||
-        //     total_l1_message_popped ||
-        //     batch_data_hash ||
-        //     versioned_hash ||
-        //     parent_batch_hash ||
-        //     last_block_timestamp ||
-        //     z ||
-        //     y
-        // )
-        let batch_hash_preimage = [
-            vec![batch_header.version].as_slice(),
-            batch_header.batch_index.to_be_bytes().as_ref(),
-            batch_header.l1_message_popped.to_be_bytes().as_ref(),
-            batch_header.total_l1_message_popped.to_be_bytes().as_ref(),
-            batch_data_hash.as_slice(),
-            versioned_hash.as_bytes(),
-            batch_header.parent_batch_hash.as_bytes(),
-            batch_header.last_block_timestamp.to_be_bytes().as_ref(),
-            point_evaluation_assignments
-                .challenge
-                .to_be_bytes()
-                .as_ref(),
-            point_evaluation_assignments
-                .evaluation
-                .to_be_bytes()
-                .as_ref(),
-        ]
-        .concat();
-        let current_batch_hash: H256 = keccak256(batch_hash_preimage).into();
+        let current_batch_hash = export_batch_header.batch_hash();
 
         log::info!(
             "batch hash {:?}, datahash {}, z {}, y {}, versioned hash {:x}",

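The batch.rs change lifts the batch-hash computation out of BatchHash::construct and into a BatchHeader::batch_hash() method; construct now calls it on export_batch_header only after blob_versioned_hash has been filled in, so the exported header and the committed hash are derived from the same values. The preimage is a fixed-width concatenation; a hedged sketch of its length, assuming version is a single byte and the other numeric header fields are u64 (widths the diff itself does not show):

// Assumed widths: 1-byte version, four u64s as 8-byte big-endian words, and
// five 32-byte values (data hash, versioned hash, parent batch hash, z, y).
const BATCH_HASH_PREIMAGE_LEN: usize = 1 // version
    + 8   // batch_index
    + 8   // l1_message_popped
    + 8   // total_l1_message_popped
    + 32  // data_hash
    + 32  // blob_versioned_hash
    + 32  // parent_batch_hash
    + 8   // last_block_timestamp
    + 32  // blob_data_proof[0] (z)
    + 32; // blob_data_proof[1] (y)

fn main() {
    assert_eq!(BATCH_HASH_PREIMAGE_LEN, 193);
    println!("keccak256 runs over a {BATCH_HASH_PREIMAGE_LEN}-byte preimage");
}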