feat(Validium): refactor batch commit data generation #1325

Merged
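Summary of the change, with a minimal sketch of the relocated trait: the `L1BatchCommitDataGenerator` machinery moves out of `zksync_types` and into `zksync_core::eth_sender`, and `CommitBatchInfo` is split into `CommitBatchInfoRollup` / `CommitBatchInfoValidium` so the tokenizable structs no longer carry a generator handle. The sketch below is inferred from the new imports and the `l1_commit_batch` call site further down in this diff; the exact trait surface in the PR may differ.

use zksync_l1_contract_interface::{
    i_executor::structures::{CommitBatchInfoRollup, CommitBatchInfoValidium},
    Tokenizable,
};
use zksync_types::{commitment::L1BatchWithMetadata, ethabi::Token};

// Sketch only: trait now assumed to live in `zksync_core::eth_sender::l1_batch_commit_data_generator`.
pub trait L1BatchCommitDataGenerator: std::fmt::Debug + Send + Sync {
    /// Builds the commit token for a single L1 batch.
    fn l1_commit_batch(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token;
}

#[derive(Debug)]
pub struct RollupModeL1BatchCommitDataGenerator;

#[derive(Debug)]
pub struct ValidiumModeL1BatchCommitDataGenerator;

impl L1BatchCommitDataGenerator for RollupModeL1BatchCommitDataGenerator {
    fn l1_commit_batch(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token {
        // Rollup mode: the commit data includes the batch pubdata.
        CommitBatchInfoRollup::new(l1_batch_with_metadata).into_token()
    }
}

impl L1BatchCommitDataGenerator for ValidiumModeL1BatchCommitDataGenerator {
    fn l1_commit_batch(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token {
        // Validium mode: pubdata stays off-chain, so the commit data omits it.
        CommitBatchInfoValidium::new(l1_batch_with_metadata).into_token()
    }
}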
8 changes: 4 additions & 4 deletions core/bin/external_node/src/main.rs
@@ -20,6 +20,10 @@ use zksync_core::{
commitment_generator::CommitmentGenerator,
consensus,
consistency_checker::ConsistencyChecker,
eth_sender::l1_batch_commit_data_generator::{
L1BatchCommitDataGenerator, RollupModeL1BatchCommitDataGenerator,
ValidiumModeL1BatchCommitDataGenerator,
},
l1_gas_price::MainNodeFeeParamsFetcher,
metadata_calculator::{MetadataCalculator, MetadataCalculatorConfig},
reorg_detector::ReorgDetector,
@@ -39,10 +43,6 @@ use zksync_eth_client::clients::QueryClient;
use zksync_health_check::{AppHealthCheck, HealthStatus, ReactiveHealthCheck};
use zksync_state::PostgresStorageCaches;
use zksync_storage::RocksDB;
use zksync_types::l1_batch_commit_data_generator::{
L1BatchCommitDataGenerator, RollupModeL1BatchCommitDataGenerator,
ValidiumModeL1BatchCommitDataGenerator,
};
use zksync_utils::wait_for_tasks::wait_for_tasks;
use zksync_web3_decl::jsonrpsee::http_client::HttpClient;

core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs
@@ -1,33 +1,44 @@
use std::sync::Arc;

use zksync_types::{
commitment::L1BatchWithMetadata, ethabi::Token,
l1_batch_commit_data_generator::L1BatchCommitDataGenerator,
};
use zksync_types::{commitment::L1BatchWithMetadata, ethabi::Token};

use crate::{
i_executor::structures::{CommitBatchInfo, StoredBatchInfo},
i_executor::structures::{CommitBatchInfoRollup, CommitBatchInfoValidium, StoredBatchInfo},
Tokenizable, Tokenize,
};

/// Input required to encode `commitBatches` call.
/// Input required to encode `commitBatches` call for a contract running in rollup mode.
#[derive(Debug, Clone)]
pub struct CommitBatchesRollup {
pub last_committed_l1_batch: L1BatchWithMetadata,
pub l1_batches: Vec<L1BatchWithMetadata>,
}

impl Tokenize for CommitBatchesRollup {
fn into_tokens(self) -> Vec<Token> {
let stored_batch_info = StoredBatchInfo(&self.last_committed_l1_batch).into_token();
let l1_batches_to_commit = self
.l1_batches
.iter()
.map(|batch| CommitBatchInfoRollup::new(batch).into_token())
.collect();

vec![stored_batch_info, Token::Array(l1_batches_to_commit)]
}
}

/// Input required to encode `commitBatches` call for a contract running in validium mode.
#[derive(Debug, Clone)]
pub struct CommitBatches {
pub struct CommitBatchesValidium {
pub last_committed_l1_batch: L1BatchWithMetadata,
pub l1_batches: Vec<L1BatchWithMetadata>,
pub l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
}

impl Tokenize for CommitBatches {
impl Tokenize for CommitBatchesValidium {
fn into_tokens(self) -> Vec<Token> {
let stored_batch_info = StoredBatchInfo(&self.last_committed_l1_batch).into_token();
let l1_batches_to_commit = self
.l1_batches
.iter()
.map(|batch| {
CommitBatchInfo::new(batch, self.l1_batch_commit_data_generator.clone())
.into_token()
})
.map(|batch| CommitBatchInfoValidium::new(batch).into_token())
.collect();

vec![stored_batch_info, Token::Array(l1_batches_to_commit)]
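For context, a hedged usage sketch of the two new `Tokenize` inputs; the `is_validium` flag and the surrounding helper are assumptions for illustration, not part of the PR.

use zksync_l1_contract_interface::{
    i_executor::methods::{CommitBatchesRollup, CommitBatchesValidium},
    Tokenize,
};
use zksync_types::{commitment::L1BatchWithMetadata, ethabi::Token};

// Hypothetical helper: builds `commitBatches` calldata tokens for the configured mode.
fn commit_calldata_tokens(
    last_committed_l1_batch: L1BatchWithMetadata,
    l1_batches: Vec<L1BatchWithMetadata>,
    is_validium: bool,
) -> Vec<Token> {
    if is_validium {
        CommitBatchesValidium { last_committed_l1_batch, l1_batches }.into_tokens()
    } else {
        CommitBatchesRollup { last_committed_l1_batch, l1_batches }.into_tokens()
    }
}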
4 changes: 3 additions & 1 deletion core/lib/l1_contract_interface/src/i_executor/methods/mod.rs
@@ -1,7 +1,9 @@
//! Utilities for encoding input data for methods defined in `IExecutor.sol`.

pub use self::{
commit_batches::CommitBatches, execute_batches::ExecuteBatches, prove_batches::ProveBatches,
commit_batches::{CommitBatchesRollup, CommitBatchesValidium},
execute_batches::ExecuteBatches,
prove_batches::ProveBatches,
};

mod commit_batches;
core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
@@ -1,35 +1,77 @@
use std::sync::Arc;

use zksync_types::{
commitment::{pre_boojum_serialize_commitments, L1BatchWithMetadata},
block::L1BatchHeader,
commitment::{
pre_boojum_serialize_commitments, serialize_commitments, L1BatchMetadata,
L1BatchWithMetadata,
},
ethabi::Token,
l1_batch_commit_data_generator::L1BatchCommitDataGenerator,
web3::{contract::Error as Web3ContractError, error::Error as Web3ApiError},
U256,
};

use crate::Tokenizable;

/// Encoding for `CommitBatchInfo` from `IExecutor.sol`
/// Encoding for `CommitBatchInfo` from `IExecutor.sol` for a contract running in rollup mode.
#[derive(Debug)]
pub struct CommitBatchInfoRollup<'a> {
pub l1_batch_with_metadata: &'a L1BatchWithMetadata,
}

impl<'a> CommitBatchInfoRollup<'a> {
pub fn new(l1_batch_with_metadata: &'a L1BatchWithMetadata) -> Self {
Self {
l1_batch_with_metadata,
}
}
}

impl<'a> Tokenizable for CommitBatchInfoRollup<'a> {
fn from_token(_token: Token) -> Result<Self, Web3ContractError>
where
Self: Sized,
{
// Currently there is no need to decode this struct.
// We still want to implement `Tokenizable` trait for it, so that *once* it's needed
// the implementation is provided here and not in some other inconsistent way.
Err(Web3ContractError::Api(Web3ApiError::Decoder(
"Not implemented".to_string(),
)))
}

fn into_token(self) -> Token {
if self
.l1_batch_with_metadata
.header
.protocol_version
.unwrap()
.is_pre_boojum()
{
pre_boojum_into_token(self.l1_batch_with_metadata)
} else {
Token::Tuple(encode_l1_commit(
&self.l1_batch_with_metadata.header,
&self.l1_batch_with_metadata.metadata,
Some(&self.l1_batch_with_metadata),
))
}
}
}

/// Encoding for `CommitBatchInfo` from `IExecutor.sol` for a contract running in validium mode.
#[derive(Debug)]
pub struct CommitBatchInfo<'a> {
pub struct CommitBatchInfoValidium<'a> {
pub l1_batch_with_metadata: &'a L1BatchWithMetadata,
pub l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
}

impl<'a> CommitBatchInfo<'a> {
pub fn new(
l1_batch_with_metadata: &'a L1BatchWithMetadata,
l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
) -> Self {
impl<'a> CommitBatchInfoValidium<'a> {
pub fn new(l1_batch_with_metadata: &'a L1BatchWithMetadata) -> Self {
Self {
l1_batch_with_metadata,
l1_batch_commit_data_generator,
}
}
}

impl<'a> Tokenizable for CommitBatchInfo<'a> {
impl<'a> Tokenizable for CommitBatchInfoValidium<'a> {
fn from_token(_token: Token) -> Result<Self, Web3ContractError>
where
Self: Sized,
@@ -52,8 +94,11 @@ impl<'a> Tokenizable for CommitBatchInfo<'a> {
{
pre_boojum_into_token(self.l1_batch_with_metadata)
} else {
self.l1_batch_commit_data_generator
.l1_commit_data(self.l1_batch_with_metadata)
Token::Tuple(encode_l1_commit(
&self.l1_batch_with_metadata.header,
&self.l1_batch_with_metadata.metadata,
None,
))
}
}
}
@@ -88,3 +133,48 @@ fn pre_boojum_into_token(l1_batch_commit_with_metadata: &L1BatchWithMetadata) ->
),
])
}

fn encode_l1_commit(
header: &L1BatchHeader,
metadata: &L1BatchMetadata,
pubdata_input: Option<&L1BatchWithMetadata>,
) -> Vec<Token> {
let commit_data = vec![
// `batchNumber`
Token::Uint(U256::from(header.number.0)),
// `timestamp`
Token::Uint(U256::from(header.timestamp)),
// `indexRepeatedStorageChanges`
Token::Uint(U256::from(metadata.rollup_last_leaf_index)),
// `newStateRoot`
Token::FixedBytes(metadata.merkle_root_hash.as_bytes().to_vec()),
// `numberOfLayer1Txs`
Token::Uint(U256::from(header.l1_tx_count)),
// `priorityOperationsHash`
Token::FixedBytes(header.priority_ops_onchain_data_hash().as_bytes().to_vec()),
// `bootloaderHeapInitialContentsHash`
Token::FixedBytes(
metadata
.bootloader_initial_content_commitment
.unwrap()
.as_bytes()
.to_vec(),
),
// `eventsQueueStateHash`
Token::FixedBytes(
metadata
.events_queue_commitment
.unwrap()
.as_bytes()
.to_vec(),
),
// `systemLogs`
Token::Bytes(serialize_commitments(&header.system_logs)),
Token::Bytes(
pubdata_input
.map(L1BatchWithMetadata::construct_pubdata)
.unwrap_or_default(),
),
];
commit_data
}
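The two `into_token` implementations above differ only in the last argument to `encode_l1_commit`: rollup mode passes the batch, so the trailing `Token::Bytes` carries `L1BatchWithMetadata::construct_pubdata`, while validium mode passes `None` and that field encodes as an empty byte string. An illustrative in-module helper (not part of the diff) that makes the contrast explicit:

// Illustrative only: shows how the pubdata argument selects the mode.
fn commit_tokens_for(batch: &L1BatchWithMetadata, validium: bool) -> Token {
    // Rollup: the final bytes field holds the full batch pubdata.
    // Validium: pubdata stays off-chain, so the field is empty.
    let pubdata_input = if validium { None } else { Some(batch) };
    Token::Tuple(encode_l1_commit(&batch.header, &batch.metadata, pubdata_input))
}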
core/lib/l1_contract_interface/src/i_executor/structures/mod.rs
@@ -3,4 +3,7 @@
mod commit_batch_info;
mod stored_batch_info;

pub use self::{commit_batch_info::CommitBatchInfo, stored_batch_info::StoredBatchInfo};
pub use self::{
commit_batch_info::{CommitBatchInfoRollup, CommitBatchInfoValidium},
stored_batch_info::StoredBatchInfo,
};
76 changes: 0 additions & 76 deletions core/lib/types/src/l1_batch_commit_data_generator.rs

This file was deleted.

2 changes: 0 additions & 2 deletions core/lib/types/src/lib.rs
@@ -55,8 +55,6 @@ pub mod transaction_request;
pub mod utils;
pub mod vm_version;

pub mod l1_batch_commit_data_generator;

/// Denotes the first byte of the special zkSync's EIP-712-signed transaction.
pub const EIP_712_TX_TYPE: u8 = 0x71;

9 changes: 3 additions & 6 deletions core/lib/zksync_core/src/consistency_checker/mod.rs
@@ -7,12 +7,10 @@ use zksync_contracts::PRE_BOOJUM_COMMIT_FUNCTION;
use zksync_dal::{ConnectionPool, StorageProcessor};
use zksync_eth_client::{Error as L1ClientError, EthInterface};
use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck};
use zksync_l1_contract_interface::{i_executor::structures::CommitBatchInfo, Tokenizable};
use zksync_types::{
l1_batch_commit_data_generator::L1BatchCommitDataGenerator, web3::ethabi, L1BatchNumber, H256,
};
use zksync_types::{web3::ethabi, L1BatchNumber, H256};

use crate::{
eth_sender::l1_batch_commit_data_generator::L1BatchCommitDataGenerator,
metrics::{CheckerComponent, EN_METRICS},
utils::wait_for_l1_batch_with_metadata,
};
@@ -183,8 +181,7 @@ impl LocalL1BatchCommitData {

Ok(Some(Self {
is_pre_boojum,
l1_commit_data: CommitBatchInfo::new(&l1_batch, l1_batch_commit_data_generator)
.into_token(),
l1_commit_data: l1_batch_commit_data_generator.l1_commit_batch(&l1_batch),
commit_tx_hash,
}))
}
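On the wiring side, a hedged sketch of how a caller such as the external node or the consistency checker could now pick a generator once and pass it down; the `validium_mode` flag stands in for whatever config switch the node actually reads, and the import path follows the one added in this PR.

use std::sync::Arc;
use zksync_core::eth_sender::l1_batch_commit_data_generator::{
    L1BatchCommitDataGenerator, RollupModeL1BatchCommitDataGenerator,
    ValidiumModeL1BatchCommitDataGenerator,
};

// Hypothetical selection helper: one generator instance is shared by the components that build commit data.
fn select_commit_data_generator(validium_mode: bool) -> Arc<dyn L1BatchCommitDataGenerator> {
    if validium_mode {
        Arc::new(ValidiumModeL1BatchCommitDataGenerator)
    } else {
        Arc::new(RollupModeL1BatchCommitDataGenerator)
    }
}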