feat(vm-runner): shadow protective reads using VM runner (#2017)
## What ❔

Adds a new component `vm_runner_protective_reads` that computes protective
reads independently of, and asynchronously from, the state keeper.

For now, the component does not persist anything; instead, it computes
protective reads and compares them against what the state keeper has already
written to the DB. In short, this is the first stepping stone: a sanity check
that the VM runner mechanism works as intended.
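
The comparison is essentially a set diff between the recomputed reads and the persisted ones. A minimal sketch of that check (the key type and the helper are simplified and hypothetical; the actual component works with zksync storage keys and the DAL rather than plain tuples):

```rust
use std::collections::BTreeSet;

// Hypothetical, simplified stand-in for a storage slot key.
type StorageKey = (u64, u64); // (account, slot)

/// Compares protective reads recomputed by the VM runner against the set the
/// state keeper already persisted for the same L1 batch.
fn shadow_check(
    batch: u32,
    recomputed: &BTreeSet<StorageKey>,
    persisted: &BTreeSet<StorageKey>,
) -> Result<(), String> {
    let missing: Vec<_> = persisted.difference(recomputed).collect();
    let extra: Vec<_> = recomputed.difference(persisted).collect();
    if missing.is_empty() && extra.is_empty() {
        Ok(())
    } else {
        Err(format!(
            "batch {batch}: {} persisted reads not recomputed, {} recomputed reads not persisted",
            missing.len(),
            extra.len(),
        ))
    }
}
```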

## Why ❔

In the future, we want to be able to save protective reads asynchronously,
thus saving time during L1 batch sealing.

## Checklist

<!-- Check your PR fulfills the following items. -->
<!-- For draft PRs check the boxes as you complete them. -->

- [x] PR title corresponds to the body of PR (we generate changelog
entries from PRs).
- [ ] Tests for the changes have been added / updated.
- [x] Documentation comments have been added / updated.
- [x] Code has been formatted via `zk fmt` and `zk lint`.
- [x] Spellcheck has been run via `zk spellcheck`.
itegulov authored Jun 4, 2024
1 parent af39ca3 commit 1402dd0
Showing 38 changed files with 622 additions and 13 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/ci-core-reusable.yml
@@ -104,7 +104,7 @@ jobs:
# `sleep 60` because we need to wait until server added all the tokens
- name: Run server
run: |
- ci_run zk server --uring --components api,tree,eth,state_keeper,housekeeper,commitment_generator &>server.log &
+ ci_run zk server --uring --components api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads &>server.log &
ci_run sleep 60
- name: Deploy legacy era contracts
@@ -134,7 +134,7 @@ jobs:
base_token: ["Eth", "Custom"]
deployment_mode: ["Rollup", "Validium"]
env:
- SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator${{ matrix.consensus && ',consensus' || '' }}"
+ SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads${{ matrix.consensus && ',consensus' || '' }}"

runs-on: [matterlabs-ci-runner]
steps:
@@ -302,7 +302,7 @@ jobs:
runs-on: [matterlabs-ci-runner]

env:
- SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator${{ matrix.consensus && ',consensus' || '' }}"
+ SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads${{ matrix.consensus && ',consensus' || '' }}"
EXT_NODE_FLAGS: "${{ matrix.consensus && '-- --enable-consensus' || '' }}"

steps:
1 change: 1 addition & 0 deletions Cargo.lock

4 changes: 3 additions & 1 deletion core/bin/zksync_server/src/main.rs
@@ -13,7 +13,8 @@ use zksync_config::{
house_keeper::HouseKeeperConfig,
ContractsConfig, DatabaseSecrets, FriProofCompressorConfig, FriProverConfig,
FriProverGatewayConfig, FriWitnessGeneratorConfig, FriWitnessVectorGeneratorConfig,
- L1Secrets, ObservabilityConfig, PrometheusConfig, ProofDataHandlerConfig, Secrets,
+ L1Secrets, ObservabilityConfig, PrometheusConfig, ProofDataHandlerConfig,
+ ProtectiveReadsWriterConfig, Secrets,
},
ApiConfig, ContractVerifierConfig, DBConfig, EthConfig, EthWatchConfig, GasAdjusterConfig,
GenesisConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig,
@@ -306,5 +307,6 @@ fn load_env_config() -> anyhow::Result<TempConfigStore> {
object_store_config: ObjectStoreConfig::from_env().ok(),
observability: ObservabilityConfig::from_env().ok(),
snapshot_creator: SnapshotsCreatorConfig::from_env().ok(),
protective_reads_writer_config: ProtectiveReadsWriterConfig::from_env().ok(),
})
}
15 changes: 15 additions & 0 deletions core/bin/zksync_server/src/node_builder.rs
@@ -37,6 +37,7 @@ use zksync_node_framework::{
StateKeeperLayer,
},
tee_verifier_input_producer::TeeVerifierInputProducerLayer,
vm_runner::protective_reads::ProtectiveReadsWriterLayer,
web3_api::{
caches::MempoolCacheLayer,
server::{Web3ServerLayer, Web3ServerOptionalConfig},
@@ -399,6 +400,17 @@ impl MainNodeBuilder {
Ok(self)
}

fn add_vm_runner_protective_reads_layer(mut self) -> anyhow::Result<Self> {
let protective_reads_writer_config =
try_load_config!(self.configs.protective_reads_writer_config);
self.node.add_layer(ProtectiveReadsWriterLayer::new(
protective_reads_writer_config,
self.genesis_config.l2_chain_id,
));

Ok(self)
}

pub fn build(mut self, mut components: Vec<Component>) -> anyhow::Result<ZkStackService> {
// Add "base" layers (resources and helper tasks).
self = self
@@ -480,6 +492,9 @@ impl MainNodeBuilder {
Component::CommitmentGenerator => {
self = self.add_commitment_generator_layer()?;
}
Component::VmRunnerProtectiveReads => {
self = self.add_vm_runner_protective_reads_layer()?;
}
}
}
Ok(self.node.build()?)
2 changes: 2 additions & 0 deletions core/lib/config/src/configs/general.rs
@@ -3,6 +3,7 @@ use crate::{
chain::{CircuitBreakerConfig, MempoolConfig, OperationsManagerConfig, StateKeeperConfig},
fri_prover_group::FriProverGroupConfig,
house_keeper::HouseKeeperConfig,
vm_runner::ProtectiveReadsWriterConfig,
FriProofCompressorConfig, FriProverConfig, FriProverGatewayConfig,
FriWitnessGeneratorConfig, FriWitnessVectorGeneratorConfig, ObservabilityConfig,
PrometheusConfig, ProofDataHandlerConfig,
@@ -32,4 +33,5 @@ pub struct GeneralConfig {
pub eth: Option<EthConfig>,
pub snapshot_creator: Option<SnapshotsCreatorConfig>,
pub observability: Option<ObservabilityConfig>,
pub protective_reads_writer_config: Option<ProtectiveReadsWriterConfig>,
}
2 changes: 2 additions & 0 deletions core/lib/config/src/configs/mod.rs
@@ -20,6 +20,7 @@ pub use self::{
secrets::{DatabaseSecrets, L1Secrets, Secrets},
snapshots_creator::SnapshotsCreatorConfig,
utils::PrometheusConfig,
vm_runner::ProtectiveReadsWriterConfig,
};

pub mod api;
@@ -46,6 +47,7 @@ pub mod proof_data_handler;
pub mod secrets;
pub mod snapshots_creator;
pub mod utils;
pub mod vm_runner;
pub mod wallets;

const BYTES_IN_MEGABYTE: usize = 1_024 * 1_024;
16 changes: 16 additions & 0 deletions core/lib/config/src/configs/vm_runner.rs
@@ -0,0 +1,16 @@
use serde::Deserialize;

#[derive(Debug, Deserialize, Clone, PartialEq, Default)]
pub struct ProtectiveReadsWriterConfig {
    /// Path to the RocksDB data directory that serves state cache.
    #[serde(default = "ProtectiveReadsWriterConfig::default_protective_reads_db_path")]
    pub protective_reads_db_path: String,
    /// Maximum number of L1 batches that may be processed concurrently (the size of the processing window).
    pub protective_reads_window_size: u32,
}

impl ProtectiveReadsWriterConfig {
    fn default_protective_reads_db_path() -> String {
        "./db/protective_reads_writer".to_owned()
    }
}
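
The `#[serde(default = ...)]` attribute only kicks in during deserialization (the derived `Default` would leave the path empty). A quick sketch of how a partially specified config falls back to the default RocksDB path, assuming `serde_json` is available for illustration:

```rust
use zksync_config::configs::ProtectiveReadsWriterConfig;

fn main() -> anyhow::Result<()> {
    // Only the window size is provided; the db path falls back to the serde default.
    let config: ProtectiveReadsWriterConfig =
        serde_json::from_str(r#"{ "protective_reads_window_size": 3 }"#)?;
    assert_eq!(config.protective_reads_db_path, "./db/protective_reads_writer");
    assert_eq!(config.protective_reads_window_size, 3);
    Ok(())
}
```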

@@ -0,0 +1 @@
DROP TABLE IF EXISTS vm_runner_protective_reads;
@@ -0,0 +1,7 @@
CREATE TABLE IF NOT EXISTS vm_runner_protective_reads
(
    l1_batch_number BIGINT NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    time_taken TIME
);
9 changes: 8 additions & 1 deletion core/lib/dal/src/lib.rs
@@ -23,7 +23,7 @@ use crate::{
sync_dal::SyncDal, system_dal::SystemDal,
tee_verifier_input_producer_dal::TeeVerifierInputProducerDal, tokens_dal::TokensDal,
tokens_web3_dal::TokensWeb3Dal, transactions_dal::TransactionsDal,
- transactions_web3_dal::TransactionsWeb3Dal,
+ transactions_web3_dal::TransactionsWeb3Dal, vm_runner_dal::VmRunnerDal,
};

pub mod blocks_dal;
@@ -55,6 +55,7 @@ pub mod tokens_dal;
pub mod tokens_web3_dal;
pub mod transactions_dal;
pub mod transactions_web3_dal;
pub mod vm_runner_dal;

#[cfg(test)]
mod tests;
@@ -119,6 +120,8 @@ where
fn snapshot_recovery_dal(&mut self) -> SnapshotRecoveryDal<'_, 'a>;

fn pruning_dal(&mut self) -> PruningDal<'_, 'a>;

fn vm_runner_dal(&mut self) -> VmRunnerDal<'_, 'a>;
}

#[derive(Clone, Debug)]
@@ -229,4 +232,8 @@ impl<'a> CoreDal<'a> for Connection<'a, Core> {
fn pruning_dal(&mut self) -> PruningDal<'_, 'a> {
PruningDal { storage: self }
}

fn vm_runner_dal(&mut self) -> VmRunnerDal<'_, 'a> {
VmRunnerDal { storage: self }
}
}
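
The new `CoreDal::vm_runner_dal()` accessor makes the DAL below reachable from any `Connection<'_, Core>`. A hedged sketch of how a caller might combine the new methods to pick the next batch to work on (usage pattern and import paths are assumptions, not taken from this PR):

```rust
use zksync_dal::{Core, CoreDal};
use zksync_db_connection::connection::Connection;
use zksync_types::L1BatchNumber;

/// Returns the next batch the protective-reads runner could process, if any.
async fn next_batch_to_process(
    conn: &mut Connection<'_, Core>,
    window_size: u32,
) -> anyhow::Result<Option<L1BatchNumber>> {
    let latest_processed = conn
        .vm_runner_dal()
        .get_protective_reads_latest_processed_batch()
        .await?;
    let last_ready = conn
        .vm_runner_dal()
        .get_protective_reads_last_ready_batch(window_size)
        .await?;
    // Anything in (latest_processed, last_ready] is fair game; take the first one.
    Ok((last_ready > latest_processed).then(|| latest_processed + 1))
}
```
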
83 changes: 83 additions & 0 deletions core/lib/dal/src/vm_runner_dal.rs
@@ -0,0 +1,83 @@
use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt};
use zksync_types::L1BatchNumber;

use crate::Core;

#[derive(Debug)]
pub struct VmRunnerDal<'c, 'a> {
    pub(crate) storage: &'c mut Connection<'a, Core>,
}

impl VmRunnerDal<'_, '_> {
    pub async fn get_protective_reads_latest_processed_batch(
        &mut self,
    ) -> DalResult<L1BatchNumber> {
        let row = sqlx::query!(
            r#"
            SELECT
                COALESCE(MAX(l1_batch_number), 0) AS "last_processed_l1_batch!"
            FROM
                vm_runner_protective_reads
            "#
        )
        .instrument("get_protective_reads_latest_processed_batch")
        .report_latency()
        .fetch_one(self.storage)
        .await?;
        Ok(L1BatchNumber(row.last_processed_l1_batch as u32))
    }

    pub async fn get_protective_reads_last_ready_batch(
        &mut self,
        window_size: u32,
    ) -> DalResult<L1BatchNumber> {
        let row = sqlx::query!(
            r#"
            WITH
                available_batches AS (
                    SELECT
                        MAX(number) AS "last_batch"
                    FROM
                        l1_batches
                ),
                processed_batches AS (
                    SELECT
                        COALESCE(MAX(l1_batch_number), 0) + $1 AS "last_ready_batch"
                    FROM
                        vm_runner_protective_reads
                )
            SELECT
                LEAST(last_batch, last_ready_batch) AS "last_ready_batch!"
            FROM
                available_batches
                FULL JOIN processed_batches ON TRUE
            "#,
            window_size as i32
        )
        .instrument("get_protective_reads_last_ready_batch")
        .report_latency()
        .fetch_one(self.storage)
        .await?;
        Ok(L1BatchNumber(row.last_ready_batch as u32))
    }

    pub async fn mark_protective_reads_batch_as_completed(
        &mut self,
        l1_batch_number: L1BatchNumber,
    ) -> DalResult<()> {
        sqlx::query!(
            r#"
            INSERT INTO
                vm_runner_protective_reads (l1_batch_number, created_at, updated_at)
            VALUES
                ($1, NOW(), NOW())
            "#,
            i64::from(l1_batch_number.0),
        )
        .instrument("mark_protective_reads_batch_as_completed")
        .report_latency()
        .execute(self.storage)
        .await?;
        Ok(())
    }
}
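
The window query above caps how far ahead of its last completed batch the runner may go: the last ready batch is the smaller of the newest sealed L1 batch and "last processed + window size". A small sketch of that arithmetic (a hypothetical helper mirroring the SQL, not calling it):

```rust
/// Mirrors `get_protective_reads_last_ready_batch`: the runner may go at most
/// `window_size` batches beyond the last batch it completed, but never past
/// the newest sealed L1 batch. `latest_processed` is 0 when nothing has been
/// processed yet, matching the COALESCE in the query.
fn last_ready_batch(latest_sealed: u32, latest_processed: u32, window_size: u32) -> u32 {
    (latest_processed + window_size).min(latest_sealed)
}

fn main() {
    // With 100 sealed batches, 40 already processed, and a window of 3,
    // the runner is allowed to work up to batch 43.
    assert_eq!(last_ready_batch(100, 40, 3), 43);
}
```
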
1 change: 1 addition & 0 deletions core/lib/env_config/src/lib.rs
@@ -24,6 +24,7 @@ mod utils;
mod genesis;
#[cfg(test)]
mod test_utils;
mod vm_runner;
mod wallets;

pub trait FromEnv: Sized {
9 changes: 9 additions & 0 deletions core/lib/env_config/src/vm_runner.rs
@@ -0,0 +1,9 @@
use zksync_config::configs::ProtectiveReadsWriterConfig;

use crate::{envy_load, FromEnv};

impl FromEnv for ProtectiveReadsWriterConfig {
    fn from_env() -> anyhow::Result<Self> {
        envy_load("vm_runner.protective_reads", "VM_RUNNER_PROTECTIVE_READS_")
    }
}
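
Assuming `envy_load` prefixes the upper-cased struct field names with the given prefix (as the `envy` crate does), the two settings would come from `VM_RUNNER_PROTECTIVE_READS_PROTECTIVE_READS_DB_PATH` and `VM_RUNNER_PROTECTIVE_READS_PROTECTIVE_READS_WINDOW_SIZE`. A sketch under that assumption (the variable names are not confirmed by this diff):

```rust
use zksync_config::configs::ProtectiveReadsWriterConfig;
use zksync_env_config::FromEnv;

fn main() -> anyhow::Result<()> {
    // Assumed variable names: prefix + upper-cased field name.
    std::env::set_var(
        "VM_RUNNER_PROTECTIVE_READS_PROTECTIVE_READS_DB_PATH",
        "./db/protective_reads_writer",
    );
    std::env::set_var("VM_RUNNER_PROTECTIVE_READS_PROTECTIVE_READS_WINDOW_SIZE", "3");

    let config = ProtectiveReadsWriterConfig::from_env()?;
    assert_eq!(config.protective_reads_window_size, 3);
    Ok(())
}
```
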
6 changes: 6 additions & 0 deletions core/lib/protobuf_config/src/general.rs
@@ -37,6 +37,8 @@ impl ProtoRepr for proto::GeneralConfig {
snapshot_creator: read_optional_repr(&self.snapshot_creator)
.context("snapshot_creator")?,
observability: read_optional_repr(&self.observability).context("observability")?,
protective_reads_writer_config: read_optional_repr(&self.protective_reads_writer)
.context("vm_runner")?,
})
}

@@ -68,6 +70,10 @@ impl ProtoRepr for proto::GeneralConfig {
eth: this.eth.as_ref().map(ProtoRepr::build),
snapshot_creator: this.snapshot_creator.as_ref().map(ProtoRepr::build),
observability: this.observability.as_ref().map(ProtoRepr::build),
protective_reads_writer: this
.protective_reads_writer_config
.as_ref()
.map(ProtoRepr::build),
}
}
}
1 change: 1 addition & 0 deletions core/lib/protobuf_config/src/lib.rs
@@ -27,6 +27,7 @@ pub mod testonly;
#[cfg(test)]
mod tests;
mod utils;
mod vm_runner;
mod wallets;

use std::str::FromStr;
2 changes: 2 additions & 0 deletions core/lib/protobuf_config/src/proto/config/general.proto
@@ -13,6 +13,7 @@ import "zksync/config/house_keeper.proto";
import "zksync/config/observability.proto";
import "zksync/config/snapshots_creator.proto";
import "zksync/config/utils.proto";
import "zksync/config/vm_runner.proto";

message GeneralConfig {
optional config.database.Postgres postgres = 1;
@@ -35,4 +36,5 @@ message GeneralConfig {
optional config.prover.ProverGateway prover_gateway = 30;
optional config.snapshot_creator.SnapshotsCreator snapshot_creator = 31;
optional config.observability.Observability observability = 32;
optional config.vm_runner.ProtectiveReadsWriter protective_reads_writer = 33;
}
8 changes: 8 additions & 0 deletions core/lib/protobuf_config/src/proto/config/vm_runner.proto
@@ -0,0 +1,8 @@
syntax = "proto3";

package zksync.config.vm_runner;

message ProtectiveReadsWriter {
  optional string protective_reads_db_path = 1; // required; fs path
  optional uint64 protective_reads_window_size = 2; // required
}