[Merged by Bors] - Added Merkle Proof Generation for Beacon State #3674

Closed · wants to merge 12 commits · Changes from 6 commits
1 change: 1 addition & 0 deletions consensus/types/Cargo.toml
@@ -9,6 +9,7 @@ name = "benches"
harness = false

[dependencies]
merkle_proof = { path = "../../consensus/merkle_proof" }
bls = { path = "../../crypto/bls" }
compare_fields = { path = "../../common/compare_fields" }
compare_fields_derive = { path = "../../common/compare_fields_derive" }
61 changes: 61 additions & 0 deletions consensus/types/src/beacon_state.rs
@@ -124,6 +124,8 @@ pub enum Error {
current_epoch: Epoch,
epoch: Epoch,
},
IndexNotSupported(usize),
MerkleTreeError(merkle_proof::MerkleTreeError),
}

/// Control whether an epoch-indexed field can be indexed at the next epoch or not.
@@ -1669,6 +1671,59 @@ impl<T: EthSpec> BeaconState<T> {
};
Ok(sync_committee)
}

pub fn compute_merkle_proof(
&mut self,
generalized_index: usize,
) -> Result<Vec<Hash256>, Error> {
// 1. Convert generalized index to field index.
let field_index = match generalized_index {
light_client_update::CURRENT_SYNC_COMMITTEE_INDEX
| light_client_update::NEXT_SYNC_COMMITTEE_INDEX => {
// Sync committees are top-level fields, so subtract off the generalized indices
// of the internal nodes. The result should be 22 or 23, the field offset of the committee
// in the `BeaconState`:
// https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/beacon-chain.md#beaconstate
generalized_index.checked_sub(tree_hash_cache::NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES)
.ok_or(Error::IndexNotSupported(generalized_index))?
}
light_client_update::FINALIZED_ROOT_INDEX => {
// The finalized root is the right child of `finalized_checkpoint`, so divide by two
// to get the generalized index of `state.finalized_checkpoint`.
let finalized_checkpoint_generalized_index = generalized_index / 2;
// Subtract off the internal nodes. The result should be 105/2 - 32 = 20, which matches
// the position of `finalized_checkpoint` in `BeaconState`.
finalized_checkpoint_generalized_index.checked_sub(tree_hash_cache::NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES)
.ok_or(Error::IndexNotSupported(generalized_index))?
}
_ => return Err(Error::IndexNotSupported(generalized_index)),
};

// 2. Get all `BeaconState` leaves.
// The most efficient way to get these at the moment is from the tree hash cache.
// We can copy the guts of this function (or generalize it):
// https://github.com/sigp/lighthouse/blob/6d5a2b509fac7b6ffe693866f58ba49989f946d7/consensus/types/src/beacon_state/tree_hash_cache.rs#L213-L371
let mut cache = self.tree_hash_cache_mut().take().ok_or(Error::TreeHashCacheNotInitialized)?;
let leaves = cache.recalculate_tree_hash_leaves(self)?;

// 3. Make the merkle tree of `BeaconState` leaves.
// Use the depth of the `BeaconState` fields (i.e. `log2(32) = 5`).
let depth = light_client_update::CURRENT_SYNC_COMMITTEE_PROOF_LEN;
let tree = merkle_proof::MerkleTree::create(&leaves, depth);
let (_, mut proof) = tree.generate_proof(field_index, depth)?;

// 4. If we're proving the finalized root, patch in the finalized epoch to complete the proof.
if generalized_index == light_client_update::FINALIZED_ROOT_INDEX {
proof.insert(0, self.finalized_checkpoint().epoch.tree_hash_root());
}

Ok(proof)
}
}

impl From<RelativeEpochError> for Error {
@@ -1701,6 +1756,12 @@ impl From<tree_hash::Error> for Error {
}
}

impl From<merkle_proof::MerkleTreeError> for Error {
fn from(e: merkle_proof::MerkleTreeError) -> Error {
Error::MerkleTreeError(e)
}
}

impl From<ArithError> for Error {
fn from(e: ArithError) -> Error {
Error::ArithError(e)
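As a sanity check on step 1 of `compute_merkle_proof`, here is a small self-contained sketch of the index arithmetic it performs. The concrete generalized-index values (54, 55, 105) are assumptions taken from the Altair light-client spec rather than from this diff; only the offsets 22, 23 and 20 appear in the comments above.

```rust
// Sketch of the generalized-index -> field-offset mapping in `compute_merkle_proof`.
// 54/55/105 are the assumed Altair values of CURRENT_SYNC_COMMITTEE_INDEX,
// NEXT_SYNC_COMMITTEE_INDEX and FINALIZED_ROOT_INDEX.
const NUM_LEAVES: usize = 32; // NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES

fn field_offset(generalized_index: usize) -> Option<usize> {
    match generalized_index {
        // Sync committees are leaves of the 32-leaf field tree, so the
        // generalized index is just `32 + field_offset`.
        54 | 55 => generalized_index.checked_sub(NUM_LEAVES),
        // The finalized root is the right child of `finalized_checkpoint`,
        // so first step up one level, then strip the internal nodes.
        105 => (generalized_index / 2).checked_sub(NUM_LEAVES),
        _ => None,
    }
}

fn main() {
    assert_eq!(field_offset(54), Some(22)); // current_sync_committee
    assert_eq!(field_offset(55), Some(23)); // next_sync_committee
    assert_eq!(field_offset(105), Some(20)); // finalized_checkpoint
}
```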
194 changes: 78 additions & 116 deletions consensus/types/src/beacon_state/tree_hash_cache.rs
@@ -18,7 +18,7 @@ use tree_hash::{mix_in_length, MerkleHasher, TreeHash};
///
/// This constant is set with the assumption that there are `> 16` and `<= 32` fields on the
/// `BeaconState`. **Tree hashing will fail if this value is set incorrectly.**
const NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES: usize = 32;
pub const NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES: usize = 32;

/// The number of nodes in the Merkle tree of a validator record.
const NODES_PER_VALIDATOR: usize = 15;
@@ -210,6 +210,80 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
}
}

pub fn recalculate_tree_hash_leaves(
&mut self,
state: &BeaconState<T>,
) -> Result<Vec<Hash256>, Error> {
let mut leaves = vec![
// Genesis data leaves.
state.genesis_time().tree_hash_root(),
state.genesis_validators_root().tree_hash_root(),
// Current fork data leaves.
state.slot().tree_hash_root(),
state.fork().tree_hash_root(),
state.latest_block_header().tree_hash_root(),
// Roots leaves.
state.block_roots()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.block_roots)?,
state.state_roots()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.state_roots)?,
state.historical_roots()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.historical_roots)?,
// Eth1 Data leaves.
state.eth1_data().tree_hash_root(),
self.eth1_data_votes.recalculate_tree_hash_root(state)?,
state.eth1_deposit_index().tree_hash_root(),
// Validator leaves.
self.validators.recalculate_tree_hash_root(state.validators())?,
state.balances().recalculate_tree_hash_root(&mut self.balances_arena, &mut self.balances)?,
state.randao_mixes().recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.randao_mixes)?,
state.slashings().recalculate_tree_hash_root(&mut self.slashings_arena, &mut self.slashings)?
];
// Participation
if let BeaconState::Base(state) = state {
leaves.push(state.previous_epoch_attestations.tree_hash_root());
leaves.push(state.current_epoch_attestations.tree_hash_root());
} else {
leaves.push(
self.previous_epoch_participation
.recalculate_tree_hash_root(&ParticipationList::new(
state.previous_epoch_participation()?,
))?,
);
leaves.push(
self.current_epoch_participation
.recalculate_tree_hash_root(&ParticipationList::new(
state.current_epoch_participation()?,
))?,
);
}
// Checkpoint leaves
leaves.push(state.justification_bits().tree_hash_root());
leaves.push(state.previous_justified_checkpoint().tree_hash_root());
leaves.push(state.current_justified_checkpoint().tree_hash_root());
leaves.push(state.finalized_checkpoint().tree_hash_root());
// Inactivity & light-client sync committees (Altair and later).
if let Ok(inactivity_scores) = state.inactivity_scores() {
leaves.push(
self.inactivity_scores
.recalculate_tree_hash_root(inactivity_scores)?,
);
}
if let Ok(current_sync_committee) = state.current_sync_committee() {
leaves.push(current_sync_committee.tree_hash_root());
}

if let Ok(next_sync_committee) = state.next_sync_committee() {
leaves.push(next_sync_committee.tree_hash_root());
}

// Execution payload (merge and later).
if let Ok(payload_header) = state.latest_execution_payload_header() {
leaves.push(payload_header.tree_hash_root());
}
Ok(leaves)
}

/// Updates the cache and returns the tree hash root for the given `state`.
///
/// The provided `state` should be a descendant of the last `state` given to this function, or
@@ -246,121 +320,9 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {

let mut hasher = MerkleHasher::with_leaves(NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES);

hasher.write(state.genesis_time().tree_hash_root().as_bytes())?;
hasher.write(state.genesis_validators_root().tree_hash_root().as_bytes())?;
hasher.write(state.slot().tree_hash_root().as_bytes())?;
hasher.write(state.fork().tree_hash_root().as_bytes())?;
hasher.write(state.latest_block_header().tree_hash_root().as_bytes())?;
hasher.write(
state
.block_roots()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.block_roots)?
.as_bytes(),
)?;
hasher.write(
state
.state_roots()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.state_roots)?
.as_bytes(),
)?;
hasher.write(
state
.historical_roots()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.historical_roots)?
.as_bytes(),
)?;
hasher.write(state.eth1_data().tree_hash_root().as_bytes())?;
hasher.write(
self.eth1_data_votes
.recalculate_tree_hash_root(state)?
.as_bytes(),
)?;
hasher.write(state.eth1_deposit_index().tree_hash_root().as_bytes())?;
hasher.write(
self.validators
.recalculate_tree_hash_root(state.validators())?
.as_bytes(),
)?;
hasher.write(
state
.balances()
.recalculate_tree_hash_root(&mut self.balances_arena, &mut self.balances)?
.as_bytes(),
)?;
hasher.write(
state
.randao_mixes()
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.randao_mixes)?
.as_bytes(),
)?;
hasher.write(
state
.slashings()
.recalculate_tree_hash_root(&mut self.slashings_arena, &mut self.slashings)?
.as_bytes(),
)?;

// Participation
if let BeaconState::Base(state) = state {
hasher.write(
state
.previous_epoch_attestations
.tree_hash_root()
.as_bytes(),
)?;
hasher.write(state.current_epoch_attestations.tree_hash_root().as_bytes())?;
} else {
hasher.write(
self.previous_epoch_participation
.recalculate_tree_hash_root(&ParticipationList::new(
state.previous_epoch_participation()?,
))?
.as_bytes(),
)?;
hasher.write(
self.current_epoch_participation
.recalculate_tree_hash_root(&ParticipationList::new(
state.current_epoch_participation()?,
))?
.as_bytes(),
)?;
}

hasher.write(state.justification_bits().tree_hash_root().as_bytes())?;
hasher.write(
state
.previous_justified_checkpoint()
.tree_hash_root()
.as_bytes(),
)?;
hasher.write(
state
.current_justified_checkpoint()
.tree_hash_root()
.as_bytes(),
)?;
hasher.write(state.finalized_checkpoint().tree_hash_root().as_bytes())?;

// Inactivity & light-client sync committees (Altair and later).
if let Ok(inactivity_scores) = state.inactivity_scores() {
hasher.write(
self.inactivity_scores
.recalculate_tree_hash_root(inactivity_scores)?
.as_bytes(),
)?;
}

if let Ok(current_sync_committee) = state.current_sync_committee() {
hasher.write(current_sync_committee.tree_hash_root().as_bytes())?;
}

if let Ok(next_sync_committee) = state.next_sync_committee() {
hasher.write(next_sync_committee.tree_hash_root().as_bytes())?;
}

// Execution payload (merge and later).
if let Ok(payload_header) = state.latest_execution_payload_header() {
hasher.write(payload_header.tree_hash_root().as_bytes())?;
let leaves = self.recalculate_tree_hash_leaves(state)?;
for leaf in leaves {
hasher.write(leaf.as_bytes())?;
}

let root = hasher.finish()?;
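For context, a minimal round trip over the `merkle_proof` crate used above: the leaves returned by `recalculate_tree_hash_leaves` (21 for phase0, 24 for Altair, 25 with an execution payload) are padded by `MerkleTree::create` up to 2^5 = 32, and `generate_proof` returns one sibling per level. The toy leaves below are placeholders, and `MerkleTree::hash`/`verify_merkle_proof` are assumed to be the existing helpers in `consensus/merkle_proof`.

```rust
use merkle_proof::{verify_merkle_proof, MerkleTree};
use types::Hash256;

fn main() {
    // Stand-ins for the Altair `BeaconState` leaves (24 of them); the tree is
    // padded with zero hashes up to the fixed 32-leaf width.
    let leaves: Vec<Hash256> = (0..24u64).map(Hash256::from_low_u64_be).collect();
    let depth = 5; // log2(NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES)

    let tree = MerkleTree::create(&leaves, depth);
    let field_offset = 22; // e.g. `current_sync_committee`
    let (leaf, proof) = tree.generate_proof(field_offset, depth).unwrap();

    assert_eq!(proof.len(), depth); // one sibling hash per level
    assert!(verify_merkle_proof(leaf, &proof, depth, field_offset, tree.hash()));
}
```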
10 changes: 4 additions & 6 deletions consensus/types/src/light_client_bootstrap.rs
@@ -21,17 +21,15 @@ pub struct LightClientBootstrap<T: EthSpec> {
}

impl<T: EthSpec> LightClientBootstrap<T> {
pub fn from_beacon_state(beacon_state: BeaconState<T>) -> Result<Self, Error> {
pub fn from_beacon_state(mut beacon_state: BeaconState<T>) -> Result<Self, Error> {
let mut header = beacon_state.latest_block_header().clone();
header.state_root = beacon_state.tree_hash_root();
let current_sync_committee_branch =
beacon_state.compute_merkle_proof(CURRENT_SYNC_COMMITTEE_INDEX)?;
Ok(LightClientBootstrap {
header,
current_sync_committee: beacon_state.current_sync_committee()?.clone(),
/// TODO(Giulio2002): Generate Merkle Proof, this is just empty hashes
current_sync_committee_branch: FixedVector::new(vec![
Hash256::zero();
CURRENT_SYNC_COMMITTEE_PROOF_LEN
])?,
current_sync_committee_branch: FixedVector::new(current_sync_committee_branch)?,
})
}
}
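A hedged usage sketch for the new constructor: the state must already have its tree hash cache built (otherwise `compute_merkle_proof` returns `TreeHashCacheNotInitialized`), and the resulting branch should verify against the header's state root. The helper name `check_bootstrap`, the module paths, and the `% 2^depth` subtree-index step come from the light-client spec and are assumptions, not part of this diff.

```rust
use merkle_proof::verify_merkle_proof;
use tree_hash::TreeHash;
use types::light_client_update::{CURRENT_SYNC_COMMITTEE_INDEX, CURRENT_SYNC_COMMITTEE_PROOF_LEN};
use types::{BeaconState, LightClientBootstrap, MainnetEthSpec};

// Hypothetical helper: build a bootstrap from a state whose tree hash cache has
// already been initialised (e.g. via `state.update_tree_hash_cache()`).
fn check_bootstrap(state: BeaconState<MainnetEthSpec>) -> bool {
    let bootstrap = LightClientBootstrap::from_beacon_state(state).expect("proof generation");
    verify_merkle_proof(
        bootstrap.current_sync_committee.tree_hash_root(),
        &bootstrap.current_sync_committee_branch,
        CURRENT_SYNC_COMMITTEE_PROOF_LEN,
        // Subtree index of the committee leaf beneath the state root.
        CURRENT_SYNC_COMMITTEE_INDEX % (1 << CURRENT_SYNC_COMMITTEE_PROOF_LEN),
        bootstrap.header.state_root,
    )
}
```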
7 changes: 4 additions & 3 deletions consensus/types/src/light_client_finality_update.rs
@@ -31,7 +31,7 @@ impl<T: EthSpec> LightClientFinalityUpdate<T> {
chain_spec: ChainSpec,
beacon_state: BeaconState<T>,
block: BeaconBlock<T>,
attested_state: BeaconState<T>,
mut attested_state: BeaconState<T>,
finalized_block: BeaconBlock<T>,
) -> Result<Self, Error> {
let altair_fork_epoch = chain_spec
@@ -60,11 +60,12 @@ impl<T: EthSpec> LightClientFinalityUpdate<T> {
if finalized_header.tree_hash_root() != beacon_state.finalized_checkpoint().root {
return Err(Error::InvalidFinalizedBlock);
}
// TODO(Giulio2002): compute proper merkle proofs.

let finality_branch = attested_state.compute_merkle_proof(FINALIZED_ROOT_INDEX)?;
Ok(Self {
attested_header: attested_header,
finalized_header: finalized_header,
finality_branch: FixedVector::new(vec![Hash256::zero(); FINALIZED_ROOT_PROOF_LEN])?,
finality_branch: FixedVector::new(finality_branch)?,
sync_aggregate: sync_aggregate.clone(),
signature_slot: block.slot(),
})
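The `compute_merkle_proof(FINALIZED_ROOT_INDEX)` call above returns six hashes: five siblings within the `BeaconState` field tree plus the prepended `finalized_checkpoint.epoch` root, which is the first sibling on the path from the finalized root up to the state root. A hedged sketch of the corresponding check follows; the subtree-index arithmetic is taken from the light-client spec and the helper name is hypothetical.

```rust
use merkle_proof::verify_merkle_proof;
use tree_hash::TreeHash;
use types::light_client_update::{FINALIZED_ROOT_INDEX, FINALIZED_ROOT_PROOF_LEN};
use types::{BeaconBlockHeader, Hash256};

// `finality_branch` is the vector produced by
// `attested_state.compute_merkle_proof(FINALIZED_ROOT_INDEX)`.
fn check_finality_branch(
    finalized_header: &BeaconBlockHeader,
    finality_branch: &[Hash256],
    attested_state_root: Hash256,
) -> bool {
    // FINALIZED_ROOT_PROOF_LEN = 6: five `BeaconState` levels plus one level
    // inside the `Checkpoint` container (the prepended epoch leaf).
    assert_eq!(finality_branch.len(), FINALIZED_ROOT_PROOF_LEN);
    verify_merkle_proof(
        finalized_header.tree_hash_root(),
        finality_branch,
        FINALIZED_ROOT_PROOF_LEN,
        FINALIZED_ROOT_INDEX % (1 << FINALIZED_ROOT_PROOF_LEN), // 105 % 64 = 41
        attested_state_root,
    )
}
```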
13 changes: 6 additions & 7 deletions consensus/types/src/light_client_update.rs
@@ -77,7 +77,7 @@ impl<T: EthSpec> LightClientUpdate<T> {
chain_spec: ChainSpec,
beacon_state: BeaconState<T>,
block: BeaconBlock<T>,
attested_state: BeaconState<T>,
mut attested_state: BeaconState<T>,
finalized_block: BeaconBlock<T>,
) -> Result<Self, Error> {
let altair_fork_epoch = chain_spec
@@ -114,16 +114,15 @@ impl<T: EthSpec> LightClientUpdate<T> {
if finalized_header.tree_hash_root() != beacon_state.finalized_checkpoint().root {
return Err(Error::InvalidFinalizedBlock);
}
// TODO(Giulio2002): compute proper merkle proofs.
let next_sync_committee_branch =
attested_state.compute_merkle_proof(NEXT_SYNC_COMMITTEE_INDEX)?;
let finality_branch = attested_state.compute_merkle_proof(FINALIZED_ROOT_INDEX)?;
Ok(Self {
attested_header,
next_sync_committee: attested_state.next_sync_committee()?.clone(),
next_sync_committee_branch: FixedVector::new(vec![
Hash256::zero();
NEXT_SYNC_COMMITTEE_PROOF_LEN
])?,
next_sync_committee_branch: FixedVector::new(next_sync_committee_branch)?,
finalized_header,
finality_branch: FixedVector::new(vec![Hash256::zero(); FINALIZED_ROOT_PROOF_LEN])?,
finality_branch: FixedVector::new(finality_branch)?,
sync_aggregate: sync_aggregate.clone(),
signature_slot: block.slot(),
})
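The analogous check for the other branch built here: `next_sync_committee` sits directly in the `BeaconState` field tree, so the proof is the plain five-sibling branch with no extra leaf. Again a hedged sketch, with an assumed subtree-index step and a hypothetical helper name.

```rust
use merkle_proof::verify_merkle_proof;
use tree_hash::TreeHash;
use types::light_client_update::{NEXT_SYNC_COMMITTEE_INDEX, NEXT_SYNC_COMMITTEE_PROOF_LEN};
use types::{EthSpec, Hash256, SyncCommittee};

fn check_next_sync_committee_branch<T: EthSpec>(
    next_sync_committee: &SyncCommittee<T>,
    branch: &[Hash256],
    attested_state_root: Hash256,
) -> bool {
    verify_merkle_proof(
        next_sync_committee.tree_hash_root(),
        branch,
        NEXT_SYNC_COMMITTEE_PROOF_LEN, // 5
        NEXT_SYNC_COMMITTEE_INDEX % (1 << NEXT_SYNC_COMMITTEE_PROOF_LEN), // 55 % 32 = 23
        attested_state_root,
    )
}
```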
2 changes: 2 additions & 0 deletions testing/ef_tests/src/cases.rs
@@ -18,6 +18,7 @@ mod fork;
mod fork_choice;
mod genesis_initialization;
mod genesis_validity;
mod merkle_proof_validity;
mod operations;
mod rewards;
mod sanity_blocks;
@@ -41,6 +42,7 @@ pub use epoch_processing::*;
pub use fork::ForkTest;
pub use genesis_initialization::*;
pub use genesis_validity::*;
pub use merkle_proof_validity::*;
pub use operations::*;
pub use rewards::RewardsTest;
pub use sanity_blocks::*;