
Commit

Grandpa justifications: Avoid duplicate vote ancestries (paritytech#2634) (paritytech#2635)

* Grandpa justifications: Avoid duplicate vote ancestries
serban300 committed Apr 9, 2024
1 parent d5a4f5a commit 71bd634
Showing 7 changed files with 114 additions and 16 deletions.
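
For orientation, here is a minimal, self-contained sketch (simplified types, not the bridge crate's actual code) of the pattern the diff below applies in `AncestryChain::new`: a `votes_ancestries` entry whose header hash was already recorded is skipped, and its index is remembered so callers can either reject the justification (strict verification) or strip the duplicate (optimization).

use std::collections::{btree_map::Entry, BTreeMap, BTreeSet};

// Simplified stand-ins for the real header and hash types.
type Hash = u64;
struct Header { hash: Hash, parent_hash: Hash }

// Builds the hash -> parent-hash map, skipping duplicate headers and
// returning the indices of the entries that were ignored.
fn build_ancestry(votes_ancestries: &[Header]) -> (BTreeMap<Hash, Hash>, BTreeSet<Hash>, Vec<usize>) {
    let mut parents = BTreeMap::new();
    let mut unvisited = BTreeSet::new();
    let mut ignored_idxs = Vec::new();
    for (idx, ancestor) in votes_ancestries.iter().enumerate() {
        match parents.entry(ancestor.hash) {
            // This hash was already recorded: remember the duplicate's index only.
            Entry::Occupied(_) => ignored_idxs.push(idx),
            // First occurrence: record the parent link and mark the header unvisited.
            Entry::Vacant(entry) => {
                entry.insert(ancestor.parent_hash);
                unvisited.insert(ancestor.hash);
            },
        }
    }
    (parents, unvisited, ignored_idxs)
}

fn main() {
    let ancestry = vec![
        Header { hash: 2, parent_hash: 1 },
        Header { hash: 2, parent_hash: 1 }, // duplicate of index 0
        Header { hash: 3, parent_hash: 2 },
    ];
    let (_parents, _unvisited, ignored) = build_ancestry(&ancestry);
    assert_eq!(ignored, vec![1]);
}
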
@@ -101,6 +101,13 @@ impl<'a, Header: HeaderT> EquivocationsCollector<'a, Header> {
}

impl<'a, Header: HeaderT> JustificationVerifier<Header> for EquivocationsCollector<'a, Header> {
fn process_duplicate_votes_ancestries(
&mut self,
_duplicate_votes_ancestries: Vec<usize>,
) -> Result<(), JustificationVerificationError> {
Ok(())
}

fn process_redundant_vote(
&mut self,
_precommit_idx: usize,
@@ -27,7 +27,13 @@ use finality_grandpa::voter_set::VoterSet;
use sp_consensus_grandpa::{AuthorityId, AuthoritySignature, SetId};
use sp_runtime::{traits::Header as HeaderT, RuntimeDebug};
use sp_std::{
collections::{btree_map::BTreeMap, btree_set::BTreeSet},
collections::{
btree_map::{
BTreeMap,
Entry::{Occupied, Vacant},
},
btree_set::BTreeSet,
},
prelude::*,
};

@@ -44,23 +50,40 @@ pub struct AncestryChain<Header: HeaderT> {
/// We expect all forks in the ancestry chain to be descendants of base.
base: HeaderId<Header::Hash, Header::Number>,
/// Header hash => parent header hash mapping.
pub parents: BTreeMap<Header::Hash, Header::Hash>,
parents: BTreeMap<Header::Hash, Header::Hash>,
/// Hashes of headers that were not visited by `ancestry()`.
pub unvisited: BTreeSet<Header::Hash>,
unvisited: BTreeSet<Header::Hash>,
}

impl<Header: HeaderT> AncestryChain<Header> {
/// Create new ancestry chain.
pub fn new(justification: &GrandpaJustification<Header>) -> AncestryChain<Header> {
/// Creates a new instance of `AncestryChain` starting from a `GrandpaJustification`.
///
/// Returns the `AncestryChain` and a `Vec` containing the `votes_ancestries` entries
/// that were ignored when creating it, because they are duplicates.
pub fn new(
justification: &GrandpaJustification<Header>,
) -> (AncestryChain<Header>, Vec<usize>) {
let mut parents = BTreeMap::new();
let mut unvisited = BTreeSet::new();
for ancestor in &justification.votes_ancestries {
let mut ignored_idxs = Vec::new();
for (idx, ancestor) in justification.votes_ancestries.iter().enumerate() {
let hash = ancestor.hash();
let parent_hash = *ancestor.parent_hash();
parents.insert(hash, parent_hash);
unvisited.insert(hash);
match parents.entry(hash) {
Occupied(_) => {
ignored_idxs.push(idx);
},
Vacant(entry) => {
entry.insert(*ancestor.parent_hash());
unvisited.insert(hash);
},
}
}
AncestryChain { base: justification.commit_target_id(), parents, unvisited }
(AncestryChain { base: justification.commit_target_id(), parents, unvisited }, ignored_idxs)
}

/// Returns the hash of a block's parent if the block is present in the ancestry.
pub fn parent_hash_of(&self, hash: &Header::Hash) -> Option<&Header::Hash> {
self.parents.get(hash)
}

/// Returns a route if the precommit target block is a descendant of the `base` block.
@@ -80,7 +103,7 @@ impl<Header: HeaderT> AncestryChain<Header> {
break
}

current_hash = match self.parents.get(&current_hash) {
current_hash = match self.parent_hash_of(&current_hash) {
Some(parent_hash) => {
let is_visited_before = self.unvisited.get(&current_hash).is_none();
if is_visited_before {
@@ -117,6 +140,8 @@ pub enum Error {
InvalidAuthorityList,
/// Justification is finalizing unexpected header.
InvalidJustificationTarget,
/// The justification contains duplicate headers in its `votes_ancestries` field.
DuplicateVotesAncestries,
/// Error validating a precommit
Precommit(PrecommitError),
/// The cumulative weight of all votes in the justification is not enough to justify commit
@@ -168,6 +193,12 @@ enum IterationFlow {

/// Verification callbacks.
trait JustificationVerifier<Header: HeaderT> {
/// Called when there are duplicate headers in the votes ancestries.
fn process_duplicate_votes_ancestries(
&mut self,
duplicate_votes_ancestries: Vec<usize>,
) -> Result<(), Error>;

fn process_redundant_vote(
&mut self,
precommit_idx: usize,
@@ -216,10 +247,14 @@ trait JustificationVerifier<Header: HeaderT> {
}

let threshold = context.voter_set.threshold().get();
let mut chain = AncestryChain::new(justification);
let (mut chain, ignored_idxs) = AncestryChain::new(justification);
let mut signature_buffer = Vec::new();
let mut cumulative_weight = 0u64;

if !ignored_idxs.is_empty() {
self.process_duplicate_votes_ancestries(ignored_idxs)?;
}

for (precommit_idx, signed) in justification.commit.precommits.iter().enumerate() {
if cumulative_weight >= threshold {
let action =
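
The callback added above follows the same pattern as the existing `process_*` hooks: the shared verification loop reports the duplicate indices once, and each `JustificationVerifier` implementation decides how to react (see the optimizer and strict verifier below). A rough, self-contained sketch of that split, using simplified names rather than the crate's actual trait:

// Simplified stand-in for the crate's justification verification error.
#[derive(Debug, PartialEq)]
enum VerifyError { DuplicateVotesAncestries }

// Simplified version of the callback added to the verification trait.
trait DuplicateHandler {
    fn process_duplicate_votes_ancestries(&mut self, duplicates: Vec<usize>) -> Result<(), VerifyError>;
}

// Strict verification: any duplicate ancestry entry invalidates the justification.
struct Strict;
impl DuplicateHandler for Strict {
    fn process_duplicate_votes_ancestries(&mut self, _duplicates: Vec<usize>) -> Result<(), VerifyError> {
        Err(VerifyError::DuplicateVotesAncestries)
    }
}

// Optimization: remember the indices so the duplicates can be stripped later.
#[derive(Default)]
struct Optimizer { duplicate_idxs: Vec<usize> }
impl DuplicateHandler for Optimizer {
    fn process_duplicate_votes_ancestries(&mut self, duplicates: Vec<usize>) -> Result<(), VerifyError> {
        self.duplicate_idxs = duplicates;
        Ok(())
    }
}

fn main() {
    let mut strict = Strict;
    assert!(strict.process_duplicate_votes_ancestries(vec![1]).is_err());

    let mut optimizer = Optimizer::default();
    assert!(optimizer.process_duplicate_votes_ancestries(vec![1, 4]).is_ok());
    assert_eq!(optimizer.duplicate_idxs, vec![1, 4]);
}
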
@@ -33,6 +33,7 @@ struct JustificationOptimizer<Header: HeaderT> {
votes: BTreeSet<AuthorityId>,

extra_precommits: Vec<usize>,
duplicate_votes_ancestries_idxs: Vec<usize>,
redundant_votes_ancestries: BTreeSet<Header::Hash>,
}

@@ -41,6 +42,11 @@ impl<Header: HeaderT> JustificationOptimizer<Header> {
for invalid_precommit_idx in self.extra_precommits.into_iter().rev() {
justification.commit.precommits.remove(invalid_precommit_idx);
}
if !self.duplicate_votes_ancestries_idxs.is_empty() {
for idx in self.duplicate_votes_ancestries_idxs.iter().rev() {
justification.votes_ancestries.swap_remove(*idx);
}
}
if !self.redundant_votes_ancestries.is_empty() {
justification
.votes_ancestries
@@ -50,6 +56,14 @@ impl<Header: HeaderT> JustificationOptimizer<Header> {
}

impl<Header: HeaderT> JustificationVerifier<Header> for JustificationOptimizer<Header> {
fn process_duplicate_votes_ancestries(
&mut self,
duplicate_votes_ancestries: Vec<usize>,
) -> Result<(), Error> {
self.duplicate_votes_ancestries_idxs = duplicate_votes_ancestries.to_vec();
Ok(())
}

fn process_redundant_vote(
&mut self,
precommit_idx: usize,
@@ -118,6 +132,7 @@ pub fn verify_and_optimize_justification<Header: HeaderT>(
let mut optimizer = JustificationOptimizer {
votes: BTreeSet::new(),
extra_precommits: vec![],
duplicate_votes_ancestries_idxs: vec![],
redundant_votes_ancestries: Default::default(),
};
optimizer.verify_justification(finalized_target, context, justification)?;
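
On the removal loop above: the duplicate indices collected by `AncestryChain::new` are ascending, so walking them in reverse keeps every index that is still to be processed valid, and `swap_remove` avoids shifting the rest of the vector on each removal (the relative order of `votes_ancestries` entries is not significant for verification, which appears to be why the reordering `swap_remove` can introduce is acceptable). A tiny standalone illustration of the pattern:

fn main() {
    // Ascending indices of the entries to drop, as reported for the duplicates.
    let duplicate_idxs = vec![1, 2, 4];
    let mut votes_ancestries = vec!["A", "A-dup", "A-dup", "B", "B-dup"];

    // Reverse order: each `swap_remove` only moves an element in from the tail,
    // so the smaller indices that are still pending keep pointing at the right entries.
    for idx in duplicate_idxs.iter().rev() {
        votes_ancestries.swap_remove(*idx);
    }

    assert_eq!(votes_ancestries, vec!["A", "B"]);
}
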
@@ -26,14 +26,21 @@ use crate::justification::verification::{
};
use sp_consensus_grandpa::AuthorityId;
use sp_runtime::traits::Header as HeaderT;
use sp_std::collections::btree_set::BTreeSet;
use sp_std::{collections::btree_set::BTreeSet, vec::Vec};

/// Verification callbacks that reject all unknown, duplicate or redundant votes.
struct StrictJustificationVerifier {
votes: BTreeSet<AuthorityId>,
}

impl<Header: HeaderT> JustificationVerifier<Header> for StrictJustificationVerifier {
fn process_duplicate_votes_ancestries(
&mut self,
_duplicate_votes_ancestries: Vec<usize>,
) -> Result<(), Error> {
Err(Error::DuplicateVotesAncestries)
}

fn process_redundant_vote(
&mut self,
_precommit_idx: usize,
4 changes: 2 additions & 2 deletions bridges/primitives/header-chain/tests/implementation_match.rs
@@ -42,7 +42,7 @@ struct AncestryChain(bp_header_chain::justification::AncestryChain<TestHeader>);

impl AncestryChain {
fn new(justification: &GrandpaJustification<TestHeader>) -> Self {
Self(bp_header_chain::justification::AncestryChain::new(justification))
Self(bp_header_chain::justification::AncestryChain::new(justification).0)
}
}

@@ -58,7 +58,7 @@ impl finality_grandpa::Chain<TestHash, TestNumber> for AncestryChain {
if current_hash == base {
break
}
match self.0.parents.get(&current_hash) {
match self.0.parent_hash_of(&current_hash) {
Some(parent_hash) => {
current_hash = *parent_hash;
route.push(current_hash);
20 changes: 20 additions & 0 deletions bridges/primitives/header-chain/tests/justification/optimizer.rs
@@ -158,6 +158,26 @@ fn unrelated_ancestry_votes_are_removed_by_optimizer() {
assert_eq!(num_precommits_before - 1, num_precommits_after);
}

#[test]
fn duplicate_votes_ancestries_are_removed_by_optimizer() {
let mut justification = make_default_justification::<TestHeader>(&test_header(1));
let optimized_votes_ancestries = justification.votes_ancestries.clone();
justification.votes_ancestries = justification
.votes_ancestries
.into_iter()
.flat_map(|item| std::iter::repeat(item).take(3))
.collect();

verify_and_optimize_justification::<TestHeader>(
header_id::<TestHeader>(1),
&verification_context(TEST_GRANDPA_SET_ID),
&mut justification,
)
.unwrap();

assert_eq!(justification.votes_ancestries, optimized_votes_ancestries);
}

#[test]
fn redundant_votes_ancestries_are_removed_by_optimizer() {
let mut justification = make_default_justification::<TestHeader>(&test_header(1));
16 changes: 15 additions & 1 deletion bridges/primitives/header-chain/tests/justification/strict.rs
@@ -149,7 +149,21 @@ fn justification_with_invalid_authority_signature_rejected() {
}

#[test]
fn justification_with_invalid_precommit_ancestry() {
fn justification_with_duplicate_votes_ancestry() {
let mut justification = make_default_justification::<TestHeader>(&test_header(1));
justification.votes_ancestries.push(justification.votes_ancestries[0].clone());

assert_eq!(
verify_justification::<TestHeader>(
header_id::<TestHeader>(1),
&verification_context(TEST_GRANDPA_SET_ID),
&justification,
),
Err(JustificationVerificationError::DuplicateVotesAncestries),
);
}

#[test]
fn justification_with_redundant_votes_ancestry() {
let mut justification = make_default_justification::<TestHeader>(&test_header(1));
justification.votes_ancestries.push(test_header(10));

