Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Malus: add disputed block percentage #6100

Merged
merged 52 commits into from
Oct 13, 2022
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
52 commits
Select commit Hold shift + click to select a range
ed18805
Malus: add disputed block percentage
bredamatt Oct 4, 2022
1eb3844
Cleanup tests
bredamatt Oct 4, 2022
f46e97c
* Replace unwrap with expect and meaningful error message
bredamatt Oct 4, 2022
99bc912
* Remove Inner
bredamatt Oct 4, 2022
a259272
* Rename sampled variable
bredamatt Oct 6, 2022
f371e5e
* Add percentage option to dispute_ancestor
bredamatt Oct 6, 2022
f3dca67
* Support static probability for `ReplaceValidationResult` proxy
bredamatt Oct 7, 2022
d08d405
* Add `--percentage` to `back-garbage-candidate` variant
bredamatt Oct 7, 2022
e9a1b3e
* Add probabilistic behavior to `dispute-ancestor` variant
bredamatt Oct 7, 2022
4fff00b
* More descriptive comments
bredamatt Oct 7, 2022
bfe0aa7
* cargo +nightly fmt --all
bredamatt Oct 7, 2022
4704e98
* Move Bernoulli distribution to ReplaceValidationResult constructor
bredamatt Oct 7, 2022
0ec9bc1
* Remove dangling comment
bredamatt Oct 7, 2022
563fa17
* Consistent log
bredamatt Oct 7, 2022
f789958
* Add logs based on sampled value
bredamatt Oct 7, 2022
decb9e2
* Cargo +nightly fmt --all
bredamatt Oct 7, 2022
3ad5cec
* Remove unused percentage attribute after moving Bernoulli to const…
bredamatt Oct 10, 2022
5001fa5
Squashed commit of the following:
bredamatt Oct 10, 2022
b6e446e
Revert "Squashed commit of the following:"
bredamatt Oct 10, 2022
0a4d2b9
Companion for BEEFY: Simplify hashing for pallet-beefy-mmr (#6098)
serban300 Oct 4, 2022
dffb976
Keep sessions in window for the full unfinalized chain (#6054)
sandreim Oct 4, 2022
89d1f31
Bump lru from 0.7.8 to 0.8.0 (#6060)
dependabot[bot] Oct 4, 2022
b012597
Batch vote import in dispute-distribution (#5894)
eskimor Oct 4, 2022
2fd74d9
Add unknown words (#6105)
eskimor Oct 4, 2022
7e89bab
Buffered connection management for collator-protocol (#6022)
slumber Oct 5, 2022
c169279
Properly migrate weights to v2 (#6091)
KiChjang Oct 5, 2022
233c158
Pass through `runtime-benchmark` feature (#6110)
athei Oct 5, 2022
1655839
Companion for #11649: Bound uses of `Call` (#5729)
gavofyork Oct 5, 2022
e2a977a
update kvdb & co (#6111)
ordian Oct 5, 2022
4a6cf48
Skip `unexpected metric type`
bkontur Oct 6, 2022
78b8294
service: use MmrRootProvider as custom BEEFY payload provider (compan…
acatangiu Oct 6, 2022
82f7ad5
Maximum value for `MultiplierUpdate` (#6021)
Szegoo Oct 6, 2022
978d87f
Companion for upgrading pin-project (#6118)
bkchr Oct 7, 2022
0717246
Companion for 12109 (#5929)
Lezek123 Oct 9, 2022
1e96dfd
Add event to asset claim (#6029)
girazoki Oct 10, 2022
857e635
Fix flaky test (#6131)
slumber Oct 10, 2022
96185c3
ci/guide: install mdbook-graphviz (#6119)
ordian Oct 10, 2022
612302c
Revert "Squashed commit of the following:"
bredamatt Oct 10, 2022
35af7ad
Merge branch 'master' of github.com:paritytech/polkadot into bredamat…
bredamatt Oct 10, 2022
cf3f453
* Remove unused imports
bredamatt Oct 10, 2022
f1440e7
* cargo +nightly fmt --all
bredamatt Oct 10, 2022
6f54587
Make tweaks based on PR comments
bredamatt Oct 11, 2022
9842ee9
unit test related to gum formatting
bredamatt Oct 11, 2022
56347c6
cargo +nightly fmt --all
bredamatt Oct 11, 2022
4a7dd91
Merge branch 'master' of github.com:paritytech/polkadot into bredamat…
bredamatt Oct 12, 2022
72591a9
Resolve merge conflicts
bredamatt Oct 12, 2022
23d9498
cargo +nightly fmt --all
bredamatt Oct 12, 2022
be1f5a9
Fix tests so they use cli rather than cmd
bredamatt Oct 12, 2022
cdc68c4
CI unused import check fix
bredamatt Oct 12, 2022
dd96c47
Move info! log to startup
bredamatt Oct 12, 2022
5c20d8c
make info log more comprehensible
bredamatt Oct 12, 2022
7529e94
Merge branch 'master' of github.com:paritytech/polkadot into bredamat…
bredamatt Oct 13, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Revert "Squashed commit of the following:"
This reverts commit 5001fa5.
  • Loading branch information
bredamatt committed Oct 10, 2022
commit b6e446ed11da95bd0eb0c2d1b7311002ceb1b6f3
496 changes: 255 additions & 241 deletions Cargo.lock

Large diffs are not rendered by default.

9 changes: 4 additions & 5 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ readme = "README.md"
[dependencies]
polkadot-cli = { path = "cli", features = [ "kusama-native", "westend-native", "rococo-native" ] }
color-eyre = { version = "0.6.1", default-features = false }
parity-util-mem = { version = "0.12.0", default-features = false, features = ["jemalloc-global"] }
parity-util-mem = { version = "0.11.0", default-features = false, features = ["jemalloc-global"] }

[dev-dependencies]
assert_cmd = "2.0.4"
Expand Down Expand Up @@ -125,9 +125,9 @@ maintenance = { status = "actively-developed" }
#
# This list is ordered alphabetically.
[profile.dev.package]
blake2b_simd = { opt-level = 3 }
blake2 = { opt-level = 3 }
blake2-rfc = { opt-level = 3 }
blake2b_simd = { opt-level = 3 }
chacha20poly1305 = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
cranelift-wasm = { opt-level = 3 }
Expand All @@ -138,8 +138,8 @@ curve25519-dalek = { opt-level = 3 }
ed25519-dalek = { opt-level = 3 }
flate2 = { opt-level = 3 }
futures-channel = { opt-level = 3 }
hash-db = { opt-level = 3 }
hashbrown = { opt-level = 3 }
hash-db = { opt-level = 3 }
hmac = { opt-level = 3 }
httparse = { opt-level = 3 }
integer-sqrt = { opt-level = 3 }
Expand All @@ -151,8 +151,8 @@ libz-sys = { opt-level = 3 }
mio = { opt-level = 3 }
nalgebra = { opt-level = 3 }
num-bigint = { opt-level = 3 }
parking_lot = { opt-level = 3 }
parking_lot_core = { opt-level = 3 }
parking_lot = { opt-level = 3 }
percent-encoding = { opt-level = 3 }
primitive-types = { opt-level = 3 }
reed-solomon-novelpoly = { opt-level = 3 }
Expand All @@ -162,7 +162,6 @@ sha2 = { opt-level = 3 }
sha3 = { opt-level = 3 }
smallvec = { opt-level = 3 }
snow = { opt-level = 3 }
substrate-bip39 = {opt-level = 3}
twox-hash = { opt-level = 3 }
uint = { opt-level = 3 }
wasmi = { opt-level = 3 }
Expand Down
6 changes: 1 addition & 5 deletions cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,11 +56,7 @@ cli = [
"polkadot-client",
"polkadot-node-core-pvf",
]
runtime-benchmarks = [
"service/runtime-benchmarks",
"polkadot-node-metrics/runtime-benchmarks",
"polkadot-performance-test?/runtime-benchmarks"
]
runtime-benchmarks = ["service/runtime-benchmarks", "polkadot-node-metrics/runtime-benchmarks"]
trie-memory-tracker = ["sp-trie/memory-tracker"]
full-node = ["service/full-node"]
try-runtime = ["service/try-runtime"]
Expand Down
2 changes: 1 addition & 1 deletion core-primitives/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ sp-std = { git = "https://github.com/paritytech/substrate", branch = "master", d
sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "master", default-features = false }
scale-info = { version = "2.1.2", default-features = false, features = ["derive"] }
parity-scale-codec = { version = "3.1.5", default-features = false, features = [ "derive" ] }
parity-util-mem = { version = "0.12.0", default-features = false, optional = true }
parity-util-mem = { version = "0.11.0", default-features = false, optional = true }

[features]
default = [ "std" ]
Expand Down
6 changes: 3 additions & 3 deletions node/core/approval-voting/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,10 @@ futures-timer = "3.0.2"
parity-scale-codec = { version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] }
gum = { package = "tracing-gum", path = "../../gum" }
bitvec = { version = "1.0.0", default-features = false, features = ["alloc"] }
lru = "0.8"
lru = "0.7"
merlin = "2.0"
schnorrkel = "0.9.1"
kvdb = "0.12.0"
kvdb = "0.11.0"
derive_more = "0.99.17"
thiserror = "1.0.31"

Expand All @@ -40,5 +40,5 @@ sp-core = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-consensus-babe = { git = "https://github.com/paritytech/substrate", branch = "master" }
polkadot-node-subsystem-test-helpers = { path = "../../subsystem-test-helpers" }
assert_matches = "1.4.0"
kvdb-memorydb = "0.12.0"
kvdb-memorydb = "0.11.0"
test-helpers = { package = "polkadot-primitives-test-helpers", path = "../../../primitives/test-helpers" }
32 changes: 0 additions & 32 deletions node/core/approval-voting/src/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1296,38 +1296,6 @@ pub(crate) mod tests {
}
);

// Caching of sessions needs session of first unfinalized block.
assert_matches!(
handle.recv().await,
AllMessages::ChainApi(ChainApiMessage::FinalizedBlockNumber(
s_tx,
)) => {
let _ = s_tx.send(Ok(header.number));
}
);

assert_matches!(
handle.recv().await,
AllMessages::ChainApi(ChainApiMessage::FinalizedBlockHash(
block_number,
s_tx,
)) => {
assert_eq!(block_number, header.number);
let _ = s_tx.send(Ok(Some(header.hash())));
}
);

assert_matches!(
handle.recv().await,
AllMessages::RuntimeApi(RuntimeApiMessage::Request(
h,
RuntimeApiRequest::SessionIndexForChild(s_tx),
)) => {
assert_eq!(h, header.hash());
let _ = s_tx.send(Ok(session));
}
);

// determine_new_blocks exits early as the parent_hash is in the DB

assert_matches!(
Expand Down
7 changes: 1 addition & 6 deletions node/core/approval-voting/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,6 @@ use std::{
collections::{
btree_map::Entry as BTMEntry, hash_map::Entry as HMEntry, BTreeMap, HashMap, HashSet,
},
num::NonZeroUsize,
sync::Arc,
time::Duration,
};
Expand Down Expand Up @@ -105,11 +104,7 @@ const APPROVAL_CHECKING_TIMEOUT: Duration = Duration::from_secs(120);
/// Value rather arbitrarily: Should not be hit in practice, it exists to more easily diagnose dead
/// lock issues for example.
const WAIT_FOR_SIGS_TIMEOUT: Duration = Duration::from_millis(500);
const APPROVAL_CACHE_SIZE: NonZeroUsize = match NonZeroUsize::new(1024) {
Some(cap) => cap,
None => panic!("Approval cache size must be non-zero."),
};

const APPROVAL_CACHE_SIZE: usize = 1024;
const TICK_TOO_FAR_IN_FUTURE: Tick = 20; // 10 seconds.
const APPROVAL_DELAY: Tick = 2;
const LOG_TARGET: &str = "parachain::approval-voting";
Expand Down
31 changes: 0 additions & 31 deletions node/core/approval-voting/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -807,37 +807,6 @@ async fn import_block(
}
);

assert_matches!(
overseer_recv(overseer).await,
AllMessages::ChainApi(ChainApiMessage::FinalizedBlockNumber(
s_tx,
)) => {
let _ = s_tx.send(Ok(number));
}
);

assert_matches!(
overseer_recv(overseer).await,
AllMessages::ChainApi(ChainApiMessage::FinalizedBlockHash(
block_number,
s_tx,
)) => {
assert_eq!(block_number, number);
let _ = s_tx.send(Ok(Some(hashes[number as usize].0)));
}
);

assert_matches!(
overseer_recv(overseer).await,
AllMessages::RuntimeApi(RuntimeApiMessage::Request(
h,
RuntimeApiRequest::SessionIndexForChild(s_tx),
)) => {
assert_eq!(h, hashes[number as usize].0);
let _ = s_tx.send(Ok(number.into()));
}
);

if !fork {
assert_matches!(
overseer_recv(overseer).await,
Expand Down
4 changes: 2 additions & 2 deletions node/core/av-store/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ edition = "2021"
[dependencies]
futures = "0.3.21"
futures-timer = "3.0.2"
kvdb = "0.12.0"
kvdb = "0.11.0"
thiserror = "1.0.31"
gum = { package = "tracing-gum", path = "../../gum" }
bitvec = "1.0.0"
Expand All @@ -24,7 +24,7 @@ polkadot-node-primitives = { path = "../../primitives" }
log = "0.4.17"
env_logger = "0.9.0"
assert_matches = "1.4.0"
kvdb-memorydb = "0.12.0"
kvdb-memorydb = "0.11.0"

sp-core = { git = "https://github.com/paritytech/substrate", branch = "master" }
polkadot-node-subsystem-util = { path = "../../subsystem-util" }
Expand Down
28 changes: 13 additions & 15 deletions node/core/av-store/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -792,9 +792,8 @@ fn note_block_included(
macro_rules! peek_num {
($iter:ident) => {
match $iter.peek() {
Some(Ok((k, _))) => Ok(decode_unfinalized_key(&k[..]).ok().map(|(b, _, _)| b)),
Some(Err(_)) => Err($iter.next().expect("peek returned Some(Err); qed").unwrap_err()),
None => Ok(None),
Some((k, _)) => decode_unfinalized_key(&k[..]).ok().map(|(b, _, _)| b),
None => None,
}
};
}
Expand All @@ -820,10 +819,10 @@ async fn process_block_finalized<Context>(
let mut iter = subsystem
.db
.iter_with_prefix(subsystem.config.col_meta, &start_prefix)
.take_while(|r| r.as_ref().map_or(true, |(k, _v)| &k[..] < &end_prefix[..]))
.take_while(|(k, _)| &k[..] < &end_prefix[..])
.peekable();

match peek_num!(iter)? {
match peek_num!(iter) {
None => break, // end of iterator.
Some(n) => n,
}
Expand Down Expand Up @@ -868,10 +867,10 @@ async fn process_block_finalized<Context>(
let iter = subsystem
.db
.iter_with_prefix(subsystem.config.col_meta, &start_prefix)
.take_while(|r| r.as_ref().map_or(true, |(k, _v)| &k[..] < &end_prefix[..]))
.take_while(|(k, _)| &k[..] < &end_prefix[..])
.peekable();

let batch = load_all_at_finalized_height(iter, batch_num, batch_finalized_hash)?;
let batch = load_all_at_finalized_height(iter, batch_num, batch_finalized_hash);

// Now that we've iterated over the entire batch at this finalized height,
// update the meta.
Expand All @@ -891,22 +890,22 @@ async fn process_block_finalized<Context>(
// loads all candidates at the finalized height and maps them to `true` if finalized
// and `false` if unfinalized.
fn load_all_at_finalized_height(
mut iter: std::iter::Peekable<impl Iterator<Item = io::Result<util::database::DBKeyValue>>>,
mut iter: std::iter::Peekable<impl Iterator<Item = (Box<[u8]>, Box<[u8]>)>>,
block_number: BlockNumber,
finalized_hash: Hash,
) -> io::Result<impl IntoIterator<Item = (CandidateHash, bool)>> {
) -> impl IntoIterator<Item = (CandidateHash, bool)> {
// maps candidate hashes to true if finalized, false otherwise.
let mut candidates = HashMap::new();

// Load all candidates that were included at this height.
loop {
match peek_num!(iter)? {
match peek_num!(iter) {
None => break, // end of iterator.
Some(n) if n != block_number => break, // end of batch.
_ => {},
}

let (k, _v) = iter.next().expect("`peek` used to check non-empty; qed")?;
let (k, _v) = iter.next().expect("`peek` used to check non-empty; qed");
let (_, block_hash, candidate_hash) =
decode_unfinalized_key(&k[..]).expect("`peek_num` checks validity of key; qed");

Expand All @@ -917,7 +916,7 @@ fn load_all_at_finalized_height(
}
}

Ok(candidates)
candidates
}

fn update_blocks_at_finalized_height(
Expand Down Expand Up @@ -1215,10 +1214,9 @@ fn prune_all(db: &Arc<dyn Database>, config: &Config, clock: &dyn Clock) -> Resu
let mut tx = DBTransaction::new();
let iter = db
.iter_with_prefix(config.col_meta, &range_start[..])
.take_while(|r| r.as_ref().map_or(true, |(k, _v)| &k[..] < &range_end[..]));
.take_while(|(k, _)| &k[..] < &range_end[..]);

for r in iter {
let (k, _v) = r?;
for (k, _v) in iter {
tx.delete(config.col_meta, &k[..]);

let (_, candidate_hash) = match decode_pruning_key(&k[..]) {
Expand Down
4 changes: 2 additions & 2 deletions node/core/chain-selection/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ polkadot-primitives = { path = "../../../primitives" }
polkadot-node-primitives = { path = "../../primitives" }
polkadot-node-subsystem = { path = "../../subsystem" }
polkadot-node-subsystem-util = { path = "../../subsystem-util" }
kvdb = "0.12.0"
kvdb = "0.11.0"
thiserror = "1.0.31"
parity-scale-codec = "3.1.5"

Expand All @@ -22,4 +22,4 @@ polkadot-node-subsystem-test-helpers = { path = "../../subsystem-test-helpers" }
sp-core = { git = "https://github.com/paritytech/substrate", branch = "master" }
parking_lot = "0.12.0"
assert_matches = "1"
kvdb-memorydb = "0.12.0"
kvdb-memorydb = "0.11.0"
26 changes: 9 additions & 17 deletions node/core/chain-selection/src/db_backend/v1.rs
Original file line number Diff line number Diff line change
Expand Up @@ -235,21 +235,16 @@ impl Backend for DbBackend {
self.inner.iter_with_prefix(self.config.col_data, &STAGNANT_AT_PREFIX[..]);

let val = stagnant_at_iter
.filter_map(|r| match r {
Ok((k, v)) =>
match (decode_stagnant_at_key(&mut &k[..]), <Vec<_>>::decode(&mut &v[..]).ok())
{
(Some(at), Some(stagnant_at)) => Some(Ok((at, stagnant_at))),
_ => None,
},
Err(e) => Some(Err(e)),
.filter_map(|(k, v)| {
match (decode_stagnant_at_key(&mut &k[..]), <Vec<_>>::decode(&mut &v[..]).ok()) {
(Some(at), Some(stagnant_at)) => Some((at, stagnant_at)),
_ => None,
}
})
.enumerate()
.take_while(|(idx, r)| {
r.as_ref().map_or(true, |(at, _)| *at <= up_to.into() && *idx < max_elements)
})
.take_while(|(idx, (at, _))| *at <= up_to.into() && *idx < max_elements)
.map(|(_, v)| v)
.collect::<Result<Vec<_>, _>>()?;
.collect::<Vec<_>>();

Ok(val)
}
Expand All @@ -259,13 +254,10 @@ impl Backend for DbBackend {
self.inner.iter_with_prefix(self.config.col_data, &BLOCK_HEIGHT_PREFIX[..]);

let val = blocks_at_height_iter
.filter_map(|r| match r {
Ok((k, _)) => decode_block_height_key(&k[..]).map(Ok),
Err(e) => Some(Err(e)),
})
.filter_map(|(k, _)| decode_block_height_key(&k[..]))
.next();

val.transpose().map_err(Error::from)
Ok(val)
}

fn load_blocks_by_number(&self, number: BlockNumber) -> Result<Vec<Hash>, Error> {
Expand Down
6 changes: 3 additions & 3 deletions node/core/dispute-coordinator/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ edition = "2021"
futures = "0.3.21"
gum = { package = "tracing-gum", path = "../../gum" }
parity-scale-codec = "3.1.5"
kvdb = "0.12.0"
kvdb = "0.11.0"
thiserror = "1.0.31"
lru = "0.8.0"
lru = "0.7.7"
fatality = "0.0.6"

polkadot-primitives = { path = "../../../primitives" }
Expand All @@ -22,7 +22,7 @@ sc-keystore = { git = "https://github.com/paritytech/substrate", branch = "maste


[dev-dependencies]
kvdb-memorydb = "0.12.0"
kvdb-memorydb = "0.11.0"
polkadot-node-subsystem-test-helpers = { path = "../../subsystem-test-helpers" }
sp-keyring = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-core = { git = "https://github.com/paritytech/substrate", branch = "master" }
Expand Down
10 changes: 2 additions & 8 deletions node/core/dispute-coordinator/src/scraping/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Polkadot. If not, see <http://www.gnu.org/licenses/>.

use std::{
collections::{BTreeMap, HashSet},
num::NonZeroUsize,
};
use std::collections::{BTreeMap, HashSet};

use futures::channel::oneshot;
use lru::LruCache;
Expand Down Expand Up @@ -47,10 +44,7 @@ mod tests;
/// `last_observed_blocks` LRU. This means, this value should the very least be as large as the
/// number of expected forks for keeping chain scraping efficient. Making the LRU much larger than
/// that has very limited use.
const LRU_OBSERVED_BLOCKS_CAPACITY: NonZeroUsize = match NonZeroUsize::new(20) {
Some(cap) => cap,
None => panic!("Observed blocks cache size must be non-zero"),
};
const LRU_OBSERVED_BLOCKS_CAPACITY: usize = 20;

/// Chain scraper
///
Expand Down
Loading