diff --git a/actor/src/lib.rs b/actor/src/lib.rs
index 6f23140a02c..1e7799a822e 100644
--- a/actor/src/lib.rs
+++ b/actor/src/lib.rs
@@ -1,6 +1,5 @@
-//!
 //! Iroha simple actor framework.
-//!
+#![allow(clippy::same_name_method)]
 
 #[cfg(feature = "deadlock_detection")]
 use std::any::type_name;
@@ -128,8 +127,10 @@ impl Addr {
     }
 
     /// Send a message and wait for an answer.
+    ///
     /// # Errors
     /// Fails if no one will send message
+    ///
     /// # Panics
     /// If queue is full
     pub async fn send(&self, message: M) -> Result
@@ -149,6 +150,7 @@ impl Addr {
     }
 
     /// Send a message without waiting for an answer.
+    ///
     /// # Errors
     /// Fails if queue is full or actor is disconnected
     pub async fn do_send(&self, message: M)
diff --git a/client/src/client.rs b/client/src/client.rs
index 3fc612982af..232aa419355 100644
--- a/client/src/client.rs
+++ b/client/src/client.rs
@@ -273,8 +273,8 @@ impl Client {
             .send(EventListenerInitialized)
             .wrap_err("Failed to send through init channel.")?;
         for event in event_iterator.flatten() {
-            if let Event::Pipeline(event) = event {
-                match event.status {
+            if let Event::Pipeline(this_event) = event {
+                match this_event.status {
                     PipelineStatus::Validating => {}
                     PipelineStatus::Rejected(reason) => event_sender
                         .send(Err(reason))
@@ -544,9 +544,10 @@ impl EventIterator {
         ))?;
         loop {
             match stream.read_message() {
-                Ok(WebSocketMessage::Binary(message)) => {
+                Ok(WebSocketMessage::Binary(this_message)) => {
                     if let EventSocketMessage::SubscriptionAccepted =
-                        VersionedEventSocketMessage::decode_versioned(&message)?.into_inner_v1()
+                        VersionedEventSocketMessage::decode_versioned(&this_message)?
+                            .into_inner_v1()
                     {
                         break;
                     }
@@ -577,17 +578,20 @@ impl Iterator for EventIterator {
                 };
                 let event = match event_socket_message {
                     EventSocketMessage::Event(event) => event,
-                    message => return Some(Err(eyre!("Expected Event but got {:?}", message))),
+                    msg => return Some(Err(eyre!("Expected Event but got {:?}", msg))),
                 };
-                let message =
+                let versioned_message =
                     match VersionedEventSocketMessage::from(EventSocketMessage::EventReceived)
                         .encode_versioned()
                         .wrap_err("Failed to serialize receipt.")
                     {
-                        Ok(message) => message,
+                        Ok(msg) => msg,
                         Err(e) => return Some(Err(e)),
                     };
-                return match self.stream.write_message(WebSocketMessage::Binary(message)) {
+                return match self
+                    .stream
+                    .write_message(WebSocketMessage::Binary(versioned_message))
+                {
                     Ok(_) => Some(Ok(event)),
                     Err(err) => Some(Err(eyre!("Failed to send receipt: {}", err))),
                 };
diff --git a/client/src/http_client.rs b/client/src/http_client.rs
index 50faac66292..bbd3c8ff98e 100644
--- a/client/src/http_client.rs
+++ b/client/src/http_client.rs
@@ -91,17 +91,16 @@ pub fn web_socket_connect(uri: U, headers: Headers) -> Result,
 {
-    #[allow(clippy::string_add)]
-    let uri = if let Some(uri) = uri.as_ref().strip_prefix("https://") {
-        "wss://".to_owned() + uri
-    } else if let Some(uri) = uri.as_ref().strip_prefix("http://") {
-        "ws://".to_owned() + uri
+    let ws_uri = if let Some(https_uri) = uri.as_ref().strip_prefix("https://") {
+        "wss://".to_owned() + https_uri
+    } else if let Some(http_uri) = uri.as_ref().strip_prefix("http://") {
+        "ws://".to_owned() + http_uri
     } else {
         return Err(eyre!("No schema in web socket uri provided"));
     };
 
     let req = http::Request::builder()
-        .uri(uri)
+        .uri(ws_uri)
         .set_headers(headers)
         .wrap_err("Failed to build web socket request")?
         .body(())
diff --git a/config/derive/src/lib.rs b/config/derive/src/lib.rs
index 4950731ce51..ead62010747 100644
--- a/config/derive/src/lib.rs
+++ b/config/derive/src/lib.rs
@@ -25,8 +25,8 @@ mod attrs {
 }
 
 fn get_type_argument<'a, 'b>(s: &'a str, ty: &'b Type) -> Option<&'b GenericArgument> {
-    let path = if let Type::Path(ty) = ty {
-        ty
+    let path = if let Type::Path(typ) = ty {
+        typ
     } else {
         return None;
     };
@@ -45,9 +45,9 @@ fn get_type_argument<'a, 'b>(s: &'a str, ty: &'b Type) -> Option<&'b GenericArgu
 fn is_arc_rwlock(ty: &Type) -> bool {
     let dearced_ty = get_type_argument("Arc", ty)
-        .and_then(|ty| {
-            if let GenericArgument::Type(ty) = ty {
-                Some(ty)
+        .and_then(|typ| {
+            if let GenericArgument::Type(r#type) = typ {
+                Some(r#type)
             } else {
                 None
             }
@@ -125,42 +125,42 @@ fn impl_load_env(
         .zip(field_idents.iter())
         .zip(as_str.iter())
         .zip(lvalue.iter())
-        .map(|(((ty, ident), &as_str), lvalue)| {
+        .map(|(((ty, ident), &as_str_attr), l_value)| {
             let is_string = if let Type::Path(TypePath { path, .. }) = ty {
                 path.is_ident("String")
             } else {
                 false
             };
             let set_field = if is_string {
-                quote! { #lvalue = var }
-            } else if as_str {
+                quote! { #l_value = var }
+            } else if as_str_attr {
                 quote! {
-                    #lvalue = serde_json::from_value(var.into())
+                    #l_value = serde_json::from_value(var.into())
                         .map_err(|e| iroha_config::derive::Error::field_error(stringify!(#ident), e))?
                 }
             } else {
                 quote! {
-                    #lvalue = serde_json::from_str(&var)
+                    #l_value = serde_json::from_str(&var)
                         .map_err(|e| iroha_config::derive::Error::field_error(stringify!(#ident), e))?
                 }
             };
-            (set_field, lvalue)
+            (set_field, l_value)
         })
         .zip(field_environment.iter())
         .zip(inner.iter())
-        .map(|(((set_field, lvalue), field_environment), &inner)| {
-            let inner = if inner {
+        .map(|(((set_field, l_value), field_env), &inner_thing)| {
+            let inner_thing2 = if inner_thing {
                 quote! {
-                    #lvalue.load_environment()?;
+                    #l_value.load_environment()?;
                 }
             } else {
                 quote! {}
            };
             quote! {
-                if let Ok(var) = std::env::var(#field_environment) {
+                if let Ok(var) = std::env::var(#field_env) {
                     #set_field;
                 }
-                #inner
+                #inner_thing2
             }
         });
@@ -197,14 +197,14 @@ fn impl_get_doc_recursive(
         .zip(inner)
         .zip(docs)
         .zip(field_ty)
-        .map(|(((ident, inner), docs), ty)| {
-            if inner {
+        .map(|(((ident, inner_thing), documentation), ty)| {
+            if inner_thing {
                 quote! {
-                    [stringify!(#ident)] => Some(#docs),
+                    [stringify!(#ident)] => Some(#documentation),
                     [stringify!(#ident), rest @ ..] => <#ty as iroha_config::Configurable>::get_doc_recursive(rest)?,
                 }
             } else {
-                quote! { [stringify!(#ident)] => Some(#docs), }
+                quote! { [stringify!(#ident)] => Some(#documentation), }
             }
         })
         // XXX: Workaround
@@ -239,14 +239,14 @@ fn impl_get_docs(
         .zip(inner)
         .zip(docs)
         .zip(field_ty)
-        .map(|(((ident, inner), docs), ty)| {
-            let docs = if inner {
+        .map(|(((ident, inner_thing), documentation), ty)| {
+            let doc = if inner_thing {
                 quote!{ <#ty as iroha_config::Configurable>::get_docs().into() }
             } else {
-                quote!{ #docs.into() }
+                quote!{ #documentation.into() }
             };
-            quote! { map.insert(stringify!(#ident).to_owned(), #docs); }
+            quote! { map.insert(stringify!(#ident).to_owned(), #doc); }
         })
         // XXX: Workaround
         //Decription of issue is here https://stackoverflow.com/a/65353489
@@ -285,11 +285,11 @@ fn impl_get_recursive(
         .iter()
         .zip(inner)
         .zip(lvalue.iter())
-        .map(|((ident, inner), lvalue)| {
-            let inner = if inner {
+        .map(|((ident, inner_thing), l_value)| {
+            let inner_thing2 = if inner_thing {
                 quote! {
                     [stringify!(#ident), rest @ ..] => {
-                        #lvalue.get_recursive(rest)?
+                        #l_value.get_recursive(rest)?
                     },
                 }
             } else {
@@ -297,10 +297,10 @@ fn impl_get_recursive(
             };
             quote! {
                 [stringify!(#ident)] => {
-                    serde_json::to_value(&#lvalue)
+                    serde_json::to_value(&#l_value)
                         .map_err(|e| iroha_config::derive::Error::field_error(stringify!(#ident), e))?
                 }
-                #inner
+                #inner_thing2
             }
         })
         // XXX: Workaround
@@ -385,12 +385,12 @@ fn impl_configurable(ast: &DeriveInput) -> TokenStream {
         .iter()
         .zip(field_environment.iter())
         .zip(field_ty.iter())
-        .map(|((attrs, env), field_ty)| {
+        .map(|((attrs, env), field_type)| {
             let real_doc = attrs
                 .iter()
                 .filter_map(|attr| attr.parse_meta().ok())
-                .find_map(|meta| {
-                    if let Meta::NameValue(meta) = meta {
+                .find_map(|metadata| {
+                    if let Meta::NameValue(meta) = metadata {
                         if meta.path.is_ident("doc") {
                             if let Lit::Str(s) = meta.lit {
                                 return Some(s);
@@ -403,7 +403,7 @@ fn impl_configurable(ast: &DeriveInput) -> TokenStream {
             let docs = format!(
                 "{}Has type `{}`. Can be configured via environment variable `{}`",
                 real_doc,
-                quote! { #field_ty }.to_string().replace(' ', ""),
+                quote! { #field_type }.to_string().replace(' ', ""),
                 env
             );
             LitStr::new(&docs, Span::mixed_site())
diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs
index 7600828e77e..e55aaa45932 100644
--- a/core/src/block_sync.rs
+++ b/core/src/block_sync.rs
@@ -38,7 +38,6 @@ pub struct BlockSynchronizer {
     state: State,
     gossip_period: Duration,
     batch_size: u32,
-    n_topology_shifts_before_reshuffle: u64,
     broker: Broker,
     mailbox: usize,
 }
@@ -56,7 +55,6 @@ pub trait BlockSynchronizerTrait: Actor + Handler + Handler>,
         sumeragi: AlwaysAddr,
         peer_id: PeerId,
-        n_topology_shifts_before_reshuffle: u64,
         broker: Broker,
     ) -> Self;
 }
@@ -70,7 +68,6 @@ impl BlockSynchronizerTrait for BlockSynchroniz
         wsv: Arc>,
         sumeragi: AlwaysAddr,
         peer_id: PeerId,
-        n_topology_shifts_before_reshuffle: u64,
         broker: Broker,
     ) -> Self {
         Self {
@@ -80,7 +77,6 @@ impl BlockSynchronizerTrait for BlockSynchroniz
             state: State::Idle,
             gossip_period: Duration::from_millis(config.gossip_period_ms),
             batch_size: config.batch_size,
-            n_topology_shifts_before_reshuffle,
             broker,
             mailbox: config.mailbox,
         }
@@ -161,8 +157,8 @@ impl BlockSynchronizer {
         info!(blocks_left = blocks.len(), "Synchronizing blocks");
 
-        let (block, blocks) = if let Some((block, blocks)) = blocks.split_first() {
-            (block, blocks)
+        let (this_block, remaining_blocks) = if let Some((blck, blcks)) = blocks.split_first() {
+            (blck, blcks)
         } else {
             self.state = State::Idle;
             self.request_latest_blocks_from_peer(peer_id).await;
@@ -171,34 +167,36 @@ impl BlockSynchronizer {
         let mut network_topology = self
             .sumeragi
-            .send(GetNetworkTopology(block.header().clone()))
+            .send(GetNetworkTopology(this_block.header().clone()))
             .await;
         // If it is genesis topology we cannot apply view changes as peers have custom order!
         #[allow(clippy::expect_used)]
-        if !block.header().is_genesis() {
+        if !this_block.header().is_genesis() {
             network_topology = network_topology
                 .into_builder()
-                .with_view_changes(block.header().view_change_proofs.clone())
+                .with_view_changes(this_block.header().view_change_proofs.clone())
                 .build()
                 .expect(
                     "Unreachable as doing view changes on valid topology will not raise an error.",
                 );
         }
-        if self.wsv.as_ref().latest_block_hash() == block.header().previous_block_hash
+        if self.wsv.as_ref().latest_block_hash() == this_block.header().previous_block_hash
             && network_topology
                 .filter_signatures_by_roles(
                     &[Role::ValidatingPeer, Role::Leader, Role::ProxyTail],
-                    block.verified_signatures().map(SignatureOf::transmute_ref),
+                    this_block
+                        .verified_signatures()
+                        .map(SignatureOf::transmute_ref),
                 )
                 .len()
                 >= network_topology.min_votes_for_commit() as usize
         {
-            self.state = State::InProgress(blocks.to_vec(), peer_id);
+            self.state = State::InProgress(remaining_blocks.to_vec(), peer_id);
             self.sumeragi
-                .do_send(CommitBlock(block.clone().into()))
+                .do_send(CommitBlock(this_block.clone().into()))
                 .await;
         } else {
-            warn!(block_hash = %block.hash(), "Failed to commit a block received via synchronization request - validation failed");
+            warn!(block_hash = %this_block.hash(), "Failed to commit a block received via synchronization request - validation failed");
             self.state = State::Idle;
         }
     }
diff --git a/core/src/kura.rs b/core/src/kura.rs
index 9f8555f487d..30decd252de 100644
--- a/core/src/kura.rs
+++ b/core/src/kura.rs
@@ -6,6 +6,7 @@ use std::{
     ffi::OsString,
     fmt::Debug,
     io,
+    marker::PhantomData,
     num::NonZeroU64,
     path::{Path, PathBuf},
     sync::Arc,
@@ -50,13 +51,15 @@ pub struct GetBlockHash {
 /// Provides all necessary methods to read and write data, hides implementation details.
 #[derive(Debug)]
 pub struct KuraWithIO {
+    // TODO: Kura doesn't have different initialisation modes!!!
+    #[allow(dead_code)]
     mode: Mode,
     block_store: BlockStore,
     merkle_tree: MerkleTree,
     wsv: Arc>,
     broker: Broker,
     mailbox: usize,
-    io: IO,
+    io: PhantomData,
 }
 
 /// Production qualification of `KuraWithIO`
@@ -83,7 +86,7 @@ impl KuraWithIO {
             wsv,
             broker,
             mailbox,
-            io,
+            io: PhantomData::default(),
         })
     }
 }
@@ -458,7 +461,7 @@ impl BlockStore {
             .map_ok(Self::read_file)
             .try_flatten()
             .enumerate()
-            .map(|(i, b)| b.map(|b| (i, b)))
+            .map(|(i, b)| b.map(|bb| (i, bb)))
             .and_then(|(i, b)| async move {
                 if b.header().height == (i as u64) + 1 {
                     Ok(b)
@@ -474,7 +477,6 @@ impl BlockStore {
 ///
 /// # Errors
 /// Will fail on filesystem access error
-///
 async fn storage_files_base_indices(
     path: &Path,
     io: &IO,
@@ -482,8 +484,8 @@ async fn storage_files_base_indices(
     let bases = io
         .read_dir(path.to_path_buf())
         .await?
-        .filter_map(|e| async {
-            e.ok()
+        .filter_map(|item| async {
+            item.ok()
                 .and_then(|e| e.to_string_lossy().parse::().ok())
         })
         .collect::>()
diff --git a/core/src/lib.rs b/core/src/lib.rs
index 3245ecd3c79..9cd105c7cdc 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -231,7 +231,6 @@ where
             Arc::clone(&wsv),
             sumeragi.clone(),
             PeerId::new(&config.torii.p2p_addr, &config.public_key),
-            config.sumeragi.n_topology_shifts_before_reshuffle,
             broker.clone(),
         )
         .start()
@@ -292,14 +291,14 @@ where
         telemetry: Option<(SubstrateTelemetry, FutureTelemetry)>,
         config: &Configuration,
     ) -> Result {
-        if let Some((telemetry, telemetry_future)) = telemetry {
+        if let Some((substrate_telemetry, telemetry_future)) = telemetry {
             #[cfg(feature = "dev-telemetry")]
             {
                 iroha_telemetry::dev::start(&config.telemetry, telemetry_future)
                     .await
                     .wrap_err("Failed to setup telemetry for futures")?;
             }
-            iroha_telemetry::ws::start(&config.telemetry, telemetry)
+            iroha_telemetry::ws::start(&config.telemetry, substrate_telemetry)
                 .await
                 .wrap_err("Failed to setup telemetry")
         } else {
diff --git a/core/src/queue.rs b/core/src/queue.rs
index 6811eb77c61..acf9cc7c53c 100644
--- a/core/src/queue.rs
+++ b/core/src/queue.rs
@@ -141,9 +141,12 @@ impl Queue {
         entry.insert(tx);
 
-        if let Err(hash) = self.queue.push(hash) {
-            let (_, tx) = self.txs.remove(&hash).expect("Inserted just before match");
-            return Err((tx, Error::Full));
+        if let Err(err_hash) = self.queue.push(hash) {
+            let (_, err_tx) = self
+                .txs
+                .remove(&err_hash)
+                .expect("Inserted just before match");
+            return Err((err_tx, Error::Full));
         }
         Ok(())
     }
diff --git a/core/src/smartcontracts/isi/permissions.rs b/core/src/smartcontracts/isi/permissions.rs
index 1dbc3ea16b2..ea473ef07ee 100644
--- a/core/src/smartcontracts/isi/permissions.rs
+++ b/core/src/smartcontracts/isi/permissions.rs
@@ -133,7 +133,7 @@ impl IsAllowed for CheckNested {
             Instruction::If(if_box) => {
                 self.check(authority, &if_box.then, wsv)
                     .and_then(|_| match &if_box.otherwise {
-                        Some(instruction) => self.check(authority, instruction, wsv),
+                        Some(this_instruction) => self.check(authority, this_instruction, wsv),
                         None => Ok(()),
                     })
             }
@@ -143,7 +143,7 @@ impl IsAllowed for CheckNested {
             Instruction::Sequence(sequence_box) => sequence_box
                 .instructions
                 .iter()
-                .try_for_each(|instruction| self.check(authority, instruction, wsv)),
+                .try_for_each(|this_instruction| self.check(authority, this_instruction, wsv)),
         }
     }
 }
@@ -326,8 +326,8 @@ pub fn check_query_in_instruction(
         Instruction::If(if_box) => {
             check_query_in_instruction(authority, &if_box.then, wsv, validator).and_then(|_| {
                 match &if_box.otherwise {
-                    Some(instruction) => {
-                        check_query_in_instruction(authority, instruction, wsv, validator)
+                    Some(this_instruction) => {
+                        check_query_in_instruction(authority, this_instruction, wsv, validator)
                     }
                     None => Ok(()),
                 }
@@ -342,8 +342,8 @@ pub fn check_query_in_instruction(
             sequence_box
                 .instructions
                 .iter()
-                .try_for_each(|instruction| {
-                    check_query_in_instruction(authority, instruction, wsv, validator)
+                .try_for_each(|this_instruction| {
+                    check_query_in_instruction(authority, this_instruction, wsv, validator)
                 })
         }
     }
@@ -584,8 +584,8 @@ impl IsAllowed for IsGrantAllowedBoxed {
         instruction: &Instruction,
         wsv: &WorldStateView,
     ) -> Result<(), DenialReason> {
-        if let Instruction::Grant(instruction) = instruction {
-            self.check_grant(authority, instruction, wsv)
+        if let Instruction::Grant(isi) = instruction {
+            self.check_grant(authority, isi, wsv)
         } else {
             Ok(())
         }
diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs
index 448f82bfed4..c726b0f884e 100644
--- a/core/src/sumeragi/mod.rs
+++ b/core/src/sumeragi/mod.rs
@@ -712,7 +712,7 @@ impl Sumeragi {
         latest_view_change: HashOf,
     ) {
         let old_voting_block = voting_block;
-        let voting_block = Arc::clone(&self.voting_block);
+        let voting_block_ref = Arc::clone(&self.voting_block);
         let key_pair = self.key_pair.clone();
         let commit_time = self.commit_time;
         let broker = self.broker.clone();
@@ -727,25 +727,26 @@ impl Sumeragi {
         task::spawn(
             async move {
                 time::sleep(commit_time).await;
-                let voting_block = if let Some(voting_block) = voting_block.write().await.clone() {
-                    voting_block
-                } else {
-                    return;
-                };
+                let voter_block =
+                    if let Some(this_voting_block) = voting_block_ref.write().await.clone() {
+                        this_voting_block
+                    } else {
+                        return;
+                    };
 
                 // If the block was not yet committed send commit timeout to other peers to initiate view change.
-                if voting_block.block.hash() != old_voting_block.block.hash() {
+                if voter_block.block.hash() != old_voting_block.block.hash() {
                     return;
                 }
 
                 warn!(
-                    block_hash = %voting_block.block.hash(),
+                    block_hash = %voter_block.block.hash(),
                     "Block commit timeout detected!",
                 );
                 #[allow(clippy::expect_used)]
                 let msg = VersionedMessage::from(Message::ViewChangeSuggested(
                     view_change::Proof::commit_timeout(
-                        voting_block.block.hash(),
+                        voter_block.block.hash(),
                         latest_view_change,
                         latest_block,
                         key_pair.clone(),
@@ -801,8 +802,8 @@ impl Sumeragi {
         self.txs_awaiting_created_block.clear();
         self.txs_awaiting_receipts.clear();
         let previous_role = self.topology.role(&self.peer_id);
-        if let Some(invalidated_block_hash) = invalidated_block_hash {
-            self.invalidated_blocks_hashes.push(invalidated_block_hash)
+        if let Some(hash) = invalidated_block_hash {
+            self.invalidated_blocks_hashes.push(hash)
         }
         self.topology.apply_view_change(proof.clone());
         *self.voting_block.write().await = None;
@@ -1123,9 +1124,9 @@ pub mod message {
         sumeragi: &Sumeragi,
     ) -> bool {
         let voting_block = sumeragi.voting_block.read().await.clone();
-        voting_block.map_or(false, |voting_block| {
-            voting_block.block.hash() == reason.hash
-                && (current_time() - voting_block.voted_at) >= sumeragi.commit_time
+        voting_block.map_or(false, |voter_block| {
+            voter_block.block.hash() == reason.hash
+                && (current_time() - voter_block.voted_at) >= sumeragi.commit_time
         })
     }
diff --git a/core/src/torii/mod.rs b/core/src/torii/mod.rs
index e8c8a9c79a9..1f065906e48 100644
--- a/core/src/torii/mod.rs
+++ b/core/src/torii/mod.rs
@@ -173,14 +173,14 @@ impl Torii {
     /// Fixing status code for custom rejection, because of argument parsing
     #[allow(clippy::unused_async)]
-    async fn recover_arg_parse(err: Rejection) -> Result {
-        if let Some(err) = err.find::() {
+    async fn recover_arg_parse(rejection: Rejection) -> Result {
+        if let Some(err) = rejection.find::() {
             return Ok(reply::with_status(err.to_string(), err.status_code()));
         }
-        if let Some(err) = err.find::() {
+        if let Some(err) = rejection.find::() {
             return Ok(reply::with_status(err.to_string(), err.status_code()));
         }
-        Err(err)
+        Err(rejection)
     }
 
     /// To handle incoming requests `Torii` should be started first.
@@ -233,8 +233,8 @@ impl Torii {
             .and(add_state(self.events))
             .and(warp::ws())
             .map(|events, ws: Ws| {
-                ws.on_upgrade(|ws| async move {
-                    if let Err(error) = handle_subscription(events, ws).await {
+                ws.on_upgrade(|this_ws| async move {
+                    if let Err(error) = handle_subscription(events, this_ws).await {
                         iroha_logger::error!(%error, "Failed to subscribe someone");
                     }
                 })
diff --git a/core/src/tx.rs b/core/src/tx.rs
index fbef3d5ad16..05bf9e26aa2 100644
--- a/core/src/tx.rs
+++ b/core/src/tx.rs
@@ -195,8 +195,6 @@ impl AcceptedTransaction {
         }
 
         for instruction in &self.payload.instructions {
-            let account_id = self.payload.account_id.clone();
-
             instruction
                 .clone()
                 .execute(account_id.clone(), &wsv_temp)
diff --git a/data_model/src/merkle.rs b/data_model/src/merkle.rs
index 36868c669b3..3815fb44faf 100644
--- a/data_model/src/merkle.rs
+++ b/data_model/src/merkle.rs
@@ -120,7 +120,9 @@ impl Node {
             Leaf { hash } if idx == 0 => Ok(*hash),
             Subtree { left, right, .. } => match left.get_leaf_inner(idx) {
                 Ok(hash) => Ok(hash),
-                Err(seen) => right.get_leaf_inner(idx - seen).map_err(|idx| idx + seen),
+                Err(seen) => right
+                    .get_leaf_inner(idx - seen)
+                    .map_err(|index| index + seen),
             },
             Leaf { .. } | Empty => Err(1),
         }
@@ -160,7 +162,7 @@ impl Node {
             .as_ref()
             .iter()
             .zip(right_hash.as_ref().iter())
-            .map(|(left, right)| left.saturating_add(*right))
+            .map(|(l, r)| l.saturating_add(*r))
             .take(32)
             .collect();
         HashOf::from_hash(Hash::new(&sum))
diff --git a/data_model/src/transaction.rs b/data_model/src/transaction.rs
index b3a12fc111e..09aa5194d73 100644
--- a/data_model/src/transaction.rs
+++ b/data_model/src/transaction.rs
@@ -168,16 +168,16 @@ impl From for VersionedTransaction {
     fn from(transaction: VersionedValidTransaction) -> Self {
         match transaction {
             VersionedValidTransaction::V1(v1) => {
-                let transaction: ValidTransaction = v1.0;
+                let tx: ValidTransaction = v1.0;
 
-                let signatures = transaction
+                let signatures = tx
                     .signatures
                     .values()
                     .iter()
                     .cloned()
                     .collect::>();
                 let tx = Transaction {
-                    payload: transaction.payload,
+                    payload: tx.payload,
                     signatures,
                 };
                 tx.into()
diff --git a/futures/tests/basic.rs b/futures/tests/basic.rs
index 159f7bc2f15..c7eb918715d 100644
--- a/futures/tests/basic.rs
+++ b/futures/tests/basic.rs
@@ -14,7 +14,7 @@ async fn sleep(times: Vec) -> i32 {
         task::yield_now().await;
     }
     // Just random result
-    10
+    10_i32
 }
 
 fn almost_equal(a: Duration, b: Duration) -> bool {
@@ -36,7 +36,7 @@ async fn test_sleep() {
     let (_, telemetry_future) = iroha_logger::init(&Configuration::default())
         .unwrap()
         .unwrap();
-    assert_eq!(sleep(sleep_times.clone()).await, 10);
+    assert_eq!(sleep(sleep_times.clone()).await, 10_i32);
     let telemetry = ReceiverStream::new(telemetry_future)
         .map(FuturePollTelemetry::try_from)
         .filter_map(Result::ok)
@@ -46,11 +46,15 @@ async fn test_sleep() {
     assert_eq!(telemetry.len(), 3);
 
     let id = telemetry[0].id;
-    let times = telemetry.iter().map(|telemetry| telemetry.duration);
+    let times = telemetry
+        .iter()
+        .map(|telemetry_item| telemetry_item.duration);
     assert!(telemetry
         .iter()
-        .all(|telemetry| telemetry.name == "basic::sleep"));
-    assert!(telemetry.iter().all(|telemetry| telemetry.id == id));
+        .all(|telemetry_item| telemetry_item.name == "basic::sleep"));
+    assert!(telemetry
+        .iter()
+        .all(|telemetry_item| telemetry_item.id == id));
 
     assert!(times.zip(sleep_times).all(|(a, b)| almost_equal(a, b)));
 }
diff --git a/lints.toml b/lints.toml
index 3c0054f223a..0a637208e3c 100644
--- a/lints.toml
+++ b/lints.toml
@@ -24,6 +24,7 @@ deny = [
 ]
 allow = [
     'unused_results',
+    'clippy::string_add',
     'clippy::as_conversions',
     'clippy::default_numeric_fallback',
     'clippy::else_if_without_else',
@@ -50,6 +51,11 @@ allow = [
     'clippy::wildcard_imports',
     'elided_lifetimes_in_paths',
     'missing_debug_implementations',
+    'clippy::mod-module-files',
+    'clippy::self-named-module-files',
+    # We often need to shadow the name of the method to specialise.
+    # As soon as trait specialisation is stable we need to remove it.
+    'clippy::same_name_method',
 
     # TODO: Remove when stabilized
     ## https://rust-lang.github.io/rust-clippy/master/index.html#missing_const_for_fn
diff --git a/logger/src/config.rs b/logger/src/config.rs
index 3ad7acbc163..d171f89e367 100644
--- a/logger/src/config.rs
+++ b/logger/src/config.rs
@@ -1,6 +1,5 @@
 //! Module containing logic related to spawning a logger from the
 //! configuration, as well as run-time reloading of the log-level.
-
 use std::fmt::Debug;
 
 use iroha_config::{
diff --git a/logger/src/telemetry.rs b/logger/src/telemetry.rs
index ac23494fc67..9a3c3df7e9b 100644
--- a/logger/src/telemetry.rs
+++ b/logger/src/telemetry.rs
@@ -154,14 +154,14 @@ impl EventInspectorTrait for TelemetryLayer {
     fn event(&self, event: &Event<'_>) {
         let target = event.metadata().target();
 
-        if let Some(target) = target.strip_prefix(TELEMETRY_TARGET_PREFIX) {
+        if let Some(telemetry_target) = target.strip_prefix(TELEMETRY_TARGET_PREFIX) {
             let _result = self
                 .telemetry_sender
-                .try_send(Telemetry::from_event(target, event));
-        } else if let Some(target) = target.strip_prefix(TELEMETRY_FUTURE_TARGET_PREFIX) {
+                .try_send(Telemetry::from_event(telemetry_target, event));
+        } else if let Some(future_target) = target.strip_prefix(TELEMETRY_FUTURE_TARGET_PREFIX) {
             let _result = self
                 .telemetry_future_sender
-                .try_send(Telemetry::from_event(target, event));
+                .try_send(Telemetry::from_event(future_target, event));
         } else {
             self.subscriber.event(event)
         }
diff --git a/p2p/src/peer.rs b/p2p/src/peer.rs
index 9d27d8676f9..21d1d1dc54e 100644
--- a/p2p/src/peer.rs
+++ b/p2p/src/peer.rs
@@ -489,8 +489,8 @@ where
         }
         if let Self::Ready(id, broker, mut connection, crypto) = dummy {
             debug!(peer_addr = %id.address, "Handshake finished");
-            let message = PeerMessage::::Connected(id.clone(), connection.id);
-            broker.issue_send(message).await;
+            let connected_message = PeerMessage::::Connected(id.clone(), connection.id);
+            broker.issue_send(connected_message).await;
 
             #[allow(clippy::unwrap_used)]
             let read: OwnedReadHalf = connection.read.take().unwrap();
@@ -520,12 +520,13 @@ where
     async fn handle(&mut self, MessageResult(msg): MessageResult) {
         if let Self::Ready(id, broker, connection, crypto) = self {
             let message = match msg {
-                Ok(message) => message,
+                Ok(this_message) => this_message,
                 Err(error) => {
                     warn!(%error, "Error reading message");
                     // TODO implement some recovery
-                    let message = PeerMessage::::Disconnected(id.clone(), connection.id);
-                    broker.issue_send(message).await;
+                    let disconnect_message =
+                        PeerMessage::::Disconnected(id.clone(), connection.id);
+                    broker.issue_send(disconnect_message).await;
                     return;
                 }
             };
@@ -546,9 +547,10 @@ where
             };
             let decoded: Result = Decode::decode(&mut data.as_slice());
             match decoded {
-                Ok(data) => {
-                    let message = PeerMessage::Message(id.clone(), Box::new(data));
-                    broker.issue_send(message).await;
+                Ok(decoded_data) => {
+                    let message_with_data =
+                        PeerMessage::Message(id.clone(), Box::new(decoded_data));
+                    broker.issue_send(message_with_data).await;
                 }
                 Err(error) => warn!(%error, "Error parsing message!"),
             }
diff --git a/p2p/tests/p2p.rs b/p2p/tests/p2p.rs
index 226a5a3ed8b..42a549b492a 100644
--- a/p2p/tests/p2p.rs
+++ b/p2p/tests/p2p.rs
@@ -177,11 +177,11 @@ async fn two_networks() {
     tokio::time::sleep(delay).await;
     assert_eq!(messages2.load(Ordering::SeqCst), 1);
 
-    let connected_peers: ConnectedPeers = network1.send(GetConnectedPeers).await.unwrap();
-    assert_eq!(connected_peers.peers.len(), 1);
+    let connected_peers1: ConnectedPeers = network1.send(GetConnectedPeers).await.unwrap();
+    assert_eq!(connected_peers1.peers.len(), 1);
 
-    let connected_peers: ConnectedPeers = network2.send(GetConnectedPeers).await.unwrap();
-    assert_eq!(connected_peers.peers.len(), 1);
+    let connected_peers2: ConnectedPeers = network2.send(GetConnectedPeers).await.unwrap();
+    assert_eq!(connected_peers2.peers.len(), 1);
 
     // Connecting to the same peer from network1
     broker1
@@ -328,7 +328,7 @@ fn test_encryption() {
     assert!(res.is_ok());
     let ciphertext = res.unwrap();
 
-    let res = encryptor.decrypt_easy(aad.as_ref(), ciphertext.as_slice());
-    assert!(res.is_ok());
-    assert_eq!(res.unwrap().as_slice(), message);
+    let res_cipher = encryptor.decrypt_easy(aad.as_ref(), ciphertext.as_slice());
+    assert!(res_cipher.is_ok());
+    assert_eq!(res_cipher.unwrap().as_slice(), message);
 }
diff --git a/permissions_validators/src/lib.rs b/permissions_validators/src/lib.rs
index 0afbba18d22..31566617f98 100644
--- a/permissions_validators/src/lib.rs
+++ b/permissions_validators/src/lib.rs
@@ -142,8 +142,8 @@ pub mod private_blockchain {
             instruction: &Instruction,
             _wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let _register_box = if let Instruction::Register(instruction) = instruction {
-                instruction
+            let _register_box = if let Instruction::Register(register) = instruction {
+                register
             } else {
                 return Ok(());
             };
@@ -758,8 +758,8 @@ pub mod public_blockchain {
            instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let transfer_box = if let Instruction::Transfer(instruction) = instruction {
-                instruction
+            let transfer_box = if let Instruction::Transfer(transfer) = instruction {
+                transfer
             } else {
                 return Ok(());
             };
@@ -798,8 +798,8 @@ pub mod public_blockchain {
                 .source_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let source_id: AssetId = if let Ok(source_id) = source_id.try_into() {
-                source_id
+            let source_id: AssetId = if let Ok(id) = source_id.try_into() {
+                id
             } else {
                 return Err("Source id is not an AssetId.".to_owned());
             };
@@ -859,12 +859,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::Unregister(instruction) = instruction {
-                instruction
+            let unregister_box = if let Instruction::Unregister(unregister) = instruction {
+                unregister
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let object_id = unregister_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -896,8 +896,8 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let unregister_box = if let Instruction::Unregister(instruction) = instruction {
-                instruction
+            let unregister_box = if let Instruction::Unregister(unregister) = instruction {
+                unregister
             } else {
                 return Err("Instruction is not unregister.".to_owned());
             };
@@ -905,8 +905,8 @@
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AssetDefinitionId = if let Ok(object_id) = object_id.try_into() {
-                object_id
+            let object_id: AssetDefinitionId = if let Ok(obj_id) = object_id.try_into() {
+                obj_id
             } else {
                 return Err("Source id is not an AssetDefinitionId.".to_owned());
             };
@@ -971,12 +971,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::Mint(instruction) = instruction {
-                instruction
+            let mint_box = if let Instruction::Mint(mint) = instruction {
+                mint
             } else {
                 return Ok(());
             };
-            let destination_id = instruction
+            let destination_id = mint_box
                 .destination_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1008,8 +1008,8 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let mint_box = if let Instruction::Mint(instruction) = instruction {
-                instruction
+            let mint_box = if let Instruction::Mint(mint) = instruction {
+                mint
             } else {
                 return Err("Instruction is not mint.".to_owned());
             };
@@ -1017,8 +1017,8 @@ pub mod public_blockchain {
                 .destination_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let asset_id: AssetId = if let Ok(destination_id) = destination_id.try_into() {
-                destination_id
+            let asset_id: AssetId = if let Ok(dest_id) = destination_id.try_into() {
+                dest_id
             } else {
                 return Err("Destination is not an Asset.".to_owned());
             };
@@ -1085,12 +1085,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::Burn(instruction) = instruction {
-                instruction
+            let burn_box = if let Instruction::Burn(burn) = instruction {
+                burn
             } else {
                 return Ok(());
             };
-            let destination_id = instruction
+            let destination_id = burn_box
                 .destination_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1122,8 +1122,8 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let burn_box = if let Instruction::Burn(instruction) = instruction {
-                instruction
+            let burn_box = if let Instruction::Burn(burn) = instruction {
+                burn
             } else {
                 return Err("Instruction is not burn.".to_owned());
             };
@@ -1131,8 +1131,8 @@ pub mod public_blockchain {
                 .destination_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let asset_id: AssetId = if let Ok(destination_id) = destination_id.try_into() {
-                destination_id
+            let asset_id: AssetId = if let Ok(dest_id) = destination_id.try_into() {
+                dest_id
             } else {
                 return Err("Destination is not an Asset.".to_owned());
             };
@@ -1185,12 +1185,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::Burn(instruction) = instruction {
-                instruction
+            let burn_box = if let Instruction::Burn(burn) = instruction {
+                burn
             } else {
                 return Ok(());
             };
-            let destination_id = instruction
+            let destination_id = burn_box
                 .destination_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1224,9 +1224,8 @@ pub mod public_blockchain {
                 .destination_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let destination_id: AssetId = if let Ok(destination_id) = destination_id.try_into()
-            {
-                destination_id
+            let destination_id: AssetId = if let Ok(dest_id) = destination_id.try_into() {
+                dest_id
             } else {
                 return Err("Source id is not an AssetId.".to_owned());
             };
@@ -1301,12 +1300,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::SetKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::SetKeyValue(set_kv) = instruction {
+                set_kv
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let object_id = set_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1334,17 +1333,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let set_box = if let Instruction::SetKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::SetKeyValue(set_kv) = instruction {
+                set_kv
             } else {
                 return Err("Instruction is not set.".to_owned());
             };
-            let object_id = set_box
+            let object_id = set_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AssetId = if let Ok(object_id) = object_id.try_into() {
-                object_id
+            let object_id: AssetId = if let Ok(obj_id) = object_id.try_into() {
+                obj_id
             } else {
                 return Err("Source id is not an AssetId.".to_owned());
             };
@@ -1398,12 +1397,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::SetKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::SetKeyValue(set_kv) = instruction {
+                set_kv
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let object_id = set_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1430,17 +1429,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let set_box = if let Instruction::SetKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::SetKeyValue(set_kv) = instruction {
+                set_kv
             } else {
                 return Err("Instruction is not set.".to_owned());
             };
-            let object_id = set_box
+            let object_id = set_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AccountId = if let Ok(object_id) = object_id.try_into() {
-                object_id
+            let object_id: AccountId = if let Ok(obj_id) = object_id.try_into() {
+                obj_id
             } else {
                 return Err("Source id is not an AccountId.".to_owned());
             };
@@ -1494,12 +1493,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::RemoveKeyValue(instruction) = instruction {
-                instruction
+            let rem_kv_box = if let Instruction::RemoveKeyValue(rem_kv) = instruction {
+                rem_kv
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let object_id = rem_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1525,17 +1524,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let remove_box = if let Instruction::RemoveKeyValue(instruction) = instruction {
-                instruction
+            let rem_kv_box = if let Instruction::RemoveKeyValue(rem_kv) = instruction {
+                rem_kv
             } else {
                 return Err("Instruction is not set.".to_owned());
             };
-            let object_id = remove_box
+            let object_id = rem_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AssetId = if let Ok(object_id) = object_id.try_into() {
-                object_id
+            let object_id: AssetId = if let Ok(obj_id) = object_id.try_into() {
+                obj_id
             } else {
                 return Err("Source id is not an AssetId.".to_owned());
             };
@@ -1589,12 +1588,12 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::RemoveKeyValue(instruction) = instruction {
-                instruction
+            let rem_kv_box = if let Instruction::RemoveKeyValue(rem_kv) = instruction {
+                rem_kv
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let object_id = rem_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
@@ -1621,17 +1620,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let remove_box = if let Instruction::RemoveKeyValue(instruction) = instruction {
-                instruction
+            let rem_kv_box = if let Instruction::RemoveKeyValue(rem_kv) = instruction {
+                rem_kv
             } else {
                 return Err("Instruction is not remove.".to_owned());
             };
-            let object_id = remove_box
+            let object_id = rem_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AccountId = if let Ok(object_id) = object_id.try_into() {
-                object_id
+            let object_id: AccountId = if let Ok(obj_id) = object_id.try_into() {
+                obj_id
             } else {
                 return Err("Source id is not an AccountId.".to_owned());
             };
@@ -1745,17 +1744,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::SetKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::SetKeyValue(set_kv) = instruction {
+                set_kv
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let obj_id = set_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AssetDefinitionId = try_into_or_exit!(object_id);
+            let object_id: AssetDefinitionId = try_into_or_exit!(obj_id);
             let registered_by_signer_account = wsv
                 .asset_definition_entry(&object_id)
                 .map(|asset_definition_entry| {
@@ -1785,17 +1784,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result<(), DenialReason> {
-            let instruction = if let Instruction::RemoveKeyValue(instruction) = instruction {
-                instruction
+            let rem_kv_box = if let Instruction::RemoveKeyValue(rem_kv) = instruction {
+                rem_kv
             } else {
                 return Ok(());
             };
-            let object_id = instruction
+            let obj_id = rem_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AssetDefinitionId = try_into_or_exit!(object_id);
+            let object_id: AssetDefinitionId = try_into_or_exit!(obj_id);
             let registered_by_signer_account = wsv
                 .asset_definition_entry(&object_id)
                 .map(|asset_definition_entry| {
@@ -1825,17 +1824,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let set_box = if let Instruction::SetKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::SetKeyValue(set_kv) = instruction {
+                set_kv
             } else {
                 return Err("Instruction is not set.".to_owned());
             };
-            let object_id = set_box
+            let object_id = set_kv_box
                 .object_id
                 .evaluate(wsv, &Context::new())
                 .map_err(|e| e.to_string())?;
-            let object_id: AssetDefinitionId = if let Ok(object_id) = object_id.try_into() {
-                object_id
+            let object_id: AssetDefinitionId = if let Ok(obj_id) = object_id.try_into() {
+                obj_id
             } else {
                 return Err("Source id is not an AssetDefinitionId.".to_owned());
             };
@@ -1864,17 +1863,17 @@ pub mod public_blockchain {
             instruction: &Instruction,
             wsv: &WorldStateView,
         ) -> Result {
-            let set_box = if let Instruction::RemoveKeyValue(instruction) = instruction {
-                instruction
+            let set_kv_box = if let Instruction::RemoveKeyValue(set_kv) = instruction {
+                set_kv
             } else {
Err("Instruction is not remove key value.".to_owned()); }; - let object_id = set_box + let object_id = set_kv_box .object_id .evaluate(wsv, &Context::new()) .map_err(|e| e.to_string())?; - let object_id: AssetDefinitionId = if let Ok(object_id) = object_id.try_into() { - object_id + let object_id: AssetDefinitionId = if let Ok(obj_id) = object_id.try_into() { + obj_id } else { return Err("Source id is not an AssetDefinitionId.".to_owned()); }; diff --git a/schema/derive/src/lib.rs b/schema/derive/src/lib.rs index 3751cadac82..c48804f132f 100644 --- a/schema/derive/src/lib.rs +++ b/schema/derive/src/lib.rs @@ -210,7 +210,7 @@ fn variant_field(fields: &Fields) -> Option { panic!("Please don't use named fields on enums. It is against iroha styleguide") } }; - filter_map_fields_types(field).map(|field| field.ty) + filter_map_fields_types(field).map(|this_field| this_field.ty) } /// Returns types for which schema should be called and metadata for struct @@ -299,8 +299,8 @@ fn variant_index(v: &Variant, i: usize) -> TokenStream2 { let index = find_meta_item(v.attrs.iter(), |meta| { if let NestedMeta::Meta(Meta::NameValue(ref nv)) = meta { if nv.path.is_ident("index") { - if let Lit::Int(ref v) = nv.lit { - let byte = v + if let Lit::Int(ref val) = nv.lit { + let byte = val .base10_parse::() .expect("Internal error, index attribute must have been checked"); return Some(byte); @@ -313,7 +313,7 @@ fn variant_index(v: &Variant, i: usize) -> TokenStream2 { // then fallback to discriminant or just index index - .map(|i| quote! { #i }) + .map(|int| quote! { #int }) .or_else(|| { v.discriminant.as_ref().map(|&(_, ref expr)| { let n: Lit = syn::parse2(quote! { #expr }).unwrap(); diff --git a/telemetry/src/dev.rs b/telemetry/src/dev.rs index 27f02ef09cc..c32180fa25a 100644 --- a/telemetry/src/dev.rs +++ b/telemetry/src/dev.rs @@ -46,8 +46,8 @@ pub async fn start( // // TODO: After migration to tokio move to https://docs.rs/tokio-serde let join_handle = task::spawn(async move { - while let Some(telemetry) = telemetry.next().await { - let telemetry_json = match serde_json::to_string(&telemetry) { + while let Some(item) = telemetry.next().await { + let telemetry_json = match serde_json::to_string(&item) { Ok(json) => json, Err(error) => { iroha_logger::error!(%error, "Failed to serialize telemetry to json"); diff --git a/telemetry/src/ws.rs b/telemetry/src/ws.rs index 8179f14e412..ce95d47127f 100644 --- a/telemetry/src/ws.rs +++ b/telemetry/src/ws.rs @@ -175,9 +175,9 @@ fn prepare_message(name: &str, telemetry: Telemetry) -> Result<(Message, Option< let fields = telemetry.fields.0; let msg_kind = fields .iter() - .find_map(|(name, map)| (*name == "msg").then(|| map)) + .find_map(|(this_name, map)| (*this_name == "msg").then(|| map)) .and_then(|v| { - v.as_str().map(|v| match v { + v.as_str().map(|val| match val { "system.connected" => Some(MessageKind::Initialization), _ => None, }) @@ -219,7 +219,7 @@ fn prepare_message(name: &str, telemetry: Telemetry) -> Result<(Message, Option< payload.insert("network_id".into(), "".into()); } let mut map = Map::new(); - map.insert("id".into(), 0.into()); + map.insert("id".into(), 0_i32.into()); map.insert("ts".into(), Local::now().to_rfc3339().into()); map.insert("payload".into(), payload.into()); let msg = Message::Binary(serde_json::to_vec(&map)?); @@ -452,7 +452,7 @@ mod tests { panic!() }; let map: Map = serde_json::from_slice(&bytes).unwrap(); - assert_eq!(map.get("id"), Some(&Value::Number(0.into()))); + assert_eq!(map.get("id"), 
+        assert_eq!(map.get("id"), Some(&Value::Number(0_i32.into())));
         assert!(map.contains_key("ts"));
         let payload = map.get("payload").unwrap().as_object().unwrap();
         assert_eq!(
@@ -488,7 +488,7 @@ mod tests {
             panic!()
         };
         let map: Map = serde_json::from_slice(&bytes).unwrap();
-        assert_eq!(map.get("id"), Some(&Value::Number(0.into())));
+        assert_eq!(map.get("id"), Some(&Value::Number(0_i32.into())));
         assert!(map.contains_key("ts"));
         assert!(map.contains_key("payload"));
         let payload = map.get("payload").unwrap().as_object().unwrap();
@@ -496,7 +496,7 @@ mod tests {
             payload.get("msg"),
             Some(&Value::String("system.interval".to_owned()))
         );
-        assert_eq!(payload.get("peers"), Some(&Value::Number(2.into())));
+        assert_eq!(payload.get("peers"), Some(&Value::Number(2_i32.into())));
     }
 
     drop(telemetry_sender);