Skip to content

Commit

Permalink
Fixed new clippy lints.
Browse files Browse the repository at this point in the history
Signed-off-by: Aleksandr <a-p-petrosyan@yandex.ru>
  • Loading branch information
appetrosyan committed Dec 3, 2021
1 parent ea08502 commit 557991f
Show file tree
Hide file tree
Showing 24 changed files with 248 additions and 230 deletions.
6 changes: 4 additions & 2 deletions actor/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
//!
//! Iroha simple actor framework.
//!
#![allow(clippy::same_name_method)]

#[cfg(feature = "deadlock_detection")]
use std::any::type_name;
Expand Down Expand Up @@ -128,8 +127,10 @@ impl<A: Actor> Addr<A> {
}

/// Send a message and wait for an answer.
///
/// # Errors
/// Fails if no one will send a message
///
/// # Panics
/// If queue is full
pub async fn send<M>(&self, message: M) -> Result<M::Result, Error>
Expand All @@ -149,6 +150,7 @@ impl<A: Actor> Addr<A> {
}

/// Send a message without waiting for an answer.
///
/// # Errors
/// Fails if queue is full or actor is disconnected
pub async fn do_send<M>(&self, message: M)
Expand Down
20 changes: 12 additions & 8 deletions client/src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -273,8 +273,8 @@ impl Client {
.send(EventListenerInitialized)
.wrap_err("Failed to send through init channel.")?;
for event in event_iterator.flatten() {
if let Event::Pipeline(event) = event {
match event.status {
if let Event::Pipeline(this_event) = event {
match this_event.status {
PipelineStatus::Validating => {}
PipelineStatus::Rejected(reason) => event_sender
.send(Err(reason))
Expand Down Expand Up @@ -544,9 +544,10 @@ impl EventIterator {
))?;
loop {
match stream.read_message() {
Ok(WebSocketMessage::Binary(message)) => {
Ok(WebSocketMessage::Binary(this_message)) => {
if let EventSocketMessage::SubscriptionAccepted =
VersionedEventSocketMessage::decode_versioned(&message)?.into_inner_v1()
VersionedEventSocketMessage::decode_versioned(&this_message)?
.into_inner_v1()
{
break;
}
Expand Down Expand Up @@ -577,17 +578,20 @@ impl Iterator for EventIterator {
};
let event = match event_socket_message {
EventSocketMessage::Event(event) => event,
message => return Some(Err(eyre!("Expected Event but got {:?}", message))),
msg => return Some(Err(eyre!("Expected Event but got {:?}", msg))),
};
let message =
let versioned_message =
match VersionedEventSocketMessage::from(EventSocketMessage::EventReceived)
.encode_versioned()
.wrap_err("Failed to serialize receipt.")
{
Ok(message) => message,
Ok(msg) => msg,
Err(e) => return Some(Err(e)),
};
return match self.stream.write_message(WebSocketMessage::Binary(message)) {
return match self
.stream
.write_message(WebSocketMessage::Binary(versioned_message))
{
Ok(_) => Some(Ok(event)),
Err(err) => Some(Err(eyre!("Failed to send receipt: {}", err))),
};
Expand Down
11 changes: 5 additions & 6 deletions client/src/http_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,17 +91,16 @@ pub fn web_socket_connect<U>(uri: U, headers: Headers) -> Result<WebSocketStream
where
U: AsRef<str>,
{
#[allow(clippy::string_add)]
let uri = if let Some(uri) = uri.as_ref().strip_prefix("https://") {
"wss://".to_owned() + uri
} else if let Some(uri) = uri.as_ref().strip_prefix("http://") {
"ws://".to_owned() + uri
let ws_uri = if let Some(https_uri) = uri.as_ref().strip_prefix("https://") {
"wss://".to_owned() + https_uri
} else if let Some(http_uri) = uri.as_ref().strip_prefix("http://") {
"ws://".to_owned() + http_uri
} else {
return Err(eyre!("No schema in web socket uri provided"));
};

let req = http::Request::builder()
.uri(uri)
.uri(ws_uri)
.set_headers(headers)
.wrap_err("Failed to build web socket request")?
.body(())
Expand Down
66 changes: 33 additions & 33 deletions config/derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ mod attrs {
}

fn get_type_argument<'a, 'b>(s: &'a str, ty: &'b Type) -> Option<&'b GenericArgument> {
let path = if let Type::Path(ty) = ty {
ty
let path = if let Type::Path(typ) = ty {
typ
} else {
return None;
};
Expand All @@ -45,9 +45,9 @@ fn get_type_argument<'a, 'b>(s: &'a str, ty: &'b Type) -> Option<&'b GenericArgu

fn is_arc_rwlock(ty: &Type) -> bool {
let dearced_ty = get_type_argument("Arc", ty)
.and_then(|ty| {
if let GenericArgument::Type(ty) = ty {
Some(ty)
.and_then(|typ| {
if let GenericArgument::Type(r#type) = typ {
Some(r#type)
} else {
None
}
Expand Down Expand Up @@ -125,42 +125,42 @@ fn impl_load_env(
.zip(field_idents.iter())
.zip(as_str.iter())
.zip(lvalue.iter())
.map(|(((ty, ident), &as_str), lvalue)| {
.map(|(((ty, ident), &as_str_attr), l_value)| {
let is_string = if let Type::Path(TypePath { path, .. }) = ty {
path.is_ident("String")
} else {
false
};
let set_field = if is_string {
quote! { #lvalue = var }
} else if as_str {
quote! { #l_value = var }
} else if as_str_attr {
quote! {
#lvalue = serde_json::from_value(var.into())
#l_value = serde_json::from_value(var.into())
.map_err(|e| iroha_config::derive::Error::field_error(stringify!(#ident), e))?
}
} else {
quote! {
#lvalue = serde_json::from_str(&var)
#l_value = serde_json::from_str(&var)
.map_err(|e| iroha_config::derive::Error::field_error(stringify!(#ident), e))?
}
};
(set_field, lvalue)
(set_field, l_value)
})
.zip(field_environment.iter())
.zip(inner.iter())
.map(|(((set_field, lvalue), field_environment), &inner)| {
let inner = if inner {
.map(|(((set_field, l_value), field_env), &inner_thing)| {
let inner_thing2 = if inner_thing {
quote! {
#lvalue.load_environment()?;
#l_value.load_environment()?;
}
} else {
quote! {}
};
quote! {
if let Ok(var) = std::env::var(#field_environment) {
if let Ok(var) = std::env::var(#field_env) {
#set_field;
}
#inner
#inner_thing2
}
});

Expand Down Expand Up @@ -197,14 +197,14 @@ fn impl_get_doc_recursive(
.zip(inner)
.zip(docs)
.zip(field_ty)
.map(|(((ident, inner), docs), ty)| {
if inner {
.map(|(((ident, inner_thing), documentation), ty)| {
if inner_thing {
quote! {
[stringify!(#ident)] => Some(#docs),
[stringify!(#ident)] => Some(#documentation),
[stringify!(#ident), rest @ ..] => <#ty as iroha_config::Configurable>::get_doc_recursive(rest)?,
}
} else {
quote! { [stringify!(#ident)] => Some(#docs), }
quote! { [stringify!(#ident)] => Some(#documentation), }
}
})
// XXX: Workaround
Expand Down Expand Up @@ -239,14 +239,14 @@ fn impl_get_docs(
.zip(inner)
.zip(docs)
.zip(field_ty)
.map(|(((ident, inner), docs), ty)| {
let docs = if inner {
.map(|(((ident, inner_thing), documentation), ty)| {
let doc = if inner_thing {
quote!{ <#ty as iroha_config::Configurable>::get_docs().into() }
} else {
quote!{ #docs.into() }
quote!{ #documentation.into() }
};

quote! { map.insert(stringify!(#ident).to_owned(), #docs); }
quote! { map.insert(stringify!(#ident).to_owned(), #doc); }
})
// XXX: Workaround
//Description of the issue is here https://stackoverflow.com/a/65353489
Expand Down Expand Up @@ -285,22 +285,22 @@ fn impl_get_recursive(
.iter()
.zip(inner)
.zip(lvalue.iter())
.map(|((ident, inner), lvalue)| {
let inner = if inner {
.map(|((ident, inner_thing), l_value)| {
let inner_thing2 = if inner_thing {
quote! {
[stringify!(#ident), rest @ ..] => {
#lvalue.get_recursive(rest)?
#l_value.get_recursive(rest)?
},
}
} else {
quote! {}
};
quote! {
[stringify!(#ident)] => {
serde_json::to_value(&#lvalue)
serde_json::to_value(&#l_value)
.map_err(|e| iroha_config::derive::Error::field_error(stringify!(#ident), e))?
}
#inner
#inner_thing2
}
})
// XXX: Workaround
Expand Down Expand Up @@ -385,12 +385,12 @@ fn impl_configurable(ast: &DeriveInput) -> TokenStream {
.iter()
.zip(field_environment.iter())
.zip(field_ty.iter())
.map(|((attrs, env), field_ty)| {
.map(|((attrs, env), field_type)| {
let real_doc = attrs
.iter()
.filter_map(|attr| attr.parse_meta().ok())
.find_map(|meta| {
if let Meta::NameValue(meta) = meta {
.find_map(|metadata| {
if let Meta::NameValue(meta) = metadata {
if meta.path.is_ident("doc") {
if let Lit::Str(s) = meta.lit {
return Some(s);
Expand All @@ -403,7 +403,7 @@ fn impl_configurable(ast: &DeriveInput) -> TokenStream {
let docs = format!(
"{}Has type `{}`. Can be configured via environment variable `{}`",
real_doc,
quote! { #field_ty }.to_string().replace(' ', ""),
quote! { #field_type }.to_string().replace(' ', ""),
env
);
LitStr::new(&docs, Span::mixed_site())
Expand Down
26 changes: 12 additions & 14 deletions core/src/block_sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ pub struct BlockSynchronizer<S: SumeragiTrait, W: WorldTrait> {
state: State,
gossip_period: Duration,
batch_size: u32,
n_topology_shifts_before_reshuffle: u64,
broker: Broker,
mailbox: usize,
}
Expand All @@ -56,7 +55,6 @@ pub trait BlockSynchronizerTrait: Actor + Handler<ContinueSync> + Handler<Messag
wsv: Arc<WorldStateView<Self::World>>,
sumeragi: AlwaysAddr<Self::Sumeragi>,
peer_id: PeerId,
n_topology_shifts_before_reshuffle: u64,
broker: Broker,
) -> Self;
}
Expand All @@ -70,7 +68,6 @@ impl<S: SumeragiTrait, W: WorldTrait> BlockSynchronizerTrait for BlockSynchroniz
wsv: Arc<WorldStateView<W>>,
sumeragi: AlwaysAddr<S>,
peer_id: PeerId,
n_topology_shifts_before_reshuffle: u64,
broker: Broker,
) -> Self {
Self {
Expand All @@ -80,7 +77,6 @@ impl<S: SumeragiTrait, W: WorldTrait> BlockSynchronizerTrait for BlockSynchroniz
state: State::Idle,
gossip_period: Duration::from_millis(config.gossip_period_ms),
batch_size: config.batch_size,
n_topology_shifts_before_reshuffle,
broker,
mailbox: config.mailbox,
}
Expand Down Expand Up @@ -161,8 +157,8 @@ impl<S: SumeragiTrait + Debug, W: WorldTrait> BlockSynchronizer<S, W> {

info!(blocks_left = blocks.len(), "Synchronizing blocks");

let (block, blocks) = if let Some((block, blocks)) = blocks.split_first() {
(block, blocks)
let (this_block, remaining_blocks) = if let Some((blck, blcks)) = blocks.split_first() {
(blck, blcks)
} else {
self.state = State::Idle;
self.request_latest_blocks_from_peer(peer_id).await;
Expand All @@ -171,34 +167,36 @@ impl<S: SumeragiTrait + Debug, W: WorldTrait> BlockSynchronizer<S, W> {

let mut network_topology = self
.sumeragi
.send(GetNetworkTopology(block.header().clone()))
.send(GetNetworkTopology(this_block.header().clone()))
.await;
// If it is genesis topology we cannot apply view changes as peers have custom order!
#[allow(clippy::expect_used)]
if !block.header().is_genesis() {
if !this_block.header().is_genesis() {
network_topology = network_topology
.into_builder()
.with_view_changes(block.header().view_change_proofs.clone())
.with_view_changes(this_block.header().view_change_proofs.clone())
.build()
.expect(
"Unreachable as doing view changes on valid topology will not raise an error.",
);
}
if self.wsv.as_ref().latest_block_hash() == block.header().previous_block_hash
if self.wsv.as_ref().latest_block_hash() == this_block.header().previous_block_hash
&& network_topology
.filter_signatures_by_roles(
&[Role::ValidatingPeer, Role::Leader, Role::ProxyTail],
block.verified_signatures().map(SignatureOf::transmute_ref),
this_block
.verified_signatures()
.map(SignatureOf::transmute_ref),
)
.len()
>= network_topology.min_votes_for_commit() as usize
{
self.state = State::InProgress(blocks.to_vec(), peer_id);
self.state = State::InProgress(remaining_blocks.to_vec(), peer_id);
self.sumeragi
.do_send(CommitBlock(block.clone().into()))
.do_send(CommitBlock(this_block.clone().into()))
.await;
} else {
warn!(block_hash = %block.hash(), "Failed to commit a block received via synchronization request - validation failed");
warn!(block_hash = %this_block.hash(), "Failed to commit a block received via synchronization request - validation failed");
self.state = State::Idle;
}
}
Expand Down
Loading

0 comments on commit 557991f

Please sign in to comment.