Skip to content

Commit

Permalink
Try fmt and clippy again
Browse files Browse the repository at this point in the history
  • Loading branch information
adzialocha committed May 29, 2024
1 parent 44aa814 commit c5b81d2
Show file tree
Hide file tree
Showing 11 changed files with 26 additions and 27 deletions.
3 changes: 1 addition & 2 deletions aquadoggo/src/api/config_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -281,8 +281,7 @@ impl TryFrom<ConfigFile> for Configuration {
.get_or_init(|| {
// Initialise a `TempDir` instance globally to make sure it does not go out of
// scope and get deleted before the end of the application runtime
tempfile::TempDir::new()
.expect("Could not create temporary directory to store blobs")
TempDir::new().expect("Could not create temporary directory to store blobs")
})
.path()
.to_path_buf(),
Expand Down
4 changes: 2 additions & 2 deletions aquadoggo/src/db/query/test_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ pub fn parse_str(key: &str, value: &[OperationValue]) -> Result<(String, FilterB
}

// Unwrap since we know at least one element exists
let element = value.get(0).unwrap();
let element = value.first().unwrap();

if key.ends_with("_gt") {
Ok((
Expand All @@ -41,7 +41,7 @@ pub fn parse_str(key: &str, value: &[OperationValue]) -> Result<(String, FilterB
false,
))
} else if key.ends_with("_gte") {
let element = value.get(0).context("Needs at least one value")?;
let element = value.first().context("Needs at least one value")?;

Ok((
clean_key(key, "_gte"),
Expand Down
10 changes: 5 additions & 5 deletions aquadoggo/src/db/stores/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -814,7 +814,7 @@ mod tests {
test_runner(|node: TestNode| async move {
// Populate the store with some entries and operations but DON'T materialise any resulting documents.
let documents = populate_store(&node.context.store, &config).await;
let document = documents.get(0).expect("At least one document");
let document = documents.first().expect("At least one document");

// Get the operations and build the document.
let operations = node
Expand Down Expand Up @@ -959,7 +959,7 @@ mod tests {
// Populate the store with some entries and operations but DON'T materialise any
// resulting documents.
let documents = populate_store(&node.context.store, &config).await;
let document = documents.get(0).expect("At least one document");
let document = documents.first().expect("At least one document");

// The document is successfully inserted into the database, this relies on the
// operations already being present and would fail if they were not.
Expand Down Expand Up @@ -1017,7 +1017,7 @@ mod tests {
// Populate the store with some entries and operations but DON'T materialise any
// resulting documents.
let documents = populate_store(&node.context.store, &config).await;
let document = documents.get(0).expect("At least one document");
let document = documents.first().expect("At least one document");

// Get the view id.
let view_id = document.view_id();
Expand Down Expand Up @@ -1060,7 +1060,7 @@ mod tests {
test_runner(|node: TestNode| async move {
// Populate the store with some entries and operations but DON'T materialise any resulting documents.
let documents = populate_store(&node.context.store, &config).await;
let document = documents.get(0).expect("At least one document");
let document = documents.first().expect("At least one document");

// Insert the document, this is possible even though it has been deleted.
let result = node.context.store.insert_document(document).await;
Expand All @@ -1086,7 +1086,7 @@ mod tests {
test_runner(|node: TestNode| async move {
// Populate the store with some entries and operations but DON'T materialise any resulting documents.
let documents = populate_store(&node.context.store, &config).await;
let document = documents.get(0).expect("At least one document");
let document = documents.first().expect("At least one document");

// Get the operations for this document and sort them into linear order.
let operations = node
Expand Down
8 changes: 4 additions & 4 deletions aquadoggo/src/db/stores/entry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ mod tests {
let _ = populate_store(&node.context.store, &config).await;

// The key pair of the author who published to the node.
let key_pair = config.authors.get(0).expect("At least one key pair");
let key_pair = config.authors.first().expect("At least one key pair");

// We get back the first entry.
let first_entry = node
Expand Down Expand Up @@ -403,7 +403,7 @@ mod tests {
// The public key of the author who published to the node.
let public_key_in_db = config
.authors
.get(0)
.first()
.expect("At least one key pair")
.public_key();

Expand Down Expand Up @@ -453,7 +453,7 @@ mod tests {
// The public key of the author who published to the node.
let public_key = config
.authors
.get(0)
.first()
.expect("At least one key pair")
.public_key();

Expand Down Expand Up @@ -529,7 +529,7 @@ mod tests {
// The public key of the author who published to the node.
let public_key = config
.authors
.get(0)
.first()
.expect("At least one key pair")
.public_key();

Expand Down
4 changes: 2 additions & 2 deletions aquadoggo/src/db/stores/operation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ fn group_and_parse_operation_rows(
// If we've moved on to the next operation, then push the complete vec of operation
// rows to the grouped rows collection and then setup for the next iteration.
grouped_operation_rows.push(current_operation_rows.clone());
current_operation_id = row.operation_id.clone();
current_operation_id.clone_from(&row.operation_id);
current_operation_rows = vec![row];
}
}
Expand Down Expand Up @@ -588,7 +588,7 @@ mod tests {
test_runner(|mut node: TestNode| async move {
// Populate the store with some entries and operations and materialize documents.
let documents = populate_and_materialize(&mut node, &config).await;
let document_id = documents.get(0).expect("At least one document id").id();
let document_id = documents.first().expect("At least one document id").id();

let operations_by_document_id = node
.context
Expand Down
2 changes: 1 addition & 1 deletion aquadoggo/src/db/types/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ impl AsDocument for StorageDocument {

/// Update the current view of this document.
fn update_view(&mut self, id: &DocumentViewId, view: Option<&DocumentViewFields>) {
self.view_id = id.to_owned();
id.clone_into(&mut self.view_id);
self.fields = view.cloned();

// If no view has been passed we can consider this document as deleted
Expand Down
12 changes: 6 additions & 6 deletions aquadoggo/src/graphql/scalars/entry_hash_scalar.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,12 @@ impl Display for EntryHashScalar {
}
}

/// Validation method used internally in `async-graphql` to check scalar values passed into the
/// public api.
fn validate(value: &Value) -> bool {
EntryHashScalar::from_value(value.to_owned()).is_ok()
}

#[cfg(test)]
mod tests {
use p2panda_rs::document::DocumentViewId;
Expand All @@ -69,9 +75,3 @@ mod tests {
}
}
}

/// Validation method used internally in `async-graphql` to check scalar values passed into the
/// public api.
fn validate(value: &Value) -> bool {
EntryHashScalar::from_value(value.to_owned()).is_ok()
}
2 changes: 1 addition & 1 deletion aquadoggo/src/materializer/service.rs
Original file line number Diff line number Diff line change
Expand Up @@ -465,7 +465,7 @@ mod tests {

// Send a message over the bus which kicks in materialization
tx.send(crate::bus::ServiceMessage::NewOperation(
p2panda_rs::entry::traits::AsEncodedEntry::hash(&entry_encoded).into(),
AsEncodedEntry::hash(&entry_encoded).into(),
))
.unwrap();

Expand Down
2 changes: 1 addition & 1 deletion aquadoggo/src/materializer/tasks/reduce.rs
Original file line number Diff line number Diff line change
Expand Up @@ -457,7 +457,7 @@ mod tests {

// We didn't reduce this document_view so it shouldn't exist in the db.
let document_view_id: DocumentViewId =
sorted_document_operations.get(0).unwrap().clone().0.into();
sorted_document_operations.first().unwrap().clone().0.into();

let document = node
.context
Expand Down
4 changes: 2 additions & 2 deletions aquadoggo/src/replication/strategies/log_height.rs
Original file line number Diff line number Diff line change
Expand Up @@ -392,10 +392,10 @@ mod tests {
let schema = config.schema.clone();

// Collect the values for the two authors and documents.
let key_pair_a = config.authors.get(0).unwrap();
let key_pair_a = config.authors.first().unwrap();
let key_pair_b = config.authors.get(1).unwrap();

let document_a = documents.get(0).unwrap().id();
let document_a = documents.first().unwrap().id();
let document_b = documents.get(1).unwrap().id();

// Compose the list of logs the a remote might need.
Expand Down
2 changes: 1 addition & 1 deletion aquadoggo/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ async fn e2e() {
// lightweight clients communicating with nodes that persist, replicate, materialise and
// serve the data.

let client = reqwest::Client::builder()
let client = Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()
.unwrap();
Expand Down

0 comments on commit c5b81d2

Please sign in to comment.