Revert "Make clippy happy"
This reverts commit e250ccd.
adzialocha committed May 28, 2024
1 parent e250ccd commit 44aa814
Showing 11 changed files with 29 additions and 26 deletions.
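Most of the hunks below swap `.first()` back to `.get(0)` on slices and `Vec`s; the reverted commit presumably applied clippy's `get_first` lint, which prefers the `.first()` spelling. The two calls behave identically, both return an `Option<&T>` and neither panics on an empty slice, so the revert is purely stylistic. A minimal sketch of the equivalence (standalone example, not code from this repository):

```rust
fn main() {
    let documents = vec!["doc-a", "doc-b"];

    // `get(0)` and `first()` are interchangeable: both return an Option and
    // are safe to call on an empty slice.
    assert_eq!(documents.get(0), documents.first());

    // The call sites in this revert use the `.get(0)` spelling, typically
    // followed by `.expect(...)` or `.unwrap()` as in the hunks below.
    let first = documents.get(0).expect("At least one document");
    println!("{first}");
}
```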
aquadoggo/src/api/config_file.rs (2 changes: 1 addition & 1 deletion)
@@ -281,7 +281,7 @@ impl TryFrom<ConfigFile> for Configuration {
            .get_or_init(|| {
                // Initialise a `TempDir` instance globally to make sure it does not run out of
                // scope and gets deleted before the end of the application runtime
-               TempDir::new()
+               tempfile::TempDir::new()
                    .expect("Could not create temporary directory to store blobs")
            })
            .path()
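For context, the hunk above restores a fully qualified `tempfile::TempDir::new()` call inside a lazily initialised global that keeps the temporary blob directory alive for the whole process (see the source comment). A rough sketch of that pattern, assuming a `std::sync::OnceLock` static and the `tempfile` crate as a dependency; the static name `BLOB_DIR` and the choice of cell type are illustrative, not the repository's actual code:

```rust
use std::path::PathBuf;
use std::sync::OnceLock;

use tempfile::TempDir;

// Held in a global so the TempDir guard is never dropped (and the directory
// never deleted) while the application is still running.
static BLOB_DIR: OnceLock<TempDir> = OnceLock::new();

fn blobs_base_path() -> PathBuf {
    BLOB_DIR
        .get_or_init(|| {
            TempDir::new().expect("Could not create temporary directory to store blobs")
        })
        .path()
        .to_path_buf()
}

fn main() {
    // Both calls return the same path; the directory is created only once.
    assert_eq!(blobs_base_path(), blobs_base_path());
    println!("{}", blobs_base_path().display());
}
```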
aquadoggo/src/db/query/test_utils.rs (4 changes: 2 additions & 2 deletions)
@@ -29,7 +29,7 @@ pub fn parse_str(key: &str, value: &[OperationValue]) -> Result<(String, FilterB
    }

    // Unwrap since we know at least one element exists
-   let element = value.first().unwrap();
+   let element = value.get(0).unwrap();

    if key.ends_with("_gt") {
        Ok((
@@ -41,7 +41,7 @@
            false,
        ))
    } else if key.ends_with("_gte") {
-       let element = value.first().context("Needs at least one value")?;
+       let element = value.get(0).context("Needs at least one value")?;

        Ok((
            clean_key(key, "_gte"),
aquadoggo/src/db/stores/document.rs (10 changes: 5 additions & 5 deletions)
@@ -814,7 +814,7 @@ mod tests {
        test_runner(|node: TestNode| async move {
            // Populate the store with some entries and operations but DON'T materialise any resulting documents.
            let documents = populate_store(&node.context.store, &config).await;
-           let document = documents.first().expect("At least one document");
+           let document = documents.get(0).expect("At least one document");

            // Get the operations and build the document.
            let operations = node
@@ -959,7 +959,7 @@
            // Populate the store with some entries and operations but DON'T materialise any
            // resulting documents.
            let documents = populate_store(&node.context.store, &config).await;
-           let document = documents.first().expect("At least one document");
+           let document = documents.get(0).expect("At least one document");

            // The document is successfully inserted into the database, this relies on the
            // operations already being present and would fail if they were not.
@@ -1017,7 +1017,7 @@
            // Populate the store with some entries and operations but DON'T materialise any
            // resulting documents.
            let documents = populate_store(&node.context.store, &config).await;
-           let document = documents.first().expect("At least one document");
+           let document = documents.get(0).expect("At least one document");

            // Get the view id.
            let view_id = document.view_id();
@@ -1060,7 +1060,7 @@
        test_runner(|node: TestNode| async move {
            // Populate the store with some entries and operations but DON'T materialise any resulting documents.
            let documents = populate_store(&node.context.store, &config).await;
-           let document = documents.first().expect("At least one document");
+           let document = documents.get(0).expect("At least one document");

            // Insert the document, this is possible even though it has been deleted.
            let result = node.context.store.insert_document(document).await;
@@ -1086,7 +1086,7 @@
        test_runner(|node: TestNode| async move {
            // Populate the store with some entries and operations but DON'T materialise any resulting documents.
            let documents = populate_store(&node.context.store, &config).await;
-           let document = documents.first().expect("At least one document");
+           let document = documents.get(0).expect("At least one document");

            // Get the operations for this document and sort them into linear order.
            let operations = node
aquadoggo/src/db/stores/entry.rs (11 changes: 7 additions & 4 deletions)
@@ -358,7 +358,7 @@ mod tests {
            let _ = populate_store(&node.context.store, &config).await;

            // The key pair of the author who published to the note.
-           let key_pair = config.authors.first().expect("At least one key pair");
+           let key_pair = config.authors.get(0).expect("At least one key pair");

            // We get back the first entry.
            let first_entry = node
@@ -402,7 +402,8 @@

            // The public key of the author who published to the node.
            let public_key_in_db = config
-               .authors.first()
+               .authors
+               .get(0)
                .expect("At least one key pair")
                .public_key();

@@ -451,7 +452,8 @@
            let _ = populate_store(&node.context.store, &config).await;
            // The public key of the author who published to the node.
            let public_key = config
-               .authors.first()
+               .authors
+               .get(0)
                .expect("At least one key pair")
                .public_key();

@@ -526,7 +528,8 @@

            // The public key of the author who published to the node.
            let public_key = config
-               .authors.first()
+               .authors
+               .get(0)
                .expect("At least one key pair")
                .public_key();

aquadoggo/src/db/stores/operation.rs (4 changes: 2 additions & 2 deletions)
@@ -227,7 +227,7 @@ fn group_and_parse_operation_rows(
            // If we've moved on to the next operation, then push the complete vec of operation
            // rows to the grouped rows collection and then setup for the next iteration.
            grouped_operation_rows.push(current_operation_rows.clone());
-           current_operation_id.clone_from(&row.operation_id);
+           current_operation_id = row.operation_id.clone();
            current_operation_rows = vec![row];
        }
    }
@@ -588,7 +588,7 @@
        test_runner(|mut node: TestNode| async move {
            // Populate the store with some entries and operations and materialize documents.
            let documents = populate_and_materialize(&mut node, &config).await;
-           let document_id = documents.first().expect("At least one document id").id();
+           let document_id = documents.get(0).expect("At least one document id").id();

            let operations_by_document_id = node
                .context
aquadoggo/src/db/types/document.rs (2 changes: 1 addition & 1 deletion)
@@ -69,7 +69,7 @@ impl AsDocument for StorageDocument {

    /// Update the current view of this document.
    fn update_view(&mut self, id: &DocumentViewId, view: Option<&DocumentViewFields>) {
-       id.clone_into(&mut self.view_id);
+       self.view_id = id.to_owned();
        self.fields = view.cloned();

        // If no view has been passed we can consider this document as deleted
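The `clone_from` and `clone_into` lines reverted in `operation.rs` and `types/document.rs` above are the in-place cloning forms that clippy suggests (most likely via its `assigning_clones` lint) because they can reuse an existing allocation; the revert returns to the plain `x = y.clone()` / `x = y.to_owned()` style. A small self-contained illustration of the two equivalent spellings; the type and values are invented for the example:

```rust
#[derive(Clone, Debug, PartialEq)]
struct DocumentViewId(String);

fn main() {
    let incoming = DocumentViewId("0020new".to_string());

    // Style restored by this revert: drop the old value and assign a fresh clone.
    let mut view_a = DocumentViewId("0020old".to_string());
    println!("before: {view_a:?}");
    view_a = incoming.clone();

    // Style suggested by clippy: clone in place via `Clone::clone_from`
    // (or `ToOwned::clone_into`), which may reuse the existing allocation.
    let mut view_b = DocumentViewId("0020old".to_string());
    view_b.clone_from(&incoming);

    assert_eq!(view_a, view_b);
    println!("after: {view_a:?}");
}
```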
aquadoggo/src/graphql/scalars/entry_hash_scalar.rs (12 changes: 6 additions & 6 deletions)
@@ -57,12 +57,6 @@ impl Display for EntryHashScalar {
    }
}

-/// Validation method used internally in `async-graphql` to check scalar values passed into the
-/// public api.
-fn validate(value: &Value) -> bool {
-    EntryHashScalar::from_value(value.to_owned()).is_ok()
-}
-
#[cfg(test)]
mod tests {
    use p2panda_rs::document::DocumentViewId;
@@ -75,3 +69,9 @@
        }
    }
}
+
+/// Validation method used internally in `async-graphql` to check scalar values passed into the
+/// public api.
+fn validate(value: &Value) -> bool {
+    EntryHashScalar::from_value(value.to_owned()).is_ok()
+}
aquadoggo/src/materializer/service.rs (2 changes: 1 addition & 1 deletion)
@@ -465,7 +465,7 @@ mod tests {

            // Send a message over the bus which kicks in materialization
            tx.send(crate::bus::ServiceMessage::NewOperation(
-               AsEncodedEntry::hash(&entry_encoded).into(),
+               p2panda_rs::entry::traits::AsEncodedEntry::hash(&entry_encoded).into(),
            ))
            .unwrap();

aquadoggo/src/materializer/tasks/reduce.rs (2 changes: 1 addition & 1 deletion)
@@ -457,7 +457,7 @@ mod tests {

            // We didn't reduce this document_view so it shouldn't exist in the db.
            let document_view_id: DocumentViewId =
-               sorted_document_operations.first().unwrap().clone().0.into();
+               sorted_document_operations.get(0).unwrap().clone().0.into();

            let document = node
                .context
aquadoggo/src/replication/strategies/log_height.rs (4 changes: 2 additions & 2 deletions)
@@ -392,10 +392,10 @@ mod tests {
        let schema = config.schema.clone();

        // Collect the values for the two authors and documents.
-       let key_pair_a = config.authors.first().unwrap();
+       let key_pair_a = config.authors.get(0).unwrap();
        let key_pair_b = config.authors.get(1).unwrap();

-       let document_a = documents.first().unwrap().id();
+       let document_a = documents.get(0).unwrap().id();
        let document_b = documents.get(1).unwrap().id();

        // Compose the list of logs the a remote might need.
aquadoggo/src/tests.rs (2 changes: 1 addition & 1 deletion)
@@ -71,7 +71,7 @@ async fn e2e() {
    // lightweight clients communicating with nodes who persist, replicate, materialise and
    // serve the data.

-   let client = Client::builder()
+   let client = reqwest::Client::builder()
        .redirect(reqwest::redirect::Policy::none())
        .build()
        .unwrap();
