diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs
index e85786dfe0f1..bdfb340f5191 100644
--- a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs
+++ b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs
@@ -325,13 +325,13 @@ pub(super) fn validate_unsupported_field_type(field: ScalarFieldWalker<'_>, ctx:
     let source = if let Some(s) = ctx.datasource { s } else { return };
 
     static TYPE_REGEX: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r#"(?x)
+        Regex::new(r"(?x)
     ^                           # beginning of the string
     (?P<prefix>[^(]+)           # a required prefix that is any character until the first opening brace
     (?:\((?P<params>.*?)\))?    # (optional) an opening parenthesis, a closing parenthesis and captured params in-between
     (?P<suffix>.+)?             # (optional) captured suffix after the params until the end of the string
     $                           # end of the string
-    "#).unwrap()
+    ").unwrap()
     });
 
     let connector = source.active_connector;
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs
index 671d1bda43d1..71a07dacae5c 100644
--- a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs
+++ b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs
@@ -307,17 +307,17 @@ mod tests {
     use super::is_leftwise_included_it;
 
     #[test]
     fn test_is_left_wise_included() {
-        let item = vec![1, 2];
-        let group = vec![1, 2, 3, 4];
+        let item = [1, 2];
+        let group = [1, 2, 3, 4];
         assert!(is_leftwise_included_it(item.iter(), group.iter()));
-        let item = vec![1, 2, 3, 4];
-        let group = vec![1, 2, 3, 4];
+        let item = [1, 2, 3, 4];
+        let group = [1, 2, 3, 4];
         assert!(is_leftwise_included_it(item.iter(), group.iter()));
-        let item = vec![1, 2, 3, 4];
-        let group = vec![1, 2];
+        let item = [1, 2, 3, 4];
+        let group = [1, 2];
         assert!(!is_leftwise_included_it(item.iter(), group.iter()));
-        let item = vec![2, 3];
-        let group = vec![1, 2, 3, 4];
+        let item = [2, 3];
+        let group = [1, 2, 3, 4];
         assert!(!is_leftwise_included_it(item.iter(), group.iter()));
     }
 }
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs
index c2a2ed1c185b..eb4571914c04 100644
--- a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs
+++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs
@@ -194,7 +194,7 @@ pub(crate) fn validate_no_referential_actions(
         field
             .explicit_on_delete_span()
             .into_iter()
-            .chain(field.explicit_on_update_span().into_iter())
+            .chain(field.explicit_on_update_span())
     });
 
     for span in referential_action_spans {
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs
index 8e3a15782c98..d2fa7609876d 100644
--- a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs
+++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs
@@ -49,7 +49,7 @@ pub(crate) fn validate_no_referential_actions(relation: ImplicitManyToManyRelati
         field
             .explicit_on_delete_span()
             .into_iter()
-            .chain(field.explicit_on_update_span().into_iter())
+            .chain(field.explicit_on_update_span())
     });
 
     for span in referential_action_spans {
diff --git a/psl/psl/tests/parsing/literals.rs b/psl/psl/tests/parsing/literals.rs
index 2636a4bf40a2..2a20be397c9e 100644
--- a/psl/psl/tests/parsing/literals.rs
+++ b/psl/psl/tests/parsing/literals.rs
@@ -378,7 +378,7 @@ fn sql_server_absolute_windows_ca_file_should_not_be_modified() {
         .unwrap();
 
     assert_eq!(
-        r#"sqlserver://localhost:1433;trustServerCertificateCA=C:{\\}path{\}customCA.crt"#,
+        r"sqlserver://localhost:1433;trustServerCertificateCA=C:{\\}path{\}customCA.crt",
        url
    )
 }
diff --git a/psl/schema-ast/src/parser/parse_expression.rs b/psl/schema-ast/src/parser/parse_expression.rs
index 1dbac0467166..c5a9b68b17fc 100644
--- a/psl/schema-ast/src/parser/parse_expression.rs
+++ b/psl/schema-ast/src/parser/parse_expression.rs
@@ -113,8 +113,8 @@ fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Strin
                 final_span.start += start;
                 final_span.end = final_span.start + 1 + c.len_utf8();
                 diagnostics.push_error(DatamodelError::new_static(
-                    r#"Unknown escape sequence. If the value is a windows-style path, `\` must be escaped as `\\`."#,
-                    final_span
+                    r"Unknown escape sequence. If the value is a windows-style path, `\` must be escaped as `\\`.",
+                    final_span,
                 ));
             }
         },
@@ -162,7 +162,7 @@ fn try_parse_unicode_codepoint(
         }
         (consumed_second_codepoint, Some(second_codepoint)) => {
             // UTF-16 surrogate with
-            let char = match char::decode_utf16([first_codepoint, second_codepoint].into_iter()).next() {
+            let char = match char::decode_utf16([first_codepoint, second_codepoint]).next() {
                 Some(Ok(c)) => Some(c),
                 _ => {
                     diagnostics.push_error(unicode_sequence_error(
diff --git a/quaint/src/ast/compare.rs b/quaint/src/ast/compare.rs
index 9f0a697e3e75..d92843a23557 100644
--- a/quaint/src/ast/compare.rs
+++ b/quaint/src/ast/compare.rs
@@ -126,7 +126,7 @@ impl<'a> Compare<'a> {
         let base_select = super::Select::from_table(ident).column(selected_columns.remove(0));
 
         // We know we have the same amount of columns on both sides,
-        let column_pairs = cols.into_iter().zip(selected_columns.into_iter());
+        let column_pairs = cols.into_iter().zip(selected_columns);
 
         // Adding to the new select a condition to filter out the rest of
         // the tuple, so if our tuple is `(a, b) IN (SELECT x, y ..)`, this
diff --git a/quaint/src/ast/merge.rs b/quaint/src/ast/merge.rs
index 62a046837e54..b2b5a2e88e72 100644
--- a/quaint/src/ast/merge.rs
+++ b/quaint/src/ast/merge.rs
@@ -106,7 +106,7 @@ impl<'a> TryFrom<Insert<'a>> for Merge<'a> {
         let query = match insert.values.kind {
             ExpressionKind::Row(row) => {
-                let cols_vals = columns.iter().zip(row.values.into_iter());
+                let cols_vals = columns.iter().zip(row.values);
 
                 let select = cols_vals.fold(Select::default(), |query, (col, val)| {
                     query.value(val.alias(col.name.clone()))
@@ -117,14 +117,14 @@ impl<'a> TryFrom<Insert<'a>> for Merge<'a> {
             ExpressionKind::Values(values) => {
                 let mut rows = values.rows;
                 let row = rows.pop().unwrap();
-                let cols_vals = columns.iter().zip(row.values.into_iter());
+                let cols_vals = columns.iter().zip(row.values);
 
                 let select = cols_vals.fold(Select::default(), |query, (col, val)| {
                     query.value(val.alias(col.name.clone()))
                 });
 
                 let union = rows.into_iter().fold(Union::new(select), |union, row| {
-                    let cols_vals = columns.iter().zip(row.values.into_iter());
+                    let cols_vals = columns.iter().zip(row.values);
 
                     let select = cols_vals.fold(Select::default(), |query, (col, val)| {
                         query.value(val.alias(col.name.clone()))
diff --git a/quaint/src/ast/update.rs b/quaint/src/ast/update.rs
index 43b97dbbb401..5d35929eac60 100644
--- a/quaint/src/ast/update.rs
+++ b/quaint/src/ast/update.rs
@@ -150,10 +150,7 @@ impl<'a> Update<'a> {
     /// # }
     /// ```
     #[cfg(any(feature = "postgresql", feature = "sqlite"))]
-    #[cfg_attr(
-        feature = "docs",
-        doc(cfg(any(feature = "postgresql", feature = "sqlite")))
-    )]
+    #[cfg_attr(feature = "docs", doc(cfg(any(feature = "postgresql", feature = "sqlite")))) ]
     pub fn returning<K, I>(mut self, columns: I) -> Self
     where
         K: Into<Column<'a>>,
diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs
index a2b5e440464f..9fc67e9d662f 100644
--- a/quaint/src/tests/query.rs
+++ b/quaint/src/tests/query.rs
@@ -2991,7 +2991,7 @@ async fn generate_binary_uuid(api: &mut dyn TestApi) -> crate::Result<()> {
     let val = res.into_single()?;
 
     // If it is a byte type and has a value, it's a generated UUID.
-    assert!(matches!(val, Value::Bytes(x) if matches!(x, Some(_))));
+    assert!(matches!(val, Value::Bytes(x) if x.is_some()));
 
     Ok(())
 }
@@ -3004,7 +3004,7 @@ async fn generate_swapped_binary_uuid(api: &mut dyn TestApi) -> crate::Result<()
     let val = res.into_single()?;
 
     // If it is a byte type and has a value, it's a generated UUID.
-    assert!(matches!(val, Value::Bytes(x) if matches!(x, Some(_))));
+    assert!(matches!(val, Value::Bytes(x) if x.is_some()));
 
     Ok(())
 }
@@ -3017,7 +3017,7 @@ async fn generate_native_uuid(api: &mut dyn TestApi) -> crate::Result<()> {
     let val = res.into_single()?;
 
     // If it is a text type and has a value, it's a generated string UUID.
-    assert!(matches!(val, Value::Text(x) if matches!(x, Some(_))));
+    assert!(matches!(val, Value::Text(x) if x.is_some()));
 
     Ok(())
 }
diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs
index f1666b3881b0..39aca6de2d52 100644
--- a/quaint/src/tests/types/sqlite.rs
+++ b/quaint/src/tests/types/sqlite.rs
@@ -175,7 +175,7 @@ async fn test_get_int64_from_int32_field_fails(api: &mut dyn TestApi) -> crate::
     let select = Select::from_table(&table).column("value").order_by("id".descend());
     let res = api.conn().select(select).await;
 
-    assert!(matches!(res, Err(_)));
+    assert!(res.is_err());
 
     Ok(())
 }
diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs
index 1050c6aa1ae2..9f4d9bcb5bcd 100644
--- a/quaint/src/visitor.rs
+++ b/quaint/src/visitor.rs
@@ -315,7 +315,7 @@ pub trait Visitor<'a> {
     {
         self.write(" SET ")?;
 
-        let pairs = update.columns.into_iter().zip(update.values.into_iter());
+        let pairs = update.columns.into_iter().zip(update.values);
         let len = pairs.len();
 
         for (i, (key, value)) in pairs.enumerate() {
@@ -365,7 +365,7 @@ pub trait Visitor<'a> {
     }
 
     fn visit_update_set(&mut self, update: Update<'a>) -> Result {
-        let pairs = update.columns.into_iter().zip(update.values.into_iter());
+        let pairs = update.columns.into_iter().zip(update.values);
         let len = pairs.len();
 
         for (i, (key, value)) in pairs.enumerate() {
diff --git a/query-engine/black-box-tests/tests/black_box_tests.rs b/query-engine/black-box-tests/tests/black_box_tests.rs
index 41aa88b2ecd0..6c2028e1fe0f 100644
--- a/query-engine/black-box-tests/tests/black_box_tests.rs
+++ b/query-engine/black-box-tests/tests/black_box_tests.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::module_inception, clippy::needless_raw_string_hashes)]
+
 mod helpers;
 mod metrics;
diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
index d39a9fb20d7f..e3826d9cafe2 100644
--- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
+++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
@@ -26,7 +26,7 @@ mod smoke_tests {
             let res = client
                 .post("http://0.0.0.0:57582/")
                 .body(
-                    r###"
+                    r#"
                    {
                        "action": "findMany",
                        "modelName": "Person",
                        "query": {
                            "arguments": {
                            },
                            "selection": {
                                "$scalars": true
                            }
                        }
                    }
-                    "###,
+                    "#,
                )
                .send()
                .await
diff --git a/query-engine/black-box-tests/tests/protocols/mismatched.rs b/query-engine/black-box-tests/tests/protocols/mismatched.rs
index a9b8c29ade00..be98922edd12 100644
--- a/query-engine/black-box-tests/tests/protocols/mismatched.rs
+++ b/query-engine/black-box-tests/tests/protocols/mismatched.rs
@@ -1,7 +1,7 @@
 use crate::helpers::*;
 use query_engine_tests::*;
 
-const JSON_QUERY: &str = r###"
+const JSON_QUERY: &str = r#"
 {
    "action": "findMany",
    "modelName": "Person",
@@ -13,15 +13,15 @@
    "query": {
        "arguments": {
        },
        "selection": {
            "$scalars": true
        }
    }
 }
-"###;
+"#;
 
-const GRAPHQL_QUERY: &str = r###"
+const GRAPHQL_QUERY: &str = r#"
 {
    "operationName": null,
    "variables": {},
    "query": "{\n findManyPerson {\n id\n }\n}\n"
 }
-"###;
+"#;
 
 #[test_suite(schema(schema))]
 mod mismatched {
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs
index 1a6523986f4c..e45cef8ac306 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs
@@ -357,7 +357,7 @@ mod interactive_tx {
         // Mongo for example doesn't read the inner commit value.
         is_one_of!(
             run_query!(&runner, r#"query { findManyTestModel { id }}"#),
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"id":1}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"id":1},{"id":2}]}}"#
             ]
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs
index cb264d681c23..a30808902c1d 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs
@@ -208,7 +208,7 @@ mod json_filters {
         // MySQL has slightly different semantics and also coerces null to [null].
         is_one_of!(
             run_query!(runner, jsonq(&runner, r#"array_contains: "null""#, None)),
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"id":7}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"id":7},{"id":8}]}}"#
             ]
@@ -216,7 +216,7 @@ mod json_filters {
 
         is_one_of!(
             run_query!(runner, jsonq(&runner, r#"array_contains: "[null]""#, None)),
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"id":7}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"id":7},{"id":8}]}}"#
             ]
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs
index 11ab7e2350ab..4177f4d3a07d 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs
@@ -104,9 +104,9 @@ mod views {
                 &runner,
                 r#"{ findManyTestView(orderBy: { children: { _count: asc } }) { id _count { children } } }"#
             ),
-            vec![
+            [
                 r#"{"data":{"findManyTestView":[{"id":2,"_count":{"children":0}},{"id":3,"_count":{"children":0}},{"id":1,"_count":{"children":2}}]}}"#,
-                r#"{"data":{"findManyTestView":[{"id":3,"_count":{"children":0}},{"id":2,"_count":{"children":0}},{"id":1,"_count":{"children":2}}]}}"#,
+                r#"{"data":{"findManyTestView":[{"id":3,"_count":{"children":0}},{"id":2,"_count":{"children":0}},{"id":1,"_count":{"children":2}}]}}"#
             ]
         );
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/query_engine_tests.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/query_engine_tests.rs
index ec9aea568d69..e160424b9382 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/query_engine_tests.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/query_engine_tests.rs
@@ -1,4 +1,8 @@
-#![allow(clippy::module_inception, clippy::too_many_arguments)]
+#![allow(
+    clippy::module_inception,
+    clippy::too_many_arguments,
+    clippy::needless_raw_string_hashes
+)]
 
 mod new;
 mod queries;
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs
index 41f023bd6516..fd0068761a55 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs
@@ -131,7 +131,7 @@ mod update_many {
 
         is_one_of!(
             query_number_operation(&runner, "optInt", "increment", "10").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":12},{"optInt":13}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":10},{"optInt":12},{"optInt":13}]}}"#
             ]
@@ -140,7 +140,7 @@ mod update_many {
         // optInts before this op are now: null/10, 12, 13
         is_one_of!(
             query_number_operation(&runner, "optInt", "decrement", "10").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":2},{"optInt":3}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":0},{"optInt":2},{"optInt":3}]}}"#
             ]
@@ -149,7 +149,7 @@ mod update_many {
         // optInts before this op are now: null/0, 2, 3
         is_one_of!(
             query_number_operation(&runner, "optInt", "multiply", "2").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":4},{"optInt":6}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":0},{"optInt":4},{"optInt":6}]}}"#
             ]
@@ -160,7 +160,7 @@ mod update_many {
         // optInts before this op are now: null/0, 4, 6
         is_one_of!(
             query_number_operation(&runner, "optInt", "divide", "3").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":1},{"optInt":2}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":0},{"optInt":1},{"optInt":2}]}}"#
             ]
@@ -169,7 +169,7 @@ mod update_many {
 
         is_one_of!(
             query_number_operation(&runner, "optInt", "set", "5").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":5},{"optInt":5},{"optInt":5}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":5},{"optInt":5},{"optInt":5}]}}"#
             ]
@@ -177,7 +177,7 @@ mod update_many {
 
         is_one_of!(
             query_number_operation(&runner, "optInt", "set", "null").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":null},{"optInt":null}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":null},{"optInt":null}]}}"#
             ]
@@ -196,7 +196,7 @@ mod update_many {
 
         is_one_of!(
            query_number_operation(&runner, "optInt", "increment", "10").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":12},{"optInt":13}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":10},{"optInt":12},{"optInt":13}]}}"#
             ]
@@ -205,7 +205,7 @@ mod update_many {
         // optInts before this op are now: null/10, 12, 13
         is_one_of!(
             query_number_operation(&runner, "optInt", "decrement", "10").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":2},{"optInt":3}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":0},{"optInt":2},{"optInt":3}]}}"#
             ]
@@ -214,7 +214,7 @@ mod update_many {
         // optInts before this op are now: null/0, 2, 3
         is_one_of!(
             query_number_operation(&runner, "optInt", "multiply", "2").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":4},{"optInt":6}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":0},{"optInt":4},{"optInt":6}]}}"#
             ]
@@ -222,7 +222,7 @@ mod update_many {
 
         is_one_of!(
             query_number_operation(&runner, "optInt", "set", "5").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":5},{"optInt":5},{"optInt":5}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":5},{"optInt":5},{"optInt":5}]}}"#
             ]
@@ -230,7 +230,7 @@ mod update_many {
 
         is_one_of!(
             query_number_operation(&runner, "optInt", "set", "null").await?,
-            vec![
+            [
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":null},{"optInt":null}]}}"#,
                 r#"{"data":{"findManyTestModel":[{"optInt":null},{"optInt":null},{"optInt":null}]}}"#
             ]
diff --git a/query-engine/connectors/mongodb-query-connector/src/value.rs b/query-engine/connectors/mongodb-query-connector/src/value.rs
index 1df1caa53be2..cf984ad76830 100644
--- a/query-engine/connectors/mongodb-query-connector/src/value.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/value.rs
@@ -139,7 +139,7 @@ impl IntoBson for (&MongoDbType, PrismaValue) {
                 }
 
                 let mut bytes: [u8; 12] = [0x0; 12];
-                bytes.iter_mut().set_from(b.into_iter());
+                bytes.iter_mut().set_from(b);
 
                 Bson::ObjectId(ObjectId::from_bytes(bytes))
             }
diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/connectors/sql-query-connector/src/join_utils.rs
index 51d180fa3b7f..dbec0e430951 100644
--- a/query-engine/connectors/sql-query-connector/src/join_utils.rs
+++ b/query-engine/connectors/sql-query-connector/src/join_utils.rs
@@ -98,7 +98,7 @@ fn compute_aggr_join_one2m(
     // + GROUP BY Child.
     let query = right_fields.iter().fold(query, |acc, f| acc.group_by(f.as_column(ctx)));
 
-    let pairs = left_fields.into_iter().zip(right_fields.into_iter());
+    let pairs = left_fields.into_iter().zip(right_fields);
     let on_conditions: Vec = pairs
         .map(|(a, b)| {
             let col_a = match previous_join {
@@ -238,7 +238,7 @@ pub(crate) fn compute_one2m_join(
     let right_table_alias = format!("{}_{}", join_prefix, rf.related_model().name());
     let related_model = rf.related_model();
 
-    let pairs = left_fields.into_iter().zip(right_fields.into_iter());
+    let pairs = left_fields.into_iter().zip(right_fields);
 
     let on_conditions: Vec = pairs
         .map(|(a, b)| {
diff --git a/query-engine/core/src/query_document/parser.rs b/query-engine/core/src/query_document/parser.rs
index 012a355b4980..58a814692271 100644
--- a/query-engine/core/src/query_document/parser.rs
+++ b/query-engine/core/src/query_document/parser.rs
@@ -715,7 +715,7 @@ impl QueryDocumentParser {
                     })
                     .collect::>>()?;
 
-                map.extend(defaults.into_iter());
+                map.extend(defaults);
 
                 // Ensure the constraints are upheld. If any `fields` are specified, then the constraints should be upheld against those only.
                 // If no `fields` are specified, then the constraints should be upheld against all fields of the object.
diff --git a/query-engine/core/src/query_document/selection.rs b/query-engine/core/src/query_document/selection.rs
index d348575640f7..206fc95c8315 100644
--- a/query-engine/core/src/query_document/selection.rs
+++ b/query-engine/core/src/query_document/selection.rs
@@ -196,18 +196,15 @@ impl<'a> From<In<'a>> for ArgumentValue {
     fn from(other: In<'a>) -> Self {
         match other.selection_set {
             SelectionSet::Multi(key_sets, val_sets) => {
-                let key_vals = key_sets.into_iter().zip(val_sets.into_iter());
+                let key_vals = key_sets.into_iter().zip(val_sets);
 
                 let conjuctive = key_vals.fold(Conjuctive::new(), |acc, (keys, vals)| {
-                    let ands = keys
-                        .into_iter()
-                        .zip(vals.into_iter())
-                        .fold(Conjuctive::new(), |acc, (key, val)| {
-                            let mut argument = IndexMap::new();
-                            argument.insert(key.into_owned(), val);
-
-                            acc.and(argument)
-                        });
+                    let ands = keys.into_iter().zip(vals).fold(Conjuctive::new(), |acc, (key, val)| {
+                        let mut argument = IndexMap::new();
+                        argument.insert(key.into_owned(), val);
+
+                        acc.and(argument)
+                    });
 
                     acc.or(ands)
                 });
diff --git a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs
index 4ebf7d8e7079..f4c890aa7d00 100644
--- a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs
+++ b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs
@@ -15,51 +15,47 @@ pub fn extract_query_args(
     arguments: Vec<ParsedArgument<'_>>,
     model: &Model,
 ) -> QueryGraphBuilderResult<QueryArguments> {
-    let query_args = arguments.into_iter().fold(
-        Ok(QueryArguments::new(model.clone())),
-        |result: QueryGraphBuilderResult<QueryArguments>, arg| {
-            if let Ok(res) = result {
-                match arg.name.as_str() {
-                    args::CURSOR => Ok(QueryArguments {
-                        cursor: extract_cursor(arg.value, model)?,
-                        ..res
-                    }),
-
-                    args::TAKE => Ok(QueryArguments {
-                        take: arg.value.try_into()?,
-                        ..res
-                    }),
-
-                    args::SKIP => Ok(QueryArguments {
-                        skip: extract_skip(arg.value)?,
-                        ..res
-                    }),
-
-                    args::ORDER_BY => Ok(QueryArguments {
-                        order_by: extract_order_by(&model.into(), arg.value)?,
-                        ..res
-                    }),
-
-                    args::DISTINCT => Ok(QueryArguments {
-                        distinct: Some(extract_distinct(arg.value)?),
-                        ..res
-                    }),
-
-                    args::WHERE => {
-                        let val: Option<ParsedInputMap<'_>> = arg.value.try_into()?;
-                        match val {
-                            Some(m) => {
-                                let filter = Some(extract_filter(m, model)?);
-                                Ok(QueryArguments { filter, ..res })
-                            }
-                            None => Ok(res),
+    let query_args = arguments.into_iter().try_fold(
+        QueryArguments::new(model.clone()),
+        |result, arg| -> QueryGraphBuilderResult<QueryArguments> {
+            match arg.name.as_str() {
+                args::CURSOR => Ok(QueryArguments {
+                    cursor: extract_cursor(arg.value, model)?,
+                    ..result
+                }),
+
+                args::TAKE => Ok(QueryArguments {
+                    take: arg.value.try_into()?,
+                    ..result
+                }),
+
+                args::SKIP => Ok(QueryArguments {
+                    skip: extract_skip(arg.value)?,
+                    ..result
+                }),
+
+                args::ORDER_BY => Ok(QueryArguments {
+                    order_by: extract_order_by(&model.into(), arg.value)?,
+                    ..result
+                }),
+
+                args::DISTINCT => Ok(QueryArguments {
+                    distinct: Some(extract_distinct(arg.value)?),
+                    ..result
+                }),
+
+                args::WHERE => {
+                    let val: Option<ParsedInputMap<'_>> = arg.value.try_into()?;
+                    match val {
+                        Some(m) => {
+                            let filter = Some(extract_filter(m, model)?);
+                            Ok(QueryArguments { filter, ..result })
                         }
+                        None => Ok(result),
                     }
-
-                    _ => Ok(res),
                 }
-            } else {
-                result
+
+                _ => Ok(result),
             }
         },
     )?;
diff --git a/query-engine/dmmf/src/tests/mod.rs b/query-engine/dmmf/src/tests/mod.rs
index 8af01092834d..4d25aba3bab7 100644
--- a/query-engine/dmmf/src/tests/mod.rs
+++ b/query-engine/dmmf/src/tests/mod.rs
@@ -1,2 +1,4 @@
+#![allow(clippy::module_inception)]
+
 mod setup;
 mod tests;
diff --git a/query-engine/js-connectors/src/proxy.rs b/query-engine/js-connectors/src/proxy.rs
index 8d3346851bfe..05dd1e00bd0f 100644
--- a/query-engine/js-connectors/src/proxy.rs
+++ b/query-engine/js-connectors/src/proxy.rs
@@ -591,7 +591,7 @@ mod proxy_test {
         let json_value = serde_json::Value::String(s.to_string());
         let quaint_value = js_value_to_quaint(json_value, column_type);
 
-        let date = NaiveDate::from_ymd_opt(2023, 01, 01).unwrap();
+        let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap();
         assert_eq!(quaint_value, QuaintValue::Date(Some(date)));
     }
 
@@ -621,7 +621,7 @@ mod proxy_test {
         let json_value = serde_json::Value::String(s.to_string());
         let quaint_value = js_value_to_quaint(json_value, column_type);
 
-        let datetime = NaiveDate::from_ymd_opt(2023, 01, 01)
+        let datetime = NaiveDate::from_ymd_opt(2023, 1, 1)
             .unwrap()
             .and_hms_opt(23, 59, 59)
             .unwrap();
diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs
index 195e5412084f..a70bcfcf2033 100644
--- a/query-engine/metrics/src/lib.rs
+++ b/query-engine/metrics/src/lib.rs
@@ -559,7 +559,7 @@ mod tests {
         global_labels.insert("global_one".to_string(), "one".to_string());
 
         let prometheus = metrics.to_prometheus(global_labels);
-        let snapshot = expect_test::expect![[r##"
+        let snapshot = expect_test::expect![[r#"
            # HELP counter_1
            # TYPE counter_1 counter
            counter_1{global_one="one",global_two="two",label="one"} 4
@@ -608,7 +608,7 @@ mod tests {
            histogram_2_sum{global_one="one",global_two="two"} 1000
            histogram_2_count{global_one="one",global_two="two"} 1
 
-        "##]];
+        "#]];
 
         snapshot.assert_eq(&prometheus);
     }
diff --git a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs
index 65de2353d24a..208705268c1e 100644
--- a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs
+++ b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs
@@ -414,6 +414,7 @@ impl<'a> JsonProtocolAdapter<'a> {
 }
 
 #[cfg(test)]
+#[allow(clippy::needless_raw_string_hashes)]
 mod tests {
     use super::*;
     use insta::assert_debug_snapshot;
diff --git a/query-engine/schema/src/build/input_types/objects/filter_objects.rs b/query-engine/schema/src/build/input_types/objects/filter_objects.rs
index 37337de3d8cf..b8af982182a1 100644
--- a/query-engine/schema/src/build/input_types/objects/filter_objects.rs
+++ b/query-engine/schema/src/build/input_types/objects/filter_objects.rs
@@ -179,7 +179,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In
     );
 
     fields.extend(compound_id_field);
-    fields.extend(boolean_operators.into_iter());
+    fields.extend(boolean_operators);
     fields.extend(
         rest_fields
             .into_iter()
diff --git a/query-engine/schema/src/build/utils.rs b/query-engine/schema/src/build/utils.rs
index 3828429a6393..4eeafcb23c82 100644
--- a/query-engine/schema/src/build/utils.rs
+++ b/query-engine/schema/src/build/utils.rs
@@ -81,7 +81,7 @@ pub(crate) fn input_field<'a>(
 
 /// Appends an option of type T to a vector over T if the option is Some.
 pub(crate) fn append_opt<T>(vec: &mut Vec<T>, opt: Option<T>) {
-    vec.extend(opt.into_iter())
+    vec.extend(opt)
 }
 
 /// Computes a compound field name based on an index.
diff --git a/schema-engine/cli/tests/cli_tests.rs b/schema-engine/cli/tests/cli_tests.rs
index a0aee7001e03..18866f9b1c0a 100644
--- a/schema-engine/cli/tests/cli_tests.rs
+++ b/schema-engine/cli/tests/cli_tests.rs
@@ -417,7 +417,7 @@ fn introspect_sqlite_empty_database() {
         let mut response = String::new();
         stdout.read_line(&mut response).unwrap();
 
-        assert!(response.starts_with(r##"{"jsonrpc":"2.0","error":{"code":4466,"message":"An error happened. Check the data field for details.","data":{"is_panic":false,"message":"The introspected database was empty.","meta":null,"error_code":"P4001"}},"id":1}"##));
+        assert!(response.starts_with(r#"{"jsonrpc":"2.0","error":{"code":4466,"message":"An error happened. Check the data field for details.","data":{"is_panic":false,"message":"The introspected database was empty.","meta":null,"error_code":"P4001"}},"id":1}"#));
     })
 }
 
@@ -672,6 +672,6 @@ fn introspect_e2e() {
 
     dbg!("response: {:?}", &response);
 
-    assert!(response.starts_with(r##"{"jsonrpc":"2.0","result":{"datamodel":"datasource db {\n provider = \"sqlite\"\n url = env(\"TEST_DATABASE_URL\")\n}\n","warnings":[]},"##));
+    assert!(response.starts_with(r#"{"jsonrpc":"2.0","result":{"datamodel":"datasource db {\n provider = \"sqlite\"\n url = env(\"TEST_DATABASE_URL\")\n}\n","warnings":[]},"#));
     });
 }
diff --git a/schema-engine/connectors/schema-connector/src/namespaces.rs b/schema-engine/connectors/schema-connector/src/namespaces.rs
index 0f18c2ecdb2d..d35f82df0f0d 100644
--- a/schema-engine/connectors/schema-connector/src/namespaces.rs
+++ b/schema-engine/connectors/schema-connector/src/namespaces.rs
@@ -34,6 +34,6 @@ impl IntoIterator for Namespaces {
     type IntoIter = std::iter::Chain<std::iter::Once<String>, <Vec<String> as IntoIterator>::IntoIter>;
 
     fn into_iter(self) -> Self::IntoIter {
-        std::iter::once(self.0).chain(self.1.into_iter())
+        std::iter::once(self.0).chain(self.1)
     }
 }
diff --git a/schema-engine/connectors/sql-schema-connector/src/flavour/mysql.rs b/schema-engine/connectors/sql-schema-connector/src/flavour/mysql.rs
index 2c84f208cab5..1efb4611bb50 100644
--- a/schema-engine/connectors/sql-schema-connector/src/flavour/mysql.rs
+++ b/schema-engine/connectors/sql-schema-connector/src/flavour/mysql.rs
@@ -17,7 +17,7 @@
 use std::future;
 use url::Url;
 
 const ADVISORY_LOCK_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
-static QUALIFIED_NAME_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"`[^ ]+`\.`[^ ]+`"#).unwrap());
+static QUALIFIED_NAME_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"`[^ ]+`\.`[^ ]+`").unwrap());
 
 type State = super::State<Params, (BitFlags<Circumstances>, Connection)>;
diff --git a/schema-engine/connectors/sql-schema-connector/src/migration_pair.rs b/schema-engine/connectors/sql-schema-connector/src/migration_pair.rs
index 93a872dccb84..fcd3c7021ed6 100644
--- a/schema-engine/connectors/sql-schema-connector/src/migration_pair.rs
+++ b/schema-engine/connectors/sql-schema-connector/src/migration_pair.rs
@@ -29,7 +29,7 @@ impl<T> MigrationPair<T> {
     {
         f(&self.previous)
             .into_iter()
-            .zip(f(&self.next).into_iter())
+            .zip(f(&self.next))
             .map(MigrationPair::from)
     }
 
diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_renderer/postgres_renderer.rs b/schema-engine/connectors/sql-schema-connector/src/sql_renderer/postgres_renderer.rs
index f466594de0ac..fdebc14f89b2 100644
--- a/schema-engine/connectors/sql-schema-connector/src/sql_renderer/postgres_renderer.rs
+++ b/schema-engine/connectors/sql-schema-connector/src/sql_renderer/postgres_renderer.rs
@@ -310,7 +310,7 @@ impl SqlRenderer for PostgresFlavour {
         if self.is_cockroachdb() {
             let mut out = Vec::with_capacity(before_statements.len() + after_statements.len() + lines.len());
 
-            out.extend(before_statements.into_iter());
+            out.extend(before_statements);
             for line in lines {
                 out.push(format!(
                     "ALTER TABLE {} {}",
@@ -318,7 +318,7 @@ impl SqlRenderer for PostgresFlavour {
                     line
                 ))
             }
-            out.extend(after_statements.into_iter());
+            out.extend(after_statements);
             out
         } else {
             let alter_table = format!(
@@ -330,7 +330,7 @@ impl SqlRenderer for PostgresFlavour {
             before_statements
                 .into_iter()
                 .chain(std::iter::once(alter_table))
-                .chain(after_statements.into_iter())
+                .chain(after_statements)
                 .collect()
         }
     }
diff --git a/schema-engine/datamodel-renderer/src/value/constant.rs b/schema-engine/datamodel-renderer/src/value/constant.rs
index be306a7bfbfc..a3169a721620 100644
--- a/schema-engine/datamodel-renderer/src/value/constant.rs
+++ b/schema-engine/datamodel-renderer/src/value/constant.rs
@@ -6,7 +6,7 @@
 pub struct Constant<T>(pub(crate) T);
 
 impl<'a> Clone for Constant<&'a str> {
     fn clone(&self) -> Self {
-        Constant(self.0)
+        *self
     }
 }
diff --git a/schema-engine/sql-introspection-tests/tests/cockroachdb/constraints.rs b/schema-engine/sql-introspection-tests/tests/cockroachdb/constraints.rs
index 848078a4c786..264cbe77cf27 100644
--- a/schema-engine/sql-introspection-tests/tests/cockroachdb/constraints.rs
+++ b/schema-engine/sql-introspection-tests/tests/cockroachdb/constraints.rs
@@ -57,7 +57,7 @@ async fn aragon_test_cockroachdb(api: &mut TestApi) -> TestResult {
 
 #[test_connector(tags(CockroachDb))]
 async fn noalyss_folder_test_cockroachdb(api: &mut TestApi) -> TestResult {
-    let raw_sql = indoc! {r#"
+    let raw_sql = indoc! {r"
        CREATE TABLE user_active_security (
            id BIGSERIAL NOT NULL,
            us_login STRING NOT NULL,
@@ -83,7 +83,7 @@ async fn noalyss_folder_test_cockroachdb(api: &mut TestApi) -> TestResult {
            is_public CHAR(1) NOT NULL DEFAULT 'N',
            CONSTRAINT ck_is_public CHECK (is_public = ANY ARRAY['Y':::STRING::CHAR, 'N':::STRING::CHAR]:::CHAR[])
        );
-    "#};
+    "};
 
     api.raw_cmd(raw_sql).await;
diff --git a/schema-engine/sql-introspection-tests/tests/postgres/constraints.rs b/schema-engine/sql-introspection-tests/tests/postgres/constraints.rs
index b745d5502979..43796ad6c8a8 100644
--- a/schema-engine/sql-introspection-tests/tests/postgres/constraints.rs
+++ b/schema-engine/sql-introspection-tests/tests/postgres/constraints.rs
@@ -57,7 +57,7 @@ async fn aragon_test_postgres(api: &mut TestApi) -> TestResult {
 
 #[test_connector(tags(Postgres), exclude(CockroachDb))]
 async fn noalyss_folder_test_postgres(api: &mut TestApi) -> TestResult {
-    let raw_sql = indoc! {r#"
+    let raw_sql = indoc! {r"
        CREATE TABLE user_active_security (
            id BIGSERIAL NOT NULL,
            us_login TEXT NOT NULL,
@@ -81,7 +81,7 @@ async fn noalyss_folder_test_postgres(api: &mut TestApi) -> TestResult {
            tl_id BIGSERIAL PRIMARY KEY,
            is_public CHAR(1) NOT NULL DEFAULT 'N' CHECK (is_public IN ('Y', 'N'))
        );
-    "#};
+    "};
 
     api.raw_cmd(raw_sql).await;
diff --git a/schema-engine/sql-introspection-tests/tests/tables/mod.rs b/schema-engine/sql-introspection-tests/tests/tables/mod.rs
index 154aa213ce38..4af098c1c9f3 100644
--- a/schema-engine/sql-introspection-tests/tests/tables/mod.rs
+++ b/schema-engine/sql-introspection-tests/tests/tables/mod.rs
@@ -12,14 +12,14 @@
 use sql_introspection_tests::test_api::*;
 
 #[test_connector(tags(Mysql57))]
 async fn nul_default_bytes(api: &mut TestApi) -> TestResult {
-    let create_table = indoc! {r#"
+    let create_table = indoc! {r"
        CREATE TABLE nul_default_bytes
        (
            id INT NOT NULL
                PRIMARY KEY,
            val BINARY(5) DEFAULT '\0\0\0\0\0' NOT NULL
        )
-    "#};
+    "};
 
     api.database().raw_cmd(create_table).await?;
@@ -185,12 +185,12 @@ async fn a_table_with_unique_index(api: &mut TestApi) -> TestResult {
         })
         .await?;
 
-    let dm = indoc! {r##"
+    let dm = indoc! {r#"
        model Blog {
          id       Int @id @default(autoincrement())
          authorId Int @unique(map: "test")
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
@@ -212,14 +212,14 @@ async fn a_table_with_multi_column_unique_index(api: &mut TestApi) -> TestResult
         })
         .await?;
 
-    let dm = indoc! {r##"
+    let dm = indoc! {r#"
        model User {
          id        Int @id @default(autoincrement())
          firstname Int
         lastname  Int
         @@unique([firstname, lastname], map: "test")
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
@@ -314,13 +314,13 @@ async fn a_table_with_a_non_unique_index(api: &mut TestApi) -> TestResult {
         })
         .await?;
 
-    let dm = indoc! {r##"
+    let dm = indoc! {r#"
        model User {
         a  Int
         id Int @id @default(autoincrement())
         @@index([a], map: "test")
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
@@ -343,14 +343,14 @@ async fn a_table_with_a_multi_column_non_unique_index(api: &mut TestApi) -> Test
         })
         .await?;
 
-    let dm = indoc! { r##"
+    let dm = indoc! { r#"
        model User {
         a  Int
         b  Int
         id Int @id @default(autoincrement())
         @@index([a,b], map: "test")
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
@@ -640,7 +640,7 @@ async fn different_default_values_should_work(api: &mut TestApi) -> TestResult {
         })
         .await?;
 
-    let dm = indoc! {r##"
+    let dm = indoc! {r#"
        model Blog {
         id   Int     @id @default(autoincrement())
         text String? @default("one") @db.Text
@@ -650,7 +650,7 @@ async fn different_default_values_should_work(api: &mut TestApi) -> TestResult {
         tinytext_float String @default(dbgenerated("(1.0)")) @db.TinyText
         tinytext_short String @default(dbgenerated("(1)")) @db.TinyText
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
@@ -782,11 +782,11 @@ async fn unique_and_index_on_same_field_works_mysql(api: &mut TestApi) -> TestRe
 
     api.raw_cmd(setup).await;
 
-    let dm = indoc! {r##"
+    let dm = indoc! {r#"
        model users {
         id BigInt @id @unique(map: "id") @default(autoincrement()) @db.UnsignedBigInt
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
@@ -805,11 +805,11 @@ async fn unique_and_index_on_same_field_works_mariadb(api: &mut TestApi) -> Test
 
     api.raw_cmd(setup).await;
 
-    let dm = indoc! {r##"
+    let dm = indoc! {r#"
        model users {
         id Int @id @unique(map: "really_must_be_different")
        }
-    "##};
+    "#};
 
     let result = api.introspect().await?;
     api.assert_eq_datamodels(dm, &result);
diff --git a/schema-engine/sql-migration-tests/tests/migrations/sql.rs b/schema-engine/sql-migration-tests/tests/migrations/sql.rs
index aa3500b724d8..36486e7c18a6 100644
--- a/schema-engine/sql-migration-tests/tests/migrations/sql.rs
+++ b/schema-engine/sql-migration-tests/tests/migrations/sql.rs
@@ -155,7 +155,7 @@ fn reserved_sql_keywords_must_work(api: TestApi) {
 
 #[test_connector(capabilities(Enums))]
 fn enum_value_with_database_names_must_work(api: TestApi) {
-    let dm = r##"
+    let dm = r#"
        model Cat {
            id String @id
            mood CatMood
@@ -165,7 +165,7 @@ fn enum_value_with_database_names_must_work(api: TestApi) {
            ANGRY
            HUNGRY @map("hongry")
        }
-    "##;
+    "#;
 
     api.schema_push_w_datasource(dm)
         .migration_id(Some("initial"))
@@ -182,7 +182,7 @@ fn enum_value_with_database_names_must_work(api: TestApi) {
             .assert_enum("CatMood", |enm| enm.assert_values(&["ANGRY", "hongry"]));
     }
 
-    let dm = r##"
+    let dm = r#"
        model Cat {
            id String @id
            mood CatMood
@@ -192,7 +192,7 @@ fn enum_value_with_database_names_must_work(api: TestApi) {
            ANGRY
            HUNGRY @map("hongery")
        }
-    "##;
+    "#;
 
     if api.is_mysql() {
         api.schema_push_w_datasource(dm).force(true).send().assert_warnings(&["The values [hongry] on the enum `Cat_mood` will be removed. If these variants are still used in the database, this will fail.".into()]);
@@ -210,7 +210,7 @@ fn enum_value_with_database_names_must_work(api: TestApi) {
 
 #[test_connector(capabilities(Enums))]
 fn enum_defaults_must_work(api: TestApi) {
-    let dm = r##"
+    let dm = r#"
        model Cat {
            id String @id
            mood CatMood @default(HUNGRY)
@@ -221,7 +221,7 @@ fn enum_defaults_must_work(api: TestApi) {
            ANGRY
            HUNGRY @map("hongry")
        }
-    "##;
+    "#;
 
     api.schema_push_w_datasource(dm)
         .migration_id(Some("initial"))
@@ -318,7 +318,7 @@ fn multi_field_id_as_part_of_relation_must_work(api: TestApi) {
 
 #[test_connector(exclude(Vitess))]
 fn remapped_multi_field_id_as_part_of_relation_must_work(api: TestApi) {
-    let dm = r##"
+    let dm = r#"
        model Cat {
            nemesis_name String @map("dogname")
            nemesis_weight Int @map("dogweight")
@@ -334,7 +334,7 @@ fn remapped_multi_field_id_as_part_of_relation_must_work(api: TestApi) {
 
            @@id([name, weight])
        }
-    "##;
+    "#;
 
     api.schema_push_w_datasource(dm).send().assert_green();
diff --git a/schema-engine/sql-migration-tests/tests/native_types/mssql.rs b/schema-engine/sql-migration-tests/tests/native_types/mssql.rs
index e6c722566253..32ac24688601 100644
--- a/schema-engine/sql-migration-tests/tests/native_types/mssql.rs
+++ b/schema-engine/sql-migration-tests/tests/native_types/mssql.rs
@@ -198,7 +198,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| {
         ),
         (
             "Date",
-            Value::date(Utc::today().naive_utc()),
+            Value::date(Utc::now().naive_utc().date()),
             &[
                 "DateTime",
                 "DateTime2",
@@ -728,7 +728,7 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| {
                 "NVarChar",
             ],
         ),
-        ("Date", Value::date(Utc::today().naive_utc()), &["SmallDateTime"]),
+        ("Date", Value::date(Utc::now().naive_utc().date()), &["SmallDateTime"]),
         (
             "Time",
             Value::time(Utc::now().naive_utc().time()),
@@ -1537,7 +1537,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| {
         ),
         (
             "Date",
-            Value::date(Utc::today().naive_utc()),
+            Value::date(Utc::now().naive_utc().date()),
             &[
                 "TinyInt",
                 "SmallInt",
diff --git a/schema-engine/sql-migration-tests/tests/native_types/postgres.rs b/schema-engine/sql-migration-tests/tests/native_types/postgres.rs
index d7edc13933e0..1e114f147e5d 100644
--- a/schema-engine/sql-migration-tests/tests/native_types/postgres.rs
+++ b/schema-engine/sql-migration-tests/tests/native_types/postgres.rs
@@ -107,7 +107,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| {
         ),
         (
             "Date",
-            Value::date(Utc::today().naive_utc()),
+            Value::date(Utc::now().naive_utc().date()),
             &["VarChar(53)", "Char(28)", "Text", "Timestamp(3)", "Timestamptz(3)"],
         ),
         (
@@ -498,7 +498,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| {
         ),
         (
             "Date",
-            Value::date(Utc::today().naive_utc()),
+            Value::date(Utc::now().naive_utc().date()),
             &[
                 "SmallInt",
                 "Integer",
@@ -1083,7 +1083,7 @@ static SAFE_CASTS_NON_LIST_TO_STRING: CastList = Lazy::new(|| {
         ("ByteA", Value::array(vec![Value::bytes(b"DEAD".to_vec())])),
         ("Timestamp(3)", Value::array(vec![Value::datetime(Utc::now())])),
         ("Timestamptz(3)", Value::array(vec![Value::datetime(Utc::now())])),
-        ("Date", Value::array(vec![Value::date(Utc::today().naive_utc())])),
+        ("Date", Value::array(vec![Value::date(Utc::now().naive_utc().date())])),
         (
             "Time(3)",
             Value::array(vec![Value::time(Utc::now().naive_utc().time())]),
@@ -1122,7 +1122,7 @@ static SAFE_CASTS_NON_LIST_TO_STRING: CastList = Lazy::new(|| {
         ("ByteA", Value::array(vec![Value::bytes(b"DEAD".to_vec())])),
         ("Timestamp(3)", Value::array(vec![Value::datetime(Utc::now())])),
         ("Timestamptz(3)", Value::array(vec![Value::datetime(Utc::now())])),
-        ("Date", Value::array(vec![Value::date(Utc::today().naive_utc())])),
+        ("Date", Value::array(vec![Value::date(Utc::now().naive_utc().date())])),
         (
             "Time(3)",
             Value::array(vec![Value::time(Utc::now().naive_utc().time())]),
diff --git a/schema-engine/sql-schema-describer/src/mysql.rs b/schema-engine/sql-schema-describer/src/mysql.rs
index 68e2f64e07a5..0d5fd98140dd 100644
--- a/schema-engine/sql-schema-describer/src/mysql.rs
+++ b/schema-engine/sql-schema-describer/src/mysql.rs
@@ -768,7 +768,7 @@ impl<'a> SqlSchemaDescriber<'a> {
     }
 
     fn extract_precision(input: &str) -> Option {
-        static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#".*\(([1-9])\)"#).unwrap());
+        static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r".*\(([1-9])\)").unwrap());
         RE.captures(input)
             .and_then(|cap| cap.get(1).map(|precision| precision.as_str().parse::().unwrap()))
     }
@@ -778,8 +778,8 @@ impl<'a> SqlSchemaDescriber<'a> {
     // In addition, MariaDB will return string literals with the quotes and extra backslashes around
     // control characters like `\n`.
     fn unescape_and_unquote_default_string(default: String, flavour: &Flavour) -> String {
-        static MYSQL_ESCAPING_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\\('|\\[^\\])|'(')"#).unwrap());
-        static MARIADB_NEWLINE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\\n"#).unwrap());
+        static MYSQL_ESCAPING_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\\('|\\[^\\])|'(')").unwrap());
+        static MARIADB_NEWLINE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\\n").unwrap());
         static MARIADB_DEFAULT_QUOTE_UNESCAPE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"'(.*)'"#).unwrap());
 
         let maybe_unquoted: Cow<'_, str> = if matches!(flavour, Flavour::MariaDb) {
@@ -799,7 +799,7 @@ impl<'a> SqlSchemaDescriber<'a> {
     /// Tests whether an introspected default value should be categorized as current_timestamp.
     fn default_is_current_timestamp(default_str: &str) -> bool {
         static MYSQL_CURRENT_TIMESTAMP_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r#"(?i)^current_timestamp(\([0-9]*\))?$"#).unwrap());
+            Lazy::new(|| Regex::new(r"(?i)^current_timestamp(\([0-9]*\))?$").unwrap());
 
         MYSQL_CURRENT_TIMESTAMP_RE.is_match(default_str)
     }
diff --git a/schema-engine/sql-schema-describer/src/postgres.rs b/schema-engine/sql-schema-describer/src/postgres.rs
index 77dc04ffe51a..10a6bd76cb53 100644
--- a/schema-engine/sql-schema-describer/src/postgres.rs
+++ b/schema-engine/sql-schema-describer/src/postgres.rs
@@ -1004,7 +1004,7 @@ impl<'a> SqlSchemaDescriber<'a> {
         let (character_maximum_length, numeric_precision, numeric_scale, time_precision) =
             if matches!(col.get_expect_string("data_type").as_str(), "ARRAY") {
                 fn get_single(formatted_type: &str) -> Option {
-                    static SINGLE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#".*\(([0-9]*)\).*\[\]$"#).unwrap());
+                    static SINGLE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r".*\(([0-9]*)\).*\[\]$").unwrap());
 
                     SINGLE_REGEX
                         .captures(formatted_type)
@@ -1014,7 +1014,7 @@ impl<'a> SqlSchemaDescriber<'a> {
 
                 fn get_dual(formatted_type: &str) -> (Option, Option) {
                     static DUAL_REGEX: Lazy<Regex> =
-                        Lazy::new(|| Regex::new(r#"numeric\(([0-9]*),([0-9]*)\)\[\]$"#).unwrap());
+                        Lazy::new(|| Regex::new(r"numeric\(([0-9]*),([0-9]*)\)\[\]$").unwrap());
                     let first = DUAL_REGEX
                         .captures(formatted_type)
                         .and_then(|cap| cap.get(1).and_then(|precision| precision.as_str().parse().ok()));
diff --git a/schema-engine/sql-schema-describer/tests/describers/mysql_describer_tests.rs b/schema-engine/sql-schema-describer/tests/describers/mysql_describer_tests.rs
index c28628e4012a..f0ce2833988e 100644
--- a/schema-engine/sql-schema-describer/tests/describers/mysql_describer_tests.rs
+++ b/schema-engine/sql-schema-describer/tests/describers/mysql_describer_tests.rs
@@ -3018,11 +3018,11 @@ fn escaped_quotes_in_string_defaults_must_be_unescaped(api: TestApi) {
 
 #[test_connector(tags(Mysql))]
 fn escaped_backslashes_in_string_literals_must_be_unescaped(api: TestApi) {
-    let create_table = r#"
+    let create_table = r"
        CREATE TABLE test (
            `model_name_space` VARCHAR(255) NOT NULL DEFAULT 'xyz\\Datasource\\Model'
        )
-    "#;
+    ";
 
     api.raw_cmd(create_table);
diff --git a/schema-engine/sql-schema-describer/tests/describers/postgres_describer_tests.rs b/schema-engine/sql-schema-describer/tests/describers/postgres_describer_tests.rs
index e6c9c2b41c42..0ce7cee65812 100644
--- a/schema-engine/sql-schema-describer/tests/describers/postgres_describer_tests.rs
+++ b/schema-engine/sql-schema-describer/tests/describers/postgres_describer_tests.rs
@@ -1242,7 +1242,7 @@ fn postgres_sequences_must_work(api: TestApi) {
 
 #[test_connector(tags(Postgres), exclude(CockroachDb))]
 fn postgres_multi_field_indexes_must_be_inferred_in_the_right_order(api: TestApi) {
-    let schema = r##"
+    let schema = r#"
        CREATE TABLE "indexes_test" (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
@@ -1251,7 +1251,7 @@ fn postgres_multi_field_indexes_must_be_inferred_in_the_right_order(api: TestApi
 
        CREATE UNIQUE INDEX "my_idx" ON "indexes_test" (name, age);
        CREATE INDEX "my_idx2" ON "indexes_test" (age, name);
-    "##;
+    "#;
 
     api.raw_cmd(schema);
     let schema = api.describe();
diff --git a/schema-engine/sql-schema-describer/tests/test_api/mod.rs b/schema-engine/sql-schema-describer/tests/test_api/mod.rs
index 1c10bfc23609..17a94589ddbd 100644
--- a/schema-engine/sql-schema-describer/tests/test_api/mod.rs
+++ b/schema-engine/sql-schema-describer/tests/test_api/mod.rs
@@ -179,9 +179,9 @@ impl SqlSchemaAssertionsExt for SqlSchema {
     }
 
     fn assert_not_namespace(&self, namespace_name: &str) -> &Self {
-        self.walk_namespaces()
-            .find(|ns| ns.name() == namespace_name)
-            .map::<(), _>(|_x| panic!("Found unexpected namespace '{namespace_name}'"));
+        if self.walk_namespaces().any(|ns| ns.name() == namespace_name) {
+            panic!("Found unexpected namespace '{namespace_name}'")
+        }
 
         self
     }
 }