diff --git a/.cargo/config b/.cargo/config
index b9bbf9123..d928bf34f 100644
--- a/.cargo/config
+++ b/.cargo/config
@@ -3,3 +3,82 @@
 gen-syntax = "run --package tools --bin tools -- gen-syntax"
 gen-runtime-capi = "run --package tools --bin tools -- gen-runtime-capi"
 gen-abi = "run --package tools --bin tools -- gen-abi"
+
+[target.'cfg(all())']
+rustflags = [
+    "-Wclippy::all",
+    "-Wclippy::await_holding_lock",
+    "-Wclippy::bool-to-int-with-if",
+    "-Wclippy::cast_lossless",
+    "-Wclippy::char_lit_as_u8",
+    "-Wclippy::checked_conversions",
+    "-Wclippy::debug_assert_with_mut_call",
+    "-Wclippy::default_trait_access",
+    "-Wclippy::doc_markdown",
+    "-Wclippy::empty_enum",
+    "-Wclippy::enum_glob_use",
+    "-Wclippy::expl_impl_clone_on_copy",
+    "-Wclippy::explicit_deref_methods",
+    "-Wclippy::explicit_into_iter_loop",
+    "-Wclippy::fallible_impl_from",
+    "-Wclippy::filter_map_next",
+    "-Wclippy::flat_map_option",
+    "-Wclippy::float_cmp_const",
+    "-Wclippy::fn_params_excessive_bools",
+    "-Wclippy::from_iter_instead_of_collect",
+    "-Wclippy::if-not-else",
+    "-Wclippy::implicit_clone",
+    "-Wclippy::imprecise_flops",
+    "-Wclippy::inconsistent_struct_constructor",
+    "-Wclippy::inefficient_to_string",
+    "-Wclippy::invalid_upcast_comparisons",
+    "-Wclippy::items-after-statements",
+    "-Wclippy::large_digit_groups",
+    "-Wclippy::large_stack_arrays",
+    "-Wclippy::large_types_passed_by_value",
+    "-Wclippy::let_unit_value",
+    "-Wclippy::linkedlist",
+    "-Wclippy::lossy_float_literal",
+    "-Wclippy::macro_use_imports",
+    "-Wclippy::manual-assert",
+    "-Wclippy::manual_ok_or",
+    "-Wclippy::map_err_ignore",
+    "-Wclippy::map_flatten",
+    "-Wclippy::map_unwrap_or",
+    "-Wclippy::match_on_vec_items",
+    "-Wclippy::match_same_arms",
+    "-Wclippy::match_wild_err_arm",
+    "-Wclippy::match_wildcard_for_single_variants",
+    "-Wclippy::mem_forget",
+    "-Wclippy::missing_enforced_import_renames",
+    "-Wclippy::mut_mut",
+    "-Wclippy::mutex_integer",
+    "-Wclippy::needless_borrow",
+    "-Wclippy::needless_continue",
+    "-Wclippy::needless_for_each",
+    "-Wclippy::option_option",
+    "-Wclippy::path_buf_push_overwrite",
+    "-Wclippy::ptr_as_ptr",
+    "-Wclippy::rc_mutex",
+    "-Wclippy::redundant_closure_for_method_calls",
+    "-Wclippy::ref_option_ref",
+    "-Wclippy::rest_pat_in_fully_bound_structs",
+    "-Wclippy::same_functions_in_if_condition",
+    "-Wclippy::semicolon_if_nothing_returned",
+    "-Wclippy::single_match_else",
+    "-Wclippy::string_add_assign",
+    "-Wclippy::string_lit_as_bytes",
+    "-Wclippy::string_to_string",
+    "-Wclippy::todo",
+    "-Wclippy::trait_duplication_in_bounds",
+    "-Wclippy::uninlined_format_args",
+    "-Wclippy::unnested_or_patterns",
+    "-Wclippy::unused_self",
+    "-Wclippy::useless_transmute",
+    "-Wclippy::verbose_file_reads",
+    "-Wclippy::wildcard-imports",
+    "-Wclippy::zero_sized_map_values",
+    "-Wfuture_incompatible",
+    "-Wnonstandard_style",
+    "-Wrust_2018_idioms",
+]
diff --git a/cpp/include/mun/runtime_capi.h b/cpp/include/mun/runtime_capi.h
index 3c201349a..873cf059e 100644
--- a/cpp/include/mun/runtime_capi.h
+++ b/cpp/include/mun/runtime_capi.h
@@ -100,7 +100,7 @@ typedef MunRawGcPtr MunGcPtr;
 /**
  * Definition of an external function that is callable from Mun.
  *
- * The ownership of the contained TypeInfoHandles is considered to lie with this struct.
+ * The ownership of the contained `TypeInfoHandles` is considered to lie with this struct.
  */
 typedef struct MunExternalFunctionDefinition {
     /**
@@ -604,7 +604,7 @@ void mun_string_destroy(const char *string);
  *
  * # Safety
  *
- * Only call this function on an ErrorHandle once.
+ * Only call this function on an [`ErrorHandle`] once.
  */
 void mun_error_destroy(struct MunErrorHandle error);
diff --git a/crates/mun/src/ops/build.rs b/crates/mun/src/ops/build.rs
index e23f5a2b3..b678000d8 100644
--- a/crates/mun/src/ops/build.rs
+++ b/crates/mun/src/ops/build.rs
@@ -90,7 +90,7 @@ pub fn build(args: Args) -> Result {
                 )
             })?
         }
-        Some(path) => std::fs::canonicalize(Path::new(&path)).map_err(|_| {
+        Some(path) => std::fs::canonicalize(Path::new(&path)).map_err(|_error| {
             anyhow::anyhow!(
                 "'{}' does not refer to a valid manifest path",
                 path.display()
diff --git a/crates/mun/src/ops/init.rs b/crates/mun/src/ops/init.rs
index 50146905d..73c0a25cc 100644
--- a/crates/mun/src/ops/init.rs
+++ b/crates/mun/src/ops/init.rs
@@ -65,11 +65,12 @@ version="0.1.0"
 /// Shortcut function for creating new directories.
 pub fn create_dir(path: impl AsRef<Path>) -> anyhow::Result<()> {
     fs::create_dir(&path)
-        .map_err(|_| anyhow!("failed to create directory `{}`", path.as_ref().display()))
+        .map_err(|_error| anyhow!("failed to create directory `{}`", path.as_ref().display()))
 }
 
 /// Shortcut function for creating new files.
 pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> anyhow::Result<()> {
     let path = path.as_ref();
-    fs::write(path, contents.as_ref()).map_err(|_| anyhow!("failed to write `{}`", path.display()))
+    fs::write(path, contents.as_ref())
+        .map_err(|_error| anyhow!("failed to write `{}`", path.display()))
 }
diff --git a/crates/mun/src/ops/start.rs b/crates/mun/src/ops/start.rs
index a785e946e..fb4bf237e 100644
--- a/crates/mun/src/ops/start.rs
+++ b/crates/mun/src/ops/start.rs
@@ -36,19 +36,19 @@ pub fn start(args: Args) -> anyhow::Result {
             .invoke(&args.entry, ())
             .map_err(|e| anyhow!("{}", e))?;
 
-        println!("{result}")
+        println!("{result}");
     } else if return_type.equals::<f64>() {
         let result: f64 = runtime
             .invoke(&args.entry, ())
             .map_err(|e| anyhow!("{}", e))?;
 
-        println!("{result}")
+        println!("{result}");
     } else if return_type.equals::<i64>() {
         let result: i64 = runtime
             .invoke(&args.entry, ())
             .map_err(|e| anyhow!("{}", e))?;
 
-        println!("{result}")
+        println!("{result}");
     } else if return_type.equals::<()>() {
         #[allow(clippy::unit_arg)]
         runtime
diff --git a/crates/mun/tests/book.rs b/crates/mun/tests/book.rs
index ff46c9c01..24faa6a78 100644
--- a/crates/mun/tests/book.rs
+++ b/crates/mun/tests/book.rs
@@ -1 +1,4 @@
+#![allow(unused_attributes)]
+
+#[allow(clippy::all)]
 include!(concat!(env!("OUT_DIR"), "/skeptic-tests.rs"));
diff --git a/crates/mun_abi/src/assembly_info.rs b/crates/mun_abi/src/assembly_info.rs
index d8d0902e4..9fd62c963 100644
--- a/crates/mun_abi/src/assembly_info.rs
+++ b/crates/mun_abi/src/assembly_info.rs
@@ -76,7 +76,7 @@ mod tests {
         assert_eq!(assembly.dependencies().count(), dependencies.len());
 
         for (lhs, rhs) in assembly.dependencies().zip([FAKE_DEPENDENCY].iter()) {
-            assert_eq!(lhs, *rhs)
+            assert_eq!(lhs, *rhs);
         }
     }
 }
diff --git a/crates/mun_abi/src/dispatch_table.rs b/crates/mun_abi/src/dispatch_table.rs
index 8f3786358..4851f57fd 100644
--- a/crates/mun_abi/src/dispatch_table.rs
+++ b/crates/mun_abi/src/dispatch_table.rs
@@ -67,7 +67,7 @@ impl<'a> DispatchTable<'a> {
     ///
     /// This is generally not recommended, use with caution! Calling this method with an
     /// out-of-bounds index is _undefined behavior_ even if the resulting reference is not used.
-    /// For a safe alternative see [get_ptr](#method.get_ptr).
+    /// For a safe alternative see [`get_ptr`](#method.get_ptr).
/// /// # Safety /// @@ -89,7 +89,7 @@ impl<'a> DispatchTable<'a> { /// /// This is generally not recommended, use with caution! Calling this method with an /// out-of-bounds index is _undefined behavior_ even if the resulting reference is not used. - /// For a safe alternative see [get_ptr_mut](#method.get_ptr_mut). + /// For a safe alternative see [`get_ptr_mut`](#method.get_ptr_mut). /// /// # Safety /// diff --git a/crates/mun_abi/src/lib.rs b/crates/mun_abi/src/lib.rs index a494aa700..eb4b97b76 100644 --- a/crates/mun_abi/src/lib.rs +++ b/crates/mun_abi/src/lib.rs @@ -62,13 +62,6 @@ impl Guid { impl fmt::Display for Guid { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let hyphenated = format_hyphenated(&self.0); - - // SAFETY: The encoded buffer is ASCII encoded - let hyphenated = unsafe { std::str::from_utf8_unchecked(&hyphenated) }; - - return f.write_str(hyphenated); - #[inline] const fn format_hyphenated(src: &[u8; 16]) -> [u8; 36] { const LUT: [u8; 16] = [ @@ -99,6 +92,13 @@ impl fmt::Display for Guid { } dst } + + let hyphenated = format_hyphenated(&self.0); + + // SAFETY: The encoded buffer is ASCII encoded + let hyphenated = unsafe { std::str::from_utf8_unchecked(&hyphenated) }; + + f.write_str(hyphenated) } } diff --git a/crates/mun_abi/src/module_info.rs b/crates/mun_abi/src/module_info.rs index ed1e3562e..f3e3b6837 100644 --- a/crates/mun_abi/src/module_info.rs +++ b/crates/mun_abi/src/module_info.rs @@ -81,6 +81,7 @@ mod tests { use std::{ffi::CString, ptr}; use crate::type_id::HasStaticTypeId; + use crate::StructMemoryKind; use crate::{ test_utils::{ fake_fn_prototype, fake_module_info, fake_struct_definition, fake_type_definition, @@ -121,7 +122,8 @@ mod tests { let functions = &[fn_info]; let struct_name = CString::new(FAKE_STRUCT_NAME).expect("Invalid fake struct name"); - let struct_info = fake_struct_definition(&struct_name, &[], &[], &[], Default::default()); + let struct_info = + fake_struct_definition(&struct_name, &[], &[], &[], StructMemoryKind::default()); let type_info = fake_type_definition(&struct_name, 1, 1, TypeDefinitionData::Struct(struct_info)); let types = [type_info]; @@ -144,7 +146,7 @@ mod tests { ); } - let result_types: &[TypeDefinition] = module.types(); + let result_types: &[TypeDefinition<'_>] = module.types(); assert_eq!(result_types.len(), types.len()); for (lhs, rhs) in result_types.iter().zip(types.iter()) { assert_eq!(lhs, rhs); diff --git a/crates/mun_abi/src/primitive.rs b/crates/mun_abi/src/primitive.rs index 222a306ae..a587edb33 100644 --- a/crates/mun_abi/src/primitive.rs +++ b/crates/mun_abi/src/primitive.rs @@ -16,7 +16,7 @@ macro_rules! define_primitives { $( impl HasStaticTypeId for $ty { fn type_id() -> &'static $crate::TypeId<'static> { - const TYPE_ID: $crate::TypeId = $crate::TypeId::Concrete(Guid::from_str($name)); + const TYPE_ID: $crate::TypeId<'static> = $crate::TypeId::Concrete(Guid::from_str($name)); &TYPE_ID } } diff --git a/crates/mun_abi/src/static_type_map.rs b/crates/mun_abi/src/static_type_map.rs index 4461d5716..1354de21d 100644 --- a/crates/mun_abi/src/static_type_map.rs +++ b/crates/mun_abi/src/static_type_map.rs @@ -39,9 +39,10 @@ impl StaticTypeMap { // Insert the value into the map let old = map.borrow_mut().insert(TypeId::of::(), reference); - if old.is_some() { - panic!("StaticTypeMap value was reinitialized. This is a bug.") - } + assert!( + old.is_none(), + "StaticTypeMap value was reinitialized. This is a bug." 
+ ); reference } } diff --git a/crates/mun_abi/src/struct_info.rs b/crates/mun_abi/src/struct_info.rs index 481bd036a..32b330554 100644 --- a/crates/mun_abi/src/struct_info.rs +++ b/crates/mun_abi/src/struct_info.rs @@ -96,8 +96,6 @@ impl<'a> serde::Serialize for StructDefinition<'a> { use itertools::Itertools; use serde::ser::SerializeStruct; - let mut s = serializer.serialize_struct("StructInfo", 3)?; - #[derive(serde::Serialize)] struct Field<'a> { name: &'a str, @@ -105,6 +103,8 @@ impl<'a> serde::Serialize for StructDefinition<'a> { offset: &'a u16, } + let mut s = serializer.serialize_struct("StructInfo", 3)?; + s.serialize_field("guid", &self.guid)?; s.serialize_field( "fields", @@ -143,7 +143,7 @@ mod tests { field_names, field_types, field_offsets, - Default::default(), + StructMemoryKind::default(), ); assert_eq!(struct_info.field_names().count(), 0); @@ -165,12 +165,12 @@ mod tests { field_names, field_types, field_offsets, - Default::default(), + StructMemoryKind::default(), ); assert_eq!(struct_info.num_fields(), 1); for (lhs, rhs) in struct_info.field_names().zip([FAKE_FIELD_NAME].iter()) { - assert_eq!(lhs, *rhs) + assert_eq!(lhs, *rhs); } assert_eq!(struct_info.field_types(), field_types); assert_eq!(struct_info.field_offsets(), field_offsets); diff --git a/crates/mun_abi/src/test_utils.rs b/crates/mun_abi/src/test_utils.rs index 43dd72e30..143820ff0 100644 --- a/crates/mun_abi/src/test_utils.rs +++ b/crates/mun_abi/src/test_utils.rs @@ -11,7 +11,7 @@ use std::{ pub(crate) const FAKE_TYPE_GUID: Guid = Guid([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); -pub(crate) const FAKE_TYPE_ID: TypeId = TypeId::Concrete(FAKE_TYPE_GUID); +pub(crate) const FAKE_TYPE_ID: TypeId<'static> = TypeId::Concrete(FAKE_TYPE_GUID); pub(crate) const FAKE_DEPENDENCY: &str = "path/to/dependency.munlib"; pub(crate) const FAKE_FIELD_NAME: &str = "field_name"; pub(crate) const FAKE_FN_NAME: &str = "fn_name"; diff --git a/crates/mun_abi/src/type_id.rs b/crates/mun_abi/src/type_id.rs index 5aaeaf6e7..38d105deb 100644 --- a/crates/mun_abi/src/type_id.rs +++ b/crates/mun_abi/src/type_id.rs @@ -87,13 +87,13 @@ impl<'a> fmt::Display for ArrayTypeId<'a> { /// A trait that defines that for a type we can statically return a `TypeId`. pub trait HasStaticTypeId { - /// Returns a reference to the TypeInfo for the type + /// Returns a reference to the [`TypeInfo`] for the type fn type_id() -> &'static TypeId<'static>; } impl HasStaticTypeId for *const T { fn type_id() -> &'static TypeId<'static> { - static VALUE: OnceCell> = OnceCell::new(); + static VALUE: OnceCell>> = OnceCell::new(); let map = VALUE.get_or_init(Default::default); map.call_once::(|| { PointerTypeId { @@ -107,7 +107,7 @@ impl HasStaticTypeId for *const T { impl HasStaticTypeId for *mut T { fn type_id() -> &'static TypeId<'static> { - static VALUE: OnceCell> = OnceCell::new(); + static VALUE: OnceCell>> = OnceCell::new(); let map = VALUE.get_or_init(Default::default); map.call_once::(|| { PointerTypeId { diff --git a/crates/mun_abi/src/type_info.rs b/crates/mun_abi/src/type_info.rs index 816e71bde..7a51adba0 100644 --- a/crates/mun_abi/src/type_info.rs +++ b/crates/mun_abi/src/type_info.rs @@ -92,7 +92,7 @@ impl<'a> TypeDefinition<'a> { } /// Retrieves the type's struct information, if available. 
- pub fn as_struct(&self) -> Option<&StructDefinition> { + pub fn as_struct(&self) -> Option<&StructDefinition<'_>> { let TypeDefinitionData::Struct(s) = &self.data; Some(s) } @@ -147,7 +147,7 @@ impl<'a> TypeDefinitionData<'a> { /// A trait that defines that for a type we can statically return a type name. pub trait HasStaticTypeName { - /// Returns a reference to the TypeInfo for the type + /// Returns a reference to the [`TypeInfo`] for the type fn type_name() -> &'static CStr; } @@ -155,7 +155,10 @@ pub trait HasStaticTypeName { mod tests { use std::ffi::CString; - use crate::test_utils::{fake_struct_definition, fake_type_definition, FAKE_TYPE_NAME}; + use crate::{ + test_utils::{fake_struct_definition, fake_type_definition, FAKE_TYPE_NAME}, + StructMemoryKind, + }; use super::TypeDefinitionData; @@ -170,7 +173,7 @@ mod tests { field_names, field_types, field_offsets, - Default::default(), + StructMemoryKind::default(), ); let type_definition = @@ -189,7 +192,7 @@ mod tests { field_names, field_types, field_offsets, - Default::default(), + StructMemoryKind::default(), ); let type_definition = @@ -211,7 +214,7 @@ mod tests { field_names, field_types, field_offsets, - Default::default(), + StructMemoryKind::default(), ); let type_definition = @@ -230,7 +233,7 @@ mod tests { field_names, field_types, field_offsets, - Default::default(), + StructMemoryKind::default(), ); let type_definition = diff --git a/crates/mun_abi/src/type_lut.rs b/crates/mun_abi/src/type_lut.rs index b0f4a4c98..fe1b2a21d 100644 --- a/crates/mun_abi/src/type_lut.rs +++ b/crates/mun_abi/src/type_lut.rs @@ -23,8 +23,10 @@ pub struct TypeLut<'a> { impl<'a> TypeLut<'a> { /// Returns an iterator over pairs of type IDs and type handles. - pub fn iter(&self) -> impl Iterator { - let (type_ids, type_ptrs, type_names) = if self.num_entries != 0 { + pub fn iter(&self) -> impl Iterator, &*const ffi::c_void, &str)> { + let (type_ids, type_ptrs, type_names) = if self.num_entries == 0 { + (([]).iter(), ([]).iter(), ([]).iter()) + } else { let ptrs = unsafe { slice::from_raw_parts_mut(self.type_handles, self.num_entries as usize) }; let type_ids = @@ -33,8 +35,6 @@ impl<'a> TypeLut<'a> { unsafe { slice::from_raw_parts(self.type_names, self.num_entries as usize) }; (type_ids.iter(), ptrs.iter(), type_names.iter()) - } else { - (([]).iter(), ([]).iter(), ([]).iter()) }; izip!(type_ids, type_ptrs, type_names).map(|(id, ptr, type_name)| { @@ -45,8 +45,12 @@ impl<'a> TypeLut<'a> { } /// Returns an iterator over pairs of type IDs and mutable type handles. - pub fn iter_mut(&mut self) -> impl Iterator { - let (type_ids, type_ptrs, type_names) = if self.num_entries != 0 { + pub fn iter_mut( + &mut self, + ) -> impl Iterator, &mut *const ffi::c_void, &str)> { + let (type_ids, type_ptrs, type_names) = if self.num_entries == 0 { + (([]).iter(), ([]).iter_mut(), ([]).iter()) + } else { let ptrs = unsafe { slice::from_raw_parts_mut(self.type_handles, self.num_entries as usize) }; let type_ids = @@ -55,8 +59,6 @@ impl<'a> TypeLut<'a> { unsafe { slice::from_raw_parts(self.type_names, self.num_entries as usize) }; (type_ids.iter(), ptrs.iter_mut(), type_names.iter()) - } else { - (([]).iter(), ([]).iter_mut(), ([]).iter()) }; izip!(type_ids, type_ptrs, type_names).map(|(id, ptr, type_name)| { @@ -88,7 +90,7 @@ impl<'a> TypeLut<'a> { /// /// This is generally not recommended, use with caution! Calling this method with an /// out-of-bounds index is _undefined behavior_ even if the resulting reference is not used. 
- /// For a safe alternative see [get_ptr](#method.get_ptr). + /// For a safe alternative see [`get_ptr`](#method.get_ptr). /// /// # Safety /// @@ -110,7 +112,7 @@ impl<'a> TypeLut<'a> { /// /// This is generally not recommended, use with caution! Calling this method with an /// out-of-bounds index is _undefined behavior_ even if the resulting reference is not used. - /// For a safe alternative see [get_ptr_mut](#method.get_ptr_mut). + /// For a safe alternative see [`get_ptr_mut`](#method.get_ptr_mut). /// /// # Safety /// @@ -378,7 +380,7 @@ mod tests { let type_lut = fake_type_lut(type_ids, type_ptrs, type_names); for (lhs, rhs) in type_lut.type_names().zip([FAKE_TYPE_NAME].iter()) { - assert_eq!(lhs, *rhs) + assert_eq!(lhs, *rhs); } } } diff --git a/crates/mun_capi_utils/src/error.rs b/crates/mun_capi_utils/src/error.rs index dc5a804c8..05221eafd 100644 --- a/crates/mun_capi_utils/src/error.rs +++ b/crates/mun_capi_utils/src/error.rs @@ -8,6 +8,7 @@ use std::{ #[repr(C)] #[derive(Clone, Copy)] +#[allow(clippy::doc_markdown)] /// A C-style handle to an error message. /// /// If the handle contains a non-null pointer, an error occurred. @@ -62,7 +63,7 @@ impl>> From for ErrorHandle { /// /// # Safety /// -/// Only call this function on an ErrorHandle once. +/// Only call this function on an [`ErrorHandle`] once. #[no_mangle] pub unsafe extern "C" fn mun_error_destroy(error: ErrorHandle) { if !error.0.is_null() { diff --git a/crates/mun_capi_utils/src/lib.rs b/crates/mun_capi_utils/src/lib.rs index d56f692f2..92a8f76cc 100644 --- a/crates/mun_capi_utils/src/lib.rs +++ b/crates/mun_capi_utils/src/lib.rs @@ -20,7 +20,7 @@ pub unsafe extern "C" fn mun_string_destroy(string: *const c_char) { } } -/// Tries to convert a C style string pointer to a CStr. +/// Tries to convert a C style string pointer to a [`CStr`]. /// /// # Safety /// diff --git a/crates/mun_codegen/src/code_gen.rs b/crates/mun_codegen/src/code_gen.rs index 7d7d13e1c..f6c470041 100644 --- a/crates/mun_codegen/src/code_gen.rs +++ b/crates/mun_codegen/src/code_gen.rs @@ -17,7 +17,7 @@ pub mod symbols; /// Optimizes the specified LLVM `Module` using the default passes for the given /// `OptimizationLevel`. 
-fn optimize_module(module: &Module, optimization_lvl: OptimizationLevel) { +fn optimize_module(module: &Module<'_>, optimization_lvl: OptimizationLevel) { let pass_builder = PassManagerBuilder::create(); pass_builder.set_optimization_level(optimization_lvl); diff --git a/crates/mun_codegen/src/code_gen/object_file.rs b/crates/mun_codegen/src/code_gen/object_file.rs index 384ceaa97..d151876df 100644 --- a/crates/mun_codegen/src/code_gen/object_file.rs +++ b/crates/mun_codegen/src/code_gen/object_file.rs @@ -15,7 +15,7 @@ impl ObjectFile { pub fn new( target: &spec::Target, target_machine: &TargetMachine, - module: &inkwell::module::Module, + module: &inkwell::module::Module<'_>, ) -> Result { let obj = target_machine .write_to_memory_buffer(module, FileType::Object) diff --git a/crates/mun_codegen/src/code_gen/symbols/ir_type_builder.rs b/crates/mun_codegen/src/code_gen/symbols/ir_type_builder.rs index aa44a64af..6c3946857 100644 --- a/crates/mun_codegen/src/code_gen/symbols/ir_type_builder.rs +++ b/crates/mun_codegen/src/code_gen/symbols/ir_type_builder.rs @@ -24,11 +24,11 @@ impl<'ink, 'a, 'b, 'c> TypeIdBuilder<'ink, 'a, 'b, 'c> { pub fn new(context: &'a IrValueContext<'ink, 'b, 'c>) -> Self { Self { context, - interned_types: RefCell::new(Default::default()), + interned_types: RefCell::new(FxHashMap::default()), } } - /// Constructs an [`ir::TypeId`] from an internal TypeId. + /// Constructs an [`ir::TypeId`] from an internal [`TypeId`]. pub fn construct_from_type_id(&self, type_id: &Arc) -> ir::TypeId<'ink> { match &type_id.data { TypeIdData::Concrete(guid) => ir::TypeId::Concrete(*guid), @@ -48,26 +48,24 @@ impl<'ink, 'a, 'b, 'c> TypeIdBuilder<'ink, 'a, 'b, 'c> { /// Returns the global pointer to the specific type fn get_global_type_id(&self, type_id: &Arc) -> Global<'ink, ir::TypeId<'ink>> { - let global = match { - let borrow = self.interned_types.borrow(); - borrow.get(type_id.as_ref()).cloned() + if let Some(v) = { + let interned_types = self.interned_types.borrow(); + interned_types.get(type_id.as_ref()).cloned() } { - Some(v) => v, - None => { - let pointee_ir_type_id = self.construct_from_type_id(type_id); - let global = pointee_ir_type_id.as_value(self.context).into_global( - &type_id.name, - self.context, - true, - Linkage::Private, - Some(UnnamedAddress::Global), - ); - self.interned_types - .borrow_mut() - .insert(type_id.clone(), global); - global - } - }; - global + v + } else { + let pointee_ir_type_id = self.construct_from_type_id(type_id); + let global = pointee_ir_type_id.as_value(self.context).into_global( + &type_id.name, + self.context, + true, + Linkage::Private, + Some(UnnamedAddress::Global), + ); + self.interned_types + .borrow_mut() + .insert(type_id.clone(), global); + global + } } } diff --git a/crates/mun_codegen/src/code_gen/symbols/mod.rs b/crates/mun_codegen/src/code_gen/symbols/mod.rs index 17f362ff8..a81e9f742 100644 --- a/crates/mun_codegen/src/code_gen/symbols/mod.rs +++ b/crates/mun_codegen/src/code_gen/symbols/mod.rs @@ -30,7 +30,7 @@ fn gen_prototype_from_function<'ink>( db: &dyn HirDatabase, context: &IrValueContext<'ink, '_, '_>, function: mun_hir::Function, - hir_types: &HirTypeCache, + hir_types: &HirTypeCache<'_, 'ink>, ir_type_builder: &TypeIdBuilder<'ink, '_, '_, '_>, ) -> ir::FunctionPrototype<'ink> { let name = function.full_name(db); @@ -104,12 +104,12 @@ fn gen_prototype_from_dispatch_entry<'ink>( } /// Construct a global that holds a reference to all types. e.g.: -/// MunTypeInfo[] definitions = { ... 
} +/// `MunTypeInfo[] definitions = { ... }` fn get_type_definition_array<'ink>( db: &dyn HirDatabase, context: &IrValueContext<'ink, '_, '_>, types: impl Iterator, - hir_types: &HirTypeCache, + hir_types: &HirTypeCache<'_, 'ink>, ir_type_builder: &TypeIdBuilder<'ink, '_, '_, '_>, ) -> Value<'ink, *const ir::TypeDefinition<'ink>> { types @@ -156,7 +156,7 @@ fn gen_struct_info<'ink>( db: &dyn HirDatabase, hir_struct: mun_hir::Struct, context: &IrValueContext<'ink, '_, '_>, - hir_types: &HirTypeCache, + hir_types: &HirTypeCache<'_, 'ink>, ir_type_builder: &TypeIdBuilder<'ink, '_, '_, '_>, ) -> ir::StructDefinition<'ink> { let struct_ir = hir_types.get_struct_type(hir_struct); @@ -214,12 +214,12 @@ fn gen_struct_info<'ink>( } /// Construct a global that holds a reference to all functions. e.g.: -/// MunFunctionDefinition[] definitions = { ... } +/// `MunFunctionDefinition[] definitions = { ... }` fn get_function_definition_array<'ink, 'a>( db: &dyn HirDatabase, context: &IrValueContext<'ink, '_, '_>, functions: impl Iterator, - hir_types: &HirTypeCache, + hir_types: &HirTypeCache<'_, 'ink>, ir_type_builder: &TypeIdBuilder<'ink, '_, '_, '_>, ) -> Global<'ink, [ir::FunctionDefinition<'ink>]> { let module = context.module; @@ -258,7 +258,7 @@ fn get_function_definition_array<'ink, 'a>( /// ``` fn gen_type_lut<'ink>( context: &IrValueContext<'ink, '_, '_>, - type_table: &TypeTable, + type_table: &TypeTable<'ink>, ir_type_builder: &TypeIdBuilder<'ink, '_, '_, '_>, ) -> ir::TypeLut<'ink> { let module = context.module; @@ -281,14 +281,15 @@ fn gen_type_lut<'ink>( }) .into_const_private_pointer("fn.get_info.typeLut.typeNames", context); - let type_ptrs = TypeTable::find_global(module) - .map(|type_table| { + let type_ptrs = TypeTable::find_global(module).map_or_else( + || Value::null(context), + |type_table| { Value::<*mut *const std::ffi::c_void>::with_cast( type_table.as_value(context).value, context, ) - }) - .unwrap_or_else(|| Value::null(context)); + }, + ); ir::TypeLut { type_ids, @@ -317,15 +318,22 @@ fn gen_dispatch_table<'ink>( .into_const_private_pointer("fn.get_info.dispatchTable.signatures", context); // Get the pointer to the global table (or nullptr if no global table was defined). - let fn_ptrs = dispatch_table - .global_value() - .map(|_g| + let fn_ptrs = dispatch_table.global_value().map_or_else( + || Value::null(context), + |_g| { // TODO: This is a hack, the passed module here is a clone of the module with which the // dispatch table was created. Because of this we have to lookup the dispatch table // global again. There is however not a `GlobalValue::get_name` method so I just // hardcoded the name here. 
- Value::<*mut *const fn()>::with_cast(module.get_global("dispatchTable").unwrap().as_pointer_value(), context)) - .unwrap_or_else(|| Value::null(context)); + Value::<*mut *const fn()>::with_cast( + module + .get_global("dispatchTable") + .unwrap() + .as_pointer_value(), + context, + ) + }, + ); ir::DispatchTable { prototypes, @@ -458,7 +466,7 @@ fn gen_get_info_fn<'ink>( .into_pointer_value() } else { builder.build_alloca( - Value::::get_ir_type(context.type_context), + Value::>::get_ir_type(context.type_context), "", ) }; @@ -501,7 +509,9 @@ fn gen_get_info_fn<'ink>( builder.build_store( num_dependencies_addr, context.context.i32_type().const_int( - u32::try_from(dependencies.len()).expect("too many dependencies") as u64, + u32::try_from(dependencies.len()) + .expect("too many dependencies") + .into(), false, ), ); @@ -520,7 +530,7 @@ fn gen_get_info_fn<'ink>( /// Generates a method `void set_allocator_handle(void*)` that stores the argument into the global /// `allocatorHandle`. This global is used internally to reference the allocator used by this /// munlib. -fn gen_set_allocator_handle_fn(context: &IrValueContext) { +fn gen_set_allocator_handle_fn(context: &IrValueContext<'_, '_, '_>) { let set_allocator_handle_fn = context.module.add_function( "set_allocator_handle", Value::::get_ir_type(context.type_context), @@ -545,7 +555,7 @@ fn gen_set_allocator_handle_fn(context: &IrValueContext) { /// Generates a `get_version` method that returns the current abi version. /// Specifically, it returns the abi version the function was generated in. -fn gen_get_version_fn(context: &IrValueContext) { +fn gen_get_version_fn(context: &IrValueContext<'_, '_, '_>) { let get_version_fn = context.module.add_function( abi::GET_VERSION_FN_NAME, Value:: u32>::get_ir_type(context.type_context), diff --git a/crates/mun_codegen/src/ir.rs b/crates/mun_codegen/src/ir.rs index 3ec5f44f4..2523b00fb 100644 --- a/crates/mun_codegen/src/ir.rs +++ b/crates/mun_codegen/src/ir.rs @@ -31,7 +31,7 @@ pub trait IsIrType<'ink> { fn ir_type(context: &'ink Context, target: &TargetData) -> Self::Type; } -/// Defines that a type has a static represention in inkwell that can be described as a BasicType. +/// Defines that a type has a static represention in inkwell that can be described as a `BasicType`. pub trait IsBasicIrType<'ink> { fn ir_type(context: &'ink Context, target: &TargetData) -> BasicTypeEnum<'ink>; } @@ -52,7 +52,7 @@ pub trait IsFunctionReturnType<'ink> { ) -> FunctionType<'ink>; } -/// All types that statically have a BasicTypeEnum can also be used as a function return type +/// All types that statically have a `BasicTypeEnum` can also be used as a function return type impl<'ink, T: IsBasicIrType<'ink>> IsFunctionReturnType<'ink> for T { fn fn_type( context: &'ink Context, diff --git a/crates/mun_codegen/src/ir/array.rs b/crates/mun_codegen/src/ir/array.rs index b55dcd1d9..35e41a131 100644 --- a/crates/mun_codegen/src/ir/array.rs +++ b/crates/mun_codegen/src/ir/array.rs @@ -22,7 +22,7 @@ use inkwell::types::{BasicTypeEnum, IntType, StructType}; use inkwell::values::{BasicValueEnum, IntValue, PointerValue}; use std::ffi::CStr; -/// A helper struct that wraps a PointerValue which points to an in memory Mun array value. +/// A helper struct that wraps a [`PointerValue`] which points to an in memory Mun array value. 
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub struct RuntimeArrayValue<'ink>(RuntimeReferenceValue<'ink>); @@ -34,7 +34,7 @@ impl<'ink> RuntimeArrayValue<'ink> { RuntimeReferenceValue::from_ptr(ptr, array_type).map(Self) } - /// Constructs a new instance from an inkwell PointerValue without checking if this is actually + /// Constructs a new instance from an inkwell [`PointerValue`] without checking if this is actually /// a pointer to an array. pub unsafe fn from_ptr_unchecked(ptr: PointerValue<'ink>) -> Self { Self(RuntimeReferenceValue::from_ptr_unchecked(ptr)) @@ -81,7 +81,7 @@ impl<'ink> RuntimeArrayValue<'ink> { } /// Returns the type of the `length` field - pub fn length_ty(&self) -> IntType { + pub fn length_ty(&self) -> IntType<'_> { self.array_data_ty() .get_field_type_at_index(0) .expect("an array must have a second field") @@ -89,7 +89,7 @@ impl<'ink> RuntimeArrayValue<'ink> { } /// Returns the type of the `length` field - pub fn capacity_ty(&self) -> IntType { + pub fn capacity_ty(&self) -> IntType<'_> { self.array_data_ty() .get_field_type_at_index(1) .expect("an array must have a second field") diff --git a/crates/mun_codegen/src/ir/body.rs b/crates/mun_codegen/src/ir/body.rs index 82222cd75..5ece87280 100644 --- a/crates/mun_codegen/src/ir/body.rs +++ b/crates/mun_codegen/src/ir/body.rs @@ -156,7 +156,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { pub fn gen_fn_wrapper(&mut self) { let fn_sig = self.hir_function.ty(self.db).callable_sig(self.db).unwrap(); - let args: Vec = fn_sig + let args: Vec> = fn_sig .params() .iter() .enumerate() @@ -237,7 +237,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { match self.infer[*callee].as_callable_def() { Some(mun_hir::CallableDef::Function(def)) => { // Get all the arguments - let args: Vec = args + let args: Vec> = args .iter() .map(|expr| self.gen_expr(*expr).expect("expected a value").into()) .collect(); @@ -278,7 +278,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { } => self.gen_field(expr, *receiver_expr, name), Expr::Array(exprs) => self.gen_array(expr, exprs).map(Into::into), Expr::Index { base, index } => self.gen_index(expr, *base, *index), - _ => unimplemented!("unimplemented expr type {:?}", &body[expr]), + Expr::Missing => unimplemented!("unimplemented expr type {:?}", &body[expr]), } } @@ -304,7 +304,9 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { std::mem::transmute::(v.value) }) } - _ => unreachable!("unresolved bitness in code generation"), + mun_hir::IntBitness::Xsize => { + unreachable!("unresolved bitness in code generation") + } }; ir_ty.into() @@ -352,7 +354,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { ) -> BasicValueEnum<'ink> { // Construct the struct literal let struct_ty = self.hir_types.get_struct_type(hir_struct); - let mut value: AggregateValueEnum = struct_ty.get_undef().into(); + let mut value: AggregateValueEnum<'_> = struct_ty.get_undef().into(); for (i, arg) in args.into_iter().enumerate() { value = self .builder @@ -373,7 +375,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { fn gen_struct_alloc_on_heap( &mut self, hir_struct: mun_hir::Struct, - struct_lit: StructValue, + struct_lit: StructValue<'_>, ) -> BasicValueEnum<'ink> { let struct_ir_ty = self.hir_types.get_struct_type(hir_struct); let new_fn_ptr = self.dispatch_table.gen_intrinsic_lookup( @@ -442,7 +444,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { ) -> BasicValueEnum<'ink> { let struct_ty = self.infer[type_expr].clone(); let hir_struct = 
struct_ty.as_struct().unwrap(); // Can only really get here if the type is a struct - let fields: Vec = fields + let fields: Vec> = fields .iter() .map(|field| self.gen_expr(field.expr).expect("expected a field value")) .collect(); @@ -454,7 +456,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { fn gen_named_tuple_lit(&mut self, type_expr: ExprId, args: &[ExprId]) -> BasicValueEnum<'ink> { let struct_ty = self.infer[type_expr].clone(); let hir_struct = struct_ty.as_struct().unwrap(); // Can only really get here if the type is a struct - let args: Vec = args + let args: Vec> = args .iter() .map(|expr| self.gen_expr(*expr).expect("expected a field value")) .collect(); @@ -662,14 +664,14 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { /// Generates IR to calculate a unary operation on a floating point value. fn gen_unary_op_float(&mut self, expr: ExprId, op: UnaryOp) -> Option> { - let value: FloatValue = self + let value: FloatValue<'ink> = self .gen_expr(expr) .map(|value| self.opt_deref_value(expr, value)) .expect("no value") .into_float_value(); match op { UnaryOp::Neg => Some(self.builder.build_float_neg(value, "neg").into()), - _ => unimplemented!("Operator {:?} is not implemented for float", op), + UnaryOp::Not => unimplemented!("Operator {:?} is not implemented for float", op), } } @@ -680,7 +682,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { op: UnaryOp, signedness: mun_hir::Signedness, ) -> Option> { - let value: IntValue = self + let value: IntValue<'ink> = self .gen_expr(expr) .map(|value| self.opt_deref_value(expr, value)) .expect("no value") @@ -700,14 +702,14 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { /// Generates IR to calculate a unary operation on a boolean value. fn gen_unary_op_bool(&mut self, expr: ExprId, op: UnaryOp) -> Option> { - let value: IntValue = self + let value: IntValue<'ink> = self .gen_expr(expr) .map(|value| self.opt_deref_value(expr, value)) .expect("no value") .into_int_value(); match op { UnaryOp::Not => Some(self.builder.build_not(value, "not").into()), - _ => unimplemented!("Operator {:?} is not implemented for boolean", op), + UnaryOp::Neg => unimplemented!("Operator {:?} is not implemented for boolean", op), } } @@ -718,11 +720,11 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { rhs_expr: ExprId, op: BinaryOp, ) -> Option> { - let lhs: IntValue = self + let lhs: IntValue<'ink> = self .gen_expr(lhs_expr) .map(|value| self.opt_deref_value(lhs_expr, value))? .into_int_value(); - let rhs: IntValue = self + let rhs: IntValue<'ink> = self .gen_expr(rhs_expr) .map(|value| self.opt_deref_value(rhs_expr, value))? 
.into_int_value(); @@ -800,7 +802,9 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { self.builder.build_store(place, rhs); Some(self.gen_empty()) } - _ => unimplemented!("Operator {:?} is not implemented for float", op), + BinaryOp::LogicOp(_) => { + unimplemented!("Operator {:?} is not implemented for float", op) + } } } @@ -836,7 +840,9 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { self.builder.build_store(place, rhs); Some(self.gen_empty()) } - _ => unreachable!("Operator {:?} is not implemented for integer", op), + BinaryOp::LogicOp(_) => { + unreachable!("Operator {:?} is not implemented for integer", op) + } } } @@ -1128,7 +1134,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { let merge_block = self.context.append_basic_block(self.fn_value, "if_merge"); // Build the actual branching IR for the if statement - let else_block = else_block_and_expr.map(|e| e.0).unwrap_or(merge_block); + let else_block = else_block_and_expr.map_or(merge_block, |e| e.0); self.builder .build_conditional_branch(condition_ir, then_block, else_block); @@ -1175,7 +1181,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { if else_block_ir.is_none() { merge_block .remove_from_function() - .expect("merge block must have a parent") + .expect("merge block must have a parent"); } else_block_ir } else { @@ -1205,33 +1211,31 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { _expr: ExprId, break_expr: Option, ) -> Option> { - match break_expr { - Some(expr) => { - // There is an expression - // e.g. break x; - // Turn that expression into IR. - let break_value = self.gen_expr(expr); - - // If the expression never returns, we can stop what we're doing. - if let Some(break_value) = break_value { - let loop_info = self.active_loop.as_mut().unwrap(); - loop_info.break_values.push(Some(( - break_value, - self.builder.get_insert_block().unwrap(), - ))); - self.builder - .build_unconditional_branch(loop_info.exit_block); - } - } - None => { - // If the break expression doesnt contain a break statement. Add a none to the - // break values. + if let Some(expr) = break_expr { + // There is an expression + // e.g. break x; + // Turn that expression into IR. + let break_value = self.gen_expr(expr); + + // If the expression never returns, we can stop what we're doing. + if let Some(break_value) = break_value { let loop_info = self.active_loop.as_mut().unwrap(); - loop_info.break_values.push(None); + loop_info.break_values.push(Some(( + break_value, + self.builder.get_insert_block().unwrap(), + ))); self.builder .build_unconditional_branch(loop_info.exit_block); } + } else { + // If the break expression doesnt contain a break statement. Add a none to the + // break values. + let loop_info = self.active_loop.as_mut().unwrap(); + loop_info.break_values.push(None); + self.builder + .build_unconditional_branch(loop_info.exit_block); }; + None } @@ -1323,7 +1327,14 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { self.builder.build_unconditional_branch(loop_block); } - if !break_values.is_empty() { + if break_values.is_empty() { + // Not a single code entry point jumped to the exit block through a break. Therefor we + // can completely remove the exit block since it doesnt have a predecessor. 
+ exit_block + .remove_from_function() + .expect("the exit block must have a parent"); + None + } else { // Move the builder to the exit block self.builder.position_at_end(exit_block); @@ -1332,7 +1343,7 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { if let Some(Some((value, _))) = break_values.first() { let phi = self.builder.build_phi(value.get_type(), "exit"); for (value, block) in break_values.into_iter().map(Option::unwrap) { - phi.add_incoming(&[(&value, block)]) + phi.add_incoming(&[(&value, block)]); } Some(phi.as_basic_value()) } else { @@ -1340,13 +1351,6 @@ impl<'db, 'ink, 't> BodyIrGenerator<'db, 'ink, 't> { // just empty. Some(self.gen_empty()) } - } else { - // Not a single code entry point jumped to the exit block through a break. Therefor we - // can completely remove the exit block since it doesnt have a predecessor. - exit_block - .remove_from_function() - .expect("the exit block must have a parent"); - None } } diff --git a/crates/mun_codegen/src/ir/dispatch_table.rs b/crates/mun_codegen/src/ir/dispatch_table.rs index 8d6df8983..a3c1e7b7e 100644 --- a/crates/mun_codegen/src/ir/dispatch_table.rs +++ b/crates/mun_codegen/src/ir/dispatch_table.rs @@ -26,7 +26,7 @@ use std::{ /// ``` /// /// The dispatch table is used to add a patchable indirection when calling a function from IR. The -/// DispatchTable is exposed to the Runtime which fills the structure with valid pointers to +/// `DispatchTable` is exposed to the Runtime which fills the structure with valid pointers to /// functions. This basically enables all hot reloading within Mun. #[derive(Debug, Eq, PartialEq)] pub struct DispatchTable<'ink> { @@ -55,7 +55,7 @@ pub struct FunctionPrototype { } /// A `DispatchableFunction` is an entry in the dispatch table that may or may not be pointing to an -/// existing mun_hir function. +/// existing `mun_hir` function. 
#[derive(Debug, Clone, Eq, PartialEq)] pub struct DispatchableFunction { pub prototype: FunctionPrototype, @@ -73,7 +73,7 @@ impl<'ink> DispatchTable<'ink> { &self.entries } - /// Generate a function lookup through the DispatchTable, equivalent to something along the + /// Generate a function lookup through the `DispatchTable`, equivalent to something along the /// lines of: `dispatchTable[i]`, where i is the index of the function and `dispatchTable` is a /// struct pub fn gen_function_lookup( @@ -91,10 +91,10 @@ impl<'ink> DispatchTable<'ink> { .get(&function) .expect("unknown function"); - self.gen_function_lookup_by_index(table_ref, builder, &function_name, index) + Self::gen_function_lookup_by_index(table_ref, builder, &function_name, index) } - /// Generates a function lookup through the DispatchTable, equivalent to something along the + /// Generates a function lookup through the `DispatchTable`, equivalent to something along the /// lines of: `dispatchTable[i]`, where i is the index of the intrinsic and `dispatchTable` is a /// struct pub fn gen_intrinsic_lookup( @@ -111,13 +111,12 @@ impl<'ink> DispatchTable<'ink> { .get(&prototype) .expect("unknown function"); - self.gen_function_lookup_by_index(table_ref, builder, &prototype.name, index) + Self::gen_function_lookup_by_index(table_ref, builder, &prototype.name, index) } - /// Generates a function lookup through the DispatchTable, equivalent to something along the + /// Generates a function lookup through the `DispatchTable`, equivalent to something along the /// lines of: `dispatchTable[i]`, where i is the index and `dispatchTable` is a struct fn gen_function_lookup_by_index( - &self, table_ref: Option>, builder: &inkwell::builder::Builder<'ink>, function_name: &str, @@ -204,9 +203,9 @@ impl<'db, 'ink, 't> DispatchTableBuilder<'db, 'ink, 't> { context, module, target_data, - function_to_idx: Default::default(), - prototype_to_idx: Default::default(), - entries: Default::default(), + function_to_idx: HashMap::default(), + prototype_to_idx: HashMap::default(), + entries: Vec::default(), table_ref: None, table_type: context.opaque_struct_type("DispatchTable"), hir_types, @@ -240,7 +239,7 @@ impl<'db, 'ink, 't> DispatchTableBuilder<'db, 'ink, 't> { self.table_ref = Some( self.module .add_global(self.table_type, None, "dispatchTable"), - ) + ); } } @@ -285,10 +284,10 @@ impl<'db, 'ink, 't> DispatchTableBuilder<'db, 'ink, 't> { .iter() .map(|arg| self.hir_types.type_id(arg)) .collect(); - let ret_type = if !sig.ret().is_empty() { - self.hir_types.type_id(sig.ret()) - } else { + let ret_type = if sig.ret().is_empty() { <()>::type_id().clone() + } else { + self.hir_types.type_id(sig.ret()) }; let prototype = FunctionPrototype { @@ -315,13 +314,13 @@ impl<'db, 'ink, 't> DispatchTableBuilder<'db, 'ink, 't> { self.collect_expr(body.body_expr(), body, infer); } - /// Builds the final DispatchTable with all *called* functions from within the module + /// Builds the final `DispatchTable` with all *called* functions from within the module /// # Parameters /// * **functions**: Mapping of *defined* Mun functions to their respective IR values. /// Returns the `DispatchTable` and a set of dependencies for the module. 
pub fn build(self) -> (DispatchTable<'ink>, FxHashSet) { // Construct the table body from all the entries in the dispatch table - let table_body: Vec = self + let table_body: Vec> = self .entries .iter() .map(|f| f.ir_type.ptr_type(inkwell::AddressSpace::default()).into()) @@ -332,7 +331,7 @@ impl<'db, 'ink, 't> DispatchTableBuilder<'db, 'ink, 't> { // Create a default initializer for function that are already known if let Some(table_ref) = self.table_ref { - let values: Vec = self + let values: Vec> = self .entries .iter() .enumerate() diff --git a/crates/mun_codegen/src/ir/file_group.rs b/crates/mun_codegen/src/ir/file_group.rs index 57923f05c..eb1114476 100644 --- a/crates/mun_codegen/src/ir/file_group.rs +++ b/crates/mun_codegen/src/ir/file_group.rs @@ -69,11 +69,12 @@ pub(crate) fn gen_file_group_ir<'ink>( ); } } - ModuleDef::Module(_) => (), - ModuleDef::Function(_) => (), // TODO: Extern types? - ModuleDef::Struct(_) => (), - ModuleDef::PrimitiveType(_) => (), - ModuleDef::TypeAlias(_) => (), + // TODO: Extern types for functions? + ModuleDef::Module(_) + | ModuleDef::Struct(_) + | ModuleDef::PrimitiveType(_) + | ModuleDef::TypeAlias(_) + | ModuleDef::Function(_) => (), } } diff --git a/crates/mun_codegen/src/ir/function.rs b/crates/mun_codegen/src/ir/function.rs index 946e02992..c40d9a680 100644 --- a/crates/mun_codegen/src/ir/function.rs +++ b/crates/mun_codegen/src/ir/function.rs @@ -6,7 +6,7 @@ use inkwell::{ use mun_hir::HirDatabase; -/// Constructs a PassManager to optimize functions for the given optimization level. +/// Constructs a `PassManager` to optimize functions for the given optimization level. pub(crate) fn create_pass_manager<'ink>( module: &Module<'ink>, optimization_lvl: OptimizationLevel, diff --git a/crates/mun_codegen/src/ir/intrinsics.rs b/crates/mun_codegen/src/ir/intrinsics.rs index 2bcafc4fe..aea15b935 100644 --- a/crates/mun_codegen/src/ir/intrinsics.rs +++ b/crates/mun_codegen/src/ir/intrinsics.rs @@ -82,8 +82,8 @@ fn collect_expr<'ink>( expr_id, body, infer, - ) - }) + ); + }); } /// Collects all intrinsics from the specified `body`. diff --git a/crates/mun_codegen/src/ir/reference.rs b/crates/mun_codegen/src/ir/reference.rs index c7b25039c..02471e07e 100644 --- a/crates/mun_codegen/src/ir/reference.rs +++ b/crates/mun_codegen/src/ir/reference.rs @@ -46,7 +46,7 @@ impl<'ink> RuntimeReferenceValue<'ink> { } } - /// Constructs a new instance from an inkwell PointerValue without checking if this is actually + /// Constructs a new instance from an inkwell `PointerValue` without checking if this is actually /// a pointer to an object on the heap. pub unsafe fn from_ptr_unchecked(ptr: PointerValue<'ink>) -> Self { Self(ptr) diff --git a/crates/mun_codegen/src/ir/ty.rs b/crates/mun_codegen/src/ir/ty.rs index 4b0c0a6de..c080aeb67 100644 --- a/crates/mun_codegen/src/ir/ty.rs +++ b/crates/mun_codegen/src/ir/ty.rs @@ -38,8 +38,8 @@ impl<'db, 'ink> HirTypeCache<'db, 'ink> { db, target_data, types: RefCell::new(HashMap::default()), - struct_to_type_id: Default::default(), - array_ty_to_type_id: Default::default(), + struct_to_type_id: RefCell::default(), + array_ty_to_type_id: RefCell::default(), } } @@ -165,7 +165,7 @@ impl<'db, 'ink> HirTypeCache<'db, 'ink> { } /// Returns the type of the struct that should be used for variables. 
Depending on the memory - /// type of the struct this is either a pointer to a GCHandle which holds a pointer to a struct, + /// type of the struct this is either a pointer to a `GCHandle` which holds a pointer to a struct, /// or, in case of a value struct, the struct type itself. /// Returns the type of the struct that should be used for variables. pub fn get_struct_reference_type(&self, struct_ty: mun_hir::Struct) -> BasicTypeEnum<'ink> { @@ -315,7 +315,7 @@ impl<'db, 'ink> HirTypeCache<'db, 'ink> { tuple_ir_types.push( self.get_basic_type(ty) .expect("tuple type should be a basic type"), - ) + ); } self.context.struct_type(&tuple_ir_types, false) } @@ -378,9 +378,7 @@ impl<'db, 'ink> HirTypeCache<'db, 'ink> { .array_ty_to_type_id .borrow_mut() .insert(a.interned().clone(), array_type_id.clone()); - if previous_entry.is_some() { - panic!("array cyclic reference?"); - } + assert!(previous_entry.is_none(), "array cyclic reference?"); array_type_id } diff --git a/crates/mun_codegen/src/ir/type_table.rs b/crates/mun_codegen/src/ir/type_table.rs index 1c5e13765..29f1f2767 100644 --- a/crates/mun_codegen/src/ir/type_table.rs +++ b/crates/mun_codegen/src/ir/type_table.rs @@ -24,7 +24,7 @@ pub struct TypeTable<'ink> { } impl<'ink> TypeTable<'ink> { - /// The name of the TypeTable's LLVM `GlobalValue`. + /// The name of the `TypeTable`'s LLVM `GlobalValue`. pub(crate) const NAME: &'static str = "global_type_lookup_table"; /// Returns a slice containing all types @@ -32,7 +32,7 @@ impl<'ink> TypeTable<'ink> { &self.entries } - /// Looks for a global symbol with the name of the TypeTable global in the specified `module`. + /// Looks for a global symbol with the name of the `TypeTable` global in the specified `module`. /// Returns the global value if it could be found, `None` otherwise. pub fn find_global(module: &Module<'ink>) -> Option> { module @@ -122,7 +122,7 @@ impl<'db, 'ink, 't> TypeTableBuilder<'db, 'ink, 't> { value_context, dispatch_table, hir_types, - entries: Default::default(), + entries: HashSet::default(), module_group, } } @@ -146,11 +146,11 @@ impl<'db, 'ink, 't> TypeTableBuilder<'db, 'ink, 't> { None => panic!("expected a callable expression"), } } else if let mun_hir::Expr::Array(..) 
= expr { - self.collect_type(self.hir_types.type_id(&infer[expr_id])) + self.collect_type(self.hir_types.type_id(&infer[expr_id])); } // Recurse further - expr.walk_child_exprs(|expr_id| self.collect_expr(expr_id, body, infer)) + expr.walk_child_exprs(|expr_id| self.collect_expr(expr_id, body, infer)); } /// Collects `TypeInfo` from types in the signature of a function @@ -196,7 +196,7 @@ impl<'db, 'ink, 't> TypeTableBuilder<'db, 'ink, 't> { self.collect_type(type_info); let fields = hir_struct.fields(self.db); - for field in fields.into_iter() { + for field in fields { self.collect_type(self.hir_types.type_id(&field.ty(self.db))); } } diff --git a/crates/mun_codegen/src/ir/types/test.rs b/crates/mun_codegen/src/ir/types/test.rs index 8c4d8ec44..415f8743f 100644 --- a/crates/mun_codegen/src/ir/types/test.rs +++ b/crates/mun_codegen/src/ir/types/test.rs @@ -8,6 +8,13 @@ use crate::value::{IrTypeContext, SizedValueType}; #[test] fn abi_struct_sizes() { + fn test_type_size<'ink, A: Sized, T: SizedValueType<'ink>>(context: &IrTypeContext<'ink, '_>) { + let ir_type = T::get_ir_type(context); + println!("{}", ir_type.print_to_string().to_string()); + let ir_size = context.target_data.get_abi_size(&ir_type); + assert_eq!(mem::size_of::(), ir_size as usize); + } + // Get target data for the current host let target = mun_target::spec::Target::host_target().expect("unable to determine host target"); let target_data = inkwell::targets::TargetData::create(&target.data_layout); @@ -17,29 +24,22 @@ fn abi_struct_sizes() { let type_context = IrTypeContext { context: &context, target_data: &target_data, - struct_types: &RefCell::new(Default::default()), + struct_types: &RefCell::default(), }; - fn test_type_size<'ink, A: Sized, T: SizedValueType<'ink>>(context: &IrTypeContext<'ink, '_>) { - let ir_type = T::get_ir_type(context); - println!("{}", ir_type.print_to_string().to_string()); - let ir_size = context.target_data.get_abi_size(&ir_type); - assert_eq!(mem::size_of::(), ir_size as usize); - } - test_type_size::(&type_context); test_type_size::(&type_context); test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); - test_type_size::(&type_context); + test_type_size::, ir::TypeId<'_>>(&type_context); + test_type_size::, ir::PointerTypeId<'_>>(&type_context); + test_type_size::, ir::ArrayTypeId<'_>>(&type_context); + test_type_size::, ir::TypeDefinitionData<'_>>(&type_context); + test_type_size::, ir::StructDefinition<'_>>(&type_context); + test_type_size::, ir::TypeDefinition<'_>>(&type_context); + test_type_size::, ir::FunctionSignature<'_>>(&type_context); + test_type_size::, ir::FunctionPrototype<'_>>(&type_context); + test_type_size::, ir::ModuleInfo<'_>>(&type_context); + test_type_size::, ir::DispatchTable<'_>>(&type_context); + test_type_size::, ir::TypeLut<'_>>(&type_context); + test_type_size::, ir::AssemblyInfo<'_>>(&type_context); } diff --git a/crates/mun_codegen/src/linker.rs b/crates/mun_codegen/src/linker.rs index 5c2648996..2e8fcbffa 100644 --- a/crates/mun_codegen/src/linker.rs +++ b/crates/mun_codegen/src/linker.rs @@ -128,18 +128,14 @@ impl Ld64Linker { let sdk_name = match (arch.as_ref(), os.as_ref()) { ("aarch64", "tvos") => 
"appletvos", ("x86_64", "tvos") => "appletvsimulator", - ("arm", "ios") => "iphoneos", ("aarch64", "ios") if llvm_target.contains("macabi") => "macosx", ("aarch64", "ios") if llvm_target.ends_with("-simulator") => "iphonesimulator", - ("aarch64", "ios") => "iphoneos", - ("x86", "ios") => "iphonesimulator", ("x86_64", "ios") if llvm_target.contains("macabi") => "macosx", - ("x86_64", "ios") => "iphonesimulator", - ("x86_64", "watchos") => "watchsimulator", - ("arm64_32", "watchos") => "watchos", + ("arm" | "aarch64", "ios") => "iphoneos", + ("x86" | "x86_64", "ios") => "iphonesimulator", ("aarch64", "watchos") if llvm_target.ends_with("-simulator") => "watchsimulator", - ("aarch64", "watchos") => "watchos", - ("arm", "watchos") => "watchos", + ("x86_64", "watchos") => "watchsimulator", + ("aarch64" | "arm" | "arm64_32", "watchos") => "watchos", (_, "macos") => "macosx", _ => { return Err(LinkerError::PlatformSdkMissing(format!( diff --git a/crates/mun_codegen/src/mock.rs b/crates/mun_codegen/src/mock.rs index d3ace6637..524d210a9 100644 --- a/crates/mun_codegen/src/mock.rs +++ b/crates/mun_codegen/src/mock.rs @@ -63,9 +63,9 @@ impl mun_hir::Upcast for MockDatabase { impl Default for MockDatabase { fn default() -> Self { - let mut db: MockDatabase = MockDatabase { - storage: Default::default(), - events: Default::default(), + let mut db = MockDatabase { + storage: salsa::Storage::default(), + events: Mutex::default(), }; db.set_optimization_level(OptimizationLevel::Default); db.set_target(Target::host_target().unwrap()); @@ -76,7 +76,7 @@ impl Default for MockDatabase { impl MockDatabase { /// Creates a database from the given text. pub fn with_single_file(text: &str) -> (MockDatabase, FileId) { - let mut db: MockDatabase = Default::default(); + let mut db = MockDatabase::default(); let mut source_root = SourceRoot::default(); let source_root_id = SourceRootId(0); diff --git a/crates/mun_codegen/src/module_group.rs b/crates/mun_codegen/src/module_group.rs index f632bf819..9259cb610 100644 --- a/crates/mun_codegen/src/module_group.rs +++ b/crates/mun_codegen/src/module_group.rs @@ -95,9 +95,7 @@ impl ModuleGroup { .get(&visible_mod.into()) // If all its children are also part of the module group we can keep the // function internal, so there is no need to export it. - .map(|&includes_subtree| !includes_subtree) - // Otherwise, the module is not part of the group and we have to export it. 
- .unwrap_or(true) + .map_or(true, |&includes_subtree| !includes_subtree) } } } diff --git a/crates/mun_codegen/src/module_partition.rs b/crates/mun_codegen/src/module_partition.rs index f2cad94b0..6b803e7d8 100644 --- a/crates/mun_codegen/src/module_partition.rs +++ b/crates/mun_codegen/src/module_partition.rs @@ -25,13 +25,15 @@ impl ModulePartition { ) -> ModuleGroupId { let id = ModuleGroupId(self.groups.len()); for module in group.iter() { - if self.module_to_group.insert(module, id).is_some() { - panic!("cannot add a module to multiple groups"); - } + assert!( + self.module_to_group.insert(module, id).is_none(), + "cannot add a module to multiple groups" + ); if let Some(file_id) = module.file_id(db) { - if self.file_to_group.insert(file_id, id).is_some() { - panic!("cannot add a file to multiple groups"); - } + assert!( + self.file_to_group.insert(file_id, id).is_none(), + "cannot add a file to multiple groups" + ); } } diff --git a/crates/mun_codegen/src/test.rs b/crates/mun_codegen/src/test.rs index 24c400e6d..1ff0146bc 100644 --- a/crates/mun_codegen/src/test.rs +++ b/crates/mun_codegen/src/test.rs @@ -22,7 +22,7 @@ fn array_index_assign() { a[1] = 100 } ", - ) + ); } #[test] @@ -35,7 +35,7 @@ fn array_index() { a[3] } ", - ) + ); } #[test] @@ -47,7 +47,7 @@ fn array_literal() { let a = [1,2,3,4,] } ", - ) + ); } #[test] @@ -65,7 +65,7 @@ fn multi_file() { 3 } ", - ) + ); } #[test] @@ -83,7 +83,7 @@ fn issue_262() { let a = 3 + 4; a }", - ) + ); } #[test] @@ -103,7 +103,7 @@ fn issue_225() { { let a = Num { value: b }; a}.value; } "#, - ) + ); } #[test] @@ -116,7 +116,7 @@ fn issue_228_never_if() { return 2; } "#, - ) + ); } #[test] @@ -128,7 +128,7 @@ fn issue_228() { if n == 0 {return 1} else {n * (n-1)} } "#, - ) + ); } #[test] @@ -150,7 +150,7 @@ fn issue_128() { thing(78); } "#, - ) + ); } #[test] @@ -196,7 +196,7 @@ fn literal_types() { pub fn add(a:u32) -> u32 { a + 12u32 }", - ) + ); } #[test] @@ -292,7 +292,7 @@ fn assignment_op_struct() { // a /= b; // a %= b; "#, - ) + ); } macro_rules! 
test_number_operator_types { @@ -607,7 +607,7 @@ fn if_statement() { b } "#, - ) + ); } #[test] @@ -622,7 +622,7 @@ fn void_return() { let c = bar() } "#, - ) + ); } #[test] @@ -638,7 +638,7 @@ fn fibonacci() { } } "#, - ) + ); } #[test] @@ -661,7 +661,7 @@ fn fibonacci_loop() { } } "#, - ) + ); } #[test] @@ -677,7 +677,7 @@ fn loop_issue_llvm13() { } } "#, - ) + ); } #[test] @@ -773,7 +773,7 @@ fn loop_expr() { loop {} } "#, - ) + ); } #[test] @@ -793,7 +793,7 @@ fn loop_break_expr() { } } "#, - ) + ); } #[test] @@ -812,7 +812,7 @@ fn while_expr() { }; } "#, - ) + ); } #[test] @@ -829,7 +829,7 @@ fn struct_test() { let c: Baz = Baz; } "#, - ) + ); } #[test] @@ -860,7 +860,7 @@ fn field_expr() { aa_lhs + aa_rhs } "#, - ) + ); } #[test] @@ -875,7 +875,7 @@ fn field_crash() { b.a } "#, - ) + ); } #[test] @@ -891,7 +891,7 @@ fn gc_struct() { let b = a; } "#, - ) + ); } #[test] @@ -904,7 +904,7 @@ fn extern_fn() { add(3,4); } "#, - ) + ); } #[test] @@ -1032,7 +1032,7 @@ fn nested_private_extern_fn() { private_fn() } "#, - ) + ); } #[test] @@ -1048,7 +1048,7 @@ fn nested_private_recursive_fn() { private_fn() } "#, - ) + ); } #[test] @@ -1066,7 +1066,7 @@ fn nested_private_recursive_fn_with_args() { private_fn(other()) } "#, - ) + ); } fn test_snapshot(name: &str, text: &str) { diff --git a/crates/mun_codegen/src/value/float_value.rs b/crates/mun_codegen/src/value/float_value.rs index bb56e97d6..ae2a1cc19 100644 --- a/crates/mun_codegen/src/value/float_value.rs +++ b/crates/mun_codegen/src/value/float_value.rs @@ -58,7 +58,8 @@ impl HasConstValue for f64 { impl<'ink> AsValue<'ink, f32> for f32 { fn as_value(&self, context: &IrValueContext<'ink, '_, '_>) -> Value<'ink, f32> { Value::from_raw( - ::get_ir_type(context.type_context).const_float(*self as f64), + ::get_ir_type(context.type_context) + .const_float(f64::from(*self)), ) } } diff --git a/crates/mun_codegen/src/value/global.rs b/crates/mun_codegen/src/value/global.rs index 0af89d659..0dacdad71 100644 --- a/crates/mun_codegen/src/value/global.rs +++ b/crates/mun_codegen/src/value/global.rs @@ -41,7 +41,7 @@ impl<'ink, T: ?Sized> Global<'ink, T> { pub unsafe fn from_raw(value: inkwell::values::GlobalValue<'ink>) -> Self { Global { value, - data: Default::default(), + data: PhantomData, } } } @@ -84,7 +84,7 @@ where } Global { value: global, - data: Default::default(), + data: PhantomData, } } diff --git a/crates/mun_codegen/src/value/int_value.rs b/crates/mun_codegen/src/value/int_value.rs index b20caddb5..5219e43dd 100644 --- a/crates/mun_codegen/src/value/int_value.rs +++ b/crates/mun_codegen/src/value/int_value.rs @@ -50,7 +50,7 @@ impl<'ink> AsValue<'ink, u8> for u8 { fn as_value(&self, context: &IrValueContext<'ink, '_, '_>) -> Value<'ink, u8> { Value::from_raw( ::get_ir_type(context.type_context) - .const_int(*self as u64, false), + .const_int(u64::from(*self), false), ) } } @@ -59,7 +59,7 @@ impl<'ink> AsValue<'ink, u16> for u16 { fn as_value(&self, context: &IrValueContext<'ink, '_, '_>) -> Value<'ink, u16> { Value::from_raw( ::get_ir_type(context.type_context) - .const_int(*self as u64, false), + .const_int(u64::from(*self), false), ) } } @@ -68,7 +68,7 @@ impl<'ink> AsValue<'ink, u32> for u32 { fn as_value(&self, context: &IrValueContext<'ink, '_, '_>) -> Value<'ink, u32> { Value::from_raw( ::get_ir_type(context.type_context) - .const_int(*self as u64, false), + .const_int(u64::from(*self), false), ) } } @@ -94,7 +94,7 @@ impl<'ink> AsValue<'ink, bool> for bool { fn as_value(&self, context: &IrValueContext<'ink, '_, '_>) -> 
Value<'ink, bool> { Value::from_raw( ::get_ir_type(context.type_context) - .const_int(*self as u64, true), + .const_int(u64::from(*self), true), ) } } diff --git a/crates/mun_codegen/src/value/mod.rs b/crates/mun_codegen/src/value/mod.rs index c78791d7c..f6d6dc0ef 100644 --- a/crates/mun_codegen/src/value/mod.rs +++ b/crates/mun_codegen/src/value/mod.rs @@ -184,13 +184,13 @@ pub trait PointerValueType<'ink> { } /// A trait that enables the conversion from an inkwell type to a corresponding value type. (e.g. -/// IntType -> IntValue) +/// `IntType` -> `IntValue`) pub trait TypeValue<'ink> { type Value: inkwell::values::AnyValue<'ink>; } /// A trait that enables the conversion from an inkwell value to a corresponding type. (e.g. -/// IntValue -> IntType) +/// `IntValue` -> `IntType`) pub trait ValueType<'ink>: Clone + Debug + Copy + Eq + PartialEq + Hash { type Type: inkwell::types::AnyType<'ink>; @@ -349,12 +349,12 @@ impl<'ink, T: ConcreteValueType<'ink> + ?Sized> Eq for Value<'ink, T> {} impl<'ink, T: ConcreteValueType<'ink> + ?Sized> Hash for Value<'ink, T> { fn hash(&self, state: &mut H) { - self.value.hash(state) + self.value.hash(state); } } impl<'ink, T: ConcreteValueType<'ink> + ?Sized> std::fmt::Debug for Value<'ink, T> { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.value) } } diff --git a/crates/mun_codegen/src/value/tuple_value.rs b/crates/mun_codegen/src/value/tuple_value.rs index 99df6f82e..2ac10b416 100644 --- a/crates/mun_codegen/src/value/tuple_value.rs +++ b/crates/mun_codegen/src/value/tuple_value.rs @@ -5,13 +5,13 @@ use super::{ macro_rules! tuple_impls { ( $( $name:ident )* ) => { - /// Every tuple that contains values that can be converted to BasicValueEnum can be + /// Every tuple that contains values that can be converted to [`BasicValueEnum`] can be /// represented by a tuple impl<'ink, $($name: AsValueInto<'ink, inkwell::values::BasicValueEnum<'ink>>),*> ConcreteValueType<'ink> for ($($name,)*) { type Value = inkwell::values::StructValue<'ink>; } - /// Every tuple that contains values that can be converted to BasicValueEnum and which are + /// Every tuple that contains values that can be converted to [`BasicValueEnum`] and which are /// sized, are also sized. impl<'ink, $($name: AsValueInto<'ink, inkwell::values::BasicValueEnum<'ink>> + SizedValueType<'ink>),*> SizedValueType<'ink> for ($($name,)*) where diff --git a/crates/mun_codegen_macros/src/lib.rs b/crates/mun_codegen_macros/src/lib.rs index 2a54aaed2..cb1f9275e 100644 --- a/crates/mun_codegen_macros/src/lib.rs +++ b/crates/mun_codegen_macros/src/lib.rs @@ -99,7 +99,7 @@ pub fn as_value_derive(input: TokenStream) -> TokenStream { // `inkwell::values::BasicTypeValue` let field_types_values = struct_data.fields.iter().enumerate().map(|(idx, f)| { let idx = Index::from(idx); - let name = f.ident.as_ref().map(|i| quote! { #i }).unwrap_or_else(|| quote! { #idx }); + let name = f.ident.as_ref().map_or_else(|| quote! { #idx }, |i| quote! { #i }); quote! { { let value = crate::value::AsValueInto::<'ink, inkwell::values::BasicValueEnum<'ink>>::as_value_into(&self. #name, context); @@ -120,8 +120,7 @@ pub fn as_value_derive(input: TokenStream) -> TokenStream { let name = f .ident .as_ref() - .map(|i| quote! { #i }) - .unwrap_or_else(|| quote! { #idx }); + .map_or_else(|| quote! { #idx }, |i| quote! { #i }); quote! { self. 
#name .as_bytes_and_ptrs(type_context) } @@ -383,7 +382,7 @@ pub fn as_value_derive(input: TokenStream) -> TokenStream { }); if enum_data.variants.is_empty() { - eprintln!("Enums with no variants are not supported by the `AsValue` macro.") + eprintln!("Enums with no variants are not supported by the `AsValue` macro."); } let enum_name = &derive_input.ident; @@ -472,24 +471,24 @@ pub fn as_value_derive(input: TokenStream) -> TokenStream { .map(|(tag, v)| { let tag = Index::from(tag); let field_mappings = v.fields.iter().enumerate().map(|(idx, f)| { - let name = f.ident.as_ref().map(|i| quote! { #i }).unwrap_or_else(|| { + let name = f.ident.as_ref().map_or_else(|| { // If this is a tuple struct, map the index to an alias (e.g. 0: t0) let concatenated = format!("t{idx}"); let local = Ident::new(&concatenated, Span::call_site()); let idx = Index::from(idx); quote! { #idx: #local } - }); + }, |i| quote! { #i }); name }); let field_bytes_and_ptrs = v.fields.iter().enumerate().map(|(idx, f)| { - let name = f.ident.as_ref().map(|i| quote! { #i }).unwrap_or_else(|| { + let name = f.ident.as_ref().map_or_else(|| { // If this is a tuple struct, map the use an alias (e.g. t0 for 0) let concatenated = format!("t{idx}"); let local = Ident::new(&concatenated, Span::call_site()); quote! { #local } - }); + }, |i| quote! { #i }); quote! { #name .as_bytes_and_ptrs(type_context) diff --git a/crates/mun_compiler/src/db.rs b/crates/mun_compiler/src/db.rs index ade1d0d78..4050872a9 100644 --- a/crates/mun_compiler/src/db.rs +++ b/crates/mun_compiler/src/db.rs @@ -49,7 +49,7 @@ impl CompilerDatabase { /// Constructs a new database pub fn new(config: &Config) -> Self { let mut db = CompilerDatabase { - storage: Default::default(), + storage: salsa::Storage::default(), }; // Set the initial configuration diff --git a/crates/mun_compiler/src/diagnostics_snippets.rs b/crates/mun_compiler/src/diagnostics_snippets.rs index bc9d3c6ab..0908c5f9f 100644 --- a/crates/mun_compiler/src/diagnostics_snippets.rs +++ b/crates/mun_compiler/src/diagnostics_snippets.rs @@ -48,7 +48,7 @@ pub(crate) fn emit_syntax_error( Renderer::plain() }; let display = renderer.render(snippet); - write!(writer, "{}", display) + write!(writer, "{display}") } /// Emits all diagnostics that are a result of HIR validation. 
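// Editor's aside (not part of the patch): a sketch of the
// `clippy::uninlined_format_args` style used in the surrounding
// diagnostics_snippets.rs hunks; the writer and `display` string are placeholders.
use std::io::Write;

fn emit(writer: &mut dyn Write, display: &str) -> std::io::Result<()> {
    // `write!(writer, "{}", display)` becomes the inlined-capture form:
    write!(writer, "{display}")
}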
@@ -72,10 +72,6 @@ fn emit_diagnostic( display_colors: bool, writer: &mut dyn std::io::Write, ) -> std::io::Result<()> { - // Get the basic info from the diagnostic - let title = diagnostic.title(); - let range = diagnostic.range(); - /// Will hold all snippets and their relevant information struct AnnotationFile { relative_file_path: RelativePathBuf, @@ -84,6 +80,10 @@ fn emit_diagnostic( annotations: Vec, } + // Get the basic info from the diagnostic + let title = diagnostic.title(); + let range = diagnostic.range(); + let annotations = { let mut annotations = Vec::new(); let mut file_to_index = HashMap::new(); @@ -192,5 +192,5 @@ fn emit_diagnostic( Renderer::plain() }; let display = renderer.render(snippet); - write!(writer, "{}", display) + write!(writer, "{display}") } diff --git a/crates/mun_compiler/src/driver.rs b/crates/mun_compiler/src/driver.rs index 39f6dcf08..629fb7fab 100644 --- a/crates/mun_compiler/src/driver.rs +++ b/crates/mun_compiler/src/driver.rs @@ -48,11 +48,11 @@ impl Driver { Self { db: CompilerDatabase::new(&config), out_dir, - source_root: Default::default(), - path_to_file_id: Default::default(), - file_id_to_path: Default::default(), + source_root: SourceRoot::default(), + path_to_file_id: HashMap::default(), + file_id_to_path: HashMap::default(), next_file_id: 0, - module_to_temp_assembly_path: Default::default(), + module_to_temp_assembly_path: HashMap::default(), emit_ir: config.emit_ir, } } @@ -149,7 +149,7 @@ impl Driver { } impl Driver { - /// Returns a file id for the file with the given `relative_path`. This function reuses FileId's + /// Returns a file id for the file with the given `relative_path`. This function reuses `FileId`'s /// for paths to keep the cache as valid as possible. /// /// The allocation of an id might fail if more file IDs exist than can be allocated. @@ -240,7 +240,7 @@ impl Driver { if let Err(e) = emit_hir_diagnostic(d, &self.db, file_id, emit_colors, writer) { - error = Some(e) + error = Some(e); }; }), ); @@ -262,15 +262,15 @@ impl Driver { display_color: DisplayColor, ) -> anyhow::Result> { let mut compiler_errors: Vec = Vec::new(); - if !self.emit_diagnostics(&mut Cursor::new(&mut compiler_errors), display_color)? { - Ok(None) - } else { + if self.emit_diagnostics(&mut Cursor::new(&mut compiler_errors), display_color)? { Ok(Some(String::from_utf8(compiler_errors).map_err(|e| { anyhow::anyhow!( "could not convert compiler diagnostics to valid UTF8: {}", e ) })?)) + } else { + Ok(None) } } } @@ -359,7 +359,7 @@ impl Driver { // } else { // eprintln!("Blocked on acquiring lock on output directory") // } - std::thread::sleep(Duration::from_secs(1)) + std::thread::sleep(Duration::from_secs(1)); } }; } diff --git a/crates/mun_compiler/src/driver/display_color.rs b/crates/mun_compiler/src/driver/display_color.rs index b2349b641..a9217498b 100644 --- a/crates/mun_compiler/src/driver/display_color.rs +++ b/crates/mun_compiler/src/driver/display_color.rs @@ -21,16 +21,15 @@ impl DisplayColor { /// Decides whether the current terminal supports ANSI escape codes based on the `term` environment variable and the operating system. 
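// Editor's aside (not part of the patch): a sketch of the `if let .. else ..`
// rewrite of a two-arm `match` on `env::var("TERM")`, as in the nearby
// display_color.rs hunk; which style lint prompted it is an assumption, and the
// helper name is hypothetical.
use std::env;

fn term_is_dumb() -> bool {
    if let Ok(terminal) = env::var("TERM") {
        terminal.as_str() == "dumb"
    } else {
        false
    }
}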
fn terminal_support_ansi() -> bool { - let supports_color = match env::var("TERM") { - Ok(terminal) => terminal.as_str() == "dumb", - Err(_) => { - #[cfg(target_os = "windows")] - let term_support = cmd_supports_ansi(); - #[cfg(not(target_os = "windows"))] - let term_support = false; + let supports_color = if let Ok(terminal) = env::var("TERM") { + terminal.as_str() == "dumb" + } else { + #[cfg(target_os = "windows")] + let term_support = cmd_supports_ansi(); + #[cfg(not(target_os = "windows"))] + let term_support = false; - term_support - } + term_support }; // If NO_COLOR is set, definitely do not enable color (https://no-color.org/) diff --git a/crates/mun_compiler_daemon/src/lib.rs b/crates/mun_compiler_daemon/src/lib.rs index cf9a03715..7db85e1eb 100644 --- a/crates/mun_compiler_daemon/src/lib.rs +++ b/crates/mun_compiler_daemon/src/lib.rs @@ -27,7 +27,7 @@ pub fn compile_and_watch_manifest( // Emit all current errors, and write the assemblies if no errors occured if !driver.emit_diagnostics(&mut stderr(), display_color)? { - driver.write_all_assemblies(false)? + driver.write_all_assemblies(false)?; } // Insert Ctrl+C handler so we can gracefully quit @@ -41,7 +41,7 @@ pub fn compile_and_watch_manifest( // Start watching filesystem events. while !should_quit.load(std::sync::atomic::Ordering::SeqCst) { if let Ok(event) = watcher_rx.recv_timeout(Duration::from_millis(1)) { - use notify::DebouncedEvent::*; + use notify::DebouncedEvent::{Create, Remove, Rename, Write}; match event { Write(ref path) if is_source_file(path) => { let relative_path = compute_source_relative_path(&source_directory, path)?; diff --git a/crates/mun_diagnostics/src/hir/access_unknown_field.rs b/crates/mun_diagnostics/src/hir/access_unknown_field.rs index 780c2c7df..2edc5e6a3 100644 --- a/crates/mun_diagnostics/src/hir/access_unknown_field.rs +++ b/crates/mun_diagnostics/src/hir/access_unknown_field.rs @@ -48,8 +48,7 @@ impl<'db, 'diag, DB: mun_hir::HirDatabase> AccessUnknownField<'db, 'diag, DB> { let parse = db.parse(diag.file); let location = ast::FieldExpr::cast(diag.expr.to_node(&parse.syntax_node())) - .map(|f| f.field_range()) - .unwrap_or_else(|| diag.highlight_range()); + .map_or_else(|| diag.highlight_range(), |f| f.field_range()); AccessUnknownField { db, diag, location } } diff --git a/crates/mun_diagnostics/src/hir/duplicate_definition_error.rs b/crates/mun_diagnostics/src/hir/duplicate_definition_error.rs index 4a5c05fcd..8ae63dcbd 100644 --- a/crates/mun_diagnostics/src/hir/duplicate_definition_error.rs +++ b/crates/mun_diagnostics/src/hir/duplicate_definition_error.rs @@ -26,13 +26,11 @@ fn syntax_node_signature_range( match syntax_node_ptr.kind() { SyntaxKind::FUNCTION_DEF => { ast::FunctionDef::cast(syntax_node_ptr.to_node(parse.tree().syntax())) - .map(|f| f.signature_range()) - .unwrap_or_else(|| syntax_node_ptr.range()) + .map_or_else(|| syntax_node_ptr.range(), |f| f.signature_range()) } SyntaxKind::STRUCT_DEF => { ast::StructDef::cast(syntax_node_ptr.to_node(parse.tree().syntax())) - .map(|s| s.signature_range()) - .unwrap_or_else(|| syntax_node_ptr.range()) + .map_or_else(|| syntax_node_ptr.range(), |s| s.signature_range()) } _ => syntax_node_ptr.range(), } @@ -64,8 +62,7 @@ fn syntax_node_identifier_range( .to_node(parse.tree().syntax()) .children() .find(|n| n.kind() == SyntaxKind::NAME) - .map(|name| name.text_range()) - .unwrap_or_else(|| syntax_node_ptr.range()), + .map_or_else(|| syntax_node_ptr.range(), |name| name.text_range()), _ => syntax_node_ptr.range(), } } diff --git 
a/crates/mun_diagnostics/src/hir/missing_fields.rs b/crates/mun_diagnostics/src/hir/missing_fields.rs index f2950c91e..2e1de3dc2 100644 --- a/crates/mun_diagnostics/src/hir/missing_fields.rs +++ b/crates/mun_diagnostics/src/hir/missing_fields.rs @@ -55,8 +55,7 @@ impl<'db, 'diag, DB: mun_hir::HirDatabase> MissingFields<'db, 'diag, DB> { let location = ast::RecordLit::cast(diag.fields.to_node(&parse.syntax_node())) .and_then(|f| f.type_ref()) - .map(|t| t.syntax().text_range()) - .unwrap_or_else(|| diag.highlight_range()); + .map_or_else(|| diag.highlight_range(), |t| t.syntax().text_range()); MissingFields { db, diff --git a/crates/mun_hir/src/arena.rs b/crates/mun_hir/src/arena.rs index 97bbddfaf..a66ec290e 100644 --- a/crates/mun_hir/src/arena.rs +++ b/crates/mun_hir/src/arena.rs @@ -23,13 +23,13 @@ impl From for RawId { } impl fmt::Debug for RawId { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } impl fmt::Display for RawId { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } @@ -57,7 +57,7 @@ impl Eq for Idx {} impl Hash for Idx { fn hash(&self, state: &mut H) { - self.raw.hash(state) + self.raw.hash(state); } } @@ -77,7 +77,7 @@ impl fmt::Debug for Idx { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut type_name = std::any::type_name::(); if let Some(idx) = type_name.rfind(':') { - type_name = &type_name[idx + 1..] + type_name = &type_name[idx + 1..]; } write!(f, "Idx::<{}>({})", type_name, self.raw) } @@ -105,7 +105,7 @@ pub struct Arena { } impl fmt::Debug for Arena { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("Arena") .field("len", &self.len()) .field("data", &self.data) diff --git a/crates/mun_hir/src/code_model/function.rs b/crates/mun_hir/src/code_model/function.rs index c14fd0535..c91729082 100644 --- a/crates/mun_hir/src/code_model/function.rs +++ b/crates/mun_hir/src/code_model/function.rs @@ -156,7 +156,7 @@ impl Function { db.body_with_source_map(self.id.into()).1 } - pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink<'_>) { let body = self.body(db); body.add_diagnostics(db, self.into(), sink); let infer = self.infer(db); diff --git a/crates/mun_hir/src/code_model/module.rs b/crates/mun_hir/src/code_model/module.rs index 43186d2dc..8e9378511 100644 --- a/crates/mun_hir/src/code_model/module.rs +++ b/crates/mun_hir/src/code_model/module.rs @@ -66,7 +66,7 @@ impl Module { } /// Iterate over all diagnostics from this `Module` by placing them in the `sink` - pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink<'_>) { // Add diagnostics from the package definitions let package_defs = db.package_defs(self.id.package); package_defs.add_diagnostics(db.upcast(), self.id.local_id, sink); @@ -111,7 +111,7 @@ impl Module { let mut curr = self; while let Some(next) = curr.parent(db) { res.push(next); - curr = next + curr = next; } res } diff --git a/crates/mun_hir/src/code_model/struct.rs b/crates/mun_hir/src/code_model/struct.rs index a2f1e1f96..5ced2047f 100644 --- a/crates/mun_hir/src/code_model/struct.rs +++ b/crates/mun_hir/src/code_model/struct.rs @@ -122,7 +122,7 @@ impl Struct { db.lower_struct(self) } 
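// Editor's aside (not part of the patch): a sketch of the `Option::map_or_else`
// rewrite used throughout the surrounding mun_diagnostics and mun_hir hunks, where
// `.map(f).unwrap_or_else(default)` becomes `.map_or_else(default, f)`.
// `Name` here is a stand-in type, not the crate's real definition.
#[derive(Debug, PartialEq)]
enum Name {
    Missing,
    Text(String),
}

impl Name {
    fn missing() -> Self {
        Name::Missing
    }
}

fn field_name(parsed: Option<&str>) -> Name {
    // Equivalent to `parsed.map(|n| Name::Text(n.into())).unwrap_or_else(Name::missing)`.
    parsed.map_or_else(Name::missing, |n| Name::Text(n.into()))
}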
- pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink<'_>) { let data = self.data(db.upcast()); let lower = self.lower(db); lower.add_diagnostics(db, self.file_id(db), data.type_ref_source_map(), sink); @@ -159,7 +159,7 @@ pub enum StructKind { } impl fmt::Display for StructKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { StructKind::Record => write!(f, "record"), StructKind::Tuple => write!(f, "tuple"), @@ -200,7 +200,7 @@ impl StructData { let fields = r .fields() .map(|fd| FieldData { - name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing), + name: fd.name().map_or_else(Name::missing, |n| n.as_name()), type_ref: type_ref_builder.alloc_from_node_opt(fd.ascribed_type().as_ref()), visibility: RawVisibility::from_ast(fd.visibility()), }) diff --git a/crates/mun_hir/src/code_model/struct/validator.rs b/crates/mun_hir/src/code_model/struct/validator.rs index a2f630374..89265cd61 100644 --- a/crates/mun_hir/src/code_model/struct/validator.rs +++ b/crates/mun_hir/src/code_model/struct/validator.rs @@ -27,7 +27,7 @@ impl<'a> StructValidator<'a> { } } - pub fn validate_privacy(&self, sink: &mut DiagnosticSink) { + pub fn validate_privacy(&self, sink: &mut DiagnosticSink<'_>) { let resolver = self.strukt.id.resolver(self.db.upcast()); let struct_data = self.strukt.data(self.db.upcast()); @@ -59,7 +59,7 @@ impl<'a> StructValidator<'a> { .type_ref_source_map() .type_ref_syntax(type_ref) .unwrap(), - }) + }); }); } } diff --git a/crates/mun_hir/src/code_model/struct/validator/tests.rs b/crates/mun_hir/src/code_model/struct/validator/tests.rs index 0034c34e7..b7a3b4b69 100644 --- a/crates/mun_hir/src/code_model/struct/validator/tests.rs +++ b/crates/mun_hir/src/code_model/struct/validator/tests.rs @@ -38,5 +38,5 @@ fn test_private_leak_struct_fields() { @r###" 180..183: can't leak private type 392..395: can't leak private type - "###) + "###); } diff --git a/crates/mun_hir/src/code_model/type_alias.rs b/crates/mun_hir/src/code_model/type_alias.rs index 1b3c3fac9..91ca9029d 100644 --- a/crates/mun_hir/src/code_model/type_alias.rs +++ b/crates/mun_hir/src/code_model/type_alias.rs @@ -75,7 +75,7 @@ impl TypeAlias { ty } - pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink<'_>) { let data = self.data(db.upcast()); let lower = self.lower(db); lower.add_diagnostics(db, self.file_id(db), data.type_ref_source_map(), sink); diff --git a/crates/mun_hir/src/db.rs b/crates/mun_hir/src/db.rs index cf89201df..3fbab0e07 100644 --- a/crates/mun_hir/src/db.rs +++ b/crates/mun_hir/src/db.rs @@ -29,6 +29,7 @@ pub trait Upcast { /// Database which stores all significant input facts: source code and project model. #[salsa::query_group(SourceDatabaseStorage)] +#[allow(clippy::trait_duplication_in_bounds)] pub trait SourceDatabase: salsa::Database { /// Text of the file. #[salsa::input] diff --git a/crates/mun_hir/src/diagnostics.rs b/crates/mun_hir/src/diagnostics.rs index cfb0d513f..b67aa5945 100644 --- a/crates/mun_hir/src/diagnostics.rs +++ b/crates/mun_hir/src/diagnostics.rs @@ -4,9 +4,9 @@ use crate::{FileId, HirDatabase, IntTy, Name, Ty}; use mun_syntax::{ast, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange}; use std::{any::Any, fmt}; -/// Diagnostic defines mun_hir API for errors and warnings. 
+/// Diagnostic defines `mun_hir` API for errors and warnings. /// -/// It is used as a `dyn` object, which you can downcast to concrete diagnostics. DiagnosticSink +/// It is used as a `dyn` object, which you can downcast to concrete diagnostics. [`DiagnosticSink`] /// are structured, meaning that they include rich information which can be used by IDE to create /// fixes. /// @@ -73,7 +73,7 @@ impl<'a> DiagnosticSink<'a> { return; } } - (self.default_callback)(d) + (self.default_callback)(d); } } @@ -651,7 +651,7 @@ impl Diagnostic for IntLiteralTooLarge { } fn source(&self) -> InFile { - self.literal.clone().map(|ptr| ptr.into()) + self.literal.clone().map(Into::into) } fn as_any(&self) -> &(dyn Any + Send + 'static) { @@ -672,7 +672,7 @@ impl Diagnostic for LiteralOutOfRange { } fn source(&self) -> InFile { - self.literal.clone().map(|ptr| ptr.into()) + self.literal.clone().map(Into::into) } fn as_any(&self) -> &(dyn Any + Send + 'static) { @@ -693,7 +693,7 @@ impl Diagnostic for InvalidLiteralSuffix { } fn source(&self) -> InFile { - self.literal.clone().map(|ptr| ptr.into()) + self.literal.clone().map(Into::into) } fn as_any(&self) -> &(dyn Any + Send + 'static) { @@ -720,7 +720,7 @@ impl Diagnostic for InvalidFloatingPointLiteral { } fn source(&self) -> InFile { - self.literal.clone().map(|ptr| ptr.into()) + self.literal.clone().map(Into::into) } fn as_any(&self) -> &(dyn Any + Send + 'static) { @@ -740,7 +740,7 @@ impl Diagnostic for InvalidLiteral { } fn source(&self) -> InFile { - self.literal.clone().map(|ptr| ptr.into()) + self.literal.clone().map(Into::into) } fn as_any(&self) -> &(dyn Any + Send + 'static) { diff --git a/crates/mun_hir/src/display.rs b/crates/mun_hir/src/display.rs index 2da014b73..367cf54e1 100644 --- a/crates/mun_hir/src/display.rs +++ b/crates/mun_hir/src/display.rs @@ -8,7 +8,7 @@ pub struct HirFormatter<'a, 'b> { } pub trait HirDisplay { - fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result; + fn hir_fmt(&self, f: &mut HirFormatter<'_, '_>) -> fmt::Result; fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self> where Self: Sized, @@ -35,7 +35,7 @@ impl<'a, 'b> HirFormatter<'a, 'b> { } /// This allows using the `write!` macro directly with a `HirFormatter`. 
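// Editor's aside (not part of the patch): a sketch of the elided-lifetime
// annotations (`Formatter<'_>`, `Arguments<'_>`, `DiagnosticSink<'_>`) applied
// throughout these hunks; the `Wrapper` type is illustrative only.
use std::fmt;

struct Wrapper(u32);

impl fmt::Display for Wrapper {
    // The `'_` spells out that `Formatter` borrows, without naming the lifetime.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}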
- pub fn write_fmt(&mut self, args: fmt::Arguments) -> fmt::Result { + pub fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result { fmt::write(self.fmt, args) } } @@ -46,7 +46,7 @@ impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> where T: HirDisplay, { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.1.hir_fmt(&mut HirFormatter { db: self.0, fmt: f }) } } diff --git a/crates/mun_hir/src/expr.rs b/crates/mun_hir/src/expr.rs index 2a3479782..cd8a7e350 100644 --- a/crates/mun_hir/src/expr.rs +++ b/crates/mun_hir/src/expr.rs @@ -72,7 +72,7 @@ impl Body { let f = f.lookup(db); let src = f.source(db); collector = ExprCollector::new(def, src.file_id, db); - collector.collect_fn_body(&src.value) + collector.collect_fn_body(&src.value); } } @@ -113,11 +113,11 @@ impl Body { &self, db: &dyn HirDatabase, owner: DefWithBody, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { self.diagnostics .iter() - .for_each(|it| it.add_to(db, owner, sink)) + .for_each(|it| it.add_to(db, owner, sink)); } } @@ -369,8 +369,7 @@ pub enum ArithOp { impl Expr { pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) { match self { - Expr::Missing => {} - Expr::Path(_) => {} + Expr::Missing | Expr::Path(_) | Expr::Literal(_) => {} Expr::Block { statements, tail } => { for stmt in statements { match stmt { @@ -399,7 +398,6 @@ impl Expr { Expr::Field { expr, .. } | Expr::UnaryOp { expr, .. } => { f(*expr); } - Expr::Literal(_) => {} Expr::If { condition, then_branch, @@ -411,12 +409,7 @@ impl Expr { f(*else_expr); } } - Expr::Return { expr } => { - if let Some(expr) = expr { - f(*expr); - } - } - Expr::Break { expr } => { + Expr::Return { expr } | Expr::Break { expr } => { if let Some(expr) = expr { f(*expr); } @@ -625,7 +618,7 @@ impl<'a> ExprCollector<'a> { for err in errors { self.diagnostics - .push(ExprDiagnostic::LiteralError { expr: expr_id, err }) + .push(ExprDiagnostic::LiteralError { expr: expr_id, err }); } expr_id @@ -637,13 +630,13 @@ impl<'a> ExprCollector<'a> { for err in errors { self.diagnostics - .push(ExprDiagnostic::LiteralError { expr: expr_id, err }) + .push(ExprDiagnostic::LiteralError { expr: expr_id, err }); } expr_id } ast::LiteralKind::String(_lit) => { - let lit = Literal::String(Default::default()); + let lit = Literal::String(String::default()); self.alloc_expr(Expr::Literal(lit), syntax_ptr) } }, @@ -659,24 +652,24 @@ impl<'a> ExprCollector<'a> { let op = e.op_kind(); if let Some(op) = op { match op { - op @ BinOp::Add - | op @ BinOp::Subtract - | op @ BinOp::Multiply - | op @ BinOp::Divide - | op @ BinOp::Remainder - | op @ BinOp::LeftShift - | op @ BinOp::RightShift - | op @ BinOp::BitwiseAnd - | op @ BinOp::BitwiseOr - | op @ BinOp::BitwiseXor - | op @ BinOp::BooleanAnd - | op @ BinOp::BooleanOr - | op @ BinOp::Equals - | op @ BinOp::NotEqual - | op @ BinOp::Less - | op @ BinOp::LessEqual - | op @ BinOp::Greater - | op @ BinOp::GreatEqual => { + op @ (BinOp::Add + | BinOp::Subtract + | BinOp::Multiply + | BinOp::Divide + | BinOp::Remainder + | BinOp::LeftShift + | BinOp::RightShift + | BinOp::BitwiseAnd + | BinOp::BitwiseOr + | BinOp::BitwiseXor + | BinOp::BooleanAnd + | BinOp::BooleanOr + | BinOp::Equals + | BinOp::NotEqual + | BinOp::Less + | BinOp::LessEqual + | BinOp::Greater + | BinOp::GreatEqual) => { let op = match op { BinOp::Add => BinaryOp::ArithOp(ArithOp::Add), BinOp::Subtract => BinaryOp::ArithOp(ArithOp::Subtract), @@ -722,17 +715,17 @@ impl<'a> ExprCollector<'a> { 
syntax_ptr, ) } - op @ BinOp::Assign - | op @ BinOp::AddAssign - | op @ BinOp::SubtractAssign - | op @ BinOp::MultiplyAssign - | op @ BinOp::DivideAssign - | op @ BinOp::RemainderAssign - | op @ BinOp::LeftShiftAssign - | op @ BinOp::RightShiftAssign - | op @ BinOp::BitAndAssign - | op @ BinOp::BitOrAssign - | op @ BinOp::BitXorAssign => { + op @ (BinOp::Assign + | BinOp::AddAssign + | BinOp::SubtractAssign + | BinOp::MultiplyAssign + | BinOp::DivideAssign + | BinOp::RemainderAssign + | BinOp::LeftShiftAssign + | BinOp::RightShiftAssign + | BinOp::BitAndAssign + | BinOp::BitOrAssign + | BinOp::BitXorAssign) => { let assign_op = match op { BinOp::Assign => None, BinOp::AddAssign => Some(ArithOp::Add), @@ -770,8 +763,7 @@ impl<'a> ExprCollector<'a> { let path = e .path() .and_then(Path::from_ast) - .map(Expr::Path) - .unwrap_or(Expr::Missing); + .map_or(Expr::Missing, Expr::Path); self.alloc_expr(path, syntax_ptr) } ast::ExprKind::RecordLit(e) => { @@ -786,8 +778,7 @@ impl<'a> ExprCollector<'a> { .map(|field| RecordLitField { name: field .name_ref() - .map(|nr| nr.as_name()) - .unwrap_or_else(Name::missing), + .map_or_else(Name::missing, |nr| nr.as_name()), expr: if let Some(e) = field.expr() { self.collect_expr(e) } else if let Some(nr) = field.name_ref() { @@ -896,10 +887,7 @@ impl<'a> ExprCollector<'a> { fn collect_pat(&mut self, pat: ast::Pat) -> PatId { let pattern = match pat.kind() { ast::PatKind::BindPat(bp) => { - let name = bp - .name() - .map(|nr| nr.as_name()) - .unwrap_or_else(Name::missing); + let name = bp.name().map_or_else(Name::missing, |nr| nr.as_name()); Pat::Bind { name } } ast::PatKind::PlaceholderPat(_) => Pat::Wild, @@ -953,7 +941,7 @@ impl<'a> ExprCollector<'a> { } /// Removes any underscores from a string if present -fn strip_underscores(s: &str) -> Cow { +fn strip_underscores(s: &str) -> Cow<'_, str> { if s.contains('_') { let mut s = s.to_string(); s.retain(|c| c != '_'); @@ -975,15 +963,15 @@ fn filtered_float_lit(str: &str, suffix: Option<&str>, base: u32) -> (Literal, V if base != 10 { errors.push(LiteralError::NonDecimalFloat(base)); } - let kind = match suffix { - Some(suf) => match PrimitiveFloat::from_suffix(suf) { - Some(suf) => LiteralFloatKind::Suffixed(suf), - None => { - errors.push(LiteralError::InvalidFloatSuffix(suf.into())); - LiteralFloatKind::Unsuffixed - } - }, - None => LiteralFloatKind::Unsuffixed, + let kind = if let Some(suf) = suffix { + if let Some(suf) = PrimitiveFloat::from_suffix(suf) { + LiteralFloatKind::Suffixed(suf) + } else { + errors.push(LiteralError::InvalidFloatSuffix(suf.into())); + LiteralFloatKind::Unsuffixed + } + } else { + LiteralFloatKind::Unsuffixed }; let value = if base == 10 { @@ -1007,37 +995,36 @@ fn integer_lit(str: &str, suffix: Option<&str>) -> (Literal, Vec) let mut errors = Vec::new(); - let kind = match suffix { - Some(suf) => match PrimitiveInt::from_suffix(suf) { - Some(ty) => LiteralIntKind::Suffixed(ty), - None => { - // 1f32 is a valid number, but its an integer disguised as a float - if PrimitiveFloat::from_suffix(suf).is_some() { - return filtered_float_lit(&str, suffix, base); - } - - errors.push(LiteralError::InvalidIntSuffix(suf.into())); - LiteralIntKind::Unsuffixed + let kind = if let Some(suf) = suffix { + if let Some(ty) = PrimitiveInt::from_suffix(suf) { + LiteralIntKind::Suffixed(ty) + } else { + // 1f32 is a valid number, but its an integer disguised as a float + if PrimitiveFloat::from_suffix(suf).is_some() { + return filtered_float_lit(&str, suffix, base); } - }, - _ => 
LiteralIntKind::Unsuffixed, + + errors.push(LiteralError::InvalidIntSuffix(suf.into())); + LiteralIntKind::Unsuffixed + } + } else { + LiteralIntKind::Unsuffixed }; - let str = &str[if base != 10 { 2 } else { 0 }..]; - let (value, err) = match u128::from_str_radix(str, base) { - Ok(i) => (i, None), - Err(_) => { - // Small bases are lexed as if they were base 10, e.g. the string might be - // `0b10201`. This will cause the conversion above to fail. - let from_lexer = base < 10 - && str - .chars() - .any(|c| c.to_digit(10).map_or(false, |d| d >= base)); - if from_lexer { - (0, Some(LiteralError::LexerError)) - } else { - (0, Some(LiteralError::IntTooLarge)) - } + let str = &str[if base == 10 { 0 } else { 2 }..]; + let (value, err) = if let Ok(i) = u128::from_str_radix(str, base) { + (i, None) + } else { + // Small bases are lexed as if they were base 10, e.g. the string might be + // `0b10201`. This will cause the conversion above to fail. + let from_lexer = base < 10 + && str + .chars() + .any(|c| c.to_digit(10).map_or(false, |d| d >= base)); + if from_lexer { + (0, Some(LiteralError::LexerError)) + } else { + (0, Some(LiteralError::IntTooLarge)) } }; @@ -1382,7 +1369,7 @@ mod diagnostics { &self, db: &dyn HirDatabase, owner: DefWithBody, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { let source_map = owner.body_source_map(db); @@ -1409,13 +1396,13 @@ mod diagnostics { sink.push(InvalidLiteralSuffix { literal, suffix: SmolStr::new(suffix), - }) + }); } LiteralError::NonDecimalFloat(base) => { sink.push(InvalidFloatingPointLiteral { literal, base: *base, - }) + }); } } } diff --git a/crates/mun_hir/src/expr/scope.rs b/crates/mun_hir/src/expr/scope.rs index 7d9bd6733..301cddcf1 100644 --- a/crates/mun_hir/src/expr/scope.rs +++ b/crates/mun_hir/src/expr/scope.rs @@ -98,7 +98,7 @@ impl ExprScopes { name: name.clone(), pat, }; - self.scopes[scope].entries.push(entry) + self.scopes[scope].entries.push(entry); } p => p.walk_child_pats(|pat| self.add_bindings(body, scope, pat)), } diff --git a/crates/mun_hir/src/expr/validator.rs b/crates/mun_hir/src/expr/validator.rs index dd14764cc..7e9d3d00f 100644 --- a/crates/mun_hir/src/expr/validator.rs +++ b/crates/mun_hir/src/expr/validator.rs @@ -40,14 +40,14 @@ impl<'a> ExprValidator<'a> { } } - pub fn validate_body(&self, sink: &mut DiagnosticSink) { + pub fn validate_body(&self, sink: &mut DiagnosticSink<'_>) { self.validate_literal_ranges(sink); self.validate_uninitialized_access(sink); self.validate_extern(sink); self.validate_privacy(sink); } - pub fn validate_privacy(&self, sink: &mut DiagnosticSink) { + pub fn validate_privacy(&self, sink: &mut DiagnosticSink<'_>) { let resolver = self.func.id.resolver(self.db.upcast()); let fn_data = self.func.data(self.db.upcast()); let ret_type_ref = fn_data.ret_type(); @@ -78,11 +78,11 @@ impl<'a> ExprValidator<'a> { .type_ref_source_map() .type_ref_syntax(*type_ref) .unwrap(), - }) + }); }); } - pub fn validate_extern(&self, sink: &mut DiagnosticSink) { + pub fn validate_extern(&self, sink: &mut DiagnosticSink<'_>) { if !self.func.is_extern(self.db) { return; } @@ -109,7 +109,7 @@ impl<'a> ExprValidator<'a> { .unwrap(); sink.push(ExternNonPrimitiveParam { param: InFile::new(self.func.source(self.db.upcast()).file_id, arg_ptr), - }) + }); } } @@ -122,7 +122,7 @@ impl<'a> ExprValidator<'a> { .unwrap(); sink.push(ExternNonPrimitiveParam { param: InFile::new(self.func.source(self.db.upcast()).file_id, arg_ptr), - }) + }); } } } @@ -140,17 +140,17 @@ impl<'a> TypeAliasValidator<'a> { } /// 
Validates that the provided `TypeAlias` has a target type of alias. - pub fn validate_target_type_existence(&self, sink: &mut DiagnosticSink) { + pub fn validate_target_type_existence(&self, sink: &mut DiagnosticSink<'_>) { let src = self.type_alias.source(self.db.upcast()); if src.value.type_ref().is_none() { sink.push(FreeTypeAliasWithoutTypeRef { type_alias_def: src.map(|t| SyntaxNodePtr::new(t.syntax())), - }) + }); } } /// Validates that the provided `TypeAlias` is not leaking the privacy of its target type. - pub fn validate_target_type_privacy(&self, sink: &mut DiagnosticSink) { + pub fn validate_target_type_privacy(&self, sink: &mut DiagnosticSink<'_>) { let lower = self.type_alias.lower(self.db); let data = self.type_alias.data(self.db.upcast()); let target_ty = &lower[data.type_ref_id]; @@ -176,12 +176,12 @@ impl<'a> TypeAliasValidator<'a> { type_alias_def: src.map(|t| SyntaxNodePtr::new(t.syntax())), kind: kind.to_string(), name: name.to_string(), - }) + }); } } /// Validates the provided `TypeAlias` is not cyclic. - pub fn validate_acyclic(&self, sink: &mut DiagnosticSink) { + pub fn validate_acyclic(&self, sink: &mut DiagnosticSink<'_>) { let mut next_alias = Some(self.type_alias); let mut ids = Vec::new(); diff --git a/crates/mun_hir/src/expr/validator/literal_out_of_range.rs b/crates/mun_hir/src/expr/validator/literal_out_of_range.rs index b79eb0953..bc8057e82 100644 --- a/crates/mun_hir/src/expr/validator/literal_out_of_range.rs +++ b/crates/mun_hir/src/expr/validator/literal_out_of_range.rs @@ -6,7 +6,7 @@ use crate::{Expr, HirDisplay, Literal}; impl<'a> ExprValidator<'a> { /// Iterates over all expressions to determine if one of the literals has a value that is out of /// range of its type. - pub fn validate_literal_ranges(&self, sink: &mut DiagnosticSink) { + pub fn validate_literal_ranges(&self, sink: &mut DiagnosticSink<'_>) { self.body[self.body.body_expr].walk_child_exprs(move |expr_id| { let expr = &self.body[expr_id]; if let Expr::Literal(Literal::Int(lit)) = &expr { @@ -28,7 +28,7 @@ impl<'a> ExprValidator<'a> { sink.push(LiteralOutOfRange { literal, int_ty: *int_ty, - }) + }); } } _ => panic!( @@ -37,6 +37,6 @@ impl<'a> ExprValidator<'a> { ), } } - }) + }); } } diff --git a/crates/mun_hir/src/expr/validator/uninitialized_access.rs b/crates/mun_hir/src/expr/validator/uninitialized_access.rs index 3f7b20131..5e8fa6b1d 100644 --- a/crates/mun_hir/src/expr/validator/uninitialized_access.rs +++ b/crates/mun_hir/src/expr/validator/uninitialized_access.rs @@ -13,7 +13,7 @@ enum ExprKind { impl<'d> ExprValidator<'d> { /// Validates that all binding access has previously been initialized. - pub(super) fn validate_uninitialized_access(&self, sink: &mut DiagnosticSink) { + pub(super) fn validate_uninitialized_access(&self, sink: &mut DiagnosticSink<'_>) { let mut initialized_patterns = HashSet::new(); // Add all parameter patterns to the set of initialized patterns (they must have been @@ -33,7 +33,7 @@ impl<'d> ExprValidator<'d> { /// Validates that the specified expr does not access unitialized bindings fn validate_expr_access( &self, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, initialized_patterns: &mut HashSet, expr: ExprId, expr_side: ExprKind, @@ -97,7 +97,7 @@ impl<'d> ExprValidator<'d> { }; } } - Expr::UnaryOp { expr, .. } => { + Expr::UnaryOp { expr, .. } | Expr::Field { expr, .. 
} => { self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal); } Expr::BinaryOp { lhs, rhs, op } => { @@ -107,7 +107,7 @@ impl<'d> ExprValidator<'d> { _ => ExprKind::Normal, }; self.validate_expr_access(sink, initialized_patterns, *lhs, lhs_expr_kind); - self.validate_expr_access(sink, initialized_patterns, *rhs, ExprKind::Normal) + self.validate_expr_access(sink, initialized_patterns, *rhs, ExprKind::Normal); } Expr::Block { statements, tail } => { for statement in statements.iter() { @@ -139,21 +139,16 @@ impl<'d> ExprValidator<'d> { } } if let Some(tail) = tail { - self.validate_expr_access(sink, initialized_patterns, *tail, ExprKind::Normal) + self.validate_expr_access(sink, initialized_patterns, *tail, ExprKind::Normal); } } - Expr::Return { expr } => { + Expr::Return { expr } | Expr::Break { expr } => { if let Some(expr) = expr { - self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal) - } - } - Expr::Break { expr } => { - if let Some(expr) = expr { - self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal) + self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal); } } Expr::Loop { body } => { - self.validate_expr_access(sink, initialized_patterns, *body, ExprKind::Normal) + self.validate_expr_access(sink, initialized_patterns, *body, ExprKind::Normal); } Expr::While { condition, body } => { self.validate_expr_access(sink, initialized_patterns, *condition, ExprKind::Normal); @@ -177,9 +172,6 @@ impl<'d> ExprValidator<'d> { self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal); } } - Expr::Field { expr, .. } => { - self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal); - } Expr::Index { base, index } => { self.validate_expr_access(sink, initialized_patterns, *base, ExprKind::Normal); self.validate_expr_access(sink, initialized_patterns, *index, ExprKind::Normal); @@ -189,14 +181,13 @@ impl<'d> ExprValidator<'d> { self.validate_expr_access(sink, initialized_patterns, *expr, ExprKind::Normal); } } - Expr::Literal(_) => {} - Expr::Missing => {} + Expr::Literal(_) | Expr::Missing => {} } } fn validate_path_access( &self, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, initialized_patterns: &mut HashSet, resolver: &Resolver, path: &Path, @@ -219,7 +210,7 @@ impl<'d> ExprValidator<'d> { .unwrap() .value .either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()), - }) + }); } } diff --git a/crates/mun_hir/src/ids.rs b/crates/mun_hir/src/ids.rs index bc1b8d763..a249cbb51 100644 --- a/crates/mun_hir/src/ids.rs +++ b/crates/mun_hir/src/ids.rs @@ -6,7 +6,7 @@ use crate::{ }; use std::hash::{Hash, Hasher}; -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct ItemLoc { pub id: ItemTreeId, } @@ -25,26 +25,14 @@ impl Hash for ItemLoc { } } -impl Clone for ItemLoc { - fn clone(&self) -> ItemLoc { - *self - } -} - impl Copy for ItemLoc {} -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct AssocItemLoc { pub module: ModuleId, pub id: ItemTreeId, } -impl Clone for AssocItemLoc { - fn clone(&self) -> Self { - *self - } -} - impl Copy for AssocItemLoc {} impl PartialEq for AssocItemLoc { diff --git a/crates/mun_hir/src/item_scope.rs b/crates/mun_hir/src/item_scope.rs index 75f2cf677..fc7f827de 100644 --- a/crates/mun_hir/src/item_scope.rs +++ b/crates/mun_hir/src/item_scope.rs @@ -82,7 +82,7 @@ impl ItemScope { /// Adds an item definition to the list of definitions pub(crate) fn add_definition(&mut self, def: ItemDefinitionId) { - 
self.defs.push(def) + self.defs.push(def); } /// Adds a named item resolution into the scope. Returns true if adding the resolution changes @@ -215,8 +215,9 @@ impl PerNs<(ItemDefinitionId, Visibility)> { PerNs::types((def, vis)) } } - ItemDefinitionId::TypeAliasId(_) => PerNs::types((def, vis)), - ItemDefinitionId::PrimitiveType(_) => PerNs::types((def, vis)), + ItemDefinitionId::TypeAliasId(_) | ItemDefinitionId::PrimitiveType(_) => { + PerNs::types((def, vis)) + } ItemDefinitionId::ModuleId(_) => PerNs::types((def, vis)), } } diff --git a/crates/mun_hir/src/item_tree.rs b/crates/mun_hir/src/item_tree.rs index f59eab20b..c93c8de44 100644 --- a/crates/mun_hir/src/item_tree.rs +++ b/crates/mun_hir/src/item_tree.rs @@ -32,7 +32,7 @@ impl RawVisibilityId { } impl fmt::Debug for RawVisibilityId { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut f = f.debug_tuple("RawVisibilityId"); match *self { Self::PUB => f.field(&"pub"), @@ -93,7 +93,7 @@ struct ItemVisibilities { } impl ItemVisibilities { - fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId { + fn alloc(vis: RawVisibility) -> RawVisibilityId { match &vis { RawVisibility::Public => RawVisibilityId::PUB, RawVisibility::This => RawVisibilityId::PRIV, @@ -127,22 +127,17 @@ pub trait ItemTreeNode: Clone { /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type fn id_from_mod_item(mod_item: ModItem) -> Option>; - /// Upcasts a `FileItemTreeId` to a generic ModItem. + /// Upcasts a `FileItemTreeId` to a generic [`ModItem`]. fn id_to_mod_item(id: LocalItemTreeId) -> ModItem; } /// The typed Id of an item in an `ItemTree` +#[derive(Clone)] pub struct LocalItemTreeId { index: Idx, _p: PhantomData, } -impl Clone for LocalItemTreeId { - fn clone(&self) -> Self { - *self - } -} - impl Copy for LocalItemTreeId {} impl PartialEq for LocalItemTreeId { @@ -155,7 +150,7 @@ impl Eq for LocalItemTreeId {} impl Hash for LocalItemTreeId { fn hash(&self, state: &mut H) { - self.index.hash(state) + self.index.hash(state); } } @@ -165,7 +160,7 @@ impl fmt::Debug for LocalItemTreeId { } } -/// Represents the Id of an item in the ItemTree of a file. +/// Represents the Id of an item in the [`ItemTree`] of a file. pub type ItemTreeId = InFile>; macro_rules! 
mod_items { @@ -415,21 +410,8 @@ mod diagnostics { &self, db: &dyn HirDatabase, item_tree: &ItemTree, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { - match self { - ItemTreeDiagnostic::DuplicateDefinition { - name, - first, - second, - } => sink.push(DuplicateDefinition { - file: item_tree.file_id, - name: name.to_string(), - first_definition: ast_ptr_from_mod(db.upcast(), item_tree, *first), - definition: ast_ptr_from_mod(db.upcast(), item_tree, *second), - }), - }; - fn ast_ptr_from_mod( db: &dyn DefDatabase, item_tree: &ItemTree, @@ -450,6 +432,19 @@ mod diagnostics { } } } + + match self { + ItemTreeDiagnostic::DuplicateDefinition { + name, + first, + second, + } => sink.push(DuplicateDefinition { + file: item_tree.file_id, + name: name.to_string(), + first_definition: ast_ptr_from_mod(db.upcast(), item_tree, *first), + definition: ast_ptr_from_mod(db.upcast(), item_tree, *second), + }), + }; } } } diff --git a/crates/mun_hir/src/item_tree/lower.rs b/crates/mun_hir/src/item_tree/lower.rs index 00fc4a7e8..80e518bfc 100644 --- a/crates/mun_hir/src/item_tree/lower.rs +++ b/crates/mun_hir/src/item_tree/lower.rs @@ -2,7 +2,7 @@ use super::{ diagnostics, Field, Fields, Function, IdRange, ItemTree, ItemTreeData, ItemTreeNode, - LocalItemTreeId, ModItem, RawVisibilityId, Struct, StructDefKind, TypeAlias, + ItemVisibilities, LocalItemTreeId, ModItem, RawVisibilityId, Struct, StructDefKind, TypeAlias, }; use crate::item_tree::Import; use crate::type_ref::{TypeRefMap, TypeRefMapBuilder}; @@ -62,7 +62,7 @@ impl Context { pub(super) fn lower_module_items(mut self, item_owner: &impl ModuleItemOwner) -> ItemTree { let top_level = item_owner .items() - .flat_map(|item| self.lower_mod_item(&item)) + .filter_map(|item| self.lower_mod_item(&item)) .flat_map(|items| items.0) .collect::>(); @@ -82,7 +82,7 @@ impl Context { name: name.clone(), first: **first_item, second: *item, - }) + }); } else { set.insert(name.clone(), item); } @@ -111,7 +111,7 @@ impl Context { /// Lowers a `use` statement fn lower_use(&mut self, use_item: &ast::Use) -> Vec> { - let visibility = self.lower_visibility(use_item); + let visibility = lower_visibility(use_item); let ast_id = self.source_ast_id_map.ast_id(use_item); // Every use item can expand to many `Import`s. @@ -141,7 +141,7 @@ impl Context { /// Lowers a function fn lower_function(&mut self, func: &ast::FunctionDef) -> Option> { let name = func.name()?.as_name(); - let visibility = self.lower_visibility(func); + let visibility = lower_visibility(func); let mut types = TypeRefMap::builder(); // Lower all the params @@ -179,7 +179,7 @@ impl Context { /// Lowers a struct fn lower_struct(&mut self, strukt: &ast::StructDef) -> Option> { let name = strukt.name()?.as_name(); - let visibility = self.lower_visibility(strukt); + let visibility = lower_visibility(strukt); let mut types = TypeRefMap::builder(); let fields = self.lower_fields(&strukt.kind(), &mut types); let ast_id = self.source_ast_id_map.ast_id(strukt); @@ -228,7 +228,7 @@ impl Context { ) -> IdRange { let start = self.next_field_idx(); for field in fields.fields() { - if let Some(data) = self.lower_record_field(&field, types) { + if let Some(data) = lower_record_field(&field, types) { let _idx = self.data.fields.alloc(data); } } @@ -236,18 +236,6 @@ impl Context { IdRange::new(start..end) } - /// Lowers a record field (e.g. 
`a:i32`) - fn lower_record_field( - &mut self, - field: &ast::RecordFieldDef, - types: &mut TypeRefMapBuilder, - ) -> Option { - let name = field.name()?.as_name(); - let type_ref = types.alloc_from_node_opt(field.ascribed_type().as_ref()); - let res = Field { name, type_ref }; - Some(res) - } - /// Lowers tuple fields (e.g. `(i32, u8)`) fn lower_tuple_fields( &mut self, @@ -256,32 +244,20 @@ impl Context { ) -> IdRange { let start = self.next_field_idx(); for (i, field) in fields.fields().enumerate() { - let data = self.lower_tuple_field(i, &field, types); + let data = lower_tuple_field(i, &field, types); let _idx = self.data.fields.alloc(data); } let end = self.next_field_idx(); IdRange::new(start..end) } - /// Lowers a tuple field (e.g. `i32`) - fn lower_tuple_field( - &mut self, - idx: usize, - field: &ast::TupleFieldDef, - types: &mut TypeRefMapBuilder, - ) -> Field { - let name = Name::new_tuple_field(idx); - let type_ref = types.alloc_from_node_opt(field.type_ref().as_ref()); - Field { name, type_ref } - } - /// Lowers a type alias (e.g. `type Foo = Bar`) fn lower_type_alias( &mut self, type_alias: &ast::TypeAliasDef, ) -> Option> { let name = type_alias.name()?.as_name(); - let visibility = self.lower_visibility(type_alias); + let visibility = lower_visibility(type_alias); let mut types = TypeRefMap::builder(); let type_ref = type_alias.type_ref().map(|ty| types.alloc_from_node(&ty)); let ast_id = self.source_ast_id_map.ast_id(type_alias); @@ -296,15 +272,34 @@ impl Context { Some(self.data.type_aliases.alloc(res).into()) } - /// Lowers an `ast::VisibilityOwner` - fn lower_visibility(&mut self, item: &impl ast::VisibilityOwner) -> RawVisibilityId { - let vis = RawVisibility::from_ast(item.visibility()); - self.data.visibilities.alloc(vis) - } - /// Returns the `Idx` of the next `Field` fn next_field_idx(&self) -> Idx { let idx: u32 = self.data.fields.len().try_into().expect("too many fields"); Idx::from_raw(RawId::from(idx)) } } + +/// Lowers a record field (e.g. `a:i32`) +fn lower_record_field(field: &ast::RecordFieldDef, types: &mut TypeRefMapBuilder) -> Option { + let name = field.name()?.as_name(); + let type_ref = types.alloc_from_node_opt(field.ascribed_type().as_ref()); + let res = Field { name, type_ref }; + Some(res) +} + +/// Lowers a tuple field (e.g. `i32`) +fn lower_tuple_field( + idx: usize, + field: &ast::TupleFieldDef, + types: &mut TypeRefMapBuilder, +) -> Field { + let name = Name::new_tuple_field(idx); + let type_ref = types.alloc_from_node_opt(field.type_ref().as_ref()); + Field { name, type_ref } +} + +/// Lowers an `ast::VisibilityOwner` +fn lower_visibility(item: &impl ast::VisibilityOwner) -> RawVisibilityId { + let vis = RawVisibility::from_ast(item.visibility()); + ItemVisibilities::alloc(vis) +} diff --git a/crates/mun_hir/src/item_tree/tests.rs b/crates/mun_hir/src/item_tree/tests.rs index 96d5bcaad..2c584f848 100644 --- a/crates/mun_hir/src/item_tree/tests.rs +++ b/crates/mun_hir/src/item_tree/tests.rs @@ -38,7 +38,7 @@ fn format_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) -> fmt::Res writeln!(children, "{:?}", tree[field])?; } } - _ => {} + Fields::Unit => {} }; } ModItem::TypeAlias(item) => { diff --git a/crates/mun_hir/src/lib.rs b/crates/mun_hir/src/lib.rs index 69bbc855f..634bff6b5 100644 --- a/crates/mun_hir/src/lib.rs +++ b/crates/mun_hir/src/lib.rs @@ -1,5 +1,5 @@ //! HIR provides high-level, object-oriented access to Mun code. It is constructed by first parsing -//! 
Mun code with the mun_syntax crate and then it is lowered into HIR constructs, names are +//! Mun code with the `mun_syntax` crate and then it is lowered into HIR constructs, names are //! resolved, and type checking is performed. HIR is the input for both the compiler as well as the //! language server. diff --git a/crates/mun_hir/src/line_index.rs b/crates/mun_hir/src/line_index.rs index 4981793ab..8cf1aadff 100644 --- a/crates/mun_hir/src/line_index.rs +++ b/crates/mun_hir/src/line_index.rs @@ -135,8 +135,7 @@ impl LineIndex { let end_of_part = self .newlines .get(last_line as usize + 1) - .map(|u| usize::from(*u) - 1usize) - .unwrap_or(text_len); + .map_or(text_len, |u| usize::from(*u) - 1usize); Some(&text[start_of_part..end_of_part]) } diff --git a/crates/mun_hir/src/mock.rs b/crates/mun_hir/src/mock.rs index eab2c4274..5ca6bd82e 100644 --- a/crates/mun_hir/src/mock.rs +++ b/crates/mun_hir/src/mock.rs @@ -51,8 +51,8 @@ impl Upcast for MockDatabase { impl Default for MockDatabase { fn default() -> Self { let mut db: MockDatabase = MockDatabase { - storage: Default::default(), - events: Default::default(), + storage: salsa::Storage::default(), + events: Mutex::default(), }; db.set_target(Target::host_target().unwrap()); db diff --git a/crates/mun_hir/src/module_tree.rs b/crates/mun_hir/src/module_tree.rs index 4bb5b9a95..ff6570f0a 100644 --- a/crates/mun_hir/src/module_tree.rs +++ b/crates/mun_hir/src/module_tree.rs @@ -54,7 +54,7 @@ impl ModuleTree { db: &dyn SourceDatabase, package: PackageId, ) -> Arc { - use diagnostics::ModuleTreeDiagnostic::*; + use diagnostics::ModuleTreeDiagnostic::DuplicateModuleFile; let mut diagnostics = Vec::new(); @@ -77,26 +77,24 @@ impl ModuleTree { .into_iter() .map(Name::new) { - module_id = match modules[module_id].children.get(&path_segment) { - Some(id) => *id, - None => { - let child_module_id = modules.alloc(ModuleData { - parent: Some(module_id), - children: Default::default(), - file: None, - }); - - if !is_valid_module_name(path_segment.to_string()) { - diagnostics - .push(ModuleTreeDiagnostic::InvalidModuleName(child_module_id)) - } + module_id = if let Some(id) = modules[module_id].children.get(&path_segment) { + *id + } else { + let child_module_id = modules.alloc(ModuleData { + parent: Some(module_id), + children: FxHashMap::default(), + file: None, + }); + + if !is_valid_module_name(path_segment.to_string()) { + diagnostics.push(ModuleTreeDiagnostic::InvalidModuleName(child_module_id)); + } - modules[module_id] - .children - .insert(path_segment, child_module_id); + modules[module_id] + .children + .insert(path_segment, child_module_id); - child_module_id - } + child_module_id }; } @@ -163,7 +161,7 @@ fn path_to_module_path(path: &RelativePath) -> Vec { path.components().map(|c| c.as_str().to_owned()).collect() } else if path .file_stem() - .map(|stem| stem.to_lowercase()) + .map(str::to_lowercase) .expect("the file has an extension so it must also have a file stem") == "mod" { diff --git a/crates/mun_hir/src/name.rs b/crates/mun_hir/src/name.rs index 59f624199..8eecdf298 100644 --- a/crates/mun_hir/src/name.rs +++ b/crates/mun_hir/src/name.rs @@ -1,7 +1,7 @@ use mun_syntax::{ast, SmolStr}; use std::fmt; -/// `Name` is a wrapper around string, which is used in mun_hir for both references +/// `Name` is a wrapper around string, which is used in `mun_hir` for both references /// and declarations. 
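// Editor's aside (not part of the patch): a sketch of the
// `clippy::match_wildcard_for_single_variants` rewrite applied to
// `Name::as_tuple_index` in the name.rs hunk just below: with only one other
// variant, naming it instead of `_` turns a future variant addition into a
// non-exhaustive-match error here. `Repr` is a stand-in for the crate's enum.
enum Repr {
    Text(String),
    TupleField(usize),
}

fn as_tuple_index(repr: &Repr) -> Option<usize> {
    match repr {
        Repr::TupleField(idx) => Some(*idx),
        // Previously `_ => None`; now the remaining variant is spelled out.
        Repr::Text(_) => None,
    }
}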
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Name(Repr); @@ -13,7 +13,7 @@ enum Repr { } impl fmt::Display for Name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.0 { Repr::Text(text) => fmt::Display::fmt(&text, f), Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), @@ -57,7 +57,7 @@ impl Name { pub(crate) fn as_tuple_index(&self) -> Option { match self.0 { Repr::TupleField(idx) => Some(idx), - _ => None, + Repr::Text(_) => None, } } } diff --git a/crates/mun_hir/src/package_defs.rs b/crates/mun_hir/src/package_defs.rs index ee9efddfd..8ada57e70 100644 --- a/crates/mun_hir/src/package_defs.rs +++ b/crates/mun_hir/src/package_defs.rs @@ -30,10 +30,10 @@ impl PackageDefs { &self, db: &dyn DefDatabase, module: LocalModuleId, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { for diagnostic in self.diagnostics.iter() { - diagnostic.add_to(db, module, sink) + diagnostic.add_to(db, module, sink); } } } @@ -101,25 +101,8 @@ mod diagnostics { &self, db: &dyn DefDatabase, target_module: LocalModuleId, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { - if self.in_module != target_module { - return; - } - - match &self.kind { - DiagnosticKind::UnresolvedImport { ast, index } => { - if let Some(use_tree) = use_tree_ptr_from_ast(db.upcast(), ast, *index) { - sink.push(UnresolvedImport { use_tree }); - } - } - DiagnosticKind::DuplicateImport { ast, index } => { - if let Some(use_tree) = use_tree_ptr_from_ast(db.upcast(), ast, *index) { - sink.push(ImportDuplicateDefinition { use_tree }); - } - } - } - fn use_tree_ptr_from_ast( db: &dyn AstDatabase, ast: &AstId, @@ -132,13 +115,30 @@ mod diagnostics { InFile::new(ast.file_id, use_item), |_path, use_tree, _is_glob, _alias| { if cur == index { - tree = Some(use_tree.clone()) + tree = Some(use_tree.clone()); } cur += 1; }, ); tree.map(|t| InFile::new(ast.file_id, AstPtr::new(&t))) } + + if self.in_module != target_module { + return; + } + + match &self.kind { + DiagnosticKind::UnresolvedImport { ast, index } => { + if let Some(use_tree) = use_tree_ptr_from_ast(db.upcast(), ast, *index) { + sink.push(UnresolvedImport { use_tree }); + } + } + DiagnosticKind::DuplicateImport { ast, index } => { + if let Some(use_tree) = use_tree_ptr_from_ast(db.upcast(), ast, *index) { + sink.push(ImportDuplicateDefinition { use_tree }); + } + } + } } } } diff --git a/crates/mun_hir/src/package_defs/collector.rs b/crates/mun_hir/src/package_defs/collector.rs index d0693579d..6dde1ca78 100644 --- a/crates/mun_hir/src/package_defs/collector.rs +++ b/crates/mun_hir/src/package_defs/collector.rs @@ -1,5 +1,6 @@ use super::PackageDefs; use crate::{ + arena::map::ArenaMap, ids::ItemDefinitionId, ids::{FunctionLoc, Intern, StructLoc, TypeAliasLoc}, item_scope::ImportType, @@ -42,8 +43,9 @@ impl PartiallyResolvedImport { fn namespaces(&self) -> PerNs<(ItemDefinitionId, Visibility)> { match self { PartiallyResolvedImport::Unresolved => PerNs::none(), - PartiallyResolvedImport::Indeterminate(ns) => *ns, - PartiallyResolvedImport::Resolved(ns) => *ns, + PartiallyResolvedImport::Indeterminate(ns) | PartiallyResolvedImport::Resolved(ns) => { + *ns + } } } } @@ -101,14 +103,14 @@ pub(super) fn collect(db: &dyn DefDatabase, package_id: PackageId) -> PackageDef db, package_id, package_defs: PackageDefs { - modules: Default::default(), + modules: ArenaMap::default(), module_tree: db.module_tree(package_id), - diagnostics: Default::default(), + 
diagnostics: Vec::default(), }, - unresolved_imports: Default::default(), - resolved_imports: Default::default(), - glob_imports: Default::default(), - from_glob_import: Default::default(), + unresolved_imports: Vec::default(), + resolved_imports: Vec::default(), + glob_imports: FxHashMap::default(), + from_glob_import: PerNsGlobImports::default(), }; collector.collect(); collector.finish() @@ -133,60 +135,10 @@ struct DefCollector<'db> { impl<'db> DefCollector<'db> { /// Collects all information and stores it in the instance fn collect(&mut self) { - // Collect all definitions in each module - let module_tree = self.package_defs.module_tree.clone(); - - // Start by collecting the definitions from all modules. This ensures that, for every module, - // all local definitions are accessible. This is the starting point for the import - // resolution. - collect_modules_recursive(self, module_tree.root, None); - - // Now, as long as we have unresolved imports, try to resolve them, or part of them. - while !self.unresolved_imports.is_empty() { - // Keep track of whether we were able to resolve anything - let mut resolved_something = false; - - // Get all the current unresolved import directives - let imports = std::mem::take(&mut self.unresolved_imports); - - // For each import, try to resolve it with the current state. - for mut directive in imports { - // Resolve the import - directive.status = self.resolve_import(directive.module_id, &directive.import); - - // Check the status of the import, if the import is still considered unresolved, try - // again in the next round. - match directive.status { - PartiallyResolvedImport::Indeterminate(_) => { - self.record_resolved_import(&directive); - // FIXME: To avoid performance regression, we consider an import resolved - // if it is indeterminate (i.e not all namespace resolved). This might not - // completely resolve correctly in the future if we can have values and - // types with the same name. - self.resolved_imports.push(directive); - resolved_something = true; - } - PartiallyResolvedImport::Resolved(_) => { - self.record_resolved_import(&directive); - self.resolved_imports.push(directive); - resolved_something = true; - } - PartiallyResolvedImport::Unresolved => { - self.unresolved_imports.push(directive); - } - } - } - - // If nothing actually changed up to this point, stop resolving. - if !resolved_something { - break; - } - } - /// Recursively iterate over all modules in the `ModuleTree` and add them and their /// definitions to their corresponding `ItemScope`. fn collect_modules_recursive( - collector: &mut DefCollector, + collector: &mut DefCollector<'_>, module_id: LocalModuleId, parent: Option<(Name, LocalModuleId)>, ) { @@ -236,6 +188,57 @@ impl<'db> DefCollector<'db> { collect_modules_recursive(collector, child_module_id, Some((name, module_id))); } } + + // Collect all definitions in each module + let module_tree = self.package_defs.module_tree.clone(); + + // Start by collecting the definitions from all modules. This ensures that, for every module, + // all local definitions are accessible. This is the starting point for the import + // resolution. + collect_modules_recursive(self, module_tree.root, None); + + // Now, as long as we have unresolved imports, try to resolve them, or part of them. 
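// The `Vec::default()` / `FxHashMap::default()` style above replaces bare
// `Default::default()` calls so the constructed type is visible at the call site
// (clippy::default_trait_access). A small sketch using only standard-library types;
// the struct and field names are invented for illustration.
use std::collections::HashMap;

#[derive(Debug)]
struct CollectorState {
    unresolved: Vec<String>,
    by_name: HashMap<String, u32>,
}

impl CollectorState {
    fn new() -> Self {
        CollectorState {
            // Before: unresolved: Default::default(), by_name: Default::default(),
            unresolved: Vec::default(),
            by_name: HashMap::default(),
        }
    }
}

fn main() {
    let state = CollectorState::new();
    assert!(state.unresolved.is_empty());
    assert!(state.by_name.is_empty());
}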
+ while !self.unresolved_imports.is_empty() { + // Keep track of whether we were able to resolve anything + let mut resolved_something = false; + + // Get all the current unresolved import directives + let imports = std::mem::take(&mut self.unresolved_imports); + + // For each import, try to resolve it with the current state. + for mut directive in imports { + // Resolve the import + directive.status = self.resolve_import(directive.module_id, &directive.import); + + // Check the status of the import, if the import is still considered unresolved, try + // again in the next round. + #[allow(clippy::match_same_arms)] + match directive.status { + PartiallyResolvedImport::Indeterminate(_) => { + self.record_resolved_import(&directive); + // FIXME: To avoid performance regression, we consider an import resolved + // if it is indeterminate (i.e not all namespace resolved). This might not + // completely resolve correctly in the future if we can have values and + // types with the same name. + self.resolved_imports.push(directive); + resolved_something = true; + } + PartiallyResolvedImport::Resolved(_) => { + self.record_resolved_import(&directive); + self.resolved_imports.push(directive); + resolved_something = true; + } + PartiallyResolvedImport::Unresolved => { + self.unresolved_imports.push(directive); + } + } + } + + // If nothing actually changed up to this point, stop resolving. + if !resolved_something { + break; + } + } } /// Given an import, try to resolve it. @@ -279,6 +282,7 @@ impl<'db> DefCollector<'db> { ); if import.is_glob { + #[allow(clippy::match_same_arms)] match resolution.take_types() { Some((ItemDefinitionId::ModuleId(m), _)) => { let scope = &self.package_defs[m.local_id]; @@ -375,10 +379,8 @@ impl<'db> DefCollector<'db> { resolutions: &[ImportResolution], depth: usize, ) { - if depth > 100 { - // prevent stack overflows (but this shouldn't be possible) - panic!("infinite recursion in glob imports!"); - } + // prevent stack overflows (but this shouldn't be possible) + assert!(depth <= 100, "infinite recursion in glob imports!"); let scope = &mut self.package_defs.modules[import_module_id]; @@ -415,7 +417,7 @@ impl<'db> DefCollector<'db> { import_module_id, InFile::new(import_source.file_id, import_data.ast_id), import_data.index, - )) + )); } } None => { @@ -456,7 +458,7 @@ impl<'db> DefCollector<'db> { glob_import_source, resolutions, depth + 1, - ) + ); } } @@ -476,7 +478,7 @@ impl<'db> DefCollector<'db> { directive.module_id, InFile::new(import.source.file_id, import_data.ast_id), import_data.index, - )) + )); } package_defs diff --git a/crates/mun_hir/src/package_defs/tests.rs b/crates/mun_hir/src/package_defs/tests.rs index b6799ae0c..5313ecf99 100644 --- a/crates/mun_hir/src/package_defs/tests.rs +++ b/crates/mun_hir/src/package_defs/tests.rs @@ -238,8 +238,7 @@ fn tree_for_module( "mod {}", module .name(db) - .map(|name| name.to_string()) - .unwrap_or_else(|| "mod".to_owned()) + .map_or_else(|| "mod".to_owned(), |name| name.to_string()) )); // Add module level diagnostics @@ -329,7 +328,7 @@ fn tree_for_module( // Iterate over all children of this module for child_module in module.children(db) { - node.push_node(tree_for_module(db, package_defs, child_module)) + node.push_node(tree_for_module(db, package_defs, child_module)); } node @@ -343,8 +342,7 @@ fn fully_qualified_module_path(db: &dyn HirDatabase, module: Module) -> String { .into_iter() .map(|m| { m.name(db) - .map(|name| name.to_string()) - .unwrap_or_else(|| "package".to_owned()) + .map_or_else(|| 
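// The collector above resolves imports with a fixed-point loop: keep re-trying the
// unresolved set until one full pass resolves nothing new. A hedged, generic sketch
// of that shape only; this is not the collector's real API, and the names are invented.
fn resolve_fixed_point<T>(
    mut unresolved: Vec<T>,
    mut try_resolve: impl FnMut(&T) -> bool,
) -> (Vec<T>, Vec<T>) {
    let mut resolved = Vec::new();
    loop {
        let mut progressed = false;
        // Take the current unresolved set; anything still unresolved is pushed back.
        for item in std::mem::take(&mut unresolved) {
            if try_resolve(&item) {
                resolved.push(item);
                progressed = true;
            } else {
                unresolved.push(item);
            }
        }
        // Stop once a whole pass makes no progress (the `resolved_something` flag above).
        if !progressed {
            break;
        }
    }
    (resolved, unresolved)
}

fn main() {
    // "b" resolves only after "a" has been seen, which takes a second pass; "c" never does.
    let mut seen = std::collections::HashSet::new();
    let (resolved, unresolved) = resolve_fixed_point(vec!["b", "a", "c"], |name| match *name {
        "a" => {
            seen.insert("a");
            true
        }
        "b" => seen.contains("a"),
        _ => false,
    });
    assert_eq!(resolved, vec!["a", "b"]);
    assert_eq!(unresolved, vec!["c"]);
}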
"package".to_owned(), |name| name.to_string()) }) .rev(), "::".to_string(), diff --git a/crates/mun_hir/src/path.rs b/crates/mun_hir/src/path.rs index df488aecc..e2541fb25 100644 --- a/crates/mun_hir/src/path.rs +++ b/crates/mun_hir/src/path.rs @@ -141,11 +141,11 @@ fn lower_use_tree( } } if let Some(path) = convert_path(prefix, &ast_path) { - cb(path, tree, is_glob, alias) + cb(path, tree, is_glob, alias); } } else if is_glob { if let Some(prefix) = prefix { - cb(prefix, tree, is_glob, None) + cb(prefix, tree, is_glob, None); } } } diff --git a/crates/mun_hir/src/resolve.rs b/crates/mun_hir/src/resolve.rs index fccabe42f..106c72e38 100644 --- a/crates/mun_hir/src/resolve.rs +++ b/crates/mun_hir/src/resolve.rs @@ -126,7 +126,7 @@ impl Resolver { fn module_scope(&self) -> Option<(&PackageDefs, LocalModuleId)> { self.scopes.iter().rev().find_map(|scope| match scope { Scope::ModuleScope(m) => Some((&*m.package_defs, m.module_id)), - _ => None, + Scope::ExprScope(_) => None, }) } @@ -150,6 +150,22 @@ impl Resolver { db: &dyn DefDatabase, path: &Path, ) -> Option { + fn to_value_ns( + per_ns: PerNs<(ItemDefinitionId, Visibility)>, + ) -> Option<(ValueNs, Visibility)> { + let (res, vis) = match per_ns.take_values()? { + (ItemDefinitionId::FunctionId(id), vis) => (ValueNs::FunctionId(id), vis), + (ItemDefinitionId::StructId(id), vis) => (ValueNs::StructId(id), vis), + ( + ItemDefinitionId::ModuleId(_) + | ItemDefinitionId::TypeAliasId(_) + | ItemDefinitionId::PrimitiveType(_), + _, + ) => return None, + }; + Some((res, vis)) + } + let segments_count = path.segments.len(); let first_name = path.segments.first()?; for scope in self.scopes.iter().rev() { @@ -185,8 +201,10 @@ impl Resolver { (ItemDefinitionId::PrimitiveType(id), _) => { TypeNs::PrimitiveType(id) } - (ItemDefinitionId::ModuleId(_), _) - | (ItemDefinitionId::FunctionId(_), _) => return None, + ( + ItemDefinitionId::ModuleId(_) | ItemDefinitionId::FunctionId(_), + _, + ) => return None, }; Some(ResolveValueResult::Partial(ty, idx)) } @@ -194,20 +212,8 @@ impl Resolver { } }; } - return None; - fn to_value_ns( - per_ns: PerNs<(ItemDefinitionId, Visibility)>, - ) -> Option<(ValueNs, Visibility)> { - let (res, vis) = match per_ns.take_values()? { - (ItemDefinitionId::FunctionId(id), vis) => (ValueNs::FunctionId(id), vis), - (ItemDefinitionId::StructId(id), vis) => (ValueNs::StructId(id), vis), - (ItemDefinitionId::ModuleId(_), _) - | (ItemDefinitionId::TypeAliasId(_), _) - | (ItemDefinitionId::PrimitiveType(_), _) => return None, - }; - Some((res, vis)) - } + None } /// Resolves the specified `path` as a value. Returns either `None` or the resolved path value. 
@@ -229,18 +235,6 @@ impl Resolver { db: &dyn DefDatabase, path: &Path, ) -> Option<(TypeNs, Visibility, Option)> { - for scope in self.scopes.iter().rev() { - match scope { - Scope::ExprScope(_) => continue, - Scope::ModuleScope(m) => { - let (module_def, idx) = - m.package_defs.resolve_path_in_module(db, m.module_id, path); - let (res, vis) = to_type_ns(module_def)?; - return Some((res, vis, idx)); - } - } - } - return None; fn to_type_ns( per_ns: PerNs<(ItemDefinitionId, Visibility)>, ) -> Option<(TypeNs, Visibility)> { @@ -248,13 +242,26 @@ impl Resolver { (ItemDefinitionId::StructId(id), vis) => (TypeNs::StructId(id), vis), (ItemDefinitionId::TypeAliasId(id), vis) => (TypeNs::TypeAliasId(id), vis), (ItemDefinitionId::PrimitiveType(id), vis) => (TypeNs::PrimitiveType(id), vis), - - (ItemDefinitionId::ModuleId(_), _) | (ItemDefinitionId::FunctionId(_), _) => { + (ItemDefinitionId::ModuleId(_) | ItemDefinitionId::FunctionId(_), _) => { return None; } }; Some((res, vis)) } + + for scope in self.scopes.iter().rev() { + match scope { + Scope::ExprScope(_) => continue, + Scope::ModuleScope(m) => { + let (module_def, idx) = + m.package_defs.resolve_path_in_module(db, m.module_id, path); + let (res, vis) = to_type_ns(module_def)?; + return Some((res, vis, idx)); + } + } + } + + None } /// Resolves the specified `path` as a type. Returns either `None` or the resolved path type. @@ -283,14 +290,14 @@ impl Resolver { pub fn body_owner(&self) -> Option { self.scopes.iter().rev().find_map(|scope| match scope { Scope::ExprScope(it) => Some(it.owner), - _ => None, + Scope::ModuleScope(_) => None, }) } /// Calls the `visitor` for each entry in scope. pub fn visit_all_names(&self, db: &dyn DefDatabase, visitor: &mut dyn FnMut(Name, ScopeDef)) { for scope in self.scopes.iter().rev() { - scope.visit_names(db, visitor) + scope.visit_names(db, visitor); } } } @@ -305,7 +312,7 @@ impl Scope { .for_each(|(name, def)| visitor(name.clone(), ScopeDef::PerNs(def))); BUILTIN_SCOPE.iter().for_each(|(name, &def)| { visitor(name.clone(), ScopeDef::PerNs(def)); - }) + }); } Scope::ExprScope(scope) => scope .expr_scopes diff --git a/crates/mun_hir/src/semantics.rs b/crates/mun_hir/src/semantics.rs index 3b77c8443..eb63f0ae5 100644 --- a/crates/mun_hir/src/semantics.rs +++ b/crates/mun_hir/src/semantics.rs @@ -40,8 +40,8 @@ impl<'db> Semantics<'db> { pub fn new(db: &'db dyn HirDatabase) -> Self { Self { db, - source_file_to_file: Default::default(), - source_to_definition_cache: Default::default(), + source_file_to_file: RefCell::default(), + source_to_definition_cache: RefCell::default(), } } @@ -102,7 +102,10 @@ impl<'db> Semantics<'db> { } /// Runs a function with a `SourceToDefContext` which can be used to cache definition queries. - fn with_source_to_def_context T, T>(&self, f: F) -> T { + fn with_source_to_def_context) -> T, T>( + &self, + f: F, + ) -> T { let mut cache = self.source_to_definition_cache.borrow_mut(); let mut context = SourceToDefContext { db: self.db, @@ -171,14 +174,14 @@ impl ScopeDef { // Some things are returned as both a value and a type, such as a unit struct. 
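// Several hunks above (`to_value_ns`, `to_type_ns`, `use_tree_ptr_from_ast`, ...) only
// move a nested helper `fn` ahead of the executable statements to satisfy
// clippy::items_after_statements; behaviour is unchanged. A minimal illustration with
// made-up names:
fn describe(len: usize) -> String {
    // The helper item is declared before any statements of the enclosing function...
    fn plural(n: usize) -> &'static str {
        if n == 1 {
            ""
        } else {
            "s"
        }
    }

    // ...and the straight-line code follows it.
    format!("{len} item{}", plural(len))
}

fn main() {
    assert_eq!(describe(1), "1 item");
    assert_eq!(describe(3), "3 items");
}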
items.push(ScopeDef::ModuleDef(ty.0.into())); if ty != val { - items.push(ScopeDef::ModuleDef(val.0.into())) + items.push(ScopeDef::ModuleDef(val.0.into())); } } (None, None) => {} }; if items.is_empty() { - items.push(ScopeDef::Unknown) + items.push(ScopeDef::Unknown); } items @@ -221,7 +224,7 @@ impl<'a> SemanticsScope<'a> { ScopeDef::Local(Local { parent, pat_id }) } }; - visit(name, def) - }) + visit(name, def); + }); } } diff --git a/crates/mun_hir/src/semantics/source_to_def.rs b/crates/mun_hir/src/semantics/source_to_def.rs index 27703c181..a89f83392 100644 --- a/crates/mun_hir/src/semantics/source_to_def.rs +++ b/crates/mun_hir/src/semantics/source_to_def.rs @@ -45,7 +45,7 @@ impl SourceToDefContext<'_, '_> { /// Find the `FunctionId` associated with the specified syntax tree node. fn fn_to_def(&mut self, src: InFile) -> Option { - let container = self.find_container(src.as_ref().map(|it| it.syntax()))?; + let container = self.find_container(src.as_ref().map(AstNode::syntax))?; let db = self.db; let def_map = &*self .cache @@ -124,11 +124,6 @@ impl SourceToDef for ModuleId { impl SourceToDef for ItemScope { fn source_to_def_map(&self, db: &dyn HirDatabase) -> SourceToDefMap { - let mut result = SourceToDefMap::default(); - self.declarations() - .for_each(|item| add_module_def(db.upcast(), &mut result, item)); - return result; - fn add_module_def(db: &dyn DefDatabase, map: &mut SourceToDefMap, item: ItemDefinitionId) { match item { ItemDefinitionId::FunctionId(id) => { @@ -146,6 +141,12 @@ impl SourceToDef for ItemScope { _ => {} } } + + let mut result = SourceToDefMap::default(); + self.declarations() + .for_each(|item| add_module_def(db.upcast(), &mut result, item)); + + result } } diff --git a/crates/mun_hir/src/source_id.rs b/crates/mun_hir/src/source_id.rs index 77fe83831..436acee10 100644 --- a/crates/mun_hir/src/source_id.rs +++ b/crates/mun_hir/src/source_id.rs @@ -25,17 +25,12 @@ impl AstId { } } -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct FileAstId { raw: ErasedFileAstId, _ty: PhantomData N>, } -impl Clone for FileAstId { - fn clone(&self) -> FileAstId { - *self - } -} impl Copy for FileAstId {} impl PartialEq for FileAstId { @@ -87,7 +82,7 @@ impl AstIdMap { } } - /// Constructs a new `AstIdMap` from a root SyntaxNode. + /// Constructs a new `AstIdMap` from a root [`SyntaxNode`]. /// `node` must be the root of a syntax tree. fn from_source(node: &SyntaxNode) -> AstIdMap { assert!(node.parent().is_none()); diff --git a/crates/mun_hir/src/tests.rs b/crates/mun_hir/src/tests.rs index 01eb80ae7..29b7f6839 100644 --- a/crates/mun_hir/src/tests.rs +++ b/crates/mun_hir/src/tests.rs @@ -6,7 +6,7 @@ use crate::{ }; use std::sync::Arc; -/// This function tests that the ModuleData of a module does not change if the contents of a function +/// This function tests that the `ModuleData` of a module does not change if the contents of a function /// is changed. 
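// The `.map(AstNode::syntax)` and `.map(str::to_lowercase)` edits above pass a method
// path instead of a trivial closure (clippy::redundant_closure_for_method_calls).
// Equivalent stand-alone illustration:
fn main() {
    let names: Vec<&str> = vec!["Foo", "Bar"];

    // Before: names.into_iter().map(|name| name.to_lowercase()).collect()
    let lowered: Vec<String> = names.into_iter().map(str::to_lowercase).collect();

    assert_eq!(lowered, vec!["foo", "bar"]);
}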
#[test] fn check_package_defs_does_not_change() { @@ -25,7 +25,7 @@ fn check_package_defs_does_not_change() { assert!( format!("{events:?}").contains("package_defs"), "{events:#?}" - ) + ); } db.set_file_text( file_id, @@ -45,6 +45,6 @@ fn check_package_defs_does_not_change() { assert!( !format!("{events:?}").contains("package_defs"), "{events:#?}" - ) + ); } } diff --git a/crates/mun_hir/src/ty.rs b/crates/mun_hir/src/ty.rs index 1365c6909..1db78931b 100644 --- a/crates/mun_hir/src/ty.rs +++ b/crates/mun_hir/src/ty.rs @@ -251,11 +251,10 @@ impl Ty { match (self.interned(), other.interned()) { (TyKind::Struct(s1), TyKind::Struct(s2)) => s1 == s2, (TyKind::Tuple(_, substs1), TyKind::Tuple(_, substs2)) => substs1 == substs2, - (TyKind::Array(_), TyKind::Array(_)) => true, + (TyKind::Array(_), TyKind::Array(_)) | (TyKind::Bool, TyKind::Bool) => true, (TyKind::Float(f1), TyKind::Float(f2)) => f1 == f2, (TyKind::Int(i1), TyKind::Int(i2)) => i1 == i2, (TyKind::FnDef(def, _), TyKind::FnDef(def2, _)) => def == def2, - (TyKind::Bool, TyKind::Bool) => true, _ => false, } } @@ -288,9 +287,7 @@ impl Substitution { /// Assumes this instance has a single element and returns it. Panics if this instance doesnt /// contain exactly one element. pub fn as_single(&self) -> &Ty { - if self.0.len() != 1 { - panic!("expected substs of len 1, got {self:?}"); - } + assert!(self.0.len() == 1, "expected substs of len 1, got {self:?}"); &self.0[0] } } @@ -359,7 +356,7 @@ impl FnSig { } impl HirDisplay for Ty { - fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result { + fn hir_fmt(&self, f: &mut HirFormatter<'_, '_>) -> fmt::Result { match self.interned() { TyKind::Struct(s) => write!(f, "{}", s.name(f.db)), TyKind::Float(ty) => write!(f, "{ty}"), @@ -403,7 +400,7 @@ impl HirDisplay for Ty { } impl HirDisplay for &Ty { - fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result { + fn hir_fmt(&self, f: &mut HirFormatter<'_, '_>) -> fmt::Result { HirDisplay::hir_fmt(*self, f) } } @@ -436,11 +433,11 @@ impl TypeWalk for Ty { TyKind::Array(elem_ty) => f(elem_ty), _ => { if let Some(substs) = self.type_parameters() { - substs.walk(f) + substs.walk(f); } } } - f(self) + f(self); } fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { @@ -448,10 +445,10 @@ impl TypeWalk for Ty { TyKind::Array(elem_ty) => f(elem_ty), _ => { if let Some(substs) = self.type_parameters_mut() { - substs.walk_mut(f) + substs.walk_mut(f); } } } - f(self) + f(self); } } diff --git a/crates/mun_hir/src/ty/infer.rs b/crates/mun_hir/src/ty/infer.rs index 63aad36cb..69e5a9099 100644 --- a/crates/mun_hir/src/ty/infer.rs +++ b/crates/mun_hir/src/ty/infer.rs @@ -79,11 +79,11 @@ impl InferenceResult { &self, db: &dyn HirDatabase, owner: Function, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { self.diagnostics .iter() - .for_each(|it| it.add_to(db, owner, sink)) + .for_each(|it| it.add_to(db, owner, sink)); } } @@ -223,7 +223,7 @@ impl<'a> InferenceResultBuilder<'a> { } // Resolve the return type - self.return_ty = self.resolve_type(self.body.ret_type()) + self.return_ty = self.resolve_type(self.body.ret_type()); } /// Record the type of the specified pattern and all sub-patterns. @@ -285,11 +285,6 @@ impl<'a> InferenceResultBuilder<'a> { self.resolve_ty_as_far_as_possible(ty) } - /// Returns a type used for errors - fn error_type(&self) -> Ty { - TyKind::Unknown.intern() - } - /// Infer the type of the given expression. Returns the type of the expression. 
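// The `assert!` rewrites above (e.g. in `Substitution::as_single` and the glob-import
// depth check) replace an explicit `if !cond { panic!(..) }` with a single assertion
// carrying the same message (clippy::manual_assert). Stand-alone sketch with an
// illustrative slice instead of Mun's real substitution type:
fn as_single(substs: &[u32]) -> u32 {
    // Before:
    // if substs.len() != 1 {
    //     panic!("expected substs of len 1, got {substs:?}");
    // }
    assert!(substs.len() == 1, "expected substs of len 1, got {substs:?}");
    substs[0]
}

fn main() {
    assert_eq!(as_single(&[42]), 42);
}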
fn infer_expr_inner( &mut self, @@ -298,12 +293,12 @@ impl<'a> InferenceResultBuilder<'a> { check_params: &CheckParams, ) -> Ty { let ty = match &self.body[tgt_expr] { - Expr::Missing => self.error_type(), + Expr::Missing => error_type(), Expr::Path(p) => { // FIXME this could be more efficient... let resolver = resolver_for_expr(self.db.upcast(), self.body.owner(), tgt_expr); self.infer_path_expr(&resolver, p, tgt_expr, check_params) - .unwrap_or_else(|| self.error_type()) + .unwrap_or_else(error_type) } Expr::If { condition, @@ -324,7 +319,7 @@ impl<'a> InferenceResultBuilder<'a> { self.diagnostics.push(InferenceDiagnostic::InvalidLhs { id: tgt_expr, lhs: *lhs, - }) + }); } }; let rhs_expected = op::binary_op_rhs_expectation(*op, lhs_ty.clone()); @@ -334,12 +329,12 @@ impl<'a> InferenceResultBuilder<'a> { id: tgt_expr, lhs: lhs_ty, rhs: rhs_expected.clone(), - }) + }); } let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expected)); op::binary_op_return_ty(*op, rhs_ty) } - _ => self.error_type(), + _ => error_type(), }, Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), Expr::Call { callee: call, args } => self.infer_call(tgt_expr, *call, args, expected), @@ -397,9 +392,10 @@ impl<'a> InferenceResultBuilder<'a> { for (idx, field) in fields.iter().enumerate() { let field_ty = def_id .as_ref() - .and_then(|it| match it.field(self.db, &field.name) { - Some(field) => Some(field), - None => { + .and_then(|it| { + if let Some(field) = it.field(self.db, &field.name) { + Some(field) + } else { self.diagnostics.push(InferenceDiagnostic::NoSuchField { id: tgt_expr, field: idx, @@ -407,7 +403,7 @@ impl<'a> InferenceResultBuilder<'a> { None } }) - .map_or(self.error_type(), |field| field.ty(self.db)); + .map_or(error_type(), |field| field.ty(self.db)); self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); } if let Some(expr) = spread { @@ -420,6 +416,7 @@ impl<'a> InferenceResultBuilder<'a> { } Expr::Field { expr, name } => { let receiver_ty = self.infer_expr(*expr, &Expectation::none()); + #[allow(clippy::single_match_else)] match receiver_ty.interned() { TyKind::Struct(s) => { match s.field(self.db, name).map(|field| field.ty(self.db)) { @@ -432,7 +429,7 @@ impl<'a> InferenceResultBuilder<'a> { name: name.clone(), }); - self.error_type() + error_type() } } } @@ -441,7 +438,7 @@ impl<'a> InferenceResultBuilder<'a> { id: *expr, found: receiver_ty, }); - self.error_type() + error_type() } } } @@ -459,21 +456,20 @@ impl<'a> InferenceResultBuilder<'a> { id: *expr, ty: inner_ty, }); - self.error_type() + error_type() } }, UnaryOp::Neg => match inner_ty.interned() { TyKind::Float(_) | TyKind::Int(_) - | TyKind::InferenceVar(InferTy::Int(_)) - | TyKind::InferenceVar(InferTy::Float(_)) => inner_ty, + | TyKind::InferenceVar(InferTy::Int(_) | InferTy::Float(_)) => inner_ty, _ => { self.diagnostics .push(InferenceDiagnostic::CannotApplyUnaryOp { id: *expr, ty: inner_ty, }); - self.error_type() + error_type() } }, } @@ -507,7 +503,7 @@ impl<'a> InferenceResultBuilder<'a> { match base_ty.interned() { TyKind::Array(ty) => ty.clone(), - _ => self.error_type(), + _ => error_type(), } } }; @@ -528,32 +524,28 @@ impl<'a> InferenceResultBuilder<'a> { ) -> Ty { self.infer_expr(condition, &Expectation::has_type(TyKind::Bool.intern())); let then_ty = self.infer_expr_coerce(then_branch, expected); - match else_branch { - Some(else_branch) => { - let else_ty = self.infer_expr_coerce(else_branch, expected); - match self.coerce_merge_branch(&then_ty, &else_ty) { - 
Some(ty) => ty, - None => { - self.diagnostics - .push(InferenceDiagnostic::IncompatibleBranches { - id: tgt_expr, - then_ty: then_ty.clone(), - else_ty: else_ty.clone(), - }); - then_ty - } - } + if let Some(else_branch) = else_branch { + let else_ty = self.infer_expr_coerce(else_branch, expected); + if let Some(ty) = self.coerce_merge_branch(&then_ty, &else_ty) { + ty + } else { + self.diagnostics + .push(InferenceDiagnostic::IncompatibleBranches { + id: tgt_expr, + then_ty: then_ty.clone(), + else_ty: else_ty.clone(), + }); + then_ty } - None => { - if !self.coerce(&then_ty, &Ty::unit()) { - self.diagnostics - .push(InferenceDiagnostic::MissingElseBranch { - id: tgt_expr, - then_ty, - }) - } - Ty::unit() + } else { + if !self.coerce(&then_ty, &Ty::unit()) { + self.diagnostics + .push(InferenceDiagnostic::MissingElseBranch { + id: tgt_expr, + then_ty, + }); } + Ty::unit() } } @@ -611,7 +603,7 @@ impl<'a> InferenceResultBuilder<'a> { } TyKind::Unknown => { // Error has already been emitted somewhere else - self.error_type() + error_type() } _ => { self.diagnostics @@ -619,7 +611,7 @@ impl<'a> InferenceResultBuilder<'a> { id: callee, found: callee_ty, }); - self.error_type() + error_type() } } } @@ -659,7 +651,7 @@ impl<'a> InferenceResultBuilder<'a> { found: num_args, expected: num_params, } - }) + }); } } @@ -712,81 +704,77 @@ impl<'a> InferenceResultBuilder<'a> { id: ExprId, check_params: &CheckParams, ) -> Option { - match resolver.resolve_path_as_value_fully(self.db.upcast(), path) { - Some((value, vis)) => { - // Check visibility of this item - if !vis.is_visible_from( - self.db, - self.resolver - .module() - .expect("resolver must have a module to be able to resolve modules"), - ) { - self.diagnostics - .push(diagnostics::InferenceDiagnostic::PathIsPrivate { id }) - } - - // Match based on what type of value we found - match value { - ValueNs::LocalBinding(pat) => Some(self.type_of_pat.get(pat)?.clone()), - ValueNs::FunctionId(f) => { - let ty = self - .db - .type_for_def(TypableDef::Function(f.into()), Namespace::Values); - Some(ty) - } - ValueNs::StructId(s) => { - if check_params.is_unit_struct { - self.check_unit_struct_lit(id, s.into()) - } - let ty = self - .db - .type_for_def(TypableDef::Struct(s.into()), Namespace::Values); - Some(ty) - } - } + if let Some((value, vis)) = resolver.resolve_path_as_value_fully(self.db.upcast(), path) { + // Check visibility of this item + if !vis.is_visible_from( + self.db, + self.resolver + .module() + .expect("resolver must have a module to be able to resolve modules"), + ) { + self.diagnostics + .push(diagnostics::InferenceDiagnostic::PathIsPrivate { id }); } - None => { - // If no value was found, try to resolve the path as a type. This will always result - // in an error but it does provide much better diagnostics. - let ty = resolver.resolve_path_as_type_fully(self.db.upcast(), path); - if let Some((TypeNs::StructId(struct_id), _)) = ty { - // We can only really get here if the struct is actually a record. Both other - // types can be seen as a values because they have a constructor. - debug_assert_eq!( - Struct::from(struct_id).data(self.db.upcast()).kind, - StructKind::Record - ); - - // Should it be a unit struct? 
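// Several inference hunks above replace a two-arm `match` on an `Option` with
// `if let .. else`, which reads more directly when one arm just records a diagnostic
// and falls through. A minimal sketch of that shape; the names are illustrative only.
fn field_type(known: Option<u32>, diagnostics: &mut Vec<String>) -> Option<u32> {
    // Before:
    // match known {
    //     Some(ty) => Some(ty),
    //     None => { diagnostics.push(..); None }
    // }
    if let Some(ty) = known {
        Some(ty)
    } else {
        diagnostics.push("no such field".to_owned());
        None
    }
}

fn main() {
    let mut diagnostics = Vec::new();
    assert_eq!(field_type(Some(1), &mut diagnostics), Some(1));
    assert_eq!(field_type(None, &mut diagnostics), None);
    assert_eq!(diagnostics.len(), 1);
}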
+ // Match based on what type of value we found + match value { + ValueNs::LocalBinding(pat) => Some(self.type_of_pat.get(pat)?.clone()), + ValueNs::FunctionId(f) => { + let ty = self + .db + .type_for_def(TypableDef::Function(f.into()), Namespace::Values); + Some(ty) + } + ValueNs::StructId(s) => { if check_params.is_unit_struct { - self.diagnostics - .push(InferenceDiagnostic::MismatchedStructLit { - id, - expected: StructKind::Record, - found: StructKind::Unit, - }); - } else { - self.diagnostics - .push(InferenceDiagnostic::MismatchedStructLit { - id, - expected: StructKind::Record, - found: StructKind::Tuple, - }); + self.check_unit_struct_lit(id, s.into()); } - let ty = self .db - .type_for_def(TypableDef::Struct(struct_id.into()), Namespace::Values); - return Some(ty); + .type_for_def(TypableDef::Struct(s.into()), Namespace::Values); + Some(ty) } + } + } else { + // If no value was found, try to resolve the path as a type. This will always result + // in an error but it does provide much better diagnostics. + let ty = resolver.resolve_path_as_type_fully(self.db.upcast(), path); + if let Some((TypeNs::StructId(struct_id), _)) = ty { + // We can only really get here if the struct is actually a record. Both other + // types can be seen as a values because they have a constructor. + debug_assert_eq!( + Struct::from(struct_id).data(self.db.upcast()).kind, + StructKind::Record + ); - // If the path also cannot be resolved as type, it must be considered an invalid - // value and there is nothing we can make of this path. - self.diagnostics - .push(InferenceDiagnostic::UnresolvedValue { id: id.into() }); - None + // Should it be a unit struct? + if check_params.is_unit_struct { + self.diagnostics + .push(InferenceDiagnostic::MismatchedStructLit { + id, + expected: StructKind::Record, + found: StructKind::Unit, + }); + } else { + self.diagnostics + .push(InferenceDiagnostic::MismatchedStructLit { + id, + expected: StructKind::Record, + found: StructKind::Tuple, + }); + } + + let ty = self + .db + .type_for_def(TypableDef::Struct(struct_id.into()), Namespace::Values); + return Some(ty); } + + // If the path also cannot be resolved as type, it must be considered an invalid + // value and there is nothing we can make of this path. 
+ self.diagnostics + .push(InferenceDiagnostic::UnresolvedValue { id: id.into() }); + None } } @@ -819,7 +807,7 @@ impl<'a> InferenceResultBuilder<'a> { type_of_expr: expr_types, type_of_pat: pat_types, diagnostics: self.diagnostics, - standard_types: Default::default(), + standard_types: InternedStandardTypes::default(), } } @@ -839,8 +827,7 @@ impl<'a> InferenceResultBuilder<'a> { } => { let decl_ty = type_ref .as_ref() - .map(|tr| self.resolve_type(*tr)) - .unwrap_or_else(|| self.error_type()); + .map_or_else(error_type, |tr| self.resolve_type(*tr)); //let decl_ty = self.insert_type_vars(decl_ty); let ty = if let Some(expr) = initializer { self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty)) @@ -903,15 +890,15 @@ impl<'a> InferenceResultBuilder<'a> { }; // Verify that it matches what we expected - let ty = if !self.unify(&ty, &expected.ty) { + let ty = if self.unify(&ty, &expected.ty) { + ty + } else { self.diagnostics.push(InferenceDiagnostic::MismatchedTypes { expected: expected.ty.clone(), found: ty, id: tgt_expr, }); expected.ty - } else { - ty }; // Update the expected type for the rest of the loop @@ -953,6 +940,7 @@ impl<'a> InferenceResultBuilder<'a> { Ty::unit() } + #[allow(clippy::unused_self)] pub fn report_pat_inference_failure(&mut self, _pat: PatId) { // self.diagnostics.push(InferenceDiagnostic::PatInferenceFailed { // pat @@ -962,6 +950,7 @@ impl<'a> InferenceResultBuilder<'a> { panic!("pattern failed inferencing"); } + #[allow(clippy::unused_self)] pub fn report_expr_inference_failure(&mut self, _expr: ExprId) { // self.diagnostics.push(InferenceDiagnostic::ExprInferenceFailed { // expr @@ -972,6 +961,11 @@ impl<'a> InferenceResultBuilder<'a> { } } +/// Returns a type used for errors +fn error_type() -> Ty { + TyKind::Unknown.intern() +} + /// When inferring an expression, we propagate downward whatever type hint we /// are able in the form of an `Expectation`. 
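// `error_type` above becomes a free function because it never read `self`
// (clippy::unused_self); call sites can then also pass it by path, e.g.
// `.unwrap_or_else(error_type)` instead of `.unwrap_or_else(|| self.error_type())`.
// Minimal stand-alone shape; the `Ty` enum here is a stand-in, not Mun's real type.
#[derive(Debug, PartialEq)]
enum Ty {
    Int,
    Unknown,
}

/// Returns the type used for errors.
fn error_type() -> Ty {
    Ty::Unknown
}

fn lookup(name: &str) -> Option<Ty> {
    if name == "i32" {
        Some(Ty::Int)
    } else {
        None
    }
}

fn main() {
    assert_eq!(lookup("i32").unwrap_or_else(error_type), Ty::Int);
    assert_eq!(lookup("nope").unwrap_or_else(error_type), Ty::Unknown);
}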
#[derive(Clone, PartialEq, Eq, Debug)] @@ -1150,7 +1144,7 @@ mod diagnostics { &self, db: &dyn HirDatabase, owner: Function, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { let file = owner.source(db.upcast()).file_id; let body = owner.body_source_map(db); @@ -1212,7 +1206,7 @@ mod diagnostics { expr, expected: *expected, found: *found, - }) + }); } InferenceDiagnostic::ExpectedFunction { id, found } => { let expr = body @@ -1362,7 +1356,7 @@ mod diagnostics { expr, receiver_ty: receiver_ty.clone(), name: name.clone(), - }) + }); } InferenceDiagnostic::FieldCountMismatch { id, @@ -1379,7 +1373,7 @@ mod diagnostics { expr, expected: *expected, found: *found, - }) + }); } InferenceDiagnostic::MissingFields { id, @@ -1396,7 +1390,7 @@ mod diagnostics { file, struct_ty: struct_ty.clone(), fields, - field_names: names.to_vec(), + field_names: names.clone(), }); } InferenceDiagnostic::MismatchedStructLit { @@ -1426,7 +1420,7 @@ mod diagnostics { file, receiver_expr: expr, found: found.clone(), - }) + }); } InferenceDiagnostic::NoSuchField { id, field } => { let field = owner.body_source_map(db).field_syntax(*id, *field).into(); @@ -1446,7 +1440,7 @@ mod diagnostics { sink.push(LiteralOutOfRange { literal, int_ty: *literal_ty, - }) + }); } } } diff --git a/crates/mun_hir/src/ty/infer/place_expr.rs b/crates/mun_hir/src/ty/infer/place_expr.rs index add33e6db..364ab39e6 100644 --- a/crates/mun_hir/src/ty/infer/place_expr.rs +++ b/crates/mun_hir/src/ty/infer/place_expr.rs @@ -17,7 +17,7 @@ impl<'a> InferenceResultBuilder<'a> { fn check_place_path(&mut self, resolver: &Resolver, path: &Path) -> bool { match resolver.resolve_path_as_value_fully(self.db.upcast(), path) { Some((ValueNs::LocalBinding(_), _)) => true, - Some((ValueNs::FunctionId(_), _)) | Some((ValueNs::StructId(_), _)) | None => false, + Some((ValueNs::FunctionId(_) | ValueNs::StructId(_), _)) | None => false, } } } diff --git a/crates/mun_hir/src/ty/infer/type_variable.rs b/crates/mun_hir/src/ty/infer/type_variable.rs index 3661f549a..98919f724 100644 --- a/crates/mun_hir/src/ty/infer/type_variable.rs +++ b/crates/mun_hir/src/ty/infer/type_variable.rs @@ -11,7 +11,7 @@ use std::{borrow::Cow, fmt}; pub struct TypeVarId(pub(crate) u32); impl fmt::Display for TypeVarId { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "'{}", self.0) } } @@ -159,6 +159,7 @@ impl TypeVariableTable { /// Handles unificiation of trivial cases. pub(crate) fn unify_inner_trivial(&mut self, a: &Ty, b: &Ty) -> bool { + #[allow(clippy::match_same_arms)] match (a.interned(), b.interned()) { // Ignore unificiation if dealing with unknown types, there are no guarentees in that case. (TyKind::Unknown, _) | (_, TyKind::Unknown) => true, @@ -285,7 +286,7 @@ impl TypeVariableTable { }) } - /// Resolves the type completely; type variables without known type are replaced by Ty::Unknown. + /// Resolves the type completely; type variables without known type are replaced by [`Ty::Unknown`]. 
pub(crate) fn resolve_ty_completely(&mut self, ty: Ty) -> Ty { self.resolve_ty_completely_inner(&mut Vec::new(), ty) } diff --git a/crates/mun_hir/src/ty/lower.rs b/crates/mun_hir/src/ty/lower.rs index f29ba4dd0..78c37b814 100644 --- a/crates/mun_hir/src/ty/lower.rs +++ b/crates/mun_hir/src/ty/lower.rs @@ -29,7 +29,7 @@ pub struct LowerTyMap { impl Default for LowerTyMap { fn default() -> Self { LowerTyMap { - type_ref_to_type: Default::default(), + type_ref_to_type: ArenaMap::default(), diagnostics: vec![], unknown_ty: TyKind::Unknown.intern(), } @@ -50,11 +50,11 @@ impl LowerTyMap { db: &dyn HirDatabase, file_id: FileId, source_map: &TypeRefSourceMap, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { self.diagnostics .iter() - .for_each(|it| it.add_to(db, file_id, source_map, sink)) + .for_each(|it| it.add_to(db, file_id, source_map, sink)); } } @@ -132,7 +132,7 @@ impl Ty { // Get the current module and see if the type is visible from here if let Some(module) = resolver.module() { if !vis.is_visible_from(db, module) { - diagnostics.push(LowerDiagnostic::TypeIsPrivate { id: type_ref }) + diagnostics.push(LowerDiagnostic::TypeIsPrivate { id: type_ref }); } } @@ -243,9 +243,10 @@ pub(crate) fn type_for_def(db: &dyn HirDatabase, def: TypableDef, ns: Namespace) (TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t), // 'error' cases: - (TypableDef::Function(_), Namespace::Types) => TyKind::Unknown.intern(), - (TypableDef::PrimitiveType(_), Namespace::Values) => TyKind::Unknown.intern(), - (TypableDef::TypeAlias(_), Namespace::Values) => TyKind::Unknown.intern(), + (TypableDef::Function(_), Namespace::Types) + | (TypableDef::PrimitiveType(_) | TypableDef::TypeAlias(_), Namespace::Values) => { + TyKind::Unknown.intern() + } } } @@ -334,7 +335,7 @@ pub mod diagnostics { _db: &dyn HirDatabase, file_id: FileId, source_map: &TypeRefSourceMap, - sink: &mut DiagnosticSink, + sink: &mut DiagnosticSink<'_>, ) { match self { LowerDiagnostic::UnresolvedType { id } => sink.push(UnresolvedType { diff --git a/crates/mun_hir/src/ty/op.rs b/crates/mun_hir/src/ty/op.rs index 9b347196f..e5bf40614 100644 --- a/crates/mun_hir/src/ty/op.rs +++ b/crates/mun_hir/src/ty/op.rs @@ -11,8 +11,7 @@ pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty { TyKind::Int(_) | TyKind::Float(_) | TyKind::Bool - | TyKind::InferenceVar(InferTy::Float(_)) - | TyKind::InferenceVar(InferTy::Int(_)) => lhs_ty, + | TyKind::InferenceVar(InferTy::Float(_) | InferTy::Int(_)) => lhs_ty, _ => TyKind::Unknown.intern(), }, @@ -22,30 +21,26 @@ pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty { | TyKind::Bool | TyKind::Struct(_) | TyKind::Array(_) - | TyKind::InferenceVar(InferTy::Float(_)) - | TyKind::InferenceVar(InferTy::Int(_)) => lhs_ty, + | TyKind::InferenceVar(InferTy::Float(_) | InferTy::Int(_)) => lhs_ty, _ => TyKind::Unknown.intern(), }, BinaryOp::Assignment { - op: Some(ArithOp::LeftShift), + op: + Some( + ArithOp::LeftShift + | ArithOp::RightShift + | ArithOp::BitAnd + | ArithOp::BitOr + | ArithOp::BitXor, + ), } - | BinaryOp::Assignment { - op: Some(ArithOp::RightShift), - } - | BinaryOp::Assignment { - op: Some(ArithOp::BitAnd), - } - | BinaryOp::Assignment { - op: Some(ArithOp::BitOr), - } - | BinaryOp::Assignment { - op: Some(ArithOp::BitXor), - } - | BinaryOp::ArithOp(ArithOp::LeftShift) - | BinaryOp::ArithOp(ArithOp::RightShift) - | BinaryOp::ArithOp(ArithOp::BitAnd) - | BinaryOp::ArithOp(ArithOp::BitOr) - | BinaryOp::ArithOp(ArithOp::BitXor) => match 
lhs_ty.interned() { + | BinaryOp::ArithOp( + ArithOp::LeftShift + | ArithOp::RightShift + | ArithOp::BitAnd + | ArithOp::BitOr + | ArithOp::BitXor, + ) => match lhs_ty.interned() { TyKind::Int(_) | TyKind::Bool | TyKind::InferenceVar(InferTy::Int(_)) => lhs_ty, _ => TyKind::Unknown.intern(), }, @@ -54,8 +49,7 @@ pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty { BinaryOp::Assignment { op: Some(_) } | BinaryOp::ArithOp(_) => match lhs_ty.interned() { TyKind::Int(_) | TyKind::Float(_) - | TyKind::InferenceVar(InferTy::Float(_)) - | TyKind::InferenceVar(InferTy::Int(_)) => lhs_ty, + | TyKind::InferenceVar(InferTy::Float(_) | InferTy::Int(_)) => lhs_ty, _ => TyKind::Unknown.intern(), }, } @@ -68,8 +62,7 @@ pub(super) fn binary_op_return_ty(op: BinaryOp, rhs_ty: Ty) -> Ty { BinaryOp::ArithOp(_) => match rhs_ty.interned() { TyKind::Int(_) | TyKind::Float(_) - | TyKind::InferenceVar(InferTy::Float(_)) - | TyKind::InferenceVar(InferTy::Int(_)) => rhs_ty, + | TyKind::InferenceVar(InferTy::Float(_) | InferTy::Int(_)) => rhs_ty, _ => TyKind::Unknown.intern(), }, BinaryOp::CmpOp(_) | BinaryOp::LogicOp(_) => TyKind::Bool.intern(), diff --git a/crates/mun_hir/src/ty/primitives.rs b/crates/mun_hir/src/ty/primitives.rs index 7e48458fe..f3a0ad671 100644 --- a/crates/mun_hir/src/ty/primitives.rs +++ b/crates/mun_hir/src/ty/primitives.rs @@ -11,13 +11,13 @@ pub struct IntTy { } impl fmt::Debug for IntTy { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for IntTy { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.as_str()) } } @@ -133,15 +133,15 @@ impl IntTy { IntBitness::X32 => i32::MAX as u128, IntBitness::X64 => i64::MAX as u128, IntBitness::X128 => i128::MAX as u128, - _ => unreachable!("cannot determine max size of variable bitness"), + IntBitness::Xsize => unreachable!("cannot determine max size of variable bitness"), }, Signedness::Unsigned => match self.bitness { - IntBitness::X8 => u8::MAX as u128, - IntBitness::X16 => u16::MAX as u128, - IntBitness::X32 => u32::MAX as u128, - IntBitness::X64 => u64::MAX as u128, + IntBitness::X8 => u8::MAX.into(), + IntBitness::X16 => u16::MAX.into(), + IntBitness::X32 => u32::MAX.into(), + IntBitness::X64 => u64::MAX.into(), IntBitness::X128 => u128::MAX, - _ => unreachable!("cannot determine max size of variable bitness"), + IntBitness::Xsize => unreachable!("cannot determine max size of variable bitness"), }, } } @@ -165,13 +165,13 @@ pub struct FloatTy { } impl fmt::Debug for FloatTy { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for FloatTy { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.as_str()) } } diff --git a/crates/mun_hir/src/ty/tests.rs b/crates/mun_hir/src/ty/tests.rs index 79bf47761..1abef4c39 100644 --- a/crates/mun_hir/src/ty/tests.rs +++ b/crates/mun_hir/src/ty/tests.rs @@ -52,7 +52,7 @@ fn array_element_assignment() { 40..50 'a[2] = 4u8': () 42..43 '2': i32 47..50 '4u8': u8 - "###) + "###); } #[test] @@ -93,7 +93,7 @@ fn array_is_place_expr() { 75..76 '3': i32 78..79 '0': i32 83..84 '4': i32 - "###) + "###); } #[test] @@ -116,7 +116,7 @@ fn infer_array_structs() { 69..70 'a': [Foo] 69..73 'a[2]': Foo 71..72 
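// The `u8::MAX.into()` changes above use lossless `From`/`Into` conversions instead of
// `as` casts (clippy::cast_lossless), so a narrowing mistake cannot compile silently.
// Tiny illustration, with the bit widths passed in for the sake of the example:
fn max_unsigned(bits: u32) -> u128 {
    match bits {
        8 => u8::MAX.into(),
        16 => u16::MAX.into(),
        32 => u32::MAX.into(),
        64 => u64::MAX.into(),
        128 => u128::MAX,
        _ => unreachable!("unsupported bit width"),
    }
}

fn main() {
    assert_eq!(max_unsigned(8), 255);
    assert_eq!(max_unsigned(64), u128::from(u64::MAX));
}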
'2': i32 - "###) + "###); } #[test] @@ -144,7 +144,7 @@ fn infer_array() { 75..76 'a': [u8] 75..79 'a[3]': u8 77..78 '3': i32 - "###) + "###); } #[test] diff --git a/crates/mun_hir/src/type_ref.rs b/crates/mun_hir/src/type_ref.rs index ed2c24a2e..0ac608f5a 100644 --- a/crates/mun_hir/src/type_ref.rs +++ b/crates/mun_hir/src/type_ref.rs @@ -12,7 +12,7 @@ use std::ops::Index; /// The ID of a `TypeRef` in a `TypeRefMap` pub type LocalTypeRefId = Idx; -/// Compare ty::Ty +/// Compare [`ty::Ty`] #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeRef { Path(Path), @@ -52,10 +52,7 @@ pub struct TypeRefMap { impl TypeRefMap { pub(crate) fn builder() -> TypeRefMapBuilder { - TypeRefMapBuilder { - map: Default::default(), - source_map: Default::default(), - } + TypeRefMapBuilder::default() } /// Returns an iterator over all types in this instance @@ -73,7 +70,7 @@ impl Index for TypeRefMap { } /// A builder object to lower type references from syntax to a more abstract representation. -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Default, Eq, PartialEq)] pub(crate) struct TypeRefMapBuilder { map: TypeRefMap, source_map: TypeRefSourceMap, @@ -101,14 +98,14 @@ impl TypeRefMapBuilder { /// Lowers the given AST type references and returns the Id of the resulting `TypeRef`. pub fn alloc_from_node(&mut self, node: &ast::TypeRef) -> LocalTypeRefId { - use mun_syntax::ast::TypeRefKind::*; + use mun_syntax::ast::TypeRefKind::{ArrayType, NeverType, PathType}; + let ptr = AstPtr::new(node); let type_ref = match node.kind() { PathType(path) => path .path() .and_then(Path::from_ast) - .map(TypeRef::Path) - .unwrap_or(TypeRef::Error), + .map_or(TypeRef::Error, TypeRef::Path), NeverType(_) => TypeRef::Never, ArrayType(inner) => TypeRef::Array(self.alloc_from_node_opt(inner.type_ref().as_ref())), }; @@ -127,7 +124,7 @@ impl TypeRefMapBuilder { } /// Finish building type references, returning the `TypeRefMap` which contains all the - /// `TypeRef`s and a `TypeRefSourceMap` which converts LocalTypeRefIds back to source location. + /// `TypeRef`s and a `TypeRefSourceMap` which converts [`LocalTypeRefIds`] back to source location. pub fn finish(self) -> (TypeRefMap, TypeRefSourceMap) { (self.map, self.source_map) } diff --git a/crates/mun_language_server/src/analysis.rs b/crates/mun_language_server/src/analysis.rs index e81ff1551..5b8836266 100644 --- a/crates/mun_language_server/src/analysis.rs +++ b/crates/mun_language_server/src/analysis.rs @@ -21,7 +21,7 @@ impl Analysis { /// Applies the given changes to the state. If there are outstanding `AnalysisSnapshot`s they /// will be canceled. pub fn apply_change(&mut self, change: AnalysisChange) { - self.db.apply_change(change) + self.db.apply_change(change); } /// Creates a snapshot of the current `Analysis`. 
You can query the resulting `AnalysisSnapshot` diff --git a/crates/mun_language_server/src/change.rs b/crates/mun_language_server/src/change.rs index d1c7516eb..49db39a4c 100644 --- a/crates/mun_language_server/src/change.rs +++ b/crates/mun_language_server/src/change.rs @@ -18,17 +18,17 @@ impl AnalysisChange { /// Sets the packages pub fn set_packages(&mut self, packages: mun_hir::PackageSet) { - self.packages = Some(packages) + self.packages = Some(packages); } /// Records the addition of a new root pub fn set_roots(&mut self, roots: Vec) { - self.roots = Some(roots) + self.roots = Some(roots); } /// Records the change of content of a specific file pub fn change_file(&mut self, file_id: mun_hir::FileId, new_text: Option>) { - self.files_changed.push((file_id, new_text)) + self.files_changed.push((file_id, new_text)); } } @@ -37,7 +37,7 @@ impl AnalysisDatabase { pub(crate) fn apply_change(&mut self, change: AnalysisChange) { // Add new package set if let Some(package_set) = change.packages { - self.set_packages(Arc::new(package_set)) + self.set_packages(Arc::new(package_set)); } // Modify the source roots @@ -54,7 +54,7 @@ impl AnalysisDatabase { // Update changed files for (file_id, text) in change.files_changed { let text = text.unwrap_or_else(|| Arc::from("".to_owned())); - self.set_file_text(file_id, text) + self.set_file_text(file_id, text); } } } diff --git a/crates/mun_language_server/src/change_fixture.rs b/crates/mun_language_server/src/change_fixture.rs index a69f4622f..f70295403 100644 --- a/crates/mun_language_server/src/change_fixture.rs +++ b/crates/mun_language_server/src/change_fixture.rs @@ -33,7 +33,7 @@ impl ChangeFixture { "cannot have multiple cursor markers" ); file_position = Some((file_id, range_or_offset)); - text.to_string() + text.clone() } else { entry.text.clone() }; diff --git a/crates/mun_language_server/src/completion.rs b/crates/mun_language_server/src/completion.rs index 8d7acb358..16757a1c2 100644 --- a/crates/mun_language_server/src/completion.rs +++ b/crates/mun_language_server/src/completion.rs @@ -55,13 +55,13 @@ impl From for Vec { impl Completions { /// Adds a raw `CompletionItem` fn add(&mut self, item: CompletionItem) { - self.buf.push(item) + self.buf.push(item); } /// Adds a completion item for a resolved name fn add_resolution( &mut self, - ctx: &CompletionContext, + ctx: &CompletionContext<'_>, local_name: String, resolution: &ScopeDef, ) { @@ -71,7 +71,7 @@ impl Completions { } /// Adds a completion item for a field - fn add_field(&mut self, ctx: &CompletionContext, field: mun_hir::Field) { + fn add_field(&mut self, ctx: &CompletionContext<'_>, field: mun_hir::Field) { let item = render_field(RenderContext::new(ctx), field); self.add(item); } diff --git a/crates/mun_language_server/src/completion/dot.rs b/crates/mun_language_server/src/completion/dot.rs index 8d79f87f4..b73fd44ef 100644 --- a/crates/mun_language_server/src/completion/dot.rs +++ b/crates/mun_language_server/src/completion/dot.rs @@ -1,8 +1,8 @@ use super::{CompletionContext, Completions}; use mun_hir::Upcast; -/// Complete dot accesses, i.e. fields. Adds `CompletionItems` to `result. -pub(super) fn complete_dot(result: &mut Completions, ctx: &CompletionContext) { +/// Complete dot accesses, i.e. fields. Adds `CompletionItems` to `result`. 
+pub(super) fn complete_dot(result: &mut Completions, ctx: &CompletionContext<'_>) { // Get the expression that we want to get the fields of let dot_receiver = match &ctx.dot_receiver { Some(expr) => expr, @@ -18,7 +18,7 @@ pub(super) fn complete_dot(result: &mut Completions, ctx: &CompletionContext) { // Get all the fields of the expression if let Some(strukt) = receiver_ty.as_struct() { for field in strukt.fields(ctx.db.upcast()) { - result.add_field(ctx, field) + result.add_field(ctx, field); } }; } @@ -42,7 +42,7 @@ mod tests { } "#, Some(CompletionKind::Reference) - )) + )); } #[test] @@ -57,7 +57,7 @@ mod tests { } "#, Some(CompletionKind::Reference) - )) + )); } #[test] @@ -73,7 +73,7 @@ mod tests { } "#, Some(CompletionKind::Reference) - )) + )); } #[test] @@ -88,6 +88,6 @@ mod tests { } "#, Some(CompletionKind::Reference) - )) + )); } } diff --git a/crates/mun_language_server/src/completion/render.rs b/crates/mun_language_server/src/completion/render.rs index c8abcba23..d5d214ccd 100644 --- a/crates/mun_language_server/src/completion/render.rs +++ b/crates/mun_language_server/src/completion/render.rs @@ -6,12 +6,12 @@ use crate::{db::AnalysisDatabase, SymbolKind}; use function::FunctionRender; use mun_hir::{semantics::ScopeDef, HirDisplay}; -pub(super) fn render_field(ctx: RenderContext, field: mun_hir::Field) -> CompletionItem { +pub(super) fn render_field(ctx: RenderContext<'_>, field: mun_hir::Field) -> CompletionItem { Render::new(ctx).render_field(field) } pub(super) fn render_fn( - ctx: RenderContext, + ctx: RenderContext<'_>, local_name: Option, func: mun_hir::Function, ) -> Option { @@ -19,7 +19,7 @@ pub(super) fn render_fn( } pub(super) fn render_resolution( - ctx: RenderContext, + ctx: RenderContext<'_>, local_name: String, resolution: &ScopeDef, ) -> Option { @@ -57,7 +57,7 @@ impl<'a> Render<'a> { local_name: String, resolution: &ScopeDef, ) -> Option { - use mun_hir::ModuleDef::*; + use mun_hir::ModuleDef::{Function, Module, PrimitiveType, Struct, TypeAlias}; let completion_kind = match resolution { ScopeDef::ModuleDef(PrimitiveType(..)) => CompletionKind::BuiltinType, diff --git a/crates/mun_language_server/src/completion/unqualified_path.rs b/crates/mun_language_server/src/completion/unqualified_path.rs index 16f2e1104..e5186267d 100644 --- a/crates/mun_language_server/src/completion/unqualified_path.rs +++ b/crates/mun_language_server/src/completion/unqualified_path.rs @@ -8,7 +8,7 @@ use super::{CompletionContext, Completions}; /// foo_$0 /// } /// ``` -pub(super) fn complete_unqualified_path(result: &mut Completions, ctx: &CompletionContext) { +pub(super) fn complete_unqualified_path(result: &mut Completions, ctx: &CompletionContext<'_>) { // Only complete trivial paths (e.g. foo, not ::foo) if !ctx.is_trivial_path { return; @@ -35,6 +35,6 @@ mod tests { } "#, Some(CompletionKind::Reference) - )) + )); } } diff --git a/crates/mun_language_server/src/db.rs b/crates/mun_language_server/src/db.rs index a3330759b..e80ccd89a 100644 --- a/crates/mun_language_server/src/db.rs +++ b/crates/mun_language_server/src/db.rs @@ -31,7 +31,7 @@ pub(crate) struct AnalysisDatabase { impl Default for AnalysisDatabase { fn default() -> Self { let mut db = AnalysisDatabase { - storage: Default::default(), + storage: salsa::Storage::default(), }; db.set_target(Target::host_target().expect("could not determine host target spec")); db @@ -55,7 +55,7 @@ impl salsa::Database for AnalysisDatabase { | salsa::EventKind::WillExecute { .. 
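// The import edits above list enum variants explicitly instead of glob-importing them
// (`use mun_hir::ModuleDef::*` becomes a named list) per clippy::enum_glob_use, so a
// reader can see which variants a match relies on. Small stand-alone version with an
// invented module and enum:
mod defs {
    pub enum ModuleDef {
        Function,
        Struct,
        Module,
    }
}

fn describe(def: &defs::ModuleDef) -> &'static str {
    // Before: use defs::ModuleDef::*;
    use defs::ModuleDef::{Function, Module, Struct};

    match def {
        Function => "function",
        Struct => "struct",
        Module => "module",
    }
}

fn main() {
    assert_eq!(describe(&defs::ModuleDef::Struct), "struct");
}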
} => { self.check_canceled(); } - _ => (), + salsa::EventKind::WillBlockOn { .. } => (), } } } diff --git a/crates/mun_language_server/src/file_structure.rs b/crates/mun_language_server/src/file_structure.rs index 4b28805af..4b27bff1e 100644 --- a/crates/mun_language_server/src/file_structure.rs +++ b/crates/mun_language_server/src/file_structure.rs @@ -93,7 +93,7 @@ fn try_convert_to_structure_node(node: &SyntaxNode) -> Option { can_insert_ws = true; } } - }) + }); } /// Given a `SyntaxNode` construct a `StructureNode` by referring to the type of a node. diff --git a/crates/mun_language_server/src/handlers.rs b/crates/mun_language_server/src/handlers.rs index 90ab45c31..7d9e1ae05 100644 --- a/crates/mun_language_server/src/handlers.rs +++ b/crates/mun_language_server/src/handlers.rs @@ -39,25 +39,6 @@ pub(crate) fn handle_completion( snapshot: LanguageServerSnapshot, params: lsp_types::CompletionParams, ) -> anyhow::Result> { - let position = from_lsp::file_position(&snapshot, params.text_document_position)?; - - // If the completion was triggered after a single colon there is nothing to do. We only want - // completion after a *double* colon (::) or after a dot (.). - if is_position_at_single_colon(&snapshot, position, params.context)? { - return Ok(None); - } - - // Get all completions from the analysis database - let items = match snapshot.analysis.completions(position)? { - None => return Ok(None), - Some(items) => items, - }; - - // Convert all the items to the LSP protocol type - let items: Vec = items.into_iter().map(to_lsp::completion_item).collect(); - - return Ok(Some(items.into())); - /// Helper function to check if the given position is preceded by a single colon. fn is_position_at_single_colon( snapshot: &LanguageServerSnapshot, @@ -80,9 +61,28 @@ pub(crate) fn handle_completion( } Ok(false) } + + let position = from_lsp::file_position(&snapshot, params.text_document_position)?; + + // If the completion was triggered after a single colon there is nothing to do. We only want + // completion after a *double* colon (::) or after a dot (.). + if is_position_at_single_colon(&snapshot, position, params.context)? { + return Ok(None); + } + + // Get all completions from the analysis database + let items = match snapshot.analysis.completions(position)? { + None => return Ok(None), + Some(items) => items, + }; + + // Convert all the items to the LSP protocol type + let items: Vec = items.into_iter().map(to_lsp::completion_item).collect(); + + Ok(Some(items.into())) } -/// Constructs a hierarchy of DocumentSymbols for a list of symbols that specify which index is the +/// Constructs a hierarchy of `DocumentSymbols` for a list of symbols that specify which index is the /// parent of a symbol. The parent index must always be smaller than the current index. 
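// A generic sketch of the "flat list with parent indices" shape that the doc comment
// above describes: every parent index points at an earlier entry, so children can be
// attached by walking the list back-to-front. This is only an illustration of the
// idea, not the handler's actual implementation; the `Node` type is invented.
#[derive(Debug)]
struct Node {
    name: String,
    children: Vec<Node>,
}

fn build_hierarchy(flat: Vec<(String, Option<usize>)>) -> Vec<Node> {
    let mut nodes: Vec<(Option<usize>, Node)> = flat
        .into_iter()
        .map(|(name, parent)| (parent, Node { name, children: Vec::new() }))
        .collect();

    let mut roots = Vec::new();
    // Pop from the back: a node's parent (at a smaller index) is still in `nodes`.
    while let Some((parent, node)) = nodes.pop() {
        match parent {
            Some(p) => nodes[p].1.children.insert(0, node),
            None => roots.insert(0, node),
        }
    }
    roots
}

fn main() {
    let flat = vec![
        ("a".to_owned(), None),
        ("b".to_owned(), Some(0)),
        ("c".to_owned(), Some(0)),
    ];
    let tree = build_hierarchy(flat);
    assert_eq!(tree.len(), 1);
    assert_eq!(tree[0].name, "a");
    assert_eq!(tree[0].children.len(), 2);
    assert_eq!(tree[0].children[0].name, "b");
}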
fn build_hierarchy_from_flat_list( mut symbols_and_parent: Vec<(DocumentSymbol, Option)>, @@ -120,7 +120,7 @@ fn build_hierarchy_from_flat_list( #[cfg(test)] mod tests { use crate::handlers::build_hierarchy_from_flat_list; - use lsp_types::{DocumentSymbol, SymbolKind}; + use lsp_types::{DocumentSymbol, Range, SymbolKind}; #[test] fn test_build_hierarchy_from_flat_list() { @@ -131,44 +131,41 @@ mod tests { kind: SymbolKind::FILE, tags: None, deprecated: None, - range: Default::default(), - selection_range: Default::default(), + range: Range::default(), + selection_range: Range::default(), children: None, }; - let mut list = Vec::new(); - - list.push(( - DocumentSymbol { - name: "a".to_string(), - ..default_symbol.clone() - }, - None, - )); - - list.push(( - DocumentSymbol { - name: "b".to_string(), - ..default_symbol.clone() - }, - Some(0), - )); - - list.push(( - DocumentSymbol { - name: "c".to_string(), - ..default_symbol.clone() - }, - Some(0), - )); - - list.push(( - DocumentSymbol { - name: "d".to_string(), - ..default_symbol.clone() - }, - Some(1), - )); + let list = vec![ + ( + DocumentSymbol { + name: "a".to_string(), + ..default_symbol.clone() + }, + None, + ), + ( + DocumentSymbol { + name: "b".to_string(), + ..default_symbol.clone() + }, + Some(0), + ), + ( + DocumentSymbol { + name: "c".to_string(), + ..default_symbol.clone() + }, + Some(0), + ), + ( + DocumentSymbol { + name: "d".to_string(), + ..default_symbol.clone() + }, + Some(1), + ), + ]; assert_eq!( build_hierarchy_from_flat_list(list), @@ -190,6 +187,6 @@ mod tests { ]), ..default_symbol }] - ) + ); } } diff --git a/crates/mun_language_server/src/lib.rs b/crates/mun_language_server/src/lib.rs index 34e989025..957a66a7e 100644 --- a/crates/mun_language_server/src/lib.rs +++ b/crates/mun_language_server/src/lib.rs @@ -94,18 +94,17 @@ pub fn run_server() -> anyhow::Result<()> { let config = { // Convert the root uri to a PathBuf - let root_dir = match initialize_params + let root_dir = if let Some(path) = initialize_params .root_uri .and_then(|it| it.to_file_path().ok()) .and_then(|path| AbsPathBuf::try_from(path).ok()) { - Some(path) => path, - None => { - // Get the current working directory as fallback - let cwd = std::env::current_dir()?; - AbsPathBuf::try_from(cwd) - .expect("could not convert current directory to an absolute path") - } + path + } else { + // Get the current working directory as fallback + let cwd = std::env::current_dir()?; + AbsPathBuf::try_from(cwd) + .expect("could not convert current directory to an absolute path") }; let mut config = Config::new(root_dir); @@ -115,8 +114,7 @@ pub fn run_server() -> anyhow::Result<()> { .capabilities .workspace .and_then(|c| c.did_change_watched_files) - .map(|c| c.dynamic_registration.unwrap_or(false)) - .unwrap_or(false); + .is_some_and(|c| c.dynamic_registration.unwrap_or(false)); if supports_file_watcher_dynamic_registration { config.watcher = FilesWatcher::Client; } diff --git a/crates/mun_language_server/src/lsp_utils.rs b/crates/mun_language_server/src/lsp_utils.rs index 8f300f02b..5b65ceb40 100644 --- a/crates/mun_language_server/src/lsp_utils.rs +++ b/crates/mun_language_server/src/lsp_utils.rs @@ -6,16 +6,6 @@ pub(crate) fn apply_document_changes( old_text: &mut String, content_changes: Vec, ) { - // The changes are specified with ranges where they apply. These ranges are given as line-column - // pairs. We can compute the offset in the text using a `LineIndex` however, changes to the text - // may invalidate this too. 
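// The capability check above collapses `.map(..).unwrap_or(false)` into
// `Option::is_some_and` (available on recent stable Rust). Stand-alone sketch with an
// invented capabilities struct, not the real LSP types:
struct FileWatchCaps {
    dynamic_registration: Option<bool>,
}

fn supports_dynamic_registration(caps: Option<&FileWatchCaps>) -> bool {
    // Before: caps.map(|c| c.dynamic_registration.unwrap_or(false)).unwrap_or(false)
    caps.is_some_and(|c| c.dynamic_registration.unwrap_or(false))
}

fn main() {
    let caps = FileWatchCaps {
        dynamic_registration: Some(true),
    };
    assert!(supports_dynamic_registration(Some(&caps)));
    assert!(!supports_dynamic_registration(None));
}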
- // As a simple optimization we keep track of the lines that are possibly invalid in the - // LineIndex based on where we insert new text. If a changes is within the invalid range we - // recompute the LineIndex. Some clients (e.g. Code) sort the ranges in reverse which should - // ensure that we almost never invalidate the LineIndex. - - let mut line_index = LineIndex::new(old_text); - enum IndexValid { All, UpToLineExclusive(u32), @@ -25,26 +15,33 @@ pub(crate) fn apply_document_changes( fn covers(&self, line: u32) -> bool { match *self { IndexValid::UpToLineExclusive(to) => to > line, - _ => true, + IndexValid::All => true, } } } + // The changes are specified with ranges where they apply. These ranges are given as line-column + // pairs. We can compute the offset in the text using a `LineIndex` however, changes to the text + // may invalidate this too. + // As a simple optimization we keep track of the lines that are possibly invalid in the + // LineIndex based on where we insert new text. If a changes is within the invalid range we + // recompute the LineIndex. Some clients (e.g. Code) sort the ranges in reverse which should + // ensure that we almost never invalidate the LineIndex. + + let mut line_index = LineIndex::new(old_text); + let mut index_valid = IndexValid::All; for change in content_changes { - match change.range { - Some(range) => { - if !index_valid.covers(range.end.line) { - line_index = LineIndex::new(old_text); - } - index_valid = IndexValid::UpToLineExclusive(range.start.line); - let range = from_lsp::text_range(&line_index, range); - old_text.replace_range(std::ops::Range::::from(range), &change.text); - } - None => { - *old_text = change.text; - index_valid = IndexValid::UpToLineExclusive(0) + if let Some(range) = change.range { + if !index_valid.covers(range.end.line) { + line_index = LineIndex::new(old_text); } + index_valid = IndexValid::UpToLineExclusive(range.start.line); + let range = from_lsp::text_range(&line_index, range); + old_text.replace_range(std::ops::Range::::from(range), &change.text); + } else { + *old_text = change.text; + index_valid = IndexValid::UpToLineExclusive(0); } } } diff --git a/crates/mun_language_server/src/state.rs b/crates/mun_language_server/src/state.rs index e8836c161..5dcf2c938 100644 --- a/crates/mun_language_server/src/state.rs +++ b/crates/mun_language_server/src/state.rs @@ -14,7 +14,7 @@ use mun_paths::AbsPathBuf; use mun_vfs::VirtualFileSystem; use parking_lot::RwLock; use rustc_hash::FxHashSet; -use std::{ops::Deref, sync::Arc, time::Instant}; +use std::{sync::Arc, time::Instant}; mod protocol; mod utils; @@ -98,7 +98,7 @@ impl LanguageServerState { let vfs_monitor: mun_vfs::NotifyMonitor = mun_vfs::Monitor::new(Box::new(move |msg| { vfs_monitor_sender .send(msg) - .expect("error sending vfs monitor message to foreground") + .expect("error sending vfs monitor message to foreground"); })); let vfs_monitor = Box::new(vfs_monitor) as Box; @@ -108,15 +108,15 @@ impl LanguageServerState { // Construct the state that will hold all the analysis and apply the initial state let mut analysis = Analysis::default(); let mut change = AnalysisChange::new(); - change.set_packages(Default::default()); - change.set_roots(Default::default()); + change.set_packages(mun_hir::PackageSet::default()); + change.set_roots(Vec::default()); analysis.apply_change(change); LanguageServerState { sender, request_queue: ReqQueue::default(), config, - vfs: Arc::new(RwLock::new(Default::default())), + vfs: Arc::default(), vfs_monitor, 
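// The `IndexValid` helper above tracks how much of a cached line index is still
// trustworthy after edits: everything at or past the first edited line is suspect and
// forces a rebuild. A minimal sketch of just that bookkeeping, mirroring the enum in
// the hunk above:
enum IndexValid {
    All,
    UpToLineExclusive(u32),
}

impl IndexValid {
    fn covers(&self, line: u32) -> bool {
        match *self {
            IndexValid::UpToLineExclusive(to) => to > line,
            IndexValid::All => true,
        }
    }
}

fn main() {
    let mut valid = IndexValid::All;
    assert!(valid.covers(100));

    // An edit starting at line 10 invalidates line 10 and everything after it, so a
    // later change touching line 40 would trigger a `LineIndex` rebuild.
    valid = IndexValid::UpToLineExclusive(10);
    assert!(valid.covers(9));
    assert!(!valid.covers(40));
}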
vfs_monitor_receiver, open_docs: FxHashSet::default(), @@ -216,7 +216,7 @@ impl LanguageServerState { progress_state, Some(format!("{done}/{total}")), Some(Progress::fraction(done, total)), - ) + ); } mun_vfs::MonitorMessage::Loaded { files } => { let vfs = &mut *self.vfs.write(); @@ -270,10 +270,9 @@ fn handle_diagnostics(state: LanguageServerSnapshot, sender: Sender) -> an uri: to_lsp::url(&state, annotation.range.file_id)?, range: to_lsp::range( annotation.range.value, - state + &*state .analysis - .file_line_index(annotation.range.file_id)? - .deref(), + .file_line_index(annotation.range.file_id)?, ), }, message: annotation.message, diff --git a/crates/mun_language_server/src/state/protocol.rs b/crates/mun_language_server/src/state/protocol.rs index 904df5e98..63a800669 100644 --- a/crates/mun_language_server/src/state/protocol.rs +++ b/crates/mun_language_server/src/state/protocol.rs @@ -120,7 +120,7 @@ impl LanguageServerState { self.request_queue.incoming.register( request.id.clone(), (request.method.clone(), request_received), - ) + ); } /// Sends a request to the client and registers the request so that we can handle the response. @@ -153,7 +153,7 @@ impl LanguageServerState { .outgoing .complete(response.id.clone()) .expect("received response for unknown request"); - handler(self, response) + handler(self, response); } /// Sends a response to the client. This method logs the time it took us to reply @@ -170,6 +170,6 @@ impl LanguageServerState { pub(crate) fn send(&mut self, message: lsp_server::Message) { self.sender .send(message) - .expect("error sending lsp message to the outgoing channel") + .expect("error sending lsp message to the outgoing channel"); } } diff --git a/crates/mun_language_server/src/state/utils.rs b/crates/mun_language_server/src/state/utils.rs index 71497692a..259c8c74f 100644 --- a/crates/mun_language_server/src/state/utils.rs +++ b/crates/mun_language_server/src/state/utils.rs @@ -21,7 +21,7 @@ impl LanguageServerState { let message = message.as_ref().to_owned(); self.send_notification::( lsp_types::ShowMessageParams { typ, message }, - ) + ); } /// Reports progress to the user via the `WorkDoneProgress` protocol. diff --git a/crates/mun_language_server/src/to_lsp.rs b/crates/mun_language_server/src/to_lsp.rs index bf50404db..09c484f06 100644 --- a/crates/mun_language_server/src/to_lsp.rs +++ b/crates/mun_language_server/src/to_lsp.rs @@ -25,7 +25,7 @@ fn url_from_path_with_drive_lowercasing(path: impl AsRef) -> anyhow::Resul // VSCode expects drive letters to be lowercased, whereas rust will uppercase the drive letters. if component_has_windows_drive { - let url_original = Url::from_file_path(&path).map_err(|_| { + let url_original = Url::from_file_path(&path).map_err(|()| { anyhow::anyhow!("can't convert path to url: {}", path.as_ref().display()) })?; @@ -42,7 +42,7 @@ fn url_from_path_with_drive_lowercasing(path: impl AsRef) -> anyhow::Resul Ok(url) } else { - Ok(Url::from_file_path(&path).map_err(|_| { + Ok(Url::from_file_path(&path).map_err(|()| { anyhow::anyhow!("can't convert path to url: {}", path.as_ref().display()) })?) 
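// Illustrative sketch (standalone, not part of the patch): `Url::from_file_path` fails with the
// unit type, so the closures above can destructure `|()|` instead of discarding the error with
// `|_|`. Clippy's `map_err_ignore` flags the `|_|` form because it can silently swallow a real
// error type; matching `()` documents that there is nothing to lose. A std-only analogue:
fn parent_dir(path: &std::path::Path) -> Result<&std::path::Path, String> {
    // `Path::parent` also has no error payload, so the "error" we map from is effectively unit.
    path.parent()
        .ok_or(())
        .map_err(|()| format!("can't determine parent of: {}", path.display()))
}

fn main() {
    assert!(parent_dir(std::path::Path::new("/tmp/project/mun.toml")).is_ok());
    assert!(parent_dir(std::path::Path::new("/")).is_err());
}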
} @@ -117,8 +117,7 @@ pub(crate) fn completion_item_kind( SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION, SymbolKind::Local => lsp_types::CompletionItemKind::VARIABLE, SymbolKind::Module => lsp_types::CompletionItemKind::MODULE, - SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT, - SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT, + SymbolKind::Struct | SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT, }, CompletionItemKind::Attribute => lsp_types::CompletionItemKind::ENUM_MEMBER, } diff --git a/crates/mun_language_server/tests/initialization.rs b/crates/mun_language_server/tests/initialization.rs index b98ee7fb3..a1484966a 100644 --- a/crates/mun_language_server/tests/initialization.rs +++ b/crates/mun_language_server/tests/initialization.rs @@ -1,5 +1,6 @@ mod support; +use lsp_types::{PartialResultParams, WorkDoneProgressParams}; use support::Project; #[test] @@ -42,8 +43,8 @@ fn test_document_symbols() { let symbols = server.send_request::( lsp_types::DocumentSymbolParams { text_document: server.doc_id("src/mod.mun"), - work_done_progress_params: Default::default(), - partial_result_params: Default::default(), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), }, ); diff --git a/crates/mun_language_server/tests/support.rs b/crates/mun_language_server/tests/support.rs index 5c975684d..61e25f615 100644 --- a/crates/mun_language_server/tests/support.rs +++ b/crates/mun_language_server/tests/support.rs @@ -26,7 +26,7 @@ pub struct Project<'a> { impl<'a> Project<'a> { /// Constructs a project from a fixture. - pub fn with_fixture(fixture: &str) -> Project { + pub fn with_fixture(fixture: &str) -> Project<'_> { Project { fixture, tmp_dir: None, @@ -97,7 +97,7 @@ impl Server { } } - /// Returns the LSP TextDocumentIdentifier for the given path + /// Returns the LSP `TextDocumentIdentifier` for the given path pub fn doc_id(&self, rel_path: &str) -> lsp_types::TextDocumentIdentifier { let path = self.tmp_dir.path().join(rel_path); lsp_types::TextDocumentIdentifier { @@ -125,7 +125,7 @@ impl Server { let mut total = 0; for msg in self.messages.borrow().iter() { if cond(msg) { - total += 1 + total += 1; } } while total < n { @@ -172,7 +172,7 @@ impl Server { N::Params: Serialize, { let r = Notification::new(N::METHOD.to_string(), params); - self.send_notification(r) + self.send_notification(r); } /// Sends a server notification to the main loop diff --git a/crates/mun_libloader/src/lib.rs b/crates/mun_libloader/src/lib.rs index 6907db5cb..ba7890838 100644 --- a/crates/mun_libloader/src/lib.rs +++ b/crates/mun_libloader/src/lib.rs @@ -89,11 +89,11 @@ impl MunLibrary { /// This operations executes a function in the munlib. There is no guarantee that the execution /// of the function wont result in undefined behavior. 
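// Illustrative sketch (standalone, not part of the patch): the `completion_item_kind` hunk above
// collapses two arms with identical bodies into a single or-pattern, which is what Clippy's
// `match_same_arms` suggests. A small standalone version with made-up enums:
#[derive(Debug, PartialEq)]
enum SymbolKind {
    Function,
    Struct,
    TypeAlias,
    Module,
}

#[derive(Debug, PartialEq)]
enum CompletionKind {
    Function,
    Struct,
    Module,
}

fn completion_kind(kind: SymbolKind) -> CompletionKind {
    match kind {
        SymbolKind::Function => CompletionKind::Function,
        SymbolKind::Module => CompletionKind::Module,
        // One arm instead of two duplicated bodies.
        SymbolKind::Struct | SymbolKind::TypeAlias => CompletionKind::Struct,
    }
}

fn main() {
    assert_eq!(completion_kind(SymbolKind::Function), CompletionKind::Function);
    assert_eq!(completion_kind(SymbolKind::Module), CompletionKind::Module);
    assert_eq!(completion_kind(SymbolKind::Struct), CompletionKind::Struct);
    assert_eq!(completion_kind(SymbolKind::TypeAlias), CompletionKind::Struct);
}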
pub unsafe fn get_info(&self) -> abi::AssemblyInfo<'static> { - let get_info_fn: libloading::Symbol abi::AssemblyInfo<'static>> = self - .0 - .library() - .get(abi::GET_INFO_FN_NAME.as_bytes()) - .unwrap(); + let get_info_fn: libloading::Symbol<'_, extern "C" fn() -> abi::AssemblyInfo<'static>> = + self.0 + .library() + .get(abi::GET_INFO_FN_NAME.as_bytes()) + .unwrap(); get_info_fn() } diff --git a/crates/mun_memory/src/diff.rs b/crates/mun_memory/src/diff.rs index 0d95f6752..7c1aff8d7 100644 --- a/crates/mun_memory/src/diff.rs +++ b/crates/mun_memory/src/diff.rs @@ -131,14 +131,25 @@ fn append_struct_mapping( insertions: Vec>, mapping: &mut Vec, ) { + struct LengthDescription<'f> { + deletion_idx: usize, + insertion_idx: usize, + old_index: usize, + new_index: usize, + old_ty: Type, + new_ty: Type, + old_fields: &'f Vec>, + new_fields: &'f Vec>, + length: usize, + } + let deletions: Vec<_> = deletions .iter() .enumerate() .map(|(deletion_index, Change { index, element })| { - let fields = element - .as_struct() - .map(|s| s.fields().iter().map(UniqueFieldInfo::from).collect()) - .unwrap_or_else(Vec::new); + let fields = element.as_struct().map_or_else(Vec::new, |s| { + s.fields().iter().map(UniqueFieldInfo::from).collect() + }); (deletion_index, *index, element.clone(), fields) }) @@ -148,27 +159,14 @@ fn append_struct_mapping( .iter() .enumerate() .map(|(insertion_index, Change { index, element })| { - let fields = element - .as_struct() - .map(|s| s.fields().iter().map(UniqueFieldInfo::from).collect()) - .unwrap_or_else(Vec::new); + let fields = element.as_struct().map_or_else(Vec::new, |s| { + s.fields().iter().map(UniqueFieldInfo::from).collect() + }); (insertion_index, *index, element.clone(), fields) }) .collect(); - struct LengthDescription<'f> { - deletion_idx: usize, - insertion_idx: usize, - old_index: usize, - new_index: usize, - old_ty: Type, - new_ty: Type, - old_fields: &'f Vec>, - new_fields: &'f Vec>, - length: usize, - } - // For all (insertion, deletion) pairs, calculate their `myers::diff_length` let mut myers_lengths: Vec<_> = insertions .iter() @@ -206,7 +204,7 @@ fn append_struct_mapping( None } }) - .collect::>() + .collect::>>() }) .collect(); @@ -294,7 +292,7 @@ fn append_struct_mapping( fn field_diff(old: &[UniqueFieldInfo<'_>], new: &[UniqueFieldInfo<'_>]) -> Vec { let diff = myers::compute_diff(old, new); let (deletions, insertions) = myers::split_diff(&diff); - let mut insertions: Vec>> = + let mut insertions: Vec>>> = insertions.into_iter().map(Some).collect(); let mut mapping = Vec::with_capacity(diff.len()); @@ -338,10 +336,10 @@ fn field_diff(old: &[UniqueFieldInfo<'_>], new: &[UniqueFieldInfo<'_>]) -> Vec], new: &[UniqueFieldInfo<'_>]) -> Vec], new: &[UniqueFieldInfo<'_>]) -> Vec(diff: &[Diff]) -> (Vec>, Vec> index: *index, element: ty.clone(), }), - _ => None, + Diff::Insert { .. } => None, }) .collect(); let insertions = diff @@ -217,7 +217,7 @@ pub fn split_diff(diff: &[Diff]) -> (Vec>, Vec> index: *index, element: ty.clone(), }), - _ => None, + Diff::Delete { .. 
} => None, }) .collect(); diff --git a/crates/mun_memory/src/gc.rs b/crates/mun_memory/src/gc.rs index 5f0822597..f986f183d 100644 --- a/crates/mun_memory/src/gc.rs +++ b/crates/mun_memory/src/gc.rs @@ -49,7 +49,7 @@ pub trait Array: Sized { pub trait GcRuntime: Send + Sync { type Array: Array; - /// Allocates an object of the given type returning a GcPtr + /// Allocates an object of the given type returning a [`GcPtr`] fn alloc(&self, ty: &Type) -> GcPtr; /// Allocates an array of the given type. `ty` must be an array type. diff --git a/crates/mun_memory/src/gc/mark_sweep.rs b/crates/mun_memory/src/gc/mark_sweep.rs index 088223b0f..cb508cb91 100644 --- a/crates/mun_memory/src/gc/mark_sweep.rs +++ b/crates/mun_memory/src/gc/mark_sweep.rs @@ -14,7 +14,6 @@ use std::{ alloc::{Layout, LayoutError}, borrow::Cow, collections::{HashMap, VecDeque}, - ops::{Deref, DerefMut}, pin::Pin, ptr::NonNull, }; @@ -27,7 +26,7 @@ pub struct Trace { impl Trace { fn new(obj: NonNull) -> Trace { let mut trace = Trace { - stack: Default::default(), + stack: VecDeque::default(), }; let obj_ref = unsafe { obj.as_ref() }; match obj_ref.ty.kind() { @@ -68,7 +67,7 @@ impl Iterator for Trace { } Some(TraceEvent::Reference(r)) => return Some(r.into()), Some(TraceEvent::InlineStruct(s)) => { - self.stack.push_back(CompositeTrace::Struct(s)) + self.stack.push_back(CompositeTrace::Struct(s)); } } } @@ -253,7 +252,7 @@ impl From for MemoryLayoutError { } } -/// Helper object to work with GcPtr that represents an array. +/// Helper object to work with [`GcPtr`] that represents an array. /// /// Arrays are stored in memory with a header which holds the length and capacity. The memory layout /// of an array looks like this in memory: @@ -466,7 +465,7 @@ where let size = object.layout().size(); // We want to return a pointer to the `ObjectInfo`, to be used as handle. - let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); + let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); { let mut objects = self.objects.write(); @@ -482,7 +481,7 @@ where let size = object.layout().size(); // We want to return a pointer to the `ObjectInfo`, to be used as handle. 
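// Illustrative sketch (standalone, not part of the patch): the `mark_sweep.rs` hunks around this
// point replace `object.as_ref().deref()` with `&*object.as_ref()`; Clippy's
// `explicit_deref_methods` prefers deref syntax over calling `Deref::deref` directly. The pattern
// itself — turning a pinned, heap-allocated object into a stable raw-pointer handle — in
// isolation, with `RawGcPtr` reduced to a local alias for the example:
use std::pin::Pin;

struct ObjectInfo {
    size: usize,
}

type RawGcPtr = *const ObjectInfo;

fn handle_of(object: &Pin<Box<ObjectInfo>>) -> RawGcPtr {
    // `object.as_ref()` is a `Pin<&ObjectInfo>`; `&*` dereferences it to `&ObjectInfo`,
    // which is then cast to the raw pointer used as an opaque handle.
    &*object.as_ref() as RawGcPtr
}

fn main() {
    let object = Box::pin(ObjectInfo { size: 16 });
    let handle = handle_of(&object);
    // The pinned box is still alive, so reading through the handle is sound here.
    assert_eq!(unsafe { (*handle).size }, 16);
}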
- let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); + let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); { let mut objects = self.objects.write(); @@ -618,133 +617,6 @@ where O: Observer, { fn map_memory(&self, mapping: Mapping) -> Vec { - let mut objects = self.objects.write(); - - // Determine which types are still allocated with deleted types - let deleted = objects - .iter() - .filter_map(|(ptr, object_info)| { - if mapping.deletions.contains(&object_info.ty) { - Some(*ptr) - } else { - None - } - }) - .collect(); - - // Update type pointers of types that didn't change - for (old_ty, new_ty) in mapping.identical { - for object_info in objects.values_mut() { - if object_info.ty == old_ty { - object_info.set(ObjectInfo { - data: ObjectInfoData { - ptr: unsafe { object_info.data.ptr }, - }, - roots: object_info.roots, - color: object_info.color, - ty: new_ty.clone(), - }); - } - } - } - - let mut new_allocations = Vec::new(); - - // Map struct types - objects - .values_mut() - .filter(|object_info| object_info.ty.is_struct()) - .for_each(|object_info| { - if let Some(conversion) = mapping.struct_mappings.get(&object_info.ty) { - let old_layout = object_info.ty.value_layout(); - let src = unsafe { object_info.data.ptr }; - let dest = unsafe { - NonNull::new_unchecked(std::alloc::alloc_zeroed( - conversion.new_ty.value_layout(), - )) - }; - - map_struct( - &mut new_allocations, - &mapping.struct_mappings, - &conversion.field_mapping, - src, - dest, - ); - - unsafe { std::alloc::dealloc(src.as_ptr(), old_layout) }; - - object_info.set(ObjectInfo { - data: ObjectInfoData { ptr: dest }, - roots: object_info.roots, - color: object_info.color, - ty: conversion.new_ty.clone(), - }); - } - }); - - // Map rooted array types - objects - .values_mut() - .filter(|object_info| object_info.ty.is_array()) - .for_each(|object_info| { - let mut ty = object_info.ty.clone(); - let mut stack = Vec::new(); - - while let Some(array) = ty.as_array() { - stack.push(ty.clone()); - ty = array.element_type(); - } - - let old_element_ty = ty; - if let Some(conversion) = mapping.struct_mappings.get(&old_element_ty) { - let mut new_ty = conversion.new_ty.clone(); - while stack.pop().is_some() { - new_ty = new_ty.array_type(); - } - - // Only arrays containing structs need to be mapped, as an array of arrays merely - // contains `GcPtr`s. - let new_element_ty = new_ty.as_array().unwrap().element_type(); - if new_element_ty.is_struct() { - // Conversion between ADTs are already handled in struct mappings - assert!(old_element_ty.is_struct()); - - let element_action = - resolve_struct_to_struct_edit(&old_element_ty, &new_element_ty, 0); - - map_array( - &mut new_allocations, - &mapping.struct_mappings, - unsafe { - NonNull::new_unchecked( - object_info.as_mut().deref_mut() as *mut ObjectInfo - ) - }, - &element_action, - &new_ty, - ); - } else { - // Update the type of arrays of arrays - object_info.as_mut().ty = conversion.new_ty.clone(); - } - } - }); - - // Retroactively store newly allocated objects - // This cannot be done while mapping because we hold a mutable reference to objects - for object in new_allocations { - let size = object.layout().size(); - // We want to return a pointer to the `ObjectInfo`, to - // be used as handle. 
- let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); - objects.insert(handle, object); - - self.log_alloc(handle, size); - } - - return deleted; - unsafe fn get_field_ptr(struct_ptr: NonNull, offset: usize) -> NonNull { let mut ptr = struct_ptr.as_ptr() as usize; ptr += offset; @@ -786,7 +658,7 @@ where dest, element_action, &new_ty.as_array().expect("Must be an array.").element_type(), - ) + ); }); unsafe { @@ -810,7 +682,7 @@ where let object = alloc_array(new_ty.clone(), 0); // We want to return a pointer to the `ObjectInfo`, to be used as handle. - let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); + let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); // Write handle to field let mut dest_handle = dest.cast::(); @@ -827,7 +699,7 @@ where let array_handle = ArrayHandle { obj: unsafe { - NonNull::new_unchecked(object.as_mut().deref_mut() as *mut ObjectInfo) + NonNull::new_unchecked(&mut *object.as_mut() as *mut ObjectInfo) }, }; @@ -842,7 +714,7 @@ where ); // We want to return a pointer to the `ObjectInfo`, to be used as handle. - let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); + let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); // Write handle to field let mut dest_handle = dest.cast::(); @@ -872,8 +744,8 @@ where src_ptr.as_ptr(), dest.as_ptr(), std::mem::size_of::(), - ) - }; + ); + } } mapping::Action::Cast { old_offset, old_ty } => { if !cast::try_cast_from_to( @@ -888,15 +760,13 @@ where mapping::Action::Copy { old_offset, size: size_in_bytes, - } => { - unsafe { - std::ptr::copy_nonoverlapping( - get_field_ptr(src, *old_offset).as_ptr(), - dest.as_ptr(), - *size_in_bytes, - ) - }; - } + } => unsafe { + std::ptr::copy_nonoverlapping( + get_field_ptr(src, *old_offset).as_ptr(), + dest.as_ptr(), + *size_in_bytes, + ); + }, mapping::Action::ElementFromArray { element_action, old_offset, @@ -919,7 +789,7 @@ where dest, element_action, new_ty, - ) + ); } else { // zero initialize } @@ -928,7 +798,7 @@ where let object = alloc_obj(new_ty.clone()); // We want to return a pointer to the `ObjectInfo`, to be used as handle. - let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); + let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); // Write handle to field let mut dest_handle = dest.cast::(); @@ -980,7 +850,7 @@ where ); // We want to return a pointer to the `ObjectInfo`, to be used as handle. 
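// Illustrative sketch (standalone, not part of the patch): the `Action::Copy` arm above is a raw
// byte copy from a field in the old object layout into the freshly allocated new one. A minimal,
// self-contained version of that step, copying `size_in_bytes` from `src + old_offset` into `dest`:
unsafe fn copy_field(src: *const u8, old_offset: usize, dest: *mut u8, size_in_bytes: usize) {
    // Source and destination come from different allocations, so the regions cannot overlap.
    std::ptr::copy_nonoverlapping(src.add(old_offset), dest, size_in_bytes);
}

fn main() {
    let old_object = [1u8, 2, 3, 4, 5, 6, 7, 8];
    let mut new_field = [0u8; 4];
    // Copy the 4-byte field that starts at offset 4 in the old object.
    unsafe { copy_field(old_object.as_ptr(), 4, new_field.as_mut_ptr(), 4) };
    assert_eq!(new_field, [5, 6, 7, 8]);
}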
- let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into(); + let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); // Write handle to field let mut dest_handle = dest.cast::(); @@ -1034,6 +904,133 @@ where ); } } + + let mut objects = self.objects.write(); + + // Determine which types are still allocated with deleted types + let deleted = objects + .iter() + .filter_map(|(ptr, object_info)| { + if mapping.deletions.contains(&object_info.ty) { + Some(*ptr) + } else { + None + } + }) + .collect(); + + // Update type pointers of types that didn't change + for (old_ty, new_ty) in mapping.identical { + for object_info in objects.values_mut() { + if object_info.ty == old_ty { + object_info.set(ObjectInfo { + data: ObjectInfoData { + ptr: unsafe { object_info.data.ptr }, + }, + roots: object_info.roots, + color: object_info.color, + ty: new_ty.clone(), + }); + } + } + } + + let mut new_allocations = Vec::new(); + + // Map struct types + objects + .values_mut() + .filter(|object_info| object_info.ty.is_struct()) + .for_each(|object_info| { + if let Some(conversion) = mapping.struct_mappings.get(&object_info.ty) { + let old_layout = object_info.ty.value_layout(); + let src = unsafe { object_info.data.ptr }; + let dest = unsafe { + NonNull::new_unchecked(std::alloc::alloc_zeroed( + conversion.new_ty.value_layout(), + )) + }; + + map_struct( + &mut new_allocations, + &mapping.struct_mappings, + &conversion.field_mapping, + src, + dest, + ); + + unsafe { std::alloc::dealloc(src.as_ptr(), old_layout) }; + + object_info.set(ObjectInfo { + data: ObjectInfoData { ptr: dest }, + roots: object_info.roots, + color: object_info.color, + ty: conversion.new_ty.clone(), + }); + } + }); + + // Map rooted array types + objects + .values_mut() + .filter(|object_info| object_info.ty.is_array()) + .for_each(|object_info| { + let mut ty = object_info.ty.clone(); + let mut stack = Vec::new(); + + while let Some(array) = ty.as_array() { + stack.push(ty.clone()); + ty = array.element_type(); + } + + let old_element_ty = ty; + if let Some(conversion) = mapping.struct_mappings.get(&old_element_ty) { + let mut new_ty = conversion.new_ty.clone(); + while stack.pop().is_some() { + new_ty = new_ty.array_type(); + } + + // Only arrays containing structs need to be mapped, as an array of arrays merely + // contains `GcPtr`s. + let new_element_ty = new_ty.as_array().unwrap().element_type(); + if new_element_ty.is_struct() { + // Conversion between ADTs are already handled in struct mappings + assert!(old_element_ty.is_struct()); + + let element_action = + resolve_struct_to_struct_edit(&old_element_ty, &new_element_ty, 0); + + map_array( + &mut new_allocations, + &mapping.struct_mappings, + unsafe { + NonNull::new_unchecked( + &mut *object_info.as_mut() as *mut ObjectInfo + ) + }, + &element_action, + &new_ty, + ); + } else { + // Update the type of arrays of arrays + object_info.as_mut().ty = conversion.new_ty.clone(); + } + } + }); + + // Retroactively store newly allocated objects + // This cannot be done while mapping because we hold a mutable reference to objects + for object in new_allocations { + let size = object.layout().size(); + // We want to return a pointer to the `ObjectInfo`, to + // be used as handle. 
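// Illustrative sketch (standalone, not part of the patch): the `map_memory` reshuffle in this file
// moves the nested helper functions ahead of the statements (Clippy's `items_after_statements`)
// and ends the function with a plain `deleted` expression instead of an early `return deleted;`.
// The same shape in miniature:
fn retain_even(values: &[u32]) -> Vec<u32> {
    // Nested items first, statements after them.
    fn is_even(value: u32) -> bool {
        value % 2 == 0
    }

    let kept: Vec<u32> = values.iter().copied().filter(|v| is_even(*v)).collect();

    // Trailing expression instead of `return kept;`.
    kept
}

fn main() {
    assert_eq!(retain_even(&[1, 2, 3, 4]), vec![2, 4]);
}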
+ let handle = (&*object.as_ref() as *const _ as RawGcPtr).into(); + objects.insert(handle, object); + + self.log_alloc(handle, size); + } + + deleted } } diff --git a/crates/mun_memory/src/gc/root_ptr.rs b/crates/mun_memory/src/gc/root_ptr.rs index 51e381ab3..62811771f 100644 --- a/crates/mun_memory/src/gc/root_ptr.rs +++ b/crates/mun_memory/src/gc/root_ptr.rs @@ -16,7 +16,7 @@ where { fn clone(&self) -> Self { if let Some(runtime) = self.runtime.upgrade() { - runtime.as_ref().root(self.handle) + runtime.as_ref().root(self.handle); } Self { handle: self.handle, @@ -29,7 +29,7 @@ impl GcRootPtr where G: GcRuntime, { - /// Constructs a new GCRootHandle from a runtime and a handle + /// Constructs a new [`GcRootPtr`] from a runtime and a handle pub fn new(runtime: &Arc, handle: GcPtr) -> Self { runtime.as_ref().root(handle); Self { @@ -71,7 +71,7 @@ where { fn drop(&mut self) { if let Some(runtime) = self.runtime.upgrade() { - runtime.as_ref().unroot(self.handle) + runtime.as_ref().unroot(self.handle); } } } diff --git a/crates/mun_memory/src/lib.rs b/crates/mun_memory/src/lib.rs index 70a144cf3..0b2e4d9de 100644 --- a/crates/mun_memory/src/lib.rs +++ b/crates/mun_memory/src/lib.rs @@ -1,5 +1,3 @@ -extern crate core; - pub use r#type::{ ArrayType, Field, FieldData, HasStaticType, PointerType, StructType, StructTypeBuilder, Type, TypeCollectionStats, TypeKind, diff --git a/crates/mun_memory/src/mapping.rs b/crates/mun_memory/src/mapping.rs index 684d2dd78..fd88e6f46 100644 --- a/crates/mun_memory/src/mapping.rs +++ b/crates/mun_memory/src/mapping.rs @@ -4,7 +4,7 @@ use crate::{ diff::{compute_struct_diff, FieldDiff, StructDiff}, gc::GcPtr, r#type::Type, - ArrayType, TypeKind, + ArrayType, Field, TypeKind, }; use itertools::Itertools; use std::collections::{HashMap, HashSet}; @@ -109,7 +109,7 @@ impl Mapping { insertions.insert(ty.clone()); } StructDiff::Move { old_ty, new_ty, .. 
} => { - identical.push((old_ty.clone(), new_ty.clone())) + identical.push((old_ty.clone(), new_ty.clone())); } } } @@ -222,7 +222,7 @@ pub unsafe fn field_mapping(old_ty: &Type, new_ty: &Type, diff: &[FieldDiff]) -> } => old_index.map(|old_index| { let old_offset = old_fields .get(old_index) - .map(|field| field.offset()) + .map(Field::offset) .expect("The old field must exist."); (*new_index, resolve_edit(old_type, new_type, old_offset)) }), @@ -243,7 +243,7 @@ pub unsafe fn field_mapping(old_ty: &Type, new_ty: &Type, diff: &[FieldDiff]) -> } => { let old_offset = old_fields .get(*old_index) - .map(|field| field.offset()) + .map(Field::offset) .expect("Old field must exist."); Some(( @@ -276,7 +276,7 @@ pub unsafe fn field_mapping(old_ty: &Type, new_ty: &Type, diff: &[FieldDiff]) -> { let old_offset = old_fields .get(*new_index) - .map(|field| field.offset()) + .map(Field::offset) .expect("The old field must exist."); let action = mapping.get_mut(*new_index).unwrap(); @@ -364,7 +364,7 @@ fn resolve_primitive_to_primitive_edit( fn resolve_primitive_to_array_edit( old_ty: &Type, - new_array: &ArrayType, + new_array: &ArrayType<'_>, old_offset: usize, ) -> Action { Action::ArrayFromValue { @@ -438,7 +438,11 @@ pub fn resolve_struct_to_struct_edit(old_ty: &Type, new_ty: &Type, old_offset: u } } -fn resolve_struct_to_array_edit(old_ty: &Type, new_array: &ArrayType, old_offset: usize) -> Action { +fn resolve_struct_to_array_edit( + old_ty: &Type, + new_array: &ArrayType<'_>, + old_offset: usize, +) -> Action { Action::ArrayFromValue { element_action: Box::new(resolve_edit(old_ty, &new_array.element_type(), 0)), old_offset, @@ -450,7 +454,7 @@ fn resolve_pointer_edit(_old_ty: &Type, _new_ty: &Type) -> Action { unreachable!() } -fn resolve_array_edit(old_array: &ArrayType, new_ty: &Type, old_offset: usize) -> Action { +fn resolve_array_edit(old_array: &ArrayType<'_>, new_ty: &Type, old_offset: usize) -> Action { match &new_ty.kind() { TypeKind::Primitive(_) => resolve_array_to_primitive_edit(old_array, new_ty, old_offset), TypeKind::Struct(_) => resolve_array_to_struct_edit(old_array, new_ty, old_offset), @@ -460,7 +464,7 @@ fn resolve_array_edit(old_array: &ArrayType, new_ty: &Type, old_offset: usize) - } fn resolve_array_to_primitive_edit( - old_array: &ArrayType, + old_array: &ArrayType<'_>, new_ty: &Type, old_offset: usize, ) -> Action { @@ -470,7 +474,11 @@ fn resolve_array_to_primitive_edit( } } -fn resolve_array_to_struct_edit(old_array: &ArrayType, new_ty: &Type, old_offset: usize) -> Action { +fn resolve_array_to_struct_edit( + old_array: &ArrayType<'_>, + new_ty: &Type, + old_offset: usize, +) -> Action { Action::ElementFromArray { old_offset, element_action: Box::new(resolve_edit(&old_array.element_type(), new_ty, 0)), @@ -478,8 +486,8 @@ fn resolve_array_to_struct_edit(old_array: &ArrayType, new_ty: &Type, old_offset } fn resolve_array_to_array_edit( - old_array: &ArrayType, - new_array: &ArrayType, + old_array: &ArrayType<'_>, + new_array: &ArrayType<'_>, old_offset: usize, ) -> Action { let old_element_type = old_array.element_type(); diff --git a/crates/mun_memory/src/type/ffi/array.rs b/crates/mun_memory/src/type/ffi/array.rs index 029e8954d..c24b0f9d3 100644 --- a/crates/mun_memory/src/type/ffi/array.rs +++ b/crates/mun_memory/src/type/ffi/array.rs @@ -31,13 +31,13 @@ impl ArrayInfo { } Ok(ManuallyDrop::new(Arc::from_raw( - self.1 as *const TypeDataStore, + self.1.cast::(), ))) } /// Returns the struct info associated with the Type unsafe fn inner(&self) -> Result<&ArrayData, 
String> { - match (self.0 as *const ArrayData).as_ref() { + match self.0.cast::().as_ref() { Some(store) => Ok(store), None => Err(String::from("null pointer")), } diff --git a/crates/mun_memory/src/type/ffi/mod.rs b/crates/mun_memory/src/type/ffi/mod.rs index e66e11199..11d7081d8 100644 --- a/crates/mun_memory/src/type/ffi/mod.rs +++ b/crates/mun_memory/src/type/ffi/mod.rs @@ -28,10 +28,7 @@ pub struct Type(*const c_void, *const c_void); impl From for Type { fn from(ty: crate::Type) -> Self { let ty = ManuallyDrop::new(ty); - Type( - ty.inner.as_ptr() as *const _, - Arc::as_ptr(&ty.store) as *const _, - ) + Type(ty.inner.as_ptr() as *const _, Arc::as_ptr(&ty.store).cast()) } } @@ -43,13 +40,14 @@ impl Type { } Ok(ManuallyDrop::new(Arc::from_raw( - self.1 as *const TypeDataStore, + self.1.cast::(), ))) } /// Returns the store associated with the Type or unsafe fn inner(&self) -> Result<&TypeData, String> { - (self.0 as *const TypeData) + self.0 + .cast::() .as_ref() .ok_or_else(|| String::from("null pointer")) } @@ -71,7 +69,7 @@ impl Type { Ok(super::Type { inner: NonNull::new_unchecked(self.0 as *mut _), - store: Arc::from_raw(self.1 as *const _), + store: Arc::from_raw(self.1.cast()), }) } @@ -258,15 +256,15 @@ pub unsafe extern "C" fn mun_type_kind(ty: Type, kind: *mut TypeKind) -> ErrorHa TypeDataKind::Primitive(guid) => TypeKind::Primitive(*guid), TypeDataKind::Pointer(pointer) => TypeKind::Pointer(PointerInfo( (pointer as *const PointerData).cast(), - Arc::as_ptr(ManuallyDrop::deref(&store)) as *const _, + Arc::as_ptr(ManuallyDrop::deref(&store)).cast(), )), TypeDataKind::Struct(s) => TypeKind::Struct(StructInfo( (s as *const StructData).cast(), - Arc::as_ptr(ManuallyDrop::deref(&store)) as *const _, + Arc::as_ptr(ManuallyDrop::deref(&store)).cast(), )), TypeDataKind::Array(a) => TypeKind::Array(ArrayInfo( (a as *const ArrayData).cast(), - Arc::as_ptr(ManuallyDrop::deref(&store)) as *const _, + Arc::as_ptr(ManuallyDrop::deref(&store)).cast(), )), TypeDataKind::Uninitialized => unreachable!(), }; @@ -314,7 +312,7 @@ pub unsafe extern "C" fn mun_types_destroy(types: Types) -> ErrorHandle { return ErrorHandle::new("invalid argument 'types': null pointer"); } else if types.count > 0 { let types = Vec::from_raw_parts(types.types as *mut Type, types.count, types.count); - for ty in types.into_iter() { + for ty in types { // Take ownership of the stored type and drop it drop(mun_error_try!(ty .to_owned() @@ -422,6 +420,12 @@ mod test { #[test] fn test_mun_type_size() { + fn assert_size(ty: Type, expected_size: usize) { + let mut size = MaybeUninit::uninit(); + assert!(unsafe { mun_type_size(ty, size.as_mut_ptr()) }.is_ok()); + assert_eq!(unsafe { size.assume_init() }, expected_size); + } + let ffi_i8 = mun_type_primitive(PrimitiveType::I8); let ffi_u16 = mun_type_primitive(PrimitiveType::U16); let ffi_i32 = mun_type_primitive(PrimitiveType::I32); @@ -436,12 +440,6 @@ mod test { unsafe { mun_type_release(ffi_i32) }; unsafe { mun_type_release(ffi_u16) }; unsafe { mun_type_release(ffi_i8) }; - - fn assert_size(ty: Type, expected_size: usize) { - let mut size = MaybeUninit::uninit(); - assert!(unsafe { mun_type_size(ty, size.as_mut_ptr()) }.is_ok()); - assert_eq!(unsafe { size.assume_init() }, expected_size); - } } #[test] @@ -456,6 +454,12 @@ mod test { #[test] fn test_mun_type_alignment() { + fn assert_alignment(ty: Type, expected_alignment: usize) { + let mut align = MaybeUninit::uninit(); + assert!(unsafe { mun_type_alignment(ty, align.as_mut_ptr()) }.is_ok()); + assert_eq!(unsafe { 
align.assume_init() }, expected_alignment); + } + let ffi_i8 = mun_type_primitive(PrimitiveType::I8); let ffi_u16 = mun_type_primitive(PrimitiveType::U16); let ffi_i32 = mun_type_primitive(PrimitiveType::I32); @@ -470,12 +474,6 @@ mod test { unsafe { mun_type_release(ffi_i32) }; unsafe { mun_type_release(ffi_u16) }; unsafe { mun_type_release(ffi_i8) }; - - fn assert_alignment(ty: Type, expected_alignment: usize) { - let mut align = MaybeUninit::uninit(); - assert!(unsafe { mun_type_alignment(ty, align.as_mut_ptr()) }.is_ok()); - assert_eq!(unsafe { align.assume_init() }, expected_alignment); - } } #[test] diff --git a/crates/mun_memory/src/type/ffi/pointer.rs b/crates/mun_memory/src/type/ffi/pointer.rs index a54ebb78d..378d17534 100644 --- a/crates/mun_memory/src/type/ffi/pointer.rs +++ b/crates/mun_memory/src/type/ffi/pointer.rs @@ -31,13 +31,13 @@ impl PointerInfo { } Ok(ManuallyDrop::new(Arc::from_raw( - self.1 as *const TypeDataStore, + self.1.cast::(), ))) } /// Returns the pointer ino associated with the Type unsafe fn inner(&self) -> Result<&PointerData, String> { - match (self.0 as *const PointerData).as_ref() { + match self.0.cast::().as_ref() { Some(store) => Ok(store), None => Err(String::from("null pointer")), } @@ -46,8 +46,8 @@ impl PointerInfo { /// Converts from C FFI type to a Rust type. unsafe fn to_rust<'a>(self) -> Result, String> { match ( - (self.0 as *const PointerData).as_ref(), - (self.1 as *const Arc).as_ref(), + self.0.cast::().as_ref(), + self.1.cast::>().as_ref(), ) { (Some(inner), Some(store)) => Ok(super::super::PointerType { inner, store }), _ => Err(String::from("null pointer")), diff --git a/crates/mun_memory/src/type/ffi/primitive.rs b/crates/mun_memory/src/type/ffi/primitive.rs index 90bfc3a63..c4a39c0b9 100644 --- a/crates/mun_memory/src/type/ffi/primitive.rs +++ b/crates/mun_memory/src/type/ffi/primitive.rs @@ -58,6 +58,21 @@ mod test { #[test] fn test_primitives() { + fn test_primitive(primitive_type: PrimitiveType) { + let ffi_ty = mun_type_primitive(primitive_type); + + assert_getter1!(mun_type_kind(ffi_ty, ffi_kind)); + let guid = match ffi_kind { + TypeKind::Primitive(guid) => guid, + _ => panic!("invalid type kind for primitive"), + }; + + let rust_ty = unsafe { ffi_ty.to_owned() }.unwrap(); + let static_ty = T::type_info(); + assert_eq!(&rust_ty, static_ty); + assert_eq!(static_ty.as_concrete().unwrap(), &guid); + } + test_primitive::(Bool); test_primitive::(U8); test_primitive::(U16); @@ -73,20 +88,5 @@ mod test { test_primitive::(F64); test_primitive::<()>(Empty); test_primitive::(Void); - - fn test_primitive(primitive_type: PrimitiveType) { - let ffi_ty = mun_type_primitive(primitive_type); - - assert_getter1!(mun_type_kind(ffi_ty, ffi_kind)); - let guid = match ffi_kind { - TypeKind::Primitive(guid) => guid, - _ => panic!("invalid type kind for primitive"), - }; - - let rust_ty = unsafe { ffi_ty.to_owned() }.unwrap(); - let static_ty = T::type_info(); - assert_eq!(&rust_ty, static_ty); - assert_eq!(static_ty.as_concrete().unwrap(), &guid); - } } } diff --git a/crates/mun_memory/src/type/ffi/struct.rs b/crates/mun_memory/src/type/ffi/struct.rs index 7cb368599..3de973098 100644 --- a/crates/mun_memory/src/type/ffi/struct.rs +++ b/crates/mun_memory/src/type/ffi/struct.rs @@ -36,7 +36,7 @@ impl<'t> From> for StructInfo { impl StructInfo { /// Returns the struct info associated with the Type unsafe fn inner(&self) -> Result<&StructData, String> { - match (self.0 as *const StructData).as_ref() { + match self.0.cast::().as_ref() { Some(store) => 
Ok(store), None => Err(String::from("null pointer")), } @@ -113,7 +113,7 @@ pub unsafe extern "C" fn mun_fields_find_by_name( }; let has_field = try_deref_mut!(has_field); let field = try_deref_mut!(field); - let name = std::str::from_utf8_unchecked(slice::from_raw_parts(name as *const u8, len)); + let name = std::str::from_utf8_unchecked(slice::from_raw_parts(name.cast::(), len)); *has_field = false; @@ -168,12 +168,11 @@ pub unsafe extern "C" fn mun_struct_type_fields( let fields = try_deref_mut!(fields); // Get all fields - let mut fields_vec = Vec::from_iter( - inner - .fields - .iter() - .map(|field| Field((field as *const FieldData).cast(), ty.1)), - ); + let mut fields_vec = inner + .fields + .iter() + .map(|field| Field((field as *const FieldData).cast(), ty.1)) + .collect::>(); // Ensures that the length and the capacity are the same fields_vec.shrink_to_fit(); @@ -209,13 +208,13 @@ impl Field { } Ok(ManuallyDrop::new(Arc::from_raw( - self.1 as *const TypeDataStore, + self.1.cast::(), ))) } /// Returns the field info associated with this instance unsafe fn inner(&self) -> Result<&FieldData, String> { - match (self.0 as *const FieldData).as_ref() { + match self.0.cast::().as_ref() { Some(info) => Ok(info), None => Err(String::from("null pointer")), } diff --git a/crates/mun_memory/src/type/mod.rs b/crates/mun_memory/src/type/mod.rs index 977a33b7b..b49cb14c2 100644 --- a/crates/mun_memory/src/type/mod.rs +++ b/crates/mun_memory/src/type/mod.rs @@ -77,7 +77,7 @@ impl TypeDataStore { NonNull::new_unchecked(Box::as_mut(ty) as *mut TypeData) }); } else { - ty.mark = Mark::Unused + ty.mark = Mark::Unused; }; } } @@ -124,7 +124,7 @@ impl TypeDataStore { &ty.array_type, ] { let read_lock = indirection.read(); - if let &Some(mut indirection_ref) = read_lock.deref() { + if let &Some(mut indirection_ref) = &*read_lock { let reference = unsafe { indirection_ref.as_mut() }; if reference.mark == Mark::Unused { reference.mark = Mark::Used; @@ -214,9 +214,9 @@ impl TypeDataStore { layout, data, external_references: AtomicUsize::new(0), - immutable_pointer_type: Default::default(), - mutable_pointer_type: Default::default(), - array_type: Default::default(), + immutable_pointer_type: RwLock::default(), + mutable_pointer_type: RwLock::default(), + array_type: RwLock::default(), mark: Mark::Initializing, })); @@ -225,7 +225,7 @@ impl TypeDataStore { // as it lives. 
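// Illustrative sketch (standalone, not part of the patch): the FFI hunks above replace `as`
// pointer conversions with `pointer::cast::<T>()`, which Clippy's `ptr_as_ptr` prefers because it
// can only change the pointee type, never the constness or mutability by accident. In isolation,
// with a made-up `TypeData` struct:
#[repr(C)]
struct TypeData {
    size: usize,
}

fn main() {
    let data = TypeData { size: 8 };
    // Erase the type, as an opaque FFI handle field would.
    let opaque: *const std::ffi::c_void = (&data as *const TypeData).cast();
    // Recover the typed pointer with `cast::<T>()` instead of `opaque as *const TypeData`.
    let typed: *const TypeData = opaque.cast::<TypeData>();
    assert_eq!(unsafe { (*typed).size }, 8);
}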
let entry = unsafe { NonNull::new_unchecked( - entries.back().expect("didnt insert").deref() as *const TypeData as *mut _, + &**entries.back().expect("didnt insert") as *const TypeData as *mut _ ) }; @@ -343,7 +343,7 @@ impl PartialEq for Type { impl Eq for Type {} impl Hash for Type { fn hash(&self, state: &mut H) { - self.inner().hash(state) + self.inner().hash(state); } } @@ -417,7 +417,7 @@ impl TypeData { let inner = unsafe { ty.inner.as_mut() }; // Recheck if another thread acquired the write lock in the mean time - if let Some(element_ty) = write_lock.deref() { + if let Some(element_ty) = &*write_lock { inner.mark = Mark::Used; return Type { inner: *element_ty, @@ -467,7 +467,7 @@ impl TypeData { let inner = unsafe { ty.inner.as_mut() }; // Recheck if another thread acquired the write lock in the mean time - if let Some(element_ty) = write_lock.deref() { + if let Some(element_ty) = &*write_lock { inner.mark = Mark::Used; return Type { inner: *element_ty, @@ -700,7 +700,7 @@ impl<'t> Display for PointerType<'t> { impl Hash for StructData { fn hash(&self, state: &mut H) { - self.guid.hash(state) + self.guid.hash(state); } } @@ -847,7 +847,7 @@ impl Type { } } - /// Returns true if this instance represents the TypeInfo of the given type. + /// Returns true if this instance represents the `TypeInfo` of the given type. /// /// ```rust /// # use mun_memory::HasStaticType; @@ -1003,7 +1003,7 @@ impl StructData { struct_info: &'abi abi::StructDefinition<'abi>, type_table: &TypeTable, ) -> Result> { - let fields: Result, TryFromAbiError> = izip!( + let fields: Result, TryFromAbiError<'abi>> = izip!( struct_info.field_names(), struct_info.field_types(), struct_info.field_offsets() @@ -1125,7 +1125,7 @@ impl StructTypeBuilder { mut self, iter: impl IntoIterator, ) -> Self { - for (name, ty) in iter.into_iter() { + for (name, ty) in iter { self = self.add_field(name.into(), ty); } self diff --git a/crates/mun_memory/src/type_table.rs b/crates/mun_memory/src/type_table.rs index b2a694064..c9a14b22f 100644 --- a/crates/mun_memory/src/type_table.rs +++ b/crates/mun_memory/src/type_table.rs @@ -11,7 +11,7 @@ pub struct TypeTable { } impl TypeTable { - /// Returns the TypeInfo for the type with the given name. + /// Returns the [`TypeInfo`] for the type with the given name. pub fn find_type_info_by_name>(&self, name: S) -> Option { self.type_name_to_type_info.get(name.as_ref()).cloned() } @@ -58,7 +58,7 @@ impl TypeTable { self.concrete.insert(guid, ty) } - /// Removes the specified TypeInfo from the lookup table. + /// Removes the specified [`TypeInfo`] from the lookup table. pub fn remove_type(&mut self, ty: &Type) -> Option { match ty.as_concrete() { None => panic!("can only remove concrete types"), @@ -92,8 +92,8 @@ impl TypeTable { impl Default for TypeTable { fn default() -> Self { let mut type_table = Self { - concrete: Default::default(), - type_name_to_type_info: Default::default(), + concrete: FxHashMap::default(), + type_name_to_type_info: FxHashMap::default(), }; // Add all primitive types diff --git a/crates/mun_memory/tests/diff/util.rs b/crates/mun_memory/tests/diff/util.rs index 05fccf3e5..426696dd1 100644 --- a/crates/mun_memory/tests/diff/util.rs +++ b/crates/mun_memory/tests/diff/util.rs @@ -53,7 +53,7 @@ pub(crate) fn apply_diff(old: &[Type], diff: Vec) -> Vec { StructDiff::Move { old_index, .. } => { combined.remove(*old_index); } - _ => (), + StructDiff::Insert { .. 
} => (), } } for diff in diff { @@ -74,10 +74,25 @@ pub(crate) fn apply_diff(old: &[Type], diff: Vec) -> Vec { fn apply_struct_mapping( name: &str, - old_struct: StructType, - new_struct: StructType, + old_struct: StructType<'_>, + new_struct: StructType<'_>, mapping: &[FieldDiff], ) -> Type { + fn get_new_index(diff: &FieldDiff) -> usize { + match diff { + FieldDiff::Insert { index, .. } => *index, + FieldDiff::Move { new_index, .. } => *new_index, + _ => std::usize::MAX, + } + } + + fn edit_field(kind: &FieldEditKind, old_field: &mut (String, Type), new_field: Field<'_>) { + match *kind { + FieldEditKind::ChangedTyped => old_field.1 = new_field.ty(), + FieldEditKind::RenamedField => old_field.0 = new_field.name().to_owned(), + } + } + let mut fields: VecDeque<_> = old_struct .fields() .iter() @@ -97,21 +112,6 @@ fn apply_struct_mapping( } } - fn get_new_index(diff: &FieldDiff) -> usize { - match diff { - FieldDiff::Insert { index, .. } => *index, - FieldDiff::Move { new_index, .. } => *new_index, - _ => std::usize::MAX, - } - } - - fn edit_field(kind: &FieldEditKind, old_field: &mut (String, Type), new_field: Field) { - match *kind { - FieldEditKind::ChangedTyped => old_field.1 = new_field.ty(), - FieldEditKind::RenamedField => old_field.0 = new_field.name().to_owned(), - } - } - // Sort elements in ascending order of their insertion indices. let mut additions: Vec<_> = mapping .iter() diff --git a/crates/mun_memory/tests/gc/structs.rs b/crates/mun_memory/tests/gc/structs.rs index d37caf6d9..dd7bd16b6 100644 --- a/crates/mun_memory/tests/gc/structs.rs +++ b/crates/mun_memory/tests/gc/structs.rs @@ -15,7 +15,7 @@ struct FooObject { impl Trace for FooObject { fn trace(&self, handles: &mut Vec) { - handles.push(self.bar) + handles.push(self.bar); } } @@ -42,7 +42,7 @@ fn test_trace() { let mut trace = foo_type_info.trace(foo_handle); assert_eq!(trace.next(), Some(bar_handle)); - assert_eq!(trace.next(), None) + assert_eq!(trace.next(), None); } #[test] diff --git a/crates/mun_memory/tests/gc/util.rs b/crates/mun_memory/tests/gc/util.rs index c886b0268..a1d7a14a6 100644 --- a/crates/mun_memory/tests/gc/util.rs +++ b/crates/mun_memory/tests/gc/util.rs @@ -30,7 +30,7 @@ impl gc::Observer for EventAggregator { type Event = T; fn event(&self, event: T) { - self.events.lock().push(event) + self.events.lock().push(event); } } diff --git a/crates/mun_paths/src/abs_path.rs b/crates/mun_paths/src/abs_path.rs index d96b54161..db16a5196 100644 --- a/crates/mun_paths/src/abs_path.rs +++ b/crates/mun_paths/src/abs_path.rs @@ -43,10 +43,10 @@ impl TryFrom for AbsPathBuf { type Error = PathBuf; fn try_from(path: PathBuf) -> Result { - if !path.is_absolute() { - Err(path) - } else { + if path.is_absolute() { Ok(AbsPathBuf(path)) + } else { + Err(path) } } } @@ -92,10 +92,10 @@ impl<'a> TryFrom<&'a Path> for &'a AbsPath { type Error = &'a Path; fn try_from(path: &'a Path) -> Result { - if !path.is_absolute() { - Err(path) - } else { + if path.is_absolute() { Ok(AbsPath::assert_new(path)) + } else { + Err(path) } } } diff --git a/crates/mun_project/src/manifest.rs b/crates/mun_project/src/manifest.rs index 961c5605c..ef8447fd0 100644 --- a/crates/mun_project/src/manifest.rs +++ b/crates/mun_project/src/manifest.rs @@ -67,7 +67,7 @@ impl PackageId { } impl fmt::Display for PackageId { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{} v{}", self.name(), self.version()) } } diff --git a/crates/mun_project/src/package.rs 
b/crates/mun_project/src/package.rs index c53903cd4..e8832a7ec 100644 --- a/crates/mun_project/src/package.rs +++ b/crates/mun_project/src/package.rs @@ -64,7 +64,7 @@ impl Package { } impl fmt::Display for Package { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.package_id()) } } diff --git a/crates/mun_project/src/project_manifest.rs b/crates/mun_project/src/project_manifest.rs index 11ac700b5..b6d08ecae 100644 --- a/crates/mun_project/src/project_manifest.rs +++ b/crates/mun_project/src/project_manifest.rs @@ -36,8 +36,7 @@ impl ProjectManifest { path.is_file() && path .file_name() - .map(|file_name| file_name == MANIFEST_FILENAME) - .unwrap_or(false) + .is_some_and(|file_name| file_name == MANIFEST_FILENAME) }) .map(|path| ProjectManifest { path: AbsPathBuf::try_from(path).expect( diff --git a/crates/mun_runtime/src/adt.rs b/crates/mun_runtime/src/adt.rs index 05265d3c9..e9021ff43 100644 --- a/crates/mun_runtime/src/adt.rs +++ b/crates/mun_runtime/src/adt.rs @@ -254,7 +254,7 @@ impl<'s> Marshal<'s> for StructRef<'s> { value.into_raw().get_ptr(), dest, type_info.value_layout().size(), - ) + ); }; } else { unsafe { *ptr.as_mut() = value.into_raw() }; diff --git a/crates/mun_runtime/src/array.rs b/crates/mun_runtime/src/array.rs index e07c6784c..f5c54ca46 100644 --- a/crates/mun_runtime/src/array.rs +++ b/crates/mun_runtime/src/array.rs @@ -44,7 +44,7 @@ impl<'array, T: Marshal<'array> + 'array> ArrayRef<'array, T> { Self { raw, runtime, - _phantom: Default::default(), + _phantom: PhantomData, } } @@ -175,7 +175,7 @@ impl RootedArray { assert!(gc.ptr_type(raw.0).is_array()); Self { handle: GcRootPtr::new(gc, raw.0), - _data: Default::default(), + _data: PhantomData, } } diff --git a/crates/mun_runtime/src/assembly.rs b/crates/mun_runtime/src/assembly.rs index e8ce120e8..1238f34bb 100644 --- a/crates/mun_runtime/src/assembly.rs +++ b/crates/mun_runtime/src/assembly.rs @@ -174,7 +174,7 @@ impl Assembly { let mut failed_to_link = Vec::new(); // Try to link outstanding entries - for (dispatch_ptr, fn_prototype) in to_link.into_iter() { + for (dispatch_ptr, fn_prototype) in to_link { // Get the types of the function arguments let fn_proto_arg_type_infos = fn_prototype .signature @@ -426,7 +426,7 @@ impl Assembly { let mut newly_linked = HashMap::new(); std::mem::swap(unlinked_assemblies, &mut newly_linked); - for (old_path, new_assembly) in newly_linked.into_iter() { + for (old_path, new_assembly) in newly_linked { assert!( linked_assemblies.remove(&old_path).is_some(), "Assembly must exist." @@ -443,12 +443,12 @@ impl Assembly { } /// Returns the assembly's information. - pub fn info(&self) -> &abi::AssemblyInfo { + pub fn info(&self) -> &abi::AssemblyInfo<'_> { &self.info } /// Returns the assembly's information. - pub fn info_mut(&mut self) -> &mut abi::AssemblyInfo { + pub fn info_mut(&mut self) -> &mut abi::AssemblyInfo<'_> { // HACK: We want to make sure that the assembly info never outlives self. unsafe { std::mem::transmute(&mut self.info) } } diff --git a/crates/mun_runtime/src/dispatch_table.rs b/crates/mun_runtime/src/dispatch_table.rs index d012b5d61..d97ae2e98 100644 --- a/crates/mun_runtime/src/dispatch_table.rs +++ b/crates/mun_runtime/src/dispatch_table.rs @@ -18,7 +18,7 @@ impl DispatchTable { /// Retrieves the name of all available functions. 
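// Illustrative sketch (standalone, not part of the patch): the `get_fn_names` hunk just below
// passes the method directly (`String::as_str`) instead of wrapping it in a closure, as Clippy's
// `redundant_closure_for_method_calls` suggests. The same idea over a plain map:
use std::collections::BTreeMap;

fn function_names(functions: &BTreeMap<String, usize>) -> impl Iterator<Item = &str> {
    // `String::as_str` already has the right shape (`&String -> &str`), so no closure is needed.
    functions.keys().map(String::as_str)
}

fn main() {
    let mut functions = BTreeMap::new();
    functions.insert("main".to_string(), 0usize);
    functions.insert("update".to_string(), 1);
    assert_eq!(function_names(&functions).collect::<Vec<_>>(), ["main", "update"]);
}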
pub fn get_fn_names(&self) -> impl Iterator { - self.functions.keys().map(|key| key.as_str()) + self.functions.keys().map(String::as_str) } /// Inserts the `fn_info` for `fn_path` into the dispatch table. @@ -39,7 +39,7 @@ impl DispatchTable { // } /// Removes the function definitions from the given assembly from this dispatch table. - pub fn remove_module(&mut self, assembly: &abi::ModuleInfo) { + pub fn remove_module(&mut self, assembly: &abi::ModuleInfo<'_>) { for function in assembly.functions() { if let Some(value) = self.functions.get(function.prototype.name()) { if value.fn_ptr == function.fn_ptr { @@ -50,7 +50,7 @@ impl DispatchTable { } /// Add the function definitions from the given assembly from this dispatch table. - pub fn insert_module(&mut self, assembly: &abi::ModuleInfo, type_table: &TypeTable) { + pub fn insert_module(&mut self, assembly: &abi::ModuleInfo<'_>, type_table: &TypeTable) { for fn_def in assembly.functions() { let fn_def = FunctionDefinition::try_from_abi(fn_def, type_table) .expect("All types from a loaded assembly must exist in the type table."); diff --git a/crates/mun_runtime/src/function_info.rs b/crates/mun_runtime/src/function_info.rs index 783d7393d..c78b3f7da 100644 --- a/crates/mun_runtime/src/function_info.rs +++ b/crates/mun_runtime/src/function_info.rs @@ -165,7 +165,7 @@ impl FunctionDefinitionBuilder { /// Adds arguments pub fn add_arguments(mut self, iter: impl IntoIterator) -> Self { - for arg in iter.into_iter() { + for arg in iter { self.arg_types.push(arg); } self diff --git a/crates/mun_runtime/src/lib.rs b/crates/mun_runtime/src/lib.rs index 9fa156adc..5619ea508 100644 --- a/crates/mun_runtime/src/lib.rs +++ b/crates/mun_runtime/src/lib.rs @@ -132,8 +132,8 @@ impl RuntimeBuilder { Self { options: RuntimeOptions { library_path: library_path.into(), - type_table: Default::default(), - user_functions: Default::default(), + type_table: TypeTable::default(), + user_functions: Vec::default(), }, } } @@ -248,7 +248,7 @@ impl Runtime { }); let watcher: RecommendedWatcher = notify::recommended_watcher(move |res| { - tx.send(res).expect("Failed to send filesystem event.") + tx.send(res).expect("Failed to send filesystem event."); })?; let mut runtime = Runtime { assemblies: HashMap::new(), @@ -323,7 +323,7 @@ impl Runtime { (self.dispatch_table, self.type_table) = Assembly::link_all(loaded.values_mut(), &self.dispatch_table, &self.type_table)?; - for (library_path, assembly) in loaded.into_iter() { + for (library_path, assembly) in loaded { self.watcher .watch(library_path.parent().unwrap(), RecursiveMode::NonRecursive) .expect("Path must exist as we just loaded the library"); @@ -340,9 +340,8 @@ impl Runtime { self.dispatch_table.get_fn(function_name) } - /// For a given fn_name, find the most similar name in fn_names + /// For a given `fn_name`, find the most similar name in `fn_names` fn find_best_match_for_fn_name<'a>( - &self, fn_name: &'a str, fn_names: impl Iterator, dist: Option, @@ -371,7 +370,7 @@ impl Runtime { } /// Retrieve the type information corresponding to the `type_id`, if available. - pub fn get_type_info_by_id(&self, type_id: &abi::TypeId) -> Option { + pub fn get_type_info_by_id(&self, type_id: &abi::TypeId<'_>) -> Option { self.type_table.find_type_info_by_id(type_id) } @@ -684,7 +683,7 @@ impl<'name, T: InvokeArgs> InvokeErr<'name, T> { /// Inner implementation that retries a function invocation once, resulting in a /// potentially successful invocation. 
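// Illustrative sketch (standalone, not part of the patch): several loops above drop the explicit
// `.into_iter()` — e.g. `for (library_path, assembly) in loaded` — since a `for` loop already
// calls `IntoIterator::into_iter` on its argument (Clippy's `explicit_into_iter_loop`).
// In miniature, with made-up assembly names:
use std::collections::HashMap;

fn main() {
    let mut loaded: HashMap<String, u32> = HashMap::new();
    loaded.insert("mod.munlib".to_string(), 1);
    loaded.insert("foo.munlib".to_string(), 2);

    let mut total = 0;
    // No `.into_iter()` needed; the loop consumes `loaded` by value either way.
    for (_path, assembly_id) in loaded {
        total += assembly_id;
    }
    assert_eq!(total, 3);
}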
This is a workaround for: - /// https://doc.rust-lang.org/nomicon/lifetime-mismatch.html + /// /// /// # Safety /// @@ -783,7 +782,7 @@ impl Runtime { Err(msg) => { let available_names = self.dispatch_table.get_fn_names(); let suggested_name = - self.find_best_match_for_fn_name(function_name, available_names, None); + Self::find_best_match_for_fn_name(function_name, available_names, None); let suggested_message = suggested_name.map_or_else( || msg.clone(), diff --git a/crates/mun_runtime/src/utils.rs b/crates/mun_runtime/src/utils.rs index 51698bdb2..be43eb38c 100644 --- a/crates/mun_runtime/src/utils.rs +++ b/crates/mun_runtime/src/utils.rs @@ -42,7 +42,7 @@ mod tests { const FIRST_STRING: &str = "foo"; const SECOND_STRING: &str = "zbar"; const EXPECTED_DISTANCE: usize = 4; - assert_eq!(lev_distance(FIRST_STRING, SECOND_STRING), EXPECTED_DISTANCE) + assert_eq!(lev_distance(FIRST_STRING, SECOND_STRING), EXPECTED_DISTANCE); } #[test] @@ -50,7 +50,7 @@ mod tests { const FIRST_STRING: &str = "calculate"; const SECOND_STRING: &str = ""; const EXPECTED_DISTANCE: usize = FIRST_STRING.len(); - assert_eq!(lev_distance(FIRST_STRING, SECOND_STRING), EXPECTED_DISTANCE) + assert_eq!(lev_distance(FIRST_STRING, SECOND_STRING), EXPECTED_DISTANCE); } #[test] @@ -58,6 +58,6 @@ mod tests { const FIRST_STRING: &str = "calculate"; const SECOND_STRING: &str = "calculate"; const EXPECTED_DISTANCE: usize = 0; - assert_eq!(lev_distance(FIRST_STRING, SECOND_STRING), EXPECTED_DISTANCE) + assert_eq!(lev_distance(FIRST_STRING, SECOND_STRING), EXPECTED_DISTANCE); } } diff --git a/crates/mun_runtime/tests/arrays.rs b/crates/mun_runtime/tests/arrays.rs index f408970b8..fe2815120 100644 --- a/crates/mun_runtime/tests/arrays.rs +++ b/crates/mun_runtime/tests/arrays.rs @@ -33,13 +33,13 @@ fn array_of_structs() { ) .expect("Failed to build test driver"); - let result: ArrayRef<'_, StructRef> = driver.runtime.invoke("main", ()).unwrap(); + let result: ArrayRef<'_, StructRef<'_>> = driver.runtime.invoke("main", ()).unwrap(); let number: i32 = result.iter().nth(1).unwrap().get("value").unwrap(); assert_eq!(result.len(), 2); assert_eq!(number, 18571); - let result: ArrayRef<'_, StructRef> = driver.runtime.invoke("main_value", ()).unwrap(); + let result: ArrayRef<'_, StructRef<'_>> = driver.runtime.invoke("main_value", ()).unwrap(); let number: i64 = result.iter().nth(1).unwrap().get("value").unwrap(); assert_eq!(result.len(), 2); @@ -93,7 +93,7 @@ fn root_array() { .expect("Failed to build test driver"); let result = { - let array: ArrayRef = driver.runtime.invoke("main", ()).unwrap(); + let array: ArrayRef<'_, i32> = driver.runtime.invoke("main", ()).unwrap(); array.root() }; diff --git a/crates/mun_runtime/tests/functions.rs b/crates/mun_runtime/tests/functions.rs index 9461d2d25..f5ae699d6 100644 --- a/crates/mun_runtime/tests/functions.rs +++ b/crates/mun_runtime/tests/functions.rs @@ -5,6 +5,8 @@ use mun_test::CompileAndRunTestDriver; #[test] fn unknown_function() { + const EXPECTED_FN_NAME: &str = "may"; + let driver = CompileAndRunTestDriver::new( r" pub fn main() -> i32 { 5 } @@ -13,22 +15,19 @@ fn unknown_function() { ) .expect("Failed to build test driver"); - const EXPECTED_FN_NAME: &str = "may"; - let result: Result = driver.runtime.invoke(EXPECTED_FN_NAME, ()); let err = result.unwrap_err(); assert_eq!( err.to_string(), - format!( - "failed to obtain function '{}', no such function exists.", - EXPECTED_FN_NAME - ) + format!("failed to obtain function '{EXPECTED_FN_NAME}', no such function exists.") ); } #[test] fn 
exact_case_sensitive_match_exists_function() { + const EXPECTED_FN_NAME: &str = "Foo"; + let driver = CompileAndRunTestDriver::new( r" pub fn main() -> i32 { 5 } @@ -39,8 +38,6 @@ fn exact_case_sensitive_match_exists_function() { ) .expect("Failed to build test driver"); - const EXPECTED_FN_NAME: &str = "Foo"; - let result: Result = driver.runtime.invoke(EXPECTED_FN_NAME, ()); let err = result.unwrap_err(); @@ -55,6 +52,8 @@ fn exact_case_sensitive_match_exists_function() { #[test] fn close_match_exists_function() { + const EXPECTED_FN_NAME: &str = "calculatedistance"; + let driver = CompileAndRunTestDriver::new( r" pub fn main() -> i32 { 5 } @@ -65,22 +64,21 @@ fn close_match_exists_function() { ) .expect("Failed to build test driver"); - const EXPECTED_FN_NAME: &str = "calculatedistance"; - let result: Result = driver.runtime.invoke(EXPECTED_FN_NAME, ()); let err = result.unwrap_err(); assert_eq!( err.to_string(), format!( - "failed to obtain function '{}', no such function exists. There is a function with a similar name: calculate_distance", - EXPECTED_FN_NAME + "failed to obtain function '{EXPECTED_FN_NAME}', no such function exists. There is a function with a similar name: calculate_distance" ) ); } #[test] fn no_close_match_exists_function() { + const EXPECTED_FN_NAME: &str = "calculate"; + let driver = CompileAndRunTestDriver::new( r" pub fn main() -> i32 { 5 } @@ -90,22 +88,19 @@ fn no_close_match_exists_function() { ) .expect("Failed to build test driver"); - const EXPECTED_FN_NAME: &str = "calculate"; - let result: Result = driver.runtime.invoke(EXPECTED_FN_NAME, ()); let err = result.unwrap_err(); assert_eq!( err.to_string(), - format!( - "failed to obtain function '{}', no such function exists.", - EXPECTED_FN_NAME - ) + format!("failed to obtain function '{EXPECTED_FN_NAME}', no such function exists.") ); } #[test] fn multiple_match_exists_function() { + const EXPECTED_FN_NAME: &str = "foobar"; + let driver = CompileAndRunTestDriver::new( r" pub fn main() -> i32 { 5 } @@ -116,16 +111,13 @@ fn multiple_match_exists_function() { ) .expect("Failed to build test driver"); - const EXPECTED_FN_NAME: &str = "foobar"; - let result: Result = driver.runtime.invoke(EXPECTED_FN_NAME, ()); let err = result.unwrap_err(); assert_eq!( err.to_string(), format!( - "failed to obtain function '{}', no such function exists. There is a function with a similar name: foobar_b", - EXPECTED_FN_NAME + "failed to obtain function '{EXPECTED_FN_NAME}', no such function exists. 
There is a function with a similar name: foobar_b" ) ); } diff --git a/crates/mun_runtime/tests/hot_reloading.rs b/crates/mun_runtime/tests/hot_reloading.rs index f4dba6f40..87b35a78c 100644 --- a/crates/mun_runtime/tests/hot_reloading.rs +++ b/crates/mun_runtime/tests/hot_reloading.rs @@ -75,15 +75,20 @@ fn reloadable_struct_decl_single_file() { ) .expect("Failed to build test driver"); - let args: StructRef = driver + let args: StructRef<'_> = driver .runtime .invoke("args", ()) .expect("Failed to call function"); - let foo: StructRef = args.get("foo").expect("Failed to get struct field"); - assert_eq!(foo.get::("m").expect("Failed to get struct field"), 1); + let foo_struct: StructRef<'_> = args.get("foo").expect("Failed to get struct field"); + assert_eq!( + foo_struct + .get::("m") + .expect("Failed to get struct field"), + 1 + ); - let foo = foo.root(); + let foo_struct = foo_struct.root(); driver.update_file( "mod.mun", @@ -103,8 +108,13 @@ fn reloadable_struct_decl_single_file() { "#, ); - let foo = foo.as_ref(&driver.runtime); - assert_eq!(foo.get::("m").expect("Failed to get struct field"), 1); + let foo_struct = foo_struct.as_ref(&driver.runtime); + assert_eq!( + foo_struct + .get::("m") + .expect("Failed to get struct field"), + 1 + ); } #[test] @@ -136,18 +146,23 @@ fn reloadable_struct_decl_multi_file() { ) .expect("Failed to build test driver"); - let args: StructRef = driver + let args: StructRef<'_> = driver .runtime .invoke("args", ()) .expect("Failed to call function"); assert_eq!(args.get::("n").expect("Failed to get struct field"), 3); - let foo: StructRef = args.get("foo").expect("Failed to get struct field"); - assert_eq!(foo.get::("m").expect("Failed to get struct field"), 1); + let foo_struct: StructRef<'_> = args.get("foo").expect("Failed to get struct field"); + assert_eq!( + foo_struct + .get::("m") + .expect("Failed to get struct field"), + 1 + ); let args = args.root(); - let foo = foo.root(); + let foo_struct = foo_struct.root(); driver.update_file( "mod.mun", @@ -167,6 +182,11 @@ fn reloadable_struct_decl_multi_file() { let args = args.as_ref(&driver.runtime); assert_eq!(args.get::("n").expect("Failed to get struct field"), 3); - let foo = foo.as_ref(&driver.runtime); - assert_eq!(foo.get::("m").expect("Failed to get struct field"), 1); + let foo_struct = foo_struct.as_ref(&driver.runtime); + assert_eq!( + foo_struct + .get::("m") + .expect("Failed to get struct field"), + 1 + ); } diff --git a/crates/mun_runtime/tests/marshalling.rs b/crates/mun_runtime/tests/marshalling.rs index fa0c7fa17..16fa1d99b 100644 --- a/crates/mun_runtime/tests/marshalling.rs +++ b/crates/mun_runtime/tests/marshalling.rs @@ -338,33 +338,6 @@ fn field_crash() { #[test] fn marshal_struct() { - let driver = CompileAndRunTestDriver::new( - r#" - pub struct(value) Foo { a: i32, b: bool }; - pub struct Bar(i32, bool); - pub struct(value) Baz(Foo); - pub struct(gc) Qux(Bar); - - pub fn foo_new(a: i32, b: bool) -> Foo { - Foo { a, b, } - } - pub fn bar_new(a: i32, b: bool) -> Bar { - Bar(a, b) - } - pub fn baz_new(foo: Foo) -> Baz { - Baz(foo) - } - pub fn qux_new(bar: Bar) -> Qux { - Qux(bar) - } - pub fn baz_new_transitive(foo_a: i32, foo_b: bool) -> Baz { - Baz(foo_new(foo_a, foo_b)) - } - "#, - |builder| builder, - ) - .expect("Failed to build test driver"); - struct TestData(T, T); fn test_field< @@ -387,24 +360,6 @@ fn marshal_struct() { assert_eq!(Ok(data.0), s.get::(field_name)); } - let int_data = TestData(3i32, 6i32); - let bool_data = TestData(true, false); - - // Verify that 
struct marshalling works for fundamental types - let mut foo_struct: StructRef = driver - .runtime - .invoke("foo_new", (int_data.0, bool_data.0)) - .unwrap(); - test_field(&mut foo_struct, &int_data, "a"); - test_field(&mut foo_struct, &bool_data, "b"); - - let mut bar: StructRef = driver - .runtime - .invoke("bar_new", (int_data.0, bool_data.0)) - .unwrap(); - test_field(&mut bar, &int_data, "0"); - test_field(&mut bar, &bool_data, "1"); - fn test_struct<'t>(s: &mut StructRef<'t>, c1: StructRef<'t>, c2: StructRef<'t>) { let field_names: Vec = c1 .type_info() @@ -419,7 +374,7 @@ fn marshal_struct() { let bool_value = c2.get::(&field_names[1]); s.set("0", c2).unwrap(); - let c2 = s.get::("0").unwrap(); + let c2 = s.get::>("0").unwrap(); assert_eq!(c2.get::(&field_names[0]), int_value); assert_eq!(c2.get::(&field_names[1]), bool_value); @@ -427,53 +382,11 @@ fn marshal_struct() { let bool_value = c1.get::(&field_names[1]); s.replace("0", c1).unwrap(); - let c1 = s.get::("0").unwrap(); + let c1 = s.get::>("0").unwrap(); assert_eq!(c1.get::(&field_names[0]), int_value); assert_eq!(c1.get::(&field_names[1]), bool_value); } - // Verify that struct marshalling works for struct types - let mut baz_struct: StructRef = driver.runtime.invoke("baz_new", (foo_struct,)).unwrap(); - let c1: StructRef = driver - .runtime - .invoke("foo_new", (int_data.0, bool_data.0)) - .unwrap(); - let c2: StructRef = driver - .runtime - .invoke("foo_new", (int_data.1, bool_data.1)) - .unwrap(); - test_struct(&mut baz_struct, c1, c2); - - let mut qux: StructRef = driver.runtime.invoke("qux_new", (bar,)).unwrap(); - let c1: StructRef = driver - .runtime - .invoke("bar_new", (int_data.0, bool_data.0)) - .unwrap(); - let c2: StructRef = driver - .runtime - .invoke("bar_new", (int_data.1, bool_data.1)) - .unwrap(); - test_struct(&mut qux, c1, c2); - - // Verify the dispatch table works when a marshallable wrapper function exists alongside the - // original function. 
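Aside (not part of the patch): the function-lookup tests earlier in this patch switch to inlined format arguments, which is what Clippy's `uninlined_format_args` asks for. A minimal std-only sketch; the constant name and message text below are illustrative only.

// Standalone sketch of the `uninlined_format_args` rewrite: an identifier in
// scope is captured directly inside the braces instead of being passed as a
// positional argument.
fn main() {
    const EXPECTED_FN_NAME: &str = "calculate";

    // Before: positional `{}` plus a separate argument.
    let old = format!(
        "failed to obtain function '{}', no such function exists.",
        EXPECTED_FN_NAME
    );
    // After: the identifier is captured in place.
    let new = format!("failed to obtain function '{EXPECTED_FN_NAME}', no such function exists.");

    assert_eq!(old, new);
}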
- let mut baz2: StructRef = driver - .runtime - .invoke("baz_new_transitive", (int_data.0, bool_data.0)) - .unwrap(); - // TODO: Find an ergonomic solution for this: - // .unwrap_or_else(|e| e.wait(&mut runtime_ref)); - - let c1: StructRef = driver - .runtime - .invoke("foo_new", (int_data.0, bool_data.0)) - .unwrap(); - let c2: StructRef = driver - .runtime - .invoke("foo_new", (int_data.1, bool_data.1)) - .unwrap(); - test_struct(&mut baz2, c1, c2); - fn test_shallow_copy< 't, T: 't @@ -496,12 +409,6 @@ fn marshal_struct() { assert_eq!(s1.get::(field_name), s2.get::(field_name)); } - // Verify that StructRef::get makes a shallow copy of a struct - let mut foo_struct = baz_struct.get::("0").unwrap(); - let foo_struct2 = baz_struct.get::("0").unwrap(); - test_shallow_copy(&mut foo_struct, &foo_struct2, &int_data, "a"); - test_shallow_copy(&mut foo_struct, &foo_struct2, &bool_data, "b"); - fn test_clone< 't, T: 't @@ -524,13 +431,106 @@ fn marshal_struct() { assert_eq!(s1.get::(field_name), s2.get::(field_name)); } + let driver = CompileAndRunTestDriver::new( + r#" + pub struct(value) Foo { a: i32, b: bool }; + pub struct Bar(i32, bool); + pub struct(value) Baz(Foo); + pub struct(gc) Qux(Bar); + + pub fn foo_new(a: i32, b: bool) -> Foo { + Foo { a, b, } + } + pub fn bar_new(a: i32, b: bool) -> Bar { + Bar(a, b) + } + pub fn baz_new(foo: Foo) -> Baz { + Baz(foo) + } + pub fn qux_new(bar: Bar) -> Qux { + Qux(bar) + } + pub fn baz_new_transitive(foo_a: i32, foo_b: bool) -> Baz { + Baz(foo_new(foo_a, foo_b)) + } + "#, + |builder| builder, + ) + .expect("Failed to build test driver"); + + let int_data = TestData(3i32, 6i32); + let bool_data = TestData(true, false); + + // Verify that struct marshalling works for fundamental types + let mut foo_struct: StructRef<'_> = driver + .runtime + .invoke("foo_new", (int_data.0, bool_data.0)) + .unwrap(); + test_field(&mut foo_struct, &int_data, "a"); + test_field(&mut foo_struct, &bool_data, "b"); + + let mut bar: StructRef<'_> = driver + .runtime + .invoke("bar_new", (int_data.0, bool_data.0)) + .unwrap(); + test_field(&mut bar, &int_data, "0"); + test_field(&mut bar, &bool_data, "1"); + + // Verify that struct marshalling works for struct types + let mut baz_struct: StructRef<'_> = driver.runtime.invoke("baz_new", (foo_struct,)).unwrap(); + let c1: StructRef<'_> = driver + .runtime + .invoke("foo_new", (int_data.0, bool_data.0)) + .unwrap(); + let c2: StructRef<'_> = driver + .runtime + .invoke("foo_new", (int_data.1, bool_data.1)) + .unwrap(); + test_struct(&mut baz_struct, c1, c2); + + let mut qux: StructRef<'_> = driver.runtime.invoke("qux_new", (bar,)).unwrap(); + let c1: StructRef<'_> = driver + .runtime + .invoke("bar_new", (int_data.0, bool_data.0)) + .unwrap(); + let c2: StructRef<'_> = driver + .runtime + .invoke("bar_new", (int_data.1, bool_data.1)) + .unwrap(); + test_struct(&mut qux, c1, c2); + + // Verify the dispatch table works when a marshallable wrapper function exists alongside the + // original function. 
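Aside (not part of the patch): the pervasive `StructRef` to `StructRef<'_>` change in these tests comes from the `elided_lifetimes_in_paths` lint in the `rust_2018_idioms` group; writing `<'_>` makes it visible that the value borrows from the runtime. A minimal sketch with a stand-in type; `Borrowed` and `fetch` are hypothetical, not the real `StructRef` API.

// Standalone sketch: a type that carries a lifetime should show it, even if
// only as the anonymous `'_`.
struct Borrowed<'r> {
    runtime: &'r str,
}

fn fetch(runtime: &str) -> Borrowed<'_> {
    Borrowed { runtime }
}

fn main() {
    let runtime = String::from("mun runtime");
    // Before the patch this annotation would read `Borrowed`, hiding the
    // borrow; the lint asks for the explicit anonymous lifetime.
    let value: Borrowed<'_> = fetch(&runtime);
    assert_eq!(value.runtime, "mun runtime");
}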
+ let mut baz2: StructRef<'_> = driver + .runtime + .invoke("baz_new_transitive", (int_data.0, bool_data.0)) + .unwrap(); + // TODO: Find an ergonomic solution for this: + // .unwrap_or_else(|e| e.wait(&mut runtime_ref)); + + let c1: StructRef<'_> = driver + .runtime + .invoke("foo_new", (int_data.0, bool_data.0)) + .unwrap(); + let c2: StructRef<'_> = driver + .runtime + .invoke("foo_new", (int_data.1, bool_data.1)) + .unwrap(); + test_struct(&mut baz2, c1, c2); + + // Verify that StructRef::get makes a shallow copy of a struct + let mut foo_struct = baz_struct.get::>("0").unwrap(); + let foo_struct2 = baz_struct.get::>("0").unwrap(); + test_shallow_copy(&mut foo_struct, &foo_struct2, &int_data, "a"); + test_shallow_copy(&mut foo_struct, &foo_struct2, &bool_data, "b"); + // Verify that StructRef::clone returns a `StructRef` to the same memory - let mut foo_struct = baz_struct.get::("0").unwrap(); + let mut foo_struct = baz_struct.get::>("0").unwrap(); let foo_struct2 = foo_struct.clone(); test_clone(&mut foo_struct, &foo_struct2, &int_data, "a"); test_clone(&mut foo_struct, &foo_struct2, &bool_data, "b"); - let mut bar = qux.get::("0").unwrap(); + let mut bar = qux.get::>("0").unwrap(); // Specify invalid return type let bar_err = bar.get::("0"); @@ -549,7 +549,7 @@ fn marshal_struct() { assert!(bar_err.is_err()); // Pass invalid struct type - let bar_err: Result = driver.runtime.invoke("baz_new", (bar,)); + let bar_err: Result, _> = driver.runtime.invoke("baz_new", (bar,)); assert!(bar_err.is_err()); } @@ -627,6 +627,26 @@ fn extern_fn_invalid_sig() { #[test] fn test_primitive_types() { + fn test_field< + 't, + T: 't + + Copy + + std::fmt::Debug + + PartialEq + + ArgumentReflection + + ReturnTypeReflection + + Marshal<'t>, + >( + s: &mut StructRef<'t>, + data: (T, T), + field_name: &str, + ) { + assert_eq!(Ok(data.0), s.get::(field_name)); + s.set(field_name, data.1).unwrap(); + assert_eq!(Ok(data.1), s.replace(field_name, data.0)); + assert_eq!(Ok(data.0), s.get::(field_name)); + } + let driver = CompileAndRunTestDriver::new( r#" pub struct Primitives { @@ -654,27 +674,7 @@ fn test_primitive_types() { ) .expect("Failed to build test driver"); - fn test_field< - 't, - T: 't - + Copy - + std::fmt::Debug - + PartialEq - + ArgumentReflection - + ReturnTypeReflection - + Marshal<'t>, - >( - s: &mut StructRef<'t>, - data: (T, T), - field_name: &str, - ) { - assert_eq!(Ok(data.0), s.get::(field_name)); - s.set(field_name, data.1).unwrap(); - assert_eq!(Ok(data.1), s.replace(field_name, data.0)); - assert_eq!(Ok(data.0), s.get::(field_name)); - } - - let mut foo_struct: StructRef = driver + let mut foo_struct: StructRef<'_> = driver .runtime .invoke( "new_primitives", diff --git a/crates/mun_runtime/tests/memory.rs b/crates/mun_runtime/tests/memory.rs index 3dcfdd97c..46f553af6 100644 --- a/crates/mun_runtime/tests/memory.rs +++ b/crates/mun_runtime/tests/memory.rs @@ -31,7 +31,7 @@ fn gc_trace() { .expect("Failed to build test driver"); let runtime = &driver.runtime; - let value: StructRef = runtime.invoke("new_foo", ()).unwrap(); + let value: StructRef<'_> = runtime.invoke("new_foo", ()).unwrap(); let value = value.root(); assert!(!runtime.gc_collect()); @@ -62,7 +62,7 @@ fn map_struct_insert_field1() { let b = 5i64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -101,7 +101,7 @@ fn 
map_struct_insert_field2() { let a = 5i64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -147,7 +147,7 @@ fn map_struct_insert_field3() { let a = 5i64; let b = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -195,7 +195,7 @@ fn map_struct_remove_field1() { let a = 1.0f64; let b = 3.0f64; let c = 5i64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -233,7 +233,7 @@ fn map_struct_remove_field2() { let a = 1.0f64; let b = 5i64; let c = 3.0f64; - let result: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let result: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let rooted_result = result.root(); driver.update_file( @@ -274,7 +274,7 @@ fn map_struct_remove_field3() { let a = 5i64; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -316,7 +316,7 @@ fn map_struct_cast_fields1() { let c = 3u32; let d = -4i64; let e = 3.1f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d, e)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d, e)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -370,7 +370,7 @@ fn map_struct_cast_fields2() { .expect("Failed to build test driver"); let a = -2i16; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a,)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a,)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -409,7 +409,7 @@ fn map_struct_swap_fields1() { let a = 1.0f64; let b = 3i64; let c = 5.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -459,7 +459,7 @@ fn map_struct_swap_fields2() { let b = 3i64; let c = 5.0f64; let d = 7i64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -512,7 +512,7 @@ fn map_struct_rename_field1() { let a = 5i64; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -560,7 +560,7 @@ fn map_struct_rename_field2() { let a = 5i64; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -610,7 +610,7 @@ fn 
map_struct_all() { let b = 1.0f64; let c = 3.0f64; let d = -1i32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -664,7 +664,7 @@ fn map_array_to_array_different_array_to_primitive_different() { let a = 5i32; let b = 1i32; let c = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -690,7 +690,7 @@ fn map_array_to_array_different_array_to_primitive_different() { assert_eq!(b_array.iter().count(), 3); b_array.iter().zip([b, a, b]).for_each(|(lhs, rhs)| { - assert_eq!(lhs, rhs as i64); + assert_eq!(lhs, i64::from(rhs)); }); assert_eq!( @@ -720,7 +720,7 @@ fn map_array_to_array_different_array_to_primitive_same() { let a = 5i32; let b = 1i32; let c = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -785,7 +785,7 @@ fn map_array_to_array_different_array_to_struct_different() { let a = 5i32; let b = 1i32; let d = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -810,7 +810,7 @@ fn map_array_to_array_different_array_to_struct_different() { for field_name in ["b", "c"] { let array = foo_struct .as_ref(&driver.runtime) - .get::>(field_name) + .get::>>(field_name) .unwrap(); assert_eq!(array.iter().count(), 3); @@ -819,7 +819,7 @@ fn map_array_to_array_different_array_to_struct_different() { .iter() .zip([b, a, b].into_iter()) .for_each(|(lhs, rhs)| { - assert_eq!(lhs.get::("0").unwrap(), rhs as i64); + assert_eq!(lhs.get::("0").unwrap(), i64::from(rhs)); }); } @@ -859,7 +859,7 @@ fn map_array_to_array_different_array_to_struct_same() { let a = 5i32; let b = 1i32; let d = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -884,7 +884,7 @@ fn map_array_to_array_different_array_to_struct_same() { for field_name in ["b", "c"] { let array = foo_struct .as_ref(&driver.runtime) - .get::>(field_name) + .get::>>(field_name) .unwrap(); assert_eq!(array.iter().count(), 3); @@ -924,7 +924,7 @@ fn map_array_to_array_different_primitive_to_array_different() { let a = 5i32; let b = 1i32; let c = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -953,7 +953,7 @@ fn map_array_to_array_different_primitive_to_array_different() { assert_eq!(lhs.iter().count(), 1); assert_eq!( lhs.iter().next().expect("Array must have a value."), - rhs as i64 + i64::from(rhs) ); }); @@ -984,7 +984,7 @@ fn map_array_to_array_different_primitive_to_array_same() { let a = 5i32; let b = 1i32; let c = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = 
driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1041,7 +1041,7 @@ fn map_array_to_array_different_primitive_to_primitive() { let a = 5i32; let b = 1i32; let c = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1067,7 +1067,7 @@ fn map_array_to_array_different_primitive_to_primitive() { assert_eq!(b_array.iter().count(), 3); b_array.iter().zip([b, a, b]).for_each(|(lhs, rhs)| { - assert_eq!(lhs, rhs as i64); + assert_eq!(lhs, i64::from(rhs)); }); assert_eq!( @@ -1098,7 +1098,7 @@ fn map_array_to_array_different_primitive_to_struct() { let a = 5i32; let b = 1i32; let d = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1123,7 +1123,7 @@ fn map_array_to_array_different_primitive_to_struct() { for field_name in ["b", "c"] { let array = foo_struct .as_ref(&driver.runtime) - .get::>(field_name) + .get::>>(field_name) .unwrap(); assert_eq!(array.iter().count(), 3); @@ -1169,7 +1169,7 @@ fn map_array_to_array_different_struct_to_array_different() { let a = 5i32; let b = 1i32; let d = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1194,7 +1194,7 @@ fn map_array_to_array_different_struct_to_array_different() { for field_name in ["b", "c"] { let array = foo_struct .as_ref(&driver.runtime) - .get::>>(field_name) + .get::>>>(field_name) .unwrap(); assert_eq!(array.iter().count(), 3); @@ -1211,7 +1211,7 @@ fn map_array_to_array_different_struct_to_array_different() { .expect("Array must have a value.") .get::("0") .unwrap(), - rhs as i64 + i64::from(rhs) ); }); } @@ -1252,7 +1252,7 @@ fn map_array_to_array_different_struct_to_array_same() { let a = 5i32; let b = 1i32; let d = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1277,7 +1277,7 @@ fn map_array_to_array_different_struct_to_array_same() { for field_name in ["b", "c"] { let array = foo_struct .as_ref(&driver.runtime) - .get::>>(field_name) + .get::>>>(field_name) .unwrap(); assert_eq!(array.iter().count(), 3); @@ -1335,7 +1335,7 @@ fn map_array_to_array_different_struct_to_struct() { let a = 5i32; let b = 1i32; let d = 3.0f32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1361,7 +1361,7 @@ fn map_array_to_array_different_struct_to_struct() { for field_name in ["b", "c"] { let array = foo_struct .as_ref(&driver.runtime) - .get::>(field_name) + .get::>>(field_name) .unwrap(); assert_eq!(array.iter().count(), 3); @@ -1371,7 +1371,7 @@ fn map_array_to_array_different_struct_to_struct() { .zip([b, a, b].into_iter()) .for_each(|(lhs, rhs)| { // println!("struct type: {:?}", lhs.type_info()); - assert_eq!(lhs.get::("0").unwrap(), rhs as i64); + 
assert_eq!(lhs.get::("0").unwrap(), i64::from(rhs)); }); } @@ -1402,7 +1402,7 @@ fn map_array_to_array_same_primitive() { let a = 5i32; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1457,7 +1457,7 @@ fn map_array_to_array_same_struct() { let a = 5i32; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1479,7 +1479,7 @@ fn map_array_to_array_same_struct() { let b_array = foo_struct .as_ref(&driver.runtime) - .get::>("b") + .get::>>("b") .unwrap(); assert_eq!(b_array.iter().count(), 1); @@ -1520,7 +1520,7 @@ fn map_array_to_primitive_different() { let a = 5i32; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1570,7 +1570,7 @@ fn map_array_to_primitive_same() { let a = 5i32; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1625,7 +1625,7 @@ fn map_array_to_struct_different() { let b = 1.0f32; let c = -1i32; let d = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1649,17 +1649,17 @@ fn map_array_to_struct_different() { let bar_struct = foo_struct .as_ref(&driver.runtime) - .get::("b") + .get::>("b") .unwrap(); - assert_eq!(bar_struct.get::("0").unwrap(), b as f64); + assert_eq!(bar_struct.get::("0").unwrap(), f64::from(b)); let baz_struct = foo_struct .as_ref(&driver.runtime) - .get::("c") + .get::>("c") .unwrap(); - assert_eq!(baz_struct.get::("0").unwrap(), c as i64); + assert_eq!(baz_struct.get::("0").unwrap(), i64::from(c)); assert_eq!( foo_struct.as_ref(&driver.runtime).get::("d").unwrap(), @@ -1693,7 +1693,7 @@ fn map_array_to_struct_same() { let b = 1.0f32; let c = -1i32; let d = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1717,14 +1717,14 @@ fn map_array_to_struct_same() { let bar_struct = foo_struct .as_ref(&driver.runtime) - .get::("b") + .get::>("b") .unwrap(); assert_eq!(bar_struct.get::("0").unwrap(), b); let baz_struct = foo_struct .as_ref(&driver.runtime) - .get::("c") + .get::>("c") .unwrap(); assert_eq!(baz_struct.get::("0").unwrap(), c); @@ -1758,7 +1758,7 @@ fn map_primitive_to_array_same() { let b = 1.0f64; let c = 3.0f64; let d = -1i32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1822,7 +1822,7 @@ fn map_primitive_to_array_different() { let b = 
1.0f32; let c = 3.0f64; let d = -1i32; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c, d)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1849,7 +1849,7 @@ fn map_primitive_to_array_different() { assert_eq!(b_array.iter().count(), 1); assert_eq!( b_array.iter().next().expect("Array must have a value."), - b as f64 + f64::from(b) ); assert_eq!( @@ -1865,7 +1865,7 @@ fn map_primitive_to_array_different() { assert_eq!(d_array.iter().count(), 1); assert_eq!( d_array.iter().next().expect("Array must have a value."), - d as i64 + i64::from(d) ); } @@ -1892,7 +1892,7 @@ fn map_struct_to_array_same() { let a = 5i32; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1914,7 +1914,7 @@ fn map_struct_to_array_same() { let b_array = foo_struct .as_ref(&driver.runtime) - .get::>("b") + .get::>>("b") .unwrap(); assert_eq!(b_array.iter().count(), 1); @@ -1957,7 +1957,7 @@ fn map_struct_to_array_different() { let a = 5i32; let b = 1.0f32; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -1979,7 +1979,7 @@ fn map_struct_to_array_different() { let b_array = foo_struct .as_ref(&driver.runtime) - .get::>("b") + .get::>>("b") .unwrap(); assert_eq!(b_array.iter().count(), 1); @@ -1990,7 +1990,7 @@ fn map_struct_to_array_different() { .expect("Array must have a value.") .get::("0") .unwrap(), - b as f64 + f64::from(b) ); assert_eq!( @@ -2018,7 +2018,7 @@ fn insert_array() { let b = 5i64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -2066,7 +2066,7 @@ fn delete_used_struct() { let a = 5i64; let b = 1.0f64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, b, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -2128,15 +2128,15 @@ fn nested_structs() { let a = -3.1f32; let b = 6.18f32; - let gc_struct: StructRef = driver.runtime.invoke("new_gc_struct", (a, b)).unwrap(); - let value_struct: StructRef = driver.runtime.invoke("new_value_struct", (a, b)).unwrap(); + let gc_struct: StructRef<'_> = driver.runtime.invoke("new_gc_struct", (a, b)).unwrap(); + let value_struct: StructRef<'_> = driver.runtime.invoke("new_value_struct", (a, b)).unwrap(); - let gc_wrapper: StructRef = driver + let gc_wrapper: StructRef<'_> = driver .runtime .invoke("new_gc_wrapper", (gc_struct.clone(), value_struct.clone())) .unwrap(); - let value_wrapper: StructRef = driver + let value_wrapper: StructRef<'_> = driver .runtime .invoke( "new_value_wrapper", @@ -2161,28 +2161,28 @@ fn nested_structs() { let gc_0 = gc_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(gc_0.get::("0"), Ok(a.into())); assert_eq!(gc_0.get::("1"), Ok(b.into())); let gc_1 = gc_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); 
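Aside (not part of the patch): the `rhs as i64` to `i64::from(rhs)` and `b as f64` to `f64::from(b)` rewrites in these memory-mapping tests follow Clippy's `cast_lossless`. A minimal std-only sketch of why the `From` form is preferred.

// Standalone sketch: `From` is only implemented for conversions that cannot
// lose information, so `i64::from` / `f64::from` document that the widening
// is exact, whereas `as` would also silently accept a truncating cast.
fn main() {
    let b: i32 = 1;
    let e: f32 = 3.1;

    // Before: `b as i64` and `e as f64`.
    let widened: i64 = i64::from(b);
    let promoted: f64 = f64::from(e);

    assert_eq!(widened, 1);
    assert_eq!(promoted, f64::from(3.1f32));

    // The lossy direction has no `From` impl, which is the point:
    // let narrowed: i32 = i32::from(widened); // error: no such impl
    let narrowed = i32::try_from(widened).unwrap();
    assert_eq!(narrowed, 1);
}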
assert_eq!(gc_1.get::("0"), Ok(a.into())); assert_eq!(gc_1.get::("1"), Ok(b.into())); let value_0 = value_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(value_0.get::("0"), Ok(a.into())); assert_eq!(value_0.get::("1"), Ok(b.into())); let value_1 = value_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(value_1.get::("0"), Ok(a.into())); assert_eq!(value_1.get::("1"), Ok(b.into())); @@ -2201,28 +2201,28 @@ fn nested_structs() { let gc_0 = gc_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(gc_0.get::("0"), Ok(a.into())); assert_eq!(gc_0.get::("1"), Ok(b.into())); let gc_1 = gc_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(gc_1.get::("0"), Ok(a.into())); assert_eq!(gc_1.get::("1"), Ok(b.into())); let value_0 = value_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(value_0.get::("0"), Ok(a.into())); assert_eq!(value_0.get::("1"), Ok(b.into())); let value_1 = value_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(value_1.get::("0"), Ok(a.into())); assert_eq!(value_1.get::("1"), Ok(b.into())); @@ -2309,28 +2309,28 @@ fn nested_structs() { // The values in the wrappers should have been updated let mut gc_0 = gc_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(gc_0.get::("0"), Ok(0.0)); gc_0.set::("0", a.into()).unwrap(); let mut gc_1 = gc_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(gc_1.get::("0"), Ok(0.0)); gc_1.set::("0", a.into()).unwrap(); let mut value_0 = value_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(value_0.get::("0"), Ok(0.0)); value_0.set::("0", a.into()).unwrap(); let mut value_1 = value_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(value_1.get::("0"), Ok(0.0)); value_1.set::("0", a.into()).unwrap(); @@ -2367,28 +2367,28 @@ fn nested_structs() { // The values in the wrappers should have been updated let gc_0 = gc_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(gc_0.get::("0"), Ok(0.0)); assert_eq!(gc_0.get::("1"), Ok(0.0)); let gc_1 = gc_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(gc_1.get::("0"), Ok(0.0)); assert_eq!(gc_1.get::("1"), Ok(0.0)); let value_0 = value_wrapper .as_ref(&driver.runtime) - .get::("0") + .get::>("0") .unwrap(); assert_eq!(value_0.get::("0"), Ok(0.0)); assert_eq!(value_0.get::("1"), Ok(0.0)); let value_1 = value_wrapper .as_ref(&driver.runtime) - .get::("1") + .get::>("1") .unwrap(); assert_eq!(value_1.get::("0"), Ok(0.0)); assert_eq!(value_1.get::("1"), Ok(0.0)); @@ -2413,7 +2413,7 @@ fn insert_struct() { let a = 5i64; let c = 3.0f64; - let foo_struct: StructRef = driver.runtime.invoke("foo_new", (a, c)).unwrap(); + let foo_struct: StructRef<'_> = driver.runtime.invoke("foo_new", (a, c)).unwrap(); let foo_struct = foo_struct.root(); driver.update_file( @@ -2442,13 +2442,13 @@ fn insert_struct() { let b = foo_struct .as_ref(&driver.runtime) - .get::("b") + .get::>("b") .unwrap(); assert_eq!(b.get::("0"), Ok(0)); let d = foo_struct .as_ref(&driver.runtime) - .get::("d") + .get::>("d") .unwrap(); assert_eq!(d.get::("0"), Ok(0.0)); } @@ -2486,7 +2486,7 @@ fn test_type_table() { ) .expect("Failed to build test driver"); - let a: StructRef = driver + let a: StructRef<'_> = driver .runtime .invoke("foo::new_foo", ()) 
.expect("failed to call 'new_foo'"); diff --git a/crates/mun_runtime_capi/src/function.rs b/crates/mun_runtime_capi/src/function.rs index 86f077b9a..aa8d82f1d 100644 --- a/crates/mun_runtime_capi/src/function.rs +++ b/crates/mun_runtime_capi/src/function.rs @@ -34,7 +34,8 @@ impl Function { /// The caller must ensure that the internal pointers point to a valid /// [`mun_runtime::FunctionDefinition`]. pub unsafe fn inner(&self) -> Result<&mun_runtime::FunctionDefinition, &'static str> { - (self.0 as *const mun_runtime::FunctionDefinition) + self.0 + .cast::() .as_ref() .ok_or("null pointer") } @@ -224,7 +225,7 @@ pub(crate) mod tests { .into(); let fn_info_arc = ManuallyDrop::new(unsafe { - Arc::from_raw(function.0 as *const mun_runtime::FunctionDefinition) + Arc::from_raw(function.0.cast::()) }); let strong_count = Arc::strong_count(&fn_info_arc); assert!(strong_count > 0); diff --git a/crates/mun_runtime_capi/src/runtime.rs b/crates/mun_runtime_capi/src/runtime.rs index c4568c3c6..ed102af63 100644 --- a/crates/mun_runtime_capi/src/runtime.rs +++ b/crates/mun_runtime_capi/src/runtime.rs @@ -21,7 +21,8 @@ impl Runtime { /// /// The caller must ensure that the internal pointers point to a valid [`mun_runtime::Runtime`]. pub(crate) unsafe fn inner(&self) -> Result<&mun_runtime::Runtime, &'static str> { - (self.0 as *mut mun_runtime::Runtime) + self.0 + .cast::() .as_ref() .ok_or("null pointer") } @@ -33,7 +34,8 @@ impl Runtime { /// /// The caller must ensure that the internal pointers point to a valid [`mun_runtime::Runtime`]. pub unsafe fn inner_mut(&self) -> Result<&mut mun_runtime::Runtime, &'static str> { - (self.0 as *mut mun_runtime::Runtime) + self.0 + .cast::() .as_mut() .ok_or("null pointer") } @@ -41,7 +43,7 @@ impl Runtime { /// Definition of an external function that is callable from Mun. /// -/// The ownership of the contained TypeInfoHandles is considered to lie with this struct. +/// The ownership of the contained `TypeInfoHandles` is considered to lie with this struct. 
#[repr(C)] #[derive(Clone)] pub struct ExternalFunctionDefinition { @@ -178,7 +180,7 @@ pub unsafe extern "C" fn mun_runtime_create( Err(e) => return ErrorHandle::new(format!("{e:?}")), }; - handle.0 = Box::into_raw(Box::new(runtime)) as *mut _; + handle.0 = Box::into_raw(Box::new(runtime)).cast(); ErrorHandle::default() } @@ -188,7 +190,7 @@ pub extern "C" fn mun_runtime_destroy(runtime: Runtime) -> ErrorHandle { if runtime.0.is_null() { return ErrorHandle::new("invalid argument 'runtime': null pointer"); } - let _runtime = unsafe { Box::from_raw(runtime.0 as *mut Runtime) }; + let _runtime = unsafe { Box::from_raw(runtime.0.cast::()) }; ErrorHandle::default() } @@ -217,16 +219,16 @@ pub unsafe extern "C" fn mun_runtime_find_function_definition( return ErrorHandle::new("invalid argument 'fn_name': null pointer"); } let name = mun_error_try!(std::str::from_utf8(slice::from_raw_parts( - fn_name as *const u8, + fn_name.cast::(), fn_name_len )) - .map_err(|_| String::from("invalid argument 'fn_name': invalid UTF-8 encoded"))); + .map_err(|_error| String::from("invalid argument 'fn_name': invalid UTF-8 encoded"))); let has_fn_info = try_deref_mut!(has_fn_info); let fn_info = try_deref_mut!(fn_info); match runtime.get_function_definition(name) { Some(info) => { *has_fn_info = true; - *fn_info = info.into() + *fn_info = info.into(); } None => *has_fn_info = false, } @@ -285,7 +287,7 @@ pub unsafe extern "C" fn mun_runtime_get_type_info_by_name( #[no_mangle] pub unsafe extern "C" fn mun_runtime_get_type_info_by_id( runtime: Runtime, - type_id: *const abi::TypeId, + type_id: *const abi::TypeId<'_>, has_type_info: *mut bool, type_info: *mut Type, ) -> ErrorHandle { @@ -358,7 +360,7 @@ mod tests { assert_error_snapshot!( unsafe { mun_runtime_create( - invalid_encoding.as_ptr() as *const _, + invalid_encoding.as_ptr().cast(), RuntimeOptions::default(), ptr::null_mut(), ) @@ -427,7 +429,7 @@ mod tests { let invalid_encoding = ['�', '\0']; let type_id = <()>::type_info().clone().into(); let functions = vec![ExternalFunctionDefinition { - name: invalid_encoding.as_ptr() as *const _, + name: invalid_encoding.as_ptr().cast(), arg_types: ptr::null(), return_type: type_id, num_args: 0, @@ -559,7 +561,7 @@ mod tests { unsafe { mun_runtime_find_function_definition( driver.runtime, - invalid_encoding.as_ptr() as *const _, + invalid_encoding.as_ptr().cast(), 3, ptr::null_mut(), ptr::null_mut(), @@ -688,7 +690,7 @@ mod tests { unsafe { mun_runtime_get_type_info_by_name( driver.runtime, - invalid_encoding.as_ptr() as *const _, + invalid_encoding.as_ptr().cast(), ptr::null_mut(), ptr::null_mut(), ) @@ -812,7 +814,7 @@ mod tests { unsafe { mun_runtime_get_type_info_by_id( driver.runtime, - &type_id as *const abi::TypeId, + &type_id as *const abi::TypeId<'_>, ptr::null_mut(), ptr::null_mut(), ) @@ -835,7 +837,7 @@ mod tests { unsafe { mun_runtime_get_type_info_by_id( driver.runtime, - &type_id as *const abi::TypeId, + &type_id as *const abi::TypeId<'_>, &mut has_type_info as *mut _, ptr::null_mut(), ) @@ -855,7 +857,7 @@ mod tests { let type_id = abi::TypeId::Concrete(abi::Guid([0; 16])); assert_getter2!(mun_runtime_get_type_info_by_id( driver.runtime, - &type_id as *const abi::TypeId, + &type_id as *const abi::TypeId<'_>, has_type_info, _type_info, )); diff --git a/crates/mun_runtime_capi/src/test_util.rs b/crates/mun_runtime_capi/src/test_util.rs index 8d1de2912..e978e432d 100644 --- a/crates/mun_runtime_capi/src/test_util.rs +++ b/crates/mun_runtime_capi/src/test_util.rs @@ -3,7 +3,7 @@ use std::{ffi::CString, 
io::stderr, path::Path, ptr}; use crate::runtime::{mun_runtime_create, mun_runtime_destroy, Runtime, RuntimeOptions}; -/// Combines a compiler and runtime in one. Use of the TestDriver allows for quick testing of Mun +/// Combines a compiler and runtime in one. Use of the `TestDriver` allows for quick testing of Mun /// constructs in the runtime with hot-reloading support. pub(crate) struct TestDriver { _temp_dir: tempfile::TempDir, diff --git a/crates/mun_skeptic/src/lib.rs b/crates/mun_skeptic/src/lib.rs index fed6ce224..faee0b60e 100644 --- a/crates/mun_skeptic/src/lib.rs +++ b/crates/mun_skeptic/src/lib.rs @@ -11,9 +11,8 @@ use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag}; use std::{ cell::RefCell, env, - fs::File, - io, - io::{Read, Write}, + fs::{self, File}, + io::{self, Write}, mem, path::{Path, PathBuf}, }; @@ -211,11 +210,11 @@ fn parse_code_block_info(info: &str) -> CodeBlockInfo { "" => {} "mun" => { info.is_mun = true; - seen_mun_tags = true + seen_mun_tags = true; } "ignore" => { info.ignore = true; - seen_mun_tags = true + seen_mun_tags = true; } "no_run" => { info.no_run = true; @@ -238,7 +237,7 @@ fn emit_tests(out_path: impl AsRef, tests: Vec) { let mut content = String::new(); // All tests need the api from mun_skeptic::runtime - content.push_str("extern crate mun_skeptic;\n"); + content.push_str("use mun_skeptic;\n"); for test in tests.iter() { let test_string = emit_test_runner(test).unwrap(); @@ -291,10 +290,8 @@ fn emit_test_runner(test: &Test) -> io::Result { /// that a filesystem write event is only emitted when the content actually changes. fn write_if_contents_changed(name: &Path, contents: &str) -> io::Result<()> { // Can't open in write mode now as that would modify the last changed timestamp of the file - match File::open(name) { - Ok(mut file) => { - let mut current_contents = String::new(); - file.read_to_string(&mut current_contents)?; + match fs::read_to_string(name) { + Ok(current_contents) => { if current_contents == contents { // No change avoid writing to avoid updating the timestamp of the file return Ok(()); diff --git a/crates/mun_skeptic/src/runtime.rs b/crates/mun_skeptic/src/runtime.rs index f47341312..31056ee83 100644 --- a/crates/mun_skeptic/src/runtime.rs +++ b/crates/mun_skeptic/src/runtime.rs @@ -79,9 +79,10 @@ pub fn run_test(code: &str, mode: TestMode) { let runtime = unsafe { builder.finish() }.expect("error creating runtime for test assembly"); // Find the main function - if runtime.get_function_definition("main").is_none() { - panic!("Could not find `main` function"); - } + assert!( + runtime.get_function_definition("main").is_some(), + "Could not find `main` function" + ); // Call the main function let _: () = runtime diff --git a/crates/mun_syntax/src/ast/expr_extensions.rs b/crates/mun_syntax/src/ast/expr_extensions.rs index 589fee1f9..4676bc33d 100644 --- a/crates/mun_syntax/src/ast/expr_extensions.rs +++ b/crates/mun_syntax/src/ast/expr_extensions.rs @@ -92,7 +92,7 @@ impl BinExpr { pub fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { self.syntax() .children_with_tokens() - .filter_map(|it| it.into_token()) + .filter_map(rowan::NodeOrToken::into_token) .find_map(|c| { let bin_op = match c.kind() { T![+] => BinOp::Add, @@ -165,7 +165,7 @@ impl ast::FieldExpr { self.syntax .children_with_tokens() .find(|e| e.kind() == SyntaxKind::INDEX) - .and_then(|e| e.into_token()) + .and_then(rowan::NodeOrToken::into_token) } pub fn field_access(&self) -> Option { @@ -182,13 +182,13 @@ impl ast::FieldExpr { let field_index = 
self.index_token().map(|i| i.text_range()); let start = field_name - .map(|f| f.start()) + .map(rowan::TextRange::start) .or_else(|| field_index.map(|i| i.start().add(TextSize::from(1u32)))) .unwrap_or_else(|| self.syntax().text_range().start()); let end = field_name - .map(|f| f.end()) - .or_else(|| field_index.map(|f| f.end())) + .map(rowan::TextRange::end) + .or_else(|| field_index.map(rowan::TextRange::end)) .unwrap_or_else(|| self.syntax().text_range().end()); TextRange::new(start, end) @@ -208,7 +208,7 @@ impl Literal { self.syntax() .children_with_tokens() .find(|e| !e.kind().is_trivia()) - .and_then(|e| e.into_token()) + .and_then(rowan::NodeOrToken::into_token) .unwrap() } @@ -242,12 +242,11 @@ impl ast::IfExpr { self.blocks().next() } pub fn else_branch(&self) -> Option { - let res = match self.blocks().nth(1) { - Some(block) => ElseBranch::Block(block), - None => { - let elif: ast::IfExpr = child_opt(self)?; - ElseBranch::IfExpr(elif) - } + let res = if let Some(block) = self.blocks().nth(1) { + ElseBranch::Block(block) + } else { + let elif: ast::IfExpr = child_opt(self)?; + ElseBranch::IfExpr(elif) }; Some(res) } diff --git a/crates/mun_syntax/src/ast/extensions.rs b/crates/mun_syntax/src/ast/extensions.rs index b5df83218..efe69f793 100644 --- a/crates/mun_syntax/src/ast/extensions.rs +++ b/crates/mun_syntax/src/ast/extensions.rs @@ -40,15 +40,13 @@ impl ast::FunctionDef { let param_list = self.param_list().map(|p| p.syntax.text_range()); let ret_type = self.ret_type().map(|r| r.syntax.text_range()); - let start = fn_kw - .map(|kw| kw.start()) - .unwrap_or_else(|| self.syntax.text_range().start()); + let start = fn_kw.map_or_else(|| self.syntax.text_range().start(), rowan::TextRange::start); let end = ret_type - .map(|p| p.end()) - .or_else(|| param_list.map(|name| name.end())) - .or_else(|| name.map(|name| name.end())) - .or_else(|| fn_kw.map(|kw| kw.end())) + .map(rowan::TextRange::end) + .or_else(|| param_list.map(rowan::TextRange::end)) + .or_else(|| name.map(rowan::TextRange::end)) + .or_else(|| fn_kw.map(rowan::TextRange::end)) .unwrap_or_else(|| self.syntax().text_range().end()); TextRange::new(start, end) @@ -163,13 +161,12 @@ impl ast::StructDef { .map(|kw| kw.text_range()); let name = self.name().map(|n| n.syntax.text_range()); - let start = struct_kw - .map(|kw| kw.start()) - .unwrap_or_else(|| self.syntax.text_range().start()); + let start = + struct_kw.map_or_else(|| self.syntax.text_range().start(), rowan::TextRange::start); let end = name - .map(|name| name.end()) - .or_else(|| struct_kw.map(|kw| kw.end())) + .map(rowan::TextRange::end) + .or_else(|| struct_kw.map(rowan::TextRange::end)) .unwrap_or_else(|| self.syntax().text_range().end()); TextRange::new(start, end) diff --git a/crates/mun_syntax/src/ast/generated.rs b/crates/mun_syntax/src/ast/generated.rs index f282a1a84..a52ba0fbb 100644 --- a/crates/mun_syntax/src/ast/generated.rs +++ b/crates/mun_syntax/src/ast/generated.rs @@ -9,6 +9,7 @@ //! `.borrowed` functions. Most of the code works with borrowed mode, and only //! this mode has all AST accessors. +#![allow(clippy::enum_glob_use)] use crate::{ ast::{self, AstNode, AstToken}, SyntaxKind::{self, *}, diff --git a/crates/mun_syntax/src/ast/generated.rs.tera b/crates/mun_syntax/src/ast/generated.rs.tera index 2c74ce26e..ad19faa2f 100644 --- a/crates/mun_syntax/src/ast/generated.rs.tera +++ b/crates/mun_syntax/src/ast/generated.rs.tera @@ -9,6 +9,7 @@ the below applies to the result of this template //! `.borrowed` functions. 
Most of the code works with borrowed mode, and only //! this mode has all AST accessors. +#![allow(clippy::enum_glob_use)] use crate::{ ast::{self, AstNode, AstToken}, SyntaxKind::{self, *}, diff --git a/crates/mun_syntax/src/ast/token_extensions.rs b/crates/mun_syntax/src/ast/token_extensions.rs index fa5766901..9004ac05f 100644 --- a/crates/mun_syntax/src/ast/token_extensions.rs +++ b/crates/mun_syntax/src/ast/token_extensions.rs @@ -46,12 +46,11 @@ fn split_int_text_and_suffix(text: &str) -> (&str, Option<&str>) { } /// Skips all digits in the iterator that belong to the given base -fn skip_digits(base: usize, iter: &mut Peekable) { +fn skip_digits(base: usize, iter: &mut Peekable>) { while let Some((_, c)) = iter.peek() { if match c { - '0'..='9' => true, + '0'..='9' | '_' => true, 'a'..='f' | 'A'..='F' if base > 10 => true, - '_' => true, _ => false, } { iter.next(); diff --git a/crates/mun_syntax/src/ast/tokens.rs b/crates/mun_syntax/src/ast/tokens.rs index 499676d0d..4b2e49f50 100644 --- a/crates/mun_syntax/src/ast/tokens.rs +++ b/crates/mun_syntax/src/ast/tokens.rs @@ -1,4 +1,4 @@ -//! There are many AstNodes, but only a few tokens, so we hand-write them here. +//! There are many `AstNodes`, but only a few tokens, so we hand-write them here. use crate::{ ast::AstToken, @@ -65,7 +65,10 @@ pub enum CommentPlacement { } const COMMENT_PREFIX_TO_KIND: &[(&str, CommentKind)] = { - use {CommentPlacement::*, CommentShape::*}; + use { + CommentPlacement::{Inner, Outer}, + CommentShape::{Block, Line}, + }; &[ ( "///", diff --git a/crates/mun_syntax/src/lib.rs b/crates/mun_syntax/src/lib.rs index b32ed49d0..0b4eb35d9 100644 --- a/crates/mun_syntax/src/lib.rs +++ b/crates/mun_syntax/src/lib.rs @@ -206,9 +206,9 @@ fn api_walkthrough() { for item in file.items() { match item.kind() { ast::ModuleItemKind::FunctionDef(f) => func = Some(f), - ast::ModuleItemKind::StructDef(_) => (), - ast::ModuleItemKind::TypeAliasDef(_) => (), - ast::ModuleItemKind::Use(_) => (), + ast::ModuleItemKind::StructDef(_) + | ast::ModuleItemKind::TypeAliasDef(_) + | ast::ModuleItemKind::Use(_) => (), } } diff --git a/crates/mun_syntax/src/parsing.rs b/crates/mun_syntax/src/parsing.rs index a0db1a42d..beebf419e 100644 --- a/crates/mun_syntax/src/parsing.rs +++ b/crates/mun_syntax/src/parsing.rs @@ -64,7 +64,7 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec) { fn parse_from_tokens(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F) where - F: FnOnce(&mut parser::Parser), + F: FnOnce(&mut parser::Parser<'_>), { let mut p = parser::Parser::new(token_source); f(&mut p); diff --git a/crates/mun_syntax/src/parsing/event.rs b/crates/mun_syntax/src/parsing/event.rs index cde48cfc3..9e59d0f91 100644 --- a/crates/mun_syntax/src/parsing/event.rs +++ b/crates/mun_syntax/src/parsing/event.rs @@ -9,7 +9,7 @@ use std::mem; use crate::{ parsing::{ParseError, TreeSink}, - SyntaxKind::{self, *}, + SyntaxKind::{self, TOMBSTONE}, }; /// `Parser` produces a flat list of `Events`'s. 
They are converted to a tree structure in a @@ -84,7 +84,7 @@ pub(super) fn process(sink: &mut dyn TreeSink, mut events: Vec) { } for kind in forward_parents.drain(..).rev() { - sink.start_node(kind) + sink.start_node(kind); } } Event::Finish => sink.finish_node(), diff --git a/crates/mun_syntax/src/parsing/grammar.rs b/crates/mun_syntax/src/parsing/grammar.rs index 4f7030341..6667a8a78 100644 --- a/crates/mun_syntax/src/parsing/grammar.rs +++ b/crates/mun_syntax/src/parsing/grammar.rs @@ -9,7 +9,17 @@ mod types; use super::{ parser::{CompletedMarker, Marker, Parser}, token_set::TokenSet, - SyntaxKind::{self, *}, + SyntaxKind::{ + self, ARG_LIST, ARRAY_EXPR, ARRAY_TYPE, BIND_PAT, BIN_EXPR, BLOCK_EXPR, BREAK_EXPR, + CALL_EXPR, CONDITION, EOF, ERROR, EXPR_STMT, EXTERN, FIELD_EXPR, FLOAT_NUMBER, + FUNCTION_DEF, GC_KW, IDENT, IF_EXPR, INDEX, INDEX_EXPR, INT_NUMBER, LET_STMT, LITERAL, + LOOP_EXPR, MEMORY_TYPE_SPECIFIER, NAME, NAME_REF, NEVER_TYPE, PARAM, PARAM_LIST, + PAREN_EXPR, PATH, PATH_EXPR, PATH_SEGMENT, PATH_TYPE, PLACEHOLDER_PAT, PREFIX_EXPR, + RECORD_FIELD, RECORD_FIELD_DEF, RECORD_FIELD_DEF_LIST, RECORD_FIELD_LIST, RECORD_LIT, + RENAME, RETURN_EXPR, RET_TYPE, SOURCE_FILE, STRING, STRUCT_DEF, TUPLE_FIELD_DEF, + TUPLE_FIELD_DEF_LIST, TYPE_ALIAS_DEF, USE, USE_TREE, USE_TREE_LIST, VALUE_KW, VISIBILITY, + WHILE_EXPR, + }, }; #[derive(Clone, Copy, PartialEq, Eq)] @@ -24,39 +34,39 @@ impl BlockLike { } } -pub(crate) fn root(p: &mut Parser) { +pub(crate) fn root(p: &mut Parser<'_>) { let m = p.start(); declarations::mod_contents(p); m.complete(p, SOURCE_FILE); } -//pub(crate) fn pattern(p: &mut Parser) { +//pub(crate) fn pattern(p: &mut Parser<'_>) { // patterns::pattern(p) //} // -//pub(crate) fn expr(p: &mut Parser) { +//pub(crate) fn expr(p: &mut Parser<'_>) { // expressions::expr(p); //} // -//pub(crate) fn type_(p: &mut Parser) { +//pub(crate) fn type_(p: &mut Parser<'_>) { // types::type_(p) //} -fn name_recovery(p: &mut Parser, recovery: TokenSet) { +fn name_recovery(p: &mut Parser<'_>, recovery: TokenSet) { if p.at(IDENT) { let m = p.start(); p.bump(IDENT); m.complete(p, NAME); } else { - p.error_recover("expected a name", recovery) + p.error_recover("expected a name", recovery); } } -fn name(p: &mut Parser) { - name_recovery(p, TokenSet::empty()) +fn name(p: &mut Parser<'_>) { + name_recovery(p, TokenSet::empty()); } -fn name_ref(p: &mut Parser) { +fn name_ref(p: &mut Parser<'_>) { if p.at(IDENT) { let m = p.start(); p.bump(IDENT); @@ -66,14 +76,14 @@ fn name_ref(p: &mut Parser) { } } -fn name_ref_or_index(p: &mut Parser) { +fn name_ref_or_index(p: &mut Parser<'_>) { assert!(p.at(IDENT) || p.at(INT_NUMBER)); let m = p.start(); p.bump_any(); m.complete(p, NAME_REF); } -fn opt_visibility(p: &mut Parser) -> bool { +fn opt_visibility(p: &mut Parser<'_>) -> bool { match p.current() { T![pub] => { let m = p.start(); @@ -95,7 +105,7 @@ fn opt_visibility(p: &mut Parser) -> bool { } } -fn error_block(p: &mut Parser, message: &str) { +fn error_block(p: &mut Parser<'_>, message: &str) { assert!(p.at(T!['{'])); let m = p.start(); p.error(message); diff --git a/crates/mun_syntax/src/parsing/grammar/adt.rs b/crates/mun_syntax/src/parsing/grammar/adt.rs index 69a68fdc3..8edfc9ad4 100644 --- a/crates/mun_syntax/src/parsing/grammar/adt.rs +++ b/crates/mun_syntax/src/parsing/grammar/adt.rs @@ -1,6 +1,10 @@ -use super::*; +use super::{ + declarations, error_block, name, name_recovery, opt_visibility, types, Marker, Parser, EOF, + GC_KW, IDENT, MEMORY_TYPE_SPECIFIER, RECORD_FIELD_DEF, 
RECORD_FIELD_DEF_LIST, STRUCT_DEF, + TUPLE_FIELD_DEF, TUPLE_FIELD_DEF_LIST, TYPE_ALIAS_DEF, VALUE_KW, +}; -pub(super) fn struct_def(p: &mut Parser, m: Marker) { +pub(super) fn struct_def(p: &mut Parser<'_>, m: Marker) { assert!(p.at(T![struct])); p.bump(T![struct]); opt_memory_type_specifier(p); @@ -18,7 +22,7 @@ pub(super) fn struct_def(p: &mut Parser, m: Marker) { m.complete(p, STRUCT_DEF); } -pub(super) fn type_alias_def(p: &mut Parser, m: Marker) { +pub(super) fn type_alias_def(p: &mut Parser<'_>, m: Marker) { assert!(p.at(T![type])); p.bump(T![type]); name(p); @@ -29,7 +33,7 @@ pub(super) fn type_alias_def(p: &mut Parser, m: Marker) { m.complete(p, TYPE_ALIAS_DEF); } -pub(super) fn record_field_def_list(p: &mut Parser) { +pub(super) fn record_field_def_list(p: &mut Parser<'_>) { assert!(p.at(T!['{'])); let m = p.start(); p.bump(T!['{']); @@ -48,15 +52,15 @@ pub(super) fn record_field_def_list(p: &mut Parser) { m.complete(p, RECORD_FIELD_DEF_LIST); } -fn opt_memory_type_specifier(p: &mut Parser) { +fn opt_memory_type_specifier(p: &mut Parser<'_>) { if p.at(T!['(']) { let m = p.start(); p.bump(T!['(']); if p.at(IDENT) { if p.at_contextual_kw("gc") { - p.bump_remap(GC_KW) + p.bump_remap(GC_KW); } else if p.at_contextual_kw("value") { - p.bump_remap(VALUE_KW) + p.bump_remap(VALUE_KW); } else { p.error_and_bump("expected memory type specifier"); } @@ -68,7 +72,7 @@ fn opt_memory_type_specifier(p: &mut Parser) { } } -pub(super) fn tuple_field_def_list(p: &mut Parser) { +pub(super) fn tuple_field_def_list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); let m = p.start(); p.bump(T!['(']); @@ -91,7 +95,7 @@ pub(super) fn tuple_field_def_list(p: &mut Parser) { m.complete(p, TUPLE_FIELD_DEF_LIST); } -fn record_field_def(p: &mut Parser) { +fn record_field_def(p: &mut Parser<'_>) { let m = p.start(); opt_visibility(p); if p.at(IDENT) { diff --git a/crates/mun_syntax/src/parsing/grammar/declarations.rs b/crates/mun_syntax/src/parsing/grammar/declarations.rs index b3cdeae8a..8a084dbde 100644 --- a/crates/mun_syntax/src/parsing/grammar/declarations.rs +++ b/crates/mun_syntax/src/parsing/grammar/declarations.rs @@ -1,16 +1,20 @@ -use super::*; +use super::{ + adt, error_block, expressions, name, name_recovery, opt_visibility, params, paths, types, + Marker, Parser, TokenSet, EOF, ERROR, EXTERN, FUNCTION_DEF, RENAME, RET_TYPE, USE, USE_TREE, + USE_TREE_LIST, +}; use crate::{parsing::grammar::paths::is_use_path_start, T}; pub(super) const DECLARATION_RECOVERY_SET: TokenSet = TokenSet::new(&[T![fn], T![pub], T![struct], T![use], T![;]]); -pub(super) fn mod_contents(p: &mut Parser) { +pub(super) fn mod_contents(p: &mut Parser<'_>) { while !p.at(EOF) { declaration(p); } } -pub(super) fn declaration(p: &mut Parser) { +pub(super) fn declaration(p: &mut Parser<'_>) { let m = p.start(); let m = match maybe_declaration(p, m) { Ok(()) => return, @@ -19,7 +23,7 @@ pub(super) fn declaration(p: &mut Parser) { m.abandon(p); if p.at(T!['{']) { - error_block(p, "expected a declaration") + error_block(p, "expected a declaration"); } else if p.at(T!['}']) { let e = p.start(); p.error("unmatched }"); @@ -32,7 +36,7 @@ pub(super) fn declaration(p: &mut Parser) { } } -pub(super) fn maybe_declaration(p: &mut Parser, m: Marker) -> Result<(), Marker> { +pub(super) fn maybe_declaration(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> { opt_visibility(p); let m = match declarations_without_modifiers(p, m) { @@ -54,14 +58,14 @@ pub(super) fn maybe_declaration(p: &mut Parser, m: Marker) -> Result<(), Marker> Ok(()) } -fn abi(p: 
&mut Parser) { +fn abi(p: &mut Parser<'_>) { assert!(p.at(T![extern])); let abi = p.start(); p.bump(T![extern]); abi.complete(p, EXTERN); } -fn declarations_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { +fn declarations_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> { match p.current() { T![use] => { use_(p, m); @@ -77,7 +81,7 @@ fn declarations_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marke Ok(()) } -pub(super) fn fn_def(p: &mut Parser) { +pub(super) fn fn_def(p: &mut Parser<'_>) { assert!(p.at(T![fn])); p.bump(T![fn]); @@ -86,7 +90,7 @@ pub(super) fn fn_def(p: &mut Parser) { if p.at(T!['(']) { params::param_list(p); } else { - p.error("expected function arguments") + p.error("expected function arguments"); } opt_fn_ret_type(p); @@ -98,7 +102,7 @@ pub(super) fn fn_def(p: &mut Parser) { } } -fn opt_fn_ret_type(p: &mut Parser) -> bool { +fn opt_fn_ret_type(p: &mut Parser<'_>) -> bool { if p.at(T![->]) { let m = p.start(); p.bump(T![->]); @@ -110,7 +114,7 @@ fn opt_fn_ret_type(p: &mut Parser) -> bool { } } -fn use_(p: &mut Parser, m: Marker) { +fn use_(p: &mut Parser<'_>, m: Marker) { assert!(p.at(T![use])); p.bump(T![use]); use_tree(p, true); @@ -119,7 +123,7 @@ fn use_(p: &mut Parser, m: Marker) { } /// Parses a use "tree", such as `foo::bar` in `use foo::bar;`. -fn use_tree(p: &mut Parser, top_level: bool) { +fn use_tree(p: &mut Parser<'_>, top_level: bool) { let m = p.start(); match p.current() { @@ -161,7 +165,7 @@ fn use_tree(p: &mut Parser, top_level: bool) { m.complete(p, USE_TREE); } -fn use_tree_list(p: &mut Parser) { +fn use_tree_list(p: &mut Parser<'_>) { assert!(p.at(T!['{'])); let m = p.start(); p.bump(T!['{']); @@ -175,7 +179,7 @@ fn use_tree_list(p: &mut Parser) { m.complete(p, USE_TREE_LIST); } -fn opt_rename(p: &mut Parser) { +fn opt_rename(p: &mut Parser<'_>) { if p.at(T![as]) { let m = p.start(); p.bump(T![as]); diff --git a/crates/mun_syntax/src/parsing/grammar/expressions.rs b/crates/mun_syntax/src/parsing/grammar/expressions.rs index a683d50e4..0bd403781 100644 --- a/crates/mun_syntax/src/parsing/grammar/expressions.rs +++ b/crates/mun_syntax/src/parsing/grammar/expressions.rs @@ -1,4 +1,11 @@ -use super::*; +use super::{ + error_block, expressions, name_ref_or_index, paths, patterns, types, BlockLike, + CompletedMarker, Marker, Parser, SyntaxKind, TokenSet, ARG_LIST, ARRAY_EXPR, BIN_EXPR, + BLOCK_EXPR, BREAK_EXPR, CALL_EXPR, CONDITION, EOF, ERROR, EXPR_STMT, FIELD_EXPR, FLOAT_NUMBER, + IDENT, IF_EXPR, INDEX, INDEX_EXPR, INT_NUMBER, LET_STMT, LITERAL, LOOP_EXPR, PAREN_EXPR, + PATH_EXPR, PATH_TYPE, PREFIX_EXPR, RECORD_FIELD, RECORD_FIELD_LIST, RECORD_LIT, RETURN_EXPR, + STRING, WHILE_EXPR, +}; use crate::parsing::grammar::paths::PATH_FIRST; pub(crate) const LITERAL_FIRST: TokenSet = @@ -34,7 +41,7 @@ struct Restrictions { forbid_structs: bool, } -pub(crate) fn expr_block_contents(p: &mut Parser) { +pub(crate) fn expr_block_contents(p: &mut Parser<'_>) { while !p.at(EOF) && !p.at(T!['}']) { if p.eat(T![;]) { continue; @@ -45,7 +52,7 @@ pub(crate) fn expr_block_contents(p: &mut Parser) { } /// Parses a block statement -pub(crate) fn block(p: &mut Parser) { +pub(crate) fn block(p: &mut Parser<'_>) { if !p.at(T!['{']) { p.error("expected a block"); return; @@ -53,7 +60,7 @@ pub(crate) fn block(p: &mut Parser) { block_expr(p); } -fn block_expr(p: &mut Parser) -> CompletedMarker { +fn block_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T!['{'])); let m = p.start(); p.bump(T!['{']); @@ -63,7 +70,7 @@ 
fn block_expr(p: &mut Parser) -> CompletedMarker { } /// Parses a general statement: (let, expr, etc.) -pub(super) fn stmt(p: &mut Parser) { +pub(super) fn stmt(p: &mut Parser<'_>) { let m = p.start(); // Encounters let keyword, so we know it's a let stmt @@ -73,7 +80,7 @@ pub(super) fn stmt(p: &mut Parser) { } let (cm, _blocklike) = expr_stmt(p); - let kind = cm.as_ref().map(|cm| cm.kind()).unwrap_or(ERROR); + let kind = cm.as_ref().map_or(ERROR, CompletedMarker::kind); if p.at(T!['}']) { if let Some(cm) = cm { @@ -88,7 +95,7 @@ pub(super) fn stmt(p: &mut Parser) { } } -fn let_stmt(p: &mut Parser, m: Marker) { +fn let_stmt(p: &mut Parser<'_>, m: Marker) { assert!(p.at(T![let])); p.bump(T![let]); patterns::pattern(p); @@ -103,28 +110,28 @@ fn let_stmt(p: &mut Parser, m: Marker) { m.complete(p, LET_STMT); } -pub(super) fn expr(p: &mut Parser) { +pub(super) fn expr(p: &mut Parser<'_>) { let r = Restrictions { forbid_structs: false, }; expr_bp(p, r, 1); } -fn expr_no_struct(p: &mut Parser) { +fn expr_no_struct(p: &mut Parser<'_>) { let r = Restrictions { forbid_structs: true, }; expr_bp(p, r, 1); } -fn expr_stmt(p: &mut Parser) -> (Option, BlockLike) { +fn expr_stmt(p: &mut Parser<'_>) -> (Option, BlockLike) { let r = Restrictions { forbid_structs: false, }; expr_bp(p, r, 1) } -fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option, BlockLike) { +fn expr_bp(p: &mut Parser<'_>, r: Restrictions, bp: u8) -> (Option, BlockLike) { // Parse left hand side of the expression let mut lhs = match lhs(p, r) { Some((lhs, blocklike)) => { @@ -152,7 +159,7 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option, (Some(lhs), BlockLike::NotBlock) } -fn current_op(p: &Parser) -> (u8, SyntaxKind) { +fn current_op(p: &Parser<'_>) -> (u8, SyntaxKind) { match p.current() { T![+] if p.at(T![+=]) => (1, T![+=]), T![+] => (10, T![+]), @@ -187,7 +194,7 @@ fn current_op(p: &Parser) -> (u8, SyntaxKind) { } } -fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { +fn lhs(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { let m; let kind = match p.current() { T![-] | T![!] => { @@ -205,7 +212,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> } fn postfix_expr( - p: &mut Parser, + p: &mut Parser<'_>, mut lhs: CompletedMarker, // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple // E.g. 
`while true {break}();` is parsed as @@ -222,19 +229,19 @@ fn postfix_expr( _ => break, }; allow_calls = true; - blocklike = BlockLike::NotBlock + blocklike = BlockLike::NotBlock; } (lhs, blocklike) } -fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { +fn call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { assert!(p.at(T!['('])); let m = lhs.precede(p); arg_list(p); m.complete(p, CALL_EXPR) } -fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { +fn index_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { assert!(p.at(T!['['])); let m = lhs.precede(p); p.bump(T!['[']); @@ -243,7 +250,7 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { m.complete(p, INDEX_EXPR) } -fn arg_list(p: &mut Parser) { +fn arg_list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); let m = p.start(); p.bump(T!['(']); @@ -262,7 +269,7 @@ fn arg_list(p: &mut Parser) { m.complete(p, ARG_LIST); } -fn postfix_dot_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { +fn postfix_dot_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { assert!(p.at(T![.])); if p.nth(1) == IDENT && p.nth(2) == T!['('] { // TODO: Implement method calls here @@ -272,13 +279,13 @@ fn postfix_dot_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { field_expr(p, lhs) } -fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { +fn field_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { assert!(p.at(T![.]) || p.at(INDEX)); let m = lhs.precede(p); if p.at(T![.]) { p.bump(T![.]); if p.at(IDENT) || p.at(INT_NUMBER) { - name_ref_or_index(p) + name_ref_or_index(p); } else { p.error("expected field name or number"); } @@ -290,7 +297,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { m.complete(p, FIELD_EXPR) } -fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { +fn atom_expr(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { if let Some(m) = literal(p) { return Some((m, BlockLike::NotBlock)); } @@ -320,7 +327,7 @@ fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockL Some((marker, blocklike)) } -fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { +fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike) { assert!(paths::is_path_start(p)); let m = p.start(); paths::expr_path(p); @@ -334,7 +341,7 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { } } -fn literal(p: &mut Parser) -> Option { +fn literal(p: &mut Parser<'_>) -> Option { if !p.at_ts(LITERAL_FIRST) { return None; } @@ -343,7 +350,7 @@ fn literal(p: &mut Parser) -> Option { Some(m.complete(p, LITERAL)) } -fn paren_expr(p: &mut Parser) -> CompletedMarker { +fn paren_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T!['('])); let m = p.start(); p.bump(T!['(']); @@ -352,7 +359,7 @@ fn paren_expr(p: &mut Parser) -> CompletedMarker { m.complete(p, PAREN_EXPR) } -fn if_expr(p: &mut Parser) -> CompletedMarker { +fn if_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T![if])); let m = p.start(); p.bump(T![if]); @@ -369,7 +376,7 @@ fn if_expr(p: &mut Parser) -> CompletedMarker { m.complete(p, IF_EXPR) } -fn loop_expr(p: &mut Parser) -> CompletedMarker { +fn loop_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T![loop])); let m = p.start(); p.bump(T![loop]); @@ -377,13 +384,13 @@ fn loop_expr(p: &mut 
Parser) -> CompletedMarker { m.complete(p, LOOP_EXPR) } -fn cond(p: &mut Parser) { +fn cond(p: &mut Parser<'_>) { let m = p.start(); expr_no_struct(p); m.complete(p, CONDITION); } -fn ret_expr(p: &mut Parser) -> CompletedMarker { +fn ret_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T![return])); let m = p.start(); p.bump(T![return]); @@ -393,7 +400,7 @@ fn ret_expr(p: &mut Parser) -> CompletedMarker { m.complete(p, RETURN_EXPR) } -fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { +fn break_expr(p: &mut Parser<'_>, r: Restrictions) -> CompletedMarker { assert!(p.at(T![break])); let m = p.start(); p.bump(T![break]); @@ -403,7 +410,7 @@ fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { m.complete(p, BREAK_EXPR) } -fn while_expr(p: &mut Parser) -> CompletedMarker { +fn while_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T![while])); let m = p.start(); p.bump(T![while]); @@ -412,7 +419,7 @@ fn while_expr(p: &mut Parser) -> CompletedMarker { m.complete(p, WHILE_EXPR) } -fn record_field_list(p: &mut Parser) { +fn record_field_list(p: &mut Parser<'_>) { assert!(p.at(T!['{'])); let m = p.start(); p.bump(T!['{']); @@ -437,7 +444,7 @@ fn record_field_list(p: &mut Parser) { m.complete(p, RECORD_FIELD_LIST); } -fn array_expr(p: &mut Parser) -> CompletedMarker { +fn array_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T!['['])); let m = p.start(); diff --git a/crates/mun_syntax/src/parsing/grammar/params.rs b/crates/mun_syntax/src/parsing/grammar/params.rs index 7025d5cc8..0f005e405 100644 --- a/crates/mun_syntax/src/parsing/grammar/params.rs +++ b/crates/mun_syntax/src/parsing/grammar/params.rs @@ -1,10 +1,10 @@ -use super::*; +use super::{patterns, types, Parser, TokenSet, EOF, PARAM, PARAM_LIST}; -pub(super) fn param_list(p: &mut Parser) { - list(p) +pub(super) fn param_list(p: &mut Parser<'_>) { + list(p); } -fn list(p: &mut Parser) { +fn list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); let m = p.start(); p.bump(T!['(']); @@ -24,7 +24,7 @@ fn list(p: &mut Parser) { const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST; -fn param(p: &mut Parser) { +fn param(p: &mut Parser<'_>) { let m = p.start(); patterns::pattern(p); types::ascription(p); diff --git a/crates/mun_syntax/src/parsing/grammar/paths.rs b/crates/mun_syntax/src/parsing/grammar/paths.rs index 83288da4c..5ae2fb30b 100644 --- a/crates/mun_syntax/src/parsing/grammar/paths.rs +++ b/crates/mun_syntax/src/parsing/grammar/paths.rs @@ -1,13 +1,13 @@ -use super::*; +use super::{declarations, name_ref, Parser, TokenSet, IDENT, PATH, PATH_SEGMENT}; pub(super) const PATH_FIRST: TokenSet = TokenSet::new(&[IDENT, T![super], T![self], T![package], T![::]]); -pub(super) fn is_path_start(p: &Parser) -> bool { +pub(super) fn is_path_start(p: &Parser<'_>) -> bool { matches!(p.current(), IDENT | T![self] | T![super] | T![package]) } -pub(super) fn is_use_path_start(p: &Parser, top_level: bool) -> bool { +pub(super) fn is_use_path_start(p: &Parser<'_>, top_level: bool) -> bool { if top_level { matches!(p.current(), IDENT | T![self] | T![super] | T![package]) } else { @@ -15,14 +15,14 @@ pub(super) fn is_use_path_start(p: &Parser, top_level: bool) -> bool { } } -pub(super) fn type_path(p: &mut Parser) { - path(p, Mode::Type, true) +pub(super) fn type_path(p: &mut Parser<'_>) { + path(p, Mode::Type, true); } -pub(super) fn expr_path(p: &mut Parser) { - path(p, Mode::Expr, true) +pub(super) fn expr_path(p: &mut Parser<'_>) { + path(p, Mode::Expr, true); } -pub(super) fn 
use_path(p: &mut Parser, top_level: bool) { - path(p, Mode::Use, top_level) +pub(super) fn use_path(p: &mut Parser<'_>, top_level: bool) { + path(p, Mode::Use, top_level); } #[derive(Clone, Copy, Eq, PartialEq)] @@ -32,7 +32,7 @@ enum Mode { Use, } -fn path(p: &mut Parser, mode: Mode, top_level: bool) { +fn path(p: &mut Parser<'_>, mode: Mode, top_level: bool) { let path = p.start(); path_segment(p, mode, top_level); let mut qualifier = path.complete(p, PATH); @@ -50,7 +50,7 @@ fn path(p: &mut Parser, mode: Mode, top_level: bool) { } } -fn path_segment(p: &mut Parser, _mode: Mode, top_level: bool) { +fn path_segment(p: &mut Parser<'_>, _mode: Mode, top_level: bool) { let m = p.start(); match p.current() { IDENT => { diff --git a/crates/mun_syntax/src/parsing/grammar/patterns.rs b/crates/mun_syntax/src/parsing/grammar/patterns.rs index d7850bb1e..b2ceafc70 100644 --- a/crates/mun_syntax/src/parsing/grammar/patterns.rs +++ b/crates/mun_syntax/src/parsing/grammar/patterns.rs @@ -1,23 +1,26 @@ -use super::*; +use super::{ + expressions, name, paths, CompletedMarker, Parser, TokenSet, BIND_PAT, IDENT, PLACEHOLDER_PAT, +}; pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST .union(paths::PATH_FIRST) .union(TokenSet::new(&[T![-], T![_]])); -pub(super) fn pattern(p: &mut Parser) { +pub(super) fn pattern(p: &mut Parser<'_>) { pattern_r(p, PATTERN_FIRST); } -pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { +pub(super) fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) { atom_pat(p, recovery_set); } -fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { +fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option { let t1 = p.nth(0); if t1 == IDENT { return Some(bind_pat(p)); } + #[allow(clippy::single_match_else)] let m = match t1 { T![_] => placeholder_pat(p), _ => { @@ -28,14 +31,14 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { Some(m) } -fn placeholder_pat(p: &mut Parser) -> CompletedMarker { +fn placeholder_pat(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T![_])); let m = p.start(); p.bump(T![_]); m.complete(p, PLACEHOLDER_PAT) } -fn bind_pat(p: &mut Parser) -> CompletedMarker { +fn bind_pat(p: &mut Parser<'_>) -> CompletedMarker { let m = p.start(); name(p); m.complete(p, BIND_PAT) diff --git a/crates/mun_syntax/src/parsing/grammar/types.rs b/crates/mun_syntax/src/parsing/grammar/types.rs index f04495a98..5ed493ba6 100644 --- a/crates/mun_syntax/src/parsing/grammar/types.rs +++ b/crates/mun_syntax/src/parsing/grammar/types.rs @@ -1,16 +1,16 @@ -use super::*; +use super::{paths, Parser, TokenSet, ARRAY_TYPE, NEVER_TYPE, PATH_TYPE}; pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[T![never], T!['[']])); pub(super) const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[T!['('], T![,], T![pub]]); -pub(super) fn ascription(p: &mut Parser) { +pub(super) fn ascription(p: &mut Parser<'_>) { p.expect(T![:]); type_(p); } -pub(super) fn type_(p: &mut Parser) { +pub(super) fn type_(p: &mut Parser<'_>) { match p.current() { T!['['] => array_type(p), T![never] => never_type(p), @@ -21,20 +21,20 @@ pub(super) fn type_(p: &mut Parser) { } } -pub(super) fn path_type(p: &mut Parser) { +pub(super) fn path_type(p: &mut Parser<'_>) { let m = p.start(); paths::type_path(p); m.complete(p, PATH_TYPE); } -fn never_type(p: &mut Parser) { +fn never_type(p: &mut Parser<'_>) { assert!(p.at(T![never])); let m = p.start(); p.bump(T![never]); m.complete(p, NEVER_TYPE); } -fn array_type(p: &mut Parser) { +fn 
array_type(p: &mut Parser<'_>) {
     assert!(p.at(T!['[']));
     let m = p.start();
     p.bump(T!['[']);
diff --git a/crates/mun_syntax/src/parsing/lexer.rs b/crates/mun_syntax/src/parsing/lexer.rs
index f02340515..f115a89e1 100644
--- a/crates/mun_syntax/src/parsing/lexer.rs
+++ b/crates/mun_syntax/src/parsing/lexer.rs
@@ -5,10 +5,14 @@ mod numbers;
 mod strings;
 
 use self::{
-    classes::*, comments::scan_comment, cursor::Cursor, numbers::scan_number, strings::scan_string,
+    classes::{is_dec_digit, is_ident_continue, is_ident_start, is_whitespace},
+    comments::scan_comment,
+    cursor::Cursor,
+    numbers::scan_number,
+    strings::scan_string,
 };
 use crate::{
-    SyntaxKind::{self, *},
+    SyntaxKind::{self, ERROR, IDENT, NEQ, STRING, UNDERSCORE, WHITESPACE},
     TextSize,
 };
 
@@ -45,7 +49,7 @@ pub fn next_token(text: &str) -> Token {
     Token { kind, len }
 }
 
-fn next_token_inner(c: char, cursor: &mut Cursor) -> SyntaxKind {
+fn next_token_inner(c: char, cursor: &mut Cursor<'_>) -> SyntaxKind {
     if is_whitespace(c) {
         cursor.bump_while(is_whitespace);
         return WHITESPACE;
@@ -88,7 +92,7 @@ fn next_token_inner(c: char, cursor: &mut Cursor) -> SyntaxKind {
     ERROR
 }
 
-fn scan_identifier_or_keyword(c: char, cursor: &mut Cursor) -> SyntaxKind {
+fn scan_identifier_or_keyword(c: char, cursor: &mut Cursor<'_>) -> SyntaxKind {
     match (c, cursor.current()) {
         ('_', None) => return UNDERSCORE,
         ('_', Some(c)) if !is_ident_continue(c) => return UNDERSCORE,
@@ -101,7 +105,7 @@ fn scan_identifier_or_keyword(c: char, cursor: &mut Cursor) -> SyntaxKind {
     IDENT
 }
 
-fn scan_index(c: char, cursor: &mut Cursor) -> Option<SyntaxKind> {
+fn scan_index(c: char, cursor: &mut Cursor<'_>) -> Option<SyntaxKind> {
     if c == '.' {
         let mut is_first = true;
         while let Some(cc) = cursor.current() {
diff --git a/crates/mun_syntax/src/parsing/lexer/comments.rs b/crates/mun_syntax/src/parsing/lexer/comments.rs
index 4a08f09c3..bc969fc85 100644
--- a/crates/mun_syntax/src/parsing/lexer/comments.rs
+++ b/crates/mun_syntax/src/parsing/lexer/comments.rs
@@ -1,8 +1,8 @@
 use crate::parsing::lexer::cursor::Cursor;
 
-use crate::SyntaxKind::{self, *};
+use crate::SyntaxKind::{self, COMMENT};
 
-pub(crate) fn scan_comment(cursor: &mut Cursor) -> Option<SyntaxKind> {
+pub(crate) fn scan_comment(cursor: &mut Cursor<'_>) -> Option<SyntaxKind> {
     if cursor.matches('/') {
         bump_until_eol(cursor);
         Some(COMMENT)
@@ -11,7 +11,7 @@ pub(crate) fn scan_comment(cursor: &mut Cursor) -> Option<SyntaxKind> {
     }
 }
 
-fn scan_block_comment(cursor: &mut Cursor) -> Option<SyntaxKind> {
+fn scan_block_comment(cursor: &mut Cursor<'_>) -> Option<SyntaxKind> {
     if cursor.matches('*') {
         cursor.bump();
         let mut depth: u32 = 1;
@@ -34,7 +34,7 @@ fn scan_block_comment(cursor: &mut Cursor) -> Option<SyntaxKind> {
     }
 }
 
-fn bump_until_eol(cursor: &mut Cursor) {
+fn bump_until_eol(cursor: &mut Cursor<'_>) {
     loop {
         if cursor.matches('\n') || cursor.matches_str("\r\n") {
             return;
diff --git a/crates/mun_syntax/src/parsing/lexer/cursor.rs b/crates/mun_syntax/src/parsing/lexer/cursor.rs
index 95e59ef02..d28d33c6d 100644
--- a/crates/mun_syntax/src/parsing/lexer/cursor.rs
+++ b/crates/mun_syntax/src/parsing/lexer/cursor.rs
@@ -80,7 +80,7 @@ impl<'s> Cursor<'s> {
     }
 
     /// Returns an iterator over the remaining characters.
- fn chars(&self) -> Chars { + fn chars(&self) -> Chars<'_> { let len: u32 = self.len.into(); self.text[len as usize..].chars() } diff --git a/crates/mun_syntax/src/parsing/lexer/numbers.rs b/crates/mun_syntax/src/parsing/lexer/numbers.rs index 2a53605de..9aae84746 100644 --- a/crates/mun_syntax/src/parsing/lexer/numbers.rs +++ b/crates/mun_syntax/src/parsing/lexer/numbers.rs @@ -1,8 +1,11 @@ -use crate::parsing::lexer::{classes::*, cursor::Cursor}; +use crate::parsing::lexer::{ + classes::{is_ident_continue, is_ident_start}, + cursor::Cursor, +}; -use crate::SyntaxKind::{self, *}; +use crate::SyntaxKind::{self, FLOAT_NUMBER, INT_NUMBER}; -pub(crate) fn scan_number(c: char, cursor: &mut Cursor) -> SyntaxKind { +pub(crate) fn scan_number(c: char, cursor: &mut Cursor<'_>) -> SyntaxKind { if c == '0' { match cursor.current().unwrap_or('\0') { 'b' | 'o' => { @@ -42,14 +45,14 @@ pub(crate) fn scan_number(c: char, cursor: &mut Cursor) -> SyntaxKind { INT_NUMBER } -fn scan_suffix(cursor: &mut Cursor) { +fn scan_suffix(cursor: &mut Cursor<'_>) { if cursor.matches_nth_if(0, is_ident_start) { cursor.bump(); cursor.bump_while(is_ident_continue); } } -fn scan_digits(cursor: &mut Cursor, allow_hex: bool) { +fn scan_digits(cursor: &mut Cursor<'_>, allow_hex: bool) { while let Some(c) = cursor.current() { match c { '_' | '0'..='9' => { @@ -63,7 +66,7 @@ fn scan_digits(cursor: &mut Cursor, allow_hex: bool) { } } -fn scan_float_exponent(cursor: &mut Cursor) { +fn scan_float_exponent(cursor: &mut Cursor<'_>) { if cursor.matches('e') || cursor.matches('E') { cursor.bump(); if cursor.matches('-') || cursor.matches('+') { diff --git a/crates/mun_syntax/src/parsing/lexer/strings.rs b/crates/mun_syntax/src/parsing/lexer/strings.rs index 9d3e29272..d54ee7a69 100644 --- a/crates/mun_syntax/src/parsing/lexer/strings.rs +++ b/crates/mun_syntax/src/parsing/lexer/strings.rs @@ -1,6 +1,6 @@ use crate::parsing::lexer::cursor::Cursor; -pub(crate) fn scan_string(c: char, cursor: &mut Cursor) { +pub(crate) fn scan_string(c: char, cursor: &mut Cursor<'_>) { let quote_type = c; while let Some(c) = cursor.current() { match c { diff --git a/crates/mun_syntax/src/parsing/parser.rs b/crates/mun_syntax/src/parsing/parser.rs index c0642b6ca..5c1242497 100644 --- a/crates/mun_syntax/src/parsing/parser.rs +++ b/crates/mun_syntax/src/parsing/parser.rs @@ -1,6 +1,6 @@ use crate::{ parsing::{event::Event, token_set::TokenSet, ParseError, TokenSource}, - SyntaxKind::{self, *}, + SyntaxKind::{self, EOF, ERROR, TOMBSTONE}, }; use drop_bomb::DropBomb; use std::cell::Cell; @@ -127,7 +127,7 @@ impl<'t> Parser<'t> { if kind == EOF { return; } - self.do_bump(kind, 1) + self.do_bump(kind, 1); } /// Advances the parser by one token, remapping its kind. @@ -159,7 +159,7 @@ impl<'t> Parser<'t> { /// Create an error node and consume the next token. pub(crate) fn error_and_bump(&mut self, message: &str) { - self.error_recover(message, TokenSet::empty()) + self.error_recover(message, TokenSet::empty()); } /// Create an error node and consume the next token. @@ -175,7 +175,7 @@ impl<'t> Parser<'t> { } fn push_event(&mut self, event: Event) { - self.events.push(event) + self.events.push(event); } /// Consume the next token if `kind` matches. @@ -242,14 +242,16 @@ impl Marker { } /// Finishes the syntax tree node and assigns `kind` to it, and create a `CompletedMarker` for - /// possible future operation like `.precede()` to deal with forward_parent. 
- pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker { + /// possible future operation like `.precede()` to deal with `forward_parent`. + pub(crate) fn complete(mut self, p: &mut Parser<'_>, kind: SyntaxKind) -> CompletedMarker { self.bomb.defuse(); let idx = self.pos as usize; - match p.events[idx] { - Event::Start { - kind: ref mut slot, .. - } => { + match p + .events + .get_mut(idx) + .expect("marker position must be valid") + { + Event::Start { kind: slot, .. } => { *slot = kind; } _ => unreachable!(), @@ -260,7 +262,7 @@ impl Marker { } /// Abandons the syntax tree node. All its children are attached to its parent instead. - pub(crate) fn abandon(mut self, p: &mut Parser) { + pub(crate) fn abandon(mut self, p: &mut Parser<'_>) { self.bomb.defuse(); let idx = self.pos as usize; if idx == p.events.len() - 1 { @@ -297,15 +299,16 @@ impl CompletedMarker { /// /// Given completed events `[START, FINISH]` and its corresponding `CompletedMarker(pos: 0, _)`, /// append a new `START` event as `[START, FINISH, NEWSTART]`, then mark `NEWSTART` as `START`'s - /// parent with saving its relative distance to `NEWSTART` into forward_parent(=2 in this case). - pub(crate) fn precede(self, p: &mut Parser) -> Marker { + /// parent with saving its relative distance to `NEWSTART` into `forward_parent(=2` in this case). + pub(crate) fn precede(self, p: &mut Parser<'_>) -> Marker { let new_pos = p.start(); let idx = self.start_pos as usize; - match p.events[idx] { - Event::Start { - ref mut forward_parent, - .. - } => { + match p + .events + .get_mut(idx) + .expect("Marker position must be valid") + { + Event::Start { forward_parent, .. } => { *forward_parent = Some(new_pos.pos - self.start_pos); } _ => unreachable!(), @@ -314,18 +317,26 @@ impl CompletedMarker { } /// Undo this completion and turns into a `Marker` - pub(crate) fn undo_completion(self, p: &mut Parser) -> Marker { + pub(crate) fn undo_completion(self, p: &mut Parser<'_>) -> Marker { let start_idx = self.start_pos as usize; let finish_idx = self.finish_pos as usize; - match p.events[start_idx] { + match p + .events + .get_mut(start_idx) + .expect("Marker position must be valid") + { Event::Start { - ref mut kind, + kind, forward_parent: None, } => *kind = TOMBSTONE, _ => unreachable!(), } - match p.events[finish_idx] { - ref mut slot @ Event::Finish => *slot = Event::tombstone(), + match p + .events + .get_mut(finish_idx) + .expect("Marker position must be valid") + { + slot @ Event::Finish => *slot = Event::tombstone(), _ => unreachable!(), } Marker::new(self.start_pos) diff --git a/crates/mun_syntax/src/parsing/text_token_source.rs b/crates/mun_syntax/src/parsing/text_token_source.rs index a66858c5e..d622057cb 100644 --- a/crates/mun_syntax/src/parsing/text_token_source.rs +++ b/crates/mun_syntax/src/parsing/text_token_source.rs @@ -1,6 +1,6 @@ use crate::{ parsing::{lexer::Token, Token as PToken, TokenSource}, - SyntaxKind::*, + SyntaxKind::EOF, TextRange, TextSize, }; @@ -57,7 +57,7 @@ impl<'t> TokenSource for TextTokenSource<'t> { } fn mk_token(pos: usize, start_offsets: &[TextSize], tokens: &[Token]) -> PToken { - let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF); + let kind = tokens.get(pos).map_or(EOF, |t| t.kind); let is_jointed_to_next = if pos + 1 < start_offsets.len() { start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1] } else { diff --git a/crates/mun_syntax/src/parsing/text_tree_sink.rs b/crates/mun_syntax/src/parsing/text_tree_sink.rs index e71bc7804..5a3f2a142 100644 --- 
a/crates/mun_syntax/src/parsing/text_tree_sink.rs +++ b/crates/mun_syntax/src/parsing/text_tree_sink.rs @@ -4,7 +4,7 @@ use crate::{ parsing::{lexer::Token, ParseError, TreeSink}, syntax_node::GreenNode, SyntaxError, - SyntaxKind::{self, *}, + SyntaxKind::{self, COMMENT, FUNCTION_DEF, WHITESPACE}, SyntaxTreeBuilder, TextRange, TextSize, }; @@ -101,7 +101,7 @@ impl<'a> TextTreeSink<'a> { match mem::replace(&mut self.state, State::Normal) { State::PendingFinish => { self.eat_trivias(); - self.inner.finish_node() + self.inner.finish_node(); } State::PendingStart | State::Normal => unreachable!(), } diff --git a/crates/mun_syntax/src/parsing/token_set.rs b/crates/mun_syntax/src/parsing/token_set.rs index ae8a7a102..49ef792c5 100644 --- a/crates/mun_syntax/src/parsing/token_set.rs +++ b/crates/mun_syntax/src/parsing/token_set.rs @@ -14,7 +14,7 @@ impl TokenSet { let mut i = 0; while i < kinds.len() { res |= mask(kinds[i]); - i += 1 + i += 1; } TokenSet(res) } diff --git a/crates/mun_syntax/src/ptr.rs b/crates/mun_syntax/src/ptr.rs index 557494d47..b40ac9873 100644 --- a/crates/mun_syntax/src/ptr.rs +++ b/crates/mun_syntax/src/ptr.rs @@ -75,7 +75,7 @@ impl PartialEq for AstPtr { impl Hash for AstPtr { fn hash(&self, state: &mut H) { - self.raw.hash(state) + self.raw.hash(state); } } diff --git a/crates/mun_syntax/src/syntax_error.rs b/crates/mun_syntax/src/syntax_error.rs index a178eb50c..081b72940 100644 --- a/crates/mun_syntax/src/syntax_error.rs +++ b/crates/mun_syntax/src/syntax_error.rs @@ -68,7 +68,7 @@ impl SyntaxError { } impl fmt::Display for SyntaxError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.kind.fmt(f) } } @@ -79,8 +79,8 @@ pub enum SyntaxErrorKind { } impl fmt::Display for SyntaxErrorKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use self::SyntaxErrorKind::*; + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use self::SyntaxErrorKind::ParseError; match self { ParseError(msg) => write!(f, "{}", msg.0), } diff --git a/crates/mun_syntax/src/syntax_kind.rs b/crates/mun_syntax/src/syntax_kind.rs index 2c409c1e8..e1144d164 100644 --- a/crates/mun_syntax/src/syntax_kind.rs +++ b/crates/mun_syntax/src/syntax_kind.rs @@ -5,7 +5,7 @@ pub use self::generated::SyntaxKind; use std::fmt; impl fmt::Debug for SyntaxKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = self.info().name; f.write_str(name) } diff --git a/crates/mun_syntax/src/syntax_kind/generated.rs b/crates/mun_syntax/src/syntax_kind/generated.rs index 86da8e711..64663c7b2 100644 --- a/crates/mun_syntax/src/syntax_kind/generated.rs +++ b/crates/mun_syntax/src/syntax_kind/generated.rs @@ -7,6 +7,7 @@ bad_style, missing_docs, unreachable_pub, + clippy::enum_glob_use, clippy::manual_non_exhaustive, clippy::upper_case_acronyms )] diff --git a/crates/mun_syntax/src/syntax_kind/generated.rs.tera b/crates/mun_syntax/src/syntax_kind/generated.rs.tera index 20f29a474..21aab27e9 100644 --- a/crates/mun_syntax/src/syntax_kind/generated.rs.tera +++ b/crates/mun_syntax/src/syntax_kind/generated.rs.tera @@ -3,7 +3,7 @@ the below applies to the result of this template #}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-syntax` is run // Do not edit manually -#![allow(bad_style, missing_docs, unreachable_pub, clippy::manual_non_exhaustive, clippy::upper_case_acronyms)] +#![allow(bad_style, missing_docs, 
unreachable_pub, clippy::enum_glob_use, clippy::manual_non_exhaustive, clippy::upper_case_acronyms)] use super::SyntaxInfo; /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`. diff --git a/crates/mun_syntax/src/syntax_node.rs b/crates/mun_syntax/src/syntax_node.rs index 274b3ae4c..55450b43e 100644 --- a/crates/mun_syntax/src/syntax_node.rs +++ b/crates/mun_syntax/src/syntax_node.rs @@ -68,20 +68,20 @@ impl SyntaxTreeBuilder { pub fn token(&mut self, kind: SyntaxKind, text: &str) { let kind = MunLanguage::kind_to_raw(kind); - self.inner.token(kind, text) + self.inner.token(kind, text); } pub fn start_node(&mut self, kind: SyntaxKind) { let kind = MunLanguage::kind_to_raw(kind); - self.inner.start_node(kind) + self.inner.start_node(kind); } pub fn finish_node(&mut self) { - self.inner.finish_node() + self.inner.finish_node(); } pub fn error(&mut self, error: ParseError, text_pos: TextSize) { let error = SyntaxError::new(SyntaxErrorKind::ParseError(error), text_pos); - self.errors.push(error) + self.errors.push(error); } } diff --git a/crates/mun_syntax/src/tests/lexer.rs b/crates/mun_syntax/src/tests/lexer.rs index 3ad764d5e..4697fa00e 100644 --- a/crates/mun_syntax/src/tests/lexer.rs +++ b/crates/mun_syntax/src/tests/lexer.rs @@ -8,7 +8,7 @@ fn dump_tokens(tokens: &[crate::Token], text: &str) -> String { let len = len as usize; let token_text = &text[offset..offset + len]; offset += len; - writeln!(acc, "{:?} {} {:?}", token.kind, len, token_text).unwrap() + writeln!(acc, "{:?} {} {:?}", token.kind, len, token_text).unwrap(); } acc } diff --git a/crates/mun_syntax/src/tests/parser.rs b/crates/mun_syntax/src/tests/parser.rs index ffc669aa7..830a39a8c 100644 --- a/crates/mun_syntax/src/tests/parser.rs +++ b/crates/mun_syntax/src/tests/parser.rs @@ -58,7 +58,7 @@ fn array_type() { WHITESPACE@48..53 "\n " R_CURLY@53..54 "}" "# - ) + ); } #[test] @@ -213,7 +213,7 @@ fn index_expr() { WHITESPACE@136..141 "\n " R_CURLY@141..142 "}" "# - ) + ); } #[test] @@ -339,7 +339,7 @@ fn array_expr() { WHITESPACE@129..134 "\n " R_CURLY@134..135 "}" "# - ) + ); } #[test] diff --git a/crates/mun_syntax/src/token_text.rs b/crates/mun_syntax/src/token_text.rs index 79dd45099..2ebc6a4d7 100644 --- a/crates/mun_syntax/src/token_text.rs +++ b/crates/mun_syntax/src/token_text.rs @@ -45,7 +45,7 @@ impl AsRef for TokenText<'_> { } impl From> for String { - fn from(token_text: TokenText) -> Self { + fn from(token_text: TokenText<'_>) -> Self { token_text.as_str().into() } } @@ -56,7 +56,7 @@ impl PartialEq<&'_ str> for TokenText<'_> { } } impl PartialEq> for &'_ str { - fn eq(&self, other: &TokenText) -> bool { + fn eq(&self, other: &TokenText<'_>) -> bool { other == self } } @@ -66,12 +66,12 @@ impl PartialEq for TokenText<'_> { } } impl PartialEq> for String { - fn eq(&self, other: &TokenText) -> bool { + fn eq(&self, other: &TokenText<'_>) -> bool { other == self } } impl PartialEq for TokenText<'_> { - fn eq(&self, other: &TokenText) -> bool { + fn eq(&self, other: &TokenText<'_>) -> bool { self.as_str() == other.as_str() } } diff --git a/crates/mun_target/src/abi/mod.rs b/crates/mun_target/src/abi/mod.rs index bfed04fca..28a40a167 100644 --- a/crates/mun_target/src/abi/mod.rs +++ b/crates/mun_target/src/abi/mod.rs @@ -121,22 +121,19 @@ impl TargetDataLayout { ["e"] => dl.endian = Endian::Little, ["E"] => dl.endian = Endian::Big, [p] if p.starts_with('P') => { - dl.instruction_address_space = parse_address_space(&p[1..], "P")? 
+ dl.instruction_address_space = parse_address_space(&p[1..], "P")?; } ["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?, ["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?, ["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?, - [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => { + [p @ ("p" | "p0"), s, ref a @ ..] => { dl.pointer_size = size(s, p)?; dl.pointer_align = align(a, p)?; } [s, ref a @ ..] if s.starts_with('i') => { - let bits = match s[1..].parse::() { - Ok(bits) => bits, - Err(_) => { - size(&s[1..], "i")?; // For the user error. - continue; - } + let Ok(bits) = s[1..].parse::() else { + size(&s[1..], "i")?; // For the user error. + continue; }; let a = align(a, s)?; match bits { @@ -211,7 +208,7 @@ impl TargetDataLayout { // } pub fn ptr_sized_integer(&self) -> Integer { - use Integer::*; + use Integer::{I16, I32, I64}; match self.pointer_size.bits() { 16 => I16, 32 => I32, diff --git a/crates/mun_target/src/spec/apple_base.rs b/crates/mun_target/src/spec/apple_base.rs index 55aa106e8..e3856a2c4 100644 --- a/crates/mun_target/src/spec/apple_base.rs +++ b/crates/mun_target/src/spec/apple_base.rs @@ -2,7 +2,10 @@ use crate::spec::{LinkerFlavor, TargetOptions}; use std::borrow::Cow; use std::env; -use Arch::*; +use Arch::{ + Arm64, Arm64_32, Arm64_macabi, Arm64_sim, Armv7, Armv7k, Armv7s, X86_64_macabi, X86_64_sim, + I386, I686, X86_64, +}; #[allow(non_camel_case_types, dead_code)] #[derive(Copy, Clone)] @@ -56,16 +59,13 @@ impl Arch { fn target_cpu(self) -> &'static str { match self { - Armv7 => "cortex-a8", // iOS7 is supported on iPhone 4 and higher - Armv7k => "cortex-a8", + Armv7 | Armv7k => "cortex-a8", // iOS7 is supported on iPhone 4 and higher Armv7s => "cortex-a9", Arm64 => "apple-a7", Arm64_32 => "apple-s4", I386 | I686 => "yonah", - X86_64 | X86_64_sim => "core2", - X86_64_macabi => "core2", - Arm64_macabi => "apple-a12", - Arm64_sim => "apple-a12", + X86_64 | X86_64_sim | X86_64_macabi => "core2", + Arm64_macabi | Arm64_sim => "apple-a12", } } } diff --git a/crates/mun_vfs/src/monitor.rs b/crates/mun_vfs/src/monitor.rs index 304b03eb6..f68d46a13 100644 --- a/crates/mun_vfs/src/monitor.rs +++ b/crates/mun_vfs/src/monitor.rs @@ -82,14 +82,14 @@ impl MonitorDirectories { /// this set. 
pub fn contains_file(&self, path: impl AsRef) -> bool { let ext = path.as_ref().extension().unwrap_or_default(); - if !self + if self .extensions .iter() .any(|include_ext| include_ext.as_str() == ext) { - false - } else { self.includes_path(path) + } else { + false } } @@ -110,7 +110,7 @@ impl MonitorDirectories { include = Some(match include { Some(prev) if prev.starts_with(incl) => prev, _ => incl, - }) + }); } } diff --git a/crates/mun_vfs/src/monitor/notify_monitor.rs b/crates/mun_vfs/src/monitor/notify_monitor.rs index 565ed3a38..9b2740375 100644 --- a/crates/mun_vfs/src/monitor/notify_monitor.rs +++ b/crates/mun_vfs/src/monitor/notify_monitor.rs @@ -146,7 +146,7 @@ impl NotifyThread { move |event| { watcher_sender .send(event) - .expect("unable to send notify event over channel") + .expect("unable to send notify event over channel"); }, Config::default().with_poll_interval(Duration::from_millis(250)), )); @@ -219,9 +219,7 @@ impl NotifyThread { .follow_links(true) .into_iter() .filter_entry(|entry| { - if !entry.file_type().is_dir() { - true - } else { + if entry.file_type().is_dir() { let path = AbsPath::assert_new(entry.path()); root == path || dirs @@ -229,6 +227,8 @@ impl NotifyThread { .iter() .chain(&dirs.include) .all(|dir| dir != path) + } else { + true } }); @@ -240,15 +240,15 @@ impl NotifyThread { if is_dir && watch { self.watch(&abs_path); } - if !is_file { - None - } else { + if is_file { let ext = abs_path.extension().unwrap_or_default(); if dirs.extensions.iter().all(|entry| entry.as_str() != ext) { None } else { Some(abs_path) } + } else { + None } }); diff --git a/crates/mun_vfs/src/path_interner.rs b/crates/mun_vfs/src/path_interner.rs index 788778170..17bab9a79 100644 --- a/crates/mun_vfs/src/path_interner.rs +++ b/crates/mun_vfs/src/path_interner.rs @@ -28,7 +28,7 @@ impl PathInterner { } } - /// Returns the path for the specified FileId. + /// Returns the path for the specified `FileId`. pub fn lookup(&self, id: FileId) -> &AbsPath { &self.id_to_path[id.0 as usize] } diff --git a/crates/tools/src/syntax.rs b/crates/tools/src/syntax.rs index fba2318c5..cbbdbb4d5 100644 --- a/crates/tools/src/syntax.rs +++ b/crates/tools/src/syntax.rs @@ -48,6 +48,21 @@ fn generate_from_template(template: &Path, src: &Path, mode: Mode) -> anyhow::Re /// Creates a new `tera::Tera` instance with some default filters and functions used in our /// templates. 
 fn create_tera() -> tera::Tera {
+    /// Convert value to CamelCase
+    fn camel(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
+        Ok(value.as_str().unwrap().to_upper_camel_case().into())
+    }
+
+    /// Convert value to `snake_case`
+    fn snake(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
+        Ok(value.as_str().unwrap().to_snake_case().into())
+    }
+
+    /// Convert value to `SCREAM_CASE`
+    fn scream(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
+        Ok(value.as_str().unwrap().to_shouty_snake_case().into())
+    }
+
     let mut res = tera::Tera::default();
     res.register_filter("camel", camel);
     res.register_filter("snake", snake);
@@ -65,20 +80,5 @@ fn create_tera() -> tera::Tera {
         Ok(tera::Value::Array(elements))
     });
 
-    return res;
-
-    /// Convert value to CamelCase
-    fn camel(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
-        Ok(value.as_str().unwrap().to_upper_camel_case().into())
-    }
-
-    /// Convert value to snake_case
-    fn snake(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
-        Ok(value.as_str().unwrap().to_snake_case().into())
-    }
-
-    /// Convert value to SCREAM_CASE
-    fn scream(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
-        Ok(value.as_str().unwrap().to_shouty_snake_case().into())
-    }
+    res
 }
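
For context on the relocated helpers above: they follow tera's filter signature, `Fn(&Value, &HashMap<String, Value>) -> tera::Result<Value>`. A minimal, self-contained sketch of how such a filter is registered and applied, assuming the `tera` and `heck` crates and a hypothetical template named "example" (this is illustrative only, not code from this repository):

    use std::collections::HashMap;

    use heck::ToSnakeCase;
    use tera::{Context, Tera, Value};

    /// Filters receive the piped-in value plus any named arguments.
    fn snake(value: &Value, _: &HashMap<String, Value>) -> tera::Result<Value> {
        Ok(value.as_str().unwrap_or_default().to_snake_case().into())
    }

    fn main() -> tera::Result<()> {
        let mut tera = Tera::default();
        tera.register_filter("snake", snake);
        // "example" is a made-up template name used only for this sketch.
        tera.add_raw_template("example", "{{ name | snake }}")?;

        let mut ctx = Context::new();
        ctx.insert("name", "SyntaxKind");
        assert_eq!(tera.render("example", &ctx)?, "syntax_kind");
        Ok(())
    }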