From 9274b37d99f608e5fde569788ee79bd72fc3cf13 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Fri, 4 Dec 2020 14:47:15 +0100 Subject: [PATCH] Rename `AllocRef` to `Allocator` and `(de)alloc` to `(de)allocate` --- library/alloc/src/alloc.rs | 40 ++-- library/alloc/src/alloc/tests.rs | 4 +- library/alloc/src/boxed.rs | 133 +++++------ library/alloc/src/collections/btree/node.rs | 10 +- library/alloc/src/raw_vec.rs | 24 +- library/alloc/src/raw_vec/tests.rs | 10 +- library/alloc/src/rc.rs | 18 +- library/alloc/src/slice.rs | 24 +- library/alloc/src/sync.rs | 16 +- library/alloc/src/vec.rs | 224 +++++++++--------- library/alloc/tests/heap.rs | 8 +- library/core/src/alloc/layout.rs | 2 +- library/core/src/alloc/mod.rs | 64 ++--- library/core/src/ptr/non_null.rs | 4 +- library/std/src/alloc.rs | 20 +- src/test/ui/allocator/custom.rs | 15 +- src/test/ui/allocator/xcrate-use.rs | 10 +- .../ui/associated-types/defaults-wf.stderr | 2 +- src/test/ui/bad/bad-sized.stderr | 2 +- src/test/ui/box/leak-alloc.rs | 16 +- .../e0119/conflict-with-std.stderr | 2 +- src/test/ui/issues/issue-20433.stderr | 2 +- src/test/ui/issues/issue-41974.stderr | 2 +- src/test/ui/realloc-16687.rs | 6 +- src/test/ui/regions/regions-mock-codegen.rs | 10 +- src/test/ui/unique-object-noncopyable.stderr | 2 +- src/test/ui/unique-pinned-nocopy.stderr | 2 +- 27 files changed, 337 insertions(+), 335 deletions(-) diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs index b1bfc2abe44ac..4fbcc4590f1a9 100644 --- a/library/alloc/src/alloc.rs +++ b/library/alloc/src/alloc.rs @@ -38,7 +38,7 @@ extern "Rust" { /// The global memory allocator. /// -/// This type implements the [`AllocRef`] trait by forwarding calls +/// This type implements the [`Allocator`] trait by forwarding calls /// to the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// @@ -59,7 +59,7 @@ pub use std::alloc::Global; /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `alloc` method -/// of the [`Global`] type when it and the [`AllocRef`] trait become stable. +/// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// @@ -93,7 +93,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 { /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `dealloc` method -/// of the [`Global`] type when it and the [`AllocRef`] trait become stable. +/// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// @@ -111,7 +111,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) { /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `realloc` method -/// of the [`Global`] type when it and the [`AllocRef`] trait become stable. +/// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// @@ -129,7 +129,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `alloc_zeroed` method -/// of the [`Global`] type when it and the [`AllocRef`] trait become stable. +/// of the [`Global`] type when it and the [`Allocator`] trait become stable. 
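For illustration (nightly only, `#![feature(allocator_api)]`; this sketch is not part of the diff): the rename is mechanical, so the renamed methods on `Global` are used exactly like the old ones.

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let layout = Layout::new::<u32>();
    // `allocate` is the renamed `alloc`; it yields a NonNull<[u8]> on success.
    let ptr = Global.allocate(layout).expect("allocation failed");
    unsafe {
        ptr.cast::<u32>().as_ptr().write(42);
        assert_eq!(ptr.cast::<u32>().as_ptr().read(), 42);
        // `deallocate` is the renamed `dealloc`.
        Global.deallocate(ptr.cast(), layout);
    }
}
```

Both methods stay unstable under tracking issue #32838, as the attributes in this diff show.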
/// /// # Safety /// @@ -170,7 +170,7 @@ impl Global { } } - // SAFETY: Same as `AllocRef::grow` + // SAFETY: Same as `Allocator::grow` #[inline] unsafe fn grow_impl( &self, @@ -211,7 +211,7 @@ impl Global { old_size => unsafe { let new_ptr = self.alloc_impl(new_layout, zeroed)?; ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size); - self.dealloc(ptr, old_layout); + self.deallocate(ptr, old_layout); Ok(new_ptr) }, } @@ -220,19 +220,19 @@ impl Global { #[unstable(feature = "allocator_api", issue = "32838")] #[cfg(not(test))] -unsafe impl AllocRef for Global { +unsafe impl Allocator for Global { #[inline] - fn alloc(&self, layout: Layout) -> Result, AllocError> { + fn allocate(&self, layout: Layout) -> Result, AllocError> { self.alloc_impl(layout, false) } #[inline] - fn alloc_zeroed(&self, layout: Layout) -> Result, AllocError> { + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { self.alloc_impl(layout, true) } #[inline] - unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { if layout.size() != 0 { // SAFETY: `layout` is non-zero in size, // other conditions must be upheld by the caller @@ -277,7 +277,7 @@ unsafe impl AllocRef for Global { match new_layout.size() { // SAFETY: conditions must be upheld by the caller 0 => unsafe { - self.dealloc(ptr, old_layout); + self.deallocate(ptr, old_layout); Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0)) }, @@ -297,9 +297,9 @@ unsafe impl AllocRef for Global { // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract // for `dealloc` must be upheld by the caller. new_size => unsafe { - let new_ptr = self.alloc(new_layout)?; + let new_ptr = self.allocate(new_layout)?; ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size); - self.dealloc(ptr, old_layout); + self.deallocate(ptr, old_layout); Ok(new_ptr) }, } @@ -313,7 +313,7 @@ unsafe impl AllocRef for Global { #[inline] unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { let layout = unsafe { Layout::from_size_align_unchecked(size, align) }; - match Global.alloc(layout) { + match Global.allocate(layout) { Ok(ptr) => ptr.as_mut_ptr(), Err(_) => handle_alloc_error(layout), } @@ -322,16 +322,16 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { #[cfg_attr(not(test), lang = "box_free")] #[inline] // This signature has to be the same as `Box`, otherwise an ICE will happen. -// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as +// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as // well. -// For example if `Box` is changed to `struct Box(Unique, A)`, -// this function has to be changed to `fn box_free(Unique, A)` as well. -pub(crate) unsafe fn box_free(ptr: Unique, alloc: A) { +// For example if `Box` is changed to `struct Box(Unique, A)`, +// this function has to be changed to `fn box_free(Unique, A)` as well. 
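For illustration (nightly only; not part of the diff): a downstream implementor only sees the new method names. A minimal pass-through sketch — the `Logging` type is hypothetical and simply delegates to `Global`.

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

// Hypothetical allocator: logs each call, then delegates to `Global`.
struct Logging;

unsafe impl Allocator for Logging {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        eprintln!("allocate: {} bytes", layout.size());
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        eprintln!("deallocate: {} bytes", layout.size());
        unsafe { Global.deallocate(ptr, layout) }
    }
}

fn main() {
    let b = Box::new_in(123_u64, Logging); // "allocate: 8 bytes"
    drop(b); // "deallocate: 8 bytes"
}
```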
+pub(crate) unsafe fn box_free(ptr: Unique, alloc: A) { unsafe { let size = size_of_val(ptr.as_ref()); let align = min_align_of_val(ptr.as_ref()); let layout = Layout::from_size_align_unchecked(size, align); - alloc.dealloc(ptr.cast().into(), layout) + alloc.deallocate(ptr.cast().into(), layout) } } diff --git a/library/alloc/src/alloc/tests.rs b/library/alloc/src/alloc/tests.rs index f7463d0daac93..94e05fa448f86 100644 --- a/library/alloc/src/alloc/tests.rs +++ b/library/alloc/src/alloc/tests.rs @@ -9,7 +9,7 @@ fn allocate_zeroed() { unsafe { let layout = Layout::from_size_align(1024, 1).unwrap(); let ptr = - Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout)); + Global.allocate_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout)); let mut i = ptr.as_non_null_ptr().as_ptr(); let end = i.add(layout.size()); @@ -17,7 +17,7 @@ fn allocate_zeroed() { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(ptr.as_non_null_ptr(), layout); + Global.deallocate(ptr.as_non_null_ptr(), layout); } } diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index d814c525ceb6e..a6360f25eca31 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -153,7 +153,7 @@ use core::pin::Pin; use core::ptr::{self, Unique}; use core::task::{Context, Poll}; -use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout}; +use crate::alloc::{handle_alloc_error, Allocator, Global, Layout}; use crate::borrow::Cow; use crate::raw_vec::RawVec; use crate::str::from_boxed_utf8_unchecked; @@ -167,7 +167,7 @@ use crate::vec::Vec; #[stable(feature = "rust1", since = "1.0.0")] pub struct Box< T: ?Sized, - #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, >(Unique, A); impl Box { @@ -243,7 +243,7 @@ impl Box { } } -impl Box { +impl Box { /// Allocates memory in the given allocator then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. @@ -291,7 +291,7 @@ impl Box { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_in(alloc: A) -> Box, A> { let layout = Layout::new::>(); - let ptr = alloc.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast(); + let ptr = alloc.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast(); unsafe { Box::from_raw_in(ptr.as_ptr(), alloc) } } @@ -319,7 +319,8 @@ impl Box { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_in(alloc: A) -> Box, A> { let layout = Layout::new::>(); - let ptr = alloc.alloc_zeroed(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast(); + let ptr = + alloc.allocate_zeroed(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast(); unsafe { Box::from_raw_in(ptr.as_ptr(), alloc) } } @@ -339,7 +340,7 @@ impl Box { /// This conversion does not allocate on the heap and happens in place. #[unstable(feature = "box_into_boxed_slice", issue = "71582")] pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> { - let (raw, alloc) = Box::into_raw_with_alloc(boxed); + let (raw, alloc) = Box::into_raw_with_allocator(boxed); unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) } } } @@ -394,7 +395,7 @@ impl Box<[T]> { } } -impl Box<[T], A> { +impl Box<[T], A> { /// Constructs a new boxed slice with uninitialized contents in the provided allocator. /// /// # Examples @@ -450,7 +451,7 @@ impl Box<[T], A> { } } -impl Box, A> { +impl Box, A> { /// Converts to `Box`. 
/// /// # Safety @@ -482,12 +483,12 @@ impl Box, A> { #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Box { - let (raw, alloc) = Box::into_raw_with_alloc(self); + let (raw, alloc) = Box::into_raw_with_allocator(self); unsafe { Box::from_raw_in(raw as *mut T, alloc) } } } -impl Box<[mem::MaybeUninit], A> { +impl Box<[mem::MaybeUninit], A> { /// Converts to `Box<[T], A>`. /// /// # Safety @@ -521,7 +522,7 @@ impl Box<[mem::MaybeUninit], A> { #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Box<[T], A> { - let (raw, alloc) = Box::into_raw_with_alloc(self); + let (raw, alloc) = Box::into_raw_with_allocator(self); unsafe { Box::from_raw_in(raw as *mut [T], alloc) } } } @@ -575,7 +576,7 @@ impl Box { } } -impl Box { +impl Box { /// Constructs a box from a raw pointer in the given allocator. /// /// After calling this function, the raw pointer is owned by the @@ -594,24 +595,24 @@ impl Box { /// # Examples /// /// Recreate a `Box` which was previously converted to a raw pointer - /// using [`Box::into_raw_with_alloc`]: + /// using [`Box::into_raw_with_allocator`]: /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let x = Box::new_in(5, System); - /// let (ptr, alloc) = Box::into_raw_with_alloc(x); + /// let (ptr, alloc) = Box::into_raw_with_allocator(x); /// let x = unsafe { Box::from_raw_in(ptr, alloc) }; /// ``` /// Manually create a `Box` from scratch by using the system allocator: /// ``` /// #![feature(allocator_api, slice_ptr_get)] /// - /// use std::alloc::{AllocRef, Layout, System}; + /// use std::alloc::{Allocator, Layout, System}; /// /// unsafe { - /// let ptr = System.alloc(Layout::new::())?.as_mut_ptr(); + /// let ptr = System.allocate(Layout::new::())?.as_mut_ptr(); /// // In general .write is required to avoid attempting to destruct /// // the (uninitialized) previous contents of `ptr`, though for this /// // simple example `*ptr = 5` would have worked as well. @@ -671,7 +672,7 @@ impl Box { #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub fn into_raw(b: Self) -> *mut T { - Self::into_raw_with_alloc(b).0 + Self::into_raw_with_allocator(b).0 } /// Consumes the `Box`, returning a wrapped raw pointer and the allocator. @@ -687,7 +688,7 @@ impl Box { /// the cleanup. /// /// Note: this is an associated function, which means that you have - /// to call it as `Box::into_raw_with_alloc(b)` instead of `b.into_raw_with_alloc()`. This + /// to call it as `Box::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. This /// is so that there is no conflict with a method on the inner type. 
/// /// # Examples @@ -699,7 +700,7 @@ impl Box { /// use std::alloc::System; /// /// let x = Box::new_in(String::from("Hello"), System); - /// let (ptr, alloc) = Box::into_raw_with_alloc(x); + /// let (ptr, alloc) = Box::into_raw_with_allocator(x); /// let x = unsafe { Box::from_raw_in(ptr, alloc) }; /// ``` /// Manual cleanup by explicitly running the destructor and deallocating @@ -707,22 +708,22 @@ impl Box { /// ``` /// #![feature(allocator_api)] /// - /// use std::alloc::{AllocRef, Layout, System}; + /// use std::alloc::{Allocator, Layout, System}; /// use std::ptr::{self, NonNull}; /// /// let x = Box::new_in(String::from("Hello"), System); - /// let (ptr, alloc) = Box::into_raw_with_alloc(x); + /// let (ptr, alloc) = Box::into_raw_with_allocator(x); /// unsafe { /// ptr::drop_in_place(ptr); /// let non_null = NonNull::new_unchecked(ptr); - /// alloc.dealloc(non_null.cast(), Layout::new::()); + /// alloc.deallocate(non_null.cast(), Layout::new::()); /// } /// ``` /// /// [memory layout]: self#memory-layout #[unstable(feature = "allocator_api", issue = "32838")] #[inline] - pub fn into_raw_with_alloc(b: Self) -> (*mut T, A) { + pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) { let (leaked, alloc) = Box::into_unique(b); (leaked.as_ptr(), alloc) } @@ -747,11 +748,11 @@ impl Box { /// Returns a reference to the underlying allocator. /// /// Note: this is an associated function, which means that you have - /// to call it as `Box::alloc_ref(&b)` instead of `b.alloc_ref()`. This + /// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This /// is so that there is no conflict with a method on the inner type. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] - pub fn alloc_ref(b: &Self) -> &A { + pub fn allocator(b: &Self) -> &A { &b.1 } @@ -817,7 +818,7 @@ impl Box { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized, A: AllocRef> Drop for Box { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box { fn drop(&mut self) { // FIXME: Do nothing, drop is currently performed by compiler. } @@ -846,7 +847,7 @@ impl Default for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Box { +impl Clone for Box { /// Returns a new box with a `clone()` of this box's contents. 
/// /// # Examples @@ -900,7 +901,7 @@ impl Clone for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Box { +impl PartialEq for Box { #[inline] fn eq(&self, other: &Self) -> bool { PartialEq::eq(&**self, &**other) @@ -911,7 +912,7 @@ impl PartialEq for Box { } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Box { +impl PartialOrd for Box { #[inline] fn partial_cmp(&self, other: &Self) -> Option { PartialOrd::partial_cmp(&**self, &**other) @@ -934,24 +935,24 @@ impl PartialOrd for Box { } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Box { +impl Ord for Box { #[inline] fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Box {} +impl Eq for Box {} #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Box { +impl Hash for Box { fn hash(&self, state: &mut H) { (**self).hash(state); } } #[stable(feature = "indirect_hasher_impl", since = "1.22.0")] -impl Hasher for Box { +impl Hasher for Box { fn finish(&self) -> u64 { (**self).finish() } @@ -1016,7 +1017,7 @@ impl From for Box { } #[stable(feature = "pin", since = "1.33.0")] -impl From> for Pin> +impl From> for Pin> where A: 'static, { @@ -1094,7 +1095,7 @@ impl From> for Box { } #[stable(feature = "boxed_str_conv", since = "1.19.0")] -impl From> for Box<[u8], A> { +impl From> for Box<[u8], A> { /// Converts a `Box` into a `Box<[u8]>` /// /// This conversion does not allocate on the heap and happens in place. @@ -1113,7 +1114,7 @@ impl From> for Box<[u8], A> { /// ``` #[inline] fn from(s: Box) -> Self { - let (raw, alloc) = Box::into_raw_with_alloc(s); + let (raw, alloc) = Box::into_raw_with_allocator(s); unsafe { Box::from_raw_in(raw as *mut [u8], alloc) } } } @@ -1147,7 +1148,7 @@ impl TryFrom> for Box<[T; N]> { } } -impl Box { +impl Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. @@ -1170,7 +1171,7 @@ impl Box { pub fn downcast(self) -> Result, Self> { if self.is::() { unsafe { - let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_alloc(self); + let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_allocator(self); Ok(Box::from_raw_in(raw as *mut T, alloc)) } } else { @@ -1179,7 +1180,7 @@ impl Box { } } -impl Box { +impl Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. 
@@ -1202,7 +1203,7 @@ impl Box { pub fn downcast(self) -> Result, Self> { if self.is::() { unsafe { - let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_alloc(self); + let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_allocator(self); Ok(Box::from_raw_in(raw as *mut T, alloc)) } } else { @@ -1212,21 +1213,21 @@ impl Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Display for Box { +impl fmt::Display for Box { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Box { +impl fmt::Debug for Box { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Pointer for Box { +impl fmt::Pointer for Box { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // It's not possible to extract the inner Uniq directly from the Box, // instead we cast it to a *const which aliases the Unique @@ -1236,7 +1237,7 @@ impl fmt::Pointer for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Box { +impl Deref for Box { type Target = T; fn deref(&self) -> &T { @@ -1245,17 +1246,17 @@ impl Deref for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl DerefMut for Box { +impl DerefMut for Box { fn deref_mut(&mut self) -> &mut T { &mut **self } } #[unstable(feature = "receiver_trait", issue = "none")] -impl Receiver for Box {} +impl Receiver for Box {} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Box { +impl Iterator for Box { type Item = I::Item; fn next(&mut self) -> Option { (**self).next() @@ -1276,7 +1277,7 @@ trait BoxIter { fn last(self) -> Option; } -impl BoxIter for Box { +impl BoxIter for Box { type Item = I::Item; default fn last(self) -> Option { #[inline] @@ -1291,14 +1292,14 @@ impl BoxIter for Box { /// Specialization for sized `I`s that uses `I`s implementation of `last()` /// instead of the default. 
#[stable(feature = "rust1", since = "1.0.0")] -impl BoxIter for Box { +impl BoxIter for Box { fn last(self) -> Option { (*self).last() } } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Box { +impl DoubleEndedIterator for Box { fn next_back(&mut self) -> Option { (**self).next_back() } @@ -1307,7 +1308,7 @@ impl DoubleEndedIterator for Box ExactSizeIterator for Box { +impl ExactSizeIterator for Box { fn len(&self) -> usize { (**self).len() } @@ -1317,10 +1318,10 @@ impl ExactSizeIterator for Box } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Box {} +impl FusedIterator for Box {} #[stable(feature = "boxed_closure_impls", since = "1.35.0")] -impl + ?Sized, A: AllocRef> FnOnce for Box { +impl + ?Sized, A: Allocator> FnOnce for Box { type Output = >::Output; extern "rust-call" fn call_once(self, args: Args) -> Self::Output { @@ -1329,21 +1330,21 @@ impl + ?Sized, A: AllocRef> FnOnce for Box { } #[stable(feature = "boxed_closure_impls", since = "1.35.0")] -impl + ?Sized, A: AllocRef> FnMut for Box { +impl + ?Sized, A: Allocator> FnMut for Box { extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output { >::call_mut(self, args) } } #[stable(feature = "boxed_closure_impls", since = "1.35.0")] -impl + ?Sized, A: AllocRef> Fn for Box { +impl + ?Sized, A: Allocator> Fn for Box { extern "rust-call" fn call(&self, args: Args) -> Self::Output { >::call(self, args) } } #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized, A: AllocRef> CoerceUnsized> for Box {} +impl, U: ?Sized, A: Allocator> CoerceUnsized> for Box {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Box {} @@ -1356,9 +1357,9 @@ impl FromIterator for Box<[I]> { } #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box<[T], A> { +impl Clone for Box<[T], A> { fn clone(&self) -> Self { - let alloc = Box::alloc_ref(self).clone(); + let alloc = Box::allocator(self).clone(); self.to_vec_in(alloc).into_boxed_slice() } @@ -1372,28 +1373,28 @@ impl Clone for Box<[T], A> { } #[stable(feature = "box_borrow", since = "1.1.0")] -impl borrow::Borrow for Box { +impl borrow::Borrow for Box { fn borrow(&self) -> &T { &**self } } #[stable(feature = "box_borrow", since = "1.1.0")] -impl borrow::BorrowMut for Box { +impl borrow::BorrowMut for Box { fn borrow_mut(&mut self) -> &mut T { &mut **self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsRef for Box { +impl AsRef for Box { fn as_ref(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsMut for Box { +impl AsMut for Box { fn as_mut(&mut self) -> &mut T { &mut **self } @@ -1422,10 +1423,10 @@ impl AsMut for Box { * could have a method to project a Pin from it. 
*/ #[stable(feature = "pin", since = "1.33.0")] -impl Unpin for Box where A: 'static {} +impl Unpin for Box where A: 'static {} #[unstable(feature = "generator_trait", issue = "43122")] -impl + Unpin, R, A: AllocRef> Generator for Box +impl + Unpin, R, A: Allocator> Generator for Box where A: 'static, { @@ -1438,7 +1439,7 @@ where } #[unstable(feature = "generator_trait", issue = "43122")] -impl, R, A: AllocRef> Generator for Pin> +impl, R, A: Allocator> Generator for Pin> where A: 'static, { @@ -1451,7 +1452,7 @@ where } #[stable(feature = "futures_api", since = "1.36.0")] -impl Future for Box +impl Future for Box where A: 'static, { diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 78be070a98382..31809fde57b7a 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -36,7 +36,7 @@ use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; use core::ptr::{self, NonNull}; -use crate::alloc::{AllocRef, Global, Layout}; +use crate::alloc::{Allocator, Global, Layout}; use crate::boxed::Box; const B: usize = 6; @@ -195,7 +195,7 @@ impl NodeRef { self.borrow_mut().clear_parent_link(); unsafe { - Global.dealloc(top.cast(), Layout::new::>()); + Global.deallocate(top.cast(), Layout::new::>()); } } } @@ -449,7 +449,7 @@ impl NodeRef { let node = self.node; let ret = self.ascend().ok(); unsafe { - Global.dealloc( + Global.deallocate( node.cast(), if height > 0 { Layout::new::>() @@ -1407,9 +1407,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { left_node.correct_childrens_parent_links(left_len + 1..=left_len + 1 + right_len); - Global.dealloc(right_node.node.cast(), Layout::new::>()); + Global.deallocate(right_node.node.cast(), Layout::new::>()); } else { - Global.dealloc(right_node.node.cast(), Layout::new::>()); + Global.deallocate(right_node.node.cast(), Layout::new::>()); } let new_idx = match track_edge_idx { diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index edab576bea8dd..edee439ad8dfa 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -9,7 +9,7 @@ use core::ops::Drop; use core::ptr::{self, NonNull, Unique}; use core::slice; -use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout}; +use crate::alloc::{handle_alloc_error, Allocator, Global, Layout}; use crate::boxed::Box; use crate::collections::TryReserveError::{self, *}; @@ -46,7 +46,7 @@ enum AllocInit { /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a /// `Box<[T]>`, since `capacity()` won't yield the length. #[allow(missing_debug_implementations)] -pub struct RawVec { +pub struct RawVec { ptr: Unique, cap: usize, alloc: A, @@ -113,7 +113,7 @@ impl RawVec { } } -impl RawVec { +impl RawVec { /// Like `new`, but parameterized over the choice of allocator for /// the returned `RawVec`. #[rustc_allow_const_fn_unstable(const_fn)] @@ -139,7 +139,7 @@ impl RawVec { /// Converts a `Box<[T]>` into a `RawVec`. 
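For illustration (nightly only; not part of the diff), condensing the doc examples above: the raw-pointer round trip now goes through `into_raw_with_allocator`, which hands back the allocator alongside the pointer.

```rust
#![feature(allocator_api)]

use std::alloc::System;

fn main() {
    let b = Box::new_in(String::from("hello"), System);
    // Renamed from `into_raw_with_alloc`.
    let (ptr, alloc) = Box::into_raw_with_allocator(b);
    // Reconstituting requires the same allocator that was returned.
    let b = unsafe { Box::from_raw_in(ptr, alloc) };
    assert_eq!(*b, "hello");
}
```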
pub fn from_box(slice: Box<[T], A>) -> Self { unsafe { - let (slice, alloc) = Box::into_raw_with_alloc(slice); + let (slice, alloc) = Box::into_raw_with_allocator(slice); RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), alloc) } } @@ -185,8 +185,8 @@ impl RawVec { Err(_) => capacity_overflow(), } let result = match init { - AllocInit::Uninitialized => alloc.alloc(layout), - AllocInit::Zeroed => alloc.alloc_zeroed(layout), + AllocInit::Uninitialized => alloc.allocate(layout), + AllocInit::Zeroed => alloc.allocate_zeroed(layout), }; let ptr = match result { Ok(ptr) => ptr, @@ -232,7 +232,7 @@ impl RawVec { } /// Returns a shared reference to the allocator backing this `RawVec`. - pub fn alloc_ref(&self) -> &A { + pub fn allocator(&self) -> &A { &self.alloc } @@ -359,7 +359,7 @@ impl RawVec { } } -impl RawVec { +impl RawVec { /// Returns if the buffer needs to grow to fulfill the needed extra capacity. /// Mainly used to make inlining reserve-calls possible without inlining `grow`. fn needs_to_grow(&self, len: usize, additional: usize) -> bool { @@ -471,7 +471,7 @@ fn finish_grow( alloc: &mut A, ) -> Result, TryReserveError> where - A: AllocRef, + A: Allocator, { // Check for the error here to minimize the size of `RawVec::grow_*`. let new_layout = new_layout.map_err(|_| CapacityOverflow)?; @@ -486,17 +486,17 @@ where alloc.grow(ptr, old_layout, new_layout) } } else { - alloc.alloc(new_layout) + alloc.allocate(new_layout) }; memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }) } -unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec { +unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. fn drop(&mut self) { if let Some((ptr, layout)) = self.current_memory() { - unsafe { self.alloc.dealloc(ptr, layout) } + unsafe { self.alloc.deallocate(ptr, layout) } } } } diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs index cb4fe1b46cd75..8c15a24409bab 100644 --- a/library/alloc/src/raw_vec/tests.rs +++ b/library/alloc/src/raw_vec/tests.rs @@ -20,13 +20,13 @@ fn allocator_param() { struct BoundedAlloc { fuel: Cell, } - unsafe impl AllocRef for BoundedAlloc { - fn alloc(&self, layout: Layout) -> Result, AllocError> { + unsafe impl Allocator for BoundedAlloc { + fn allocate(&self, layout: Layout) -> Result, AllocError> { let size = layout.size(); if size > self.fuel.get() { return Err(AllocError); } - match Global.alloc(layout) { + match Global.allocate(layout) { ok @ Ok(_) => { self.fuel.set(self.fuel.get() - size); ok @@ -34,8 +34,8 @@ fn allocator_param() { err @ Err(_) => err, } } - unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { - unsafe { Global.dealloc(ptr, layout) } + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + unsafe { Global.deallocate(ptr, layout) } } } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 6dcd0c6056c30..a96be57143d38 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -262,7 +262,7 @@ use core::pin::Pin; use core::ptr::{self, NonNull}; use core::slice::from_raw_parts_mut; -use crate::alloc::{box_free, handle_alloc_error, AllocError, AllocRef, Global, Layout}; +use crate::alloc::{box_free, handle_alloc_error, AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; use crate::string::String; use crate::vec::Vec; @@ -416,7 +416,7 @@ impl Rc { unsafe { Rc::from_ptr(Rc::allocate_for_layout( Layout::new::(), - |layout| Global.alloc(layout), + 
|layout| Global.allocate(layout), |mem| mem as *mut RcBox>, )) } @@ -447,7 +447,7 @@ impl Rc { unsafe { Rc::from_ptr(Rc::allocate_for_layout( Layout::new::(), - |layout| Global.alloc_zeroed(layout), + |layout| Global.allocate_zeroed(layout), |mem| mem as *mut RcBox>, )) } @@ -555,7 +555,7 @@ impl Rc<[T]> { unsafe { Rc::from_ptr(Rc::allocate_for_layout( Layout::array::(len).unwrap(), - |layout| Global.alloc_zeroed(layout), + |layout| Global.allocate_zeroed(layout), |mem| { ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[mem::MaybeUninit]> @@ -1040,7 +1040,7 @@ impl Rc { unsafe { Self::allocate_for_layout( Layout::for_value(&*ptr), - |layout| Global.alloc(layout), + |layout| Global.allocate(layout), |mem| set_data_ptr(ptr as *mut T, mem) as *mut RcBox, ) } @@ -1075,7 +1075,7 @@ impl Rc<[T]> { unsafe { Self::allocate_for_layout( Layout::array::(len).unwrap(), - |layout| Global.alloc(layout), + |layout| Global.allocate(layout), |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>, ) } @@ -1125,7 +1125,7 @@ impl Rc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem, self.layout); + Global.deallocate(self.mem, self.layout); } } } @@ -1225,7 +1225,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.inner().dec_weak(); if self.inner().weak() == 0 { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } @@ -2040,7 +2040,7 @@ impl Drop for Weak { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index 949a3bb1d708c..064700fc72c95 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -87,7 +87,7 @@ use core::cmp::Ordering::{self, Less}; use core::mem::{self, size_of}; use core::ptr; -use crate::alloc::{AllocRef, Global}; +use crate::alloc::{Allocator, Global}; use crate::borrow::ToOwned; use crate::boxed::Box; use crate::vec::Vec; @@ -138,7 +138,7 @@ pub use hack::to_vec; // `core::slice::SliceExt` - we need to supply these functions for the // `test_permutations` test mod hack { - use core::alloc::AllocRef; + use core::alloc::Allocator; use crate::boxed::Box; use crate::vec::Vec; @@ -146,33 +146,33 @@ mod hack { // We shouldn't add inline attribute to this since this is used in // `vec!` macro mostly and causes perf regression. See #71204 for // discussion and perf results. 
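For illustration (nightly only; not part of the diff): the `hack` module's `into_vec`/`to_vec` below are the allocator-aware back ends of the public slice APIs, reachable like this.

```rust
#![feature(allocator_api)]

use std::alloc::System;

fn main() {
    let s: &[i32] = &[1, 2, 3];
    // `to_vec_in` clones the slice into a Vec backed by the given allocator.
    let v = s.to_vec_in(System);
    assert_eq!(v, [1, 2, 3]);
    // `into_vec` moves a boxed slice into a Vec of the same allocator,
    // without copying or reallocating.
    let v = v.into_boxed_slice().into_vec();
    assert_eq!(v, [1, 2, 3]);
}
```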
- pub fn into_vec(b: Box<[T], A>) -> Vec { + pub fn into_vec(b: Box<[T], A>) -> Vec { unsafe { let len = b.len(); - let (b, alloc) = Box::into_raw_with_alloc(b); + let (b, alloc) = Box::into_raw_with_allocator(b); Vec::from_raw_parts_in(b as *mut T, len, len, alloc) } } #[inline] - pub fn to_vec(s: &[T], alloc: A) -> Vec { + pub fn to_vec(s: &[T], alloc: A) -> Vec { T::to_vec(s, alloc) } pub trait ConvertVec { - fn to_vec(s: &[Self], alloc: A) -> Vec + fn to_vec(s: &[Self], alloc: A) -> Vec where Self: Sized; } impl ConvertVec for T { #[inline] - default fn to_vec(s: &[Self], alloc: A) -> Vec { - struct DropGuard<'a, T, A: AllocRef> { + default fn to_vec(s: &[Self], alloc: A) -> Vec { + struct DropGuard<'a, T, A: Allocator> { vec: &'a mut Vec, num_init: usize, } - impl<'a, T, A: AllocRef> Drop for DropGuard<'a, T, A> { + impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> { #[inline] fn drop(&mut self) { // SAFETY: @@ -203,7 +203,7 @@ mod hack { impl ConvertVec for T { #[inline] - fn to_vec(s: &[Self], alloc: A) -> Vec { + fn to_vec(s: &[Self], alloc: A) -> Vec { let mut v = Vec::with_capacity_in(s.len(), alloc); // SAFETY: // allocated above with the capacity of `s`, and initialize to `s.len()` in @@ -464,7 +464,7 @@ impl [T] { /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub fn to_vec_in(&self, alloc: A) -> Vec + pub fn to_vec_in(&self, alloc: A) -> Vec where T: Clone, { @@ -488,7 +488,7 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn into_vec(self: Box) -> Vec { + pub fn into_vec(self: Box) -> Vec { // N.B., see the `hack` module in this file for more details. hack::into_vec(self) } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 5ab930a520884..9d478a302e96c 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -22,7 +22,7 @@ use core::slice::from_raw_parts_mut; use core::sync::atomic; use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst}; -use crate::alloc::{box_free, handle_alloc_error, AllocError, AllocRef, Global, Layout}; +use crate::alloc::{box_free, handle_alloc_error, AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::rc::is_dangling; @@ -434,7 +434,7 @@ impl Arc { unsafe { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::(), - |layout| Global.alloc(layout), + |layout| Global.allocate(layout), |mem| mem as *mut ArcInner>, )) } @@ -465,7 +465,7 @@ impl Arc { unsafe { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::(), - |layout| Global.alloc_zeroed(layout), + |layout| Global.allocate_zeroed(layout), |mem| mem as *mut ArcInner>, )) } @@ -572,7 +572,7 @@ impl Arc<[T]> { unsafe { Arc::from_ptr(Arc::allocate_for_layout( Layout::array::(len).unwrap(), - |layout| Global.alloc_zeroed(layout), + |layout| Global.allocate_zeroed(layout), |mem| { ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[mem::MaybeUninit]> @@ -1015,7 +1015,7 @@ impl Arc { unsafe { Self::allocate_for_layout( Layout::for_value(&*ptr), - |layout| Global.alloc(layout), + |layout| Global.allocate(layout), |mem| set_data_ptr(ptr as *mut T, mem) as *mut ArcInner, ) } @@ -1050,7 +1050,7 @@ impl Arc<[T]> { unsafe { Self::allocate_for_layout( Layout::array::(len).unwrap(), - |layout| Global.alloc(layout), + |layout| Global.allocate(layout), |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>, ) } @@ -1102,7 +1102,7 @@ impl Arc<[T]> { let slice = from_raw_parts_mut(self.elems, 
self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem, self.layout); + Global.deallocate(self.mem, self.layout); } } } @@ -1925,7 +1925,7 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } + unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } } } diff --git a/library/alloc/src/vec.rs b/library/alloc/src/vec.rs index 5168792092823..9fffb47aa5975 100644 --- a/library/alloc/src/vec.rs +++ b/library/alloc/src/vec.rs @@ -68,7 +68,7 @@ use core::ops::{self, Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; use core::slice::{self, SliceIndex}; -use crate::alloc::{AllocRef, Global}; +use crate::alloc::{Allocator, Global}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::collections::TryReserveError; @@ -298,7 +298,7 @@ use crate::raw_vec::RawVec; /// [`&`]: ../../std/primitive.reference.html #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")] -pub struct Vec { +pub struct Vec { buf: RawVec, len: usize, } @@ -433,7 +433,7 @@ impl Vec { } } -impl Vec { +impl Vec { /// Constructs a new, empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. @@ -555,7 +555,7 @@ impl Vec { /// let p = v.as_mut_ptr(); /// let len = v.len(); /// let cap = v.capacity(); - /// let alloc = v.alloc_ref(); + /// let alloc = v.allocator(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 @@ -656,7 +656,7 @@ impl Vec { let len = me.len(); let capacity = me.capacity(); let ptr = me.as_mut_ptr(); - let alloc = unsafe { ptr::read(me.alloc_ref()) }; + let alloc = unsafe { ptr::read(me.allocator()) }; (ptr, len, capacity, alloc) } @@ -1058,8 +1058,8 @@ impl Vec { /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] - pub fn alloc_ref(&self) -> &A { - self.buf.alloc_ref() + pub fn allocator(&self) -> &A { + self.buf.allocator() } /// Forces the length of the vector to `new_len`. @@ -1620,12 +1620,12 @@ impl Vec { // the new vector can take over the original buffer and avoid the copy return mem::replace( self, - Vec::with_capacity_in(self.capacity(), self.alloc_ref().clone()), + Vec::with_capacity_in(self.capacity(), self.allocator().clone()), ); } let other_len = self.len - at; - let mut other = Vec::with_capacity_in(other_len, self.alloc_ref().clone()); + let mut other = Vec::with_capacity_in(other_len, self.allocator().clone()); // Unsafely `set_len` and copy items to `other`. unsafe { @@ -1749,7 +1749,7 @@ impl Vec { } } -impl Vec { +impl Vec { /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// /// If `new_len` is greater than `len`, the `Vec` is extended by the @@ -1844,7 +1844,7 @@ impl T> ExtendWith for ExtendFunc { } } -impl Vec { +impl Vec { /// Extend the vector by `n` values, using the given generator. fn extend_with>(&mut self, n: usize, mut value: E) { self.reserve(n); @@ -1904,7 +1904,7 @@ impl Drop for SetLenOnDrop<'_> { } } -impl Vec { +impl Vec { /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. /// @@ -1926,7 +1926,7 @@ impl Vec { } } -impl Vec { +impl Vec { /// Removes the first instance of `item` from the vector if the item exists. /// /// This method will be removed soon. 
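For illustration (nightly only; not part of the diff): on `Vec` the accessor rename mirrors `Box`, `alloc_ref` becoming `allocator`.

```rust
#![feature(allocator_api)]

use std::alloc::System;

fn main() {
    let mut v = Vec::with_capacity_in(4, System);
    v.extend_from_slice(&[1, 2, 3]);
    // Renamed from `alloc_ref`: borrows the allocator backing this Vec.
    let _alloc: &System = v.allocator();
    assert_eq!(v.len(), 3);
}
```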
@@ -1959,17 +1959,17 @@ pub fn from_elem(elem: T, n: usize) -> Vec { #[doc(hidden)] #[unstable(feature = "allocator_api", issue = "32838")] -pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { +pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { ::from_elem(elem, n, alloc) } // Specialization trait used for Vec::from_elem trait SpecFromElem: Sized { - fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; + fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; } impl SpecFromElem for T { - default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { + default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { let mut v = Vec::with_capacity_in(n, alloc); v.extend_with(n, ExtendElement(elem)); v @@ -1978,7 +1978,7 @@ impl SpecFromElem for T { impl SpecFromElem for i8 { #[inline] - fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { + fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -1993,7 +1993,7 @@ impl SpecFromElem for i8 { impl SpecFromElem for u8 { #[inline] - fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { + fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -2008,7 +2008,7 @@ impl SpecFromElem for u8 { impl SpecFromElem for T { #[inline] - fn from_elem(elem: T, n: usize, alloc: A) -> Vec { + fn from_elem(elem: T, n: usize, alloc: A) -> Vec { if elem.is_zero() { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -2093,7 +2093,7 @@ unsafe impl IsZero for Option> { //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl ops::Deref for Vec { +impl ops::Deref for Vec { type Target = [T]; fn deref(&self) -> &[T] { @@ -2102,17 +2102,17 @@ impl ops::Deref for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl ops::DerefMut for Vec { +impl ops::DerefMut for Vec { fn deref_mut(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Vec { +impl Clone for Vec { #[cfg(not(test))] fn clone(&self) -> Self { - let alloc = self.alloc_ref().clone(); + let alloc = self.allocator().clone(); <[T]>::to_vec_in(&**self, alloc) } @@ -2122,7 +2122,7 @@ impl Clone for Vec { // NB see the slice::hack module in slice.rs for more information #[cfg(test)] fn clone(&self) -> Self { - let alloc = self.alloc_ref().clone(); + let alloc = self.allocator().clone(); crate::slice::to_vec(&**self, alloc) } @@ -2141,7 +2141,7 @@ impl Clone for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Vec { +impl Hash for Vec { #[inline] fn hash(&self, state: &mut H) { Hash::hash(&**self, state) @@ -2153,7 +2153,7 @@ impl Hash for Vec { message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: AllocRef> Index for Vec { +impl, A: Allocator> Index for Vec { type Output = I::Output; #[inline] @@ -2167,7 +2167,7 @@ impl, A: AllocRef> Index for Vec { message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: AllocRef> IndexMut for Vec { +impl, A: Allocator> IndexMut for Vec { #[inline] fn index_mut(&mut self, index: I) -> &mut Self::Output { IndexMut::index_mut(&mut **self, index) @@ -2183,7 +2183,7 @@ impl FromIterator for Vec { } 
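For context (not part of the diff): the `SpecFromElem` fast path above is reachable from stable code through the `vec!` macro.

```rust
fn main() {
    // `vec![0u8; n]` lowers to `from_elem`; the u8 specialization above routes
    // a zero byte pattern to `RawVec::with_capacity_zeroed_in` instead of
    // writing each element individually.
    let v = vec![0u8; 4096];
    assert!(v.iter().all(|&b| b == 0));
}
```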
#[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for Vec { +impl IntoIterator for Vec { type Item = T; type IntoIter = IntoIter; @@ -2204,7 +2204,7 @@ impl IntoIterator for Vec { fn into_iter(self) -> IntoIter { unsafe { let mut me = ManuallyDrop::new(self); - let alloc = ptr::read(me.alloc_ref()); + let alloc = ptr::read(me.allocator()); let begin = me.as_mut_ptr(); let end = if mem::size_of::() == 0 { arith_offset(begin as *const i8, me.len() as isize) as *const T @@ -2225,7 +2225,7 @@ impl IntoIterator for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: AllocRef> IntoIterator for &'a Vec { +impl<'a, T, A: Allocator> IntoIterator for &'a Vec { type Item = &'a T; type IntoIter = slice::Iter<'a, T>; @@ -2235,7 +2235,7 @@ impl<'a, T, A: AllocRef> IntoIterator for &'a Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: AllocRef> IntoIterator for &'a mut Vec { +impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; @@ -2245,7 +2245,7 @@ impl<'a, T, A: AllocRef> IntoIterator for &'a mut Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for Vec { +impl Extend for Vec { #[inline] fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()) @@ -2533,7 +2533,7 @@ trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend for Vec +impl SpecExtend for Vec where I: Iterator, { @@ -2542,7 +2542,7 @@ where } } -impl SpecExtend for Vec +impl SpecExtend for Vec where I: TrustedLen, { @@ -2575,7 +2575,7 @@ where } } -impl SpecExtend> for Vec { +impl SpecExtend> for Vec { fn spec_extend(&mut self, mut iterator: IntoIter) { unsafe { self.append_elements(iterator.as_slice() as _); @@ -2584,7 +2584,7 @@ impl SpecExtend> for Vec { } } -impl<'a, T: 'a, I, A: AllocRef + 'a> SpecExtend<&'a T, I> for Vec +impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec where I: Iterator, T: Clone, @@ -2594,7 +2594,7 @@ where } } -impl<'a, T: 'a, A: AllocRef + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec +impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec where T: Copy, { @@ -2604,7 +2604,7 @@ where } } -impl Vec { +impl Vec { // leaf method to which various SpecFrom/SpecExtend implementations delegate when // they have no further optimizations to apply fn extend_desugared>(&mut self, mut iterator: I) { @@ -2739,7 +2739,7 @@ impl Vec { /// /// [`copy_from_slice`]: ../../std/primitive.slice.html#method.copy_from_slice #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: Copy + 'a, A: AllocRef + 'a> Extend<&'a T> for Vec { +impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()) } @@ -2771,18 +2771,18 @@ macro_rules! __impl_slice_eq1 { } } -__impl_slice_eq1! { [A: AllocRef] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: AllocRef] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: AllocRef] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: AllocRef] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: AllocRef] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: AllocRef] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } -__impl_slice_eq1! 
{ [A: AllocRef] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } -__impl_slice_eq1! { [A: AllocRef] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } +__impl_slice_eq1! { [A: Allocator] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! { [A: Allocator] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: AllocRef, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: AllocRef, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } // NOTE: some less important impls are omitted to reduce code bloat // FIXME(Centril): Reconsider this? @@ -2796,7 +2796,7 @@ __impl_slice_eq1! { [A: AllocRef, const N: usize] Vec, &[U; N], #[stable(f /// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Vec { +impl PartialOrd for Vec { #[inline] fn partial_cmp(&self, other: &Self) -> Option { PartialOrd::partial_cmp(&**self, &**other) @@ -2804,11 +2804,11 @@ impl PartialOrd for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Vec {} +impl Eq for Vec {} /// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). 
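For illustration (nightly only; not part of the diff): because the comparison impls are generic over `A: Allocator`, a vector in a non-default allocator still compares against plain slices and arrays.

```rust
#![feature(allocator_api)]

use std::alloc::System;

fn main() {
    let mut v = Vec::new_in(System);
    v.extend_from_slice(&[1, 2, 3]);
    // `Vec<T, A> == [U; N]` and `Vec<T, A> == &[U]` hold for any allocator A.
    assert_eq!(v, [1, 2, 3]);
    assert_eq!(v, &[1, 2, 3][..]);
}
```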
#[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Vec { +impl Ord for Vec { #[inline] fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(&**self, &**other) @@ -2816,7 +2816,7 @@ impl Ord for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: AllocRef> Drop for Vec { +unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { fn drop(&mut self) { unsafe { // use drop for [T] @@ -2837,35 +2837,35 @@ impl Default for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Vec { +impl fmt::Debug for Vec { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef> for Vec { +impl AsRef> for Vec { fn as_ref(&self) -> &Vec { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut> for Vec { +impl AsMut> for Vec { fn as_mut(&mut self) -> &mut Vec { self } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef<[T]> for Vec { +impl AsRef<[T]> for Vec { fn as_ref(&self) -> &[T] { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut<[T]> for Vec { +impl AsMut<[T]> for Vec { fn as_mut(&mut self) -> &mut [T] { self } @@ -2920,7 +2920,7 @@ where // note: test pulls in libstd, which causes errors here #[cfg(not(test))] #[stable(feature = "vec_from_box", since = "1.18.0")] -impl From> for Vec { +impl From> for Vec { fn from(s: Box<[T], A>) -> Self { let len = s.len(); Self { buf: RawVec::from_box(s), len } @@ -2930,7 +2930,7 @@ impl From> for Vec { // note: test pulls in libstd, which causes errors here #[cfg(not(test))] #[stable(feature = "box_from_vec", since = "1.20.0")] -impl From> for Box<[T], A> { +impl From> for Box<[T], A> { fn from(v: Vec) -> Self { v.into_boxed_slice() } @@ -2944,7 +2944,7 @@ impl From<&str> for Vec { } #[stable(feature = "array_try_from_vec", since = "1.48.0")] -impl TryFrom> for [T; N] { +impl TryFrom> for [T; N] { type Error = Vec; /// Gets the entire contents of the `Vec` as an array, @@ -3045,8 +3045,10 @@ where /// let iter: std::vec::IntoIter<_> = v.into_iter(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub struct IntoIter -{ +pub struct IntoIter< + T, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { buf: NonNull, phantom: PhantomData, cap: usize, @@ -3056,13 +3058,13 @@ pub struct IntoIter fmt::Debug for IntoIter { +impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } -impl IntoIter { +impl IntoIter { /// Returns the remaining items of this iterator as a slice. /// /// # Examples @@ -3100,7 +3102,7 @@ impl IntoIter { /// Returns a reference to the underlying allocator. 
#[unstable(feature = "allocator_api", issue = "32838")] #[inline] - pub fn alloc_ref(&self) -> &A { + pub fn allocator(&self) -> &A { &self.alloc } @@ -3126,19 +3128,19 @@ impl IntoIter { } #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] -impl AsRef<[T]> for IntoIter { +impl AsRef<[T]> for IntoIter { fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for IntoIter {} +unsafe impl Send for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for IntoIter {} +unsafe impl Sync for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +impl Iterator for IntoIter { type Item = T; #[inline] @@ -3195,7 +3197,7 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { if self.end == self.ptr { @@ -3215,23 +3217,23 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator for IntoIter { fn is_empty(&self) -> bool { self.ptr == self.end } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter {} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for IntoIter {} +unsafe impl TrustedLen for IntoIter {} #[doc(hidden)] #[unstable(issue = "none", feature = "std_internals")] // T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr // and thus we can't implement drop-handling -unsafe impl TrustedRandomAccess for IntoIter +unsafe impl TrustedRandomAccess for IntoIter where T: Copy, { @@ -3241,7 +3243,7 @@ where } #[stable(feature = "vec_into_iter_clone", since = "1.8.0")] -impl Clone for IntoIter { +impl Clone for IntoIter { #[cfg(not(test))] fn clone(&self) -> Self { self.as_slice().to_vec_in(self.alloc.clone()).into_iter() @@ -3253,11 +3255,11 @@ impl Clone for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: AllocRef> Drop for IntoIter { +unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { fn drop(&mut self) { - struct DropGuard<'a, T, A: AllocRef>(&'a mut IntoIter); + struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); - impl Drop for DropGuard<'_, T, A> { + impl Drop for DropGuard<'_, T, A> { fn drop(&mut self) { unsafe { // `IntoIter::alloc` is not used anymore after this @@ -3278,10 +3280,10 @@ unsafe impl<#[may_dangle] T, A: AllocRef> Drop for IntoIter { } #[unstable(issue = "none", feature = "inplace_iteration")] -unsafe impl InPlaceIterable for IntoIter {} +unsafe impl InPlaceIterable for IntoIter {} #[unstable(issue = "none", feature = "inplace_iteration")] -unsafe impl SourceIter for IntoIter { +unsafe impl SourceIter for IntoIter { type Source = Self; #[inline] @@ -3320,7 +3322,7 @@ impl AsIntoIter for IntoIter { pub struct Drain< 'a, T: 'a, - #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef + 'a = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, > { /// Index of tail to preserve tail_start: usize, @@ -3332,13 +3334,13 @@ pub struct Drain< } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Drain<'_, T, A> { +impl fmt::Debug for Drain<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 
f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() } } -impl<'a, T, A: AllocRef> Drain<'a, T, A> { +impl<'a, T, A: Allocator> Drain<'a, T, A> { /// Returns the remaining items of this iterator as a slice. /// /// # Examples @@ -3358,25 +3360,25 @@ impl<'a, T, A: AllocRef> Drain<'a, T, A> { /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] - pub fn alloc_ref(&self) -> &A { - unsafe { self.vec.as_ref().alloc_ref() } + pub fn allocator(&self) -> &A { + unsafe { self.vec.as_ref().allocator() } } } #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] -impl<'a, T, A: AllocRef> AsRef<[T]> for Drain<'a, T, A> { +impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync for Drain<'_, T, A> {} +unsafe impl Sync for Drain<'_, T, A> {} #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send for Drain<'_, T, A> {} +unsafe impl Send for Drain<'_, T, A> {} #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A> { +impl Iterator for Drain<'_, T, A> { type Item = T; #[inline] @@ -3390,7 +3392,7 @@ impl Iterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, A> { +impl DoubleEndedIterator for Drain<'_, T, A> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) @@ -3398,13 +3400,13 @@ impl DoubleEndedIterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A> { +impl Drop for Drain<'_, T, A> { fn drop(&mut self) { /// Continues dropping the remaining elements in the `Drain`, then moves back the /// un-`Drain`ed elements to restore the original `Vec`. - struct DropGuard<'r, 'a, T, A: AllocRef>(&'r mut Drain<'a, T, A>); + struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); - impl<'r, 'a, T, A: AllocRef> Drop for DropGuard<'r, 'a, T, A> { + impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { fn drop(&mut self) { // Continue the same loop we have below. If the loop already finished, this does // nothing. @@ -3440,17 +3442,17 @@ impl Drop for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, A> { +impl ExactSizeIterator for Drain<'_, T, A> { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for Drain<'_, T, A> {} +unsafe impl TrustedLen for Drain<'_, T, A> {} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A> {} +impl FusedIterator for Drain<'_, T, A> {} /// A splicing iterator for `Vec`. 
 ///
@@ -3469,14 +3471,14 @@ impl<T, A: AllocRef> FusedIterator for Drain<'_, T, A> {}
 pub struct Splice<
     'a,
     I: Iterator + 'a,
-    #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef + 'a = Global,
+    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
 > {
     drain: Drain<'a, I::Item, A>,
     replace_with: I,
 }

 #[stable(feature = "vec_splice", since = "1.21.0")]
-impl<I: Iterator, A: AllocRef> Iterator for Splice<'_, I, A> {
+impl<I: Iterator, A: Allocator> Iterator for Splice<'_, I, A> {
     type Item = I::Item;

     fn next(&mut self) -> Option<Self::Item> {
@@ -3489,17 +3491,17 @@ impl<I: Iterator, A: AllocRef> Iterator for Splice<'_, I, A> {
 }

 #[stable(feature = "vec_splice", since = "1.21.0")]
-impl<I: Iterator, A: AllocRef> DoubleEndedIterator for Splice<'_, I, A> {
+impl<I: Iterator, A: Allocator> DoubleEndedIterator for Splice<'_, I, A> {
     fn next_back(&mut self) -> Option<Self::Item> {
         self.drain.next_back()
     }
 }

 #[stable(feature = "vec_splice", since = "1.21.0")]
-impl<I: Iterator, A: AllocRef> ExactSizeIterator for Splice<'_, I, A> {}
+impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}

 #[stable(feature = "vec_splice", since = "1.21.0")]
-impl<I: Iterator, A: AllocRef> Drop for Splice<'_, I, A> {
+impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
     fn drop(&mut self) {
         self.drain.by_ref().for_each(drop);

@@ -3540,7 +3542,7 @@ impl<I: Iterator, A: AllocRef> Drop for Splice<'_, I, A> {
 }

 /// Private helper methods for `Splice::drop`
-impl<T, A: AllocRef> Drain<'_, T, A> {
+impl<T, A: Allocator> Drain<'_, T, A> {
     /// The range from `self.vec.len` to `self.tail_start` contains elements
     /// that have been moved out.
     /// Fill that range as much as possible with new elements from the `replace_with` iterator.
@@ -3599,7 +3601,7 @@ pub struct DrainFilter<
     'a,
     T,
     F,
-    #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
+    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
 > where
     F: FnMut(&mut T) -> bool,
 {
@@ -3620,20 +3622,20 @@ pub struct DrainFilter<
     panic_flag: bool,
 }

-impl<T, F, A: AllocRef> DrainFilter<'_, T, F, A>
+impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
 where
     F: FnMut(&mut T) -> bool,
 {
     /// Returns a reference to the underlying allocator.
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
-    pub fn alloc_ref(&self) -> &A {
-        self.vec.alloc_ref()
+    pub fn allocator(&self) -> &A {
+        self.vec.allocator()
     }
 }

 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T, F, A: AllocRef> Iterator for DrainFilter<'_, T, F, A>
+impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
 where
     F: FnMut(&mut T) -> bool,
 {
@@ -3671,19 +3673,19 @@ where
 }

 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T, F, A: AllocRef> Drop for DrainFilter<'_, T, F, A>
+impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
 where
     F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
-        struct BackshiftOnDrop<'a, 'b, T, F, A: AllocRef>
+        struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
         where
             F: FnMut(&mut T) -> bool,
         {
             drain: &'b mut DrainFilter<'a, T, F, A>,
         }

-        impl<'a, 'b, T, F, A: AllocRef> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
+        impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
         where
             F: FnMut(&mut T) -> bool,
         {
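The `library/alloc/src/vec.rs` hunks above rename the allocator accessor from `alloc_ref` to `allocator` on `Vec`'s iterator types (`IntoIter`, `Drain`, `DrainFilter`). A minimal caller-side sketch of the renamed API, not part of the patch, assuming a nightly toolchain with `#![feature(allocator_api)]` and the existing `Vec::new_in`/`drain` methods:

    #![feature(allocator_api)]

    use std::alloc::System;

    fn main() {
        // `allocator()` (formerly `alloc_ref()`) returns the allocator
        // that the vector, or one of its iterators, was built with.
        let mut v: Vec<i32, System> = Vec::new_in(System);
        v.push(1);
        let _a: &System = v.allocator();

        // The same rename applies to the iterator adapters touched above.
        let drain = v.drain(..);
        let _a: &System = drain.allocator();
        drop(drain);
    }
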
diff --git a/library/alloc/tests/heap.rs b/library/alloc/tests/heap.rs
index a7239a4b14fae..246b341eeb387 100644
--- a/library/alloc/tests/heap.rs
+++ b/library/alloc/tests/heap.rs
@@ -1,4 +1,4 @@
-use std::alloc::{AllocRef, Global, Layout, System};
+use std::alloc::{Allocator, Global, Layout, System};
 
 /// Issue #45955 and #62251.
 #[test]
@@ -11,7 +11,7 @@ fn std_heap_overaligned_request() {
     check_overalign_requests(Global)
 }
 
-fn check_overalign_requests<T: AllocRef>(allocator: T) {
+fn check_overalign_requests<T: Allocator>(allocator: T) {
     for &align in &[4, 8, 16, 32] {
         // less than and bigger than `MIN_ALIGN`
         for &size in &[align / 2, align - 1] {
@@ -20,7 +20,7 @@ fn check_overalign_requests<T: AllocRef>(allocator: T) {
             unsafe {
                 let pointers: Vec<_> = (0..iterations)
                     .map(|_| {
-                        allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
+                        allocator.allocate(Layout::from_size_align(size, align).unwrap()).unwrap()
                     })
                     .collect();
                 for &ptr in &pointers {
@@ -33,7 +33,7 @@ fn check_overalign_requests<T: AllocRef>(allocator: T) {
 
                 // Clean up
                 for &ptr in &pointers {
-                    allocator.dealloc(
+                    allocator.deallocate(
                         ptr.as_non_null_ptr(),
                         Layout::from_size_align(size, align).unwrap(),
                     )
diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs
index 339d85902b83f..57c6624b64f9e 100644
--- a/library/core/src/alloc/layout.rs
+++ b/library/core/src/alloc/layout.rs
@@ -19,7 +19,7 @@ const fn size_align<T>() -> (usize, usize) {
 /// even though `GlobalAlloc` requires that all memory requests
 /// be non-zero in size. A caller must either ensure that conditions
 /// like this are met, use specific allocators with looser
-/// requirements, or use the more lenient `AllocRef` interface.)
+/// requirements, or use the more lenient `Allocator` interface.)
 #[stable(feature = "alloc_layout", since = "1.28.0")]
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 #[lang = "alloc_layout"]
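The `layout.rs` doc fix above, and the `Allocator` trait docs that follow, both point at the same behavioral difference: zero-sized requests are valid through `Allocator`, unlike through `GlobalAlloc`. A sketch of what that means in practice (illustrative, not from the patch; assumes the nightly `allocator_api`, `slice_ptr_get`, and `slice_ptr_len` features):

    #![feature(allocator_api, slice_ptr_get, slice_ptr_len)]

    use std::alloc::{Allocator, Global, Layout};

    fn main() {
        // A zero-sized layout is an acceptable request; the result is a
        // dangling, zero-length slice pointer rather than an error.
        let layout = Layout::new::<[u8; 0]>();
        let ptr = Global.allocate(layout).expect("zero-sized allocation");
        assert_eq!(ptr.len(), 0);

        // Deallocating with the same layout releases the (empty) block.
        unsafe { Global.deallocate(ptr.as_non_null_ptr(), layout) };
    }
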
diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs
index bc874e2e52242..045eb58d0135a 100644
--- a/library/core/src/alloc/mod.rs
+++ b/library/core/src/alloc/mod.rs
@@ -40,14 +40,14 @@ impl fmt::Display for AllocError {
     }
 }
 
-/// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of
+/// An implementation of `Allocator` can allocate, grow, shrink, and deallocate arbitrary blocks of
 /// data described via [`Layout`][].
 ///
-/// `AllocRef` is designed to be implemented on ZSTs, references, or smart pointers because having
+/// `Allocator` is designed to be implemented on ZSTs, references, or smart pointers because having
 /// an allocator like `MyAlloc([u8; N])` cannot be moved, without updating the pointers to the
 /// allocated memory.
 ///
-/// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying
+/// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `Allocator`. If an underlying
 /// allocator does not support this (like jemalloc) or return a null pointer (such as
 /// `libc::malloc`), this must be caught by the implementation.
 ///
@@ -56,18 +56,18 @@ impl fmt::Display for AllocError {
 /// Some of the methods require that a memory block be *currently allocated* via an allocator. This
 /// means that:
 ///
-/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
+/// * the starting address for that memory block was previously returned by [`allocate`], [`grow`], or
 ///   [`shrink`], and
 ///
 /// * the memory block has not been subsequently deallocated, where blocks are either deallocated
-///   directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or
+///   directly by being passed to [`deallocate`] or were changed by being passed to [`grow`] or
 ///   [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
 ///   remains valid.
 ///
-/// [`alloc`]: AllocRef::alloc
-/// [`grow`]: AllocRef::grow
-/// [`shrink`]: AllocRef::shrink
-/// [`dealloc`]: AllocRef::dealloc
+/// [`allocate`]: Allocator::allocate
+/// [`grow`]: Allocator::grow
+/// [`shrink`]: Allocator::shrink
+/// [`deallocate`]: Allocator::deallocate
 ///
 /// ### Memory fitting
 ///
@@ -79,7 +79,7 @@ impl fmt::Display for AllocError {
 ///
 /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
 ///   - `min` is the size of the layout most recently used to allocate the block, and
-///   - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
+///   - `max` is the latest actual size returned from [`allocate`], [`grow`], or [`shrink`].
 ///
 /// [`layout.align()`]: Layout::align
 /// [`layout.size()`]: Layout::size
@@ -97,7 +97,7 @@ impl fmt::Display for AllocError {
 ///
 /// [*currently allocated*]: #currently-allocated-memory
 #[unstable(feature = "allocator_api", issue = "32838")]
-pub unsafe trait AllocRef {
+pub unsafe trait Allocator {
     /// Attempts to allocate a block of memory.
     ///
     /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`.
@@ -118,9 +118,9 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
 
-    /// Behaves like `alloc`, but also ensures that the returned memory is zero-initialized.
+    /// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized.
     ///
     /// # Errors
     ///
@@ -135,8 +135,8 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        let ptr = self.alloc(layout)?;
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        let ptr = self.allocate(layout)?;
         // SAFETY: `alloc` returns a valid memory block
         unsafe { ptr.as_non_null_ptr().as_ptr().write_bytes(0, ptr.len()) }
         Ok(ptr)
@@ -151,7 +151,7 @@ pub unsafe trait AllocRef {
     ///
     /// [*currently allocated*]: #currently-allocated-memory
     /// [*fit*]: #memory-fitting
-    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout);
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
 
     /// Attempts to extend the memory block.
     ///
@@ -200,7 +200,7 @@ pub unsafe trait AllocRef {
             "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
         );
 
-        let new_ptr = self.alloc(new_layout)?;
+        let new_ptr = self.allocate(new_layout)?;
 
         // SAFETY: because `new_layout.size()` must be greater than or equal to
         // `old_layout.size()`, both the old and new memory allocation are valid for reads and
@@ -209,7 +209,7 @@
         // safe. The safety contract for `dealloc` must be upheld by the caller.
         unsafe {
             ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
-            self.dealloc(ptr, old_layout);
+            self.deallocate(ptr, old_layout);
         }
 
         Ok(new_ptr)
@@ -261,7 +261,7 @@ pub unsafe trait AllocRef {
             "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
         );
 
-        let new_ptr = self.alloc_zeroed(new_layout)?;
+        let new_ptr = self.allocate_zeroed(new_layout)?;
 
         // SAFETY: because `new_layout.size()` must be greater than or equal to
         // `old_layout.size()`, both the old and new memory allocation are valid for reads and
@@ -270,7 +270,7 @@
         // safe. The safety contract for `dealloc` must be upheld by the caller.
         unsafe {
             ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
-            self.dealloc(ptr, old_layout);
+            self.deallocate(ptr, old_layout);
         }
 
         Ok(new_ptr)
@@ -323,7 +323,7 @@ pub unsafe trait AllocRef {
             "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
         );
 
-        let new_ptr = self.alloc(new_layout)?;
+        let new_ptr = self.allocate(new_layout)?;
 
         // SAFETY: because `new_layout.size()` must be lower than or equal to
         // `old_layout.size()`, both the old and new memory allocation are valid for reads and
@@ -332,15 +332,15 @@
         // safe. The safety contract for `dealloc` must be upheld by the caller.
         unsafe {
             ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_layout.size());
-            self.dealloc(ptr, old_layout);
+            self.deallocate(ptr, old_layout);
         }
 
         Ok(new_ptr)
     }
 
-    /// Creates a "by reference" adaptor for this instance of `AllocRef`.
+    /// Creates a "by reference" adaptor for this instance of `Allocator`.
     ///
-    /// The returned adaptor also implements `AllocRef` and will simply borrow this.
+    /// The returned adaptor also implements `Allocator` and will simply borrow this.
     #[inline(always)]
     fn by_ref(&self) -> &Self {
         self
@@ -348,24 +348,24 @@ pub unsafe trait AllocRef {
     }
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl<A> AllocRef for &A
+unsafe impl<A> Allocator for &A
 where
-    A: AllocRef + ?Sized,
+    A: Allocator + ?Sized,
 {
     #[inline]
-    fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        (**self).alloc(layout)
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        (**self).allocate(layout)
     }
 
     #[inline]
-    fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        (**self).alloc_zeroed(layout)
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        (**self).allocate_zeroed(layout)
     }
 
     #[inline]
-    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
         // SAFETY: the safety contract must be upheld by the caller
-        unsafe { (**self).dealloc(ptr, layout) }
+        unsafe { (**self).deallocate(ptr, layout) }
     }
 
     #[inline]
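Since this file defines the renamed trait itself, a short sketch of what a downstream implementation looks like after the rename may help reviewers: only `allocate` and `deallocate` are required, and the blanket `impl Allocator for &A` above is what lets collections borrow an allocator. The `Tracing` type here is hypothetical, not part of the patch, and the snippet assumes a nightly toolchain with `#![feature(allocator_api)]`:

    #![feature(allocator_api)]

    use std::alloc::{AllocError, Allocator, Layout, System};
    use std::ptr::NonNull;
    use std::sync::atomic::{AtomicUsize, Ordering};

    // Hypothetical tracing allocator: forwards to `System` and keeps a
    // running total of outstanding requested bytes.
    struct Tracing(AtomicUsize);

    unsafe impl Allocator for Tracing {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            self.0.fetch_add(layout.size(), Ordering::Relaxed);
            System.allocate(layout)
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            self.0.fetch_sub(layout.size(), Ordering::Relaxed);
            // SAFETY: forwarded verbatim, so the caller's contract carries over.
            unsafe { System.deallocate(ptr, layout) }
        }
    }

    fn main() {
        let tracing = Tracing(AtomicUsize::new(0));
        // `by_ref` (or plain `&tracing`) borrows the allocator, relying on
        // the blanket `impl Allocator for &A`.
        let v: Vec<u8, &Tracing> = Vec::with_capacity_in(16, tracing.by_ref());
        drop(v);
        assert_eq!(tracing.0.load(Ordering::Relaxed), 0);
    }
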
diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs
index 5dc7171a7dc26..d849008b88030 100644
--- a/library/core/src/ptr/non_null.rs
+++ b/library/core/src/ptr/non_null.rs
@@ -439,11 +439,11 @@ impl<T> NonNull<[T]> {
     /// ```rust
     /// #![feature(allocator_api, ptr_as_uninit)]
     ///
-    /// use std::alloc::{AllocRef, Layout, Global};
+    /// use std::alloc::{Allocator, Layout, Global};
     /// use std::mem::MaybeUninit;
     /// use std::ptr::NonNull;
     ///
-    /// let memory: NonNull<[u8]> = Global.alloc(Layout::new::<[u8; 32]>())?;
+    /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
     /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
     /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
     /// # #[allow(unused_variables)]
diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs
index 375b015ccc8df..819d57a934dc4 100644
--- a/library/std/src/alloc.rs
+++ b/library/std/src/alloc.rs
@@ -149,7 +149,7 @@ impl System {
         }
     }
 
-    // SAFETY: Same as `AllocRef::grow`
+    // SAFETY: Same as `Allocator::grow`
     #[inline]
     unsafe fn grow_impl(
         &self,
@@ -190,29 +190,29 @@ impl System {
             old_size => unsafe {
                 let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
-                AllocRef::dealloc(&self, ptr, old_layout);
+                Allocator::deallocate(&self, ptr, old_layout);
                 Ok(new_ptr)
             },
         }
     }
 }
 
-// The AllocRef impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
+// The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
 // which is in `std::sys::*::alloc`.
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl AllocRef for System {
+unsafe impl Allocator for System {
     #[inline]
-    fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
         self.alloc_impl(layout, false)
     }
 
     #[inline]
-    fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
         self.alloc_impl(layout, true)
     }
 
     #[inline]
-    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
         if layout.size() != 0 {
             // SAFETY: `layout` is non-zero in size,
             // other conditions must be upheld by the caller
@@ -257,7 +257,7 @@ unsafe impl AllocRef for System {
         match new_layout.size() {
             // SAFETY: conditions must be upheld by the caller
             0 => unsafe {
-                AllocRef::dealloc(&self, ptr, old_layout);
+                Allocator::deallocate(&self, ptr, old_layout);
                 Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
             },
 
@@ -277,9 +277,9 @@ unsafe impl AllocRef for System {
             // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
             // for `dealloc` must be upheld by the caller.
             new_size => unsafe {
-                let new_ptr = AllocRef::alloc(&self, new_layout)?;
+                let new_ptr = Allocator::allocate(&self, new_layout)?;
                 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                AllocRef::dealloc(&self, ptr, old_layout);
+                Allocator::deallocate(&self, ptr, old_layout);
                 Ok(new_ptr)
             },
         }
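The `System` hunks above use fully qualified calls (`Allocator::allocate(&self, ...)`) to disambiguate from the inherent `GlobalAlloc` methods of the same former names. A caller-side sketch of the renamed `grow` path in the same qualified style (illustrative only; assumes nightly `allocator_api`, `slice_ptr_get`, and `slice_ptr_len`):

    #![feature(allocator_api, slice_ptr_get, slice_ptr_len)]

    use std::alloc::{Allocator, Layout, System};

    fn main() {
        let old_layout = Layout::array::<u8>(4).unwrap();
        let new_layout = Layout::array::<u8>(16).unwrap();

        let ptr = System.allocate(old_layout).unwrap();

        unsafe {
            // `grow` takes the currently allocated block plus both layouts;
            // on success the old pointer must no longer be used.
            let grown =
                Allocator::grow(&System, ptr.as_non_null_ptr(), old_layout, new_layout).unwrap();
            assert!(grown.len() >= new_layout.size());

            Allocator::deallocate(&System, grown.as_non_null_ptr(), new_layout);
        }
    }
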
diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs
index dfb5d3e9e38d0..10cbc23c427f0 100644
--- a/src/test/ui/allocator/custom.rs
+++ b/src/test/ui/allocator/custom.rs
@@ -8,9 +8,8 @@
 
 extern crate helper;
 
-use std::alloc::{self, AllocRef, Global, Layout, System};
+use std::alloc::{self, Allocator, Global, Layout, System};
 use std::sync::atomic::{AtomicUsize, Ordering};
-use std::ptr::NonNull;
 
 static HITS: AtomicUsize = AtomicUsize::new(0);
 
@@ -24,7 +23,7 @@ unsafe impl alloc::GlobalAlloc for A {
 
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
         HITS.fetch_add(1, Ordering::SeqCst);
-        AllocRef::dealloc(&System, NonNull::new(ptr).unwrap(), layout)
+        alloc::GlobalAlloc::dealloc(&System, ptr, layout)
     }
 }
 
@@ -39,10 +38,10 @@ fn main() {
     unsafe {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
-        let memory = Global.alloc(layout.clone()).unwrap();
+        let memory = Global.allocate(layout.clone()).unwrap();
         helper::work_with(&memory);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
-        Global.dealloc(memory.as_non_null_ptr(), layout);
+        Global.deallocate(memory.as_non_null_ptr(), layout);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 2);
 
         let s = String::with_capacity(10);
@@ -51,10 +50,10 @@ fn main() {
         drop(s);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
 
-        let memory = System.alloc(layout.clone()).unwrap();
-        assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
+        let memory = System.allocate(layout.clone()).unwrap();
         helper::work_with(&memory);
-        System.dealloc(memory.as_non_null_ptr(), layout);
+        assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
+        System.deallocate(memory.as_non_null_ptr(), layout);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
     }
 }
diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs
index a1446b3664d4b..edd4df75e8b83 100644
--- a/src/test/ui/allocator/xcrate-use.rs
+++ b/src/test/ui/allocator/xcrate-use.rs
@@ -10,7 +10,7 @@
 extern crate custom;
 extern crate helper;
 
-use std::alloc::{AllocRef, Global, Layout, System};
+use std::alloc::{Allocator, Global, Layout, System};
 use std::sync::atomic::{AtomicUsize, Ordering};
 
 #[global_allocator]
@@ -21,16 +21,16 @@ fn main() {
         let n = GLOBAL.0.load(Ordering::SeqCst);
         let layout = Layout::from_size_align(4, 2).unwrap();
 
-        let memory = Global.alloc(layout.clone()).unwrap();
+        let memory = Global.allocate(layout.clone()).unwrap();
         helper::work_with(&memory);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
-        Global.dealloc(memory.as_non_null_ptr(), layout);
+        Global.deallocate(memory.as_non_null_ptr(), layout);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
 
-        let memory = System.alloc(layout.clone()).unwrap();
+        let memory = System.allocate(layout.clone()).unwrap();
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
         helper::work_with(&memory);
-        System.dealloc(memory.as_non_null_ptr(), layout);
+        System.deallocate(memory.as_non_null_ptr(), layout);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
     }
 }
diff --git a/src/test/ui/associated-types/defaults-wf.stderr b/src/test/ui/associated-types/defaults-wf.stderr
index 26c852601941a..4c43e6a182dc9 100644
--- a/src/test/ui/associated-types/defaults-wf.stderr
+++ b/src/test/ui/associated-types/defaults-wf.stderr
@@ -6,7 +6,7 @@ LL | type Ty = Vec<[u8]>;
    |
   ::: $SRC_DIR/alloc/src/vec.rs:LL:COL
    |
-LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
+LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
    |            - required by this bound in `Vec`
    |
    = help: the trait `Sized` is not implemented for `[u8]`
diff --git a/src/test/ui/bad/bad-sized.stderr b/src/test/ui/bad/bad-sized.stderr
index 10d12a09b2579..60a5bb9f78666 100644
--- a/src/test/ui/bad/bad-sized.stderr
+++ b/src/test/ui/bad/bad-sized.stderr
@@ -17,7 +17,7 @@ LL |     let x: Vec<dyn Trait> = Vec::new();
    |
   ::: $SRC_DIR/alloc/src/vec.rs:LL:COL
    |
-LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
+LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
    |            - required by this bound in `Vec`
    |
    = help: the trait `Sized` is not implemented for `dyn Trait`
diff --git a/src/test/ui/box/leak-alloc.rs b/src/test/ui/box/leak-alloc.rs
index 2e73d6f143213..3f0f39f448b91 100644
--- a/src/test/ui/box/leak-alloc.rs
+++ b/src/test/ui/box/leak-alloc.rs
@@ -1,26 +1,26 @@
 #![feature(allocator_api)]
 
-use std::alloc::{AllocError, AllocRef, Layout, System};
+use std::alloc::{AllocError, Allocator, Layout, System};
 use std::ptr::NonNull;
 use std::boxed::Box;
 
-struct Allocator {}
+struct Alloc {}
 
-unsafe impl AllocRef for Allocator {
-    fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        System.alloc(layout)
+unsafe impl Allocator for Alloc {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        System.allocate(layout)
     }
 
-    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
-        System.dealloc(ptr, layout)
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        System.deallocate(ptr, layout)
     }
 }
 
 fn use_value(_: u32) {}
 
 fn main() {
-    let alloc = Allocator {};
+    let alloc = Alloc {};
     let boxed = Box::new_in(10, alloc.by_ref());
     let theref = Box::leak(boxed);
     drop(alloc);
diff --git a/src/test/ui/error-codes/e0119/conflict-with-std.stderr b/src/test/ui/error-codes/e0119/conflict-with-std.stderr
index 9dc1a509cd09f..68551f4377591 100644
--- a/src/test/ui/error-codes/e0119/conflict-with-std.stderr
+++ b/src/test/ui/error-codes/e0119/conflict-with-std.stderr
@@ -6,7 +6,7 @@ LL | impl AsRef<Q> for Box<Q> {
    |
    = note: conflicting implementation in crate `alloc`:
            - impl<T, A> AsRef<T> for Box<T, A>
-             where A: AllocRef, T: ?Sized;
+             where A: Allocator, T: ?Sized;
 
 error[E0119]: conflicting implementations of trait `std::convert::From<S>` for type `S`:
   --> $DIR/conflict-with-std.rs:12:1
diff --git a/src/test/ui/issues/issue-20433.stderr b/src/test/ui/issues/issue-20433.stderr
index d40946ae03f50..3f7226c79bf2a 100644
--- a/src/test/ui/issues/issue-20433.stderr
+++ b/src/test/ui/issues/issue-20433.stderr
@@ -6,7 +6,7 @@ LL | fn iceman(c: Vec<[i32]>) {}
    |
   ::: $SRC_DIR/alloc/src/vec.rs:LL:COL
    |
-LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
+LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
    |            - required by this bound in `Vec`
    |
    = help: the trait `Sized` is not implemented for `[i32]`
diff --git a/src/test/ui/issues/issue-41974.stderr b/src/test/ui/issues/issue-41974.stderr
index cc4b3707dd663..cde285f73d6b8 100644
--- a/src/test/ui/issues/issue-41974.stderr
+++ b/src/test/ui/issues/issue-41974.stderr
@@ -6,7 +6,7 @@ LL | impl<T> Drop for T where T: A {
    |
    = note: conflicting implementation in crate `alloc`:
            - impl<T, A> Drop for Box<T, A>
-             where A: AllocRef, T: ?Sized;
+             where A: Allocator, T: ?Sized;
    = note: downstream crates may implement trait `A` for type `std::boxed::Box<_, _>`
 
 error[E0120]: the `Drop` trait may only be implemented for structs, enums, and unions
diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs
index 2e07fdcbe830c..92d98c16c60d8 100644
--- a/src/test/ui/realloc-16687.rs
+++ b/src/test/ui/realloc-16687.rs
@@ -7,7 +7,7 @@
 #![feature(allocator_api)]
 #![feature(slice_ptr_get)]
 
-use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
+use std::alloc::{handle_alloc_error, Allocator, Global, Layout};
 use std::ptr::{self, NonNull};
 
 fn main() {
@@ -42,7 +42,7 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }
 
-        let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let ptr = Global.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         if PRINT {
             println!("allocate({:?}) = {:?}", layout, ptr);
@@ -56,7 +56,7 @@ unsafe fn test_triangle() -> bool {
             println!("deallocate({:?}, {:?}", ptr, layout);
         }
 
-        Global.dealloc(NonNull::new_unchecked(ptr), layout);
+        Global.deallocate(NonNull::new_unchecked(ptr), layout);
     }
 
     unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs
index ad4b9c352aefd..9d0ca76e4095d 100644
--- a/src/test/ui/regions/regions-mock-codegen.rs
+++ b/src/test/ui/regions/regions-mock-codegen.rs
@@ -4,7 +4,7 @@
 // pretty-expanded FIXME #23616
 #![feature(allocator_api)]
 
-use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
+use std::alloc::{handle_alloc_error, Allocator, Global, Layout};
 use std::ptr::NonNull;
 
 struct arena(());
@@ -22,23 +22,23 @@ struct Ccx {
     x: isize,
 }
 
-fn alloc(_bcx: &arena) -> &Bcx<'_> {
+fn allocate(_bcx: &arena) -> &Bcx<'_> {
     unsafe {
         let layout = Layout::new::<Bcx>();
-        let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let ptr = Global.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout));
         &*(ptr.as_ptr() as *const _)
     }
 }
 
 fn h<'a>(bcx: &'a Bcx<'a>) -> &'a Bcx<'a> {
-    return alloc(bcx.fcx.arena);
+    return allocate(bcx.fcx.arena);
 }
 
 fn g(fcx: &Fcx) {
     let bcx = Bcx { fcx };
     let bcx2 = h(&bcx);
     unsafe {
-        Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
+        Global.deallocate(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
     }
 }
diff --git a/src/test/ui/unique-object-noncopyable.stderr b/src/test/ui/unique-object-noncopyable.stderr
index 2e23ddd905361..09cbb8753387a 100644
--- a/src/test/ui/unique-object-noncopyable.stderr
+++ b/src/test/ui/unique-object-noncopyable.stderr
@@ -22,7 +22,7 @@ LL |     fn clone(&self) -> Self;
    |
 LL | / pub struct Box<
 LL | |     T: ?Sized,
-LL | |     #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
+LL | |     #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
 LL | | >(Unique<T>, A);
    | |________________- doesn't satisfy `Box: Clone`
    |
diff --git a/src/test/ui/unique-pinned-nocopy.stderr b/src/test/ui/unique-pinned-nocopy.stderr
index d533724a009dc..bc081024182af 100644
--- a/src/test/ui/unique-pinned-nocopy.stderr
+++ b/src/test/ui/unique-pinned-nocopy.stderr
@@ -19,7 +19,7 @@ LL |     fn clone(&self) -> Self;
    |
 LL | / pub struct Box<
 LL | |     T: ?Sized,
-LL | |     #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
+LL | |     #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
 LL | | >(Unique<T>, A);
    | |________________- doesn't satisfy `Box: Clone`
    |
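Taken together, the migration for downstream users of the unstable `allocator_api` is mechanical: `AllocRef` becomes `Allocator`, and `alloc`/`alloc_zeroed`/`dealloc` become `allocate`/`allocate_zeroed`/`deallocate`. A sketch of an affected call site after this patch, with the pre-patch spelling shown in comments (illustrative only; assumes nightly `allocator_api` and `slice_ptr_get`):

    #![feature(allocator_api, slice_ptr_get)]

    use std::alloc::{AllocError, Allocator, Global, Layout};

    fn roundtrip() -> Result<(), AllocError> {
        let layout = Layout::new::<u32>();

        // Before: let ptr = Global.alloc(layout)?;
        let ptr = Global.allocate(layout)?;

        // Before: unsafe { Global.dealloc(ptr.as_non_null_ptr(), layout) };
        unsafe { Global.deallocate(ptr.as_non_null_ptr(), layout) };
        Ok(())
    }

    fn main() {
        roundtrip().unwrap();
    }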