@@ -1,6 +1,6 @@
 #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")]
 
-use core::alloc::{LayoutError, GlobalCoAllocMeta};
+use core::alloc::{self, LayoutError, GlobalCoAllocMeta};
 use core::cmp;
 use core::intrinsics;
 use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
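
Reviewer note: the only functional change in this hunk is adding `self` to the import list, which brings the `core::alloc` module itself into scope so that the `alloc::co_alloc_metadata_num_slots::<A>()` paths used below resolve. A minimal, unrelated illustration of the `self`-in-a-use-list mechanism:

    use core::alloc::{self, Layout};

    fn u64_layout_align() -> usize {
        // `Layout` is imported directly; `alloc::Layout` also resolves because
        // `self` imported the `core::alloc` module under the local name `alloc`.
        let layout: Layout = alloc::Layout::new::<u64>();
        layout.align()
    }
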
@@ -49,12 +49,19 @@ enum AllocInit {
 /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
 /// `Box<[T]>`, since `capacity()` won't yield the length.
 #[allow(missing_debug_implementations)]
-pub(crate) struct RawVec<T, A: Allocator = Global> {
+// @TODO
+// 1. make const generic _coop come from the target specification
+// 2. apply `_coop` with logical && to `A::IsCoAllocator`
+pub(crate) struct RawVec<T, A: Allocator = Global, const _coop: bool = true>
+where [(); alloc::co_alloc_metadata_num_slots::<A>()]:
+{
     ptr: Unique<T>,
     cap: usize,
     alloc: A,
-    #[allow(dead_code)]
-    pub(crate) meta: GlobalCoAllocMeta,
+    // As of v1.67.0, `cmp` for `TypeId` is not `const`, unfortunately:
+    //pub(crate) meta: [GlobalCoAllocMeta; {if core::any::TypeId::of::<A>()==core::any::TypeId::of::<Global>() {1} else {0}}],
+    //pub(crate) meta: [GlobalCoAllocMeta; mem::size_of::<A::IsCoAllocator>()],
+    pub(crate) meta: [GlobalCoAllocMeta; alloc::co_alloc_metadata_num_slots::<A>()],
 }
 
 impl<T> RawVec<T, Global> {
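
Reviewer note on the hunk above: the `where [(); alloc::co_alloc_metadata_num_slots::<A>()]:` bound is the `generic_const_exprs` idiom that lets `meta` be an array whose length is computed per allocator type, so the field can collapse to a zero-length (and therefore zero-sized) array for allocators that need no co-allocation metadata. A minimal standalone sketch of the same pattern, using illustrative names only (these are not the actual `core::alloc` items):

    #![feature(generic_const_exprs)]
    #![allow(incomplete_features)]

    // Stand-in for `GlobalCoAllocMeta`.
    #[derive(Clone, Copy, Default)]
    struct Meta;

    // Stand-in for `alloc::co_alloc_metadata_num_slots::<A>()`. The real
    // function would decide per allocator `A`; this sketch hard-codes zero,
    // which makes the `meta` field below zero-sized.
    const fn metadata_num_slots<A>() -> usize {
        0
    }

    // Same shape as the patched `RawVec`: the array length is a generic
    // const expression, so the bound must be repeated wherever the type
    // is named.
    struct RawBuf<T, A>
    where
        [(); metadata_num_slots::<A>()]:,
    {
        ptr: *mut T,
        cap: usize,
        alloc: A,
        meta: [Meta; metadata_num_slots::<A>()],
    }
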
@@ -104,7 +111,9 @@ impl<T> RawVec<T, Global> {
     }
 }
 
-impl<T, A: Allocator> RawVec<T, A> {
+impl<T, A: Allocator> RawVec<T, A>
+where [(); alloc::co_alloc_metadata_num_slots::<A>()]:
+{
     // Tiny Vecs are dumb. Skip to:
     // - 8 if the element size is 1, because any heap allocators is likely
     //   to round up a request of less than 8 bytes to at least 8 bytes.
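
The remaining hunks are mechanical consequences of that bound: every `impl` block, and (as the next hunk shows) even an individual function that merely mentions `RawVec<T, A>`, has to repeat the same `where` clause so the const expression in the struct definition is provably well-formed. Generic code outside this file faces the same requirement; a hypothetical helper (not part of this patch) would look like:

    fn spare_capacity<T, A: Allocator>(buf: &RawVec<T, A>, len: usize) -> usize
    where
        [(); alloc::co_alloc_metadata_num_slots::<A>()]:,
    {
        // Without repeating the bound, merely naming `RawVec<T, A>` in the
        // signature would not type-check under `generic_const_exprs`.
        buf.capacity().saturating_sub(len)
    }
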
@@ -284,7 +293,9 @@ impl<T, A: Allocator> RawVec<T, A> {
             slf: &mut RawVec<T, A>,
             len: usize,
             additional: usize,
-        ) {
+        )
+        where [(); alloc::co_alloc_metadata_num_slots::<A>()]:
+        {
             handle_reserve(slf.grow_amortized(len, additional));
         }
 
@@ -357,14 +368,18 @@ impl<T, A: Allocator> RawVec<T, A> {
     }
 }
 
-impl<T, A: Allocator> RawVec<T, A> {
+impl<T, A: Allocator> RawVec<T, A>
+where [(); alloc::co_alloc_metadata_num_slots::<A>()]:
+{
     /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
     /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
-    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
+    fn needs_to_grow(&self, len: usize, additional: usize) -> bool
+    {
         additional > self.capacity().wrapping_sub(len)
     }
 
-    fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
+    fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize)
+    {
         // Allocators currently return a `NonNull<[u8]>` whose length matches
         // the size requested. If that ever changes, the capacity here should
         // change to `ptr.len() / mem::size_of::<T>()`.
@@ -475,24 +490,27 @@ where
     memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
 }
 
-unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
+unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A>
+where [(); alloc::co_alloc_metadata_num_slots::<A>()]:
+{
     /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
     default fn drop(&mut self) {
         if let Some((ptr, layout)) = self.current_memory() {
-            unsafe { self.alloc.deallocate(ptr, layout) }
+            unsafe { self.alloc.co_deallocate(ptr, layout) }
         }
     }
 }
 
-unsafe impl<#[may_dangle] T> Drop for RawVec<T, Global> {
+// @TODO Custom
+/*unsafe impl<#[may_dangle] T> Drop for RawVec<T, Global> {
     /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
     fn drop(&mut self) {
         // @TODO
         if let Some((ptr, layout)) = self.current_memory() {
             unsafe { self.alloc.deallocate(ptr, layout) }
         }
     }
-}
+}*/
 
 // Central function for reserve error handling.
 #[cfg(not(no_global_oom_handling))]
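
Reviewer note on the last hunk: with the `RawVec<T, Global>` specialization commented out (pending the `@TODO Custom` work), every `RawVec` drops through the blanket `default fn drop`, so even a plain `Vec` backed by `Global` now frees its buffer via `co_deallocate` rather than `deallocate`. Illustrative only, assuming `Vec` still builds on `RawVec<T, Global>`:

    fn main() {
        // Buffer comes from `RawVec<u8, Global>`.
        let v: Vec<u8> = Vec::with_capacity(64);
        // Dropping takes the blanket impl above:
        // `RawVec::drop` -> `self.alloc.co_deallocate(ptr, layout)`.
        drop(v);
    }
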