/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use std::{
    alloc::Layout,
    ptr::{self, NonNull},
};

use allocator_api2::alloc::{AllocError, Allocator};

const CHUNK_ALIGNMENT: usize = 32;
const DEFAULT_CHUNK_SIZE: usize = 128 * 1024;

/// A simple bump allocator, sub-allocating from fixed-size chunks that are provided
/// by a parent allocator.
///
/// If an allocation is larger than the chunk size, a chunk sufficiently large to contain
/// the allocation is added.
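///
/// # Example
///
/// A minimal usage sketch (not part of the original documentation), assuming
/// `allocator_api2::alloc::Global` as the parent allocator:
///
/// ```ignore
/// use std::alloc::Layout;
/// use allocator_api2::alloc::Global;
///
/// let mut bump = BumpAllocator::new_in(Global);
/// let layout = Layout::from_size_align(64, 8).unwrap();
/// let allocation = bump.allocate_item(layout).unwrap();
/// // ... use the memory ...
/// bump.deallocate_item(allocation.cast(), layout);
/// ```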
pub struct BumpAllocator<A: Allocator> {
    /// The chunk we are currently allocating from.
    current_chunk: NonNull<Chunk>,
    /// The default size for chunks.
    chunk_size: usize,
    /// For debugging.
    allocation_count: i32,
    /// The allocator that provides the chunks.
    parent_allocator: A,

    stats: Stats,
}

impl<A: Allocator> BumpAllocator<A> {
    pub fn new_in(parent_allocator: A) -> Self {
        Self::with_chunk_size_in(DEFAULT_CHUNK_SIZE, parent_allocator)
    }

    pub fn with_chunk_size_in(chunk_size: usize, parent_allocator: A) -> Self {
        let mut stats = Stats::default();
        stats.chunks = 1;
        stats.reserved_bytes += chunk_size;
        BumpAllocator {
            current_chunk: Chunk::allocate_chunk(
                chunk_size,
                None,
                &parent_allocator
            ).unwrap(),
            chunk_size,
            parent_allocator,
            allocation_count: 0,

            stats,
        }
    }

    pub fn get_stats(&mut self) -> Stats {
        // Approximation: assume that all chunks other than the current one are
        // fully utilized.
        self.stats.chunk_utilization = self.stats.chunks as f32 - 1.0 + Chunk::utilization(self.current_chunk);
        self.stats
    }

    pub fn reset_stats(&mut self) {
        let chunks = self.stats.chunks;
        let reserved_bytes = self.stats.reserved_bytes;
        self.stats = Stats::default();
        self.stats.chunks = chunks;
        self.stats.reserved_bytes = reserved_bytes;
    }

    pub fn allocate_item(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.stats.allocations += 1;
        self.stats.allocated_bytes += layout.size();

        // Fast path: bump-allocate from the current chunk.
        if let Ok(alloc) = Chunk::allocate_item(self.current_chunk, layout) {
            self.allocation_count += 1;
            return Ok(alloc);
        }

        // The current chunk is full: allocate a new one and retry.
        self.alloc_chunk(layout.size())?;

        match Chunk::allocate_item(self.current_chunk, layout) {
            Ok(alloc) => {
                self.allocation_count += 1;
                Ok(alloc)
            }
            Err(_) => Err(AllocError),
        }
    }

    pub fn deallocate_item(&mut self, ptr: NonNull<u8>, layout: Layout) {
        self.stats.deallocations += 1;

        if Chunk::contains_item(self.current_chunk, ptr) {
            unsafe {
                Chunk::deallocate_item(self.current_chunk, ptr, layout);
            }
        }

        self.allocation_count -= 1;
        debug_assert!(self.allocation_count >= 0);
    }

    pub unsafe fn grow_item(&mut self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        self.stats.reallocations += 1;

        // Try to grow in place if the item is the most recent allocation of the
        // current chunk.
        if Chunk::contains_item(self.current_chunk, ptr) {
            if let Ok(alloc) = Chunk::grow_item(self.current_chunk, ptr, old_layout, new_layout) {
                self.stats.in_place_reallocations += 1;
                return Ok(alloc);
            }
        }

        // Otherwise, allocate a new item and copy the old contents over.
        let new_alloc = if let Ok(alloc) = Chunk::allocate_item(self.current_chunk, new_layout) {
            alloc
        } else {
            self.alloc_chunk(new_layout.size())?;
            Chunk::allocate_item(self.current_chunk, new_layout).map_err(|_| AllocError)?
        };

        self.stats.reallocated_bytes += old_layout.size();

        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_alloc.as_ptr().cast(), old_layout.size());
        }

        Ok(new_alloc)
    }

    pub unsafe fn shrink_item(&mut self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        if Chunk::contains_item(self.current_chunk, ptr) {
            return unsafe { Ok(Chunk::shrink_item(self.current_chunk, ptr, old_layout, new_layout)) };
        }

        // Can't actually shrink, so return the full range of the previous allocation.
        Ok(NonNull::slice_from_raw_parts(ptr, old_layout.size()))
    }

    fn alloc_chunk(&mut self, item_size: usize) -> Result<(), AllocError> {
        // Make sure the new chunk is large enough for the item plus the chunk header.
        let chunk_size = self.chunk_size.max(align(item_size, CHUNK_ALIGNMENT) + CHUNK_ALIGNMENT);
        self.stats.reserved_bytes += chunk_size;
        let chunk = Chunk::allocate_chunk(
            chunk_size,
            None,
            &self.parent_allocator
        )?;

        unsafe {
            (*chunk.as_ptr()).previous = Some(self.current_chunk);
        }
        self.current_chunk = chunk;

        self.stats.chunks += 1;

        Ok(())
    }
}

impl<A: Allocator> Drop for BumpAllocator<A> {
    fn drop(&mut self) {
        assert!(self.allocation_count == 0);
        // Walk the chunk list and return every chunk to the parent allocator.
        let mut iter = Some(self.current_chunk);
        while let Some(chunk) = iter {
            iter = unsafe { (*chunk.as_ptr()).previous };
            Chunk::deallocate_chunk(chunk, &self.parent_allocator)
        }
    }
}

/// A contiguous buffer of memory holding multiple sub-allocations.
pub struct Chunk {
    /// The previously allocated chunk, if any.
    previous: Option<NonNull<Chunk>>,
    /// Pointer to the next free byte (the next allocation starts here).
    cursor: *mut u8,
    /// Points to the first byte after the chunk's buffer.
    chunk_end: *mut u8,
    /// Size of the chunk in bytes, including this header.
    size: usize,
}

impl Chunk {
    pub fn allocate_chunk(
        size: usize,
        previous: Option<NonNull<Chunk>>,
        allocator: &dyn Allocator,
    ) -> Result<NonNull<Self>, AllocError> {
        assert!(size < usize::MAX / 2);

        let layout = match Layout::from_size_align(size, CHUNK_ALIGNMENT) {
            Ok(layout) => layout,
            Err(_) => {
                return Err(AllocError);
            }
        };

        let alloc = allocator.allocate(layout)?;
        let chunk: NonNull<Chunk> = alloc.cast();
        let chunk_start: *mut u8 = alloc.cast().as_ptr();

        unsafe {
            // The `Chunk` header lives at the start of the buffer; the first
            // `CHUNK_ALIGNMENT` bytes are reserved for it.
            let chunk_end = chunk_start.add(size);
            let cursor = chunk_start.add(CHUNK_ALIGNMENT);
            ptr::write(
                chunk.as_ptr(),
                Chunk {
                    previous,
                    chunk_end,
                    cursor,
                    size,
                },
            );
        }

        Ok(chunk)
    }

    pub fn deallocate_chunk(this: NonNull<Chunk>, allocator: &dyn Allocator) {
        let size = unsafe { (*this.as_ptr()).size };
        let layout = Layout::from_size_align(size, CHUNK_ALIGNMENT).unwrap();

        unsafe {
            allocator.deallocate(this.cast(), layout);
        }
    }

    pub fn allocate_item(this: NonNull<Chunk>, layout: Layout) -> Result<NonNull<[u8]>, ()> {
        // Common wisdom would be to always bump addresses downward (https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html).
        // However, bump allocation does not show up in profiles with the current workloads
        // so we can keep things simple for now.
        debug_assert!(CHUNK_ALIGNMENT % layout.align() == 0);
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align().is_power_of_two());

        let size = align(layout.size(), CHUNK_ALIGNMENT);

        unsafe {
            let cursor = (*this.as_ptr()).cursor;
            let end = (*this.as_ptr()).chunk_end;
            let available_size = end.offset_from(cursor);

            if size as isize > available_size {
                return Err(());
            }

            let next = cursor.add(size);

            (*this.as_ptr()).cursor = next;

            let cursor = NonNull::new(cursor).unwrap();
            let suballocation: NonNull<[u8]> = NonNull::slice_from_raw_parts(cursor, size);

            Ok(suballocation)
        }
    }

    pub unsafe fn deallocate_item(this: NonNull<Chunk>, item: NonNull<u8>, layout: Layout) {
        debug_assert!(Chunk::contains_item(this, item));

        unsafe {
            let size = align(layout.size(), CHUNK_ALIGNMENT);
            let item_end = item.as_ptr().add(size);

            // If the item is the last allocation, then move the cursor back
            // to reuse its memory.
            if item_end == (*this.as_ptr()).cursor {
                (*this.as_ptr()).cursor = item.as_ptr();
            }

            // Otherwise, deallocation is a no-op.
        }
    }

    pub unsafe fn grow_item(this: NonNull<Chunk>, item: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, ()> {
        debug_assert!(Chunk::contains_item(this, item));

        let old_size = align(old_layout.size(), CHUNK_ALIGNMENT);
        let new_size = align(new_layout.size(), CHUNK_ALIGNMENT);
        let old_item_end = item.as_ptr().add(old_size);

        if old_item_end != (*this.as_ptr()).cursor {
            return Err(());
        }

        // The item is the last allocation: we can grow it in place by simply
        // moving the cursor, provided the new size fits in the chunk.

        let chunk_end = (*this.as_ptr()).chunk_end;
        let available_size = chunk_end.offset_from(item.as_ptr());

        if new_size as isize > available_size {
            // Does not fit.
            return Err(());
        }

        let new_item_end = item.as_ptr().add(new_size);
        (*this.as_ptr()).cursor = new_item_end;

        Ok(NonNull::slice_from_raw_parts(item, new_size))
    }

    pub unsafe fn shrink_item(this: NonNull<Chunk>, item: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> NonNull<[u8]> {
        debug_assert!(Chunk::contains_item(this, item));

        let old_size = align(old_layout.size(), CHUNK_ALIGNMENT);
        let new_size = align(new_layout.size(), CHUNK_ALIGNMENT);
        let old_item_end = item.as_ptr().add(old_size);

        // If the item is the last allocation, move the cursor back so that the
        // freed bytes can be reused. Otherwise, shrinking is a no-op.
        if old_item_end == (*this.as_ptr()).cursor {
            let new_item_end = item.as_ptr().add(new_size);
            (*this.as_ptr()).cursor = new_item_end;
        }

        NonNull::slice_from_raw_parts(item, new_size)
    }

    pub fn contains_item(this: NonNull<Chunk>, item: NonNull<u8>) -> bool {
        unsafe {
            // The usable range starts after the chunk header.
            let start: *mut u8 = this.cast::<u8>().as_ptr().add(CHUNK_ALIGNMENT);
            let end: *mut u8 = (*this.as_ptr()).chunk_end;
            let item = item.as_ptr();

            start <= item && item < end
        }
    }

    fn available_size(this: NonNull<Chunk>) -> usize {
        unsafe {
            let this = this.as_ptr();
            (*this).chunk_end.offset_from((*this).cursor) as usize
        }
    }

    fn utilization(this: NonNull<Chunk>) -> f32 {
        let size = unsafe { (*this.as_ptr()).size } as f32;
        (size - Chunk::available_size(this) as f32) / size
    }
}

/// Rounds `val` up to the nearest multiple of `alignment`.
fn align(val: usize, alignment: usize) -> usize {
    let rem = val % alignment;
    if rem == 0 {
        return val;
    }

    val.checked_add(alignment).unwrap() - rem
}

/// Counters tracked by the bump allocator, for debugging and profiling.
#[derive(Copy, Clone, Debug, Default)]
pub struct Stats {
    pub chunks: u32,
    pub chunk_utilization: f32,
    pub allocations: u32,
    pub deallocations: u32,
    pub reallocations: u32,
    pub in_place_reallocations: u32,

    pub reallocated_bytes: usize,
    pub allocated_bytes: usize,
    pub reserved_bytes: usize,
}
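
// A small smoke test (not part of the original change): a sketch that assumes
// `allocator_api2::alloc::Global` is available as the parent allocator.
#[cfg(test)]
mod tests {
    use super::*;
    use allocator_api2::alloc::Global;

    #[test]
    fn basic_usage() {
        // `align` rounds up to the next multiple of the alignment.
        assert_eq!(align(1, CHUNK_ALIGNMENT), CHUNK_ALIGNMENT);
        assert_eq!(align(32, 32), 32);
        assert_eq!(align(33, 32), 64);

        let mut bump = BumpAllocator::new_in(Global);

        // A few small allocations are served from the first chunk.
        let layout = Layout::from_size_align(64, 8).unwrap();
        let a = bump.allocate_item(layout).unwrap();
        let b = bump.allocate_item(layout).unwrap();

        // An allocation larger than the default chunk size forces a new,
        // sufficiently large chunk to be added.
        let big_layout = Layout::from_size_align(DEFAULT_CHUNK_SIZE * 2, 8).unwrap();
        let c = bump.allocate_item(big_layout).unwrap();

        bump.deallocate_item(c.cast(), big_layout);
        bump.deallocate_item(b.cast(), layout);
        bump.deallocate_item(a.cast(), layout);

        let stats = bump.get_stats();
        assert_eq!(stats.allocations, 3);
        assert_eq!(stats.deallocations, 3);
        assert!(stats.chunks >= 2);
    }
}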