@@ -366,16 +366,18 @@ unsafe impl<T: Send> Send for TypedArena<T> {}
 
 #[inline(always)]
 fn align_down(val: usize, align: usize) -> usize {
-    assert!(align.is_power_of_two());
+    debug_assert!(align.is_power_of_two());
     val & !(align - 1)
 }
 
 #[inline(always)]
-fn align(val: usize, align: usize) -> usize {
-    assert!(align.is_power_of_two());
+fn align_up(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
     (val + align - 1) & !(align - 1)
 }
 
+// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
+// to optimize away alignment code.
 const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
 
 /// An arena that can hold objects of multiple different types that impl `Copy`
@@ -390,6 +392,8 @@ pub struct DroplessArena {
     /// start. (This is slightly simpler and faster than allocating upwards,
     /// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
     /// When this pointer crosses the start pointer, a new chunk is allocated.
+    ///
+    /// This is kept aligned to DROPLESS_ALIGNMENT.
     end: Cell<*mut u8>,
 
     /// A vector of arena chunks.
@@ -433,13 +437,16 @@ impl DroplessArena {
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let mut chunk = ArenaChunk::new(align(new_cap, PAGE));
+            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
             self.start.set(chunk.start());
 
             // Align the end to DROPLESS_ALIGNMENT
             let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
-            // Make sure we don't go past `start`
-            let end = cmp::max(chunk.start().addr(), end);
+
+            // Make sure we don't go past `start`. This should not happen since the allocation
+            // should be at least DROPLESS_ALIGNMENT - 1 bytes.
+            debug_assert!(chunk.start().addr() <= end);
+
             self.end.set(chunk.end().with_addr(end));
 
             chunks.push(chunk);
@@ -469,14 +476,16 @@ impl DroplessArena {
         let end = old_end.addr();
 
         // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
-        let bytes = align(layout.size(), DROPLESS_ALIGNMENT);
+        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
 
         // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
         unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
 
         let new_end = align_down(end.checked_sub(bytes)?, layout.align());
         if start <= new_end {
             let new_end = old_end.with_addr(new_end);
+            // `new_end` is aligned to DROPLESS_ALIGNMENT since `align_down` preserves alignment
+            // and both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
             self.end.set(new_end);
             Some(new_end)
         } else {
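
To illustrate the invariant these hunks rely on, here is a minimal standalone sketch (not part of the commit, and separate from the rustc code): it mirrors the two alignment helpers and shows why, once `end` and the rounded-up byte count are both multiples of `DROPLESS_ALIGNMENT`, the per-allocation `align_down` for pointer-aligned layouts is a no-op that the compiler can drop. The concrete addresses and sizes are made up for the example.

```rust
// Same bit tricks as the helpers in the diff above.
fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

fn main() {
    const DROPLESS_ALIGNMENT: usize = std::mem::align_of::<usize>(); // 8 on 64-bit targets

    // Rounding the requested size up to DROPLESS_ALIGNMENT keeps the bump pointer
    // aligned after every allocation.
    let bytes = align_up(13, DROPLESS_ALIGNMENT);
    assert_eq!(bytes, 16);

    // Bump downwards from an already-aligned end: subtracting an aligned byte count
    // yields an address that is still a multiple of DROPLESS_ALIGNMENT, so rounding it
    // down to any alignment that divides DROPLESS_ALIGNMENT leaves it unchanged.
    let end: usize = 0x1000; // hypothetical chunk end, already aligned
    let new_end = align_down(end - bytes, DROPLESS_ALIGNMENT);
    assert_eq!(new_end, end - bytes);
}
```

In the real `alloc_raw` fast path the round-down uses `layout.align()` rather than `DROPLESS_ALIGNMENT`; the same argument applies whenever `layout.align()` divides `DROPLESS_ALIGNMENT`, which is the common case for pointer-aligned compiler types the top comment mentions.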