 #![allow(deprecated)]
 
 extern crate alloc;
+extern crate rustc_data_structures;
+
+use rustc_data_structures::lock::Lock;
 
 use std::cell::{Cell, RefCell};
 use std::cmp;
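
Note for readers following the locking change: the patch only relies on three operations of the imported `Lock` type, namely `new`, `lock`, and `get_mut`. A minimal stand-in with that interface, sketched over `std::sync::Mutex` purely for illustration (an assumption; the actual `rustc_data_structures::lock::Lock` may be implemented differently):

```rust
// Sketch of the `Lock` interface this patch depends on.
use std::sync::{Mutex, MutexGuard};

pub struct Lock<T>(Mutex<T>);

impl<T> Lock<T> {
    pub fn new(inner: T) -> Lock<T> {
        Lock(Mutex::new(inner))
    }

    /// Blocks until the lock is acquired; the guard derefs to `T`,
    /// which is why the patch can write `this.ptr` below.
    pub fn lock(&self) -> MutexGuard<T> {
        self.0.lock().unwrap()
    }

    /// `&mut self` already proves exclusive access, so no runtime
    /// locking is needed (used in the `Drop` impls below).
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut().unwrap()
    }
}
```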
@@ -46,6 +49,10 @@ use alloc::raw_vec::RawVec;
 
 /// An arena that can hold objects of only one type.
 pub struct TypedArena<T> {
+    lock: Lock<TypedArenaInner<T>>,
+}
+
+struct TypedArenaInner<T> {
     /// A pointer to the next object to be allocated.
     ptr: Cell<*mut T>,
 
@@ -109,38 +116,102 @@ impl<T> TypedArenaChunk<T> {
 
 const PAGE: usize = 4096;
 
+impl<T> TypedArenaInner<T> {
+    /// Grows the arena.
+    #[inline(never)]
+    #[cold]
+    fn grow(&self, n: usize) {
+        unsafe {
+            let mut chunks = self.chunks.borrow_mut();
+            let (chunk, mut new_capacity);
+            if let Some(last_chunk) = chunks.last_mut() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
+                    self.end.set(last_chunk.end());
+                    return;
+                } else {
+                    new_capacity = last_chunk.storage.cap();
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= currently_used_cap + n {
+                            break;
+                        }
+                    }
+                }
+            } else {
+                let elem_size = cmp::max(1, mem::size_of::<T>());
+                new_capacity = cmp::max(n, PAGE / elem_size);
+            }
+            chunk = TypedArenaChunk::<T>::new(new_capacity);
+            self.ptr.set(chunk.start());
+            self.end.set(chunk.end());
+            chunks.push(chunk);
+        }
+    }
+
+    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
+    // chunks.
+    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
+        // Determine how much was filled.
+        let start = last_chunk.start() as usize;
+        // We obtain the value of the pointer to the first uninitialized element.
+        let end = self.ptr.get() as usize;
+        // We then calculate the number of elements to be dropped in the last chunk,
+        // which is the filled area's length.
+        let diff = if mem::size_of::<T>() == 0 {
+            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
+            // the number of zero-sized values in the last and only chunk, just out of caution.
+            // Recall that `end` was incremented for each allocated value.
+            end - start
+        } else {
+            (end - start) / mem::size_of::<T>()
+        };
+        // Pass that to the `destroy` method.
+        unsafe {
+            last_chunk.destroy(diff);
+        }
+        // Reset the chunk.
+        self.ptr.set(last_chunk.start());
+    }
+}
+
 impl<T> TypedArena<T> {
     /// Creates a new `TypedArena`.
     #[inline]
     pub fn new() -> TypedArena<T> {
         TypedArena {
-            // We set both `ptr` and `end` to 0 so that the first call to
-            // alloc() will trigger a grow().
-            ptr: Cell::new(0 as *mut T),
-            end: Cell::new(0 as *mut T),
-            chunks: RefCell::new(vec![]),
-            _own: PhantomData,
+            lock: Lock::new(TypedArenaInner {
+                // We set both `ptr` and `end` to 0 so that the first call to
+                // alloc() will trigger a grow().
+                ptr: Cell::new(0 as *mut T),
+                end: Cell::new(0 as *mut T),
+                chunks: RefCell::new(vec![]),
+                _own: PhantomData,
+            })
         }
     }
 
     /// Allocates an object in the `TypedArena`, returning a reference to it.
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
-        if self.ptr == self.end {
-            self.grow(1)
+        let this = self.lock.lock();
+
+        if this.ptr == this.end {
+            this.grow(1)
         }
 
         unsafe {
             if mem::size_of::<T>() == 0 {
-                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
+                this.ptr.set(intrinsics::arith_offset(this.ptr.get() as *mut u8, 1) as *mut T);
                 let ptr = mem::align_of::<T>() as *mut T;
                 // Don't drop the object. This `write` is equivalent to `forget`.
                 ptr::write(ptr, object);
                 &mut *ptr
             } else {
-                let ptr = self.ptr.get();
+                let ptr = this.ptr.get();
                 // Advance the pointer.
-                self.ptr.set(self.ptr.get().offset(1));
+                this.ptr.set(this.ptr.get().offset(1));
                 // Write into uninitialized memory.
                 ptr::write(ptr, object);
                 &mut *ptr
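
The public signature of `alloc` is untouched, so callers need no changes. The guard from `self.lock.lock()` is held for the whole body and released on return, while the returned reference points into chunk storage that outlives the guard. A hypothetical caller, for illustration:

```rust
// Hypothetical caller: the API is identical before and after the patch.
fn demo(arena: &TypedArena<u32>) -> &mut u32 {
    let value = arena.alloc(41); // takes and releases the internal lock
    *value += 1;                 // reference stays valid: it points into
    value                        //   the arena's chunk, not the guard
}
```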
@@ -160,61 +231,32 @@ impl<T> TypedArena<T> {
         assert!(mem::size_of::<T>() != 0);
         assert!(slice.len() != 0);
 
-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let this = self.lock.lock();
+
+        let available_capacity_bytes = this.end.get() as usize - this.ptr.get() as usize;
         let at_least_bytes = slice.len() * mem::size_of::<T>();
         if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
+            this.grow(slice.len());
         }
 
         unsafe {
-            let start_ptr = self.ptr.get();
+            let start_ptr = this.ptr.get();
             let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
+            this.ptr.set(start_ptr.offset(arena_slice.len() as isize));
             arena_slice.copy_from_slice(slice);
             arena_slice
         }
     }
 
-    /// Grows the arena.
-    #[inline(never)]
-    #[cold]
-    fn grow(&self, n: usize) {
-        unsafe {
-            let mut chunks = self.chunks.borrow_mut();
-            let (chunk, mut new_capacity);
-            if let Some(last_chunk) = chunks.last_mut() {
-                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
-                let currently_used_cap = used_bytes / mem::size_of::<T>();
-                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
-                    self.end.set(last_chunk.end());
-                    return;
-                } else {
-                    new_capacity = last_chunk.storage.cap();
-                    loop {
-                        new_capacity = new_capacity.checked_mul(2).unwrap();
-                        if new_capacity >= currently_used_cap + n {
-                            break;
-                        }
-                    }
-                }
-            } else {
-                let elem_size = cmp::max(1, mem::size_of::<T>());
-                new_capacity = cmp::max(n, PAGE / elem_size);
-            }
-            chunk = TypedArenaChunk::<T>::new(new_capacity);
-            self.ptr.set(chunk.start());
-            self.end.set(chunk.end());
-            chunks.push(chunk);
-        }
-    }
-
     /// Clears the arena. Deallocates all but the longest chunk which may be reused.
     pub fn clear(&mut self) {
+        let this = self.lock.lock();
+
         unsafe {
             // Clear the last chunk, which is partially filled.
-            let mut chunks_borrow = self.chunks.borrow_mut();
+            let mut chunks_borrow = this.chunks.borrow_mut();
             if let Some(mut last_chunk) = chunks_borrow.pop() {
-                self.clear_last_chunk(&mut last_chunk);
+                this.clear_last_chunk(&mut last_chunk);
                 // If `T` is ZST, code below has no effect.
                 for mut chunk in chunks_borrow.drain(..) {
                     let cap = chunk.storage.cap();
@@ -224,41 +266,18 @@ impl<T> TypedArena<T> {
             }
         }
     }
-
-    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
-    // chunks.
-    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
-        // Determine how much was filled.
-        let start = last_chunk.start() as usize;
-        // We obtain the value of the pointer to the first uninitialized element.
-        let end = self.ptr.get() as usize;
-        // We then calculate the number of elements to be dropped in the last chunk,
-        // which is the filled area's length.
-        let diff = if mem::size_of::<T>() == 0 {
-            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
-            // the number of zero-sized values in the last and only chunk, just out of caution.
-            // Recall that `end` was incremented for each allocated value.
-            end - start
-        } else {
-            (end - start) / mem::size_of::<T>()
-        };
-        // Pass that to the `destroy` method.
-        unsafe {
-            last_chunk.destroy(diff);
-        }
-        // Reset the chunk.
-        self.ptr.set(last_chunk.start());
-    }
 }
 
 unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
     fn drop(&mut self) {
+        let this = self.lock.get_mut();
+
         unsafe {
             // Determine how much was filled.
-            let mut chunks_borrow = self.chunks.borrow_mut();
+            let mut chunks_borrow = this.chunks.borrow_mut();
             if let Some(mut last_chunk) = chunks_borrow.pop() {
                 // Drop the contents of the last chunk.
-                self.clear_last_chunk(&mut last_chunk);
+                this.clear_last_chunk(&mut last_chunk);
                 // The last chunk will be dropped. Destroy all other chunks.
                 for chunk in chunks_borrow.iter_mut() {
                     let cap = chunk.storage.cap();
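
In `drop`, the patch calls `self.lock.get_mut()` rather than `lock()`: `drop` takes `&mut self`, so exclusive access is already proven statically and no runtime locking is required. The same pattern with std's `Mutex`, as a sketch:

```rust
use std::sync::Mutex;

// With `&mut` access the data behind a Mutex can be reached without
// acquiring the lock, since no other reference can exist concurrently.
fn reset(counter: &mut Mutex<Vec<u32>>) {
    counter.get_mut().unwrap().clear();
}
```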
@@ -270,9 +289,13 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
     }
 }
 
-unsafe impl<T: Send> Send for TypedArena<T> {}
+unsafe impl<T: Send> Send for TypedArenaInner<T> {}
 
 pub struct DroplessArena {
+    lock: Lock<DroplessArenaInner>,
+}
+
+struct DroplessArenaInner {
     /// A pointer to the next object to be allocated.
     ptr: Cell<*mut u8>,
 
@@ -284,26 +307,9 @@ pub struct DroplessArena {
     chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
 }
 
-impl DroplessArena {
-    pub fn new() -> DroplessArena {
-        DroplessArena {
-            ptr: Cell::new(0 as *mut u8),
-            end: Cell::new(0 as *mut u8),
-            chunks: RefCell::new(vec![]),
-        }
-    }
-
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const u8 as *mut u8;
-        for chunk in &*self.chunks.borrow() {
-            if chunk.start() <= ptr && ptr < chunk.end() {
-                return true;
-            }
-        }
-
-        false
-    }
+unsafe impl Send for DroplessArenaInner {}
 
+impl DroplessArenaInner {
     fn align_for<T>(&self) {
         let align = mem::align_of::<T>();
         let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
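
The `final_address` computation in `align_for` is the standard round-up-to-alignment trick for power-of-two alignments. A worked instance as a standalone sketch:

```rust
/// Rounds `addr` up to the next multiple of `align` (a power of two).
/// E.g. align_up(0x1003, 8) == (0x1003 + 7) & !7 == 0x100A & !7 == 0x1008.
fn align_up(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (addr + align - 1) & !(align - 1)
}
```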
@@ -341,23 +347,50 @@ impl DroplessArena {
             chunks.push(chunk);
         }
     }
+}
+
+impl DroplessArena {
+    pub fn new() -> DroplessArena {
+        DroplessArena {
+            lock: Lock::new(DroplessArenaInner {
+                ptr: Cell::new(0 as *mut u8),
+                end: Cell::new(0 as *mut u8),
+                chunks: RefCell::new(vec![]),
+            })
+        }
+    }
+
+    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+        let this = self.lock.lock();
+
+        let ptr = ptr as *const u8 as *mut u8;
+        for chunk in &*this.chunks.borrow() {
+            if chunk.start() <= ptr && ptr < chunk.end() {
+                return true;
+            }
+        }
+
+        false
+    }
 
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         unsafe {
             assert!(!mem::needs_drop::<T>());
             assert!(mem::size_of::<T>() != 0);
 
-            self.align_for::<T>();
-            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
-            if (future_end as *mut u8) >= self.end.get() {
-                self.grow::<T>(1)
+            let this = self.lock.lock();
+
+            this.align_for::<T>();
+            let future_end = intrinsics::arith_offset(this.ptr.get(), mem::size_of::<T>() as isize);
+            if (future_end as *mut u8) >= this.end.get() {
+                this.grow::<T>(1)
             }
 
-            let ptr = self.ptr.get();
+            let ptr = this.ptr.get();
             // Set the pointer past ourselves
-            self.ptr.set(intrinsics::arith_offset(
-                self.ptr.get(), mem::size_of::<T>() as isize
+            this.ptr.set(intrinsics::arith_offset(
+                this.ptr.get(), mem::size_of::<T>() as isize
             ) as *mut u8);
             // Write into uninitialized memory.
             ptr::write(ptr as *mut T, object);
@@ -377,19 +410,22 @@ impl DroplessArena {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);
         assert!(slice.len() != 0);
-        self.align_for::<T>();
+
+        let this = self.lock.lock();
+
+        this.align_for::<T>();
 
         let future_end = unsafe {
-            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
+            intrinsics::arith_offset(this.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
         };
-        if (future_end as *mut u8) >= self.end.get() {
-            self.grow::<T>(slice.len());
+        if (future_end as *mut u8) >= this.end.get() {
+            this.grow::<T>(slice.len());
         }
 
         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
-            self.ptr.set(intrinsics::arith_offset(
-                self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
+            let arena_slice = slice::from_raw_parts_mut(this.ptr.get() as *mut T, slice.len());
+            this.ptr.set(intrinsics::arith_offset(
+                this.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
             ) as *mut u8);
             arena_slice.copy_from_slice(slice);
             arena_slice
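
As with `TypedArena`, the `DroplessArena` lock is an internal detail and the public methods keep their signatures. A hypothetical caller; note that the `assert!(!mem::needs_drop::<T>())` checks above restrict this arena to types without destructors:

```rust
// Hypothetical caller, for illustration only.
fn demo(arena: &DroplessArena) -> u64 {
    let a = arena.alloc(40u64); // fine: u64 has no drop glue
    let b = arena.alloc(2u64);
    *a + *b
}
```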