Commit 6849630

Auto merge of #45912 - Zoxc:par-query, r=<try>

WIP: Playing with adding thread safety to GlobalCtxt

2 parents: 24840da + 5ca1224


83 files changed: +1226 −842 lines

src/Cargo.lock: +119 −84 (generated file; diff not rendered)

src/libarena/Cargo.toml: +3
@@ -7,3 +7,6 @@ version = "0.0.0"
 name = "arena"
 path = "lib.rs"
 crate-type = ["dylib"]
+
+[dependencies]
+rustc_data_structures = { path = "../librustc_data_structures" }

src/libarena/lib.rs: +147 −111
@@ -33,6 +33,9 @@
 #![allow(deprecated)]

 extern crate alloc;
+extern crate rustc_data_structures;
+
+use rustc_data_structures::lock::Lock;

 use std::cell::{Cell, RefCell};
 use std::cmp;
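The rest of the patch leans on this one import: each arena's mutable state moves behind a `Lock`. The commit excerpt does not show `rustc_data_structures::lock::Lock` itself, so the following is only a minimal sketch of the interface the diff relies on, modeled on `std::sync::Mutex` (an assumption; the real type may differ):

    // Hypothetical stand-in for rustc_data_structures::lock::Lock, modeled
    // on std::sync::Mutex. Only the three operations this diff calls are
    // sketched: new, lock, and get_mut.
    use std::sync::{Mutex, MutexGuard};

    pub struct Lock<T>(Mutex<T>);

    impl<T> Lock<T> {
        pub fn new(inner: T) -> Self {
            Lock(Mutex::new(inner))
        }

        // Blocks until the lock is acquired; the guard derefs to `T`.
        pub fn lock(&self) -> MutexGuard<T> {
            self.0.lock().unwrap()
        }

        // With `&mut self` no other borrows can exist, so no locking is
        // needed; this is what the Drop impl below uses.
        pub fn get_mut(&mut self) -> &mut T {
            self.0.get_mut().unwrap()
        }
    }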
@@ -46,6 +49,10 @@ use alloc::raw_vec::RawVec;

 /// An arena that can hold objects of only one type.
 pub struct TypedArena<T> {
+    lock: Lock<TypedArenaInner<T>>,
+}
+
+struct TypedArenaInner<T> {
     /// A pointer to the next object to be allocated.
     ptr: Cell<*mut T>,

@@ -109,38 +116,102 @@ impl<T> TypedArenaChunk<T> {

 const PAGE: usize = 4096;

+impl<T> TypedArenaInner<T> {
+    /// Grows the arena.
+    #[inline(never)]
+    #[cold]
+    fn grow(&self, n: usize) {
+        unsafe {
+            let mut chunks = self.chunks.borrow_mut();
+            let (chunk, mut new_capacity);
+            if let Some(last_chunk) = chunks.last_mut() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
+                    self.end.set(last_chunk.end());
+                    return;
+                } else {
+                    new_capacity = last_chunk.storage.cap();
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= currently_used_cap + n {
+                            break;
+                        }
+                    }
+                }
+            } else {
+                let elem_size = cmp::max(1, mem::size_of::<T>());
+                new_capacity = cmp::max(n, PAGE / elem_size);
+            }
+            chunk = TypedArenaChunk::<T>::new(new_capacity);
+            self.ptr.set(chunk.start());
+            self.end.set(chunk.end());
+            chunks.push(chunk);
+        }
+    }
+
+    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
+    // chunks.
+    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
+        // Determine how much was filled.
+        let start = last_chunk.start() as usize;
+        // We obtain the value of the pointer to the first uninitialized element.
+        let end = self.ptr.get() as usize;
+        // We then calculate the number of elements to be dropped in the last chunk,
+        // which is the filled area's length.
+        let diff = if mem::size_of::<T>() == 0 {
+            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
+            // the number of zero-sized values in the last and only chunk, just out of caution.
+            // Recall that `end` was incremented for each allocated value.
+            end - start
+        } else {
+            (end - start) / mem::size_of::<T>()
+        };
+        // Pass that to the `destroy` method.
+        unsafe {
+            last_chunk.destroy(diff);
+        }
+        // Reset the chunk.
+        self.ptr.set(last_chunk.start());
+    }
+}
+
 impl<T> TypedArena<T> {
     /// Creates a new `TypedArena`.
     #[inline]
     pub fn new() -> TypedArena<T> {
         TypedArena {
-            // We set both `ptr` and `end` to 0 so that the first call to
-            // alloc() will trigger a grow().
-            ptr: Cell::new(0 as *mut T),
-            end: Cell::new(0 as *mut T),
-            chunks: RefCell::new(vec![]),
-            _own: PhantomData,
+            lock: Lock::new(TypedArenaInner {
+                // We set both `ptr` and `end` to 0 so that the first call to
+                // alloc() will trigger a grow().
+                ptr: Cell::new(0 as *mut T),
+                end: Cell::new(0 as *mut T),
+                chunks: RefCell::new(vec![]),
+                _own: PhantomData,
+            })
         }
     }

     /// Allocates an object in the `TypedArena`, returning a reference to it.
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
-        if self.ptr == self.end {
-            self.grow(1)
+        let this = self.lock.lock();
+
+        if this.ptr == this.end {
+            this.grow(1)
         }

         unsafe {
             if mem::size_of::<T>() == 0 {
-                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
+                this.ptr.set(intrinsics::arith_offset(this.ptr.get() as *mut u8, 1) as *mut T);
                 let ptr = mem::align_of::<T>() as *mut T;
                 // Don't drop the object. This `write` is equivalent to `forget`.
                 ptr::write(ptr, object);
                 &mut *ptr
             } else {
-                let ptr = self.ptr.get();
+                let ptr = this.ptr.get();
                 // Advance the pointer.
-                self.ptr.set(self.ptr.get().offset(1));
+                this.ptr.set(this.ptr.get().offset(1));
                 // Write into uninitialized memory.
                 ptr::write(ptr, object);
                 &mut *ptr
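The doubling loop in `grow` above always multiplies at least once and stops as soon as the candidate capacity covers what is already in use plus the `n` newly requested elements. A worked illustration with assumed figures:

    // Assumed figures: last chunk capacity 8, 5 elements in use, 20 requested.
    let (mut new_capacity, currently_used_cap, n) = (8usize, 5usize, 20usize);
    loop {
        new_capacity = new_capacity.checked_mul(2).unwrap(); // 16, then 32
        if new_capacity >= currently_used_cap + n {
            break; // 32 >= 25, so the new chunk holds 32 elements
        }
    }
    assert_eq!(new_capacity, 32);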
@@ -160,61 +231,32 @@ impl<T> TypedArena<T> {
         assert!(mem::size_of::<T>() != 0);
         assert!(slice.len() != 0);

-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let this = self.lock.lock();
+
+        let available_capacity_bytes = this.end.get() as usize - this.ptr.get() as usize;
         let at_least_bytes = slice.len() * mem::size_of::<T>();
         if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
+            this.grow(slice.len());
         }

         unsafe {
-            let start_ptr = self.ptr.get();
+            let start_ptr = this.ptr.get();
             let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
+            this.ptr.set(start_ptr.offset(arena_slice.len() as isize));
             arena_slice.copy_from_slice(slice);
             arena_slice
         }
     }

-    /// Grows the arena.
-    #[inline(never)]
-    #[cold]
-    fn grow(&self, n: usize) {
-        unsafe {
-            let mut chunks = self.chunks.borrow_mut();
-            let (chunk, mut new_capacity);
-            if let Some(last_chunk) = chunks.last_mut() {
-                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
-                let currently_used_cap = used_bytes / mem::size_of::<T>();
-                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
-                    self.end.set(last_chunk.end());
-                    return;
-                } else {
-                    new_capacity = last_chunk.storage.cap();
-                    loop {
-                        new_capacity = new_capacity.checked_mul(2).unwrap();
-                        if new_capacity >= currently_used_cap + n {
-                            break;
-                        }
-                    }
-                }
-            } else {
-                let elem_size = cmp::max(1, mem::size_of::<T>());
-                new_capacity = cmp::max(n, PAGE / elem_size);
-            }
-            chunk = TypedArenaChunk::<T>::new(new_capacity);
-            self.ptr.set(chunk.start());
-            self.end.set(chunk.end());
-            chunks.push(chunk);
-        }
-    }
-
     /// Clears the arena. Deallocates all but the longest chunk which may be reused.
     pub fn clear(&mut self) {
+        let this = self.lock.lock();
+
         unsafe {
             // Clear the last chunk, which is partially filled.
-            let mut chunks_borrow = self.chunks.borrow_mut();
+            let mut chunks_borrow = this.chunks.borrow_mut();
             if let Some(mut last_chunk) = chunks_borrow.pop() {
-                self.clear_last_chunk(&mut last_chunk);
+                this.clear_last_chunk(&mut last_chunk);
                 // If `T` is ZST, code below has no effect.
                 for mut chunk in chunks_borrow.drain(..) {
                     let cap = chunk.storage.cap();
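Note the pattern shared by `alloc`, the slice allocator, and `clear`: the lock is taken once at the top (`let this = self.lock.lock();`) and every former `self.` access becomes `this.`, which works because the guard dereferences to the inner struct. A minimal model of that pattern, assuming a `std::sync::Mutex`-style guard (the names `Outer`, `Inner`, and `bump` are illustrative only):

    use std::sync::Mutex;

    struct Inner { counter: u32 }
    struct Outer { lock: Mutex<Inner> }

    impl Outer {
        fn bump(&self) -> u32 {
            // The guard derefs to Inner, so `this.counter` reads exactly
            // like the old `self.counter` did before the Inner split.
            let mut this = self.lock.lock().unwrap();
            this.counter += 1;
            this.counter
        }
    }

The arena methods go further and hand out `&mut T` references that outlive the guard, which is only possible through the raw pointers inside their `unsafe` blocks.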
@@ -224,41 +266,18 @@ impl<T> TypedArena<T> {
             }
         }
     }
-
-    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
-    // chunks.
-    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
-        // Determine how much was filled.
-        let start = last_chunk.start() as usize;
-        // We obtain the value of the pointer to the first uninitialized element.
-        let end = self.ptr.get() as usize;
-        // We then calculate the number of elements to be dropped in the last chunk,
-        // which is the filled area's length.
-        let diff = if mem::size_of::<T>() == 0 {
-            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
-            // the number of zero-sized values in the last and only chunk, just out of caution.
-            // Recall that `end` was incremented for each allocated value.
-            end - start
-        } else {
-            (end - start) / mem::size_of::<T>()
-        };
-        // Pass that to the `destroy` method.
-        unsafe {
-            last_chunk.destroy(diff);
-        }
-        // Reset the chunk.
-        self.ptr.set(last_chunk.start());
-    }
 }

 unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
     fn drop(&mut self) {
+        let this = self.lock.get_mut();
+
         unsafe {
             // Determine how much was filled.
-            let mut chunks_borrow = self.chunks.borrow_mut();
+            let mut chunks_borrow = this.chunks.borrow_mut();
             if let Some(mut last_chunk) = chunks_borrow.pop() {
                 // Drop the contents of the last chunk.
-                self.clear_last_chunk(&mut last_chunk);
+                this.clear_last_chunk(&mut last_chunk);
                 // The last chunk will be dropped. Destroy all other chunks.
                 for chunk in chunks_borrow.iter_mut() {
                     let cap = chunk.storage.cap();
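From a caller's perspective nothing changes: construction, allocation, and clearing keep their old signatures, with the locking purely internal. A hypothetical usage sketch:

    let mut arena = TypedArena::new();
    {
        let s = arena.alloc(String::from("hello"));
        s.push_str(", world");
        assert_eq!(*s, "hello, world");
    }
    // Drops every allocated value but keeps the longest chunk for reuse.
    arena.clear();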
@@ -270,9 +289,13 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
     }
 }

-unsafe impl<T: Send> Send for TypedArena<T> {}
+unsafe impl<T: Send> Send for TypedArenaInner<T> {}

 pub struct DroplessArena {
+    lock: Lock<DroplessArenaInner>,
+}
+
+struct DroplessArenaInner {
     /// A pointer to the next object to be allocated.
     ptr: Cell<*mut u8>,

@@ -284,26 +307,9 @@ pub struct DroplessArena {
     chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
 }

-impl DroplessArena {
-    pub fn new() -> DroplessArena {
-        DroplessArena {
-            ptr: Cell::new(0 as *mut u8),
-            end: Cell::new(0 as *mut u8),
-            chunks: RefCell::new(vec![]),
-        }
-    }
-
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const u8 as *mut u8;
-        for chunk in &*self.chunks.borrow() {
-            if chunk.start() <= ptr && ptr < chunk.end() {
-                return true;
-            }
-        }
-
-        false
-    }
+unsafe impl Send for DroplessArenaInner {}

+impl DroplessArenaInner {
     fn align_for<T>(&self) {
         let align = mem::align_of::<T>();
         let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
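The expression for `final_address` rounds the current pointer up to the next multiple of `align`, relying on `align` being a power of two. A worked example with an assumed starting address:

    let (addr, align) = (0x1003usize, 8usize);
    let final_address = (addr + align - 1) & !(align - 1);
    assert_eq!(final_address, 0x1008); // already-aligned addresses stay put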
@@ -341,23 +347,50 @@ impl DroplessArena {
             chunks.push(chunk);
         }
     }
+}
+
+impl DroplessArena {
+    pub fn new() -> DroplessArena {
+        DroplessArena {
+            lock: Lock::new(DroplessArenaInner {
+                ptr: Cell::new(0 as *mut u8),
+                end: Cell::new(0 as *mut u8),
+                chunks: RefCell::new(vec![]),
+            })
+        }
+    }
+
+    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+        let this = self.lock.lock();
+
+        let ptr = ptr as *const u8 as *mut u8;
+        for chunk in &*this.chunks.borrow() {
+            if chunk.start() <= ptr && ptr < chunk.end() {
+                return true;
+            }
+        }
+
+        false
+    }

     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         unsafe {
             assert!(!mem::needs_drop::<T>());
             assert!(mem::size_of::<T>() != 0);

-            self.align_for::<T>();
-            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
-            if (future_end as *mut u8) >= self.end.get() {
-                self.grow::<T>(1)
+            let this = self.lock.lock();
+
+            this.align_for::<T>();
+            let future_end = intrinsics::arith_offset(this.ptr.get(), mem::size_of::<T>() as isize);
+            if (future_end as *mut u8) >= this.end.get() {
+                this.grow::<T>(1)
             }

-            let ptr = self.ptr.get();
+            let ptr = this.ptr.get();
             // Set the pointer past ourselves
-            self.ptr.set(intrinsics::arith_offset(
-                self.ptr.get(), mem::size_of::<T>() as isize
+            this.ptr.set(intrinsics::arith_offset(
+                this.ptr.get(), mem::size_of::<T>() as isize
             ) as *mut u8);
             // Write into uninitialized memory.
             ptr::write(ptr as *mut T, object);
@@ -377,19 +410,22 @@ impl DroplessArena {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);
         assert!(slice.len() != 0);
-        self.align_for::<T>();
+
+        let this = self.lock.lock();
+
+        this.align_for::<T>();

         let future_end = unsafe {
-            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
+            intrinsics::arith_offset(this.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
         };
-        if (future_end as *mut u8) >= self.end.get() {
-            self.grow::<T>(slice.len());
+        if (future_end as *mut u8) >= this.end.get() {
+            this.grow::<T>(slice.len());
         }

         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
-            self.ptr.set(intrinsics::arith_offset(
-                self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
+            let arena_slice = slice::from_raw_parts_mut(this.ptr.get() as *mut T, slice.len());
+            this.ptr.set(intrinsics::arith_offset(
+                this.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
             ) as *mut u8);
             arena_slice.copy_from_slice(slice);
             arena_slice
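As with `TypedArena`, `DroplessArena` callers are unaffected; `new`, `alloc`, and `in_arena` all appear above with unchanged signatures. A usage sketch:

    let arena = DroplessArena::new();
    let x: &mut u64 = arena.alloc(7);
    *x += 1;
    assert_eq!(*x, 8);
    // The allocation lives inside one of the arena's chunks.
    assert!(arena.in_arena(x as *const u64));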
