Oibit send and friends #20119

Merged (16 commits) on Dec 27, 2014

13 changes: 12 additions & 1 deletion src/liballoc/arc.rs
@@ -117,6 +117,10 @@ pub struct Arc<T> {
_ptr: *mut ArcInner<T>,
}

unsafe impl<T: Sync + Send> Send for Arc<T> { }
unsafe impl<T: Sync + Send> Sync for Arc<T> { }


/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be used to break cycles
@@ -129,13 +133,19 @@ pub struct Weak<T> {
_ptr: *mut ArcInner<T>,
}

unsafe impl<T: Sync + Send> Send for Weak<T> { }
unsafe impl<T: Sync + Send> Sync for Weak<T> { }

struct ArcInner<T> {
strong: atomic::AtomicUint,
weak: atomic::AtomicUint,
data: T,
}

impl<T: Sync + Send> Arc<T> {
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}

impl<T> Arc<T> {
/// Constructs a new `Arc<T>`.
///
/// # Examples
@@ -587,6 +597,7 @@ mod tests {
use std::str::Str;
use std::sync::atomic;
use std::task;
use std::kinds::Send;
use std::vec::Vec;
use super::{Arc, Weak, weak_count, strong_count};
use std::sync::Mutex;
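A minimal sketch (modern Rust syntax, not part of this PR) of what the `Send`/`Sync` bounds on the `Arc` impls above provide: an `Arc<T>` can be cloned into other threads exactly when `T` is both `Send` and `Sync`. The names below are illustrative only.

```rust
use std::sync::Arc;
use std::thread;

fn main() {
    // Vec<i32> is Send + Sync, so Arc<Vec<i32>> is Send and may cross threads.
    let shared = Arc::new(vec![1, 2, 3]);

    let handles: Vec<_> = (0..4)
        .map(|_| {
            let shared = Arc::clone(&shared);
            thread::spawn(move || shared.iter().sum::<i32>())
        })
        .collect();

    for handle in handles {
        assert_eq!(handle.join().unwrap(), 6);
    }
    // By contrast, Arc<Cell<i32>> would not compile here: Cell<i32> is not Sync.
}
```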
3 changes: 2 additions & 1 deletion src/liballoc/boxed.rs
@@ -19,6 +19,7 @@ use core::hash::{mod, Hash};
use core::kinds::Sized;
use core::mem;
use core::option::Option;
use core::ptr::Unique;
use core::raw::TraitObject;
use core::result::Result;
use core::result::Result::{Ok, Err};
@@ -44,7 +45,7 @@ pub static HEAP: () = ();
/// A type that represents a uniquely-owned value.
#[lang = "owned_box"]
#[unstable = "custom allocators will add an additional type parameter (with default)"]
pub struct Box<T>(*mut T);
pub struct Box<T>(Unique<T>);

#[stable]
impl<T: Default> Default for Box<T> {
2 changes: 2 additions & 0 deletions src/libcollections/dlist.rs
@@ -43,6 +43,8 @@ struct Rawlink<T> {
}

impl<T> Copy for Rawlink<T> {}
unsafe impl<T:'static+Send> Send for Rawlink<T> {}
unsafe impl<T:Send+Sync> Sync for Rawlink<T> {}

struct Node<T> {
next: Link<T>,
54 changes: 28 additions & 26 deletions src/libcollections/vec.rs
@@ -58,7 +58,7 @@ use core::kinds::marker::{ContravariantLifetime, InvariantType};
use core::mem;
use core::num::{Int, UnsignedInt};
use core::ops;
use core::ptr;
use core::ptr::{mod, Unique};
use core::raw::Slice as RawSlice;
use core::uint;

@@ -133,7 +133,7 @@ use slice::CloneSliceExt;
#[unsafe_no_drop_flag]
#[stable]
pub struct Vec<T> {
ptr: *mut T,
ptr: Unique<T>,
len: uint,
cap: uint,
}
@@ -176,7 +176,7 @@ impl<T> Vec<T> {
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. The reason for this is because the pointer of a slice
// being NULL would break the null pointer optimization for enums.
Vec { ptr: EMPTY as *mut T, len: 0, cap: 0 }
Vec { ptr: Unique(EMPTY as *mut T), len: 0, cap: 0 }
}

/// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -209,15 +209,15 @@ impl<T> Vec<T> {
#[stable]
pub fn with_capacity(capacity: uint) -> Vec<T> {
if mem::size_of::<T>() == 0 {
Vec { ptr: EMPTY as *mut T, len: 0, cap: uint::MAX }
Vec { ptr: Unique(EMPTY as *mut T), len: 0, cap: uint::MAX }
} else if capacity == 0 {
Vec::new()
} else {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
Vec { ptr: ptr as *mut T, len: 0, cap: capacity }
Vec { ptr: Unique(ptr as *mut T), len: 0, cap: capacity }
}
}

@@ -284,7 +284,7 @@ impl<T> Vec<T> {
#[unstable = "needs finalization"]
pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
capacity: uint) -> Vec<T> {
Vec { ptr: ptr, len: length, cap: capacity }
Vec { ptr: Unique(ptr), len: length, cap: capacity }
}

/// Creates a vector by copying the elements from a raw pointer.
@@ -795,19 +795,19 @@ impl<T> Vec<T> {
if self.len == 0 {
if self.cap != 0 {
unsafe {
dealloc(self.ptr, self.cap)
dealloc(self.ptr.0, self.cap)
}
self.cap = 0;
}
} else {
unsafe {
// Overflow check is unnecessary as the vector is already at
// least this large.
self.ptr = reallocate(self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T;
if self.ptr.is_null() { ::alloc::oom() }
self.ptr = Unique(reallocate(self.ptr.0 as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T);
if self.ptr.0.is_null() { ::alloc::oom() }
}
self.cap = self.len;
}
@@ -867,7 +867,7 @@ impl<T> Vec<T> {
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
mem::transmute(RawSlice {
data: self.ptr as *const T,
data: self.ptr.0 as *const T,
len: self.len,
})
}
@@ -890,9 +890,9 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn into_iter(self) -> IntoIter<T> {
unsafe {
let ptr = self.ptr;
let ptr = self.ptr.0;
let cap = self.cap;
let begin = self.ptr as *const T;
let begin = self.ptr.0 as *const T;
let end = if mem::size_of::<T>() == 0 {
(ptr as uint + self.len()) as *const T
} else {
@@ -1110,14 +1110,14 @@ impl<T> Vec<T> {
let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
if old_size > size { panic!("capacity overflow") }
unsafe {
self.ptr = alloc_or_realloc(self.ptr, old_size, size);
if self.ptr.is_null() { ::alloc::oom() }
self.ptr = Unique(alloc_or_realloc(self.ptr.0, old_size, size));
if self.ptr.0.is_null() { ::alloc::oom() }
}
self.cap = max(self.cap, 2) * 2;
}

unsafe {
let end = (self.ptr as *const T).offset(self.len as int) as *mut T;
let end = self.ptr.0.offset(self.len as int);
ptr::write(&mut *end, value);
self.len += 1;
}
@@ -1162,11 +1162,11 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
unsafe {
let begin = self.ptr as *const T;
let begin = self.ptr.0 as *const T;
let end = if mem::size_of::<T>() == 0 {
(self.ptr as uint + self.len()) as *const T
(self.ptr.0 as uint + self.len()) as *const T
} else {
self.ptr.offset(self.len() as int) as *const T
self.ptr.0.offset(self.len() as int) as *const T
};
self.set_len(0);
Drain {
@@ -1231,8 +1231,10 @@ impl<T> Vec<T> {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
unsafe {
self.ptr = alloc_or_realloc(self.ptr, self.cap * mem::size_of::<T>(), size);
if self.ptr.is_null() { ::alloc::oom() }
self.ptr = Unique(alloc_or_realloc(self.ptr.0,
self.cap * mem::size_of::<T>(),
size));
if self.ptr.0.is_null() { ::alloc::oom() }
}
self.cap = capacity;
}
@@ -1355,7 +1357,7 @@ impl<T> AsSlice<T> for Vec<T> {
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe {
mem::transmute(RawSlice {
data: self.ptr as *const T,
data: self.ptr.0 as *const T,
len: self.len
})
}
@@ -1380,7 +1382,7 @@ impl<T> Drop for Vec<T> {
for x in self.iter() {
ptr::read(x);
}
dealloc(self.ptr, self.cap)
dealloc(self.ptr.0, self.cap)
}
}
}
@@ -1418,7 +1420,7 @@ impl<T> IntoIter<T> {
for _x in self { }
let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
mem::forget(self);
Vec { ptr: allocation, cap: cap, len: 0 }
Vec { ptr: Unique(allocation), cap: cap, len: 0 }
}
}

10 changes: 10 additions & 0 deletions src/libcore/atomic.rs
@@ -14,6 +14,8 @@

pub use self::Ordering::*;

use kinds::Sync;

use intrinsics;
use cell::UnsafeCell;

@@ -23,24 +25,32 @@ pub struct AtomicBool {
v: UnsafeCell<uint>,
}

unsafe impl Sync for AtomicBool {}

/// A signed integer type which can be safely shared between threads.
#[stable]
pub struct AtomicInt {
v: UnsafeCell<int>,
}

unsafe impl Sync for AtomicInt {}

/// An unsigned integer type which can be safely shared between threads.
#[stable]
pub struct AtomicUint {
v: UnsafeCell<uint>,
}

unsafe impl Sync for AtomicUint {}

/// A raw pointer type which can be safely shared between threads.
#[stable]
pub struct AtomicPtr<T> {
p: UnsafeCell<uint>,
}

unsafe impl<T> Sync for AtomicPtr<T> {}

/// Atomic memory orderings
///
/// Memory orderings limit the ways that both the compiler and CPU may reorder
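A sketch (modern Rust, where `AtomicUint` has since become `AtomicUsize`; not part of this PR) of what the `unsafe impl Sync` lines above enable: sharing an atomic by reference across threads.

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;

fn main() {
    // Sync is what allows &AtomicUsize to be used from several threads at once.
    let counter = Arc::new(AtomicUsize::new(0));

    let handles: Vec<_> = (0..8)
        .map(|_| {
            let counter = Arc::clone(&counter);
            thread::spawn(move || {
                counter.fetch_add(1, Ordering::SeqCst);
            })
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(counter.load(Ordering::SeqCst), 8);
}
```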
4 changes: 2 additions & 2 deletions src/libcore/kinds.rs
@@ -19,7 +19,7 @@

/// Types able to be transferred across task boundaries.
#[lang="send"]
pub trait Send for Sized? : 'static {
pub unsafe trait Send for Sized? : 'static {
// empty.
}

@@ -81,7 +81,7 @@ pub trait Copy for Sized? {
/// reference; not doing this is undefined behaviour (for example,
/// `transmute`-ing from `&T` to `&mut T` is illegal).
#[lang="sync"]
pub trait Sync for Sized? {
pub unsafe trait Sync for Sized? {
// Empty
}

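With `Send` and `Sync` now declared as `unsafe trait`s, opting a type back in requires an `unsafe impl`, which is the pattern the rest of this PR applies. A minimal sketch of that pattern for a raw-pointer wrapper (modern syntax, illustrative type name):

```rust
use std::thread;

// A raw pointer opts a type out of Send/Sync; claiming them back is now an
// explicitly unsafe promise made by the implementor.
struct Token(*mut ());

unsafe impl Send for Token {}
unsafe impl Sync for Token {}

fn main() {
    let token = Token(std::ptr::null_mut());
    // Without the unsafe impls above, moving `token` into the spawned thread
    // would be rejected because *mut () is neither Send nor Sync.
    thread::spawn(move || assert!(token.0.is_null()))
        .join()
        .unwrap();
}
```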
33 changes: 33 additions & 0 deletions src/libcore/ptr.rs
@@ -92,6 +92,7 @@ use clone::Clone;
use intrinsics;
use option::Option;
use option::Option::{Some, None};
use kinds::{Send, Sync};

use cmp::{PartialEq, Eq, Ord, PartialOrd, Equiv};
use cmp::Ordering;
@@ -501,3 +502,35 @@ impl<T> PartialOrd for *mut T {
#[inline]
fn ge(&self, other: &*mut T) -> bool { *self >= *other }
}

/// A wrapper around a raw `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. This in turn implies that the
/// `Unique<T>` is `Send`/`Sync` if `T` is `Send`/`Sync`, unlike a
/// raw `*mut T` (which conveys no particular ownership semantics).
/// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
/// internally use raw pointers to manage the memory that they own.
pub struct Unique<T>(pub *mut T);

/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
unsafe impl<T:Send> Send for Unique<T> { }

/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
unsafe impl<T:Sync> Sync for Unique<T> { }

impl<T> Unique<T> {
/// Returns a null Unique.
pub fn null() -> Unique<T> {
Unique(RawPtr::null())
}

/// Return an (unsafe) pointer into the memory owned by `self`.
pub unsafe fn offset(self, offset: int) -> *mut T {
(self.0 as *const T).offset(offset) as *mut T
}
}

Member:

In terms of stabilization, we may want to avoid constructors or functions on this pointer type for now. Some questions that arise:

  • Why have a null constructor while not having constructors for other types of pointers?
  • Why have an offset function without the other array of functions on raw pointers?
  • Should this implement the raw pointer traits?

Contributor:

I figured we would land UniquePtr and call it experimental or unstable for now until we had a chance to debate it. It's not user-visible in any case.

Contributor Author:

Kept UniquePtr as-is, based on @nikomatsakis's comment.
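
For illustration of the kind of consumer discussed in this thread, a minimal owning-buffer sketch (modern Rust, not part of this PR; `Unique` is re-declared locally since today's `core::ptr::Unique` is unstable, and the `Buffer` type is hypothetical):

```rust
use std::alloc::{alloc, dealloc, Layout};

// Local stand-in mirroring the `Unique(pub *mut T)` added in ptr.rs above.
struct Unique<T>(pub *mut T);
unsafe impl<T: Send> Send for Unique<T> {}
unsafe impl<T: Sync> Sync for Unique<T> {}

// A tiny owned buffer in the spirit of Vec<T>/Box<T>, using Unique internally.
struct Buffer {
    ptr: Unique<u8>,
    len: usize,
}

impl Buffer {
    fn zeroed(len: usize) -> Buffer {
        assert!(len > 0);
        let layout = Layout::array::<u8>(len).unwrap();
        let ptr = unsafe { alloc(layout) };
        assert!(!ptr.is_null(), "allocation failure");
        unsafe { std::ptr::write_bytes(ptr, 0, len) };
        Buffer { ptr: Unique(ptr), len }
    }

    fn get(&self, i: usize) -> u8 {
        assert!(i < self.len);
        // Field access here plays the role the `offset` helper plays in the diff.
        unsafe { *self.ptr.0.add(i) }
    }
}

impl Drop for Buffer {
    fn drop(&mut self) {
        let layout = Layout::array::<u8>(self.len).unwrap();
        unsafe { dealloc(self.ptr.0, layout) };
    }
}

fn main() {
    let buf = Buffer::zeroed(16);
    assert_eq!(buf.get(7), 0);
}
```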

9 changes: 6 additions & 3 deletions src/libflate/lib.rs
@@ -27,8 +27,9 @@

extern crate libc;

use std::c_vec::CVec;
use libc::{c_void, size_t, c_int};
use std::c_vec::CVec;
use std::ptr::Unique;

#[link(name = "miniz", kind = "static")]
extern {
@@ -59,7 +60,8 @@ fn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Option<CVec<u8>> {
&mut outsz,
flags);
if !res.is_null() {
Some(CVec::new_with_dtor(res as *mut u8, outsz as uint, move|:| libc::free(res)))
let res = Unique(res);
Some(CVec::new_with_dtor(res.0 as *mut u8, outsz as uint, move|:| libc::free(res.0)))

Contributor:

You cast it to a Unique and immediately cast it back?

} else {
None
}
@@ -84,7 +86,8 @@ fn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Option<CVec<u8>> {
&mut outsz,
flags);
if !res.is_null() {
Some(CVec::new_with_dtor(res as *mut u8, outsz as uint, move|:| libc::free(res)))
let res = Unique(res);
Some(CVec::new_with_dtor(res.0 as *mut u8, outsz as uint, move|:| libc::free(res.0)))
} else {
None
}
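A plausible reading of the round-trip questioned above (hedged, not stated in this PR): raw pointers are no longer `Send`, so a destructor closure capturing `res` directly would itself not be `Send`; wrapping the pointer in `Unique`, which is `Send`, restores that. A small sketch of the same effect in modern Rust, with an illustrative `Owned` newtype standing in for `Unique`:

```rust
use std::thread;

// Raw pointers are not Send, so a closure capturing one is not Send either.
// An explicit `unsafe impl Send` on a wrapper (what `Unique` supplies) fixes that.
struct Owned(*mut u8);
unsafe impl Send for Owned {}

fn main() {
    let raw = Box::into_raw(Box::new(42u8));
    let owned = Owned(raw);
    // Capturing `raw` directly below would not compile; capturing `owned` does.
    thread::spawn(move || unsafe {
        assert_eq!(*owned.0, 42);
        drop(Box::from_raw(owned.0));
    })
    .join()
    .unwrap();
}
```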
3 changes: 2 additions & 1 deletion src/librustc/diagnostics.rs
@@ -67,5 +67,6 @@ register_diagnostics! {
E0173,
E0174,
E0177,
E0178
E0178,
E0179
}