Skip to content

Little cleanups and version bump #84

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Aug 22, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
### 0.4.5

Released 2019/08/22.

* Drop `unreachable` dependency, now that `core::hint::unreachable_unchecked`
  is stable in Rust 1.27.

### 0.4.4

Released 2019/04/15.
Expand Down
2 changes: 1 addition & 1 deletion example/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
// We aren't using the standard library.
#![no_std]
// Replacing the allocator and using the `alloc` crate are still unstable.
#![feature(alloc, core_intrinsics, lang_items, alloc_error_handler)]
#![feature(core_intrinsics, lang_items, alloc_error_handler)]

extern crate alloc;
extern crate wee_alloc;
Expand Down
27 changes: 17 additions & 10 deletions test/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![feature(alloc, allocator_api)]
#![feature(allocator_api)]

extern crate histo;
#[macro_use]
Expand All @@ -8,8 +8,8 @@ extern crate cfg_if;
extern crate rand;
extern crate wee_alloc;

use std::alloc::{Alloc, Layout};
use quickcheck::{Arbitrary, Gen};
use std::alloc::{Alloc, Layout};
use std::f64;
use std::fs;
use std::io::Read;
Expand Down Expand Up @@ -157,12 +157,13 @@ impl Arbitrary for Operations {
}

#[inline(never)]
fn shrink(&self) -> Box<Iterator<Item = Self>> {
fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
let ops = self.0.clone();
let prefixes =
(0..self.0.len()).map(move |i| Operations(ops.iter().cloned().take(i).collect()));

let free_indices: Vec<_> = self.0
let free_indices: Vec<_> = self
.0
.iter()
.enumerate()
.filter_map(|(i, op)| if let Free(_) = *op { Some(i) } else { None })
Expand All @@ -178,7 +179,8 @@ impl Arbitrary for Operations {
)
});

let alloc_indices: Vec<_> = self.0
let alloc_indices: Vec<_> = self
.0
.iter()
.enumerate()
.filter_map(|(i, op)| if let Alloc(_) = *op { Some(i) } else { None })
Expand Down Expand Up @@ -433,7 +435,8 @@ fn allocate_size_zero() {
.take(1000)
.chain((0..1000).map(|i| Free(i)))
.collect(),
).run_single_threaded();
)
.run_single_threaded();
}

#[test]
Expand All @@ -447,7 +450,8 @@ fn allocate_many_small() {
.chain(iter::repeat(Alloc(256 * mem::size_of::<usize>())).take(100))
.chain((0..100).map(|i| Free(i + 100)))
.collect(),
).run_single_threaded();
)
.run_single_threaded();
}

#[test]
Expand All @@ -461,7 +465,8 @@ fn allocate_many_large() {
.chain(iter::repeat(Alloc(1024 * mem::size_of::<usize>())).take(100))
.chain((0..100).map(|i| Free(i + 100)))
.collect(),
).run_single_threaded();
)
.run_single_threaded();
}

////////////////////////////////////////////////////////////////////////////////
Expand All @@ -475,7 +480,8 @@ fn smoke() {
let mut a = &wee_alloc::WeeAlloc::INIT;
unsafe {
let layout = Layout::new::<u8>();
let ptr = a.alloc(layout.clone())
let ptr = a
.alloc(layout.clone())
.expect("Should be able to alloc a fresh Layout clone");
{
let ptr = ptr.as_ptr() as *mut u8;
Expand All @@ -484,7 +490,8 @@ fn smoke() {
}
a.dealloc(ptr, layout.clone());

let ptr = a.alloc(layout.clone())
let ptr = a
.alloc(layout.clone())
.expect("Should be able to alloc from a second clone");
{
let ptr = ptr.as_ptr() as *mut u8;
Expand Down
21 changes: 11 additions & 10 deletions trace-malloc-free/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,20 +41,21 @@ main!(|cli: Cli| {

let malloc_re = Regex::new(
r#"^\-\-\d+\-\- (realloc\(0x0,\d+\))?malloc\((?P<size>\d+)\) = 0x(?P<ptr>\w+)$"#,
).unwrap();
)
.unwrap();

let calloc_re = Regex::new(
r#"^\-\-\d+\-\- calloc\((?P<num>\d+),(?P<size>\d+)\) = 0x(?P<ptr>\w+)$"#,
).unwrap();
let calloc_re =
Regex::new(r#"^\-\-\d+\-\- calloc\((?P<num>\d+),(?P<size>\d+)\) = 0x(?P<ptr>\w+)$"#)
.unwrap();

let realloc_re = Regex::new(
r#"^\-\-\d+\-\- realloc\(0x(?P<orig>\w+),(?P<size>\d+)\) = 0x(?P<new>\w+)$"#,
).unwrap();
let realloc_re =
Regex::new(r#"^\-\-\d+\-\- realloc\(0x(?P<orig>\w+),(?P<size>\d+)\) = 0x(?P<new>\w+)$"#)
.unwrap();

// TODO: record the requested alignment and replay that as well.
let memalign_re = Regex::new(
r#"r#"^\-\-\d+\-\- memalign\(al \d+, size (?P<size>\d+)\) = 0x(?P<ptr>\w+)$"#,
).unwrap();
let memalign_re =
Regex::new(r#"r#"^\-\-\d+\-\- memalign\(al \d+, size (?P<size>\d+)\) = 0x(?P<ptr>\w+)$"#)
.unwrap();

let free_re = Regex::new(r#"^\-\-\d+\-\- free\(0x(?P<ptr>\w+)\)$"#).unwrap();

Expand Down
2 changes: 1 addition & 1 deletion wee_alloc/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ license = "MPL-2.0"
name = "wee_alloc"
readme = "../README.md"
repository = "https://github.com/rustwasm/wee_alloc"
version = "0.4.4"
version = "0.4.5"

[badges]
travis-ci = { repository = "rustwasm/wee_alloc" }
Expand Down
3 changes: 2 additions & 1 deletion wee_alloc/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ fn export_rerun_rules() {
"./build.rs",
"./src/lib.rs",
"./src/imp_static_array.rs",
].iter()
]
.iter()
{
println!("cargo:rerun-if-changed={}", path);
}
Expand Down
51 changes: 24 additions & 27 deletions wee_alloc/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ for hacking!

#![deny(missing_docs)]
#![cfg_attr(not(feature = "use_std_for_test_debugging"), no_std)]
#![cfg_attr(feature = "nightly", feature(alloc, allocator_api, core_intrinsics))]
#![cfg_attr(feature = "nightly", feature(allocator_api, core_intrinsics))]

#[macro_use]
extern crate cfg_if;
Expand Down Expand Up @@ -479,7 +479,7 @@ impl<'a> FreeCell<'a> {
raw: NonNull<u8>,
size: Bytes,
next_free: Option<*const FreeCell<'a>>,
policy: &AllocPolicy<'a>,
policy: &dyn AllocPolicy<'a>,
) -> *const FreeCell<'a> {
assert_is_word_aligned(raw.as_ptr() as *mut u8);

Expand All @@ -499,7 +499,7 @@ impl<'a> FreeCell<'a> {
raw
}

fn into_allocated_cell(&self, policy: &AllocPolicy<'a>) -> &AllocatedCell<'a> {
fn into_allocated_cell(&self, policy: &dyn AllocPolicy<'a>) -> &AllocatedCell<'a> {
assert_local_cell_invariants(&self.header);
assert_is_poisoned_with_free_pattern(self, policy);

Expand All @@ -513,7 +513,7 @@ impl<'a> FreeCell<'a> {
previous: &'b Cell<*const FreeCell<'a>>,
alloc_size: Words,
align: Bytes,
policy: &AllocPolicy<'a>,
policy: &dyn AllocPolicy<'a>,
) -> Option<&'b AllocatedCell<'a>> {
extra_assert!(alloc_size.0 > 0);
extra_assert!(align.0 > 0);
Expand Down Expand Up @@ -575,7 +575,7 @@ impl<'a> FreeCell<'a> {
fn insert_into_free_list<'b>(
&'b self,
head: &'b Cell<*const FreeCell<'a>>,
policy: &AllocPolicy<'a>,
policy: &dyn AllocPolicy<'a>,
) -> &'b Cell<*const FreeCell<'a>> {
extra_assert!(!self.next_free_can_merge());
extra_assert!(self.next_free().is_null());
Expand All @@ -602,7 +602,7 @@ impl<'a> FreeCell<'a> {
}

impl<'a> AllocatedCell<'a> {
unsafe fn into_free_cell(&self, policy: &AllocPolicy<'a>) -> &FreeCell<'a> {
unsafe fn into_free_cell(&self, policy: &dyn AllocPolicy<'a>) -> &FreeCell<'a> {
assert_local_cell_invariants(&self.header);

CellHeader::set_free(&self.header.neighbors);
Expand All @@ -620,7 +620,7 @@ impl<'a> AllocatedCell<'a> {
}

extra_only! {
fn write_free_pattern(cell: &FreeCell, size: Bytes, policy: &AllocPolicy) {
fn write_free_pattern(cell: &FreeCell, size: Bytes, policy: &dyn AllocPolicy) {
unsafe {
let data = cell.tail_data();
let pattern = policy.free_pattern();
Expand All @@ -634,7 +634,7 @@ extra_only! {
}

extra_only! {
fn assert_is_poisoned_with_free_pattern(cell: &FreeCell, policy: &AllocPolicy) {
fn assert_is_poisoned_with_free_pattern(cell: &FreeCell, policy: &dyn AllocPolicy) {
use core::slice;
unsafe {
let size: Bytes = cell.tail_data_size();
Expand Down Expand Up @@ -698,7 +698,7 @@ extra_only! {
//
// This is O(size of free list) and can be pretty slow, so try to restrict
// its usage to verifying that a free list is still valid after mutation.
fn assert_is_valid_free_list(head: *const FreeCell, policy: &AllocPolicy) {
fn assert_is_valid_free_list(head: *const FreeCell, policy: &dyn AllocPolicy) {
unsafe {
let mut left = head;
assert_local_cell_invariants(left as *const CellHeader);
Expand Down Expand Up @@ -777,10 +777,7 @@ impl<'a> AllocPolicy<'a> for LargeAllocPolicy {
// free list with this new cell, make sure that we allocate enough to
// fulfill the requested alignment, and still have the minimum cell size
// left over.
let size: Bytes = cmp::max(
size.into(),
(align + Self::MIN_CELL_SIZE) * Words(2),
);
let size: Bytes = cmp::max(size.into(), (align + Self::MIN_CELL_SIZE) * Words(2));

let pages: Pages = (size + size_of::<CellHeader>()).round_up_to();
let new_pages = imp::alloc_pages(pages)?;
Expand All @@ -790,7 +787,7 @@ impl<'a> AllocPolicy<'a> for LargeAllocPolicy {
new_pages,
allocated_size - size_of::<CellHeader>(),
None,
self as &AllocPolicy<'a>,
self as &dyn AllocPolicy<'a>,
);

let next_cell = (new_pages.as_ptr() as *const u8).add(allocated_size.0);
Expand Down Expand Up @@ -834,7 +831,7 @@ cfg_if! {

unsafe fn walk_free_list<'a, F, T>(
head: &Cell<*const FreeCell<'a>>,
policy: &AllocPolicy<'a>,
policy: &dyn AllocPolicy<'a>,
mut f: F,
) -> Result<T, AllocErr>
where
Expand Down Expand Up @@ -903,7 +900,7 @@ unsafe fn alloc_first_fit<'a>(
size: Words,
align: Bytes,
head: &Cell<*const FreeCell<'a>>,
policy: &AllocPolicy<'a>,
policy: &dyn AllocPolicy<'a>,
) -> Result<NonNull<u8>, AllocErr> {
extra_assert!(size.0 > 0);

Expand All @@ -912,9 +909,7 @@ unsafe fn alloc_first_fit<'a>(

if let Some(allocated) = current.try_alloc(previous, size, align, policy) {
assert_aligned_to(allocated.data(), align);
return Some(unchecked_unwrap(
NonNull::new(allocated.data() as *mut u8),
));
return Some(unchecked_unwrap(NonNull::new(allocated.data() as *mut u8)));
}

None
Expand All @@ -925,7 +920,7 @@ unsafe fn alloc_with_refill<'a, 'b>(
size: Words,
align: Bytes,
head: &'b Cell<*const FreeCell<'a>>,
policy: &AllocPolicy<'a>,
policy: &dyn AllocPolicy<'a>,
) -> Result<NonNull<u8>, AllocErr> {
if let Ok(result) = alloc_first_fit(size, align, head, policy) {
return Ok(result);
Expand Down Expand Up @@ -977,15 +972,15 @@ impl<'a> WeeAlloc<'a> {
#[cfg(feature = "size_classes")]
unsafe fn with_free_list_and_policy_for_size<F, T>(&self, size: Words, align: Bytes, f: F) -> T
where
F: for<'b> FnOnce(&'b Cell<*const FreeCell<'a>>, &'b AllocPolicy<'a>) -> T,
F: for<'b> FnOnce(&'b Cell<*const FreeCell<'a>>, &'b dyn AllocPolicy<'a>) -> T,
{
extra_assert!(size.0 > 0);
extra_assert!(align.0 > 0);

if align <= size_of::<usize>() {
if let Some(head) = self.size_classes.get(size) {
let policy = size_classes::SizeClassAllocPolicy(&self.head);
let policy = &policy as &AllocPolicy<'a>;
let policy = &policy as &dyn AllocPolicy<'a>;
return head.with_exclusive_access(|head| {
let head_cell = Cell::new(*head);
let result = f(&head_cell, policy);
Expand All @@ -995,7 +990,7 @@ impl<'a> WeeAlloc<'a> {
}
}

let policy = &LARGE_ALLOC_POLICY as &AllocPolicy<'a>;
let policy = &LARGE_ALLOC_POLICY as &dyn AllocPolicy<'a>;
self.head.with_exclusive_access(|head| {
let head_cell = Cell::new(*head);
let result = f(&head_cell, policy);
Expand All @@ -1007,10 +1002,10 @@ impl<'a> WeeAlloc<'a> {
#[cfg(not(feature = "size_classes"))]
unsafe fn with_free_list_and_policy_for_size<F, T>(&self, size: Words, _align: Bytes, f: F) -> T
where
F: for<'b> FnOnce(&'b Cell<*const FreeCell<'a>>, &'b AllocPolicy<'a>) -> T,
F: for<'b> FnOnce(&'b Cell<*const FreeCell<'a>>, &'b dyn AllocPolicy<'a>) -> T,
{
extra_assert!(size.0 > 0);
let policy = &LARGE_ALLOC_POLICY as &AllocPolicy;
let policy = &LARGE_ALLOC_POLICY as &dyn AllocPolicy;
self.head.with_exclusive_access(|head| {
let head_cell = Cell::new(*head);
let result = f(&head_cell, policy);
Expand Down Expand Up @@ -1093,7 +1088,8 @@ impl<'a> WeeAlloc<'a> {
// immediately, whereas the consolidating with the next adjacent
// cell must be delayed, as explained above.

if let Some(prev) = free.header
if let Some(prev) = free
.header
.neighbors
.prev()
.and_then(|p| (*p).as_free_cell())
Expand All @@ -1108,7 +1104,8 @@ impl<'a> WeeAlloc<'a> {
return;
}

if let Some(next) = free.header
if let Some(next) = free
.header
.neighbors
.next()
.and_then(|n| (*n).as_free_cell())
Expand Down
2 changes: 1 addition & 1 deletion wee_alloc/src/size_classes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ where
new_cell,
new_cell_size - size_of::<CellHeader>(),
None,
self as &AllocPolicy,
self as &dyn AllocPolicy,
);
let next_cell = (new_cell.as_ptr() as *const u8).offset(new_cell_size.0 as isize);
(*free_cell)
Expand Down