Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implement intptrcast methods #779

Merged
merged 7 commits into from
Jun 26, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/fn_call.rs
Original file line number Diff line number Diff line change
Expand Up @@ -980,7 +980,7 @@ fn gen_random<'mir, 'tcx>(
}
let ptr = dest.to_ptr()?;

let data = match &mut this.machine.rng {
let data = match &mut this.memory_mut().extra.rng {
Some(rng) => {
let mut data = vec![0; len];
rng.fill_bytes(&mut data);
Expand Down
100 changes: 100 additions & 0 deletions src/intptrcast.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
use std::cell::{Cell, RefCell};

use rustc::mir::interpret::{AllocId, Pointer, InterpResult};
use rustc_mir::interpret::Memory;
use rustc_target::abi::Size;

use crate::stacked_borrows::Tag;
use crate::Evaluator;

pub type MemoryExtra = RefCell<GlobalState>;

/// Extra per-allocation state for intptrcast.
#[derive(Clone, Debug, Default)]
pub struct AllocExtra {
    // The base address of this allocation in the simulated address space,
    // assigned lazily the first time the allocation's pointer is cast to an
    // integer (see `GlobalState::ptr_to_int`). `Cell` because it is set
    // through a shared reference to the allocation.
    base_addr: Cell<Option<u64>>
}

#[derive(Clone, Debug)]
pub struct GlobalState {
/// This is used as a map between the address of each allocation and its `AllocId`.
/// It is always sorted
pub int_to_ptr_map: Vec<(u64, AllocId)>,
pvdrz marked this conversation as resolved.
Show resolved Hide resolved
/// This is used as a memory address when a new pointer is casted to an integer. It
/// is always larger than any address that was previously made part of a block.
pub next_base_addr: u64,
}

impl Default for GlobalState {
    // FIXME: Query the page size in the future
    /// Start handing out addresses at 2^16, leaving the low pages unused
    /// (a null pointer must never be a valid address).
    fn default() -> Self {
        GlobalState {
            int_to_ptr_map: Vec::new(),
            next_base_addr: 1 << 16,
        }
    }
}

impl<'mir, 'tcx> GlobalState {
    /// Turns an integer address back into a pointer, by looking up which
    /// allocation (if any) was assigned an address range containing `int`.
    /// The returned pointer is `Tag::Untagged` because it comes from a cast.
    pub fn int_to_ptr(
        int: u64,
        memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
    ) -> InterpResult<'tcx, Pointer<Tag>> {
        let global_state = memory.extra.intptrcast.borrow();

        // `int_to_ptr_map` is kept sorted by base address (see `ptr_to_int`),
        // so we can binary-search it.
        match global_state.int_to_ptr_map.binary_search_by_key(&int, |(addr, _)| *addr) {
            Ok(pos) => {
                let (_, alloc_id) = global_state.int_to_ptr_map[pos];
                // `int` is equal to the starting address of an allocation, so the offset
                // is zero. The pointer is untagged because it was created from a cast.
                Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(0), Tag::Untagged))
            },
            // `int` is smaller than every base address ever handed out, so it
            // cannot point into any allocation.
            Err(0) => err!(DanglingPointerDeref),
            Err(pos) => {
                // This is the largest of the addresses smaller than `int`,
                // i.e. the greatest lower bound (glb).
                let (glb, alloc_id) = global_state.int_to_ptr_map[pos - 1];
                // This never overflows because `int >= glb`.
                let offset = int - glb;
                // If the offset exceeds the size of the allocation, this access is illegal.
                // Note that `offset == size` is accepted here, presumably to allow
                // one-past-the-end pointers.
                if offset <= memory.get(alloc_id)?.bytes.len() as u64 {
                    // This pointer is untagged because it was created from a cast.
                    Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(offset), Tag::Untagged))
                } else {
                    err!(DanglingPointerDeref)
                }
            }
        }
    }

    /// Returns the integer address of `ptr`, assigning the allocation a base
    /// address on first use. Base addresses are handed out in strictly
    /// increasing order, which keeps `int_to_ptr_map` sorted.
    pub fn ptr_to_int(
        ptr: Pointer<Tag>,
        memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
    ) -> InterpResult<'tcx, u64> {
        let mut global_state = memory.extra.intptrcast.borrow_mut();

        let alloc = memory.get(ptr.alloc_id)?;

        let base_addr = match alloc.extra.intptrcast.base_addr.get() {
            Some(base_addr) => base_addr,
            None => {
                // This allocation does not have a base address yet, pick one
                // satisfying the allocation's alignment requirement.
                let base_addr = Self::align_addr(global_state.next_base_addr, alloc.align.bytes());
                // Advance by at least 1 byte so that two allocations never share a
                // base address, even when this allocation is zero-sized. This keeps
                // the keys of `int_to_ptr_map` unique.
                global_state.next_base_addr = base_addr + std::cmp::max(alloc.bytes.len() as u64, 1);
                // `base_addr` lives in a `Cell`, so it can be set through the
                // shared reference `alloc`.
                alloc.extra.intptrcast.base_addr.set(Some(base_addr));
                // Given that `next_base_addr` increases with each allocation, pushing the
                // corresponding tuple keeps `int_to_ptr_map` sorted.
                global_state.int_to_ptr_map.push((base_addr, ptr.alloc_id));

                base_addr
            }
        };

        // The pointer's address is its allocation's base address plus its offset.
        Ok(base_addr + ptr.offset.bytes())
    }

    /// Rounds `addr` up to the smallest multiple of `align` that is greater
    /// than or equal to `addr`. In particular, an already-aligned `addr` is
    /// returned unchanged (the previous version unconditionally added `align`,
    /// wasting address space).
    fn align_addr(addr: u64, align: u64) -> u64 {
        match addr % align {
            0 => addr,
            rem => addr + align - rem,
        }
    }
}
62 changes: 45 additions & 17 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ mod tls;
mod range_map;
mod mono_hash_map;
mod stacked_borrows;
mod intptrcast;
mod memory;

use std::collections::HashMap;
use std::borrow::Cow;
Expand Down Expand Up @@ -48,6 +50,7 @@ use crate::range_map::RangeMap;
pub use crate::helpers::{EvalContextExt as HelpersEvalContextExt};
use crate::mono_hash_map::MonoHashMap;
pub use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt};
use crate::memory::AllocExtra;

// Used by priroda.
pub use crate::stacked_borrows::{Tag, Permission, Stack, Stacks, Item};
Expand Down Expand Up @@ -79,9 +82,12 @@ pub fn create_ecx<'mir, 'tcx: 'mir>(
let mut ecx = InterpretCx::new(
tcx.at(syntax::source_map::DUMMY_SP),
ty::ParamEnv::reveal_all(),
Evaluator::new(config.validate, config.seed),
Evaluator::new(config.validate),
);

// FIXME: InterpretCx::new should take an initial MemoryExtra
ecx.memory_mut().extra.rng = config.seed.map(StdRng::seed_from_u64);
pvdrz marked this conversation as resolved.
Show resolved Hide resolved

let main_instance = ty::Instance::mono(ecx.tcx.tcx, main_id);
let main_mir = ecx.load_mir(main_instance.def)?;

Expand Down Expand Up @@ -205,7 +211,7 @@ pub fn create_ecx<'mir, 'tcx: 'mir>(
cur_ptr = cur_ptr.offset(char_size, tcx)?;
}
}

assert!(args.next().is_none(), "start lang item has more arguments than expected");

Ok(ecx)
Expand Down Expand Up @@ -341,14 +347,10 @@ pub struct Evaluator<'tcx> {

/// Whether to enforce the validity invariant.
pub(crate) validate: bool,

/// The random number generator to use if Miri
/// is running in non-deterministic mode
pub(crate) rng: Option<StdRng>
}

impl<'tcx> Evaluator<'tcx> {
fn new(validate: bool, seed: Option<u64>) -> Self {
fn new(validate: bool) -> Self {
Evaluator {
env_vars: HashMap::default(),
argc: None,
Expand All @@ -357,7 +359,6 @@ impl<'tcx> Evaluator<'tcx> {
last_error: 0,
tls: TlsData::default(),
validate,
rng: seed.map(|s| StdRng::seed_from_u64(s))
}
}
}
Expand Down Expand Up @@ -386,8 +387,8 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
type MemoryKinds = MiriMemoryKind;

type FrameExtra = stacked_borrows::CallId;
type MemoryExtra = stacked_borrows::MemoryState;
type AllocExtra = stacked_borrows::Stacks;
type MemoryExtra = memory::MemoryExtra;
type AllocExtra = memory::AllocExtra;
type PointerTag = Tag;

type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
Expand Down Expand Up @@ -512,17 +513,17 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
let alloc = alloc.into_owned();
let (extra, base_tag) = Stacks::new_allocation(
let (stacks, base_tag) = Stacks::new_allocation(
id,
Size::from_bytes(alloc.bytes.len() as u64),
Rc::clone(&memory.extra),
Rc::clone(&memory.extra.stacked_borrows),
kind,
);
if kind != MiriMemoryKind::Static.into() {
assert!(alloc.relocations.is_empty(), "Only statics can come initialized with inner pointers");
// Now we can rely on the inner pointers being static, too.
}
let mut memory_extra = memory.extra.borrow_mut();
let mut memory_extra = memory.extra.stacked_borrows.borrow_mut();
let alloc: Allocation<Tag, Self::AllocExtra> = Allocation {
bytes: alloc.bytes,
relocations: Relocations::from_presorted(
Expand All @@ -535,7 +536,10 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
undef_mask: alloc.undef_mask,
align: alloc.align,
mutability: alloc.mutability,
extra,
extra: AllocExtra {
stacked_borrows: stacks,
intptrcast: Default::default(),
},
};
(Cow::Owned(alloc), base_tag)
}
Expand All @@ -545,7 +549,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
id: AllocId,
memory: &Memory<'mir, 'tcx, Self>,
) -> Self::PointerTag {
memory.extra.borrow_mut().static_base_ptr(id)
memory.extra.stacked_borrows.borrow_mut().static_base_ptr(id)
}

#[inline(always)]
Expand All @@ -570,14 +574,38 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
fn stack_push(
ecx: &mut InterpretCx<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, stacked_borrows::CallId> {
Ok(ecx.memory().extra.borrow_mut().new_call())
Ok(ecx.memory().extra.stacked_borrows.borrow_mut().new_call())
}

#[inline(always)]
fn stack_pop(
ecx: &mut InterpretCx<'mir, 'tcx, Self>,
extra: stacked_borrows::CallId,
) -> InterpResult<'tcx> {
Ok(ecx.memory().extra.borrow_mut().end_call(extra))
Ok(ecx.memory().extra.stacked_borrows.borrow_mut().end_call(extra))
}

/// Machine hook: turn an integer value into a pointer. Only possible when
/// intptrcast is enabled, i.e. when an RNG was configured via a seed.
fn int_to_ptr(
    int: u64,
    memory: &Memory<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
    if int == 0 {
        // Address 0 is never part of any allocation.
        err!(InvalidNullPointerUsage)
    } else if memory.extra.rng.is_none() {
        // Without an RNG, intptrcast is disabled: integers cannot be
        // turned back into pointers.
        err!(ReadBytesAsPointer)
    } else {
        intptrcast::GlobalState::int_to_ptr(int, memory)
    }
}

/// Machine hook: turn a pointer into its integer address. Only possible
/// when intptrcast is enabled, i.e. when an RNG was configured via a seed.
fn ptr_to_int(
    ptr: Pointer<Self::PointerTag>,
    memory: &Memory<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, u64> {
    if memory.extra.rng.is_none() {
        // Without an RNG, intptrcast is disabled: pointers have no
        // observable integer address.
        err!(ReadPointerAsBytes)
    } else {
        intptrcast::GlobalState::ptr_to_int(ptr, memory)
    }
}
}
51 changes: 51 additions & 0 deletions src/memory.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
use rand::rngs::StdRng;

use rustc_mir::interpret::{Pointer, Allocation, AllocationExtra, InterpResult};
use rustc_target::abi::Size;

use crate::{stacked_borrows, intptrcast};
use crate::stacked_borrows::Tag;

/// Extra per-machine memory state, combining the sub-states of every
/// machine extension that needs global data.
#[derive(Default, Clone, Debug)]
pub struct MemoryExtra {
    pub stacked_borrows: stacked_borrows::MemoryExtra,
    pub intptrcast: intptrcast::MemoryExtra,
    /// The random number generator to use if Miri is running in non-deterministic mode and to
    /// enable intptrcast
    pub(crate) rng: Option<StdRng>,
}

/// Extra per-allocation state, combining the per-allocation sub-states of
/// every machine extension.
#[derive(Debug, Clone)]
pub struct AllocExtra {
    pub stacked_borrows: stacked_borrows::AllocExtra,
    pub intptrcast: intptrcast::AllocExtra,
}

/// Dispatch the allocation access hooks to the sub-states that care.
/// Only Stacked Borrows observes accesses; the intptrcast state has no
/// access hooks of its own.
impl AllocationExtra<Tag> for AllocExtra {
    #[inline(always)]
    fn memory_read<'tcx>(
        alloc: &Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        // Forward reads to the Stacked Borrows machinery.
        alloc.extra.stacked_borrows.memory_read(ptr, size)
    }

    #[inline(always)]
    fn memory_written<'tcx>(
        alloc: &mut Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        // Forward writes to the Stacked Borrows machinery.
        alloc.extra.stacked_borrows.memory_written(ptr, size)
    }

    #[inline(always)]
    fn memory_deallocated<'tcx>(
        alloc: &mut Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        // Forward deallocation to the Stacked Borrows machinery.
        alloc.extra.stacked_borrows.memory_deallocated(ptr, size)
    }
}
13 changes: 13 additions & 0 deletions src/operator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,19 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {

trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);

// If intptrcast is enabled and the operation is not an offset
// we can force the cast from pointers to integer addresses and
// then dispatch to rustc binary operation method
if self.memory().extra.rng.is_some() && bin_op != Offset {
let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
pvdrz marked this conversation as resolved.
Show resolved Hide resolved
let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;

let left = ImmTy::from_scalar(Scalar::from_uint(l_bits, left.layout.size), left.layout);
let right = ImmTy::from_scalar(Scalar::from_uint(r_bits, left.layout.size), right.layout);

return self.binary_op(bin_op, left, right);
}

// Operations that support fat pointers
match bin_op {
Eq | Ne => {
Expand Down
Loading