Commit d78b62e

Merge pull request #524 from RalfJung/escape-to-raw

Stacked Borrows beautification, update for EscapeToRaw

2 parents 021bf1f + 827e518


45 files changed: +640 -391

.gitignore
-1

@@ -2,6 +2,5 @@ target
 /doc
 tex/*/out
 *.dot
-*.mir
 *.rs.bk
 Cargo.lock

README.md
+9 -4

@@ -45,14 +45,19 @@ in this directory.
 ## Running Miri
 
 ```sh
-cargo +nightly run tests/run-pass/vecs.rs # Or whatever test you like.
+cargo +nightly run -- -Zmiri-disable-validation tests/run-pass/vecs.rs # Or whatever test you like.
 ```
 
+We have to disable validation because that can lead to errors when libstd is not
+compiled the right way.
+
 ## Running Miri with full libstd
 
-Per default libstd does not contain the MIR of non-polymorphic functions. When
-Miri hits a call to such a function, execution terminates. To fix this, it is
-possible to compile libstd with full MIR:
+Per default libstd does not contain the MIR of non-polymorphic functions, and
+also does not contain some extra MIR statements that miri needs for validation.
+When Miri hits a call to such a function, execution terminates, and even when
+the MIR is present, validation can fail. To fix this, it is possible to compile
+libstd with full MIR:
 
 ```sh
 rustup component add --toolchain nightly rust-src

rust-version
+1 -1

@@ -1 +1 @@
-nightly-2018-11-15
+nightly-2018-11-16

src/fn_call.rs
+1 -1

@@ -555,7 +555,7 @@ impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx, 'mir> for super::MiriEvalCo
             }
             "pthread_attr_getstack" => {
                 // second argument is where we are supposed to write the stack size
-                let ptr = self.ref_to_mplace(self.read_immediate(args[1])?)?;
+                let ptr = self.deref_operand(args[1])?;
                 let stackaddr = Scalar::from_int(0x80000, args[1].layout.size); // just any address
                 self.write_scalar(stackaddr, ptr.into())?;
                 // return 0
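The only substantive change here is the switch from the two-step `ref_to_mplace(read_immediate(...))` pattern to the new `deref_operand` helper; the same mechanical substitution repeats throughout src/intrinsic.rs below. A minimal standalone sketch of the equivalence, with toy stand-ins for the interpreter types (the real signatures live in rustc's interpreter and carry layout and tag information, so this is illustrative only):

```rust
// Toy stand-ins for the interpreter's operand/place types -- hypothetical,
// not Miri's actual definitions.
#[derive(Copy, Clone, Debug, PartialEq)]
struct OpTy(u64); // an operand holding a reference value
#[derive(Copy, Clone, Debug, PartialEq)]
struct Immediate(u64); // the reference value, once loaded
#[derive(Copy, Clone, Debug, PartialEq)]
struct MPlaceTy(u64); // the memory place it points to

struct EvalContext;

impl EvalContext {
    fn read_immediate(&self, op: OpTy) -> Result<Immediate, String> {
        Ok(Immediate(op.0))
    }
    fn ref_to_mplace(&self, imm: Immediate) -> Result<MPlaceTy, String> {
        Ok(MPlaceTy(imm.0))
    }
    // The new helper: the old two-step pattern bundled into one call.
    fn deref_operand(&self, op: OpTy) -> Result<MPlaceTy, String> {
        self.ref_to_mplace(self.read_immediate(op)?)
    }
}

fn main() {
    let ecx = EvalContext;
    let arg = OpTy(0x80000);
    let old_style = ecx.ref_to_mplace(ecx.read_immediate(arg).unwrap()).unwrap();
    let new_style = ecx.deref_operand(arg).unwrap();
    assert_eq!(old_style, new_style); // same place either way
}
```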

src/helpers.rs
+22 -13

@@ -33,13 +33,13 @@ impl<Tag> ScalarExt for ScalarMaybeUndef<Tag> {
 pub trait EvalContextExt<'tcx> {
     fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;
 
-    /// Visit the memory covered by `place` that is frozen -- i.e., NOT
-    /// what is inside an `UnsafeCell`.
-    fn visit_frozen(
+    /// Visit the memory covered by `place`, sensitive to freezing: The 3rd parameter
+    /// will be true if this is frozen, false if this is in an `UnsafeCell`.
+    fn visit_freeze_sensitive(
         &self,
         place: MPlaceTy<'tcx, Borrow>,
         size: Size,
-        action: impl FnMut(Pointer<Borrow>, Size) -> EvalResult<'tcx>,
+        action: impl FnMut(Pointer<Borrow>, Size, bool) -> EvalResult<'tcx>,
     ) -> EvalResult<'tcx>;
 }

@@ -79,13 +79,11 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
         })
     }
 
-    /// Visit the memory covered by `place` that is frozen -- i.e., NOT
-    /// what is inside an `UnsafeCell`.
-    fn visit_frozen(
+    fn visit_freeze_sensitive(
         &self,
         place: MPlaceTy<'tcx, Borrow>,
         size: Size,
-        mut frozen_action: impl FnMut(Pointer<Borrow>, Size) -> EvalResult<'tcx>,
+        mut action: impl FnMut(Pointer<Borrow>, Size, bool) -> EvalResult<'tcx>,
     ) -> EvalResult<'tcx> {
         trace!("visit_frozen(place={:?}, size={:?})", *place, size);
         debug_assert_eq!(size,

@@ -99,18 +97,29 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
         let mut end_ptr = place.ptr;
         // Called when we detected an `UnsafeCell` at the given offset and size.
         // Calls `action` and advances `end_ptr`.
-        let mut unsafe_cell_action = |unsafe_cell_offset, unsafe_cell_size| {
+        let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar<Borrow>, unsafe_cell_size: Size| {
+            if unsafe_cell_size != Size::ZERO {
+                debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().alloc_id,
+                    end_ptr.to_ptr().unwrap().alloc_id);
+                debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().tag,
+                    end_ptr.to_ptr().unwrap().tag);
+            }
             // We assume that we are given the fields in increasing offset order,
             // and nothing else changes.
+            let unsafe_cell_offset = unsafe_cell_ptr.get_ptr_offset(self);
             let end_offset = end_ptr.get_ptr_offset(self);
             assert!(unsafe_cell_offset >= end_offset);
             let frozen_size = unsafe_cell_offset - end_offset;
             // Everything between the end_ptr and this `UnsafeCell` is frozen.
             if frozen_size != Size::ZERO {
-                frozen_action(end_ptr.to_ptr()?, frozen_size)?;
+                action(end_ptr.to_ptr()?, frozen_size, /*frozen*/true)?;
+            }
+            // This `UnsafeCell` is NOT frozen.
+            if unsafe_cell_size != Size::ZERO {
+                action(unsafe_cell_ptr.to_ptr()?, unsafe_cell_size, /*frozen*/false)?;
             }
             // Update end end_ptr.
-            end_ptr = end_ptr.ptr_wrapping_offset(frozen_size+unsafe_cell_size, self);
+            end_ptr = unsafe_cell_ptr.ptr_wrapping_offset(unsafe_cell_size, self);
             // Done
             Ok(())
         };

@@ -126,7 +135,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
             .unwrap_or_else(|| place.layout.size_and_align());
         // Now handle this `UnsafeCell`, unless it is empty.
         if unsafe_cell_size != Size::ZERO {
-            unsafe_cell_action(place.ptr.get_ptr_offset(self), unsafe_cell_size)
+            unsafe_cell_action(place.ptr, unsafe_cell_size)
         } else {
             Ok(())
         }

@@ -136,7 +145,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
         }
         // The part between the end_ptr and the end of the place is also frozen.
         // So pretend there is a 0-sized `UnsafeCell` at the end.
-        unsafe_cell_action(place.ptr.get_ptr_offset(self) + size, Size::ZERO)?;
+        unsafe_cell_action(place.ptr.ptr_wrapping_offset(size, self), Size::ZERO)?;
         // Done!
         return Ok(());
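The rename from `visit_frozen` to `visit_freeze_sensitive` reflects a real behavior change: the callback now also fires for `UnsafeCell` ranges, with a third parameter saying whether the range is frozen. A self-contained model of the visiting logic, with plain `u64` offsets standing in for tagged pointers and a precomputed list of `UnsafeCell` ranges standing in for the type walk (both are simplifications of what Miri actually does):

```rust
/// Model of the callback protocol: `action(offset, size, frozen)` is invoked
/// for every maximal range, in order, alternating frozen / unfrozen.
fn visit_freeze_sensitive(
    size: u64,
    unsafe_cells: &[(u64, u64)], // (offset, size), in increasing offset order
    mut action: impl FnMut(u64, u64, bool),
) {
    let mut end = 0;
    for &(cell_offset, cell_size) in unsafe_cells {
        assert!(cell_offset >= end, "cells must come in increasing offset order");
        // Everything between the previous end and this `UnsafeCell` is frozen.
        if cell_offset > end {
            action(end, cell_offset - end, /*frozen*/ true);
        }
        // The `UnsafeCell` itself is NOT frozen.
        if cell_size > 0 {
            action(cell_offset, cell_size, /*frozen*/ false);
        }
        end = cell_offset + cell_size;
    }
    // Pretend there is a 0-sized `UnsafeCell` at the end: the tail is frozen.
    if size > end {
        action(end, size - end, /*frozen*/ true);
    }
}

fn main() {
    // A 16-byte place with one 4-byte `UnsafeCell` at offset 8 visits:
    // frozen 0..8, unfrozen 8..12, frozen 12..16.
    visit_freeze_sensitive(16, &[(8, 4)], |off, len, frozen| {
        println!("{:2}..{:2} frozen = {}", off, off + len, frozen);
    });
}
```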

src/intrinsic.rs
+11 -11

@@ -59,7 +59,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             "atomic_load_relaxed" |
             "atomic_load_acq" |
             "volatile_load" => {
-                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let ptr = self.deref_operand(args[0])?;
                 let val = self.read_scalar(ptr.into())?; // make sure it fits into a scalar; otherwise it cannot be atomic
                 self.write_scalar(val, dest)?;
             }

@@ -68,7 +68,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             "atomic_store_relaxed" |
             "atomic_store_rel" |
             "volatile_store" => {
-                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let ptr = self.deref_operand(args[0])?;
                 let val = self.read_scalar(args[1])?; // make sure it fits into a scalar; otherwise it cannot be atomic
                 self.write_scalar(val, ptr.into())?;
             }

@@ -78,18 +78,18 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             }
 
             _ if intrinsic_name.starts_with("atomic_xchg") => {
-                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let ptr = self.deref_operand(args[0])?;
                 let new = self.read_scalar(args[1])?;
                 let old = self.read_scalar(ptr.into())?;
                 self.write_scalar(old, dest)?; // old value is returned
                 self.write_scalar(new, ptr.into())?;
             }
 
             _ if intrinsic_name.starts_with("atomic_cxchg") => {
-                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
-                let expect_old = self.read_immediate(args[1])?; // read as value for the sake of `binary_op_imm()`
+                let ptr = self.deref_operand(args[0])?;
+                let expect_old = self.read_immediate(args[1])?; // read as immediate for the sake of `binary_op_imm()`
                 let new = self.read_scalar(args[2])?;
-                let old = self.read_immediate(ptr.into())?; // read as value for the sake of `binary_op_imm()`
+                let old = self.read_immediate(ptr.into())?; // read as immediate for the sake of `binary_op_imm()`
                 // binary_op_imm will bail if either of them is not a scalar
                 let (eq, _) = self.binary_op_imm(mir::BinOp::Eq, old, expect_old)?;
                 let res = Immediate::ScalarPair(old.to_scalar_or_undef(), eq.into());

@@ -125,7 +125,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             "atomic_xsub_rel" |
             "atomic_xsub_acqrel" |
             "atomic_xsub_relaxed" => {
-                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let ptr = self.deref_operand(args[0])?;
                 if !ptr.layout.ty.is_integral() {
                     return err!(Unimplemented(format!("Atomic arithmetic operations only work on integer types")));
                 }

@@ -167,7 +167,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             }
 
             "discriminant_value" => {
-                let place = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let place = self.deref_operand(args[0])?;
                 let discr_val = self.read_discriminant(place.into())?.0;
                 self.write_scalar(Scalar::from_uint(discr_val, dest.layout.size), dest)?;
             }

@@ -279,7 +279,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             }
 
             "move_val_init" => {
-                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let ptr = self.deref_operand(args[0])?;
                 self.copy_op(args[1], ptr.into())?;
             }

@@ -347,7 +347,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
             }
 
             "size_of_val" => {
-                let mplace = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let mplace = self.deref_operand(args[0])?;
                 let (size, _) = self.size_and_align_of_mplace(mplace)?
                     .expect("size_of_val called on extern type");
                 let ptr_size = self.pointer_size();

@@ -359,7 +359,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, '
 
             "min_align_of_val" |
             "align_of_val" => {
-                let mplace = self.ref_to_mplace(self.read_immediate(args[0])?)?;
+                let mplace = self.deref_operand(args[0])?;
                 let (_, align) = self.size_and_align_of_mplace(mplace)?
                     .expect("size_of_val called on extern type");
                 let ptr_size = self.pointer_size();
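For `atomic_cxchg`, the interpreted semantics are those of a compare-exchange: return the old value together with a flag saying whether it matched (the hunk above only shows computing the pair; the conditional store happens outside the displayed context). A value-level model of that contract, with scalars as plain `u64` and orderings ignored, which a single-threaded interpreter can safely do:

```rust
/// Value-level model of `atomic_cxchg`: returns (old, swapped).
fn cxchg(cell: &mut u64, expect_old: u64, new: u64) -> (u64, bool) {
    let old = *cell;
    let eq = old == expect_old;
    if eq {
        *cell = new; // only store when the expected value matched
    }
    (old, eq)
}

fn main() {
    let mut x = 1;
    assert_eq!(cxchg(&mut x, 1, 2), (1, true)); // matched: store happens
    assert_eq!(x, 2);
    assert_eq!(cxchg(&mut x, 1, 3), (2, false)); // mismatch: no store
    assert_eq!(x, 2);
}
```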

src/lib.rs
+41 -31

@@ -296,7 +296,6 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
 
     type AllocExtra = stacked_borrows::Stacks;
     type PointerTag = Borrow;
-    const ENABLE_PTR_TRACKING_HOOKS: bool = true;
 
     type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Borrow, Self::AllocExtra>)>;
@@ -309,16 +308,18 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
 
         // Some functions are whitelisted until we figure out how to fix them.
         // We walk up the stack a few frames to also cover their callees.
-        const WHITELIST: &[&str] = &[
+        const WHITELIST: &[(&str, &str)] = &[
             // Uses mem::uninitialized
-            "std::ptr::read",
-            "std::sys::windows::mutex::Mutex::",
+            ("std::ptr::read", ""),
+            ("std::sys::windows::mutex::Mutex::", ""),
+            // Should directly take a raw reference
+            ("<std::cell::UnsafeCell<T>>", "::get"),
         ];
         for frame in ecx.stack().iter()
             .rev().take(3)
         {
             let name = frame.instance.to_string();
-            if WHITELIST.iter().any(|white| name.starts_with(white)) {
+            if WHITELIST.iter().any(|(prefix, suffix)| name.starts_with(prefix) && name.ends_with(suffix)) {
                 return false;
             }
         }
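The whitelist entries are now (prefix, suffix) pairs so that a single method such as `<std::cell::UnsafeCell<T>>::get` can be matched without also whitelisting every other method of the type; an empty suffix keeps the old prefix-only behavior. A runnable sketch of just the matching rule (the instance names in `main` are illustrative, not actual Miri frames):

```rust
// Same shape as the diff's WHITELIST: (prefix, suffix) pairs.
const WHITELIST: &[(&str, &str)] = &[
    ("std::ptr::read", ""),
    ("<std::cell::UnsafeCell<T>>", "::get"),
];

fn is_whitelisted(name: &str) -> bool {
    // An empty suffix matches any ending, so ("std::ptr::read", "")
    // behaves exactly like the old prefix-only entry.
    WHITELIST
        .iter()
        .any(|(prefix, suffix)| name.starts_with(prefix) && name.ends_with(suffix))
}

fn main() {
    assert!(is_whitelisted("std::ptr::read::<i32>"));
    assert!(is_whitelisted("<std::cell::UnsafeCell<T>>::get"));
    assert!(!is_whitelisted("<std::cell::UnsafeCell<T>>::new")); // suffix mismatch
}
```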
@@ -446,26 +447,6 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
         Cow::Owned(alloc)
     }
 
-    #[inline(always)]
-    fn tag_reference(
-        ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
-        place: MPlaceTy<'tcx, Borrow>,
-        mutability: Option<hir::Mutability>,
-    ) -> EvalResult<'tcx, Scalar<Borrow>> {
-        let (size, _) = ecx.size_and_align_of_mplace(place)?
-            // for extern types, just cover what we can
-            .unwrap_or_else(|| place.layout.size_and_align());
-        if !ecx.machine.validate || size == Size::ZERO {
-            // No tracking
-            Ok(place.ptr)
-        } else {
-            let ptr = place.ptr.to_ptr()?;
-            let tag = ecx.tag_reference(place, size, mutability.into())?;
-            Ok(Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag)))
-        }
-    }
-
-    #[inline(always)]
     fn tag_dereference(
         ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
         place: MPlaceTy<'tcx, Borrow>,

@@ -474,11 +455,13 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
         let (size, _) = ecx.size_and_align_of_mplace(place)?
             // for extern types, just cover what we can
             .unwrap_or_else(|| place.layout.size_and_align());
-        if !ecx.machine.validate || size == Size::ZERO {
+        if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag ||
+            !Self::enforce_validity(ecx) || size == Size::ZERO
+        {
             // No tracking
             Ok(place.ptr)
         } else {
-            let ptr = place.ptr.to_ptr()?;
+            let ptr = place.ptr.to_ptr()?; // assert this is not a scalar
             let tag = ecx.tag_dereference(place, size, mutability.into())?;
             Ok(Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag)))
         }
484467
}
@@ -499,19 +482,46 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
499482
}
500483
}
501484

485+
#[inline]
486+
fn escape_to_raw(
487+
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
488+
ptr: OpTy<'tcx, Self::PointerTag>,
489+
) -> EvalResult<'tcx> {
490+
// It is tempting to check the type here, but drop glue does EscapeToRaw
491+
// on a raw pointer.
492+
// This is deliberately NOT `deref_operand` as we do not want `tag_dereference`
493+
// to be called! That would kill the original tag if we got a raw ptr.
494+
let place = ecx.ref_to_mplace(ecx.read_immediate(ptr)?)?;
495+
let (size, _) = ecx.size_and_align_of_mplace(place)?
496+
// for extern types, just cover what we can
497+
.unwrap_or_else(|| place.layout.size_and_align());
498+
if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag ||
499+
!ecx.machine.validate || size == Size::ZERO
500+
{
501+
// No tracking, or no retagging. The latter is possible because a dependency of ours
502+
// might be called with different flags than we are, so there are `Retag`
503+
// statements but we do not want to execute them.
504+
Ok(())
505+
} else {
506+
ecx.escape_to_raw(place, size)
507+
}
508+
}
509+
502510
#[inline(always)]
503511
fn retag(
504512
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
505513
fn_entry: bool,
506514
place: PlaceTy<'tcx, Borrow>,
507515
) -> EvalResult<'tcx> {
508516
if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || !Self::enforce_validity(ecx) {
509-
// No tracking, or no retagging. This is possible because a dependency of ours might be
510-
// called with different flags than we are,
517+
// No tracking, or no retagging. The latter is possible because a dependency of ours
518+
// might be called with different flags than we are, so there are `Retag`
519+
// statements but we do not want to execute them.
511520
// Also, honor the whitelist in `enforce_validity` because otherwise we might retag
512521
// uninitialized data.
513-
return Ok(())
522+
Ok(())
523+
} else {
524+
ecx.retag(fn_entry, place)
514525
}
515-
ecx.retag(fn_entry, place)
516526
}
517527
}

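Note the shared shape of the new `escape_to_raw` hook and `retag`: both first check whether `Retag`/`EscapeToRaw` statements are even being emitted and whether this frame is being validated, and only then call into the Stacked Borrows machinery. A condensed sketch of that guard, with plain booleans standing in for the `ecx` queries shown in the diff (`retag` itself does not take a size, so the size check is specific to `escape_to_raw`):

```rust
/// Condensed model of the guard both hooks apply before touching
/// the Stacked Borrows state.
fn guarded_hook(
    mir_emit_retag: bool, // stands in for the -Zmir-emit-retag query
    validate: bool,       // stands in for the per-machine validation switch
    size: u64,            // size of the place being (re)tagged
    track: impl FnOnce() -> Result<(), String>,
) -> Result<(), String> {
    if !mir_emit_retag || !validate || size == 0 {
        // No tracking, or no retagging: a dependency may have been compiled
        // with different flags, so `Retag` statements may exist that we do
        // not want to execute.
        Ok(())
    } else {
        track()
    }
}

fn main() {
    // Tracking disabled: the hook is a no-op.
    assert!(guarded_hook(false, true, 8, || Err("should not run".into())).is_ok());
    // Tracking enabled: the real work runs.
    assert!(guarded_hook(true, true, 8, || Ok(())).is_ok());
}
```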