Commit
Do not allocate a second "background" alloc id for the main allocation of a static.

Instead, within the interpreter we reuse the static's own alloc id for its initializer, pointing it at the `Allocation` that exists only inside the interpreter.
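As a concrete illustration (my own minimal example, not part of the commit): the simplest self-initializing static now gets a targeted diagnostic rather than a query cycle.

```rust
// Hypothetical minimal reproducer, assuming this is the shape of program the
// new `const_eval_recursive_static` diagnostic is aimed at: the initializer
// reads FOO's own allocation while it is still being computed.
static FOO: u32 = FOO;
// error: encountered static that tried to initialize itself with itself

fn main() {}
```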
oli-obk committed Feb 15, 2024
1 parent e238627 commit 73b38c6
Showing 19 changed files with 261 additions and 101 deletions.
2 changes: 2 additions & 0 deletions compiler/rustc_const_eval/messages.ftl
@@ -313,6 +313,8 @@ const_eval_realloc_or_alloc_with_offset =
*[other] {""}
} {$ptr} which does not point to the beginning of an object
const_eval_recursive_static = encountered static that tried to initialize itself with itself
const_eval_remainder_by_zero =
calculating the remainder with a divisor of zero
const_eval_remainder_overflow =
4 changes: 3 additions & 1 deletion compiler/rustc_const_eval/src/const_eval/error.rs
@@ -19,6 +19,7 @@ use crate::interpret::{ErrorHandled, InterpError, InterpErrorInfo, MachineStopTy
pub enum ConstEvalErrKind {
ConstAccessesMutGlobal,
ModifiedGlobal,
RecursiveStatic,
AssertFailure(AssertKind<ConstInt>),
Panic { msg: Symbol, line: u32, col: u32, file: Symbol },
}
@@ -31,13 +32,14 @@ impl MachineStopType for ConstEvalErrKind {
ConstAccessesMutGlobal => const_eval_const_accesses_mut_global,
ModifiedGlobal => const_eval_modified_global,
Panic { .. } => const_eval_panic,
RecursiveStatic => const_eval_recursive_static,
AssertFailure(x) => x.diagnostic_message(),
}
}
fn add_args(self: Box<Self>, adder: &mut dyn FnMut(DiagnosticArgName, DiagnosticArgValue)) {
use ConstEvalErrKind::*;
match *self {
ConstAccessesMutGlobal | ModifiedGlobal => {}
RecursiveStatic | ConstAccessesMutGlobal | ModifiedGlobal => {}
AssertFailure(kind) => kind.add_args(adder),
Panic { msg, line, col, file } => {
adder("msg".into(), msg.into_diagnostic_arg());
62 changes: 33 additions & 29 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -2,7 +2,6 @@ use either::{Left, Right};

use rustc_hir::def::DefKind;
use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo};
use rustc_middle::mir::pretty::write_allocation_bytes;
use rustc_middle::mir::{self, ConstAlloc, ConstValue};
use rustc_middle::traits::Reveal;
use rustc_middle::ty::layout::LayoutOf;
@@ -18,8 +17,9 @@ use crate::errors;
use crate::errors::ConstEvalError;
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate, InternKind, InterpCx,
InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, StackPopCleanup,
create_static_alloc, intern_const_alloc_recursive, take_static_root_alloc, CtfeValidationMode,
GlobalId, Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind,
OpTy, RefTracking, StackPopCleanup,
};

// Returns a pointer to where the result lives
@@ -47,7 +47,21 @@ fn eval_body_using_ecx<'mir, 'tcx>(
);
let layout = ecx.layout_of(body.bound_return_ty().instantiate(tcx, cid.instance.args))?;
assert!(layout.is_sized());
let ret = ecx.allocate(layout, MemoryKind::Stack)?;

let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};

let ret = if let InternKind::Static(_) = intern_kind {
create_static_alloc(ecx, cid.instance.def_id(), layout)?
} else {
ecx.allocate(layout, MemoryKind::Stack)?
};

trace!(
"eval_body_using_ecx: pushing stack frame for global: {}{}",
@@ -67,14 +81,6 @@ fn eval_body_using_ecx<'mir, 'tcx>(
while ecx.step()? {}

// Intern the result
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;

Ok(ret)
@@ -259,16 +265,17 @@ pub fn eval_static_initializer_provider<'tcx>(

let instance = ty::Instance::mono(tcx, def_id.to_def_id());
let cid = rustc_middle::mir::interpret::GlobalId { instance, promoted: None };
let ecx = InterpCx::new(
let mut ecx = InterpCx::new(
tcx,
tcx.def_span(def_id),
ty::ParamEnv::reveal_all(),
// Statics (and promoteds inside statics) may access other statics, because unlike consts
// they do not have to behave "as if" they were evaluated at runtime.
CompileTimeInterpreter::new(CanAccessMutGlobal::Yes, CheckAlignment::Error),
);
let alloc_id = eval_in_interpreter(ecx, cid, true)?.alloc_id;
let alloc = tcx.global_alloc(alloc_id).unwrap_memory();
let alloc_id = eval_in_interpreter(&mut ecx, cid, true)?.alloc_id;
let alloc = take_static_root_alloc(&mut ecx, alloc_id);
let alloc = tcx.mk_const_alloc(alloc);
Ok(alloc)
}

@@ -299,7 +306,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
let def = cid.instance.def.def_id();
let is_static = tcx.is_static(def);

let ecx = InterpCx::new(
let mut ecx = InterpCx::new(
tcx,
tcx.def_span(def),
key.param_env,
@@ -309,19 +316,19 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
// so we have to reject reading mutable global memory.
CompileTimeInterpreter::new(CanAccessMutGlobal::from(is_static), CheckAlignment::Error),
);
eval_in_interpreter(ecx, cid, is_static)
eval_in_interpreter(&mut ecx, cid, is_static)
}

pub fn eval_in_interpreter<'mir, 'tcx>(
mut ecx: InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
ecx: &mut InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
cid: GlobalId<'tcx>,
is_static: bool,
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
// `is_static` just means "in static", it could still be a promoted!
debug_assert_eq!(is_static, ecx.tcx.static_mutability(cid.instance.def_id()).is_some());

let res = ecx.load_mir(cid.instance.def, cid.promoted);
match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, body)) {
match res.and_then(|body| eval_body_using_ecx(ecx, cid, body)) {
Err(error) => {
let (error, backtrace) = error.into_parts();
backtrace.print_backtrace();
@@ -356,8 +363,11 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
}
Ok(mplace) => {
// Since evaluation had no errors, validate the resulting constant.
// This is a separate `try` block to provide more targeted error reporting.

// Temporarily allow access to the static_root_alloc_id for the purpose of validation.
let static_root_alloc_id = ecx.machine.static_root_alloc_id.take();
let validation = const_validate_mplace(&ecx, &mplace, cid);
ecx.machine.static_root_alloc_id = static_root_alloc_id;

let alloc_id = mplace.ptr().provenance.unwrap().alloc_id();

@@ -409,15 +419,9 @@ pub fn const_report_error<'mir, 'tcx>(

let ub_note = matches!(error, InterpError::UndefinedBehavior(_)).then(|| {});

let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner();
let mut bytes = String::new();
if alloc.size() != abi::Size::ZERO {
bytes = "\n".into();
// FIXME(translation) there might be pieces that are translatable.
write_allocation_bytes(*ecx.tcx, alloc, &mut bytes, " ").unwrap();
}
let raw_bytes =
errors::RawBytesNote { size: alloc.size().bytes(), align: alloc.align.bytes(), bytes };
let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
let (size, align, _) = ecx.get_alloc_info(alloc_id);
let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };

crate::const_eval::report(
*ecx.tcx,
15 changes: 15 additions & 0 deletions compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -58,6 +58,9 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {

/// Whether to check alignment during evaluation.
pub(super) check_alignment: CheckAlignment,

/// Used to prevent reads from a static's base allocation, as that may allow for self-initialization.
pub(crate) static_root_alloc_id: Option<AllocId>,
}

#[derive(Copy, Clone)]
@@ -91,6 +94,7 @@ impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
stack: Vec::new(),
can_access_mut_global,
check_alignment,
static_root_alloc_id: None,
}
}
}
@@ -746,6 +750,17 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
// Everything else is fine.
Ok(())
}

fn before_alloc_read(
ecx: &InterpCx<'mir, 'tcx, Self>,
alloc_id: AllocId,
) -> InterpResult<'tcx> {
if Some(alloc_id) == ecx.machine.static_root_alloc_id {
Err(ConstEvalErrKind::RecursiveStatic.into())
} else {
Ok(())
}
}
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
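To make the guard above concrete, here is a self-contained toy model (toy types and names, not rustc's real ones) of how `static_root_alloc_id` turns a read of the static's own allocation into a hard error:

```rust
type AllocId = u64;

/// Toy stand-in for the interpreter machine; it only models the recursion guard.
struct ToyMachine {
    /// Set while a static's initializer is evaluated. A read from this
    /// allocation means the static is trying to initialize itself from itself.
    static_root_alloc_id: Option<AllocId>,
}

impl ToyMachine {
    fn before_alloc_read(&self, alloc_id: AllocId) -> Result<(), &'static str> {
        if Some(alloc_id) == self.static_root_alloc_id {
            Err("encountered static that tried to initialize itself with itself")
        } else {
            Ok(())
        }
    }
}

fn main() {
    let m = ToyMachine { static_root_alloc_id: Some(7) };
    assert!(m.before_alloc_read(7).is_err()); // self-read rejected
    assert!(m.before_alloc_read(8).is_ok()); // other allocations unaffected
}
```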
14 changes: 13 additions & 1 deletion compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -899,7 +899,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.local_to_op(self.frame(), mir::RETURN_PLACE, None)
.expect("return place should always be live");
let dest = self.frame().return_place.clone();
let err = self.copy_op_allow_transmute(&op, &dest);
let err = if self.stack().len() == 1 {
// The initializer of constants and statics will get validated separately
// after the constant has been fully evaluated. While we could fall back to the default
// code path, that will cause -Zenforce-validity to cycle on static initializers.
// Reading from a static's memory is not allowed during its evaluation, and will always
// trigger a cycle error. Validation must read from the memory of the current item.
// For Miri this means we do not validate the root frame return value,
// but Miri anyway calls `read_target_isize` on that so separate validation
// is not needed.
self.copy_op_no_dest_validation(&op, &dest)
} else {
self.copy_op_allow_transmute(&op, &dest)
};
trace!("return value: {:?}", self.dump_place(&dest));
// We delay actually short-circuiting on this error until *after* the stack frame is
// popped, since we want this error to be attributed to the caller, whose type defines
32 changes: 27 additions & 5 deletions compiler/rustc_const_eval/src/interpret/intern.rs
@@ -85,6 +85,8 @@ pub enum InternKind {
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
'mir,
@@ -97,12 +99,12 @@
) -> Result<(), ErrorGuaranteed> {
// We are interning recursively, and for mutability we are distinguishing the "root" allocation
// that we are starting in, and all other allocations that we are encountering recursively.
let (base_mutability, inner_mutability) = match intern_kind {
let (base_mutability, inner_mutability, is_static) = match intern_kind {
InternKind::Constant | InternKind::Promoted => {
// Completely immutable. Interning anything mutably here can only lead to unsoundness,
// since all consts are conceptually independent values but share the same underlying
// memory.
(Mutability::Not, Mutability::Not)
(Mutability::Not, Mutability::Not, false)
}
InternKind::Static(Mutability::Not) => {
(
@@ -115,22 +117,31 @@
// Inner allocations are never mutable. They can only arise via the "tail
// expression" / "outer scope" rule, and we treat them consistently with `const`.
Mutability::Not,
true,
)
}
InternKind::Static(Mutability::Mut) => {
// Just make everything mutable. We accept code like
// `static mut X = &mut [42]`, so even inner allocations need to be mutable.
(Mutability::Mut, Mutability::Mut)
(Mutability::Mut, Mutability::Mut, true)
}
};

// Intern the base allocation, and initialize todo list for recursive interning.
let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
trace!(?base_alloc_id, ?base_mutability);
// First we intern the base allocation, as it requires a different mutability.
// This gives us the initial set of nested allocations, which will then all be processed
// recursively in the loop below.
let mut todo: Vec<_> =
intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
let mut todo: Vec<_> = if is_static {
// Do not steal the root allocation, we need it later for `take_static_root_alloc`
// But still change its mutability to match the requested one.
let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
alloc.1.mutability = base_mutability;
alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
} else {
intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect()
};
// We need to distinguish "has just been interned" from "was already in `tcx`",
// so we track this in a separate set.
let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
@@ -148,7 +159,17 @@
// before validation, and interning doesn't know the type of anything, this means we can't show
// better errors. Maybe we should consider doing validation before interning in the future.
while let Some(prov) = todo.pop() {
trace!(?prov);
let alloc_id = prov.alloc_id();

if base_alloc_id == alloc_id && is_static {
// This is a pointer to the static itself. It's ok for a static to refer to itself,
// even mutably. Whether that mutable pointer is legal at all is checked in validation.
// See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
// We also already collected all the nested allocations, so there's no need to do that again.
continue;
}

// Crucially, we check this *before* checking whether the `alloc_id`
// has already been interned. The point of this check is to ensure that when
// there are multiple pointers to the same allocation, they are *all* immutable.
@@ -176,6 +197,7 @@ pub fn intern_const_alloc_recursive<
// `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
// on the promotion analysis not screwing up to ensure that it is sound to intern
// promoteds as immutable.
trace!("found bad mutable pointer");
found_bad_mutable_pointer = true;
}
if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
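The `continue` for pointers back to the root allocation is what keeps legitimately self-referential statics working. A sketch in the spirit of the test the comment cites (tests/ui/statics/recursive_interior_mut.rs; the exact test contents are assumed here):

```rust
use std::cell::Cell;

struct Foo {
    me: Cell<Option<&'static Foo>>,
}

// `Cell` is not `Sync`, so a static holding one needs an explicit `Sync` impl.
unsafe impl Sync for Foo {}

// Taking `&FOO` only creates a pointer into the static's allocation; nothing
// *reads* that allocation during evaluation, so this passes the recursion
// check, and interning skips the pointer back to the root allocation.
static FOO: Foo = Foo { me: Cell::new(Some(&FOO)) };

fn main() {
    assert!(std::ptr::eq(FOO.me.get().unwrap(), &FOO));
}
```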
15 changes: 15 additions & 0 deletions compiler/rustc_const_eval/src/interpret/machine.rs
@@ -388,6 +388,8 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
/// Takes read-only access to the allocation so we can keep all the memory read
/// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate.
///
/// This is not invoked for ZST accesses, as no read actually happens.
#[inline(always)]
fn before_memory_read(
_tcx: TyCtxtAt<'tcx>,
@@ -399,7 +401,20 @@
Ok(())
}

/// Hook for performing extra checks on any memory read access
/// that involves an allocation, even ZST reads.
///
/// Used to prevent statics from self-initializing by reading from their own memory
/// as it is being initialized.
fn before_alloc_read(
_ecx: &InterpCx<'mir, 'tcx, Self>,
_alloc_id: AllocId,
) -> InterpResult<'tcx> {
Ok(())
}

/// Hook for performing extra checks on a memory write access.
/// This is not invoked for ZST accesses, as no write actually happens.
#[inline(always)]
fn before_memory_write(
_tcx: TyCtxtAt<'tcx>,
24 changes: 20 additions & 4 deletions compiler/rustc_const_eval/src/interpret/memory.rs
@@ -624,19 +624,20 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
size,
CheckInAllocMsg::MemoryAccessTest,
|alloc_id, offset, prov| {
// We want to call the hook on *all* accesses that involve an AllocId,
// including zero-sized accesses. That means we have to do it here
// rather than below in the `Some` branch.
M::before_alloc_read(self, alloc_id)?;
let alloc = self.get_alloc_raw(alloc_id)?;
Ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
},
)?;

if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
let range = alloc_range(offset, size);
M::before_memory_read(self.tcx, &self.machine, &alloc.extra, (alloc_id, prov), range)?;
Ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
} else {
// Even in this branch we have to be sure that we actually access the allocation, in
// order to ensure that `static FOO: Type = FOO;` causes a cycle error instead of
// magically pulling *any* ZST value from the ether. However, the `get_raw` above is
// always called when `ptr` has an `AllocId`.
Ok(None)
}
}
@@ -855,6 +856,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
DumpAllocs { ecx: self, allocs }
}

/// Print the allocation's bytes, without any nested allocations.
pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
// Using the "raw" access to avoid the `before_alloc_read` hook, we specifically
// want to be able to read all memory for diagnostics, even if that is cyclic.
let alloc = self.get_alloc_raw(id).unwrap();
let mut bytes = String::new();
if alloc.size() != Size::ZERO {
bytes = "\n".into();
// FIXME(translation) there might be pieces that are translatable.
rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
.unwrap();
}
bytes
}

/// Find leaked allocations. Allocations reachable from `static_roots` or a `Global` allocation
/// are not considered leaked, as well as leaks whose kind's `may_leak()` returns true.
pub fn find_leaked_allocations(
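The "even ZST reads" wrinkle deserves an example (again mine, not from the diff): a zero-sized read copies no bytes, yet must still count as an access to the allocation, or the recursion would go unnoticed.

```rust
// Although `()` is zero-sized and the read copies no bytes, the initializer
// still conceptually accesses FOO's allocation. Because `before_alloc_read`
// fires even for ZST reads, this is reported as a recursive static instead of
// magically pulling a `()` from the ether, as the old comment put it.
static FOO: () = FOO;

fn main() {}
```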