Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove ConstValue::Slice #105653

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock
Original file line number Diff line number Diff line change
Expand Up @@ -3821,6 +3821,7 @@ dependencies = [
"rustc_apfloat",
"rustc_arena",
"rustc_ast",
"rustc_const_eval",
"rustc_data_structures",
"rustc_errors",
"rustc_fluent_macro",
Expand Down
10 changes: 0 additions & 10 deletions compiler/rustc_codegen_cranelift/src/constant.rs
Original file line number Diff line number Diff line change
Expand Up @@ -227,16 +227,6 @@ pub(crate) fn codegen_const_value<'tcx>(
.offset_i64(fx, i64::try_from(offset.bytes()).unwrap()),
layout,
),
ConstValue::Slice { data, start, end } => {
let ptr = pointer_for_allocation(fx, data)
.offset_i64(fx, i64::try_from(start).unwrap())
.get_addr(fx);
let len = fx
.bcx
.ins()
.iconst(fx.pointer_type, i64::try_from(end.checked_sub(start).unwrap()).unwrap());
CValue::by_val_pair(ptr, len, layout)
}
}
}

Expand Down
18 changes: 1 addition & 17 deletions compiler/rustc_codegen_ssa/src/mir/operand.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use crate::traits::*;
use crate::MemFlags;

use rustc_middle::mir;
use rustc_middle::mir::interpret::{alloc_range, ConstValue, Pointer, Scalar};
use rustc_middle::mir::interpret::{alloc_range, ConstValue};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::Ty;
use rustc_target::abi::{self, Abi, Align, Size};
Expand Down Expand Up @@ -100,22 +100,6 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
OperandValue::Immediate(llval)
}
ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
ConstValue::Slice { data, start, end } => {
let Abi::ScalarPair(a_scalar, _) = layout.abi else {
bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
};
let a = Scalar::from_pointer(
Pointer::new(bx.tcx().create_memory_alloc(data), Size::from_bytes(start)),
&bx.tcx(),
);
let a_llval = bx.scalar_to_backend(
a,
a_scalar,
bx.scalar_pair_element_backend_type(layout, 0, true),
);
let b_llval = bx.const_usize((end - start) as u64);
OperandValue::Pair(a_llval, b_llval)
}
ConstValue::ByRef { alloc, offset } => {
return Self::from_const_alloc(bx, layout, alloc, offset);
}
Expand Down
41 changes: 15 additions & 26 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ use super::{CompileTimeEvalContext, CompileTimeInterpreter};
use crate::errors;
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
RefTracking, StackPopCleanup,
intern_const_alloc_recursive, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId, Immediate,
InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
StackPopCleanup,
};

// Returns a pointer to where the result lives
Expand Down Expand Up @@ -108,7 +108,7 @@ pub(super) fn mk_eval_cx<'mir, 'tcx>(
/// type system.
#[instrument(skip(ecx), level = "debug")]
pub(super) fn op_to_const<'tcx>(
ecx: &CompileTimeEvalContext<'_, 'tcx>,
ecx: &mut CompileTimeEvalContext<'_, 'tcx>,
op: &OpTy<'tcx>,
) -> ConstValue<'tcx> {
// We do not have value optimizations for everything.
Expand Down Expand Up @@ -143,12 +143,14 @@ pub(super) fn op_to_const<'tcx>(

debug!(?immediate);

let tcx = ecx.tcx;

// We know `offset` is relative to the allocation, so we can use `into_parts`.
let to_const_value = |mplace: &MPlaceTy<'_>| {
debug!("to_const_value(mplace: {:?})", mplace);
match mplace.ptr.into_parts() {
(Some(alloc_id), offset) => {
let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
let alloc = tcx.global_alloc(alloc_id).unwrap_memory();
ConstValue::ByRef { alloc, offset }
}
(None, offset) => {
Expand All @@ -169,24 +171,12 @@ pub(super) fn op_to_const<'tcx>(
Right(imm) => match *imm {
_ if imm.layout.is_zst() => ConstValue::ZeroSized,
Immediate::Scalar(x) => ConstValue::Scalar(x),
Immediate::ScalarPair(a, b) => {
debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (data, start) = match a.to_pointer(ecx).unwrap().into_parts() {
(Some(alloc_id), offset) => {
(ecx.tcx.global_alloc(alloc_id).unwrap_memory(), offset.bytes())
}
(None, _offset) => (
ecx.tcx.mk_const_alloc(Allocation::from_bytes_byte_aligned_immutable(
b"" as &[u8],
)),
0,
),
};
let len = b.to_target_usize(ecx).unwrap();
let start = start.try_into().unwrap();
let len: usize = len.try_into().unwrap();
ConstValue::Slice { data, start, end: start + len }
Immediate::ScalarPair(..) => {
let place = ecx.allocate(imm.layout, MemoryKind::Stack).unwrap();
ecx.write_immediate(*imm, &place.into()).unwrap();
intern_const_alloc_recursive(ecx, InternKind::Constant, &place).unwrap();

to_const_value(&place)
}
Immediate::Uninit => to_const_value(&op.assert_mem_place()),
},
Expand All @@ -202,8 +192,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
let cid = key.value;
let def_id = cid.instance.def.def_id();
let is_static = tcx.is_static(def_id);
// This is just accessing an already computed constant, so no need to check alignment here.
let ecx = mk_eval_cx(
let mut ecx = mk_eval_cx(
tcx,
tcx.def_span(key.value.instance.def_id()),
key.param_env,
Expand All @@ -220,7 +209,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
);

// Turn this into a proper constant.
op_to_const(&ecx, &mplace.into())
op_to_const(&mut ecx, &mplace.into())
}

#[instrument(skip(tcx), level = "debug")]
Expand Down
27 changes: 23 additions & 4 deletions compiler/rustc_const_eval/src/const_eval/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@

use crate::errors::MaxNumNodesInConstErr;
use crate::interpret::{
intern_const_alloc_recursive, ConstValue, InternKind, InterpCx, InterpResult, Scalar,
intern_const_alloc_recursive, ConstValue, ImmTy, InternKind, InterpCx, InterpResult, Scalar,
};
use rustc_middle::mir;
use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId};
use rustc_middle::mir::interpret::{ConstAllocation, EvalToValTreeResult, GlobalId};
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::{source_map::DUMMY_SP, symbol::Symbol};

Expand Down Expand Up @@ -91,7 +91,7 @@ pub(crate) fn try_destructure_mir_constant<'tcx>(
val: mir::ConstantKind<'tcx>,
) -> InterpResult<'tcx, mir::DestructuredConstant<'tcx>> {
trace!("destructure_mir_constant: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let op = ecx.eval_mir_constant(&val, None, None)?;

// We go to `usize` as we cannot allocate anything bigger anyway.
Expand All @@ -112,11 +112,30 @@ pub(crate) fn try_destructure_mir_constant<'tcx>(
let fields_iter = (0..field_count)
.map(|i| {
let field_op = ecx.operand_field(&down, i)?;
let val = op_to_const(&ecx, &field_op);
let val = op_to_const(&mut ecx, &field_op);
Ok(mir::ConstantKind::Val(val, field_op.layout.ty))
})
.collect::<InterpResult<'tcx, Vec<_>>>()?;
let fields = tcx.arena.alloc_from_iter(fields_iter);

Ok(mir::DestructuredConstant { variant, fields })
}

/// Creates an `&[u8]` slice pointing to the given allocation
/// (covering it entirely, i.e., the length is the allocation size).
pub fn slice_for_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc: ConstAllocation<'tcx>) -> ConstValue<'tcx> {
// Register the allocation globally so a pointer scalar can refer to it by id.
let alloc_id = tcx.create_memory_alloc(alloc);

// A slice reference is a scalar pair: (data pointer, length). Point at
// offset 0 of the allocation and use its full size in bytes as the length.
let a = Scalar::from_pointer(alloc_id.into(), &tcx);
let b = Scalar::from_target_usize(alloc.0.size().bytes(), &tcx);
let imm = crate::interpret::Immediate::ScalarPair(a, b);
// An eval context is needed only so `op_to_const` can materialize the
// scalar pair (its ScalarPair path allocates a place, writes the immediate,
// and interns it). `reveal_all` is fine: `&'static [u8]` has no generics.
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);

// Layout of `&'static [u8]`, the type of the resulting constant.
let layout = tcx
.layout_of(
ty::ParamEnv::reveal_all()
.and(tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.mk_slice(tcx.types.u8))),
)
.unwrap();
op_to_const(&mut ecx, &ImmTy::from_immediate(imm, layout).into())
}
4 changes: 2 additions & 2 deletions compiler/rustc_const_eval/src/const_eval/valtrees.rs
Original file line number Diff line number Diff line change
Expand Up @@ -296,9 +296,9 @@ pub fn valtree_to_const_value<'tcx>(
let imm =
ImmTy::from_immediate(ref_place, tcx.layout_of(param_env_ty).unwrap());

op_to_const(&ecx, &imm.into())
op_to_const(&mut ecx, &imm.into())
}
_ => op_to_const(&ecx, &place.into()),
_ => op_to_const(&mut ecx, &place.into()),
}
}
ty::Never
Expand Down
13 changes: 6 additions & 7 deletions compiler/rustc_const_eval/src/interpret/intrinsics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,7 @@
use rustc_hir::def_id::DefId;
use rustc_middle::mir::{
self,
interpret::{
Allocation, ConstAllocation, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar,
},
interpret::{Allocation, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar},
BinOp, NonDivergingIntrinsic,
};
use rustc_middle::ty;
Expand All @@ -17,6 +15,8 @@ use rustc_middle::ty::{Ty, TyCtxt};
use rustc_span::symbol::{sym, Symbol};
use rustc_target::abi::{Abi, Align, Primitive, Size};

use crate::const_eval::slice_for_alloc;

use super::{
util::ensure_monomorphic_enough, CheckInAllocMsg, ImmTy, InterpCx, Machine, OpTy, PlaceTy,
Pointer,
Expand Down Expand Up @@ -44,10 +44,10 @@ fn numeric_intrinsic<Prov>(name: Symbol, bits: u128, kind: Primitive) -> Scalar<
}

/// Directly returns an `Allocation` containing an absolute path representation of the given type.
pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
/// Returns a `ConstValue` holding an `&'static [u8]` slice containing the
/// absolute path representation of the given type.
pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstValue<'tcx> {
// Render the type's fully-qualified name and store its bytes in an
// immutable, byte-aligned allocation.
let path = crate::util::type_name(tcx, ty);
let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
// Wrap the interned allocation in a slice constant covering the whole string.
slice_for_alloc(tcx, tcx.mk_const_alloc(alloc))
}

/// The logic for all nullary intrinsics is implemented here. These intrinsics don't get evaluated
Expand All @@ -63,8 +63,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
Ok(match name {
sym::type_name => {
ensure_monomorphic_enough(tcx, tp_ty)?;
let alloc = alloc_type_name(tcx, tp_ty);
ConstValue::Slice { data: alloc, start: 0, end: alloc.inner().len() }
alloc_type_name(tcx, tp_ty)
}
sym::needs_drop => {
ensure_monomorphic_enough(tcx, tp_ty)?;
Expand Down
13 changes: 0 additions & 13 deletions compiler/rustc_const_eval/src/interpret/operand.rs
Original file line number Diff line number Diff line change
Expand Up @@ -657,19 +657,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
ConstValue::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()),
ConstValue::ZeroSized => Operand::Immediate(Immediate::Uninit),
ConstValue::Slice { data, start, end } => {
// We rely on mutability being set correctly in `data` to prevent writes
// where none should happen.
let ptr = Pointer::new(
self.tcx.create_memory_alloc(data),
Size::from_bytes(start), // offset: `start`
);
Operand::Immediate(Immediate::new_slice(
Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx),
u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
self,
))
}
};
Ok(OpTy { op, layout, align: Some(layout.align.abi) })
}
Expand Down
36 changes: 27 additions & 9 deletions compiler/rustc_middle/src/mir/interpret/allocation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -416,7 +416,8 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
/// even when new allocations are pushed to the `HashMap`. `mem_copy_repeatedly` relies
/// on that.
#[inline]
pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
pub fn get_bytes_unchecked(&self, range: impl Into<AllocRange>) -> &[u8] {
// Generic over `Into<AllocRange>` so callers can pass range-like values directly.
let range = range.into();
// Raw slice of the backing bytes: no initialization or provenance checks
// are performed here ("unchecked") -- callers are responsible for those.
&self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
}

Expand All @@ -430,8 +431,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
pub fn get_bytes_strip_provenance(
&self,
cx: &impl HasDataLayout,
range: AllocRange,
range: impl Into<AllocRange>,
oli-obk marked this conversation as resolved.
Show resolved Hide resolved
) -> AllocResult<&[u8]> {
let range = range.into();
self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
AllocError::InvalidUninitBytes(Some(UninitBytesAccess {
access: range,
Expand All @@ -455,8 +457,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
pub fn get_bytes_mut(
&mut self,
cx: &impl HasDataLayout,
range: AllocRange,
range: impl Into<AllocRange>,
) -> AllocResult<&mut [u8]> {
let range = range.into();
self.mark_init(range, true);
self.provenance.clear(range, cx)?;

Expand All @@ -467,8 +470,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
pub fn get_bytes_mut_ptr(
&mut self,
cx: &impl HasDataLayout,
range: AllocRange,
range: impl Into<AllocRange>,
) -> AllocResult<*mut [u8]> {
let range = range.into();
self.mark_init(range, true);
self.provenance.clear(range, cx)?;

Expand All @@ -482,7 +486,8 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
/// Reading and writing.
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
/// Sets the init bit for the given range.
fn mark_init(&mut self, range: AllocRange, is_init: bool) {
fn mark_init(&mut self, range: impl Into<AllocRange>, is_init: bool) {
let range = range.into();
if range.size.bytes() == 0 {
return;
}
Expand All @@ -503,9 +508,10 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
pub fn read_scalar(
&self,
cx: &impl HasDataLayout,
range: AllocRange,
range: impl Into<AllocRange>,
read_provenance: bool,
) -> AllocResult<Scalar<Prov>> {
let range = range.into();
// First and foremost, if anything is uninit, bail.
if self.init_mask.is_range_initialized(range).is_err() {
return Err(AllocError::InvalidUninitBytes(None));
Expand Down Expand Up @@ -565,9 +571,10 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
pub fn write_scalar(
&mut self,
cx: &impl HasDataLayout,
range: AllocRange,
range: impl Into<AllocRange>,
val: Scalar<Prov>,
) -> AllocResult {
let range = range.into();
assert!(self.mutability == Mutability::Mut);

// `to_bits_or_ptr_internal` is the right method because we just want to store this data
Expand All @@ -594,7 +601,12 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
}

/// Write "uninit" to the given memory range.
pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
pub fn write_uninit(
&mut self,
cx: &impl HasDataLayout,
range: impl Into<AllocRange>,
) -> AllocResult {
let range = range.into();
self.mark_init(range, false);
self.provenance.clear(range, cx)?;
return Ok(());
Expand All @@ -614,7 +626,13 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
///
/// This is dangerous to use as it can violate internal `Allocation` invariants!
/// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
pub fn init_mask_apply_copy(
&mut self,
copy: InitCopy,
range: impl Into<AllocRange>,
repeat: u64,
) {
// Normalize to a concrete `AllocRange` before delegating to the init mask.
let range = range.into();
self.init_mask.apply_copy(copy, range, repeat)
}
}
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/mir/interpret/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ pub use self::error::{
ValidationErrorInfo, ValidationErrorKind,
};

pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
pub use self::value::{ConstAlloc, ConstValue, Scalar};

pub use self::allocation::{
alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,
Expand Down
Loading