Skip to content

Commit a508a80

Browse files
committed
Forward some function calls of Memory to Allocation
1 parent 691bb72 commit a508a80

File tree

7 files changed

+285
-18
lines changed

7 files changed

+285
-18
lines changed

src/librustc/mir/interpret/allocation.rs

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ pub struct Allocation<Tag=(),Extra=()> {
5656
pub extra: Extra,
5757
}
5858

59-
pub trait AllocationExtra<Tag> {
59+
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Default + Clone {
6060
/// Hook for performing extra checks on a memory access.
6161
///
6262
/// Takes read-only access to the allocation so we can keep all the memory read
@@ -72,6 +72,9 @@ pub trait AllocationExtra<Tag> {
7272
}
7373
}
7474

75+
/// For the const evaluator
76+
impl AllocationExtra<()> for () {}
77+
7578
impl<Tag, Extra: Default> Allocation<Tag, Extra> {
7679
/// Creates a read-only allocation initialized by the given bytes
7780
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
@@ -144,7 +147,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
144147
}
145148

146149
#[inline]
147-
fn get_bytes(
150+
pub fn get_bytes(
148151
&self,
149152
cx: impl HasDataLayout,
150153
ptr: Pointer<Tag>,
@@ -157,7 +160,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
157160
/// It is the caller's responsibility to handle undefined and pointer bytes.
158161
/// However, this still checks that there are no relocations on the *edges*.
159162
#[inline]
160-
fn get_bytes_with_undef_and_ptr(
163+
pub fn get_bytes_with_undef_and_ptr(
161164
&self,
162165
cx: impl HasDataLayout,
163166
ptr: Pointer<Tag>,
@@ -169,7 +172,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
169172

170173
/// Just calling this already marks everything as defined and removes relocations,
171174
/// so be sure to actually put data there!
172-
fn get_bytes_mut(
175+
pub fn get_bytes_mut(
173176
&mut self,
174177
cx: impl HasDataLayout,
175178
ptr: Pointer<Tag>,
@@ -408,7 +411,7 @@ fn int_align(cx: impl HasDataLayout, size: Size) -> Align {
408411
/// Relocations
409412
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
410413
/// Return all relocations overlapping with the given ptr-offset pair.
411-
fn relocations(
414+
pub fn relocations(
412415
&self,
413416
cx: impl HasDataLayout,
414417
ptr: Pointer<Tag>,

src/librustc/mir/interpret/mod.rs

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ pub use self::error::{
2626

2727
pub use self::value::{Scalar, ConstValue};
2828

29-
pub use self::allocation::{Allocation, MemoryAccess};
29+
pub use self::allocation::{Allocation, MemoryAccess, AllocationExtra};
3030

3131
use std::fmt;
3232
use mir;
@@ -810,3 +810,8 @@ impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
810810
self.not_undef()?.to_isize(cx)
811811
}
812812
}
813+
814+
impl_stable_hash_for!(enum ::mir::interpret::ScalarMaybeUndef {
815+
Scalar(v),
816+
Undef
817+
});

src/librustc_mir/interpret/machine.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ use rustc::ty::{self, Ty, layout::{Size, TyLayout}, query::TyCtxtAt};
2121

2222
use super::{
2323
Allocation, AllocId, EvalResult, Scalar,
24-
EvalContext, PlaceTy, OpTy, Pointer, MemoryKind, MemoryAccess,
24+
EvalContext, PlaceTy, OpTy, Pointer, MemoryKind, AllocationExtra,
2525
};
2626

2727
/// Whether this kind of memory is allowed to leak
@@ -77,7 +77,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
7777
type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;
7878

7979
/// Extra data stored in every allocation.
80-
type AllocExtra: ::std::fmt::Debug + Default + Clone;
80+
type AllocExtra: AllocationExtra<Self::PointerTag>;
8181

8282
/// Memory's allocation map
8383
type MemoryMap:

src/librustc_mir/interpret/memory.rs

Lines changed: 266 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,16 +21,16 @@ use std::ptr;
2121
use std::borrow::Cow;
2222

2323
use rustc::ty::{self, Instance, ParamEnv, query::TyCtxtAt};
24-
use rustc::ty::layout::{self, Align, TargetDataLayout, Size, HasDataLayout};
25-
pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint};
24+
use rustc::ty::layout::{Align, TargetDataLayout, Size, HasDataLayout};
25+
pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint, MemoryAccess};
2626
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
2727

2828
use syntax::ast::Mutability;
2929

3030
use super::{
3131
Pointer, AllocId, Allocation, ConstValue, GlobalId,
3232
EvalResult, Scalar, EvalErrorKind, AllocType, PointerArithmetic,
33-
Machine, MemoryAccess, AllocMap, MayLeak, ScalarMaybeUndef,
33+
Machine, AllocMap, MayLeak, ScalarMaybeUndef, AllocationExtra,
3434
};
3535

3636
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@@ -297,6 +297,17 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
297297
})
298298
}
299299
}
300+
301+
/// Convenience forwarding method for `Allocation::check_bounds`.
302+
#[inline(always)]
303+
pub fn check_bounds(
304+
&self,
305+
ptr: Pointer<M::PointerTag>,
306+
size: Size,
307+
access: bool
308+
) -> EvalResult<'tcx> {
309+
self.get(ptr.alloc_id)?.check_bounds(self, ptr, size, access)
310+
}
300311
}
301312

302313
/// Allocation accessors
@@ -582,6 +593,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
582593
impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
583594
where
584595
M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=()>,
596+
M::AllocExtra: AllocationExtra<()>,
585597
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
586598
{
587599
/// mark an allocation as static and initialized, either mutable or not
@@ -621,3 +633,254 @@ where
621633
Ok(())
622634
}
623635
}
636+
637+
/// Reading and writing
638+
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
639+
pub fn copy(
640+
&mut self,
641+
src: Scalar<M::PointerTag>,
642+
src_align: Align,
643+
dest: Scalar<M::PointerTag>,
644+
dest_align: Align,
645+
size: Size,
646+
nonoverlapping: bool,
647+
) -> EvalResult<'tcx> {
648+
self.copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping)
649+
}
650+
651+
pub fn copy_repeatedly(
652+
&mut self,
653+
src: Scalar<M::PointerTag>,
654+
src_align: Align,
655+
dest: Scalar<M::PointerTag>,
656+
dest_align: Align,
657+
size: Size,
658+
length: u64,
659+
nonoverlapping: bool,
660+
) -> EvalResult<'tcx> {
661+
if size.bytes() == 0 {
662+
// Nothing to do for ZST, other than checking alignment and non-NULLness.
663+
self.check_align(src, src_align)?;
664+
self.check_align(dest, dest_align)?;
665+
return Ok(());
666+
}
667+
let src = src.to_ptr()?;
668+
let dest = dest.to_ptr()?;
669+
670+
// first copy the relocations to a temporary buffer, because
671+
// `get_bytes_mut` will clear the relocations, which is correct,
672+
// since we don't want to keep any relocations at the target.
673+
// (`get_bytes_with_undef_and_ptr` below checks that there are no
674+
// relocations overlapping the edges; those would not be handled correctly).
675+
let relocations = {
676+
let relocations = self.relocations(src, size)?;
677+
let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
678+
for i in 0..length {
679+
new_relocations.extend(
680+
relocations
681+
.iter()
682+
.map(|&(offset, reloc)| {
683+
(offset + dest.offset - src.offset + (i * size * relocations.len() as u64),
684+
reloc)
685+
})
686+
);
687+
}
688+
689+
new_relocations
690+
};
691+
692+
let tcx = self.tcx.tcx;
693+
694+
// This also checks alignment, and relocation edges on the src.
695+
let src_bytes = self
696+
.get(src.alloc_id)?
697+
.get_bytes_with_undef_and_ptr(tcx, src, size, src_align)?
698+
.as_ptr();
699+
let dest_bytes = self
700+
.get_mut(dest.alloc_id)?
701+
.get_bytes_mut(tcx, dest, size * length, dest_align)?
702+
.as_mut_ptr();
703+
704+
// SAFE: The above indexing would have panicked if there weren't at least `size` bytes
705+
// behind `src` and `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and
706+
// `dest` could possibly overlap.
707+
// The pointers above remain valid even if the `HashMap` table is moved around because they
708+
// point into the `Vec` storing the bytes.
709+
unsafe {
710+
assert_eq!(size.bytes() as usize as u64, size.bytes());
711+
if src.alloc_id == dest.alloc_id {
712+
if nonoverlapping {
713+
if (src.offset <= dest.offset && src.offset + size > dest.offset) ||
714+
(dest.offset <= src.offset && dest.offset + size > src.offset)
715+
{
716+
return err!(Intrinsic(
717+
"copy_nonoverlapping called on overlapping ranges".to_string(),
718+
));
719+
}
720+
}
721+
722+
for i in 0..length {
723+
ptr::copy(src_bytes,
724+
dest_bytes.offset((size.bytes() * i) as isize),
725+
size.bytes() as usize);
726+
}
727+
} else {
728+
for i in 0..length {
729+
ptr::copy_nonoverlapping(src_bytes,
730+
dest_bytes.offset((size.bytes() * i) as isize),
731+
size.bytes() as usize);
732+
}
733+
}
734+
}
735+
736+
// copy definedness to the destination
737+
self.copy_undef_mask(src, dest, size, length)?;
738+
// copy the relocations to the destination
739+
self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
740+
741+
Ok(())
742+
}
743+
744+
pub fn read_c_str(&self, ptr: Pointer<M::PointerTag>) -> EvalResult<'tcx, &[u8]> {
745+
self.get(ptr.alloc_id)?.read_c_str(self, ptr)
746+
}
747+
748+
pub fn check_bytes(
749+
&self,
750+
ptr: Scalar<M::PointerTag>,
751+
size: Size,
752+
allow_ptr_and_undef: bool,
753+
) -> EvalResult<'tcx> {
754+
// Empty accesses don't need to be valid pointers, but they should still be non-NULL
755+
let align = Align::from_bytes(1, 1).unwrap();
756+
if size.bytes() == 0 {
757+
self.check_align(ptr, align)?;
758+
return Ok(());
759+
}
760+
let ptr = ptr.to_ptr()?;
761+
self.get(ptr.alloc_id)?.check_bytes(self, ptr, size, allow_ptr_and_undef)
762+
}
763+
764+
pub fn read_bytes(&self, ptr: Scalar<M::PointerTag>, size: Size) -> EvalResult<'tcx, &[u8]> {
765+
// Empty accesses don't need to be valid pointers, but they should still be non-NULL
766+
let align = Align::from_bytes(1, 1).unwrap();
767+
if size.bytes() == 0 {
768+
self.check_align(ptr, align)?;
769+
return Ok(&[]);
770+
}
771+
let ptr = ptr.to_ptr()?;
772+
self.get(ptr.alloc_id)?.get_bytes(self, ptr, size, align)
773+
}
774+
775+
pub fn write_bytes(&mut self, ptr: Scalar<M::PointerTag>, src: &[u8]) -> EvalResult<'tcx> {
776+
// Empty accesses don't need to be valid pointers, but they should still be non-NULL
777+
let align = Align::from_bytes(1, 1).unwrap();
778+
if src.is_empty() {
779+
self.check_align(ptr, align)?;
780+
return Ok(());
781+
}
782+
let ptr = ptr.to_ptr()?;
783+
let tcx = self.tcx.tcx;
784+
self.get_mut(ptr.alloc_id)?.write_bytes(tcx, ptr, src)
785+
}
786+
787+
pub fn write_repeat(
788+
&mut self,
789+
ptr: Scalar<M::PointerTag>,
790+
val: u8,
791+
count: Size
792+
) -> EvalResult<'tcx> {
793+
// Empty accesses don't need to be valid pointers, but they should still be non-NULL
794+
let align = Align::from_bytes(1, 1).unwrap();
795+
if count.bytes() == 0 {
796+
self.check_align(ptr, align)?;
797+
return Ok(());
798+
}
799+
let ptr = ptr.to_ptr()?;
800+
let tcx = self.tcx.tcx;
801+
self.get_mut(ptr.alloc_id)?.write_repeat(tcx, ptr, val, count)
802+
}
803+
804+
/// Read a *non-ZST* scalar
805+
pub fn read_scalar(
806+
&self,
807+
ptr: Pointer<M::PointerTag>,
808+
ptr_align: Align,
809+
size: Size
810+
) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
811+
self.get(ptr.alloc_id)?.read_scalar(self, ptr, ptr_align, size)
812+
}
813+
814+
pub fn read_ptr_sized(
815+
&self,
816+
ptr: Pointer<M::PointerTag>,
817+
ptr_align: Align
818+
) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
819+
self.read_scalar(ptr, ptr_align, self.pointer_size())
820+
}
821+
822+
/// Write a *non-ZST* scalar
823+
pub fn write_scalar(
824+
&mut self,
825+
ptr: Pointer<M::PointerTag>,
826+
ptr_align: Align,
827+
val: ScalarMaybeUndef<M::PointerTag>,
828+
type_size: Size,
829+
) -> EvalResult<'tcx> {
830+
let tcx = self.tcx.tcx;
831+
self.get_mut(ptr.alloc_id)?.write_scalar(tcx, ptr, ptr_align, val, type_size)
832+
}
833+
834+
pub fn write_ptr_sized(
835+
&mut self,
836+
ptr: Pointer<M::PointerTag>,
837+
ptr_align: Align,
838+
val: ScalarMaybeUndef<M::PointerTag>
839+
) -> EvalResult<'tcx> {
840+
let ptr_size = self.pointer_size();
841+
self.write_scalar(ptr, ptr_align, val, ptr_size)
842+
}
843+
}
844+
845+
/// Relocations
846+
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
847+
/// Return all relocations overlapping with the given ptr-offset pair.
848+
fn relocations(
849+
&self,
850+
ptr: Pointer<M::PointerTag>,
851+
size: Size,
852+
) -> EvalResult<'tcx, &[(Size, (M::PointerTag, AllocId))]> {
853+
self.get(ptr.alloc_id)?.relocations(self, ptr, size)
854+
}
855+
}
856+
857+
/// Undefined bytes
858+
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
859+
// FIXME: Add a fast version for the common, nonoverlapping case
860+
fn copy_undef_mask(
861+
&mut self,
862+
src: Pointer<M::PointerTag>,
863+
dest: Pointer<M::PointerTag>,
864+
size: Size,
865+
repeat: u64,
866+
) -> EvalResult<'tcx> {
867+
// The bits have to be saved locally before writing to dest in case src and dest overlap.
868+
assert_eq!(size.bytes() as usize as u64, size.bytes());
869+
870+
let undef_mask = self.get(src.alloc_id)?.undef_mask.clone();
871+
let dest_allocation = self.get_mut(dest.alloc_id)?;
872+
873+
for i in 0..size.bytes() {
874+
let defined = undef_mask.get(src.offset + Size::from_bytes(i));
875+
876+
for j in 0..repeat {
877+
dest_allocation.undef_mask.set(
878+
dest.offset + Size::from_bytes(i + (size.bytes() * j)),
879+
defined
880+
);
881+
}
882+
}
883+
884+
Ok(())
885+
}
886+
}

0 commit comments

Comments
 (0)