@@ -609,90 +609,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
     }
 }
 
-/// Byte accessors
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
-    /// The last argument controls whether we error out when there are undefined
-    /// or pointer bytes. You should never call this, call `get_bytes` or
-    /// `get_bytes_with_undef_and_ptr` instead,
-    ///
-    /// This function also guarantees that the resulting pointer will remain stable
-    /// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
-    /// on that.
-    fn get_bytes_internal(
-        &self,
-        ptr: Pointer<M::PointerTag>,
-        size: Size,
-        align: Align,
-        check_defined_and_ptr: bool,
-    ) -> EvalResult<'tcx, &[u8]> {
-        assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
-        self.check_align(ptr.into(), align)?;
-        self.check_bounds(ptr, size, InboundsCheck::Live)?;
-
-        if check_defined_and_ptr {
-            self.check_defined(ptr, size)?;
-            self.check_relocations(ptr, size)?;
-        } else {
-            // We still don't want relocations on the *edges*
-            self.check_relocation_edges(ptr, size)?;
-        }
-
-        let alloc = self.get(ptr.alloc_id)?;
-        AllocationExtra::memory_read(alloc, ptr, size)?;
-
-        assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
-        assert_eq!(size.bytes() as usize as u64, size.bytes());
-        let offset = ptr.offset.bytes() as usize;
-        Ok(&alloc.bytes[offset..offset + size.bytes() as usize])
-    }
-
-    #[inline]
-    fn get_bytes(
-        &self,
-        ptr: Pointer<M::PointerTag>,
-        size: Size,
-        align: Align
-    ) -> EvalResult<'tcx, &[u8]> {
-        self.get_bytes_internal(ptr, size, align, true)
-    }
-
-    /// It is the caller's responsibility to handle undefined and pointer bytes.
-    /// However, this still checks that there are no relocations on the *edges*.
-    #[inline]
-    fn get_bytes_with_undef_and_ptr(
-        &self,
-        ptr: Pointer<M::PointerTag>,
-        size: Size,
-        align: Align
-    ) -> EvalResult<'tcx, &[u8]> {
-        self.get_bytes_internal(ptr, size, align, false)
-    }
-
-    /// Just calling this already marks everything as defined and removes relocations,
-    /// so be sure to actually put data there!
-    fn get_bytes_mut(
-        &mut self,
-        ptr: Pointer<M::PointerTag>,
-        size: Size,
-        align: Align,
-    ) -> EvalResult<'tcx, &mut [u8]> {
-        assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
-        self.check_align(ptr.into(), align)?;
-        self.check_bounds(ptr, size, InboundsCheck::Live)?;
-
-        self.mark_definedness(ptr, size, true)?;
-        self.clear_relocations(ptr, size)?;
-
-        let alloc = self.get_mut(ptr.alloc_id)?;
-        AllocationExtra::memory_written(alloc, ptr, size)?;
-
-        assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
-        assert_eq!(size.bytes() as usize as u64, size.bytes());
-        let offset = ptr.offset.bytes() as usize;
-        Ok(&mut alloc.bytes[offset..offset + size.bytes() as usize])
-    }
-}
-
 /// Interning (for CTFE)
 impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
 where
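For readers skimming the removed block: the shape is one private worker, `get_bytes_internal`, whose last argument decides whether undefined bytes and relocations are rejected, plus thin public wrappers (`get_bytes`, `get_bytes_with_undef_and_ptr`, `get_bytes_mut`) that pin that choice down. Below is a minimal, self-contained sketch of that pattern only; the `Memory` struct, its `defined` bitmap, and the error strings are simplified stand-ins invented for illustration, not the interpreter's real types or checks.

```rust
// Illustration only: simplified stand-ins, not the interpreter's real types.
type EvalResult<T> = Result<T, String>;

struct Memory {
    bytes: Vec<u8>,
    defined: Vec<bool>, // one "is this byte initialized?" flag per byte
}

impl Memory {
    /// Shared worker: bounds are always checked, definedness only on request.
    fn get_bytes_internal(
        &self,
        offset: usize,
        size: usize,
        check_defined: bool,
    ) -> EvalResult<&[u8]> {
        assert_ne!(size, 0, "0-sized accesses are handled before we get here");
        if offset + size > self.bytes.len() {
            return Err(format!("out-of-bounds access at {}+{}", offset, size));
        }
        if check_defined && !self.defined[offset..offset + size].iter().all(|&d| d) {
            return Err("read of undefined bytes".to_string());
        }
        Ok(&self.bytes[offset..offset + size])
    }

    /// Default accessor: refuses to hand out undefined bytes.
    fn get_bytes(&self, offset: usize, size: usize) -> EvalResult<&[u8]> {
        self.get_bytes_internal(offset, size, true)
    }

    /// The caller promises to deal with undefined bytes itself.
    fn get_bytes_with_undef(&self, offset: usize, size: usize) -> EvalResult<&[u8]> {
        self.get_bytes_internal(offset, size, false)
    }
}

fn main() {
    let mem = Memory {
        bytes: vec![1, 2, 3, 4],
        defined: vec![true, true, false, true],
    };
    assert!(mem.get_bytes(0, 2).is_ok());
    assert!(mem.get_bytes(1, 3).is_err()); // byte 2 is undefined
    assert!(mem.get_bytes_with_undef(1, 3).is_ok()); // caller opted in
}
```

In the removed code itself the wrappers additionally take an `Align` and a tagged `Pointer`, relocations are checked alongside definedness, and the mutable variant marks the range as defined and clears relocations before returning `&mut [u8]`.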