@@ -42,6 +42,11 @@ pub struct GlobalStateInner {
     /// they do not have an `AllocExtra`.
     /// This is the inverse of `int_to_ptr_map`.
     base_addr: FxHashMap<AllocId, u64>,
+    /// Temporarily store prepared memory space for global allocations the first time their memory
+    /// address is required. This is used to ensure that the memory is allocated before Miri assigns
+    /// it an internal address, which is important for matching the internal address to the machine
+    /// address so FFI can read from pointers.
+    prepared_alloc_bytes: FxHashMap<AllocId, MiriAllocBytes>,
     /// A pool of addresses we can reuse for future allocations.
     reuse: ReusePool,
     /// Whether an allocation has been exposed or not. This cannot be put
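The new field implements a "prepare now, fill later" scheme: the backing memory for a global allocation is created (zero-initialized) the moment its address is first needed, so the address Miri hands out is the real machine address of those bytes. Below is a minimal sketch of that idea, not Miri code: it uses plain std types (HashMap, Vec) instead of FxHashMap and MiriAllocBytes, relies on the pointer::addr method available on recent toolchains, and ignores alignment.

use std::collections::HashMap;

// Sketch only: reserve zeroed backing memory the first time an allocation's
// address is needed, remember the buffer keyed by its allocation id, and hand
// out the buffer's real machine address. Moving the Vec into the map moves
// only the Vec header, not its heap buffer, so the address stays valid.
fn prepare_global(prepared: &mut HashMap<u64, Vec<u8>>, alloc_id: u64, size: usize) -> usize {
    let bytes = vec![0u8; size]; // zero-initialized, like MiriAllocBytes::zeroed
    let addr = bytes.as_ptr().addr(); // the address later reported to the program
    prepared.insert(alloc_id, bytes); // kept until the allocation is materialized
    addr
}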
@@ -59,6 +64,7 @@ impl VisitProvenance for GlobalStateInner {
         let GlobalStateInner {
             int_to_ptr_map: _,
             base_addr: _,
+            prepared_alloc_bytes: _,
             reuse: _,
             exposed: _,
             next_base_addr: _,
@@ -78,6 +84,7 @@ impl GlobalStateInner {
         GlobalStateInner {
             int_to_ptr_map: Vec::default(),
             base_addr: FxHashMap::default(),
+            prepared_alloc_bytes: FxHashMap::default(),
             reuse: ReusePool::new(config),
             exposed: FxHashSet::default(),
             next_base_addr: stack_addr,
@@ -166,7 +173,39 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         assert!(!matches!(kind, AllocKind::Dead));

         // This allocation does not have a base address yet, pick or reuse one.
-        let base_addr = if let Some((reuse_addr, clock)) = global_state.reuse.take_addr(
+        let base_addr = if ecx.machine.native_lib.is_some() {
+            // In native lib mode, we use the "real" address of the bytes for this allocation.
+            // This ensures the interpreted program and native code have the same view of memory.
+            match kind {
+                AllocKind::LiveData => {
+                    let ptr = if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
+                        // For new global allocations, we always pre-allocate the memory to be able to use the machine address directly.
+                        let prepared_bytes = MiriAllocBytes::zeroed(size, align)
+                            .unwrap_or_else(|| {
+                                panic!("Miri ran out of memory: cannot create allocation of {size:?} bytes")
+                            });
+                        let ptr = prepared_bytes.as_ptr();
+                        // Store prepared allocation space to be picked up for use later.
+                        global_state.prepared_alloc_bytes.try_insert(alloc_id, prepared_bytes).unwrap();
+                        ptr
+                    } else {
+                        ecx.get_alloc_bytes_unchecked_raw(alloc_id)?
+                    };
+                    // Ensure this pointer's provenance is exposed, so that it can be used by FFI code.
+                    ptr.expose_provenance().try_into().unwrap()
+                }
+                AllocKind::Function | AllocKind::VTable => {
+                    // Allocate some dummy memory to get a unique address for this function/vtable.
+                    let alloc_bytes = MiriAllocBytes::from_bytes(&[0u8; 1], Align::from_bytes(1).unwrap());
+                    // We don't need to expose these bytes as nobody is allowed to access them.
+                    let addr = alloc_bytes.as_ptr().addr().try_into().unwrap();
+                    // Leak the underlying memory to ensure it remains unique.
+                    std::mem::forget(alloc_bytes);
+                    addr
+                }
+                AllocKind::Dead => unreachable!(),
+            }
+        } else if let Some((reuse_addr, clock)) = global_state.reuse.take_addr(
             &mut *rng,
             size,
             align,
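The `expose_provenance` call in the hunk above is what allows the integer address handed across FFI to be turned back into a pointer that may access the allocation. A small self-contained illustration of that mechanism follows; it is not Miri code and assumes a recent toolchain where the exposed-provenance APIs (`expose_provenance`, `ptr::with_exposed_provenance`) are stable.

use std::ptr;

// Sketch of the expose/recover cycle: once a pointer's provenance is exposed,
// a party that only knows the integer address (e.g. native code on the other
// side of an FFI call) can reconstruct a pointer that may read the allocation.
fn expose_roundtrip() {
    let value = Box::new(42u32);
    let addr: usize = (&*value as *const u32).expose_provenance();
    // Elsewhere, only the integer `addr` is known.
    let recovered: *const u32 = ptr::with_exposed_provenance(addr);
    // SAFETY: `addr` comes from a live, exposed allocation holding a u32.
    assert_eq!(unsafe { *recovered }, 42);
}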
@@ -318,6 +357,33 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         Ok(base_ptr.wrapping_offset(offset, ecx))
     }

+    // This returns some prepared `MiriAllocBytes`, either because `addr_from_alloc_id` reserved
+    // memory space in the past, or because this call itself does the pre-allocation.
+    fn get_global_alloc_bytes(&self, id: AllocId, kind: MemoryKind, bytes: &[u8], align: Align) -> InterpResult<'tcx, MiriAllocBytes> {
+        let ecx = self.eval_context_ref();
+        Ok(if ecx.machine.native_lib.is_some() {
+            // In native lib mode, MiriAllocBytes for global allocations are handled via `prepared_alloc_bytes`.
+            // This additional call ensures that some `MiriAllocBytes` are always prepared.
+            ecx.addr_from_alloc_id(id, kind)?;
+            let mut global_state = ecx.machine.alloc_addresses.borrow_mut();
+            // The memory we need here will have already been allocated during an earlier call to
+            // `addr_from_alloc_id` for this allocation. So don't create a new `MiriAllocBytes` here; instead,
+            // fetch the previously prepared bytes from `prepared_alloc_bytes`.
+            let mut prepared_alloc_bytes = global_state
+                .prepared_alloc_bytes
+                .remove(&id)
+                .unwrap_or_else(|| panic!("alloc bytes for {id:?} have not been prepared"));
+            // Sanity-check that the prepared allocation has the right size and alignment.
+            assert!(prepared_alloc_bytes.as_ptr().is_aligned_to(align.bytes_usize()));
+            assert_eq!(prepared_alloc_bytes.len(), bytes.len());
+            // Copy allocation contents into prepared memory.
+            prepared_alloc_bytes.copy_from_slice(bytes);
+            prepared_alloc_bytes
+        } else {
+            MiriAllocBytes::from_bytes(std::borrow::Cow::Borrowed(&*bytes), align)
+        })
+    }
+
     /// When a pointer is used for a memory access, this computes where in which allocation the
     /// access is going.
     fn ptr_get_alloc(
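The native-lib branch of `get_global_alloc_bytes` completes the scheme started in `addr_from_alloc_id`: it retrieves the buffer that was prepared when the address was assigned and copies the allocation's contents into it, so the data ends up at the address the program has already observed. A companion sketch to the earlier one, using the same simplified std types and a hypothetical helper name rather than Miri's API:

use std::collections::HashMap;

// Sketch only: when the global allocation is materialized, take back the buffer
// that was prepared when its address was handed out and fill it with the real
// contents. Allocating a fresh buffer here would put the bytes at a different
// address than the one already published to the program and to native code.
fn materialize_global(
    prepared: &mut HashMap<u64, Vec<u8>>,
    alloc_id: u64,
    contents: &[u8],
) -> Vec<u8> {
    let mut bytes = prepared
        .remove(&alloc_id)
        .expect("bytes were prepared when the address was assigned");
    assert_eq!(bytes.len(), contents.len()); // sanity check, mirroring the asserts in the diff
    bytes.copy_from_slice(contents);
    bytes
}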