66#include "julia_atomics.h"
77#include "julia_gcext.h"
88#include "julia_assert.h"
9- #ifdef __GLIBC__
9+
10+ #if defined(_OS_DARWIN_ )
11+ #include <malloc/malloc.h>
12+ #else
1013#include <malloc.h> // for malloc_trim
1114#endif
1215
@@ -231,6 +234,7 @@ void jl_gc_wait_for_the_world(jl_ptls_t* gc_all_tls_states, int gc_n_threads);
 #if defined(_OS_WINDOWS_)
 STATIC_INLINE void *jl_malloc_aligned(size_t sz, size_t align)
 {
+    assert(align == JL_CACHE_BYTE_ALIGNMENT);
     return _aligned_malloc(sz ? sz : 1, align);
 }
 STATIC_INLINE void jl_free_aligned(void *p) JL_NOTSAFEPOINT
@@ -240,6 +244,7 @@ STATIC_INLINE void jl_free_aligned(void *p) JL_NOTSAFEPOINT
 #else
 STATIC_INLINE void *jl_malloc_aligned(size_t sz, size_t align)
 {
+    assert(align == JL_CACHE_BYTE_ALIGNMENT);
 #if defined(_P64) || defined(__APPLE__)
     if (align <= 16)
         return malloc(sz);
@@ -254,6 +259,16 @@ STATIC_INLINE void jl_free_aligned(void *p) JL_NOTSAFEPOINT
     free(p);
 }
 #endif
+size_t memory_block_usable_size(void *p) JL_NOTSAFEPOINT
+{
+#if defined(_OS_WINDOWS_)
+    return _aligned_msize(p, JL_CACHE_BYTE_ALIGNMENT, 0);
+#elif defined(_OS_DARWIN_)
+    return malloc_size(p);
+#else
+    return malloc_usable_size(p);
+#endif
+}
 #define malloc_cache_align(sz) jl_malloc_aligned(sz, JL_CACHE_BYTE_ALIGNMENT)
 
 static void schedule_finalization(void *o, void *f) JL_NOTSAFEPOINT
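
The new memory_block_usable_size helper asks the allocator how large the returned block actually is, which can exceed the requested size because allocators round requests up. Below is a minimal standalone sketch of the same query on glibc (the Darwin and Windows branches would use malloc_size and _aligned_msize instead); the printed values are illustrative only and are not part of the patch.

#include <malloc.h>   // malloc_usable_size (glibc); Darwin uses <malloc/malloc.h> and malloc_size
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    void *p = malloc(100);                 // request 100 bytes
    if (p == NULL)
        return 1;
    size_t usable = malloc_usable_size(p); // bytes the allocator actually reserved for this block
    printf("requested 100, usable %zu\n", usable); // typically >= 100
    free(p);
    return 0;
}
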
@@ -1097,11 +1112,6 @@ void jl_gc_count_allocd(size_t sz) JL_NOTSAFEPOINT
     jl_batch_accum_heap_size(ptls, sz);
 }
 
-void jl_gc_count_freed(size_t sz) JL_NOTSAFEPOINT
-{
-    jl_batch_accum_free_size(jl_current_task->ptls, sz);
-}
-
 // Only safe to update the heap inside the GC
 static void combine_thread_gc_counts(jl_gc_num_t *dest, int update_heap) JL_NOTSAFEPOINT
 {
@@ -1182,13 +1192,14 @@ static void jl_gc_free_memory(jl_value_t *v, int isaligned) JL_NOTSAFEPOINT
     jl_genericmemory_t *m = (jl_genericmemory_t*)v;
     assert(jl_genericmemory_how(m) == 1 || jl_genericmemory_how(m) == 2);
     char *d = (char*)m->ptr;
+    size_t freed_bytes = memory_block_usable_size(d);
     if (isaligned)
         jl_free_aligned(d);
     else
         free(d);
     jl_atomic_store_relaxed(&gc_heap_stats.heap_size,
-        jl_atomic_load_relaxed(&gc_heap_stats.heap_size) - jl_genericmemory_nbytes(m));
-    gc_num.freed += jl_genericmemory_nbytes(m);
+        jl_atomic_load_relaxed(&gc_heap_stats.heap_size) - freed_bytes);
+    gc_num.freed += freed_bytes;
     gc_num.freecall++;
 }
 
@@ -4191,8 +4202,9 @@ JL_DLLEXPORT void *jl_gc_managed_malloc(size_t sz)
     if (b == NULL)
         jl_throw(jl_memory_exception);
 
+    size_t allocated_bytes = memory_block_usable_size(b);
     jl_atomic_store_relaxed(&ptls->gc_tls.gc_num.allocd,
-        jl_atomic_load_relaxed(&ptls->gc_tls.gc_num.allocd) + allocsz);
+        jl_atomic_load_relaxed(&ptls->gc_tls.gc_num.allocd) + allocated_bytes);
     jl_atomic_store_relaxed(&ptls->gc_tls.gc_num.malloc,
         jl_atomic_load_relaxed(&ptls->gc_tls.gc_num.malloc) + 1);
     jl_batch_accum_heap_size(ptls, allocsz);
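
With both jl_gc_managed_malloc and jl_gc_free_memory charging the value reported by memory_block_usable_size, the heap accounting stays balanced even when the allocator rounds a request up. The sketch below shows that symmetry under glibc; the counter and wrapper names are hypothetical stand-ins for gc_heap_stats.heap_size and memory_block_usable_size, not Julia APIs.

#include <malloc.h>   // malloc_usable_size (glibc)
#include <stdio.h>
#include <stdlib.h>

static size_t heap_size = 0; // stand-in for the GC's heap-size counter

static void *counted_malloc(size_t sz)
{
    void *p = malloc(sz);
    if (p != NULL)
        heap_size += malloc_usable_size(p); // charge what was actually reserved, not sz
    return p;
}

static void counted_free(void *p)
{
    heap_size -= malloc_usable_size(p);     // query before freeing, as jl_gc_free_memory does
    free(p);
}

int main(void)
{
    void *p = counted_malloc(100);
    counted_free(p);
    printf("heap_size after an alloc/free round trip: %zu\n", heap_size); // back to 0
    return 0;
}
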