@@ -2697,7 +2697,7 @@ imalloc(static_opts_t *sopts, dynamic_opts_t *dopts) {
 
 JEMALLOC_NOINLINE
 void *
-malloc_default(size_t size) {
+malloc_default(size_t size, size_t *usize) {
 	void *ret;
 	static_opts_t sopts;
 	dynamic_opts_t dopts;
@@ -2731,6 +2731,7 @@ malloc_default(size_t size) {
 
 	LOG("core.malloc.exit", "result: %p", ret);
 
+	if (usize) *usize = dopts.usize;
 	return ret;
 }
 
@@ -2743,9 +2744,11 @@ JEMALLOC_EXPORT JEMALLOC_ALLOCATOR JEMALLOC_RESTRICT_RETURN
 void JEMALLOC_NOTHROW *
 JEMALLOC_ATTR(malloc) JEMALLOC_ALLOC_SIZE(1)
 je_malloc(size_t size) {
-	return imalloc_fastpath(size, &malloc_default);
+	return imalloc_fastpath(size, NULL, &malloc_default);
 }
 
+
+
 JEMALLOC_EXPORT int JEMALLOC_NOTHROW
 JEMALLOC_ATTR(nonnull(1))
 je_posix_memalign(void **memptr, size_t alignment, size_t size) {
@@ -2861,7 +2864,7 @@ je_calloc(size_t num, size_t size) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
+ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path, size_t *usable) {
 	if (!slow_path) {
 		tsd_assert_fast(tsd);
 	}
@@ -2894,6 +2897,7 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
 		    true);
 	}
 	thread_dalloc_event(tsd, usize);
+	if (usable) *usable = usize;
 }
 
 JEMALLOC_ALWAYS_INLINE bool
@@ -2993,7 +2997,7 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
 
 JEMALLOC_NOINLINE
 void
-free_default(void *ptr) {
+free_default(void *ptr, size_t *usable) {
 	UTRACE(ptr, 0, 0);
 	if (likely(ptr != NULL)) {
 		/*
@@ -3011,14 +3015,14 @@ free_default(void *ptr) {
 		tcache_t *tcache = tcache_get_from_ind(tsd,
 		    TCACHE_IND_AUTOMATIC, /* slow */ false,
 		    /* is_alloc */ false);
-		ifree(tsd, ptr, tcache, /* slow */ false);
+		ifree(tsd, ptr, tcache, /* slow */ false, usable);
 	} else {
 		tcache_t *tcache = tcache_get_from_ind(tsd,
 		    TCACHE_IND_AUTOMATIC, /* slow */ true,
 		    /* is_alloc */ false);
 		uintptr_t args_raw[3] = {(uintptr_t)ptr};
 		hook_invoke_dalloc(hook_dalloc_free, ptr, args_raw);
-		ifree(tsd, ptr, tcache, /* slow */ true);
+		ifree(tsd, ptr, tcache, /* slow */ true, usable);
 	}
 
 	check_entry_exit_locking(tsd_tsdn(tsd));
@@ -3062,7 +3066,7 @@ free_fastpath_nonfast_aligned(void *ptr, bool check_prof) {
 
 /* Returns whether or not the free attempt was successful. */
 JEMALLOC_ALWAYS_INLINE
-bool free_fastpath(void *ptr, size_t size, bool size_hint) {
+bool free_fastpath(void *ptr, size_t size, bool size_hint, size_t *usable) {
 	tsd_t *tsd = tsd_get(false);
 	/* The branch gets optimized away unless tsd_get_allocates(). */
 	if (unlikely(tsd == NULL)) {
@@ -3116,6 +3120,7 @@ bool free_fastpath(void *ptr, size_t size, bool size_hint) {
 	te_free_fastpath_ctx(tsd, &deallocated, &threshold);
 
 	size_t usize = sz_index2size(alloc_ctx.szind);
+	if (usable) *usable = usize;
 	uint64_t deallocated_after = deallocated + usize;
 	/*
 	 * Check for events and tsd non-nominal (fast_threshold will be set to
@@ -3158,8 +3163,8 @@ JEMALLOC_EXPORT void JEMALLOC_NOTHROW
 je_free(void *ptr) {
	LOG("core.free.entry", "ptr: %p", ptr);
 
-	if (!free_fastpath(ptr, 0, false)) {
-		free_default(ptr);
+	if (!free_fastpath(ptr, 0, false, NULL)) {
+		free_default(ptr, NULL);
 	}
 
 	LOG("core.free.exit", "");
@@ -3601,7 +3606,7 @@ do_realloc_nonnull_zero(void *ptr) {
 	    /* is_alloc */ false);
 	uintptr_t args[3] = {(uintptr_t)ptr, 0};
 	hook_invoke_dalloc(hook_dalloc_realloc, ptr, args);
-	ifree(tsd, ptr, tcache, true);
+	ifree(tsd, ptr, tcache, true, NULL);
 
 	check_entry_exit_locking(tsd_tsdn(tsd));
 	return NULL;
@@ -3883,11 +3888,11 @@ je_dallocx(void *ptr, int flags) {
 	UTRACE(ptr, 0, 0);
 	if (likely(fast)) {
 		tsd_assert_fast(tsd);
-		ifree(tsd, ptr, tcache, false);
+		ifree(tsd, ptr, tcache, false, NULL);
 	} else {
 		uintptr_t args_raw[3] = {(uintptr_t)ptr, flags};
 		hook_invoke_dalloc(hook_dalloc_dallocx, ptr, args_raw);
-		ifree(tsd, ptr, tcache, true);
+		ifree(tsd, ptr, tcache, true, NULL);
 	}
 	check_entry_exit_locking(tsd_tsdn(tsd));
 
@@ -3935,7 +3940,7 @@ je_sdallocx(void *ptr, size_t size, int flags) {
 	LOG("core.sdallocx.entry", "ptr: %p, size: %zu, flags: %d", ptr,
 	    size, flags);
 
-	if (flags != 0 || !free_fastpath(ptr, size, true)) {
+	if (flags != 0 || !free_fastpath(ptr, size, true, NULL)) {
 		sdallocx_default(ptr, size, flags);
 	}
 
@@ -3947,7 +3952,7 @@ je_sdallocx_noflags(void *ptr, size_t size) {
 	LOG("core.sdallocx.entry", "ptr: %p, size: %zu, flags: 0", ptr,
 	    size);
 
-	if (!free_fastpath(ptr, size, true)) {
+	if (!free_fastpath(ptr, size, true, NULL)) {
 		sdallocx_default(ptr, size, 0);
 	}
 
@@ -4483,3 +4488,21 @@ get_defrag_hint(void* ptr) {
 	assert(ptr != NULL);
 	return iget_defrag_hint(TSDN_NULL, ptr);
 }
+
+JEMALLOC_EXPORT JEMALLOC_ALLOCATOR JEMALLOC_RESTRICT_RETURN
+void JEMALLOC_NOTHROW *
+JEMALLOC_ATTR(malloc) JEMALLOC_ALLOC_SIZE(1)
+malloc_usable(size_t size, size_t *usize) {
+	return imalloc_fastpath(size, usize, &malloc_default);
+}
+
+JEMALLOC_EXPORT void JEMALLOC_NOTHROW
+free_usable(void *ptr, size_t *usable) {
+	LOG("core.free.entry", "ptr: %p", ptr);
+
+	if (!free_fastpath(ptr, 0, false, usable)) {
+		free_default(ptr, usable);
+	}
+
+	LOG("core.free.exit", "");
+}
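
Taken together, these hunks thread an optional size_t out-parameter through the allocation path (malloc_default, plus the imalloc_fastpath call in je_malloc, whose declaration presumably gains the matching parameter outside this diff) and the deallocation path (ifree, free_default, free_fastpath), then expose it through two new exported entry points, malloc_usable and free_usable. A caller that passes a non-NULL pointer gets the block's usable size back from the same call that allocates or frees it, instead of paying for a separate malloc_usable_size() metadata lookup. The sketch below shows how a caller might use the new entry points for allocation accounting; the prototypes mirror the definitions in the final hunk, while the used_memory counter and the accounted_* wrapper names are hypothetical.

/* Minimal usage sketch; used_memory and the wrappers are hypothetical. */
#include <stddef.h>
#include <stdatomic.h>

/* Exported by the patched allocator (see the final hunk above). */
void *malloc_usable(size_t size, size_t *usize);
void free_usable(void *ptr, size_t *usable);

static _Atomic size_t used_memory;

static void *
accounted_malloc(size_t size) {
	size_t usable = 0;
	void *ptr = malloc_usable(size, &usable);
	if (ptr != NULL) {
		/* Account the real (usable) size, not the requested size. */
		atomic_fetch_add_explicit(&used_memory, usable,
		    memory_order_relaxed);
	}
	return ptr;
}

static void
accounted_free(void *ptr) {
	size_t usable = 0;
	if (ptr == NULL) {
		return;
	}
	free_usable(ptr, &usable);
	atomic_fetch_sub_explicit(&used_memory, usable,
	    memory_order_relaxed);
}

Note that the out-parameter is only stored through when a block is actually freed (free_default never reaches ifree for a NULL pointer), which is why the wrapper guards against NULL and zero-initializes usable before the call.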