author     Erlang/OTP <[email protected]>  2011-07-11 15:40:42 +0200
committer  Erlang/OTP <[email protected]>  2011-07-11 15:40:42 +0200
commit     3b2f96b9a1f079c9e0502db7d11984597ea5c4f1 (patch)
tree       eb14eeaeda15e219244540f0d270f212a5dc9554 /erts/emulator
parent     e0f60ce445b072b97353f3ab8f18c2f00b04cbe8 (diff)
parent     1dfaae304dfcc4f249c62b5ab7ff3bd4acef0d50 (diff)
Merge branch 'rickard/sbmbc/OTP-9339' into maint-r14
* rickard/sbmbc/OTP-9339:
  Use separate memory carriers for small blocks
Diffstat (limited to 'erts/emulator')
-rw-r--r--  erts/emulator/beam/erl_afit_alloc.c      |  15
-rw-r--r--  erts/emulator/beam/erl_alloc.c           | 153
-rw-r--r--  erts/emulator/beam/erl_alloc.types       |  15
-rw-r--r--  erts/emulator/beam/erl_alloc_util.c      | 701
-rw-r--r--  erts/emulator/beam/erl_alloc_util.h      |  43
-rw-r--r--  erts/emulator/beam/erl_bestfit_alloc.c   | 167
-rw-r--r--  erts/emulator/beam/erl_bestfit_alloc.h   |   3
-rw-r--r--  erts/emulator/beam/erl_goodfit_alloc.c   |  22
-rw-r--r--  erts/emulator/beam/erl_instrument.c      |  14
-rw-r--r--  erts/emulator/beam/erl_lock_check.c      |   1
-rw-r--r--  erts/emulator/test/nif_SUITE.erl         |   3
-rw-r--r--  erts/emulator/test/send_term_SUITE.erl   |   4
12 files changed, 807 insertions(+), 334 deletions(-)
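In rough terms, the change set below gives alloc_util a per-allocator small-block MBC threshold (sbmbct) and small-block carrier size (sbmbcs): allocation requests whose block size falls below the threshold are tagged with ERTS_ALCU_FLG_SBMBC and served from dedicated small-block carriers instead of the normal mseg/sys_alloc multi-block carriers. A minimal stand-alone sketch of that routing decision, using simplified stand-in names rather than the actual ERTS types or code:

/* Illustrative sketch only: mirrors the decision added to mbc_alloc_block()
 * further down in this diff, with simplified stand-in names. */
#include <stddef.h>

#define FLG_SBMBC (1u << 1)            /* stand-in for ERTS_ALCU_FLG_SBMBC */

struct alloc_cfg {
    size_t sbmbc_threshold;            /* "sbmbct"; 0 disables the feature */
    size_t min_block_size;
};

/* Returns the allocation flags for a block of (possibly adjusted) size
 * *blk_szp; the size bump keeps block size usable as the sbmbc marker. */
static unsigned classify_block(const struct alloc_cfg *cfg, size_t *blk_szp)
{
    unsigned flags = 0;
    if (cfg->sbmbc_threshold && *blk_szp < cfg->sbmbc_threshold) {
        flags |= FLG_SBMBC;
        if (*blk_szp + cfg->min_block_size > cfg->sbmbc_threshold)
            *blk_szp += cfg->min_block_size;
    }
    return flags;
}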
diff --git a/erts/emulator/beam/erl_afit_alloc.c b/erts/emulator/beam/erl_afit_alloc.c
index e8b594bb47..d397cd8848 100644
--- a/erts/emulator/beam/erl_afit_alloc.c
+++ b/erts/emulator/beam/erl_afit_alloc.c
@@ -43,9 +43,9 @@
/* Prototypes of callback functions */
static Block_t * get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void link_free_block (Allctr_t *, Block_t *);
-static void unlink_free_block (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+static void link_free_block (Allctr_t *, Block_t *, Uint32);
+static void unlink_free_block (Allctr_t *, Block_t *, Uint32);
static Eterm info_options (Allctr_t *, char *, int *,
@@ -72,6 +72,8 @@ erts_afalc_start(AFAllctr_t *afallctr,
is a struct). */
Allctr_t *allctr = (Allctr_t *) afallctr;
+ init->sbmbct = 0; /* Small mbc not supported by afit */
+
sys_memcpy((void *) afallctr, (void *) &nulled_state, sizeof(AFAllctr_t));
allctr->mbc_header_size = sizeof(Carrier_t);
@@ -105,7 +107,8 @@ erts_afalc_start(AFAllctr_t *afallctr,
}
static Block_t *
-get_free_block(Allctr_t *allctr, Uint size, Block_t *cand_blk, Uint cand_size)
+get_free_block(Allctr_t *allctr, Uint size, Block_t *cand_blk, Uint cand_size,
+ Uint32 flags)
{
AFAllctr_t *afallctr = (AFAllctr_t *) allctr;
@@ -123,7 +126,7 @@ get_free_block(Allctr_t *allctr, Uint size, Block_t *cand_blk, Uint cand_size)
}
static void
-link_free_block(Allctr_t *allctr, Block_t *block)
+link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
AFFreeBlock_t *blk = (AFFreeBlock_t *) block;
AFAllctr_t *afallctr = (AFAllctr_t *) allctr;
@@ -144,7 +147,7 @@ link_free_block(Allctr_t *allctr, Block_t *block)
}
static void
-unlink_free_block(Allctr_t *allctr, Block_t *block)
+unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
AFFreeBlock_t *blk = (AFFreeBlock_t *) block;
AFAllctr_t *afallctr = (AFAllctr_t *) allctr;
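To let the strategy backends distinguish small-block requests from normal MBC requests, the merge threads a Uint32 flags argument through get_free_block, link_free_block and unlink_free_block in every allocator backend; erl_afit_alloc.c above simply accepts and ignores it, and opts out of the feature by forcing init->sbmbct to 0. A hedged sketch of the widened callback shape, with stand-in types rather than the real declarations in erl_alloc_util.h:

/* Stand-in types only; the real Allctr_t/Block_t/Uint/Uint32 live in ERTS. */
typedef struct Allctr_ Allctr_;
typedef struct Block_  Block_;
typedef unsigned long  Uint_;
typedef unsigned int   Uint32_;

typedef Block_ *(*get_free_block_fn)(Allctr_ *, Uint_ size,
                                     Block_ *cand_blk, Uint_ cand_size,
                                     Uint32_ flags);
typedef void    (*link_free_block_fn)(Allctr_ *, Block_ *, Uint32_ flags);
typedef void    (*unlink_free_block_fn)(Allctr_ *, Block_ *, Uint32_ flags);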
diff --git a/erts/emulator/beam/erl_alloc.c b/erts/emulator/beam/erl_alloc.c
index cda404af5e..840534ec5e 100644
--- a/erts/emulator/beam/erl_alloc.c
+++ b/erts/emulator/beam/erl_alloc.c
@@ -87,13 +87,15 @@ typedef union {
char align_afa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(AFAllctr_t))];
} ErtsAllocatorState_t;
-static ErtsAllocatorState_t sl_alloc_state;
+static ErtsAllocatorState_t sbmbc_alloc_state;
static ErtsAllocatorState_t std_alloc_state;
static ErtsAllocatorState_t ll_alloc_state;
#if HALFWORD_HEAP
-static ErtsAllocatorState_t std_alloc_low_state;
-static ErtsAllocatorState_t ll_alloc_low_state;
+static ErtsAllocatorState_t sbmbc_low_alloc_state;
+static ErtsAllocatorState_t std_low_alloc_state;
+static ErtsAllocatorState_t ll_low_alloc_state;
#endif
+static ErtsAllocatorState_t sl_alloc_state;
static ErtsAllocatorState_t temp_alloc_state;
static ErtsAllocatorState_t eheap_alloc_state;
static ErtsAllocatorState_t binary_alloc_state;
@@ -162,6 +164,7 @@ typedef struct {
char *mtrace;
char *nodename;
} instr;
+ struct au_init sbmbc_alloc;
struct au_init sl_alloc;
struct au_init std_alloc;
struct au_init ll_alloc;
@@ -171,8 +174,9 @@ typedef struct {
struct au_init ets_alloc;
struct au_init driver_alloc;
#if HALFWORD_HEAP
- struct au_init std_alloc_low;
- struct au_init ll_alloc_low;
+ struct au_init sbmbc_low_alloc;
+ struct au_init std_low_alloc;
+ struct au_init ll_low_alloc;
#endif
} erts_alc_hndl_args_init_t;
@@ -185,6 +189,34 @@ do { \
} while (0)
static void
+set_default_sbmbc_alloc_opts(struct au_init *ip)
+{
+ SET_DEFAULT_ALLOC_OPTS(ip);
+ ip->enable = 0;
+ ip->thr_spec = 0;
+ ip->atype = BESTFIT;
+ ip->init.bf.ao = 1;
+ ip->init.util.ramv = 0;
+ ip->init.util.mmsbc = 0;
+ ip->init.util.mmmbc = 500;
+ ip->init.util.sbct = ~((UWord) 0);
+ ip->init.util.name_prefix = "sbmbc_";
+ ip->init.util.alloc_no = ERTS_ALC_A_SBMBC;
+#ifndef SMALL_MEMORY
+ ip->init.util.mmbcs = 2*1024*1024; /* Main carrier size */
+#else
+ ip->init.util.mmbcs = 1*1024*1024; /* Main carrier size */
+#endif
+ ip->init.util.ts = ERTS_ALC_MTA_SBMBC;
+ ip->init.util.asbcst = 0;
+ ip->init.util.rsbcst = 0;
+ ip->init.util.rsbcmt = 0;
+ ip->init.util.rmbcmt = 0;
+ ip->init.util.sbmbct = 0;
+ ip->init.util.sbmbcs = 0;
+}
+
+static void
set_default_sl_alloc_opts(struct au_init *ip)
{
SET_DEFAULT_ALLOC_OPTS(ip);
@@ -202,6 +234,7 @@ set_default_sl_alloc_opts(struct au_init *ip)
ip->init.util.ts = ERTS_ALC_MTA_SHORT_LIVED;
ip->init.util.rsbcst = 80;
#if HALFWORD_HEAP
+ ip->init.util.force = 1;
ip->init.util.low_mem = 1;
#endif
@@ -249,6 +282,8 @@ set_default_ll_alloc_opts(struct au_init *ip)
ip->init.util.rsbcst = 0;
ip->init.util.rsbcmt = 0;
ip->init.util.rmbcmt = 0;
+ ip->init.util.sbmbct = 0;
+ ip->init.util.sbmbcs = 0;
}
static void
@@ -269,6 +304,7 @@ set_default_temp_alloc_opts(struct au_init *ip)
ip->init.util.rsbcst = 90;
ip->init.util.rmbcmt = 100;
#if HALFWORD_HEAP
+ ip->init.util.force = 1;
ip->init.util.low_mem = 1;
#endif
}
@@ -291,6 +327,7 @@ set_default_eheap_alloc_opts(struct au_init *ip)
ip->init.util.ts = ERTS_ALC_MTA_EHEAP;
ip->init.util.rsbcst = 50;
#if HALFWORD_HEAP
+ ip->init.util.force = 1;
ip->init.util.low_mem = 1;
#endif
}
@@ -436,10 +473,13 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
hdbg_init();
#endif
+ erts_have_sbmbc_alloc = 0;
+
erts_sys_alloc_init();
init_thr_ix(erts_no_schedulers);
erts_init_utils_mem();
+ set_default_sbmbc_alloc_opts(&init.sbmbc_alloc);
set_default_sl_alloc_opts(&init.sl_alloc);
set_default_std_alloc_opts(&init.std_alloc);
set_default_ll_alloc_opts(&init.ll_alloc);
@@ -453,6 +493,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
handle_args(argc, argv, &init);
if (erts_no_schedulers <= 1) {
+ init.sbmbc_alloc.thr_spec = 0;
init.sl_alloc.thr_spec = 0;
init.std_alloc.thr_spec = 0;
init.ll_alloc.thr_spec = 0;
@@ -464,6 +505,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
if (init.erts_alloc_config) {
/* Adjust flags that erts_alloc_config won't like */
+ init.sbmbc_alloc.thr_spec = 0;
init.temp_alloc.thr_spec = 0;
init.sl_alloc.thr_spec = 0;
init.std_alloc.thr_spec = 0;
@@ -480,6 +522,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
init.temp_alloc.thr_spec = erts_no_schedulers;
/* Others must use thread preferred interface */
+ adjust_tpref(&init.sbmbc_alloc, erts_no_schedulers);
adjust_tpref(&init.sl_alloc, erts_no_schedulers);
adjust_tpref(&init.std_alloc, erts_no_schedulers);
adjust_tpref(&init.ll_alloc, erts_no_schedulers);
@@ -497,6 +540,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
* The following allocators cannot be run with afit strategy.
* Make sure they don't...
*/
+ refuse_af_strategy(&init.sbmbc_alloc);
refuse_af_strategy(&init.sl_alloc);
refuse_af_strategy(&init.std_alloc);
refuse_af_strategy(&init.ll_alloc);
@@ -551,19 +595,30 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
#if HALFWORD_HEAP
/* Init low memory variants by cloning */
- init.std_alloc_low = init.std_alloc;
- init.std_alloc_low.init.util.alloc_no = ERTS_ALC_A_STANDARD_LOW;
- init.std_alloc_low.init.util.low_mem = 1;
-
- init.ll_alloc_low = init.ll_alloc;
- init.ll_alloc_low.init.util.alloc_no = ERTS_ALC_A_LONG_LIVED_LOW;
- init.ll_alloc_low.init.util.low_mem = 1;
-
- set_au_allocator(ERTS_ALC_A_STANDARD_LOW, &init.std_alloc_low);
- set_au_allocator(ERTS_ALC_A_LONG_LIVED_LOW, &init.ll_alloc_low);
+ init.sbmbc_low_alloc = init.sbmbc_alloc;
+ init.sbmbc_low_alloc.init.util.name_prefix = "sbmbc_low_";
+ init.sbmbc_low_alloc.init.util.alloc_no = ERTS_ALC_A_STANDARD_LOW;
+ init.sbmbc_low_alloc.init.util.low_mem = 1;
+
+ init.std_low_alloc = init.std_alloc;
+ init.std_low_alloc.init.util.name_prefix = "std_low_";
+ init.std_low_alloc.init.util.alloc_no = ERTS_ALC_A_STANDARD_LOW;
+ init.std_low_alloc.init.util.force = 1;
+ init.std_low_alloc.init.util.low_mem = 1;
+
+ init.ll_low_alloc = init.ll_alloc;
+ init.ll_low_alloc.init.util.name_prefix = "ll_low_";
+ init.ll_low_alloc.init.util.alloc_no = ERTS_ALC_A_LONG_LIVED_LOW;
+ init.ll_low_alloc.init.util.force = 1;
+ init.ll_low_alloc.init.util.low_mem = 1;
+
+ set_au_allocator(ERTS_ALC_A_SBMBC_LOW, &init.sbmbc_low_alloc);
+ set_au_allocator(ERTS_ALC_A_STANDARD_LOW, &init.std_low_alloc);
+ set_au_allocator(ERTS_ALC_A_LONG_LIVED_LOW, &init.ll_low_alloc);
#endif /* HALFWORD */
set_au_allocator(ERTS_ALC_A_TEMPORARY, &init.temp_alloc);
+ set_au_allocator(ERTS_ALC_A_SBMBC, &init.sbmbc_alloc);
set_au_allocator(ERTS_ALC_A_SHORT_LIVED, &init.sl_alloc);
set_au_allocator(ERTS_ALC_A_STANDARD, &init.std_alloc);
set_au_allocator(ERTS_ALC_A_LONG_LIVED, &init.ll_alloc);
@@ -593,6 +648,20 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
erts_mtrace_init(init.instr.mtrace, init.instr.nodename);
+ /* sbmbc_alloc() needs to be started first */
+ start_au_allocator(ERTS_ALC_A_SBMBC,
+ &init.sbmbc_alloc,
+ &sbmbc_alloc_state);
+#if HALFWORD_HEAP
+ start_au_allocator(ERTS_ALC_A_SBMBC_LOW,
+ &init.sbmbc_low_alloc,
+ &sbmbc_low_alloc_state);
+ erts_have_sbmbc_alloc = (init.sbmbc_alloc.enable
+ && init.sbmbc_low_alloc.enable);
+#else
+ erts_have_sbmbc_alloc = init.sbmbc_alloc.enable;
+#endif
+
start_au_allocator(ERTS_ALC_A_TEMPORARY,
&init.temp_alloc,
&temp_alloc_state);
@@ -610,11 +679,11 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
&ll_alloc_state);
#if HALFWORD_HEAP
start_au_allocator(ERTS_ALC_A_LONG_LIVED_LOW,
- &init.ll_alloc_low,
- &ll_alloc_low_state);
+ &init.ll_low_alloc,
+ &ll_low_alloc_state);
start_au_allocator(ERTS_ALC_A_STANDARD_LOW,
- &init.std_alloc_low,
- &std_alloc_low_state);
+ &init.std_low_alloc,
+ &std_low_alloc_state);
#endif
start_au_allocator(ERTS_ALC_A_EHEAP,
&init.eheap_alloc,
@@ -680,14 +749,11 @@ set_au_allocator(ErtsAlcType_t alctr_n, struct au_init *init)
ErtsAllocatorInfo_t *ai = &erts_allctrs_info[alctr_n];
ErtsAllocatorThrSpec_t *tspec = &erts_allctr_thr_spec[alctr_n];
-#if HALFWORD_HEAP
- /* If halfword heap, silently ignore any disabling of internal
- * allocators for low memory
+ /*
+ * Some allocators are forced on if halfword heap is used.
*/
- if (init->init.util.low_mem) {
+ if (init->init.util.force)
init->enable = 1;
- }
-#endif
if (!init->enable) {
af->alloc = erts_sys_alloc;
@@ -947,6 +1013,20 @@ get_kb_value(char *param_end, char** argv, int* ip)
}
static Uint
+get_byte_value(char *param_end, char** argv, int* ip)
+{
+ Sint tmp;
+ char *rest;
+ char *param = argv[*ip]+1;
+ char *value = get_value(param_end, argv, ip);
+ errno = 0;
+ tmp = (Sint) strtol(value, &rest, 10);
+ if (errno != 0 || rest == value || tmp < 0)
+ bad_value(param, param_end, value);
+ return (Uint) tmp;
+}
+
+static Uint
get_amount_value(char *param_end, char** argv, int* ip)
{
Sint tmp;
@@ -1085,6 +1165,12 @@ handle_au_arg(struct au_init *auip,
if(has_prefix("sbct", sub_param)) {
auip->init.util.sbct = get_kb_value(sub_param + 4, argv, ip);
}
+ else if (has_prefix("sbmbcs", sub_param)) {
+ auip->init.util.sbmbcs = get_byte_value(sub_param + 6, argv, ip);
+ }
+ else if (has_prefix("sbmbct", sub_param)) {
+ auip->init.util.sbmbct = get_byte_value(sub_param + 6, argv, ip);
+ }
else if (has_prefix("smbcs", sub_param)) {
auip->default_.smbcs = 0;
auip->init.util.smbcs = get_kb_value(sub_param + 5, argv, ip);
@@ -1123,6 +1209,7 @@ static void
handle_args(int *argc, char **argv, erts_alc_hndl_args_init_t *init)
{
struct au_init *aui[] = {
+ &init->sbmbc_alloc,
&init->binary_alloc,
&init->std_alloc,
&init->ets_alloc,
@@ -1150,6 +1237,9 @@ handle_args(int *argc, char **argv, erts_alc_hndl_args_init_t *init)
case 'B':
handle_au_arg(&init->binary_alloc, &argv[i][3], argv, &i);
break;
+ case 'C':
+ handle_au_arg(&init->sbmbc_alloc, &argv[i][3], argv, &i);
+ break;
case 'D':
handle_au_arg(&init->std_alloc, &argv[i][3], argv, &i);
break;
@@ -1856,12 +1946,16 @@ erts_memory(int *print_to_p, void *print_to_arg, void *proc, Eterm earg)
return am_badarg;
}
- /* All alloc_util allocators *have* to be enabled */
+ /* All alloc_util allocators except sbmbc_alloc *have* to be enabled */
for (ai = ERTS_ALC_A_MIN; ai <= ERTS_ALC_A_MAX; ai++) {
switch (ai) {
case ERTS_ALC_A_SYSTEM:
case ERTS_ALC_A_FIXED_SIZE:
+ case ERTS_ALC_A_SBMBC:
+#if HALFWORD_HEAP
+ case ERTS_ALC_A_SBMBC_LOW:
+#endif
break;
default:
if (!erts_allctrs_info[ai].enabled
@@ -1901,6 +1995,12 @@ erts_memory(int *print_to_p, void *print_to_arg, void *proc, Eterm earg)
* Often not thread safe and usually never
* contain any allocated memory.
*/
+ case ERTS_ALC_A_SBMBC:
+ /* Included in other allocators */
+#if HALFWORD_HEAP
+ case ERTS_ALC_A_SBMBC_LOW:
+ /* Included in other allocators */
+#endif
continue;
case ERTS_ALC_A_EHEAP:
save = &size.processes;
@@ -2925,6 +3025,7 @@ unsigned long erts_alc_test(unsigned long op,
init.atype = GOODFIT;
init.init.util.name_prefix = (char *) a1;
init.init.util.ts = a2 ? 1 : 0;
+ init.init.util.sbmbct = 0;
if ((char **) a3) {
char **argv = (char **) a3;
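The erl_alloc.c changes above wire the new settings into startup: default option sets via set_default_sbmbc_alloc_opts(), a 'C' letter in the +M switch family for the sbmbc allocator, and sbmbcs/sbmbct sub-parameters parsed with the new get_byte_value(). A simplified, self-contained version of that parsing pattern is shown below; it is stricter than the original in rejecting trailing characters and is purely illustrative:

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

/* Parse a non-negative decimal byte count, as get_byte_value() above does
 * for the sbmbcs/sbmbct sub-parameters; returns -1 on any malformed input. */
static long parse_byte_value(const char *value)
{
    char *rest;
    long tmp;
    errno = 0;
    tmp = strtol(value, &rest, 10);
    if (errno != 0 || rest == value || *rest != '\0' || tmp < 0)
        return -1;
    return tmp;
}

int main(void)
{
    printf("%ld\n", parse_byte_value("256"));   /* 256 */
    printf("%ld\n", parse_byte_value("8k"));    /* -1: no size suffixes here */
    return 0;
}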
diff --git a/erts/emulator/beam/erl_alloc.types b/erts/emulator/beam/erl_alloc.types
index c6cc0e1fac..eda0831441 100644
--- a/erts/emulator/beam/erl_alloc.types
+++ b/erts/emulator/beam/erl_alloc.types
@@ -65,6 +65,11 @@
allocator SYSTEM true sys_alloc
+allocator SBMBC true sbmbc_alloc
++if halfword
+allocator SBMBC_LOW true sbmbc_low_alloc
++endif
+
+if smp
allocator TEMPORARY true temp_alloc
@@ -76,8 +81,8 @@ allocator ETS true ets_alloc
allocator FIXED_SIZE true fix_alloc
+if halfword
-allocator LONG_LIVED_LOW true ll_alloc_low
-allocator STANDARD_LOW true std_alloc_low
+allocator LONG_LIVED_LOW true ll_low_alloc
+allocator STANDARD_LOW true std_low_alloc
+endif
+else # Non smp build
@@ -91,8 +96,8 @@ allocator ETS false ets_alloc
allocator FIXED_SIZE false fix_alloc
+if halfword
-allocator LONG_LIVED_LOW false ll_alloc_low
-allocator STANDARD_LOW false std_alloc_low
+allocator LONG_LIVED_LOW false ll_low_alloc
+allocator STANDARD_LOW false std_low_alloc
+endif
+endif
@@ -134,6 +139,7 @@ class SYSTEM system_data
#
# <TYPE> <ALLOCATOR> <CLASS> <DESCRIPTION>
+type SBMBC SBMBC SYSTEM small_block_mbc
type PROC FIXED_SIZE PROCESSES proc
type ATOM FIXED_SIZE ATOM atom_entry
type MODULE FIXED_SIZE CODE module_entry
@@ -330,6 +336,7 @@ type SSB SHORT_LIVED PROCESSES ssb
+if halfword
+type SBMBC_LOW SBMBC_LOW SYSTEM small_block_mbc_low
type DDLL_PROCESS STANDARD_LOW SYSTEM ddll_processes
type MONITOR_LH STANDARD_LOW PROCESSES monitor_lh
type NLINK_LH STANDARD_LOW PROCESSES nlink_lh
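The erl_alloc.types changes above declare the new SBMBC allocator (and SBMBC_LOW on halfword builds) plus the carrier memory types, and rename the low-memory allocator identifiers to match the new std_low_/ll_low_ prefixes. The build scripts turn these declarations into the allocator numbers and type constants used elsewhere in the diff; a rough sketch of the generated shape, with stand-in names and values (the real ones come out of the generated erl_alloc headers):

/* Stand-in illustration of what the .types declarations expand to. */
enum sketch_allocator_no {
    SKETCH_ALC_A_SBMBC = 1,      /* from: allocator SBMBC true sbmbc_alloc */
    /* ... other allocators ... */
};

enum sketch_alloc_type {
    SKETCH_ALC_T_SBMBC,          /* from: type SBMBC SBMBC SYSTEM small_block_mbc */
    SKETCH_ALC_T_SBMBC_LOW,      /* halfword only: type SBMBC_LOW ... */
    /* ... other types ... */
};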
diff --git a/erts/emulator/beam/erl_alloc_util.c b/erts/emulator/beam/erl_alloc_util.c
index cc04ef65bf..19c552d8cd 100644
--- a/erts/emulator/beam/erl_alloc_util.c
+++ b/erts/emulator/beam/erl_alloc_util.c
@@ -66,6 +66,7 @@
static int atoms_initialized = 0;
static int initialized = 0;
+int erts_have_sbmbc_alloc;
#if HAVE_ERTS_MSEG
@@ -85,8 +86,6 @@ static int initialized = 0;
#undef ASSERT
#define ASSERT ASSERT_EXPR
-#define ERTS_ALCU_FLG_FAIL_REALLOC_MOVE ((UWord) 1)
-
#if 0
/* Can be useful for debugging */
#define MBC_REALLOC_ALWAYS_MOVES
@@ -275,14 +274,26 @@ static void check_blk_carrier(Allctr_t *, Block_t *);
#ifdef DEBUG
#define DEBUG_CHECK_CARRIER_NO_SZ(AP) \
- ASSERT(((AP)->sbcs.curr_mseg.no && (AP)->sbcs.curr_mseg.size) \
- || (!(AP)->sbcs.curr_mseg.no && !(AP)->sbcs.curr_mseg.size));\
- ASSERT(((AP)->sbcs.curr_sys_alloc.no && (AP)->sbcs.curr_sys_alloc.size)\
- || (!(AP)->sbcs.curr_sys_alloc.no && !(AP)->sbcs.curr_sys_alloc.size));\
- ASSERT(((AP)->mbcs.curr_mseg.no && (AP)->mbcs.curr_mseg.size) \
- || (!(AP)->mbcs.curr_mseg.no && !(AP)->mbcs.curr_mseg.size));\
- ASSERT(((AP)->mbcs.curr_sys_alloc.no && (AP)->mbcs.curr_sys_alloc.size)\
- || (!(AP)->mbcs.curr_sys_alloc.no && !(AP)->mbcs.curr_sys_alloc.size))
+ ASSERT(((AP)->sbcs.curr.norm.mseg.no \
+ && (AP)->sbcs.curr.norm.mseg.size) \
+ || (!(AP)->sbcs.curr.norm.mseg.no \
+ && !(AP)->sbcs.curr.norm.mseg.size)); \
+ ASSERT(((AP)->sbcs.curr.norm.sys_alloc.no \
+ && (AP)->sbcs.curr.norm.sys_alloc.size) \
+ || (!(AP)->sbcs.curr.norm.sys_alloc.no \
+ && !(AP)->sbcs.curr.norm.sys_alloc.size)); \
+ ASSERT(((AP)->mbcs.curr.norm.mseg.no \
+ && (AP)->mbcs.curr.norm.mseg.size) \
+ || (!(AP)->mbcs.curr.norm.mseg.no \
+ && !(AP)->mbcs.curr.norm.mseg.size)); \
+ ASSERT(((AP)->mbcs.curr.norm.sys_alloc.no \
+ && (AP)->mbcs.curr.norm.sys_alloc.size) \
+ || (!(AP)->mbcs.curr.norm.sys_alloc.no \
+ && !(AP)->mbcs.curr.norm.sys_alloc.size)); \
+ ASSERT(((AP)->sbmbcs.curr.small_block.no \
+ && (AP)->sbmbcs.curr.small_block.size) \
+ || (!(AP)->sbmbcs.curr.small_block.no \
+ && !(AP)->sbmbcs.curr.small_block.size))
#else
#define DEBUG_CHECK_CARRIER_NO_SZ(AP)
@@ -292,27 +303,27 @@ static void check_blk_carrier(Allctr_t *, Block_t *);
(AP)->sbcs.blocks.curr.size += (BSZ); \
if ((AP)->sbcs.blocks.max.size < (AP)->sbcs.blocks.curr.size) \
(AP)->sbcs.blocks.max.size = (AP)->sbcs.blocks.curr.size; \
- if ((AP)->sbcs.max.no < ((AP)->sbcs.curr_mseg.no \
- + (AP)->sbcs.curr_sys_alloc.no)) \
- (AP)->sbcs.max.no = ((AP)->sbcs.curr_mseg.no \
- + (AP)->sbcs.curr_sys_alloc.no); \
- if ((AP)->sbcs.max.size < ((AP)->sbcs.curr_mseg.size \
- + (AP)->sbcs.curr_sys_alloc.size)) \
- (AP)->sbcs.max.size = ((AP)->sbcs.curr_mseg.size \
- + (AP)->sbcs.curr_sys_alloc.size)
+ if ((AP)->sbcs.max.no < ((AP)->sbcs.curr.norm.mseg.no \
+ + (AP)->sbcs.curr.norm.sys_alloc.no)) \
+ (AP)->sbcs.max.no = ((AP)->sbcs.curr.norm.mseg.no \
+ + (AP)->sbcs.curr.norm.sys_alloc.no); \
+ if ((AP)->sbcs.max.size < ((AP)->sbcs.curr.norm.mseg.size \
+ + (AP)->sbcs.curr.norm.sys_alloc.size)) \
+ (AP)->sbcs.max.size = ((AP)->sbcs.curr.norm.mseg.size \
+ + (AP)->sbcs.curr.norm.sys_alloc.size)
#define STAT_MSEG_SBC_ALLOC(AP, CSZ, BSZ) \
do { \
- (AP)->sbcs.curr_mseg.no++; \
- (AP)->sbcs.curr_mseg.size += (CSZ); \
+ (AP)->sbcs.curr.norm.mseg.no++; \
+ (AP)->sbcs.curr.norm.mseg.size += (CSZ); \
STAT_SBC_ALLOC((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_SBC_ALLOC(AP, CSZ, BSZ) \
do { \
- (AP)->sbcs.curr_sys_alloc.no++; \
- (AP)->sbcs.curr_sys_alloc.size += (CSZ); \
+ (AP)->sbcs.curr.norm.sys_alloc.no++; \
+ (AP)->sbcs.curr.norm.sys_alloc.size += (CSZ); \
STAT_SBC_ALLOC((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
@@ -324,85 +335,111 @@ do { \
#define STAT_MSEG_SBC_FREE(AP, CSZ, BSZ) \
do { \
- ASSERT((AP)->sbcs.curr_mseg.no > 0); \
- (AP)->sbcs.curr_mseg.no--; \
- ASSERT((AP)->sbcs.curr_mseg.size >= (CSZ)); \
- (AP)->sbcs.curr_mseg.size -= (CSZ); \
+ ASSERT((AP)->sbcs.curr.norm.mseg.no > 0); \
+ (AP)->sbcs.curr.norm.mseg.no--; \
+ ASSERT((AP)->sbcs.curr.norm.mseg.size >= (CSZ)); \
+ (AP)->sbcs.curr.norm.mseg.size -= (CSZ); \
STAT_SBC_FREE((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_SBC_FREE(AP, CSZ, BSZ) \
do { \
- ASSERT((AP)->sbcs.curr_sys_alloc.no > 0); \
- (AP)->sbcs.curr_sys_alloc.no--; \
- ASSERT((AP)->sbcs.curr_sys_alloc.size >= (CSZ)); \
- (AP)->sbcs.curr_sys_alloc.size -= (CSZ); \
+ ASSERT((AP)->sbcs.curr.norm.sys_alloc.no > 0); \
+ (AP)->sbcs.curr.norm.sys_alloc.no--; \
+ ASSERT((AP)->sbcs.curr.norm.sys_alloc.size >= (CSZ)); \
+ (AP)->sbcs.curr.norm.sys_alloc.size -= (CSZ); \
STAT_SBC_FREE((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_MBC_ALLOC(AP) \
- if ((AP)->mbcs.max.no < ((AP)->mbcs.curr_mseg.no \
- + (AP)->mbcs.curr_sys_alloc.no)) \
- (AP)->mbcs.max.no = ((AP)->mbcs.curr_mseg.no \
- + (AP)->mbcs.curr_sys_alloc.no); \
- if ((AP)->mbcs.max.size < ((AP)->mbcs.curr_mseg.size \
- + (AP)->mbcs.curr_sys_alloc.size)) \
- (AP)->mbcs.max.size = ((AP)->mbcs.curr_mseg.size \
- + (AP)->mbcs.curr_sys_alloc.size)
+ if ((AP)->mbcs.max.no < ((AP)->mbcs.curr.norm.mseg.no \
+ + (AP)->mbcs.curr.norm.sys_alloc.no)) \
+ (AP)->mbcs.max.no = ((AP)->mbcs.curr.norm.mseg.no \
+ + (AP)->mbcs.curr.norm.sys_alloc.no); \
+ if ((AP)->mbcs.max.size < ((AP)->mbcs.curr.norm.mseg.size \
+ + (AP)->mbcs.curr.norm.sys_alloc.size)) \
+ (AP)->mbcs.max.size = ((AP)->mbcs.curr.norm.mseg.size \
+ + (AP)->mbcs.curr.norm.sys_alloc.size)
+
+#define STAT_SBMBC_ALLOC(AP, CSZ) \
+do { \
+ (AP)->sbmbcs.curr.small_block.no++; \
+ (AP)->sbmbcs.curr.small_block.size += (CSZ); \
+ if ((AP)->sbmbcs.max.no < (AP)->sbmbcs.curr.small_block.no) \
+ (AP)->sbmbcs.max.no = (AP)->sbmbcs.curr.small_block.no; \
+ if ((AP)->sbmbcs.max.size < (AP)->sbmbcs.curr.small_block.size) \
+ (AP)->sbmbcs.max.size = (AP)->sbmbcs.curr.small_block.size; \
+ DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
+} while (0)
#define STAT_MSEG_MBC_ALLOC(AP, CSZ) \
do { \
- (AP)->mbcs.curr_mseg.no++; \
- (AP)->mbcs.curr_mseg.size += (CSZ); \
+ (AP)->mbcs.curr.norm.mseg.no++; \
+ (AP)->mbcs.curr.norm.mseg.size += (CSZ); \
STAT_MBC_ALLOC((AP)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_MBC_ALLOC(AP, CSZ) \
do { \
- (AP)->mbcs.curr_sys_alloc.no++; \
- (AP)->mbcs.curr_sys_alloc.size += (CSZ); \
+ (AP)->mbcs.curr.norm.sys_alloc.no++; \
+ (AP)->mbcs.curr.norm.sys_alloc.size += (CSZ); \
STAT_MBC_ALLOC((AP)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
+#define STAT_SBMBC_FREE(AP, CSZ) \
+do { \
+ ASSERT((AP)->sbmbcs.curr.small_block.no > 0); \
+ (AP)->sbmbcs.curr.small_block.no--; \
+ ASSERT((AP)->sbmbcs.curr.small_block.size >= (CSZ)); \
+ (AP)->sbmbcs.curr.small_block.size -= (CSZ); \
+ DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
+} while (0)
+
#define STAT_MSEG_MBC_FREE(AP, CSZ) \
do { \
- ASSERT((AP)->mbcs.curr_mseg.no > 0); \
- (AP)->mbcs.curr_mseg.no--; \
- ASSERT((AP)->mbcs.curr_mseg.size >= (CSZ)); \
- (AP)->mbcs.curr_mseg.size -= (CSZ); \
+ ASSERT((AP)->mbcs.curr.norm.mseg.no > 0); \
+ (AP)->mbcs.curr.norm.mseg.no--; \
+ ASSERT((AP)->mbcs.curr.norm.mseg.size >= (CSZ)); \
+ (AP)->mbcs.curr.norm.mseg.size -= (CSZ); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_MBC_FREE(AP, CSZ) \
do { \
- ASSERT((AP)->mbcs.curr_sys_alloc.no > 0); \
- (AP)->mbcs.curr_sys_alloc.no--; \
- ASSERT((AP)->mbcs.curr_sys_alloc.size >= (CSZ)); \
- (AP)->mbcs.curr_sys_alloc.size -= (CSZ); \
+ ASSERT((AP)->mbcs.curr.norm.sys_alloc.no > 0); \
+ (AP)->mbcs.curr.norm.sys_alloc.no--; \
+ ASSERT((AP)->mbcs.curr.norm.sys_alloc.size >= (CSZ)); \
+ (AP)->mbcs.curr.norm.sys_alloc.size -= (CSZ); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
-#define STAT_MBC_BLK_ALLOC(AP, BSZ) \
+#define STAT_MBC_BLK_ALLOC(AP, BSZ, FLGS) \
do { \
- (AP)->mbcs.blocks.curr.no++; \
- if ((AP)->mbcs.blocks.max.no < (AP)->mbcs.blocks.curr.no) \
- (AP)->mbcs.blocks.max.no = (AP)->mbcs.blocks.curr.no; \
- (AP)->mbcs.blocks.curr.size += (BSZ); \
- if ((AP)->mbcs.blocks.max.size < (AP)->mbcs.blocks.curr.size) \
- (AP)->mbcs.blocks.max.size = (AP)->mbcs.blocks.curr.size; \
+ CarriersStats_t *cstats__ = (((FLGS) & ERTS_ALCU_FLG_SBMBC) \
+ ? &(AP)->sbmbcs \
+ : &(AP)->mbcs); \
+ cstats__->blocks.curr.no++; \
+ if (cstats__->blocks.max.no < cstats__->blocks.curr.no) \
+ cstats__->blocks.max.no = cstats__->blocks.curr.no; \
+ cstats__->blocks.curr.size += (BSZ); \
+ if (cstats__->blocks.max.size < cstats__->blocks.curr.size) \
+ cstats__->blocks.max.size = cstats__->blocks.curr.size; \
} while (0)
-#define STAT_MBC_BLK_FREE(AP, BSZ) \
+#define STAT_MBC_BLK_FREE(AP, BSZ, FLGS) \
do { \
- ASSERT((AP)->mbcs.blocks.curr.no > 0); \
- (AP)->mbcs.blocks.curr.no--; \
- ASSERT((AP)->mbcs.blocks.curr.size >= (BSZ)); \
- (AP)->mbcs.blocks.curr.size -= (BSZ); \
+ CarriersStats_t *cstats__ = (((FLGS) & ERTS_ALCU_FLG_SBMBC) \
+ ? &(AP)->sbmbcs \
+ : &(AP)->mbcs); \
+ ASSERT(cstats__->blocks.curr.no > 0); \
+ cstats__->blocks.curr.no--; \
+ ASSERT(cstats__->blocks.curr.size >= (BSZ)); \
+ cstats__->blocks.curr.size -= (BSZ); \
} while (0)
/* Debug stuff... */
@@ -410,7 +447,7 @@ do { \
static UWord carrier_alignment;
#define DEBUG_SAVE_ALIGNMENT(C) \
do { \
- UWord algnmnt__ = sizeof(Unit_t) - (((UWord) (C)) % sizeof(Unit_t)); \
+ UWord algnmnt__ = sizeof(Unit_t) - (((UWord) (C)) % sizeof(Unit_t));\
carrier_alignment = MIN(carrier_alignment, algnmnt__); \
ASSERT(((UWord) (C)) % sizeof(UWord) == 0); \
} while (0)
@@ -524,8 +561,8 @@ static Uint
get_next_mbc_size(Allctr_t *allctr)
{
Uint size;
- int cs = (allctr->mbcs.curr_mseg.no
- + allctr->mbcs.curr_sys_alloc.no
+ int cs = (allctr->mbcs.curr.norm.mseg.no
+ + allctr->mbcs.curr.norm.sys_alloc.no
- (allctr->main_carrier ? 1 : 0));
ASSERT(cs >= 0);
@@ -609,7 +646,8 @@ unlink_carrier(CarrierList_t *cl, Carrier_t *crr)
}
}
-
+static Block_t *create_sbmbc(Allctr_t *allctr, Uint umem_sz);
+static void destroy_sbmbc(Allctr_t *allctr, Block_t *blk);
static Block_t *create_carrier(Allctr_t *, Uint, UWord);
static void destroy_carrier(Allctr_t *, Block_t *);
@@ -619,39 +657,57 @@ static void destroy_carrier(Allctr_t *, Block_t *);
* block in a sbc.
*/
static ERTS_INLINE void *
-mbc_alloc_block(Allctr_t *allctr, Uint size, Uint *blk_szp)
+mbc_alloc_block(Allctr_t *allctr, Uint size, Uint *blk_szp, Uint32 *alcu_flgsp)
{
Block_t *blk;
+ Uint get_blk_sz;
+ Uint sbmbct;
ASSERT(size);
ASSERT(size < allctr->sbc_threshold);
- *blk_szp = UMEMSZ2BLKSZ(allctr, size);
+ *blk_szp = get_blk_sz = UMEMSZ2BLKSZ(allctr, size);
+
+ sbmbct = allctr->sbmbc_threshold;
+ if (sbmbct) {
+ if (get_blk_sz < sbmbct) {
+ *alcu_flgsp |= ERTS_ALCU_FLG_SBMBC;
+ if (get_blk_sz + allctr->min_block_size > sbmbct) {
+ /* Since we use block size to determine if blocks are
+ located in sbmbc or not... */
+ get_blk_sz += allctr->min_block_size;
+ }
+ }
+ }
- blk = (*allctr->get_free_block)(allctr, *blk_szp, NULL, 0);
+ blk = (*allctr->get_free_block)(allctr, get_blk_sz, NULL, 0, *alcu_flgsp);
-#if HALFWORD_HEAP
if (!blk) {
- blk = create_carrier(allctr, *blk_szp, CFLG_MBC|CFLG_FORCE_MSEG);
- }
+ if ((*alcu_flgsp) & ERTS_ALCU_FLG_SBMBC)
+ blk = create_sbmbc(allctr, get_blk_sz);
+ else {
+#if HALFWORD_HEAP
+ blk = create_carrier(allctr, get_blk_sz, CFLG_MBC|CFLG_FORCE_MSEG);
#else
- if (!blk) {
- blk = create_carrier(allctr, *blk_szp, CFLG_MBC);
- if (!blk) {
- /* Emergency! We couldn't create the carrier as we wanted.
- Try to place it in a sys_alloced sbc. */
- blk = create_carrier(allctr,
- size,
- CFLG_SBC|CFLG_FORCE_SIZE|CFLG_FORCE_SYS_ALLOC);
+ blk = create_carrier(allctr, get_blk_sz, CFLG_MBC);
+ if (!blk) {
+ /* Emergency! We couldn't create the carrier as we wanted.
+ Try to place it in a sys_alloced sbc. */
+ blk = create_carrier(allctr,
+ size,
+ (CFLG_SBC
+ | CFLG_FORCE_SIZE
+ | CFLG_FORCE_SYS_ALLOC));
+ }
+#endif
}
}
-#endif
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
if (IS_MBC_BLK(blk)) {
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, *alcu_flgsp);
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, *alcu_flgsp);
}
#endif
@@ -664,7 +720,8 @@ mbc_alloc_finalize(Allctr_t *allctr,
Uint org_blk_sz,
UWord flags,
Uint want_blk_sz,
- int valid_blk_info)
+ int valid_blk_info,
+ Uint32 alcu_flgs)
{
Uint blk_sz;
Uint nxt_blk_sz;
@@ -700,7 +757,7 @@ mbc_alloc_finalize(Allctr_t *allctr,
SET_PREV_BLK_FREE(nxt_nxt_blk);
}
}
- (*allctr->link_free_block)(allctr, nxt_blk);
+ (*allctr->link_free_block)(allctr, nxt_blk, alcu_flgs);
ASSERT(IS_NOT_LAST_BLK(blk));
ASSERT(IS_FREE_BLK(nxt_blk));
@@ -741,7 +798,7 @@ mbc_alloc_finalize(Allctr_t *allctr,
: IS_NOT_LAST_BLK(blk));
}
- STAT_MBC_BLK_ALLOC(allctr, blk_sz);
+ STAT_MBC_BLK_ALLOC(allctr, blk_sz, alcu_flgs);
ASSERT(IS_ALLOCED_BLK(blk));
ASSERT(blk_sz == BLK_SZ(blk));
@@ -761,7 +818,8 @@ mbc_alloc(Allctr_t *allctr, Uint size)
{
Block_t *blk;
Uint blk_sz;
- blk = mbc_alloc_block(allctr, size, &blk_sz);
+ Uint32 alcu_flgs = 0;
+ blk = mbc_alloc_block(allctr, size, &blk_sz, &alcu_flgs);
if (!blk)
return NULL;
if (IS_MBC_BLK(blk))
@@ -770,7 +828,8 @@ mbc_alloc(Allctr_t *allctr, Uint size)
BLK_SZ(blk),
GET_BLK_HDR_FLGS(blk),
blk_sz,
- 1);
+ 1,
+ alcu_flgs);
return BLK2UMEM(blk);
}
@@ -779,6 +838,7 @@ mbc_free(Allctr_t *allctr, void *p)
{
Uint is_first_blk;
Uint is_last_blk;
+ Uint32 alcu_flgs = 0;
Uint blk_sz;
Block_t *blk;
Block_t *nxt_blk;
@@ -788,13 +848,15 @@ mbc_free(Allctr_t *allctr, void *p)
blk = UMEM2BLK(p);
blk_sz = BLK_SZ(blk);
+ if (blk_sz < allctr->sbmbc_threshold)
+ alcu_flgs |= ERTS_ALCU_FLG_SBMBC;
ASSERT(IS_MBC_BLK(blk));
ASSERT(blk_sz >= allctr->min_block_size);
HARD_CHECK_BLK_CARRIER(allctr, blk);
- STAT_MBC_BLK_FREE(allctr, blk_sz);
+ STAT_MBC_BLK_FREE(allctr, blk_sz, alcu_flgs);
is_first_blk = IS_FIRST_BLK(blk);
is_last_blk = IS_LAST_BLK(blk);
@@ -802,7 +864,7 @@ mbc_free(Allctr_t *allctr, void *p)
if (!is_first_blk && IS_PREV_BLK_FREE(blk)) {
/* Coalesce with previous block... */
blk = PREV_BLK(blk);
- (*allctr->unlink_free_block)(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, alcu_flgs);
blk_sz += BLK_SZ(blk);
is_first_blk = IS_FIRST_BLK(blk);
@@ -818,7 +880,7 @@ mbc_free(Allctr_t *allctr, void *p)
nxt_blk = NXT_BLK(blk);
if (IS_FREE_BLK(nxt_blk)) {
/* Coalesce with next block... */
- (*allctr->unlink_free_block)(allctr, nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_blk, alcu_flgs);
blk_sz += BLK_SZ(nxt_blk);
SET_BLK_SZ(blk, blk_sz);
@@ -850,16 +912,20 @@ mbc_free(Allctr_t *allctr, void *p)
if (is_first_blk
&& is_last_blk
- && allctr->main_carrier != FBLK2MBC(allctr, blk))
- destroy_carrier(allctr, blk);
+ && allctr->main_carrier != FBLK2MBC(allctr, blk)) {
+ if (alcu_flgs & ERTS_ALCU_FLG_SBMBC)
+ destroy_sbmbc(allctr, blk);
+ else
+ destroy_carrier(allctr, blk);
+ }
else {
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, alcu_flgs);
HARD_CHECK_BLK_CARRIER(allctr, blk);
}
}
static void *
-mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
+mbc_realloc(Allctr_t *allctr, void *p, Uint size, Uint32 alcu_flgs)
{
void *new_p;
Uint old_blk_sz;
@@ -867,7 +933,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
#ifndef MBC_REALLOC_ALWAYS_MOVES
Block_t *new_blk, *cand_blk;
Uint cand_blk_sz;
- Uint blk_sz;
+ Uint blk_sz, get_blk_sz;
Block_t *nxt_blk;
Uint nxt_blk_sz;
Uint is_last_blk;
@@ -883,10 +949,16 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
ASSERT(old_blk_sz >= allctr->min_block_size);
#ifdef MBC_REALLOC_ALWAYS_MOVES
- if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
#else /* !MBC_REALLOC_ALWAYS_MOVES */
- blk_sz = UMEMSZ2BLKSZ(allctr, size);
+ get_blk_sz = blk_sz = UMEMSZ2BLKSZ(allctr, size);
+ if ((alcu_flgs & ERTS_ALCU_FLG_SBMBC)
+ && (blk_sz + allctr->min_block_size > allctr->sbmbc_threshold)) {
+ /* Since we use block size to determine if blocks are
+ located in sbmbc or not... */
+ get_blk_sz = blk_sz + allctr->min_block_size;
+ }
ASSERT(IS_ALLOCED_BLK(blk));
ASSERT(IS_MBC_BLK(blk));
@@ -901,6 +973,9 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
Uint diff_sz_val = old_blk_sz - blk_sz;
Uint old_blk_sz_val = old_blk_sz;
+ if (get_blk_sz >= old_blk_sz)
+ return p;
+
if (diff_sz_val >= (~((Uint) 0) / 100)) {
/* div both by 128 */
old_blk_sz_val >>= 7;
@@ -909,7 +984,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
/* Avoid fragmentation by moving the block if it is shrunk much */
if (100*diff_sz_val > allctr->mbc_move_threshold*old_blk_sz_val) {
- if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
cand_blk_sz = old_blk_sz;
@@ -926,9 +1001,10 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
new_blk = (*allctr->get_free_block)(allctr,
- blk_sz,
+ get_blk_sz,
cand_blk,
- cand_blk_sz);
+ cand_blk_sz,
+ alcu_flgs);
if (new_blk || cand_blk != blk)
goto move_into_new_blk;
@@ -952,8 +1028,8 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
nxt_blk_sz,
SBH_THIS_FREE|SBH_PREV_ALLOCED|SBH_NOT_LAST_BLK);
- STAT_MBC_BLK_FREE(allctr, old_blk_sz);
- STAT_MBC_BLK_ALLOC(allctr, blk_sz);
+ STAT_MBC_BLK_FREE(allctr, old_blk_sz, alcu_flgs);
+ STAT_MBC_BLK_ALLOC(allctr, blk_sz, alcu_flgs);
ASSERT(BLK_SZ(blk) >= allctr->min_block_size);
@@ -964,7 +1040,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
if (IS_FREE_BLK(nxt_nxt_blk)) {
/* Coalesce with next free block... */
nxt_blk_sz += BLK_SZ(nxt_nxt_blk);
- (*allctr->unlink_free_block)(allctr, nxt_nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_nxt_blk, alcu_flgs);
SET_BLK_SZ(nxt_blk, nxt_blk_sz);
is_last_blk = IS_LAST_BLK(nxt_nxt_blk);
@@ -979,7 +1055,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
}
- (*allctr->link_free_block)(allctr, nxt_blk);
+ (*allctr->link_free_block)(allctr, nxt_blk, alcu_flgs);
ASSERT(IS_ALLOCED_BLK(blk));
@@ -1009,12 +1085,12 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
if (!is_last_blk) {
nxt_blk = NXT_BLK(blk);
nxt_blk_sz = BLK_SZ(nxt_blk);
- if (IS_FREE_BLK(nxt_blk) && blk_sz <= old_blk_sz + nxt_blk_sz) {
+ if (IS_FREE_BLK(nxt_blk) && get_blk_sz <= old_blk_sz + nxt_blk_sz) {
/* Grow into next block... */
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_blk, alcu_flgs);
nxt_blk_sz -= blk_sz - old_blk_sz;
is_last_blk = IS_LAST_BLK(nxt_blk);
@@ -1051,13 +1127,13 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
else
SET_BLK_SZ_FTR(nxt_blk, nxt_blk_sz);
- (*allctr->link_free_block)(allctr, nxt_blk);
+ (*allctr->link_free_block)(allctr, nxt_blk, alcu_flgs);
ASSERT(IS_FREE_BLK(nxt_blk));
}
- STAT_MBC_BLK_FREE(allctr, old_blk_sz);
- STAT_MBC_BLK_ALLOC(allctr, blk_sz);
+ STAT_MBC_BLK_FREE(allctr, old_blk_sz, alcu_flgs);
+ STAT_MBC_BLK_ALLOC(allctr, blk_sz, alcu_flgs);
ASSERT(IS_ALLOCED_BLK(blk));
@@ -1088,7 +1164,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
}
- if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
/* Need to grow in another block */
@@ -1108,7 +1184,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
}
- if (cand_blk_sz < blk_sz) {
+ if (cand_blk_sz < get_blk_sz) {
/* We wont fit in cand_blk get a new one */
#endif /* !MBC_REALLOC_ALWAYS_MOVES */
@@ -1127,9 +1203,10 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
/* We will at least fit in cand_blk */
new_blk = (*allctr->get_free_block)(allctr,
- blk_sz,
+ get_blk_sz,
cand_blk,
- cand_blk_sz);
+ cand_blk_sz,
+ alcu_flgs);
move_into_new_blk:
/*
* new_blk, and cand_blk have to be correctly set
@@ -1142,7 +1219,8 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
BLK_SZ(new_blk),
GET_BLK_HDR_FLGS(new_blk),
blk_sz,
- 1);
+ 1,
+ alcu_flgs);
new_p = BLK2UMEM(new_blk);
sys_memcpy(new_p, p, MIN(size, old_blk_sz - ABLK_HDR_SZ));
mbc_free(allctr, p);
@@ -1164,7 +1242,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, new_blk); /* prev */
+ (*allctr->unlink_free_block)(allctr, new_blk, alcu_flgs); /* prev */
if (is_last_blk)
new_blk_flgs |= LAST_BLK_HDR_FLG;
@@ -1173,7 +1251,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
if (IS_FREE_BLK(nxt_blk)) {
new_blk_flgs |= GET_LAST_BLK_HDR_FLG(nxt_blk);
new_blk_sz += BLK_SZ(nxt_blk);
- (*allctr->unlink_free_block)(allctr, nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_blk, alcu_flgs);
}
}
@@ -1196,9 +1274,10 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
new_blk_sz,
new_blk_flgs,
blk_sz,
- 0);
+ 0,
+ alcu_flgs);
- STAT_MBC_BLK_FREE(allctr, old_blk_sz);
+ STAT_MBC_BLK_FREE(allctr, old_blk_sz, alcu_flgs);
return new_p;
}
@@ -1243,6 +1322,100 @@ do { \
#define CHECK_1BLK_CARRIER(A, SBC, MSEGED, C, CSZ, B, BSZ)
#endif
+static Block_t *
+create_sbmbc(Allctr_t *allctr, Uint umem_sz)
+{
+ Block_t *blk;
+ Uint blk_sz;
+ Uint crr_sz = allctr->sbmbc_size;
+ Carrier_t *crr;
+
+#if HALFWORD_HEAP
+ if (allctr->mseg_opt.low_mem)
+ crr = erts_alloc(ERTS_ALC_T_SBMBC_LOW, crr_sz);
+ else
+#endif
+ crr = erts_alloc(ERTS_ALC_T_SBMBC, crr_sz);
+
+ INC_CC(allctr->calls.sbmbc_alloc);
+ SET_CARRIER_HDR(crr, crr_sz, SCH_SYS_ALLOC|SCH_MBC);
+
+ blk = MBC2FBLK(allctr, crr);
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
+ crr_sz -= sizeof(UWord);
+#endif
+
+ blk_sz = UNIT_FLOOR(crr_sz - allctr->mbc_header_size);
+
+ SET_MBC_BLK_FTR(((UWord *) blk)[-1]);
+ SET_BLK_HDR(blk, blk_sz, SBH_THIS_FREE|SBH_PREV_FREE|SBH_LAST_BLK);
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ *((Carrier_t **) NXT_BLK(blk)) = crr;
+#endif
+
+ link_carrier(&allctr->sbmbc_list, crr);
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
+ crr_sz += sizeof(UWord);
+#endif
+
+ STAT_SBMBC_ALLOC(allctr, crr_sz);
+ CHECK_1BLK_CARRIER(allctr, 0, 0, crr, crr_sz, blk, blk_sz);
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
+ crr_sz -= sizeof(UWord);
+#endif
+ if (allctr->creating_mbc)
+ (*allctr->creating_mbc)(allctr, crr, ERTS_ALCU_FLG_SBMBC);
+
+ DEBUG_SAVE_ALIGNMENT(crr);
+ return blk;
+}
+
+static void
+destroy_sbmbc(Allctr_t *allctr, Block_t *blk)
+{
+ Uint crr_sz;
+ Carrier_t *crr;
+
+ ASSERT(IS_FIRST_BLK(blk));
+
+ ASSERT(IS_MBC_BLK(blk));
+
+ crr = FBLK2MBC(allctr, blk);
+ crr_sz = CARRIER_SZ(crr);
+
+#ifdef DEBUG
+ if (!allctr->stopped) {
+ ASSERT(IS_LAST_BLK(blk));
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ (*allctr->link_free_block)(allctr, blk, ERTS_ALCU_FLG_SBMBC);
+ HARD_CHECK_BLK_CARRIER(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, ERTS_ALCU_FLG_SBMBC);
+#endif
+ }
+#endif
+
+ STAT_SBMBC_FREE(allctr, crr_sz);
+
+ unlink_carrier(&allctr->sbmbc_list, crr);
+ if (allctr->destroying_mbc)
+ (*allctr->destroying_mbc)(allctr, crr, ERTS_ALCU_FLG_SBMBC);
+
+ INC_CC(allctr->calls.sbmbc_free);
+
+#if HALFWORD_HEAP
+ if (allctr->mseg_opt.low_mem)
+ erts_free(ERTS_ALC_T_SBMBC_LOW, crr);
+ else
+#endif
+ erts_free(ERTS_ALC_T_SBMBC, crr);
+}
static Block_t *
create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
@@ -1271,11 +1444,11 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
if (erts_mseg_no() >= max_mseg_carriers)
goto try_sys_alloc;
if (flags & CFLG_SBC) {
- if (allctr->sbcs.curr_mseg.no >= allctr->max_mseg_sbcs)
+ if (allctr->sbcs.curr.norm.mseg.no >= allctr->max_mseg_sbcs)
goto try_sys_alloc;
}
else {
- if (allctr->mbcs.curr_mseg.no >= allctr->max_mseg_mbcs)
+ if (allctr->mbcs.curr.norm.mseg.no >= allctr->max_mseg_mbcs)
goto try_sys_alloc;
}
@@ -1289,7 +1462,7 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
if (crr_sz < allctr->mbc_header_size + blk_sz)
crr_sz = allctr->mbc_header_size + blk_sz;
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz += sizeof(UWord);
#endif
}
@@ -1330,7 +1503,7 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
&& bcrr_sz < allctr->smallest_mbc_size)
bcrr_sz = allctr->smallest_mbc_size;
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
bcrr_sz += sizeof(UWord);
#endif
@@ -1385,7 +1558,7 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
blk = MBC2FBLK(allctr, crr);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz -= sizeof(UWord);
#endif
@@ -1406,16 +1579,16 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
link_carrier(&allctr->mbc_list, crr);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz += sizeof(UWord);
#endif
CHECK_1BLK_CARRIER(allctr, 0, is_mseg, crr, crr_sz, blk, blk_sz);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz -= sizeof(UWord);
#endif
if (allctr->creating_mbc)
- (*allctr->creating_mbc)(allctr, crr);
+ (*allctr->creating_mbc)(allctr, crr, 0);
}
@@ -1595,9 +1768,9 @@ destroy_carrier(Allctr_t *allctr, Block_t *blk)
ASSERT(IS_LAST_BLK(blk));
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, 0);
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, 0);
#endif
}
#endif
@@ -1614,7 +1787,7 @@ destroy_carrier(Allctr_t *allctr, Block_t *blk)
unlink_carrier(&allctr->mbc_list, crr);
if (allctr->destroying_mbc)
- (*allctr->destroying_mbc)(allctr, crr);
+ (*allctr->destroying_mbc)(allctr, crr, 0);
}
@@ -1658,12 +1831,15 @@ static struct {
Eterm lmbcs;
Eterm smbcs;
Eterm mbcgs;
+ Eterm sbmbcs;
+ Eterm sbmbct;
#if HAVE_ERTS_MSEG
Eterm mmc;
#endif
Eterm ycs;
+ /* Eterm sbmbcs; */
Eterm mbcs;
Eterm sbcs;
Eterm sys_alloc_carriers_size;
@@ -1688,6 +1864,8 @@ static struct {
Eterm mseg_dealloc;
Eterm mseg_realloc;
#endif
+ Eterm sbmbc_alloc;
+ Eterm sbmbc_free;
#ifdef DEBUG
Eterm end_of_atoms;
#endif
@@ -1746,12 +1924,15 @@ init_atoms(Allctr_t *allctr)
AM_INIT(lmbcs);
AM_INIT(smbcs);
AM_INIT(mbcgs);
+ AM_INIT(sbmbcs);
+ AM_INIT(sbmbct);
#if HAVE_ERTS_MSEG
AM_INIT(mmc);
#endif
AM_INIT(ycs);
+ /*AM_INIT(sbmbcs);*/
AM_INIT(mbcs);
AM_INIT(sbcs);
AM_INIT(sys_alloc_carriers_size);
@@ -1776,6 +1957,8 @@ init_atoms(Allctr_t *allctr)
AM_INIT(mseg_dealloc);
AM_INIT(mseg_realloc);
#endif
+ AM_INIT(sbmbc_free);
+ AM_INIT(sbmbc_alloc);
#ifdef DEBUG
for (atom = (Eterm *) &am; atom < &am.end_of_atoms; atom++) {
@@ -1869,7 +2052,9 @@ sz_info_carriers(Allctr_t *allctr,
Uint *szp)
{
Eterm res = THE_NON_VALUE;
- Uint curr_size = cs->curr_mseg.size + cs->curr_sys_alloc.size;
+ Uint curr_size = (cs == &allctr->sbmbcs
+ ? cs->curr.small_block.size
+ : cs->curr.norm.mseg.size + cs->curr.norm.sys_alloc.size);
if (print_to_p) {
int to = *print_to_p;
@@ -1917,8 +2102,17 @@ info_carriers(Allctr_t *allctr,
Uint *szp)
{
Eterm res = THE_NON_VALUE;
- Uint curr_no = cs->curr_mseg.no + cs->curr_sys_alloc.no;
- Uint curr_size = cs->curr_mseg.size + cs->curr_sys_alloc.size;
+ Uint curr_no, curr_size;
+ int small_block = cs == &allctr->sbmbcs;
+
+ if (small_block) {
+ curr_no = cs->curr.small_block.no;
+ curr_size = cs->curr.small_block.size;
+ }
+ else {
+ curr_no = cs->curr.norm.mseg.no + cs->curr.norm.sys_alloc.no;
+ curr_size = cs->curr.norm.mseg.size + cs->curr.norm.sys_alloc.size;
+ }
if (print_to_p) {
int to = *print_to_p;
@@ -1944,18 +2138,20 @@ info_carriers(Allctr_t *allctr,
curr_no,
cs->max.no,
cs->max_ever.no);
+ if (!small_block) {
#if HAVE_ERTS_MSEG
- erts_print(to,
- arg,
- "%smseg carriers: %bpu\n",
- prefix,
- cs->curr_mseg.no);
-#endif
- erts_print(to,
- arg,
- "%ssys_alloc carriers: %bpu\n",
- prefix,
- cs->curr_sys_alloc.no);
+ erts_print(to,
+ arg,
+ "%smseg carriers: %bpu\n",
+ prefix,
+ cs->curr.norm.mseg.no);
+#endif
+ erts_print(to,
+ arg,
+ "%ssys_alloc carriers: %bpu\n",
+ prefix,
+ cs->curr.norm.sys_alloc.no);
+ }
erts_print(to,
arg,
"%scarriers size: %beu %bpu %bpu\n",
@@ -1963,43 +2159,49 @@ info_carriers(Allctr_t *allctr,
curr_size,
cs->max.size,
cs->max_ever.size);
+ if (!small_block) {
#if HAVE_ERTS_MSEG
- erts_print(to,
- arg,
- "%smseg carriers size: %bpu\n",
- prefix,
- cs->curr_mseg.size);
-#endif
- erts_print(to,
- arg,
- "%ssys_alloc carriers size: %bpu\n",
- prefix,
- cs->curr_sys_alloc.size);
+ erts_print(to,
+ arg,
+ "%smseg carriers size: %bpu\n",
+ prefix,
+ cs->curr.norm.mseg.size);
+#endif
+ erts_print(to,
+ arg,
+ "%ssys_alloc carriers size: %bpu\n",
+ prefix,
+ cs->curr.norm.sys_alloc.size);
+ }
}
if (hpp || szp) {
res = NIL;
- add_2tup(hpp, szp, &res,
- am.sys_alloc_carriers_size,
- bld_unstable_uint(hpp, szp, cs->curr_sys_alloc.size));
+ if (!small_block) {
+ add_2tup(hpp, szp, &res,
+ am.sys_alloc_carriers_size,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.sys_alloc.size));
#if HAVE_ERTS_MSEG
- add_2tup(hpp, szp, &res,
- am.mseg_alloc_carriers_size,
- bld_unstable_uint(hpp, szp, cs->curr_mseg.size));
+ add_2tup(hpp, szp, &res,
+ am.mseg_alloc_carriers_size,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.mseg.size));
#endif
+ }
add_4tup(hpp, szp, &res,
am.carriers_size,
bld_unstable_uint(hpp, szp, curr_size),
bld_unstable_uint(hpp, szp, cs->max.size),
bld_unstable_uint(hpp, szp, cs->max_ever.size));
- add_2tup(hpp, szp, &res,
- am.sys_alloc_carriers,
- bld_unstable_uint(hpp, szp, cs->curr_sys_alloc.no));
+ if (!small_block) {
+ add_2tup(hpp, szp, &res,
+ am.sys_alloc_carriers,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.sys_alloc.no));
#if HAVE_ERTS_MSEG
- add_2tup(hpp, szp, &res,
- am.mseg_alloc_carriers,
- bld_unstable_uint(hpp, szp, cs->curr_mseg.no));
+ add_2tup(hpp, szp, &res,
+ am.mseg_alloc_carriers,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.mseg.no));
#endif
+ }
add_4tup(hpp, szp, &res,
am.carriers,
bld_unstable_uint(hpp, szp, curr_no),
@@ -2077,6 +2279,9 @@ info_calls(Allctr_t *allctr,
PRINT_CC_5(to, arg, prefix, "free", allctr->calls.this_free);
PRINT_CC_5(to, arg, prefix, "realloc", allctr->calls.this_realloc);
+ PRINT_CC_4(to, arg, "sbmbc_alloc", allctr->calls.sbmbc_alloc);
+ PRINT_CC_4(to, arg, "sbmbc_free", allctr->calls.sbmbc_free);
+
#if HAVE_ERTS_MSEG
PRINT_CC_4(to, arg, "mseg_alloc", allctr->calls.mseg_alloc);
PRINT_CC_4(to, arg, "mseg_dealloc", allctr->calls.mseg_dealloc);
@@ -2128,6 +2333,14 @@ info_calls(Allctr_t *allctr,
bld_unstable_uint(hpp, szp, allctr->calls.mseg_alloc.no));
#endif
add_3tup(hpp, szp, &res,
+ am.sbmbc_free,
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_free.giga_no),
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_free.no));
+ add_3tup(hpp, szp, &res,
+ am.sbmbc_alloc,
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_alloc.giga_no),
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_alloc.no));
+ add_3tup(hpp, szp, &res,
allctr->name.realloc,
bld_unstable_uint(hpp, szp, allctr->calls.this_realloc.giga_no),
bld_unstable_uint(hpp, szp, allctr->calls.this_realloc.no));
@@ -2191,7 +2404,9 @@ info_options(Allctr_t *allctr,
#endif
"option lmbcs: %beu\n"
"option smbcs: %beu\n"
- "option mbcgs: %beu\n",
+ "option mbcgs: %beu\n"
+ "option sbmbcs: %beu\n"
+ "option sbmbct: %beu\n",
topt,
allctr->ramv ? "true" : "false",
#if HALFWORD_HEAP
@@ -2211,7 +2426,9 @@ info_options(Allctr_t *allctr,
#endif
allctr->largest_mbc_size,
allctr->smallest_mbc_size,
- allctr->mbc_growth_stages);
+ allctr->mbc_growth_stages,
+ allctr->sbmbc_size,
+ allctr->sbmbc_threshold);
}
res = (*allctr->info_options)(allctr, "option ", print_to_p, print_to_arg,
@@ -2219,6 +2436,12 @@ info_options(Allctr_t *allctr,
if (hpp || szp) {
add_2tup(hpp, szp, &res,
+ am.sbmbct,
+ bld_uint(hpp, szp, allctr->sbmbc_threshold));
+ add_2tup(hpp, szp, &res,
+ am.sbmbcs,
+ bld_uint(hpp, szp, allctr->sbmbc_size));
+ add_2tup(hpp, szp, &res,
am.mbcgs,
bld_uint(hpp, szp, allctr->mbc_growth_stages));
add_2tup(hpp, szp, &res,
@@ -2285,10 +2508,10 @@ update_max_ever_values(CarriersStats_t *cs)
static ERTS_INLINE void
reset_max_values(CarriersStats_t *cs)
{
- cs->max.no = cs->curr_mseg.no + cs->curr_sys_alloc.no;
- cs->max.size = cs->curr_mseg.size + cs->curr_sys_alloc.size;
- cs->blocks.max.no = cs->blocks.curr.no;
- cs->blocks.max.size = cs->blocks.curr.size;
+ cs->max.no = cs->curr.norm.mseg.no + cs->curr.norm.sys_alloc.no;
+ cs->max.size = cs->curr.norm.mseg.size + cs->curr.norm.sys_alloc.size;
+ cs->blocks.max.no = cs->blocks.curr.no;
+ cs->blocks.max.size = cs->blocks.curr.size;
}
@@ -2367,7 +2590,7 @@ erts_alcu_sz_info(Allctr_t *allctr,
Uint **hpp,
Uint *szp)
{
- Eterm res, mbcs, sbcs;
+ Eterm res, sbmbcs, mbcs, sbcs;
res = THE_NON_VALUE;
@@ -2389,24 +2612,29 @@ erts_alcu_sz_info(Allctr_t *allctr,
/* Update sbc values not continously updated */
allctr->sbcs.blocks.curr.no
- = allctr->sbcs.curr_mseg.no + allctr->sbcs.curr_sys_alloc.no;
+ = allctr->sbcs.curr.norm.mseg.no + allctr->sbcs.curr.norm.sys_alloc.no;
allctr->sbcs.blocks.max.no = allctr->sbcs.max.no;
+ update_max_ever_values(&allctr->sbmbcs);
update_max_ever_values(&allctr->mbcs);
update_max_ever_values(&allctr->sbcs);
- mbcs = sz_info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
- print_to_arg, hpp, szp);
- sbcs = sz_info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
- print_to_arg, hpp, szp);
+ sbmbcs = sz_info_carriers(allctr, &allctr->sbmbcs, "sbmbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ mbcs = sz_info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ sbcs = sz_info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
+ print_to_arg, hpp, szp);
if (hpp || szp) {
res = NIL;
add_2tup(hpp, szp, &res, am.sbcs, sbcs);
add_2tup(hpp, szp, &res, am.mbcs, mbcs);
+ add_2tup(hpp, szp, &res, am.sbmbcs, sbmbcs);
}
if (begin_max_period) {
+ reset_max_values(&allctr->sbmbcs);
reset_max_values(&allctr->mbcs);
reset_max_values(&allctr->sbcs);
}
@@ -2428,7 +2656,7 @@ erts_alcu_info(Allctr_t *allctr,
Uint **hpp,
Uint *szp)
{
- Eterm res, sett, mbcs, sbcs, calls;
+ Eterm res, sett, sbmbcs, mbcs, sbcs, calls;
res = THE_NON_VALUE;
@@ -2450,9 +2678,10 @@ erts_alcu_info(Allctr_t *allctr,
/* Update sbc values not continously updated */
allctr->sbcs.blocks.curr.no
- = allctr->sbcs.curr_mseg.no + allctr->sbcs.curr_sys_alloc.no;
+ = allctr->sbcs.curr.norm.mseg.no + allctr->sbcs.curr.norm.sys_alloc.no;
allctr->sbcs.blocks.max.no = allctr->sbcs.max.no;
+ update_max_ever_values(&allctr->sbmbcs);
update_max_ever_values(&allctr->mbcs);
update_max_ever_values(&allctr->sbcs);
@@ -2464,11 +2693,13 @@ erts_alcu_info(Allctr_t *allctr,
ERTS_ALCU_VSN_STR);
}
- sett = info_options(allctr, print_to_p, print_to_arg, hpp, szp);
- mbcs = info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
- print_to_arg, hpp, szp);
- sbcs = info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
- print_to_arg, hpp, szp);
+ sett = info_options(allctr, print_to_p, print_to_arg, hpp, szp);
+ sbmbcs = info_carriers(allctr, &allctr->sbmbcs, "sbmbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ mbcs = info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ sbcs = info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
+ print_to_arg, hpp, szp);
calls = info_calls(allctr, print_to_p, print_to_arg, hpp, szp);
if (hpp || szp) {
@@ -2477,6 +2708,7 @@ erts_alcu_info(Allctr_t *allctr,
add_2tup(hpp, szp, &res, am.calls, calls);
add_2tup(hpp, szp, &res, am.sbcs, sbcs);
add_2tup(hpp, szp, &res, am.mbcs, mbcs);
+ add_2tup(hpp, szp, &res, am.sbmbcs, sbmbcs);
add_2tup(hpp, szp, &res, am.options, sett);
add_3tup(hpp, szp, &res,
am.versions,
@@ -2485,6 +2717,7 @@ erts_alcu_info(Allctr_t *allctr,
}
if (begin_max_period) {
+ reset_max_values(&allctr->sbmbcs);
reset_max_values(&allctr->mbcs);
reset_max_values(&allctr->sbcs);
}
@@ -2508,12 +2741,14 @@ erts_alcu_current_size(Allctr_t *allctr, AllctrSize_t *size)
erts_mtx_lock(&allctr->mutex);
#endif
- size->carriers = allctr->mbcs.curr_mseg.size;
- size->carriers += allctr->mbcs.curr_sys_alloc.size;
- size->carriers += allctr->sbcs.curr_mseg.size;
- size->carriers += allctr->sbcs.curr_sys_alloc.size;
+ size->carriers = allctr->mbcs.curr.norm.mseg.size;
+ size->carriers += allctr->mbcs.curr.norm.sys_alloc.size;
+ size->carriers += allctr->sbmbcs.curr.small_block.size;
+ size->carriers += allctr->sbcs.curr.norm.mseg.size;
+ size->carriers += allctr->sbcs.curr.norm.sys_alloc.size;
size->blocks = allctr->mbcs.blocks.curr.size;
+ size->blocks += allctr->sbmbcs.blocks.curr.size;
size->blocks += allctr->sbcs.blocks.curr.size;
#ifdef USE_THREADS
@@ -2725,7 +2960,7 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
void *extra,
void *p,
Uint size,
- UWord flgs)
+ Uint32 alcu_flgs)
{
Allctr_t *allctr = (Allctr_t *) extra;
Block_t *blk;
@@ -2758,9 +2993,32 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
blk = UMEM2BLK(p);
+ if (allctr->sbmbc_threshold > 0) {
+ Uint old_sz, new_sz, lim;
+ lim = allctr->sbmbc_threshold;
+ old_sz = BLK_SZ(blk);
+ new_sz = UMEMSZ2BLKSZ(allctr, size);
+ if ((old_sz < lim && lim <= new_sz)
+ || (new_sz < lim && lim <= old_sz)) {
+ /* *Need* to move it... */
+
+ INC_CC(allctr->calls.this_realloc);
+ res = do_erts_alcu_alloc(type, extra, size);
+ DEC_CC(allctr->calls.this_alloc);
+
+ sys_memcpy(res, p, MIN(size, old_sz - ABLK_HDR_SZ));
+
+ do_erts_alcu_free(type, extra, p);
+ DEC_CC(allctr->calls.this_free);
+ return res;
+ }
+ if (old_sz < lim)
+ alcu_flgs |= ERTS_ALCU_FLG_SBMBC;
+ }
+
if (size < allctr->sbc_threshold) {
if (IS_MBC_BLK(blk))
- res = mbc_realloc(allctr, p, size, flgs);
+ res = mbc_realloc(allctr, p, size, alcu_flgs);
else {
Uint used_sz = allctr->sbc_header_size + ABLK_HDR_SZ + size;
Uint crr_sz;
@@ -2791,7 +3049,7 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
if (100*diff_sz_val < allctr->sbc_move_threshold*crr_sz_val)
/* Data won't be copied into a new carrier... */
goto do_carrier_resize;
- else if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ else if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
res = mbc_alloc(allctr, size);
@@ -2814,7 +3072,7 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
#endif
res = new_blk ? BLK2UMEM(new_blk) : NULL;
}
- else if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ else if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
else {
#if HALFWORD_HEAP
@@ -3174,6 +3432,29 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init)
allctr->min_block_size = UNIT_CEILING(allctr->min_block_size
+ sizeof(UWord));
+
+ allctr->sbmbc_threshold = init->sbmbct;
+
+ if (!erts_have_sbmbc_alloc
+#if HALFWORD_HEAP
+ || allctr->alloc_no == ERTS_ALC_A_SBMBC_LOW
+#endif
+ || allctr->alloc_no == ERTS_ALC_A_SBMBC)
+ allctr->sbmbc_threshold = 0;
+
+ if (!allctr->sbmbc_threshold)
+ allctr->sbmbc_size = 0;
+ else {
+ Uint min_size;
+ allctr->sbmbc_size = init->sbmbcs;
+ min_size = allctr->sbmbc_threshold;
+ min_size += allctr->min_block_size;
+ min_size += allctr->mbc_header_size;
+ if (allctr->sbmbc_size < min_size)
+ allctr->sbmbc_size = min_size;
+ }
+
+
#if HAVE_ERTS_MSEG
if (allctr->mseg_opt.abs_shrink_th > ~((UWord) 0) / 100)
allctr->mseg_opt.abs_shrink_th = ~((UWord) 0) / 100;
@@ -3185,12 +3466,16 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init)
#ifdef ERTS_ENABLE_LOCK_COUNT
erts_mtx_init_x_opt(&allctr->mutex,
- "alcu_allocator",
- make_small(allctr->alloc_no),
- ERTS_LCNT_LT_ALLOC);
+ allctr->alloc_no == ERTS_ALC_A_SBMBC
+ ? "sbmbc_alloc"
+ : "alcu_allocator",
+ make_small(allctr->alloc_no),
+ ERTS_LCNT_LT_ALLOC);
#else
erts_mtx_init_x(&allctr->mutex,
- "alcu_allocator",
+ allctr->alloc_no == ERTS_ALC_A_SBMBC
+ ? "sbmbc_alloc"
+ : "alcu_allocator",
make_small(allctr->alloc_no));
#endif /*ERTS_ENABLE_LOCK_COUNT*/
@@ -3260,7 +3545,7 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init)
if (!blk)
goto error;
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, 0);
HARD_CHECK_BLK_CARRIER(allctr, blk);
@@ -3290,6 +3575,8 @@ erts_alcu_stop(Allctr_t *allctr)
destroy_carrier(allctr, SBC2BLK(allctr, allctr->sbc_list.first));
while (allctr->mbc_list.first)
destroy_carrier(allctr, MBC2FBLK(allctr, allctr->mbc_list.first));
+ while (allctr->sbmbc_list.first)
+ destroy_sbmbc(allctr, MBC2FBLK(allctr, allctr->sbmbc_list.first));
#ifdef USE_THREADS
if (allctr->thread_safe)
@@ -3387,13 +3674,15 @@ erts_alcu_verify_unused(Allctr_t *allctr)
{
UWord no;
- no = allctr->sbcs.curr_mseg.no;
- no += allctr->sbcs.curr_sys_alloc.no;
+ no = allctr->sbcs.curr.norm.mseg.no;
+ no += allctr->sbcs.curr.norm.sys_alloc.no;
no += allctr->mbcs.blocks.curr.no;
+ no += allctr->sbmbcs.blocks.curr.no;
if (no) {
UWord sz = allctr->sbcs.blocks.curr.size;
sz += allctr->mbcs.blocks.curr.size;
+ sz += allctr->sbmbcs.blocks.curr.size;
erl_exit(ERTS_ABORT_EXIT,
"%salloc() used when expected to be unused!\n"
"Total amount of blocks allocated: %bpu\n"
@@ -3492,7 +3781,7 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
(*allctr->check_block)(allctr, blk, (int) is_free_blk);
if (IS_LAST_BLK(blk)) {
- carrier_end = ((char *) NXT_BLK(blk)) + sizeof(UWord);
+ carrier_end = ((char *) NXT_BLK(blk));
mbc = *((Carrier_t **) NXT_BLK(blk));
prev_blk = NULL;
blk = MBC2FBLK(allctr, mbc);
@@ -3507,9 +3796,9 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
ASSERT(IS_MB_CARRIER(mbc));
ASSERT((((char *) mbc)
+ allctr->mbc_header_size
- + tot_blk_sz
- + sizeof(UWord)) == carrier_end);
- ASSERT(((char *) mbc) + CARRIER_SZ(mbc) == carrier_end);
+ + tot_blk_sz) == carrier_end);
+ ASSERT(((char *) mbc) + CARRIER_SZ(mbc) - sizeof(Unit_t) <= carrier_end
+ && carrier_end <= ((char *) mbc) + CARRIER_SZ(mbc));
if (allctr->check_mbc)
(*allctr->check_mbc)(allctr, mbc);
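
The reworked assertions above relax the end-of-carrier check: the last block now ends at carrier_end, and the carrier size may exceed that point by less than one alignment unit. A toy check of that invariant, with sizeof(Unit_t) modelled as a constant:

    /* Sketch of the relaxed end-of-carrier invariant asserted above.
     * Addresses are plain numbers; Unit_t alignment is modelled as a constant. */
    #include <assert.h>
    #include <stdio.h>

    #define UNIT_SZ 8  /* stands in for sizeof(Unit_t) */

    static void check_carrier_end(unsigned long carrier_start,
                                  unsigned long carrier_sz,
                                  unsigned long header_sz,
                                  unsigned long tot_blk_sz)
    {
        unsigned long carrier_end = carrier_start + header_sz + tot_blk_sz;
        /* the carrier may be padded by less than one alignment unit */
        assert(carrier_start + carrier_sz - UNIT_SZ <= carrier_end);
        assert(carrier_end <= carrier_start + carrier_sz);
    }

    int main(void)
    {
        check_carrier_end(0x1000, 4096, 48, 4048);  /* exactly full  */
        check_carrier_end(0x1000, 4096, 48, 4044);  /* 4 bytes slack */
        puts("ok");
        return 0;
    }
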
@@ -3523,6 +3812,7 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
cl = &allctr->mbc_list;
}
+#if 0 /* FIXIT sbmbc */
if (cl->first == crr) {
ASSERT(!crr->prev);
}
@@ -3537,6 +3827,7 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
ASSERT(crr->next);
ASSERT(crr->next->prev == crr);
}
+#endif
}
#endif
diff --git a/erts/emulator/beam/erl_alloc_util.h b/erts/emulator/beam/erl_alloc_util.h
index ddf84c086c..fed4d3dbe6 100644
--- a/erts/emulator/beam/erl_alloc_util.h
+++ b/erts/emulator/beam/erl_alloc_util.h
@@ -34,6 +34,7 @@ typedef struct {
typedef struct {
char *name_prefix;
ErtsAlcType_t alloc_no;
+ int force;
int ts;
int tspec;
int tpref;
@@ -50,6 +51,8 @@ typedef struct {
UWord lmbcs;
UWord smbcs;
UWord mbcgs;
+ UWord sbmbct;
+ UWord sbmbcs;
} AllctrInit_t;
typedef struct {
@@ -67,6 +70,7 @@ typedef struct {
#define ERTS_DEFAULT_ALLCTR_INIT { \
NULL, \
ERTS_ALC_A_INVALID, /* (number) alloc_no: allocator number */\
+ 0, /* (bool) force: force enabled */\
1, /* (bool) ts: thread safe */\
0, /* (bool) tspec: thread specific */\
0, /* (bool) tpref: thread preferred */\
@@ -82,7 +86,9 @@ typedef struct {
10, /* (amount) mmmbc: max mseg mbcs */\
10*1024*1024, /* (bytes) lmbcs: largest mbc size */\
1024*1024, /* (bytes) smbcs: smallest mbc size */\
- 10 /* (amount) mbcgs: mbc growth stages */\
+ 10, /* (amount) mbcgs: mbc growth stages */\
+ 256, /* (bytes) sbmbct: small block mbc threshold */\
+ 8*1024 /* (bytes) sbmbcs: small block mbc size */\
}
#else /* if SMALL_MEMORY */
@@ -95,6 +101,7 @@ typedef struct {
#define ERTS_DEFAULT_ALLCTR_INIT { \
NULL, \
ERTS_ALC_A_INVALID, /* (number) alloc_no: allocator number */\
+ 0, /* (bool) force: force enabled */\
1, /* (bool) ts: thread safe */\
0, /* (bool) tspec: thread specific */\
0, /* (bool) tpref: thread preferred */\
@@ -109,7 +116,9 @@ typedef struct {
10, /* (amount) mmmbc: max mseg mbcs */\
1024*1024, /* (bytes) lmbcs: largest mbc size */\
128*1024, /* (bytes) smbcs: smallest mbc size */\
- 10 /* (amount) mbcgs: mbc growth stages */\
+ 10, /* (amount) mbcgs: mbc growth stages */\
+ 256, /* (bytes) sbmbct: small block mbc threshold */\
+ 8*1024 /* (bytes) sbmbcs: small block mbc size */\
}
#endif
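
Both default-init blocks gain the same three entries: a force flag plus the small-block threshold (256 bytes) and small-block carrier size (8 KB). The sketch below uses a hypothetical, trimmed-down mirror of AllctrInit_t to show how a caller might override the new parameters; the real struct carries many more members:

    /* Hypothetical, trimmed-down mirror of AllctrInit_t showing only the
     * fields touched by this change. */
    #include <stdio.h>

    typedef unsigned long UWord;

    typedef struct {
        int   force;    /* (bool)  force enabled             */
        UWord sbmbct;   /* (bytes) small block mbc threshold */
        UWord sbmbcs;   /* (bytes) small block mbc size      */
    } MiniAllctrInit_t;

    /* mirrors the non-SMALL_MEMORY defaults added in the hunk above */
    #define MINI_DEFAULT_INIT { 0, 256, 8*1024 }

    int main(void)
    {
        MiniAllctrInit_t init = MINI_DEFAULT_INIT;
        init.sbmbct = 512;        /* route blocks up to 512 bytes into sbmbcs */
        init.sbmbcs = 16 * 1024;  /* ...and use 16 KB small-block carriers    */
        printf("sbmbct=%lu sbmbcs=%lu force=%d\n", init.sbmbct, init.sbmbcs, init.force);
        return 0;
    }
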
@@ -144,6 +153,9 @@ void erts_alcu_current_size(Allctr_t *, AllctrSize_t *);
#if defined(GET_ERL_ALLOC_UTIL_IMPL) && !defined(ERL_ALLOC_UTIL_IMPL__)
#define ERL_ALLOC_UTIL_IMPL__
+#define ERTS_ALCU_FLG_FAIL_REALLOC_MOVE (((Uint32) 1) << 0)
+#define ERTS_ALCU_FLG_SBMBC (((Uint32) 1) << 1)
+
#ifdef USE_THREADS
#define ERL_THREADS_EMU_INTERNAL__
#include "erl_threads.h"
@@ -188,6 +200,8 @@ void erts_alcu_current_size(Allctr_t *, AllctrSize_t *);
#define CARRIER_SZ(C) \
((C)->chdr & SZ_MASK)
+extern int erts_have_sbmbc_alloc;
+
typedef union {char c[8]; long l; double d;} Unit_t;
typedef struct Carrier_t_ Carrier_t;
@@ -216,8 +230,13 @@ typedef struct {
} StatValues_t;
typedef struct {
- StatValues_t curr_mseg;
- StatValues_t curr_sys_alloc;
+ union {
+ struct {
+ StatValues_t mseg;
+ StatValues_t sys_alloc;
+ } norm;
+ StatValues_t small_block;
+ } curr;
StatValues_t max;
StatValues_t max_ever;
struct {
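
Normal carrier pools track mseg- and sys_alloc-backed carriers separately, while the small-block pool only needs a single counter, so the "curr" statistics become a union. A simplified stand-in showing both views; a given stats record uses one view or the other, never both:

    /* Simplified stand-in for the reshaped "curr" statistics field. */
    #include <stdio.h>

    typedef unsigned long UWord;
    typedef struct { UWord no, size; } StatValues;

    typedef union {
        struct {
            StatValues mseg;        /* carriers obtained through mseg      */
            StatValues sys_alloc;   /* carriers obtained through sys_alloc */
        } norm;
        StatValues small_block;     /* small-block carriers (sbmbcs)       */
    } CurrStats;

    int main(void)
    {
        CurrStats mbcs   = { .norm = { {2, 2*1024*1024}, {1, 128*1024} } };
        CurrStats sbmbcs = { .small_block = {3, 3*8*1024} };

        printf("normal carriers: %lu\n", mbcs.norm.mseg.no + mbcs.norm.sys_alloc.no);
        printf("small-block carriers: %lu\n", sbmbcs.small_block.no);
        return 0;
    }
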
@@ -257,6 +276,8 @@ struct Allctr_t_ {
Uint largest_mbc_size;
Uint smallest_mbc_size;
Uint mbc_growth_stages;
+ Uint sbmbc_threshold;
+ Uint sbmbc_size;
#if HAVE_ERTS_MSEG
ErtsMsegOpt_t mseg_opt;
#endif
@@ -269,6 +290,7 @@ struct Allctr_t_ {
Uint min_block_size;
/* Carriers */
+ CarrierList_t sbmbc_list;
CarrierList_t mbc_list;
CarrierList_t sbc_list;
@@ -277,15 +299,15 @@ struct Allctr_t_ {
/* Callback functions (first 4 are mandatory) */
Block_t * (*get_free_block) (Allctr_t *, Uint,
- Block_t *, Uint);
- void (*link_free_block) (Allctr_t *, Block_t *);
- void (*unlink_free_block) (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+ void (*link_free_block) (Allctr_t *, Block_t *, Uint32);
+ void (*unlink_free_block) (Allctr_t *, Block_t *, Uint32);
Eterm (*info_options) (Allctr_t *, char *, int *,
void *, Uint **, Uint *);
Uint (*get_next_mbc_size) (Allctr_t *);
- void (*creating_mbc) (Allctr_t *, Carrier_t *);
- void (*destroying_mbc) (Allctr_t *, Carrier_t *);
+ void (*creating_mbc) (Allctr_t *, Carrier_t *, Uint32);
+ void (*destroying_mbc) (Allctr_t *, Carrier_t *, Uint32);
void (*init_atoms) (void);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
@@ -312,6 +334,8 @@ struct Allctr_t_ {
CallCounter_t this_alloc;
CallCounter_t this_free;
CallCounter_t this_realloc;
+ CallCounter_t sbmbc_alloc;
+ CallCounter_t sbmbc_free;
CallCounter_t mseg_alloc;
CallCounter_t mseg_dealloc;
CallCounter_t mseg_realloc;
@@ -322,6 +346,7 @@ struct Allctr_t_ {
CarriersStats_t sbcs;
CarriersStats_t mbcs;
+ CarriersStats_t sbmbcs;
#ifdef DEBUG
#ifdef USE_THREADS
diff --git a/erts/emulator/beam/erl_bestfit_alloc.c b/erts/emulator/beam/erl_bestfit_alloc.c
index 3035e5df16..d9b1170a3d 100644
--- a/erts/emulator/beam/erl_bestfit_alloc.c
+++ b/erts/emulator/beam/erl_bestfit_alloc.c
@@ -84,24 +84,24 @@
#ifdef HARD_DEBUG
-static RBTree_t * check_tree(BFAllctr_t *, Uint);
+static RBTree_t * check_tree(RBTree_t, int, Uint);
#endif
-static void tree_delete(Allctr_t *allctr, Block_t *del);
+static void tree_delete(Allctr_t *allctr, Block_t *del, Uint32 flags);
/* Prototypes of callback functions */
/* "address order best fit" specific callback functions */
static Block_t * aobf_get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void aobf_link_free_block (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+static void aobf_link_free_block (Allctr_t *, Block_t *, Uint32);
#define aobf_unlink_free_block tree_delete
/* "best fit" specific callback functions */
static Block_t * bf_get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void bf_link_free_block (Allctr_t *, Block_t *);
-static ERTS_INLINE void bf_unlink_free_block (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+static void bf_link_free_block (Allctr_t *, Block_t *, Uint32);
+static ERTS_INLINE void bf_unlink_free_block (Allctr_t *, Block_t *, Uint32);
static Eterm info_options (Allctr_t *, char *, int *,
@@ -303,7 +303,7 @@ replace(RBTree_t **root, RBTree_t *x, RBTree_t *y)
}
static void
-tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
+tree_insert_fixup(RBTree_t **root, RBTree_t *blk)
{
RBTree_t *x = blk, *y;
@@ -311,7 +311,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
* Rearrange the tree so that it satisfies the Red-Black Tree properties
*/
- RBT_ASSERT(x != bfallctr->root && IS_RED(x->parent));
+ RBT_ASSERT(x != *root && IS_RED(x->parent));
do {
/*
@@ -336,7 +336,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
if (x == x->parent->right) {
x = x->parent;
- left_rotate(&bfallctr->root, x);
+ left_rotate(root, x);
}
RBT_ASSERT(x == x->parent->parent->left->left);
@@ -347,7 +347,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
SET_BLACK(x->parent);
SET_RED(x->parent->parent);
- right_rotate(&bfallctr->root, x->parent->parent);
+ right_rotate(root, x->parent->parent);
RBT_ASSERT(x == x->parent->left);
RBT_ASSERT(IS_RED(x));
@@ -370,7 +370,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
if (x == x->parent->left) {
x = x->parent;
- right_rotate(&bfallctr->root, x);
+ right_rotate(root, x);
}
RBT_ASSERT(x == x->parent->parent->right->right);
@@ -381,7 +381,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
SET_BLACK(x->parent);
SET_RED(x->parent->parent);
- left_rotate(&bfallctr->root, x->parent->parent);
+ left_rotate(root, x->parent->parent);
RBT_ASSERT(x == x->parent->right);
RBT_ASSERT(IS_RED(x));
@@ -390,9 +390,9 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
break;
}
}
- } while (x != bfallctr->root && IS_RED(x->parent));
+ } while (x != *root && IS_RED(x->parent));
- SET_BLACK(bfallctr->root);
+ SET_BLACK(*root);
}
@@ -402,18 +402,22 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
* callback function in the address order case.
*/
static void
-tree_delete(Allctr_t *allctr, Block_t *del)
+tree_delete(Allctr_t *allctr, Block_t *del, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
Uint spliced_is_black;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *x, *y, *z = (RBTree_t *) del;
RBTree_t null_x; /* null_x is used to get the fixup started when we
splice out a node without children. */
null_x.parent = NULL;
+
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(*root, bfallctr->address_order, 0);
#endif
/* Remove node from tree... */
@@ -440,8 +444,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
if (!y->parent) {
- RBT_ASSERT(bfallctr->root == y);
- bfallctr->root = x;
+ RBT_ASSERT(*root == y);
+ *root = x;
}
else if (y == y->parent->left)
y->parent->left = x;
@@ -451,7 +455,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
if (y != z) {
/* We spliced out the successor of z; replace z by the successor */
- replace(&bfallctr->root, z, y);
+ replace(root, z, y);
}
if (spliced_is_black) {
@@ -476,7 +480,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
SET_BLACK(y);
RBT_ASSERT(IS_BLACK(x->parent));
SET_RED(x->parent);
- left_rotate(&bfallctr->root, x->parent);
+ left_rotate(root, x->parent);
y = x->parent->right;
}
RBT_ASSERT(y);
@@ -489,7 +493,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
if (IS_BLACK(y->right)) {
SET_BLACK(y->left);
SET_RED(y);
- right_rotate(&bfallctr->root, y);
+ right_rotate(root, y);
y = x->parent->right;
}
RBT_ASSERT(y);
@@ -500,8 +504,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
RBT_ASSERT(y->right);
SET_BLACK(y->right);
- left_rotate(&bfallctr->root, x->parent);
- x = bfallctr->root;
+ left_rotate(root, x->parent);
+ x = *root;
break;
}
}
@@ -515,7 +519,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
SET_BLACK(y);
RBT_ASSERT(IS_BLACK(x->parent));
SET_RED(x->parent);
- right_rotate(&bfallctr->root, x->parent);
+ right_rotate(root, x->parent);
y = x->parent->left;
}
RBT_ASSERT(y);
@@ -528,7 +532,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
if (IS_BLACK(y->left)) {
SET_BLACK(y->right);
SET_RED(y);
- left_rotate(&bfallctr->root, y);
+ left_rotate(root, y);
y = x->parent->left;
}
RBT_ASSERT(y);
@@ -538,8 +542,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
RBT_ASSERT(y->left);
SET_BLACK(y->left);
- right_rotate(&bfallctr->root, x->parent);
- x = bfallctr->root;
+ right_rotate(root, x->parent);
+ x = *root;
break;
}
}
@@ -556,8 +560,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
RBT_ASSERT(!null_x.left);
RBT_ASSERT(!null_x.right);
}
- else if (bfallctr->root == &null_x) {
- bfallctr->root = NULL;
+ else if (*root == &null_x) {
+ *root = NULL;
RBT_ASSERT(!null_x.left);
RBT_ASSERT(!null_x.right);
}
@@ -567,7 +571,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
DESTROY_TREE_NODE(del);
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, bfallctr->address_order, 0);
#endif
}
@@ -577,23 +581,28 @@ tree_delete(Allctr_t *allctr, Block_t *del)
\* */
static void
-aobf_link_free_block(Allctr_t *allctr, Block_t *block)
+aobf_link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *blk = (RBTree_t *) block;
Uint blk_sz = BLK_SZ(blk);
+
+
blk->flags = 0;
blk->left = NULL;
blk->right = NULL;
- if (!bfallctr->root) {
+ if (!*root) {
blk->parent = NULL;
SET_BLACK(blk);
- bfallctr->root = blk;
+ *root = blk;
}
else {
- RBTree_t *x = bfallctr->root;
+ RBTree_t *x = *root;
while (1) {
Uint size;
@@ -623,28 +632,32 @@ aobf_link_free_block(Allctr_t *allctr, Block_t *block)
SET_RED(blk);
if (IS_RED(blk->parent))
- tree_insert_fixup(bfallctr, blk);
+ tree_insert_fixup(root, blk);
}
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, 1, 0);
#endif
}
#if 0 /* tree_delete() is directly used instead */
static void
-aobf_unlink_free_block(Allctr_t *allctr, Block_t *block)
+aobf_unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
- tree_delete(allctr, block);
+ tree_delete(allctr, block, flags);
}
#endif
static Block_t *
aobf_get_free_block(Allctr_t *allctr, Uint size,
- Block_t *cand_blk, Uint cand_size)
+ Block_t *cand_blk, Uint cand_size,
+ Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
- RBTree_t *x = bfallctr->root;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
+ RBTree_t *x = *root;
RBTree_t *blk = NULL;
Uint blk_sz;
@@ -665,7 +678,7 @@ aobf_get_free_block(Allctr_t *allctr, Uint size,
return NULL;
#ifdef HARD_DEBUG
- ASSERT(blk == check_tree(bfallctr, size));
+ ASSERT(blk == check_tree(root, 1, size));
#endif
if (cand_blk) {
@@ -676,7 +689,7 @@ aobf_get_free_block(Allctr_t *allctr, Uint size,
return NULL; /* cand_blk was better */
}
- aobf_unlink_free_block(allctr, (Block_t *) blk);
+ aobf_unlink_free_block(allctr, (Block_t *) blk, flags);
return (Block_t *) blk;
}
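
Every best-fit callback now starts by picking one of two red-black trees, so free blocks in small-block carriers never satisfy normal mbc requests and vice versa. The selection pattern repeated throughout this file, as a standalone sketch with simplified stand-ins for BFAllctr_t and RBTree_t:

    /* Sketch of the root-selection pattern repeated in each callback above.
     * BFAlloc/RBNode are simplified stand-ins, not the ERTS types. */
    #include <stdint.h>
    #include <stdio.h>

    #define FLG_SBMBC (((uint32_t) 1) << 1)   /* mirrors ERTS_ALCU_FLG_SBMBC */

    typedef struct RBNode { struct RBNode *left, *right, *parent; } RBNode;
    typedef struct { RBNode *mbc_root; RBNode *sbmbc_root; int address_order; } BFAlloc;

    static RBNode **select_root(BFAlloc *bf, uint32_t flags)
    {
        /* blocks in small-block carriers live in their own tree so that the
         * two pools never serve each other's requests */
        return (flags & FLG_SBMBC) ? &bf->sbmbc_root : &bf->mbc_root;
    }

    int main(void)
    {
        BFAlloc bf = { 0, 0, 1 };
        printf("same root? %d\n", select_root(&bf, 0) == select_root(&bf, FLG_SBMBC));
        return 0;
    }
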
@@ -687,9 +700,12 @@ aobf_get_free_block(Allctr_t *allctr, Uint size,
\* */
static void
-bf_link_free_block(Allctr_t *allctr, Block_t *block)
+bf_link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *blk = (RBTree_t *) block;
Uint blk_sz = BLK_SZ(blk);
@@ -700,13 +716,13 @@ bf_link_free_block(Allctr_t *allctr, Block_t *block)
blk->left = NULL;
blk->right = NULL;
- if (!bfallctr->root) {
+ if (!*root) {
blk->parent = NULL;
SET_BLACK(blk);
- bfallctr->root = blk;
+ *root = blk;
}
else {
- RBTree_t *x = bfallctr->root;
+ RBTree_t *x = *root;
while (1) {
Uint size;
@@ -745,7 +761,7 @@ bf_link_free_block(Allctr_t *allctr, Block_t *block)
SET_RED(blk);
if (IS_RED(blk->parent))
- tree_insert_fixup(bfallctr, blk);
+ tree_insert_fixup(root, blk);
}
@@ -753,14 +769,17 @@ bf_link_free_block(Allctr_t *allctr, Block_t *block)
LIST_NEXT(blk) = NULL;
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, 0, 0);
#endif
}
static ERTS_INLINE void
-bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
+bf_unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *x = (RBTree_t *) block;
if (IS_LIST_ELEM(x)) {
@@ -778,9 +797,9 @@ bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
ASSERT(IS_LIST_ELEM(LIST_NEXT(x)));
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, 0, 0);
#endif
- replace(&bfallctr->root, x, LIST_NEXT(x));
+ replace(root, x, LIST_NEXT(x));
#ifdef HARD_DEBUG
check_tree(bfallctr, 0);
@@ -788,7 +807,7 @@ bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
}
else {
/* Remove from tree */
- tree_delete(allctr, block);
+ tree_delete(allctr, block, flags);
}
DESTROY_LIST_ELEM(x);
@@ -797,10 +816,14 @@ bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
static Block_t *
bf_get_free_block(Allctr_t *allctr, Uint size,
- Block_t *cand_blk, Uint cand_size)
+ Block_t *cand_blk, Uint cand_size,
+ Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
- RBTree_t *x = bfallctr->root;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
+ RBTree_t *x = *root;
RBTree_t *blk = NULL;
Uint blk_sz;
@@ -827,7 +850,7 @@ bf_get_free_block(Allctr_t *allctr, Uint size,
#ifdef HARD_DEBUG
{
- RBTree_t *ct_blk = check_tree(bfallctr, size);
+ RBTree_t *ct_blk = check_tree(root, 0, size);
ASSERT(BLK_SZ(ct_blk) == BLK_SZ(blk));
}
#endif
@@ -839,7 +862,7 @@ bf_get_free_block(Allctr_t *allctr, Uint size,
the tree node */
blk = LIST_NEXT(blk) ? LIST_NEXT(blk) : blk;
- bf_unlink_free_block(allctr, (Block_t *) blk);
+ bf_unlink_free_block(allctr, (Block_t *) blk, flags);
return (Block_t *) blk;
}
@@ -949,7 +972,7 @@ erts_bfalc_test(unsigned long op, unsigned long a1, unsigned long a2)
{
switch (op) {
case 0x200: return (unsigned long) ((BFAllctr_t *) a1)->address_order;
- case 0x201: return (unsigned long) ((BFAllctr_t *) a1)->root;
+ case 0x201: return (unsigned long) ((BFAllctr_t *) a1)->mbc_root;
case 0x202: return (unsigned long) ((RBTree_t *) a1)->parent;
case 0x203: return (unsigned long) ((RBTree_t *) a1)->left;
case 0x204: return (unsigned long) ((RBTree_t *) a1)->right;
@@ -985,7 +1008,7 @@ erts_bfalc_test(unsigned long op, unsigned long a1, unsigned long a2)
#endif
#ifdef PRINT_TREE
-static void print_tree(BFAllctr_t *);
+static void print_tree(RBTree_t *, int);
#endif
/*
@@ -1003,7 +1026,7 @@ static void print_tree(BFAllctr_t *);
*/
static RBTree_t *
-check_tree(BFAllctr_t *bfallctr, Uint size)
+check_tree(RBTree_t *root, int ao, Uint size)
{
RBTree_t *res = NULL;
Sint blacks;
@@ -1011,13 +1034,13 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
RBTree_t *x;
#ifdef PRINT_TREE
- print_tree(bfallctr);
+ print_tree(root, ao);
#endif
- if (!bfallctr->root)
+ if (!root)
return res;
- x = bfallctr->root;
+ x = root;
ASSERT(IS_BLACK(x));
ASSERT(!x->parent);
curr_blacks = 1;
@@ -1060,11 +1083,11 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
ASSERT(IS_BLACK(x->left));
}
- ASSERT(x->parent || x == bfallctr->root);
+ ASSERT(x->parent || x == root);
if (x->left) {
ASSERT(x->left->parent == x);
- if (bfallctr->address_order) {
+ if (ao) {
ASSERT(BLK_SZ(x->left) < BLK_SZ(x)
|| (BLK_SZ(x->left) == BLK_SZ(x) && x->left < x));
}
@@ -1076,7 +1099,7 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
if (x->right) {
ASSERT(x->right->parent == x);
- if (bfallctr->address_order) {
+ if (ao) {
ASSERT(BLK_SZ(x->right) > BLK_SZ(x)
|| (BLK_SZ(x->right) == BLK_SZ(x) && x->right > x));
}
@@ -1087,7 +1110,7 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
}
if (size && BLK_SZ(x) >= size) {
- if (bfallctr->address_order) {
+ if (ao) {
if (!res
|| BLK_SZ(x) < BLK_SZ(res)
|| (BLK_SZ(x) == BLK_SZ(res) && x < res))
@@ -1109,8 +1132,8 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
ASSERT(curr_blacks == 0);
- UNSET_LEFT_VISITED(bfallctr->root);
- UNSET_RIGHT_VISITED(bfallctr->root);
+ UNSET_LEFT_VISITED(root);
+ UNSET_RIGHT_VISITED(root);
return res;
@@ -1148,11 +1171,11 @@ print_tree_aux(RBTree_t *x, int indent)
static void
-print_tree(BFAllctr_t *bfallctr)
+print_tree(RBTree_t *root, int ao)
{
- char *type = bfallctr->address_order ? "Size-Adress" : "Size";
+ char *type = ao ? "Size-Adress" : "Size";
fprintf(stderr, " --- %s tree begin ---\r\n", type);
- print_tree_aux(bfallctr->root, 0);
+ print_tree_aux(root, 0);
fprintf(stderr, " --- %s tree end ---\r\n", type);
}
diff --git a/erts/emulator/beam/erl_bestfit_alloc.h b/erts/emulator/beam/erl_bestfit_alloc.h
index cb35e21e57..faa2d9742e 100644
--- a/erts/emulator/beam/erl_bestfit_alloc.h
+++ b/erts/emulator/beam/erl_bestfit_alloc.h
@@ -54,7 +54,8 @@ typedef struct RBTree_t_ RBTree_t;
struct BFAllctr_t_ {
Allctr_t allctr; /* Has to be first! */
- RBTree_t * root;
+ RBTree_t * mbc_root;
+ RBTree_t * sbmbc_root;
int address_order;
};
diff --git a/erts/emulator/beam/erl_goodfit_alloc.c b/erts/emulator/beam/erl_goodfit_alloc.c
index 76b206d76f..1cc508ac5a 100644
--- a/erts/emulator/beam/erl_goodfit_alloc.c
+++ b/erts/emulator/beam/erl_goodfit_alloc.c
@@ -163,10 +163,10 @@ BKT_MIN_SZ(GFAllctr_t *gfallctr, int ix)
/* Prototypes of callback functions */
static Block_t * get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void link_free_block (Allctr_t *, Block_t *);
-static void unlink_free_block (Allctr_t *, Block_t *);
-static void update_last_aux_mbc (Allctr_t *, Carrier_t *);
+ Block_t *, Uint, Uint32);
+static void link_free_block (Allctr_t *, Block_t *, Uint32);
+static void unlink_free_block (Allctr_t *, Block_t *, Uint32);
+static void update_last_aux_mbc (Allctr_t *, Carrier_t *, Uint32);
static Eterm info_options (Allctr_t *, char *, int *,
void *, Uint **, Uint *);
static void init_atoms (void);
@@ -197,6 +197,8 @@ erts_gfalc_start(GFAllctr_t *gfallctr,
is a struct). */
Allctr_t *allctr = (Allctr_t *) gfallctr;
+ init->sbmbct = 0; /* Small mbc not yet supported by goodfit */
+
sys_memcpy((void *) gfallctr, (void *) &nulled_state, sizeof(GFAllctr_t));
allctr->mbc_header_size = sizeof(Carrier_t);
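
An allocator strategy that does not implement the scheme opts out by zeroing init->sbmbct before copying its nulled state, as goodfit does above; the generic start code then disables small-block carriers for that instance (see the threshold clamping earlier in this diff). A toy illustration of the opt-out, with simplified stand-in types:

    /* Toy illustration of the opt-out used above: zeroing the threshold in
     * the init struct disables small-block carriers for this instance. */
    #include <stdio.h>

    typedef unsigned long Uint;
    struct init { Uint sbmbct, sbmbcs; };

    static void start_without_sbmbc(struct init *init)
    {
        init->sbmbct = 0;   /* small mbc not supported by this strategy */
        /* ...the generic start code then sets sbmbc_threshold = sbmbc_size = 0 */
    }

    int main(void)
    {
        struct init i = { 256, 8 * 1024 };
        start_without_sbmbc(&i);
        printf("sbmbct=%lu\n", i.sbmbct);
        return 0;
    }
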
@@ -379,7 +381,7 @@ search_bucket(Allctr_t *allctr, int ix, Uint size)
static Block_t *
get_free_block(Allctr_t *allctr, Uint size,
- Block_t *cand_blk, Uint cand_size)
+ Block_t *cand_blk, Uint cand_size, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
int unsafe_bi, min_bi;
@@ -398,7 +400,7 @@ get_free_block(Allctr_t *allctr, Uint size,
if (blk) {
if (cand_blk && cand_size <= BLK_SZ(blk))
return NULL; /* cand_blk was better */
- unlink_free_block(allctr, blk);
+ unlink_free_block(allctr, blk, flags);
return blk;
}
if (min_bi < NO_OF_BKTS - 1) {
@@ -418,14 +420,14 @@ get_free_block(Allctr_t *allctr, Uint size,
ASSERT(blk);
if (cand_blk && cand_size <= BLK_SZ(blk))
return NULL; /* cand_blk was better */
- unlink_free_block(allctr, blk);
+ unlink_free_block(allctr, blk, flags);
return blk;
}
static void
-link_free_block(Allctr_t *allctr, Block_t *block)
+link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
GFFreeBlock_t *blk = (GFFreeBlock_t *) block;
@@ -446,7 +448,7 @@ link_free_block(Allctr_t *allctr, Block_t *block)
}
static void
-unlink_free_block(Allctr_t *allctr, Block_t *block)
+unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
GFFreeBlock_t *blk = (GFFreeBlock_t *) block;
@@ -467,7 +469,7 @@ unlink_free_block(Allctr_t *allctr, Block_t *block)
}
static void
-update_last_aux_mbc(Allctr_t *allctr, Carrier_t *mbc)
+update_last_aux_mbc(Allctr_t *allctr, Carrier_t *mbc, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
diff --git a/erts/emulator/beam/erl_instrument.c b/erts/emulator/beam/erl_instrument.c
index f3f3c22933..c5615818f2 100644
--- a/erts/emulator/beam/erl_instrument.c
+++ b/erts/emulator/beam/erl_instrument.c
@@ -1152,6 +1152,14 @@ erts_instr_get_type_info(Process *proc)
return res;
}
+#if HALFWORD_HEAP
+#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \
+ ((NO) == ERTS_ALC_A_SBMBC || (NO) == ERTS_ALC_A_SBMBC_LOW)
+#else
+#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \
+ ((NO) == ERTS_ALC_A_SBMBC)
+#endif
+
Uint
erts_instr_init(int stat, int map_stat)
{
@@ -1186,6 +1194,8 @@ erts_instr_init(int stat, int map_stat)
sys_memzero((void *) stats->n, sizeof(Stat_t)*(ERTS_ALC_N_MAX+1));
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
+ if (ERTS_IS_SBMBC_ALLOCATOR_NO__(i))
+ continue;
if (erts_allctrs_info[i].enabled)
stats->ap[i] = &stats->a[i];
else
@@ -1199,6 +1209,8 @@ erts_instr_init(int stat, int map_stat)
erts_instr_memory_map = 1;
erts_instr_stat = 1;
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
+ if (ERTS_IS_SBMBC_ALLOCATOR_NO__(i))
+ continue;
erts_allctrs[i].alloc = map_stat_alloc;
erts_allctrs[i].realloc = map_stat_realloc;
erts_allctrs[i].free = map_stat_free;
@@ -1209,6 +1221,8 @@ erts_instr_init(int stat, int map_stat)
else {
erts_instr_stat = 1;
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
+ if (ERTS_IS_SBMBC_ALLOCATOR_NO__(i))
+ continue;
erts_allctrs[i].alloc = stat_alloc;
erts_allctrs[i].realloc = stat_realloc;
erts_allctrs[i].free = stat_free;
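
erts_instr_init() never wraps the sbmbc allocator(s) themselves, so each of the hook-installation loops above skips them via the new macro. A standalone sketch of the skip pattern; the allocator numbers below are made-up placeholders, not the real ERTS_ALC_A_* values:

    /* Sketch of the skip pattern added to erts_instr_init() above. */
    #include <stdio.h>

    enum { A_MIN = 1, A_SBMBC = 2, A_STD = 3, A_MAX = 3 };   /* hypothetical numbers */

    #define IS_SBMBC_ALLOCATOR_NO(no) ((no) == A_SBMBC)

    int main(void)
    {
        for (int i = A_MIN; i <= A_MAX; i++) {
            if (IS_SBMBC_ALLOCATOR_NO(i))
                continue;             /* never wrap the sbmbc allocator itself */
            printf("installing instrumentation hooks for allocator %d\n", i);
        }
        return 0;
    }
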
diff --git a/erts/emulator/beam/erl_lock_check.c b/erts/emulator/beam/erl_lock_check.c
index 9180508a49..587d82f2bb 100644
--- a/erts/emulator/beam/erl_lock_check.c
+++ b/erts/emulator/beam/erl_lock_check.c
@@ -153,6 +153,7 @@ static erts_lc_lock_order_t erts_lock_order[] = {
{ "instr", NULL },
{ "fix_alloc", "index" },
{ "alcu_allocator", "index" },
+ { "sbmbc_alloc", "index" },
{ "alcu_delayed_free", "index" },
{ "mseg", NULL },
#if HALFWORD_HEAP
diff --git a/erts/emulator/test/nif_SUITE.erl b/erts/emulator/test/nif_SUITE.erl
index 91d695d979..b9b964f526 100644
--- a/erts/emulator/test/nif_SUITE.erl
+++ b/erts/emulator/test/nif_SUITE.erl
@@ -1170,9 +1170,10 @@ tmpmem() ->
MemInfo ->
MSBCS = lists:foldl(
fun ({instance, _, L}, Acc) ->
+ {value,{_,SBMBCS}} = lists:keysearch(sbmbcs, 1, L),
{value,{_,MBCS}} = lists:keysearch(mbcs, 1, L),
{value,{_,SBCS}} = lists:keysearch(sbcs, 1, L),
- [MBCS,SBCS | Acc]
+ [SBMBCS,MBCS,SBCS | Acc]
end,
[],
MemInfo),
diff --git a/erts/emulator/test/send_term_SUITE.erl b/erts/emulator/test/send_term_SUITE.erl
index 6615873392..ba0ba804ca 100644
--- a/erts/emulator/test/send_term_SUITE.erl
+++ b/erts/emulator/test/send_term_SUITE.erl
@@ -175,6 +175,10 @@ chk_temp_alloc() ->
%% Verify that we havn't got anything allocated by temp_alloc
lists:foreach(
fun ({instance, _, TI}) ->
+ ?line {value, {sbmbcs, SBMBCInfo}}
+ = lists:keysearch(sbmbcs, 1, TI),
+ ?line {value, {blocks, 0, _, _}}
+ = lists:keysearch(blocks, 1, SBMBCInfo),
?line {value, {mbcs, MBCInfo}}
= lists:keysearch(mbcs, 1, TI),
?line {value, {blocks, 0, _, _}}