path: root/erts
author     Anders Svensson <[email protected]>  2011-08-16 17:01:45 +0200
committer  Anders Svensson <[email protected]>  2011-08-16 17:01:45 +0200
commit     a7c0e439ee84d25ce18f65959be06315063a7de8 (patch)
tree       b5a54174171fdd0f10d556c8a96909eb4af7f1d6 /erts
parent     5a57f3798edd56caa61514378c673ff78ac397d2 (diff)
parent     46af29aac0e851105620dab25644cf27219e2b0a (diff)
download   otp-a7c0e439ee84d25ce18f65959be06315063a7de8.tar.gz
           otp-a7c0e439ee84d25ce18f65959be06315063a7de8.tar.bz2
           otp-a7c0e439ee84d25ce18f65959be06315063a7de8.zip
Merge remote branch 'upstream/dev' into dev
Diffstat (limited to 'erts')
-rw-r--r--  erts/doc/src/erts_alloc.xml | 66
-rw-r--r--  erts/emulator/Makefile.in | 2
-rw-r--r--  erts/emulator/beam/beam_emu.c | 2
-rw-r--r--  erts/emulator/beam/beam_load.c | 181
-rw-r--r--  erts/emulator/beam/erl_afit_alloc.c | 15
-rw-r--r--  erts/emulator/beam/erl_alloc.c | 184
-rw-r--r--  erts/emulator/beam/erl_alloc.h | 8
-rw-r--r--  erts/emulator/beam/erl_alloc.types | 15
-rw-r--r--  erts/emulator/beam/erl_alloc_util.c | 698
-rw-r--r--  erts/emulator/beam/erl_alloc_util.h | 43
-rw-r--r--  erts/emulator/beam/erl_ao_firstfit_alloc.c | 972
-rw-r--r--  erts/emulator/beam/erl_ao_firstfit_alloc.h | 60
-rw-r--r--  erts/emulator/beam/erl_bestfit_alloc.c | 168
-rw-r--r--  erts/emulator/beam/erl_bestfit_alloc.h | 3
-rw-r--r--  erts/emulator/beam/erl_bits.h | 2
-rw-r--r--  erts/emulator/beam/erl_db.c | 47
-rw-r--r--  erts/emulator/beam/erl_db_util.c | 4
-rw-r--r--  erts/emulator/beam/erl_goodfit_alloc.c | 22
-rw-r--r--  erts/emulator/beam/erl_instrument.c | 6
-rw-r--r--  erts/emulator/beam/erl_lock_check.c | 1
-rw-r--r--  erts/emulator/beam/erl_nif.c | 10
-rw-r--r--  erts/emulator/beam/ops.tab | 2
-rw-r--r--  erts/emulator/drivers/common/inet_drv.c | 4
-rw-r--r--  erts/emulator/hipe/hipe_mode_switch.c | 7
-rw-r--r--  erts/emulator/test/alloc_SUITE_data/allocator_test.h | 20
-rw-r--r--  erts/emulator/test/alloc_SUITE_data/coalesce.c | 2
-rw-r--r--  erts/emulator/test/alloc_SUITE_data/rbtree.c | 86
-rw-r--r--  erts/emulator/test/bs_construct_SUITE.erl | 17
-rw-r--r--  erts/emulator/test/fun_SUITE.erl | 10
-rw-r--r--  erts/emulator/test/hibernate_SUITE.erl | 31
-rw-r--r--  erts/emulator/test/match_spec_SUITE.erl | 61
-rw-r--r--  erts/emulator/test/nif_SUITE.erl | 45
-rw-r--r--  erts/emulator/test/nif_SUITE_data/nif_SUITE.c | 46
-rw-r--r--  erts/emulator/test/send_term_SUITE.erl | 4
-rwxr-xr-x  erts/emulator/utils/beam_makeops | 71
-rw-r--r--  erts/epmd/src/epmd_int.h | 9
-rw-r--r--  erts/epmd/src/epmd_srv.c | 20
-rw-r--r--  erts/etc/common/erlexec.c | 3
-rw-r--r--  erts/lib_src/common/erl_misc_utils.c | 2
-rw-r--r--  erts/lib_src/common/erl_printf_format.c | 4
40 files changed, 2399 insertions, 554 deletions
diff --git a/erts/doc/src/erts_alloc.xml b/erts/doc/src/erts_alloc.xml
index 51a4a2bca0..90347824d5 100644
--- a/erts/doc/src/erts_alloc.xml
+++ b/erts/doc/src/erts_alloc.xml
@@ -78,14 +78,20 @@
segments are allocated, cached segments are used if possible
instead of creating new segments. This in order to reduce
the number of system calls made.</item>
+ <tag><c>sbmbc_alloc</c></tag>
+ <item>Allocator used by other allocators for allocation of carriers
+ where only small blocks are placed. Currently this allocator is
+ disabled by default.</item>
</taglist>
<p><c>sys_alloc</c> and <c>fix_alloc</c> are always enabled and
cannot be disabled. <c>mseg_alloc</c> is always enabled if it is
available and an allocator that uses it is enabled. All other
allocators can be <seealso marker="#M_e">enabled or disabled</seealso>.
By default all allocators are enabled.
- When an allocator is disabled, <c>sys_alloc</c>
- is used instead of the disabled allocator.</p>
+ When an allocator is disabled, <c>sys_alloc</c> is used instead of
+ the disabled allocator. <c>sbmbc_alloc</c> is an exception. If
+ <c>sbmbc_alloc</c> is disabled, other allocators will not handle
+ small blocks in separate carriers.</p>
<p>The main idea with the <c>erts_alloc</c> library is to separate
memory blocks that are used differently into different memory
areas, and by this achieving less memory fragmentation. By
@@ -103,15 +109,20 @@
following does <em>not</em> apply to them.</p>
<p>An allocator manages multiple areas, called carriers, in which
memory blocks are placed. A carrier is either placed in a
- separate memory segment (allocated via <c>mseg_alloc</c>) or in
- the heap segment (allocated via <c>sys_alloc</c>). Multiblock
+ separate memory segment (allocated via <c>mseg_alloc</c>), in
+ the heap segment (allocated via <c>sys_alloc</c>), or inside
+ another carrier (in case it is a carrier created by
+ <c>sbmbc_alloc</c>). Multiblock
carriers are used for storage of several blocks. Singleblock
carriers are used for storage of one block. Blocks that are
larger than the value of the singleblock carrier threshold
(<seealso marker="#M_sbct">sbct</seealso>) parameter are placed
- in singleblock carriers. Blocks smaller than the value of the
- <c>sbct</c> parameter are placed in multiblock
- carriers. Normally an allocator creates a "main multiblock
+ in singleblock carriers. Blocks that are smaller than the value
+ of the <c>sbct</c> parameter are placed in multiblock
+ carriers. Blocks that are smaller than the small block multiblock
+ carrier threshold (<seealso marker="#M_sbmbct">sbmbct</seealso>)
+ will be placed in multiblock carriers only used for small blocks.
+ Normally an allocator creates a "main multiblock
carrier". Main multiblock carriers are never deallocated. The
size of the main multiblock carrier is determined by the value
of the <seealso marker="#M_mmbcs">mmbcs</seealso> parameter.</p>
@@ -133,8 +144,11 @@
<c>sbct</c> parameter should be larger than the value of the
<c>lmbcs</c> parameter, the allocator may have to create
multiblock carriers that are larger than the value of the
- <c>lmbcs</c> parameter, though. Singleblock carriers allocated
- via <c>mseg_alloc</c> are sized to whole pages.</p>
+ <c>lmbcs</c> parameter, though. The size of multiblock carriers
+ for small blocks is determined by the small block multiblock
+ carrier size (<seealso marker="#M_sbmbcs">sbmbcs</seealso>).
+ Singleblock carriers allocated via <c>mseg_alloc</c> are sized
+ to whole pages.</p>
<p>Sizes of carriers allocated via <c>sys_alloc</c> are
decided based on the value of the <c>sys_alloc</c> carrier size
(<seealso marker="#Muycs">ycs</seealso>) parameter. The size of
@@ -166,6 +180,14 @@
used. The time complexity is proportional to log N, where
N is the number of free blocks.</p>
</item>
+ <tag>Address order first fit</tag>
+ <item>
+ <p>Strategy: Find the block with the lowest address that satisfies the
+ requested block size.</p>
+ <p>Implementation: A balanced binary search tree is
+ used. The time complexity is proportional to log N, where
+ N is the number of free blocks.</p>
+ </item>
<tag>Good fit</tag>
<item>
<p>Strategy: Try to find the best fit, but settle for the best fit
@@ -194,6 +216,11 @@
</taglist>
</section>
+ <note><p>
+ Currently only allocators using the best fit and the address order
+ best fit strategies are able to use "small block multi block carriers".
+ </p></note>
+
<section>
<marker id="flags"></marker>
<title>System Flags Effecting erts_alloc</title>
@@ -215,6 +242,7 @@
the currently present allocators:</p>
<list type="bulleted">
<item><c>B: binary_alloc</c></item>
+ <item><c>C: sbmbc_alloc</c></item>
<item><c>D: std_alloc</c></item>
<item><c>E: ets_alloc</c></item>
<item><c>F: fix_alloc</c></item>
@@ -300,11 +328,11 @@
subsystem identifier, only the specific allocator identified will be
effected:</p>
<taglist>
- <tag><marker id="M_as"><c><![CDATA[+M<S>as bf|aobf|gf|af]]></c></marker></tag>
+ <tag><marker id="M_as"><c><![CDATA[+M<S>as bf|aobf|aoff|gf|af]]></c></marker></tag>
<item>
Allocation strategy. Valid strategies are <c>bf</c> (best fit),
- <c>aobf</c> (address order best fit), <c>gf</c> (good fit),
- and <c>af</c> (a fit). See
+ <c>aobf</c> (address order best fit), <c>aoff</c> (address order first fit),
+ <c>gf</c> (good fit), and <c>af</c> (a fit). See
<seealso marker="#strategy">the description of allocation strategies</seealso> in "the <c>alloc_util</c> framework" section.</item>
<tag><marker id="M_asbcst"><c><![CDATA[+M<S>asbcst <size>]]></c></marker></tag>
<item>
@@ -395,6 +423,20 @@
threshold will be placed in singleblock carriers. Blocks
smaller than this threshold will be placed in multiblock
carriers.</item>
+ <tag><marker id="M_sbmbcs"><c><![CDATA[+M<S>sbmbcs <size>]]></c></marker></tag>
+ <item>
+ Small block multiblock carrier size (in bytes). Memory blocks smaller
+ than the small block multiblock carrier threshold
+ (<seealso marker="#M_sbmbct">sbmbct</seealso>) will be placed in
+ multiblock carriers used for small blocks only. This parameter
+ determines the size of such carriers.
+ </item>
+ <tag><marker id="M_sbmbct"><c><![CDATA[+M<S>sbmbct <size>]]></c></marker></tag>
+ <item>
+ Small block multiblock carrier threshold (in bytes). Memory blocks
+ smaller than this threshold will be placed in multiblock carriers
+ used for small blocks only.
+ </item>
<tag><marker id="M_smbcs"><c><![CDATA[+M<S>smbcs <size>]]></c></marker></tag>
<item>
Smallest (<c>mseg_alloc</c>) multiblock carrier size (in
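
The erts_alloc.xml hunk above documents the new aoff strategy and the sbmbcs/sbmbct parameters as ordinary +M<S> emulator flags, where <S> is the allocator letter from the list above (B for binary_alloc, C for the new sbmbc_alloc). A minimal usage sketch, assuming illustrative values rather than defaults:

    erl +MBas aoff                       # address order first fit for binary_alloc
    erl +MBsbmbct 256 +MBsbmbcs 262144   # 256 byte small-block threshold, 256 kB sbmbc carriers
    erl +MCe true                        # enable sbmbc_alloc (disabled by default)

As the note in the hunk states, only the bf and aobf strategies can currently place blocks in small block multiblock carriers, so the sbmbc flags only take effect for allocators using those strategies.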
diff --git a/erts/emulator/Makefile.in b/erts/emulator/Makefile.in
index d9362a2a8f..b658e79378 100644
--- a/erts/emulator/Makefile.in
+++ b/erts/emulator/Makefile.in
@@ -742,7 +742,7 @@ RUN_OBJS = \
$(OBJDIR)/erl_bif_re.o $(OBJDIR)/erl_unicode.o \
$(OBJDIR)/packet_parser.o $(OBJDIR)/safe_hash.o \
$(OBJDIR)/erl_zlib.o $(OBJDIR)/erl_nif.o \
- $(OBJDIR)/erl_bif_binary.o
+ $(OBJDIR)/erl_bif_binary.o $(OBJDIR)/erl_ao_firstfit_alloc.o
ifeq ($(TARGET),win32)
DRV_OBJS = \
diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c
index fb90a7d4f7..937b3d9e53 100644
--- a/erts/emulator/beam/beam_emu.c
+++ b/erts/emulator/beam/beam_emu.c
@@ -3561,7 +3561,7 @@ void process_main(void)
* Operands: NotUsed Live Dst
*/
do_bs_init_bits_known:
- num_bytes = (num_bits+7) >> 3;
+ num_bytes = ((Uint64)num_bits+(Uint64)7) >> 3;
if (num_bits & 7) {
alloc += ERL_SUB_BIN_SIZE;
}
diff --git a/erts/emulator/beam/beam_load.c b/erts/emulator/beam/beam_load.c
index 57fe25453d..eb10ae59a8 100644
--- a/erts/emulator/beam/beam_load.c
+++ b/erts/emulator/beam/beam_load.c
@@ -332,20 +332,22 @@ typedef struct {
Eterm* func_tab[1]; /* Pointers to each function. */
} LoadedCode;
-#define GetTagAndValue(Stp, Tag, Val) \
- do { \
- BeamInstr __w; \
- GetByte(Stp, __w); \
- Tag = __w & 0x07; \
- if ((__w & 0x08) == 0) { \
- Val = __w >> 4; \
- } else if ((__w & 0x10) == 0) { \
- Val = ((__w >> 5) << 8); \
- GetByte(Stp, __w); \
- Val |= __w; \
- } else { \
- if (!get_int_val(Stp, __w, &(Val))) goto load_error; \
- } \
+#define GetTagAndValue(Stp, Tag, Val) \
+ do { \
+ BeamInstr __w; \
+ GetByte(Stp, __w); \
+ Tag = __w & 0x07; \
+ if ((__w & 0x08) == 0) { \
+ Val = __w >> 4; \
+ } else if ((__w & 0x10) == 0) { \
+ Val = ((__w >> 5) << 8); \
+ GetByte(Stp, __w); \
+ Val |= __w; \
+ } else { \
+ int __res = get_tag_and_value(Stp, __w, (Tag), &(Val)); \
+ if (__res < 0) goto load_error; \
+ Tag = (unsigned) __res; \
+ } \
} while (0)
@@ -489,8 +491,8 @@ static void load_printf(int line, LoaderState* context, char *fmt, ...);
static int transform_engine(LoaderState* st);
static void id_to_string(Uint id, char* s);
static void new_genop(LoaderState* stp);
-static int get_int_val(LoaderState* stp, Uint len_code, BeamInstr* result);
-static int get_erlang_integer(LoaderState* stp, Uint len_code, BeamInstr* result);
+static int get_tag_and_value(LoaderState* stp, Uint len_code,
+ unsigned tag, BeamInstr* result);
static int new_label(LoaderState* stp);
static void new_literal_patch(LoaderState* stp, int pos);
static void new_string_patch(LoaderState* stp, int pos);
@@ -1470,46 +1472,15 @@ load_code(LoaderState* stp)
last_op->arity = 0;
ASSERT(arity <= MAX_OPARGS);
-#define GetValue(Stp, First, Val) \
- do { \
- if (((First) & 0x08) == 0) { \
- Val = (First) >> 4; \
- } else if (((First) & 0x10) == 0) { \
- BeamInstr __w; \
- GetByte(Stp, __w); \
- Val = (((First) >> 5) << 8) | __w; \
- } else { \
- if (!get_int_val(Stp, (First), &(Val))) goto load_error; \
- } \
- } while (0)
-
for (arg = 0; arg < arity; arg++) {
- BeamInstr first;
-
- GetByte(stp, first);
- last_op->a[arg].type = first & 0x07;
+ GetTagAndValue(stp, last_op->a[arg].type, last_op->a[arg].val);
switch (last_op->a[arg].type) {
case TAG_i:
- if ((first & 0x08) == 0) {
- last_op->a[arg].val = first >> 4;
- } else if ((first & 0x10) == 0) {
- BeamInstr w;
- GetByte(stp, w);
- ASSERT(first < 0x800);
- last_op->a[arg].val = ((first >> 5) << 8) | w;
- } else {
- int i = get_erlang_integer(stp, first, &(last_op->a[arg].val));
- if (i < 0) {
- goto load_error;
- }
- last_op->a[arg].type = i;
- }
- break;
case TAG_u:
- GetValue(stp, first, last_op->a[arg].val);
+ case TAG_q:
+ case TAG_o:
break;
case TAG_x:
- GetValue(stp, first, last_op->a[arg].val);
if (last_op->a[arg].val == 0) {
last_op->a[arg].type = TAG_r;
} else if (last_op->a[arg].val >= MAX_REG) {
@@ -1518,7 +1489,6 @@ load_code(LoaderState* stp)
}
break;
case TAG_y:
- GetValue(stp, first, last_op->a[arg].val);
if (last_op->a[arg].val >= MAX_REG) {
LoadError1(stp, "invalid y register number: %u",
last_op->a[arg].val);
@@ -1526,7 +1496,6 @@ load_code(LoaderState* stp)
last_op->a[arg].val += CP_SIZE;
break;
case TAG_a:
- GetValue(stp, first, last_op->a[arg].val);
if (last_op->a[arg].val == 0) {
last_op->a[arg].type = TAG_n;
} else if (last_op->a[arg].val >= stp->num_atoms) {
@@ -1536,7 +1505,6 @@ load_code(LoaderState* stp)
}
break;
case TAG_f:
- GetValue(stp, first, last_op->a[arg].val);
if (last_op->a[arg].val == 0) {
last_op->a[arg].type = TAG_p;
} else if (last_op->a[arg].val >= stp->num_labels) {
@@ -1544,7 +1512,6 @@ load_code(LoaderState* stp)
}
break;
case TAG_h:
- GetValue(stp, first, last_op->a[arg].val);
if (last_op->a[arg].val > 65535) {
LoadError1(stp, "invalid range for character data type: %u",
last_op->a[arg].val);
@@ -1552,11 +1519,9 @@ load_code(LoaderState* stp)
break;
case TAG_z:
{
- BeamInstr ext_tag;
unsigned tag;
- GetValue(stp, first, ext_tag);
- switch (ext_tag) {
+ switch (last_op->a[arg].val) {
case 0: /* Floating point number */
{
Eterm* hp;
@@ -1648,7 +1613,8 @@ load_code(LoaderState* stp)
break;
}
default:
- LoadError1(stp, "invalid extended tag %d", ext_tag);
+ LoadError1(stp, "invalid extended tag %d",
+ last_op->a[arg].val);
break;
}
}
@@ -1659,7 +1625,6 @@ load_code(LoaderState* stp)
}
last_op->arity++;
}
-#undef GetValue
ASSERT(arity == last_op->arity);
@@ -2562,13 +2527,8 @@ should_gen_heap_bin(LoaderState* stp, GenOpArg Src)
static int
binary_too_big(LoaderState* stp, GenOpArg Size)
{
- return Size.type == TAG_u && ((Size.val >> (8*sizeof(Uint)-3)) != 0);
-}
-
-static int
-binary_too_big_bits(LoaderState* stp, GenOpArg Size)
-{
- return Size.type == TAG_u && (((Size.val+7)/8) >> (8*sizeof(Uint)-3) != 0);
+ return Size.type == TAG_o ||
+ (Size.type == TAG_u && ((Size.val >> (8*sizeof(Uint)-3)) != 0));
}
static GenOp*
@@ -4317,41 +4277,9 @@ load_printf(int line, LoaderState* context, char *fmt,...)
erts_send_error_to_logger(context->group_leader, dsbufp);
}
-
-static int
-get_int_val(LoaderState* stp, Uint len_code, BeamInstr* result)
-{
- Uint count;
- Uint val;
-
- len_code >>= 5;
- ASSERT(len_code < 8);
- if (len_code == 7) {
- LoadError0(stp, "can't load integers bigger than 8 bytes yet\n");
- }
- count = len_code + 2;
- if (count == 5) {
- Uint msb;
- GetByte(stp, msb);
- if (msb == 0) {
- count--;
- }
- GetInt(stp, 4, *result);
- } else if (count <= 4) {
- GetInt(stp, count, val);
- *result = ((val << 8*(sizeof(val)-count)) >> 8*(sizeof(val)-count));
- } else {
- LoadError1(stp, "too big integer; %d bytes\n", count);
- }
- return 1;
-
- load_error:
- return 0;
-}
-
-
static int
-get_erlang_integer(LoaderState* stp, Uint len_code, BeamInstr* result)
+get_tag_and_value(LoaderState* stp, Uint len_code,
+ unsigned tag, BeamInstr* result)
{
Uint count;
Sint val;
@@ -4371,17 +4299,62 @@ get_erlang_integer(LoaderState* stp, Uint len_code, BeamInstr* result)
if (len_code < 7) {
count = len_code + 2;
} else {
- Uint tag;
+ unsigned sztag;
UWord len_word;
ASSERT(len_code == 7);
- GetTagAndValue(stp, tag, len_word);
- VerifyTag(stp, TAG_u, tag);
+ GetTagAndValue(stp, sztag, len_word);
+ VerifyTag(stp, sztag, TAG_u);
count = len_word + 9;
}
/*
- * Handle values up to the size of an int, meaning either a small or bignum.
+ * The value for tags except TAG_i must be an unsigned integer
+ * fitting in an Uint. If it does not fit, we'll indicate overflow
+ * by changing the tag to TAG_o.
+ */
+
+ if (tag != TAG_i) {
+ if (count == sizeof(Uint)+1) {
+ Uint msb;
+
+ /*
+ * The encoded value has one more byte than an Uint.
+ * It will still fit in an Uint if the most significant
+ * byte is 0.
+ */
+ GetByte(stp, msb);
+ GetInt(stp, sizeof(Uint), *result);
+ if (msb != 0) {
+ /* Overflow: Negative or too big. */
+ return TAG_o;
+ }
+ } else if (count == sizeof(Uint)) {
+ /*
+ * The value must be positive (or the encoded value would
+ * have been one byte longer).
+ */
+ GetInt(stp, count, *result);
+ } else if (count < sizeof(Uint)) {
+ GetInt(stp, count, *result);
+
+ /*
+ * If the sign bit is set, the value is negative
+ * (not allowed).
+ */
+ if (*result & ((Uint)1 << (count*8-1))) {
+ return TAG_o;
+ }
+ } else {
+ GetInt(stp, count, *result);
+ return TAG_o;
+ }
+ return tag;
+ }
+
+ /*
+ * TAG_i: First handle values up to the size of an Uint (i.e. either
+ * a small or a bignum).
*/
if (count <= sizeof(val)) {
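
For tags other than TAG_i, get_tag_and_value() above accepts a value of count big-endian bytes only if it is non-negative and fits in an Uint; anything else is reported as TAG_o (overflow). A standalone sketch of that acceptance rule, assuming a hypothetical helper name and a plain typedef in place of the emulator's Uint:

    #include <stddef.h>

    typedef unsigned long Uint;   /* stand-in for the emulator's Uint */

    /* Return 1 and store the decoded value if `count` big-endian bytes
     * represent a non-negative value that fits in an Uint; return 0 on
     * overflow or a negative (sign-bit set) value, i.e. the TAG_o cases. */
    static int fits_in_uint(const unsigned char *bytes, size_t count, Uint *out)
    {
        Uint val = 0;
        size_t i;

        if (count == 0 || count > sizeof(Uint) + 1)
            return 0;                        /* empty or always too big */
        if (count == sizeof(Uint) + 1) {
            if (bytes[0] != 0)
                return 0;                    /* msb != 0: negative or too big */
            bytes++;                         /* drop the redundant zero byte */
            count--;
        }
        for (i = 0; i < count; i++)
            val = (val << 8) | bytes[i];
        if (count < sizeof(Uint) && (val & ((Uint)1 << (count*8 - 1))))
            return 0;                        /* sign bit set: negative value */
        *out = val;
        return 1;
    }

The loader itself reads the stream via GetByte()/GetInt() rather than a byte array, but the boundary cases (count equal to sizeof(Uint)+1 with a leading zero byte, count equal to sizeof(Uint), and smaller counts with the sign bit clear) are the same three accepted above.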
diff --git a/erts/emulator/beam/erl_afit_alloc.c b/erts/emulator/beam/erl_afit_alloc.c
index e8b594bb47..d397cd8848 100644
--- a/erts/emulator/beam/erl_afit_alloc.c
+++ b/erts/emulator/beam/erl_afit_alloc.c
@@ -43,9 +43,9 @@
/* Prototypes of callback functions */
static Block_t * get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void link_free_block (Allctr_t *, Block_t *);
-static void unlink_free_block (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+static void link_free_block (Allctr_t *, Block_t *, Uint32);
+static void unlink_free_block (Allctr_t *, Block_t *, Uint32);
static Eterm info_options (Allctr_t *, char *, int *,
@@ -72,6 +72,8 @@ erts_afalc_start(AFAllctr_t *afallctr,
is a struct). */
Allctr_t *allctr = (Allctr_t *) afallctr;
+ init->sbmbct = 0; /* Small mbc not supported by afit */
+
sys_memcpy((void *) afallctr, (void *) &nulled_state, sizeof(AFAllctr_t));
allctr->mbc_header_size = sizeof(Carrier_t);
@@ -105,7 +107,8 @@ erts_afalc_start(AFAllctr_t *afallctr,
}
static Block_t *
-get_free_block(Allctr_t *allctr, Uint size, Block_t *cand_blk, Uint cand_size)
+get_free_block(Allctr_t *allctr, Uint size, Block_t *cand_blk, Uint cand_size,
+ Uint32 flags)
{
AFAllctr_t *afallctr = (AFAllctr_t *) allctr;
@@ -123,7 +126,7 @@ get_free_block(Allctr_t *allctr, Uint size, Block_t *cand_blk, Uint cand_size)
}
static void
-link_free_block(Allctr_t *allctr, Block_t *block)
+link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
AFFreeBlock_t *blk = (AFFreeBlock_t *) block;
AFAllctr_t *afallctr = (AFAllctr_t *) allctr;
@@ -144,7 +147,7 @@ link_free_block(Allctr_t *allctr, Block_t *block)
}
static void
-unlink_free_block(Allctr_t *allctr, Block_t *block)
+unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
AFFreeBlock_t *blk = (AFFreeBlock_t *) block;
AFAllctr_t *afallctr = (AFAllctr_t *) allctr;
diff --git a/erts/emulator/beam/erl_alloc.c b/erts/emulator/beam/erl_alloc.c
index cda404af5e..bbc8a445a7 100644
--- a/erts/emulator/beam/erl_alloc.c
+++ b/erts/emulator/beam/erl_alloc.c
@@ -50,6 +50,9 @@
#include "erl_bestfit_alloc.h"
#define GET_ERL_AF_ALLOC_IMPL
#include "erl_afit_alloc.h"
+#define GET_ERL_AOFF_ALLOC_IMPL
+#include "erl_ao_firstfit_alloc.h"
+
#define ERTS_ALC_DEFAULT_MAX_THR_PREF 16
@@ -85,15 +88,19 @@ typedef union {
char align_bfa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(BFAllctr_t))];
AFAllctr_t afa;
char align_afa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(AFAllctr_t))];
+ AOFFAllctr_t aoffa;
+ char align_aoffa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(AOFFAllctr_t))];
} ErtsAllocatorState_t;
-static ErtsAllocatorState_t sl_alloc_state;
+static ErtsAllocatorState_t sbmbc_alloc_state;
static ErtsAllocatorState_t std_alloc_state;
static ErtsAllocatorState_t ll_alloc_state;
#if HALFWORD_HEAP
-static ErtsAllocatorState_t std_alloc_low_state;
-static ErtsAllocatorState_t ll_alloc_low_state;
+static ErtsAllocatorState_t sbmbc_low_alloc_state;
+static ErtsAllocatorState_t std_low_alloc_state;
+static ErtsAllocatorState_t ll_low_alloc_state;
#endif
+static ErtsAllocatorState_t sl_alloc_state;
static ErtsAllocatorState_t temp_alloc_state;
static ErtsAllocatorState_t eheap_alloc_state;
static ErtsAllocatorState_t binary_alloc_state;
@@ -120,7 +127,8 @@ static void *fix_core_alloc(Uint size)
enum allctr_type {
GOODFIT,
BESTFIT,
- AFIT
+ AFIT,
+ AOFIRSTFIT
};
struct au_init {
@@ -132,6 +140,7 @@ struct au_init {
GFAllctrInit_t gf;
BFAllctrInit_t bf;
AFAllctrInit_t af;
+ AOFFAllctrInit_t aoff;
} init;
struct {
int mmbcs;
@@ -145,7 +154,8 @@ struct au_init {
ERTS_DEFAULT_ALLCTR_INIT, \
ERTS_DEFAULT_GF_ALLCTR_INIT, \
ERTS_DEFAULT_BF_ALLCTR_INIT, \
- ERTS_DEFAULT_AF_ALLCTR_INIT \
+ ERTS_DEFAULT_AF_ALLCTR_INIT, \
+ ERTS_DEFAULT_AOFF_ALLCTR_INIT \
}
typedef struct {
@@ -162,6 +172,7 @@ typedef struct {
char *mtrace;
char *nodename;
} instr;
+ struct au_init sbmbc_alloc;
struct au_init sl_alloc;
struct au_init std_alloc;
struct au_init ll_alloc;
@@ -171,8 +182,9 @@ typedef struct {
struct au_init ets_alloc;
struct au_init driver_alloc;
#if HALFWORD_HEAP
- struct au_init std_alloc_low;
- struct au_init ll_alloc_low;
+ struct au_init sbmbc_low_alloc;
+ struct au_init std_low_alloc;
+ struct au_init ll_low_alloc;
#endif
} erts_alc_hndl_args_init_t;
@@ -185,6 +197,34 @@ do { \
} while (0)
static void
+set_default_sbmbc_alloc_opts(struct au_init *ip)
+{
+ SET_DEFAULT_ALLOC_OPTS(ip);
+ ip->enable = 0;
+ ip->thr_spec = 0;
+ ip->atype = BESTFIT;
+ ip->init.bf.ao = 1;
+ ip->init.util.ramv = 0;
+ ip->init.util.mmsbc = 0;
+ ip->init.util.mmmbc = 500;
+ ip->init.util.sbct = ~((UWord) 0);
+ ip->init.util.name_prefix = "sbmbc_";
+ ip->init.util.alloc_no = ERTS_ALC_A_SBMBC;
+#ifndef SMALL_MEMORY
+ ip->init.util.mmbcs = 2*1024*1024; /* Main carrier size */
+#else
+ ip->init.util.mmbcs = 1*1024*1024; /* Main carrier size */
+#endif
+ ip->init.util.ts = ERTS_ALC_MTA_SBMBC;
+ ip->init.util.asbcst = 0;
+ ip->init.util.rsbcst = 0;
+ ip->init.util.rsbcmt = 0;
+ ip->init.util.rmbcmt = 0;
+ ip->init.util.sbmbct = 0;
+ ip->init.util.sbmbcs = 0;
+}
+
+static void
set_default_sl_alloc_opts(struct au_init *ip)
{
SET_DEFAULT_ALLOC_OPTS(ip);
@@ -202,6 +242,7 @@ set_default_sl_alloc_opts(struct au_init *ip)
ip->init.util.ts = ERTS_ALC_MTA_SHORT_LIVED;
ip->init.util.rsbcst = 80;
#if HALFWORD_HEAP
+ ip->init.util.force = 1;
ip->init.util.low_mem = 1;
#endif
@@ -249,6 +290,8 @@ set_default_ll_alloc_opts(struct au_init *ip)
ip->init.util.rsbcst = 0;
ip->init.util.rsbcmt = 0;
ip->init.util.rmbcmt = 0;
+ ip->init.util.sbmbct = 0;
+ ip->init.util.sbmbcs = 0;
}
static void
@@ -269,6 +312,7 @@ set_default_temp_alloc_opts(struct au_init *ip)
ip->init.util.rsbcst = 90;
ip->init.util.rmbcmt = 100;
#if HALFWORD_HEAP
+ ip->init.util.force = 1;
ip->init.util.low_mem = 1;
#endif
}
@@ -291,6 +335,7 @@ set_default_eheap_alloc_opts(struct au_init *ip)
ip->init.util.ts = ERTS_ALC_MTA_EHEAP;
ip->init.util.rsbcst = 50;
#if HALFWORD_HEAP
+ ip->init.util.force = 1;
ip->init.util.low_mem = 1;
#endif
}
@@ -436,10 +481,13 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
hdbg_init();
#endif
+ erts_have_sbmbc_alloc = 0;
+
erts_sys_alloc_init();
init_thr_ix(erts_no_schedulers);
erts_init_utils_mem();
+ set_default_sbmbc_alloc_opts(&init.sbmbc_alloc);
set_default_sl_alloc_opts(&init.sl_alloc);
set_default_std_alloc_opts(&init.std_alloc);
set_default_ll_alloc_opts(&init.ll_alloc);
@@ -453,6 +501,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
handle_args(argc, argv, &init);
if (erts_no_schedulers <= 1) {
+ init.sbmbc_alloc.thr_spec = 0;
init.sl_alloc.thr_spec = 0;
init.std_alloc.thr_spec = 0;
init.ll_alloc.thr_spec = 0;
@@ -464,6 +513,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
if (init.erts_alloc_config) {
/* Adjust flags that erts_alloc_config won't like */
+ init.sbmbc_alloc.thr_spec = 0;
init.temp_alloc.thr_spec = 0;
init.sl_alloc.thr_spec = 0;
init.std_alloc.thr_spec = 0;
@@ -480,6 +530,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
init.temp_alloc.thr_spec = erts_no_schedulers;
/* Others must use thread preferred interface */
+ adjust_tpref(&init.sbmbc_alloc, erts_no_schedulers);
adjust_tpref(&init.sl_alloc, erts_no_schedulers);
adjust_tpref(&init.std_alloc, erts_no_schedulers);
adjust_tpref(&init.ll_alloc, erts_no_schedulers);
@@ -497,6 +548,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
* The following allocators cannot be run with afit strategy.
* Make sure they don't...
*/
+ refuse_af_strategy(&init.sbmbc_alloc);
refuse_af_strategy(&init.sl_alloc);
refuse_af_strategy(&init.std_alloc);
refuse_af_strategy(&init.ll_alloc);
@@ -518,6 +570,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
erts_afalc_init();
erts_bfalc_init();
erts_gfalc_init();
+ erts_aoffalc_init();
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
erts_allctrs[i].alloc = NULL;
@@ -551,19 +604,30 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
#if HALFWORD_HEAP
/* Init low memory variants by cloning */
- init.std_alloc_low = init.std_alloc;
- init.std_alloc_low.init.util.alloc_no = ERTS_ALC_A_STANDARD_LOW;
- init.std_alloc_low.init.util.low_mem = 1;
-
- init.ll_alloc_low = init.ll_alloc;
- init.ll_alloc_low.init.util.alloc_no = ERTS_ALC_A_LONG_LIVED_LOW;
- init.ll_alloc_low.init.util.low_mem = 1;
-
- set_au_allocator(ERTS_ALC_A_STANDARD_LOW, &init.std_alloc_low);
- set_au_allocator(ERTS_ALC_A_LONG_LIVED_LOW, &init.ll_alloc_low);
+ init.sbmbc_low_alloc = init.sbmbc_alloc;
+ init.sbmbc_low_alloc.init.util.name_prefix = "sbmbc_low_";
+ init.sbmbc_low_alloc.init.util.alloc_no = ERTS_ALC_A_SBMBC_LOW;
+ init.sbmbc_low_alloc.init.util.low_mem = 1;
+
+ init.std_low_alloc = init.std_alloc;
+ init.std_low_alloc.init.util.name_prefix = "std_low_";
+ init.std_low_alloc.init.util.alloc_no = ERTS_ALC_A_STANDARD_LOW;
+ init.std_low_alloc.init.util.force = 1;
+ init.std_low_alloc.init.util.low_mem = 1;
+
+ init.ll_low_alloc = init.ll_alloc;
+ init.ll_low_alloc.init.util.name_prefix = "ll_low_";
+ init.ll_low_alloc.init.util.alloc_no = ERTS_ALC_A_LONG_LIVED_LOW;
+ init.ll_low_alloc.init.util.force = 1;
+ init.ll_low_alloc.init.util.low_mem = 1;
+
+ set_au_allocator(ERTS_ALC_A_SBMBC_LOW, &init.sbmbc_low_alloc);
+ set_au_allocator(ERTS_ALC_A_STANDARD_LOW, &init.std_low_alloc);
+ set_au_allocator(ERTS_ALC_A_LONG_LIVED_LOW, &init.ll_low_alloc);
#endif /* HALFWORD */
set_au_allocator(ERTS_ALC_A_TEMPORARY, &init.temp_alloc);
+ set_au_allocator(ERTS_ALC_A_SBMBC, &init.sbmbc_alloc);
set_au_allocator(ERTS_ALC_A_SHORT_LIVED, &init.sl_alloc);
set_au_allocator(ERTS_ALC_A_STANDARD, &init.std_alloc);
set_au_allocator(ERTS_ALC_A_LONG_LIVED, &init.ll_alloc);
@@ -593,6 +657,20 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
erts_mtrace_init(init.instr.mtrace, init.instr.nodename);
+ /* sbmbc_alloc() needs to be started first */
+ start_au_allocator(ERTS_ALC_A_SBMBC,
+ &init.sbmbc_alloc,
+ &sbmbc_alloc_state);
+#if HALFWORD_HEAP
+ start_au_allocator(ERTS_ALC_A_SBMBC_LOW,
+ &init.sbmbc_low_alloc,
+ &sbmbc_low_alloc_state);
+ erts_have_sbmbc_alloc = (init.sbmbc_alloc.enable
+ && init.sbmbc_low_alloc.enable);
+#else
+ erts_have_sbmbc_alloc = init.sbmbc_alloc.enable;
+#endif
+
start_au_allocator(ERTS_ALC_A_TEMPORARY,
&init.temp_alloc,
&temp_alloc_state);
@@ -610,11 +688,11 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop)
&ll_alloc_state);
#if HALFWORD_HEAP
start_au_allocator(ERTS_ALC_A_LONG_LIVED_LOW,
- &init.ll_alloc_low,
- &ll_alloc_low_state);
+ &init.ll_low_alloc,
+ &ll_low_alloc_state);
start_au_allocator(ERTS_ALC_A_STANDARD_LOW,
- &init.std_alloc_low,
- &std_alloc_low_state);
+ &init.std_low_alloc,
+ &std_low_alloc_state);
#endif
start_au_allocator(ERTS_ALC_A_EHEAP,
&init.eheap_alloc,
@@ -680,14 +758,11 @@ set_au_allocator(ErtsAlcType_t alctr_n, struct au_init *init)
ErtsAllocatorInfo_t *ai = &erts_allctrs_info[alctr_n];
ErtsAllocatorThrSpec_t *tspec = &erts_allctr_thr_spec[alctr_n];
-#if HALFWORD_HEAP
- /* If halfword heap, silently ignore any disabling of internal
- * allocators for low memory
+ /*
+ * Some allocators are forced on if halfword heap is used.
*/
- if (init->init.util.low_mem) {
+ if (init->init.util.force)
init->enable = 1;
- }
-#endif
if (!init->enable) {
af->alloc = erts_sys_alloc;
@@ -837,6 +912,12 @@ start_au_allocator(ErtsAlcType_t alctr_n,
&init->init.af,
&init->init.util);
break;
+ case AOFIRSTFIT:
+ as = (void *) erts_aoffalc_start((AOFFAllctr_t *) as0,
+ &init->init.aoff,
+ &init->init.util);
+ break;
+
default:
as = NULL;
ASSERT(0);
@@ -947,6 +1028,20 @@ get_kb_value(char *param_end, char** argv, int* ip)
}
static Uint
+get_byte_value(char *param_end, char** argv, int* ip)
+{
+ Sint tmp;
+ char *rest;
+ char *param = argv[*ip]+1;
+ char *value = get_value(param_end, argv, ip);
+ errno = 0;
+ tmp = (Sint) strtol(value, &rest, 10);
+ if (errno != 0 || rest == value || tmp < 0)
+ bad_value(param, param_end, value);
+ return (Uint) tmp;
+}
+
+static Uint
get_amount_value(char *param_end, char** argv, int* ip)
{
Sint tmp;
@@ -1017,6 +1112,9 @@ handle_au_arg(struct au_init *auip,
else if (strcmp("af", alg) == 0) {
auip->atype = AFIT;
}
+ else if (strcmp("aoff", alg) == 0) {
+ auip->atype = AOFIRSTFIT;
+ }
else {
bad_value(param, sub_param + 1, alg);
}
@@ -1085,6 +1183,12 @@ handle_au_arg(struct au_init *auip,
if(has_prefix("sbct", sub_param)) {
auip->init.util.sbct = get_kb_value(sub_param + 4, argv, ip);
}
+ else if (has_prefix("sbmbcs", sub_param)) {
+ auip->init.util.sbmbcs = get_byte_value(sub_param + 6, argv, ip);
+ }
+ else if (has_prefix("sbmbct", sub_param)) {
+ auip->init.util.sbmbct = get_byte_value(sub_param + 6, argv, ip);
+ }
else if (has_prefix("smbcs", sub_param)) {
auip->default_.smbcs = 0;
auip->init.util.smbcs = get_kb_value(sub_param + 5, argv, ip);
@@ -1123,6 +1227,7 @@ static void
handle_args(int *argc, char **argv, erts_alc_hndl_args_init_t *init)
{
struct au_init *aui[] = {
+ &init->sbmbc_alloc,
&init->binary_alloc,
&init->std_alloc,
&init->ets_alloc,
@@ -1150,6 +1255,9 @@ handle_args(int *argc, char **argv, erts_alc_hndl_args_init_t *init)
case 'B':
handle_au_arg(&init->binary_alloc, &argv[i][3], argv, &i);
break;
+ case 'C':
+ handle_au_arg(&init->sbmbc_alloc, &argv[i][3], argv, &i);
+ break;
case 'D':
handle_au_arg(&init->std_alloc, &argv[i][3], argv, &i);
break;
@@ -1856,12 +1964,16 @@ erts_memory(int *print_to_p, void *print_to_arg, void *proc, Eterm earg)
return am_badarg;
}
- /* All alloc_util allocators *have* to be enabled */
+ /* All alloc_util allocators except sbmbc_alloc *have* to be enabled */
for (ai = ERTS_ALC_A_MIN; ai <= ERTS_ALC_A_MAX; ai++) {
switch (ai) {
case ERTS_ALC_A_SYSTEM:
case ERTS_ALC_A_FIXED_SIZE:
+ case ERTS_ALC_A_SBMBC:
+#if HALFWORD_HEAP
+ case ERTS_ALC_A_SBMBC_LOW:
+#endif
break;
default:
if (!erts_allctrs_info[ai].enabled
@@ -1901,6 +2013,12 @@ erts_memory(int *print_to_p, void *print_to_arg, void *proc, Eterm earg)
* Often not thread safe and usually never
* contain any allocated memory.
*/
+ case ERTS_ALC_A_SBMBC:
+ /* Included in other allocators */
+#if HALFWORD_HEAP
+ case ERTS_ALC_A_SBMBC_LOW:
+ /* Included in other allocators */
+#endif
continue;
case ERTS_ALC_A_EHEAP:
save = &size.processes;
@@ -2882,6 +3000,7 @@ unsigned long erts_alc_test(unsigned long op,
case 0x2: return erts_bfalc_test(op, a1, a2);
case 0x3: return erts_afalc_test(op, a1, a2);
case 0x4: return erts_mseg_test(op, a1, a2, a3);
+ case 0x5: return erts_aoffalc_test(op, a1, a2);
case 0xf:
switch (op) {
case 0xf00:
@@ -2925,6 +3044,7 @@ unsigned long erts_alc_test(unsigned long op,
init.atype = GOODFIT;
init.init.util.name_prefix = (char *) a1;
init.init.util.ts = a2 ? 1 : 0;
+ init.init.util.sbmbct = 0;
if ((char **) a3) {
char **argv = (char **) a3;
@@ -2960,6 +3080,14 @@ unsigned long erts_alc_test(unsigned long op,
&init.init.af,
&init.init.util);
break;
+ case AOFIRSTFIT:
+ allctr = erts_aoffalc_start((AOFFAllctr_t *)
+ erts_alloc(ERTS_ALC_T_UNDEF,
+ sizeof(AOFFAllctr_t)),
+ &init.init.aoff,
+ &init.init.util);
+ break;
+
default:
ASSERT(0);
allctr = NULL;
diff --git a/erts/emulator/beam/erl_alloc.h b/erts/emulator/beam/erl_alloc.h
index ce792d4d17..c35a60da22 100644
--- a/erts/emulator/beam/erl_alloc.h
+++ b/erts/emulator/beam/erl_alloc.h
@@ -99,6 +99,14 @@ unsigned long erts_alc_test(unsigned long,
#define ERTS_ALC_MIN_LONG_LIVED_TIME (10*60*1000)
+#if HALFWORD_HEAP
+#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \
+ ((NO) == ERTS_ALC_A_SBMBC || (NO) == ERTS_ALC_A_SBMBC_LOW)
+#else
+#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \
+ ((NO) == ERTS_ALC_A_SBMBC)
+#endif
+
typedef struct {
int alloc_util;
int enabled;
diff --git a/erts/emulator/beam/erl_alloc.types b/erts/emulator/beam/erl_alloc.types
index c6cc0e1fac..eda0831441 100644
--- a/erts/emulator/beam/erl_alloc.types
+++ b/erts/emulator/beam/erl_alloc.types
@@ -65,6 +65,11 @@
allocator SYSTEM true sys_alloc
+allocator SBMBC true sbmbc_alloc
++if halfword
+allocator SBMBC_LOW true sbmbc_low_alloc
++endif
+
+if smp
allocator TEMPORARY true temp_alloc
@@ -76,8 +81,8 @@ allocator ETS true ets_alloc
allocator FIXED_SIZE true fix_alloc
+if halfword
-allocator LONG_LIVED_LOW true ll_alloc_low
-allocator STANDARD_LOW true std_alloc_low
+allocator LONG_LIVED_LOW true ll_low_alloc
+allocator STANDARD_LOW true std_low_alloc
+endif
+else # Non smp build
@@ -91,8 +96,8 @@ allocator ETS false ets_alloc
allocator FIXED_SIZE false fix_alloc
+if halfword
-allocator LONG_LIVED_LOW false ll_alloc_low
-allocator STANDARD_LOW false std_alloc_low
+allocator LONG_LIVED_LOW false ll_low_alloc
+allocator STANDARD_LOW false std_low_alloc
+endif
+endif
@@ -134,6 +139,7 @@ class SYSTEM system_data
#
# <TYPE> <ALLOCATOR> <CLASS> <DESCRIPTION>
+type SBMBC SBMBC SYSTEM small_block_mbc
type PROC FIXED_SIZE PROCESSES proc
type ATOM FIXED_SIZE ATOM atom_entry
type MODULE FIXED_SIZE CODE module_entry
@@ -330,6 +336,7 @@ type SSB SHORT_LIVED PROCESSES ssb
+if halfword
+type SBMBC_LOW SBMBC_LOW SYSTEM small_block_mbc_low
type DDLL_PROCESS STANDARD_LOW SYSTEM ddll_processes
type MONITOR_LH STANDARD_LOW PROCESSES monitor_lh
type NLINK_LH STANDARD_LOW PROCESSES nlink_lh
diff --git a/erts/emulator/beam/erl_alloc_util.c b/erts/emulator/beam/erl_alloc_util.c
index cc04ef65bf..d51ed0c36d 100644
--- a/erts/emulator/beam/erl_alloc_util.c
+++ b/erts/emulator/beam/erl_alloc_util.c
@@ -66,6 +66,7 @@
static int atoms_initialized = 0;
static int initialized = 0;
+int erts_have_sbmbc_alloc;
#if HAVE_ERTS_MSEG
@@ -85,8 +86,6 @@ static int initialized = 0;
#undef ASSERT
#define ASSERT ASSERT_EXPR
-#define ERTS_ALCU_FLG_FAIL_REALLOC_MOVE ((UWord) 1)
-
#if 0
/* Can be useful for debugging */
#define MBC_REALLOC_ALWAYS_MOVES
@@ -275,14 +274,26 @@ static void check_blk_carrier(Allctr_t *, Block_t *);
#ifdef DEBUG
#define DEBUG_CHECK_CARRIER_NO_SZ(AP) \
- ASSERT(((AP)->sbcs.curr_mseg.no && (AP)->sbcs.curr_mseg.size) \
- || (!(AP)->sbcs.curr_mseg.no && !(AP)->sbcs.curr_mseg.size));\
- ASSERT(((AP)->sbcs.curr_sys_alloc.no && (AP)->sbcs.curr_sys_alloc.size)\
- || (!(AP)->sbcs.curr_sys_alloc.no && !(AP)->sbcs.curr_sys_alloc.size));\
- ASSERT(((AP)->mbcs.curr_mseg.no && (AP)->mbcs.curr_mseg.size) \
- || (!(AP)->mbcs.curr_mseg.no && !(AP)->mbcs.curr_mseg.size));\
- ASSERT(((AP)->mbcs.curr_sys_alloc.no && (AP)->mbcs.curr_sys_alloc.size)\
- || (!(AP)->mbcs.curr_sys_alloc.no && !(AP)->mbcs.curr_sys_alloc.size))
+ ASSERT(((AP)->sbcs.curr.norm.mseg.no \
+ && (AP)->sbcs.curr.norm.mseg.size) \
+ || (!(AP)->sbcs.curr.norm.mseg.no \
+ && !(AP)->sbcs.curr.norm.mseg.size)); \
+ ASSERT(((AP)->sbcs.curr.norm.sys_alloc.no \
+ && (AP)->sbcs.curr.norm.sys_alloc.size) \
+ || (!(AP)->sbcs.curr.norm.sys_alloc.no \
+ && !(AP)->sbcs.curr.norm.sys_alloc.size)); \
+ ASSERT(((AP)->mbcs.curr.norm.mseg.no \
+ && (AP)->mbcs.curr.norm.mseg.size) \
+ || (!(AP)->mbcs.curr.norm.mseg.no \
+ && !(AP)->mbcs.curr.norm.mseg.size)); \
+ ASSERT(((AP)->mbcs.curr.norm.sys_alloc.no \
+ && (AP)->mbcs.curr.norm.sys_alloc.size) \
+ || (!(AP)->mbcs.curr.norm.sys_alloc.no \
+ && !(AP)->mbcs.curr.norm.sys_alloc.size)); \
+ ASSERT(((AP)->sbmbcs.curr.small_block.no \
+ && (AP)->sbmbcs.curr.small_block.size) \
+ || (!(AP)->sbmbcs.curr.small_block.no \
+ && !(AP)->sbmbcs.curr.small_block.size))
#else
#define DEBUG_CHECK_CARRIER_NO_SZ(AP)
@@ -292,27 +303,27 @@ static void check_blk_carrier(Allctr_t *, Block_t *);
(AP)->sbcs.blocks.curr.size += (BSZ); \
if ((AP)->sbcs.blocks.max.size < (AP)->sbcs.blocks.curr.size) \
(AP)->sbcs.blocks.max.size = (AP)->sbcs.blocks.curr.size; \
- if ((AP)->sbcs.max.no < ((AP)->sbcs.curr_mseg.no \
- + (AP)->sbcs.curr_sys_alloc.no)) \
- (AP)->sbcs.max.no = ((AP)->sbcs.curr_mseg.no \
- + (AP)->sbcs.curr_sys_alloc.no); \
- if ((AP)->sbcs.max.size < ((AP)->sbcs.curr_mseg.size \
- + (AP)->sbcs.curr_sys_alloc.size)) \
- (AP)->sbcs.max.size = ((AP)->sbcs.curr_mseg.size \
- + (AP)->sbcs.curr_sys_alloc.size)
+ if ((AP)->sbcs.max.no < ((AP)->sbcs.curr.norm.mseg.no \
+ + (AP)->sbcs.curr.norm.sys_alloc.no)) \
+ (AP)->sbcs.max.no = ((AP)->sbcs.curr.norm.mseg.no \
+ + (AP)->sbcs.curr.norm.sys_alloc.no); \
+ if ((AP)->sbcs.max.size < ((AP)->sbcs.curr.norm.mseg.size \
+ + (AP)->sbcs.curr.norm.sys_alloc.size)) \
+ (AP)->sbcs.max.size = ((AP)->sbcs.curr.norm.mseg.size \
+ + (AP)->sbcs.curr.norm.sys_alloc.size)
#define STAT_MSEG_SBC_ALLOC(AP, CSZ, BSZ) \
do { \
- (AP)->sbcs.curr_mseg.no++; \
- (AP)->sbcs.curr_mseg.size += (CSZ); \
+ (AP)->sbcs.curr.norm.mseg.no++; \
+ (AP)->sbcs.curr.norm.mseg.size += (CSZ); \
STAT_SBC_ALLOC((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_SBC_ALLOC(AP, CSZ, BSZ) \
do { \
- (AP)->sbcs.curr_sys_alloc.no++; \
- (AP)->sbcs.curr_sys_alloc.size += (CSZ); \
+ (AP)->sbcs.curr.norm.sys_alloc.no++; \
+ (AP)->sbcs.curr.norm.sys_alloc.size += (CSZ); \
STAT_SBC_ALLOC((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
@@ -324,85 +335,111 @@ do { \
#define STAT_MSEG_SBC_FREE(AP, CSZ, BSZ) \
do { \
- ASSERT((AP)->sbcs.curr_mseg.no > 0); \
- (AP)->sbcs.curr_mseg.no--; \
- ASSERT((AP)->sbcs.curr_mseg.size >= (CSZ)); \
- (AP)->sbcs.curr_mseg.size -= (CSZ); \
+ ASSERT((AP)->sbcs.curr.norm.mseg.no > 0); \
+ (AP)->sbcs.curr.norm.mseg.no--; \
+ ASSERT((AP)->sbcs.curr.norm.mseg.size >= (CSZ)); \
+ (AP)->sbcs.curr.norm.mseg.size -= (CSZ); \
STAT_SBC_FREE((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_SBC_FREE(AP, CSZ, BSZ) \
do { \
- ASSERT((AP)->sbcs.curr_sys_alloc.no > 0); \
- (AP)->sbcs.curr_sys_alloc.no--; \
- ASSERT((AP)->sbcs.curr_sys_alloc.size >= (CSZ)); \
- (AP)->sbcs.curr_sys_alloc.size -= (CSZ); \
+ ASSERT((AP)->sbcs.curr.norm.sys_alloc.no > 0); \
+ (AP)->sbcs.curr.norm.sys_alloc.no--; \
+ ASSERT((AP)->sbcs.curr.norm.sys_alloc.size >= (CSZ)); \
+ (AP)->sbcs.curr.norm.sys_alloc.size -= (CSZ); \
STAT_SBC_FREE((AP), (BSZ)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_MBC_ALLOC(AP) \
- if ((AP)->mbcs.max.no < ((AP)->mbcs.curr_mseg.no \
- + (AP)->mbcs.curr_sys_alloc.no)) \
- (AP)->mbcs.max.no = ((AP)->mbcs.curr_mseg.no \
- + (AP)->mbcs.curr_sys_alloc.no); \
- if ((AP)->mbcs.max.size < ((AP)->mbcs.curr_mseg.size \
- + (AP)->mbcs.curr_sys_alloc.size)) \
- (AP)->mbcs.max.size = ((AP)->mbcs.curr_mseg.size \
- + (AP)->mbcs.curr_sys_alloc.size)
+ if ((AP)->mbcs.max.no < ((AP)->mbcs.curr.norm.mseg.no \
+ + (AP)->mbcs.curr.norm.sys_alloc.no)) \
+ (AP)->mbcs.max.no = ((AP)->mbcs.curr.norm.mseg.no \
+ + (AP)->mbcs.curr.norm.sys_alloc.no); \
+ if ((AP)->mbcs.max.size < ((AP)->mbcs.curr.norm.mseg.size \
+ + (AP)->mbcs.curr.norm.sys_alloc.size)) \
+ (AP)->mbcs.max.size = ((AP)->mbcs.curr.norm.mseg.size \
+ + (AP)->mbcs.curr.norm.sys_alloc.size)
+
+#define STAT_SBMBC_ALLOC(AP, CSZ) \
+do { \
+ (AP)->sbmbcs.curr.small_block.no++; \
+ (AP)->sbmbcs.curr.small_block.size += (CSZ); \
+ if ((AP)->sbmbcs.max.no < (AP)->sbmbcs.curr.small_block.no) \
+ (AP)->sbmbcs.max.no = (AP)->sbmbcs.curr.small_block.no; \
+ if ((AP)->sbmbcs.max.size < (AP)->sbmbcs.curr.small_block.size) \
+ (AP)->sbmbcs.max.size = (AP)->sbmbcs.curr.small_block.size; \
+ DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
+} while (0)
#define STAT_MSEG_MBC_ALLOC(AP, CSZ) \
do { \
- (AP)->mbcs.curr_mseg.no++; \
- (AP)->mbcs.curr_mseg.size += (CSZ); \
+ (AP)->mbcs.curr.norm.mseg.no++; \
+ (AP)->mbcs.curr.norm.mseg.size += (CSZ); \
STAT_MBC_ALLOC((AP)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_MBC_ALLOC(AP, CSZ) \
do { \
- (AP)->mbcs.curr_sys_alloc.no++; \
- (AP)->mbcs.curr_sys_alloc.size += (CSZ); \
+ (AP)->mbcs.curr.norm.sys_alloc.no++; \
+ (AP)->mbcs.curr.norm.sys_alloc.size += (CSZ); \
STAT_MBC_ALLOC((AP)); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
+#define STAT_SBMBC_FREE(AP, CSZ) \
+do { \
+ ASSERT((AP)->sbmbcs.curr.small_block.no > 0); \
+ (AP)->sbmbcs.curr.small_block.no--; \
+ ASSERT((AP)->sbmbcs.curr.small_block.size >= (CSZ)); \
+ (AP)->sbmbcs.curr.small_block.size -= (CSZ); \
+ DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
+} while (0)
+
#define STAT_MSEG_MBC_FREE(AP, CSZ) \
do { \
- ASSERT((AP)->mbcs.curr_mseg.no > 0); \
- (AP)->mbcs.curr_mseg.no--; \
- ASSERT((AP)->mbcs.curr_mseg.size >= (CSZ)); \
- (AP)->mbcs.curr_mseg.size -= (CSZ); \
+ ASSERT((AP)->mbcs.curr.norm.mseg.no > 0); \
+ (AP)->mbcs.curr.norm.mseg.no--; \
+ ASSERT((AP)->mbcs.curr.norm.mseg.size >= (CSZ)); \
+ (AP)->mbcs.curr.norm.mseg.size -= (CSZ); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
#define STAT_SYS_ALLOC_MBC_FREE(AP, CSZ) \
do { \
- ASSERT((AP)->mbcs.curr_sys_alloc.no > 0); \
- (AP)->mbcs.curr_sys_alloc.no--; \
- ASSERT((AP)->mbcs.curr_sys_alloc.size >= (CSZ)); \
- (AP)->mbcs.curr_sys_alloc.size -= (CSZ); \
+ ASSERT((AP)->mbcs.curr.norm.sys_alloc.no > 0); \
+ (AP)->mbcs.curr.norm.sys_alloc.no--; \
+ ASSERT((AP)->mbcs.curr.norm.sys_alloc.size >= (CSZ)); \
+ (AP)->mbcs.curr.norm.sys_alloc.size -= (CSZ); \
DEBUG_CHECK_CARRIER_NO_SZ((AP)); \
} while (0)
-#define STAT_MBC_BLK_ALLOC(AP, BSZ) \
+#define STAT_MBC_BLK_ALLOC(AP, BSZ, FLGS) \
do { \
- (AP)->mbcs.blocks.curr.no++; \
- if ((AP)->mbcs.blocks.max.no < (AP)->mbcs.blocks.curr.no) \
- (AP)->mbcs.blocks.max.no = (AP)->mbcs.blocks.curr.no; \
- (AP)->mbcs.blocks.curr.size += (BSZ); \
- if ((AP)->mbcs.blocks.max.size < (AP)->mbcs.blocks.curr.size) \
- (AP)->mbcs.blocks.max.size = (AP)->mbcs.blocks.curr.size; \
+ CarriersStats_t *cstats__ = (((FLGS) & ERTS_ALCU_FLG_SBMBC) \
+ ? &(AP)->sbmbcs \
+ : &(AP)->mbcs); \
+ cstats__->blocks.curr.no++; \
+ if (cstats__->blocks.max.no < cstats__->blocks.curr.no) \
+ cstats__->blocks.max.no = cstats__->blocks.curr.no; \
+ cstats__->blocks.curr.size += (BSZ); \
+ if (cstats__->blocks.max.size < cstats__->blocks.curr.size) \
+ cstats__->blocks.max.size = cstats__->blocks.curr.size; \
} while (0)
-#define STAT_MBC_BLK_FREE(AP, BSZ) \
+#define STAT_MBC_BLK_FREE(AP, BSZ, FLGS) \
do { \
- ASSERT((AP)->mbcs.blocks.curr.no > 0); \
- (AP)->mbcs.blocks.curr.no--; \
- ASSERT((AP)->mbcs.blocks.curr.size >= (BSZ)); \
- (AP)->mbcs.blocks.curr.size -= (BSZ); \
+ CarriersStats_t *cstats__ = (((FLGS) & ERTS_ALCU_FLG_SBMBC) \
+ ? &(AP)->sbmbcs \
+ : &(AP)->mbcs); \
+ ASSERT(cstats__->blocks.curr.no > 0); \
+ cstats__->blocks.curr.no--; \
+ ASSERT(cstats__->blocks.curr.size >= (BSZ)); \
+ cstats__->blocks.curr.size -= (BSZ); \
} while (0)
/* Debug stuff... */
@@ -410,7 +447,7 @@ do { \
static UWord carrier_alignment;
#define DEBUG_SAVE_ALIGNMENT(C) \
do { \
- UWord algnmnt__ = sizeof(Unit_t) - (((UWord) (C)) % sizeof(Unit_t)); \
+ UWord algnmnt__ = sizeof(Unit_t) - (((UWord) (C)) % sizeof(Unit_t));\
carrier_alignment = MIN(carrier_alignment, algnmnt__); \
ASSERT(((UWord) (C)) % sizeof(UWord) == 0); \
} while (0)
@@ -524,8 +561,8 @@ static Uint
get_next_mbc_size(Allctr_t *allctr)
{
Uint size;
- int cs = (allctr->mbcs.curr_mseg.no
- + allctr->mbcs.curr_sys_alloc.no
+ int cs = (allctr->mbcs.curr.norm.mseg.no
+ + allctr->mbcs.curr.norm.sys_alloc.no
- (allctr->main_carrier ? 1 : 0));
ASSERT(cs >= 0);
@@ -609,7 +646,8 @@ unlink_carrier(CarrierList_t *cl, Carrier_t *crr)
}
}
-
+static Block_t *create_sbmbc(Allctr_t *allctr, Uint umem_sz);
+static void destroy_sbmbc(Allctr_t *allctr, Block_t *blk);
static Block_t *create_carrier(Allctr_t *, Uint, UWord);
static void destroy_carrier(Allctr_t *, Block_t *);
@@ -619,39 +657,57 @@ static void destroy_carrier(Allctr_t *, Block_t *);
* block in a sbc.
*/
static ERTS_INLINE void *
-mbc_alloc_block(Allctr_t *allctr, Uint size, Uint *blk_szp)
+mbc_alloc_block(Allctr_t *allctr, Uint size, Uint *blk_szp, Uint32 *alcu_flgsp)
{
Block_t *blk;
+ Uint get_blk_sz;
+ Uint sbmbct;
ASSERT(size);
ASSERT(size < allctr->sbc_threshold);
- *blk_szp = UMEMSZ2BLKSZ(allctr, size);
+ *blk_szp = get_blk_sz = UMEMSZ2BLKSZ(allctr, size);
+
+ sbmbct = allctr->sbmbc_threshold;
+ if (sbmbct) {
+ if (get_blk_sz < sbmbct) {
+ *alcu_flgsp |= ERTS_ALCU_FLG_SBMBC;
+ if (get_blk_sz + allctr->min_block_size > sbmbct) {
+ /* Since we use block size to determine if blocks are
+ located in sbmbc or not... */
+ get_blk_sz += allctr->min_block_size;
+ }
+ }
+ }
- blk = (*allctr->get_free_block)(allctr, *blk_szp, NULL, 0);
+ blk = (*allctr->get_free_block)(allctr, get_blk_sz, NULL, 0, *alcu_flgsp);
-#if HALFWORD_HEAP
if (!blk) {
- blk = create_carrier(allctr, *blk_szp, CFLG_MBC|CFLG_FORCE_MSEG);
- }
+ if ((*alcu_flgsp) & ERTS_ALCU_FLG_SBMBC)
+ blk = create_sbmbc(allctr, get_blk_sz);
+ else {
+#if HALFWORD_HEAP
+ blk = create_carrier(allctr, get_blk_sz, CFLG_MBC|CFLG_FORCE_MSEG);
#else
- if (!blk) {
- blk = create_carrier(allctr, *blk_szp, CFLG_MBC);
- if (!blk) {
- /* Emergency! We couldn't create the carrier as we wanted.
- Try to place it in a sys_alloced sbc. */
- blk = create_carrier(allctr,
- size,
- CFLG_SBC|CFLG_FORCE_SIZE|CFLG_FORCE_SYS_ALLOC);
+ blk = create_carrier(allctr, get_blk_sz, CFLG_MBC);
+ if (!blk) {
+ /* Emergency! We couldn't create the carrier as we wanted.
+ Try to place it in a sys_alloced sbc. */
+ blk = create_carrier(allctr,
+ size,
+ (CFLG_SBC
+ | CFLG_FORCE_SIZE
+ | CFLG_FORCE_SYS_ALLOC));
+ }
+#endif
}
}
-#endif
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
if (IS_MBC_BLK(blk)) {
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, *alcu_flgsp);
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, *alcu_flgsp);
}
#endif
@@ -664,7 +720,8 @@ mbc_alloc_finalize(Allctr_t *allctr,
Uint org_blk_sz,
UWord flags,
Uint want_blk_sz,
- int valid_blk_info)
+ int valid_blk_info,
+ Uint32 alcu_flgs)
{
Uint blk_sz;
Uint nxt_blk_sz;
@@ -700,7 +757,7 @@ mbc_alloc_finalize(Allctr_t *allctr,
SET_PREV_BLK_FREE(nxt_nxt_blk);
}
}
- (*allctr->link_free_block)(allctr, nxt_blk);
+ (*allctr->link_free_block)(allctr, nxt_blk, alcu_flgs);
ASSERT(IS_NOT_LAST_BLK(blk));
ASSERT(IS_FREE_BLK(nxt_blk));
@@ -741,7 +798,7 @@ mbc_alloc_finalize(Allctr_t *allctr,
: IS_NOT_LAST_BLK(blk));
}
- STAT_MBC_BLK_ALLOC(allctr, blk_sz);
+ STAT_MBC_BLK_ALLOC(allctr, blk_sz, alcu_flgs);
ASSERT(IS_ALLOCED_BLK(blk));
ASSERT(blk_sz == BLK_SZ(blk));
@@ -761,7 +818,8 @@ mbc_alloc(Allctr_t *allctr, Uint size)
{
Block_t *blk;
Uint blk_sz;
- blk = mbc_alloc_block(allctr, size, &blk_sz);
+ Uint32 alcu_flgs = 0;
+ blk = mbc_alloc_block(allctr, size, &blk_sz, &alcu_flgs);
if (!blk)
return NULL;
if (IS_MBC_BLK(blk))
@@ -770,7 +828,8 @@ mbc_alloc(Allctr_t *allctr, Uint size)
BLK_SZ(blk),
GET_BLK_HDR_FLGS(blk),
blk_sz,
- 1);
+ 1,
+ alcu_flgs);
return BLK2UMEM(blk);
}
@@ -779,6 +838,7 @@ mbc_free(Allctr_t *allctr, void *p)
{
Uint is_first_blk;
Uint is_last_blk;
+ Uint32 alcu_flgs = 0;
Uint blk_sz;
Block_t *blk;
Block_t *nxt_blk;
@@ -788,13 +848,15 @@ mbc_free(Allctr_t *allctr, void *p)
blk = UMEM2BLK(p);
blk_sz = BLK_SZ(blk);
+ if (blk_sz < allctr->sbmbc_threshold)
+ alcu_flgs |= ERTS_ALCU_FLG_SBMBC;
ASSERT(IS_MBC_BLK(blk));
ASSERT(blk_sz >= allctr->min_block_size);
HARD_CHECK_BLK_CARRIER(allctr, blk);
- STAT_MBC_BLK_FREE(allctr, blk_sz);
+ STAT_MBC_BLK_FREE(allctr, blk_sz, alcu_flgs);
is_first_blk = IS_FIRST_BLK(blk);
is_last_blk = IS_LAST_BLK(blk);
@@ -802,7 +864,7 @@ mbc_free(Allctr_t *allctr, void *p)
if (!is_first_blk && IS_PREV_BLK_FREE(blk)) {
/* Coalesce with previous block... */
blk = PREV_BLK(blk);
- (*allctr->unlink_free_block)(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, alcu_flgs);
blk_sz += BLK_SZ(blk);
is_first_blk = IS_FIRST_BLK(blk);
@@ -818,7 +880,7 @@ mbc_free(Allctr_t *allctr, void *p)
nxt_blk = NXT_BLK(blk);
if (IS_FREE_BLK(nxt_blk)) {
/* Coalesce with next block... */
- (*allctr->unlink_free_block)(allctr, nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_blk, alcu_flgs);
blk_sz += BLK_SZ(nxt_blk);
SET_BLK_SZ(blk, blk_sz);
@@ -850,16 +912,20 @@ mbc_free(Allctr_t *allctr, void *p)
if (is_first_blk
&& is_last_blk
- && allctr->main_carrier != FBLK2MBC(allctr, blk))
- destroy_carrier(allctr, blk);
+ && allctr->main_carrier != FBLK2MBC(allctr, blk)) {
+ if (alcu_flgs & ERTS_ALCU_FLG_SBMBC)
+ destroy_sbmbc(allctr, blk);
+ else
+ destroy_carrier(allctr, blk);
+ }
else {
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, alcu_flgs);
HARD_CHECK_BLK_CARRIER(allctr, blk);
}
}
static void *
-mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
+mbc_realloc(Allctr_t *allctr, void *p, Uint size, Uint32 alcu_flgs)
{
void *new_p;
Uint old_blk_sz;
@@ -867,7 +933,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
#ifndef MBC_REALLOC_ALWAYS_MOVES
Block_t *new_blk, *cand_blk;
Uint cand_blk_sz;
- Uint blk_sz;
+ Uint blk_sz, get_blk_sz;
Block_t *nxt_blk;
Uint nxt_blk_sz;
Uint is_last_blk;
@@ -883,10 +949,16 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
ASSERT(old_blk_sz >= allctr->min_block_size);
#ifdef MBC_REALLOC_ALWAYS_MOVES
- if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
#else /* !MBC_REALLOC_ALWAYS_MOVES */
- blk_sz = UMEMSZ2BLKSZ(allctr, size);
+ get_blk_sz = blk_sz = UMEMSZ2BLKSZ(allctr, size);
+ if ((alcu_flgs & ERTS_ALCU_FLG_SBMBC)
+ && (blk_sz + allctr->min_block_size > allctr->sbmbc_threshold)) {
+ /* Since we use block size to determine if blocks are
+ located in sbmbc or not... */
+ get_blk_sz = blk_sz + allctr->min_block_size;
+ }
ASSERT(IS_ALLOCED_BLK(blk));
ASSERT(IS_MBC_BLK(blk));
@@ -901,6 +973,9 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
Uint diff_sz_val = old_blk_sz - blk_sz;
Uint old_blk_sz_val = old_blk_sz;
+ if (get_blk_sz >= old_blk_sz)
+ return p;
+
if (diff_sz_val >= (~((Uint) 0) / 100)) {
/* div both by 128 */
old_blk_sz_val >>= 7;
@@ -909,7 +984,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
/* Avoid fragmentation by moving the block if it is shrunk much */
if (100*diff_sz_val > allctr->mbc_move_threshold*old_blk_sz_val) {
- if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
cand_blk_sz = old_blk_sz;
@@ -926,9 +1001,10 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
new_blk = (*allctr->get_free_block)(allctr,
- blk_sz,
+ get_blk_sz,
cand_blk,
- cand_blk_sz);
+ cand_blk_sz,
+ alcu_flgs);
if (new_blk || cand_blk != blk)
goto move_into_new_blk;
@@ -952,8 +1028,8 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
nxt_blk_sz,
SBH_THIS_FREE|SBH_PREV_ALLOCED|SBH_NOT_LAST_BLK);
- STAT_MBC_BLK_FREE(allctr, old_blk_sz);
- STAT_MBC_BLK_ALLOC(allctr, blk_sz);
+ STAT_MBC_BLK_FREE(allctr, old_blk_sz, alcu_flgs);
+ STAT_MBC_BLK_ALLOC(allctr, blk_sz, alcu_flgs);
ASSERT(BLK_SZ(blk) >= allctr->min_block_size);
@@ -964,7 +1040,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
if (IS_FREE_BLK(nxt_nxt_blk)) {
/* Coalesce with next free block... */
nxt_blk_sz += BLK_SZ(nxt_nxt_blk);
- (*allctr->unlink_free_block)(allctr, nxt_nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_nxt_blk, alcu_flgs);
SET_BLK_SZ(nxt_blk, nxt_blk_sz);
is_last_blk = IS_LAST_BLK(nxt_nxt_blk);
@@ -979,7 +1055,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
}
- (*allctr->link_free_block)(allctr, nxt_blk);
+ (*allctr->link_free_block)(allctr, nxt_blk, alcu_flgs);
ASSERT(IS_ALLOCED_BLK(blk));
@@ -1009,12 +1085,12 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
if (!is_last_blk) {
nxt_blk = NXT_BLK(blk);
nxt_blk_sz = BLK_SZ(nxt_blk);
- if (IS_FREE_BLK(nxt_blk) && blk_sz <= old_blk_sz + nxt_blk_sz) {
+ if (IS_FREE_BLK(nxt_blk) && get_blk_sz <= old_blk_sz + nxt_blk_sz) {
/* Grow into next block... */
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_blk, alcu_flgs);
nxt_blk_sz -= blk_sz - old_blk_sz;
is_last_blk = IS_LAST_BLK(nxt_blk);
@@ -1051,13 +1127,13 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
else
SET_BLK_SZ_FTR(nxt_blk, nxt_blk_sz);
- (*allctr->link_free_block)(allctr, nxt_blk);
+ (*allctr->link_free_block)(allctr, nxt_blk, alcu_flgs);
ASSERT(IS_FREE_BLK(nxt_blk));
}
- STAT_MBC_BLK_FREE(allctr, old_blk_sz);
- STAT_MBC_BLK_ALLOC(allctr, blk_sz);
+ STAT_MBC_BLK_FREE(allctr, old_blk_sz, alcu_flgs);
+ STAT_MBC_BLK_ALLOC(allctr, blk_sz, alcu_flgs);
ASSERT(IS_ALLOCED_BLK(blk));
@@ -1088,7 +1164,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
}
- if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
/* Need to grow in another block */
@@ -1108,7 +1184,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
}
}
- if (cand_blk_sz < blk_sz) {
+ if (cand_blk_sz < get_blk_sz) {
/* We wont fit in cand_blk get a new one */
#endif /* !MBC_REALLOC_ALWAYS_MOVES */
@@ -1127,9 +1203,10 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
/* We will at least fit in cand_blk */
new_blk = (*allctr->get_free_block)(allctr,
- blk_sz,
+ get_blk_sz,
cand_blk,
- cand_blk_sz);
+ cand_blk_sz,
+ alcu_flgs);
move_into_new_blk:
/*
* new_blk, and cand_blk have to be correctly set
@@ -1142,7 +1219,8 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
BLK_SZ(new_blk),
GET_BLK_HDR_FLGS(new_blk),
blk_sz,
- 1);
+ 1,
+ alcu_flgs);
new_p = BLK2UMEM(new_blk);
sys_memcpy(new_p, p, MIN(size, old_blk_sz - ABLK_HDR_SZ));
mbc_free(allctr, p);
@@ -1164,7 +1242,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, new_blk); /* prev */
+ (*allctr->unlink_free_block)(allctr, new_blk, alcu_flgs); /* prev */
if (is_last_blk)
new_blk_flgs |= LAST_BLK_HDR_FLG;
@@ -1173,7 +1251,7 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
if (IS_FREE_BLK(nxt_blk)) {
new_blk_flgs |= GET_LAST_BLK_HDR_FLG(nxt_blk);
new_blk_sz += BLK_SZ(nxt_blk);
- (*allctr->unlink_free_block)(allctr, nxt_blk);
+ (*allctr->unlink_free_block)(allctr, nxt_blk, alcu_flgs);
}
}
@@ -1196,9 +1274,10 @@ mbc_realloc(Allctr_t *allctr, void *p, Uint size, UWord flgs)
new_blk_sz,
new_blk_flgs,
blk_sz,
- 0);
+ 0,
+ alcu_flgs);
- STAT_MBC_BLK_FREE(allctr, old_blk_sz);
+ STAT_MBC_BLK_FREE(allctr, old_blk_sz, alcu_flgs);
return new_p;
}
@@ -1243,6 +1322,100 @@ do { \
#define CHECK_1BLK_CARRIER(A, SBC, MSEGED, C, CSZ, B, BSZ)
#endif
+static Block_t *
+create_sbmbc(Allctr_t *allctr, Uint umem_sz)
+{
+ Block_t *blk;
+ Uint blk_sz;
+ Uint crr_sz = allctr->sbmbc_size;
+ Carrier_t *crr;
+
+#if HALFWORD_HEAP
+ if (allctr->mseg_opt.low_mem)
+ crr = erts_alloc(ERTS_ALC_T_SBMBC_LOW, crr_sz);
+ else
+#endif
+ crr = erts_alloc(ERTS_ALC_T_SBMBC, crr_sz);
+
+ INC_CC(allctr->calls.sbmbc_alloc);
+ SET_CARRIER_HDR(crr, crr_sz, SCH_SYS_ALLOC|SCH_MBC);
+
+ blk = MBC2FBLK(allctr, crr);
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
+ crr_sz -= sizeof(UWord);
+#endif
+
+ blk_sz = UNIT_FLOOR(crr_sz - allctr->mbc_header_size);
+
+ SET_MBC_BLK_FTR(((UWord *) blk)[-1]);
+ SET_BLK_HDR(blk, blk_sz, SBH_THIS_FREE|SBH_PREV_FREE|SBH_LAST_BLK);
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ *((Carrier_t **) NXT_BLK(blk)) = crr;
+#endif
+
+ link_carrier(&allctr->sbmbc_list, crr);
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
+ crr_sz += sizeof(UWord);
+#endif
+
+ STAT_SBMBC_ALLOC(allctr, crr_sz);
+ CHECK_1BLK_CARRIER(allctr, 0, 0, crr, crr_sz, blk, blk_sz);
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
+ crr_sz -= sizeof(UWord);
+#endif
+ if (allctr->creating_mbc)
+ (*allctr->creating_mbc)(allctr, crr, ERTS_ALCU_FLG_SBMBC);
+
+ DEBUG_SAVE_ALIGNMENT(crr);
+ return blk;
+}
+
+static void
+destroy_sbmbc(Allctr_t *allctr, Block_t *blk)
+{
+ Uint crr_sz;
+ Carrier_t *crr;
+
+ ASSERT(IS_FIRST_BLK(blk));
+
+ ASSERT(IS_MBC_BLK(blk));
+
+ crr = FBLK2MBC(allctr, blk);
+ crr_sz = CARRIER_SZ(crr);
+
+#ifdef DEBUG
+ if (!allctr->stopped) {
+ ASSERT(IS_LAST_BLK(blk));
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ (*allctr->link_free_block)(allctr, blk, ERTS_ALCU_FLG_SBMBC);
+ HARD_CHECK_BLK_CARRIER(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, ERTS_ALCU_FLG_SBMBC);
+#endif
+ }
+#endif
+
+ STAT_SBMBC_FREE(allctr, crr_sz);
+
+ unlink_carrier(&allctr->sbmbc_list, crr);
+ if (allctr->destroying_mbc)
+ (*allctr->destroying_mbc)(allctr, crr, ERTS_ALCU_FLG_SBMBC);
+
+ INC_CC(allctr->calls.sbmbc_free);
+
+#if HALFWORD_HEAP
+ if (allctr->mseg_opt.low_mem)
+ erts_free(ERTS_ALC_T_SBMBC_LOW, crr);
+ else
+#endif
+ erts_free(ERTS_ALC_T_SBMBC, crr);
+}
static Block_t *
create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
@@ -1271,11 +1444,11 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
if (erts_mseg_no() >= max_mseg_carriers)
goto try_sys_alloc;
if (flags & CFLG_SBC) {
- if (allctr->sbcs.curr_mseg.no >= allctr->max_mseg_sbcs)
+ if (allctr->sbcs.curr.norm.mseg.no >= allctr->max_mseg_sbcs)
goto try_sys_alloc;
}
else {
- if (allctr->mbcs.curr_mseg.no >= allctr->max_mseg_mbcs)
+ if (allctr->mbcs.curr.norm.mseg.no >= allctr->max_mseg_mbcs)
goto try_sys_alloc;
}
@@ -1289,7 +1462,7 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
if (crr_sz < allctr->mbc_header_size + blk_sz)
crr_sz = allctr->mbc_header_size + blk_sz;
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz += sizeof(UWord);
#endif
}
@@ -1330,7 +1503,7 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
&& bcrr_sz < allctr->smallest_mbc_size)
bcrr_sz = allctr->smallest_mbc_size;
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
bcrr_sz += sizeof(UWord);
#endif
@@ -1385,7 +1558,7 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
blk = MBC2FBLK(allctr, crr);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz -= sizeof(UWord);
#endif
@@ -1406,16 +1579,16 @@ create_carrier(Allctr_t *allctr, Uint umem_sz, UWord flags)
link_carrier(&allctr->mbc_list, crr);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz += sizeof(UWord);
#endif
CHECK_1BLK_CARRIER(allctr, 0, is_mseg, crr, crr_sz, blk, blk_sz);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- if (sizeof(Unit_t) == sizeof(UWord))
+ if (allctr->mbc_header_size % sizeof(Unit_t) == 0)
crr_sz -= sizeof(UWord);
#endif
if (allctr->creating_mbc)
- (*allctr->creating_mbc)(allctr, crr);
+ (*allctr->creating_mbc)(allctr, crr, 0);
}
@@ -1595,9 +1768,9 @@ destroy_carrier(Allctr_t *allctr, Block_t *blk)
ASSERT(IS_LAST_BLK(blk));
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, 0);
HARD_CHECK_BLK_CARRIER(allctr, blk);
- (*allctr->unlink_free_block)(allctr, blk);
+ (*allctr->unlink_free_block)(allctr, blk, 0);
#endif
}
#endif
@@ -1614,7 +1787,7 @@ destroy_carrier(Allctr_t *allctr, Block_t *blk)
unlink_carrier(&allctr->mbc_list, crr);
if (allctr->destroying_mbc)
- (*allctr->destroying_mbc)(allctr, crr);
+ (*allctr->destroying_mbc)(allctr, crr, 0);
}
@@ -1658,12 +1831,15 @@ static struct {
Eterm lmbcs;
Eterm smbcs;
Eterm mbcgs;
+ Eterm sbmbcs;
+ Eterm sbmbct;
#if HAVE_ERTS_MSEG
Eterm mmc;
#endif
Eterm ycs;
+ /* Eterm sbmbcs; */
Eterm mbcs;
Eterm sbcs;
Eterm sys_alloc_carriers_size;
@@ -1688,6 +1864,8 @@ static struct {
Eterm mseg_dealloc;
Eterm mseg_realloc;
#endif
+ Eterm sbmbc_alloc;
+ Eterm sbmbc_free;
#ifdef DEBUG
Eterm end_of_atoms;
#endif
@@ -1746,12 +1924,15 @@ init_atoms(Allctr_t *allctr)
AM_INIT(lmbcs);
AM_INIT(smbcs);
AM_INIT(mbcgs);
+ AM_INIT(sbmbcs);
+ AM_INIT(sbmbct);
#if HAVE_ERTS_MSEG
AM_INIT(mmc);
#endif
AM_INIT(ycs);
+ /*AM_INIT(sbmbcs);*/
AM_INIT(mbcs);
AM_INIT(sbcs);
AM_INIT(sys_alloc_carriers_size);
@@ -1776,6 +1957,8 @@ init_atoms(Allctr_t *allctr)
AM_INIT(mseg_dealloc);
AM_INIT(mseg_realloc);
#endif
+ AM_INIT(sbmbc_free);
+ AM_INIT(sbmbc_alloc);
#ifdef DEBUG
for (atom = (Eterm *) &am; atom < &am.end_of_atoms; atom++) {
@@ -1869,7 +2052,9 @@ sz_info_carriers(Allctr_t *allctr,
Uint *szp)
{
Eterm res = THE_NON_VALUE;
- Uint curr_size = cs->curr_mseg.size + cs->curr_sys_alloc.size;
+ Uint curr_size = (cs == &allctr->sbmbcs
+ ? cs->curr.small_block.size
+ : cs->curr.norm.mseg.size + cs->curr.norm.sys_alloc.size);
if (print_to_p) {
int to = *print_to_p;
@@ -1917,8 +2102,17 @@ info_carriers(Allctr_t *allctr,
Uint *szp)
{
Eterm res = THE_NON_VALUE;
- Uint curr_no = cs->curr_mseg.no + cs->curr_sys_alloc.no;
- Uint curr_size = cs->curr_mseg.size + cs->curr_sys_alloc.size;
+ Uint curr_no, curr_size;
+ int small_block = cs == &allctr->sbmbcs;
+
+ if (small_block) {
+ curr_no = cs->curr.small_block.no;
+ curr_size = cs->curr.small_block.size;
+ }
+ else {
+ curr_no = cs->curr.norm.mseg.no + cs->curr.norm.sys_alloc.no;
+ curr_size = cs->curr.norm.mseg.size + cs->curr.norm.sys_alloc.size;
+ }
if (print_to_p) {
int to = *print_to_p;
@@ -1944,18 +2138,20 @@ info_carriers(Allctr_t *allctr,
curr_no,
cs->max.no,
cs->max_ever.no);
+ if (!small_block) {
#if HAVE_ERTS_MSEG
- erts_print(to,
- arg,
- "%smseg carriers: %bpu\n",
- prefix,
- cs->curr_mseg.no);
-#endif
- erts_print(to,
- arg,
- "%ssys_alloc carriers: %bpu\n",
- prefix,
- cs->curr_sys_alloc.no);
+ erts_print(to,
+ arg,
+ "%smseg carriers: %bpu\n",
+ prefix,
+ cs->curr.norm.mseg.no);
+#endif
+ erts_print(to,
+ arg,
+ "%ssys_alloc carriers: %bpu\n",
+ prefix,
+ cs->curr.norm.sys_alloc.no);
+ }
erts_print(to,
arg,
"%scarriers size: %beu %bpu %bpu\n",
@@ -1963,43 +2159,49 @@ info_carriers(Allctr_t *allctr,
curr_size,
cs->max.size,
cs->max_ever.size);
+ if (!small_block) {
#if HAVE_ERTS_MSEG
- erts_print(to,
- arg,
- "%smseg carriers size: %bpu\n",
- prefix,
- cs->curr_mseg.size);
-#endif
- erts_print(to,
- arg,
- "%ssys_alloc carriers size: %bpu\n",
- prefix,
- cs->curr_sys_alloc.size);
+ erts_print(to,
+ arg,
+ "%smseg carriers size: %bpu\n",
+ prefix,
+ cs->curr.norm.mseg.size);
+#endif
+ erts_print(to,
+ arg,
+ "%ssys_alloc carriers size: %bpu\n",
+ prefix,
+ cs->curr.norm.sys_alloc.size);
+ }
}
if (hpp || szp) {
res = NIL;
- add_2tup(hpp, szp, &res,
- am.sys_alloc_carriers_size,
- bld_unstable_uint(hpp, szp, cs->curr_sys_alloc.size));
+ if (!small_block) {
+ add_2tup(hpp, szp, &res,
+ am.sys_alloc_carriers_size,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.sys_alloc.size));
#if HAVE_ERTS_MSEG
- add_2tup(hpp, szp, &res,
- am.mseg_alloc_carriers_size,
- bld_unstable_uint(hpp, szp, cs->curr_mseg.size));
+ add_2tup(hpp, szp, &res,
+ am.mseg_alloc_carriers_size,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.mseg.size));
#endif
+ }
add_4tup(hpp, szp, &res,
am.carriers_size,
bld_unstable_uint(hpp, szp, curr_size),
bld_unstable_uint(hpp, szp, cs->max.size),
bld_unstable_uint(hpp, szp, cs->max_ever.size));
- add_2tup(hpp, szp, &res,
- am.sys_alloc_carriers,
- bld_unstable_uint(hpp, szp, cs->curr_sys_alloc.no));
+ if (!small_block) {
+ add_2tup(hpp, szp, &res,
+ am.sys_alloc_carriers,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.sys_alloc.no));
#if HAVE_ERTS_MSEG
- add_2tup(hpp, szp, &res,
- am.mseg_alloc_carriers,
- bld_unstable_uint(hpp, szp, cs->curr_mseg.no));
+ add_2tup(hpp, szp, &res,
+ am.mseg_alloc_carriers,
+ bld_unstable_uint(hpp, szp, cs->curr.norm.mseg.no));
#endif
+ }
add_4tup(hpp, szp, &res,
am.carriers,
bld_unstable_uint(hpp, szp, curr_no),
@@ -2077,6 +2279,9 @@ info_calls(Allctr_t *allctr,
PRINT_CC_5(to, arg, prefix, "free", allctr->calls.this_free);
PRINT_CC_5(to, arg, prefix, "realloc", allctr->calls.this_realloc);
+ PRINT_CC_4(to, arg, "sbmbc_alloc", allctr->calls.sbmbc_alloc);
+ PRINT_CC_4(to, arg, "sbmbc_free", allctr->calls.sbmbc_free);
+
#if HAVE_ERTS_MSEG
PRINT_CC_4(to, arg, "mseg_alloc", allctr->calls.mseg_alloc);
PRINT_CC_4(to, arg, "mseg_dealloc", allctr->calls.mseg_dealloc);
@@ -2128,6 +2333,14 @@ info_calls(Allctr_t *allctr,
bld_unstable_uint(hpp, szp, allctr->calls.mseg_alloc.no));
#endif
add_3tup(hpp, szp, &res,
+ am.sbmbc_free,
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_free.giga_no),
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_free.no));
+ add_3tup(hpp, szp, &res,
+ am.sbmbc_alloc,
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_alloc.giga_no),
+ bld_unstable_uint(hpp, szp, allctr->calls.sbmbc_alloc.no));
+ add_3tup(hpp, szp, &res,
allctr->name.realloc,
bld_unstable_uint(hpp, szp, allctr->calls.this_realloc.giga_no),
bld_unstable_uint(hpp, szp, allctr->calls.this_realloc.no));
@@ -2191,7 +2404,9 @@ info_options(Allctr_t *allctr,
#endif
"option lmbcs: %beu\n"
"option smbcs: %beu\n"
- "option mbcgs: %beu\n",
+ "option mbcgs: %beu\n"
+ "option sbmbcs: %beu\n"
+ "option sbmbct: %beu\n",
topt,
allctr->ramv ? "true" : "false",
#if HALFWORD_HEAP
@@ -2211,7 +2426,9 @@ info_options(Allctr_t *allctr,
#endif
allctr->largest_mbc_size,
allctr->smallest_mbc_size,
- allctr->mbc_growth_stages);
+ allctr->mbc_growth_stages,
+ allctr->sbmbc_size,
+ allctr->sbmbc_threshold);
}
res = (*allctr->info_options)(allctr, "option ", print_to_p, print_to_arg,
@@ -2219,6 +2436,12 @@ info_options(Allctr_t *allctr,
if (hpp || szp) {
add_2tup(hpp, szp, &res,
+ am.sbmbct,
+ bld_uint(hpp, szp, allctr->sbmbc_threshold));
+ add_2tup(hpp, szp, &res,
+ am.sbmbcs,
+ bld_uint(hpp, szp, allctr->sbmbc_size));
+ add_2tup(hpp, szp, &res,
am.mbcgs,
bld_uint(hpp, szp, allctr->mbc_growth_stages));
add_2tup(hpp, szp, &res,
@@ -2285,10 +2508,10 @@ update_max_ever_values(CarriersStats_t *cs)
static ERTS_INLINE void
reset_max_values(CarriersStats_t *cs)
{
- cs->max.no = cs->curr_mseg.no + cs->curr_sys_alloc.no;
- cs->max.size = cs->curr_mseg.size + cs->curr_sys_alloc.size;
- cs->blocks.max.no = cs->blocks.curr.no;
- cs->blocks.max.size = cs->blocks.curr.size;
+ cs->max.no = cs->curr.norm.mseg.no + cs->curr.norm.sys_alloc.no;
+ cs->max.size = cs->curr.norm.mseg.size + cs->curr.norm.sys_alloc.size;
+ cs->blocks.max.no = cs->blocks.curr.no;
+ cs->blocks.max.size = cs->blocks.curr.size;
}
@@ -2367,7 +2590,7 @@ erts_alcu_sz_info(Allctr_t *allctr,
Uint **hpp,
Uint *szp)
{
- Eterm res, mbcs, sbcs;
+ Eterm res, sbmbcs, mbcs, sbcs;
res = THE_NON_VALUE;
@@ -2389,24 +2612,29 @@ erts_alcu_sz_info(Allctr_t *allctr,
 /* Update sbc values not continuously updated */
allctr->sbcs.blocks.curr.no
- = allctr->sbcs.curr_mseg.no + allctr->sbcs.curr_sys_alloc.no;
+ = allctr->sbcs.curr.norm.mseg.no + allctr->sbcs.curr.norm.sys_alloc.no;
allctr->sbcs.blocks.max.no = allctr->sbcs.max.no;
+ update_max_ever_values(&allctr->sbmbcs);
update_max_ever_values(&allctr->mbcs);
update_max_ever_values(&allctr->sbcs);
- mbcs = sz_info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
- print_to_arg, hpp, szp);
- sbcs = sz_info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
- print_to_arg, hpp, szp);
+ sbmbcs = sz_info_carriers(allctr, &allctr->sbmbcs, "sbmbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ mbcs = sz_info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ sbcs = sz_info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
+ print_to_arg, hpp, szp);
if (hpp || szp) {
res = NIL;
add_2tup(hpp, szp, &res, am.sbcs, sbcs);
add_2tup(hpp, szp, &res, am.mbcs, mbcs);
+ add_2tup(hpp, szp, &res, am.sbmbcs, sbmbcs);
}
if (begin_max_period) {
+ reset_max_values(&allctr->sbmbcs);
reset_max_values(&allctr->mbcs);
reset_max_values(&allctr->sbcs);
}
@@ -2428,7 +2656,7 @@ erts_alcu_info(Allctr_t *allctr,
Uint **hpp,
Uint *szp)
{
- Eterm res, sett, mbcs, sbcs, calls;
+ Eterm res, sett, sbmbcs, mbcs, sbcs, calls;
res = THE_NON_VALUE;
@@ -2450,9 +2678,10 @@ erts_alcu_info(Allctr_t *allctr,
 /* Update sbc values not continuously updated */
allctr->sbcs.blocks.curr.no
- = allctr->sbcs.curr_mseg.no + allctr->sbcs.curr_sys_alloc.no;
+ = allctr->sbcs.curr.norm.mseg.no + allctr->sbcs.curr.norm.sys_alloc.no;
allctr->sbcs.blocks.max.no = allctr->sbcs.max.no;
+ update_max_ever_values(&allctr->sbmbcs);
update_max_ever_values(&allctr->mbcs);
update_max_ever_values(&allctr->sbcs);
@@ -2464,11 +2693,13 @@ erts_alcu_info(Allctr_t *allctr,
ERTS_ALCU_VSN_STR);
}
- sett = info_options(allctr, print_to_p, print_to_arg, hpp, szp);
- mbcs = info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
- print_to_arg, hpp, szp);
- sbcs = info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
- print_to_arg, hpp, szp);
+ sett = info_options(allctr, print_to_p, print_to_arg, hpp, szp);
+ sbmbcs = info_carriers(allctr, &allctr->sbmbcs, "sbmbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ mbcs = info_carriers(allctr, &allctr->mbcs, "mbcs ", print_to_p,
+ print_to_arg, hpp, szp);
+ sbcs = info_carriers(allctr, &allctr->sbcs, "sbcs ", print_to_p,
+ print_to_arg, hpp, szp);
calls = info_calls(allctr, print_to_p, print_to_arg, hpp, szp);
if (hpp || szp) {
@@ -2477,6 +2708,7 @@ erts_alcu_info(Allctr_t *allctr,
add_2tup(hpp, szp, &res, am.calls, calls);
add_2tup(hpp, szp, &res, am.sbcs, sbcs);
add_2tup(hpp, szp, &res, am.mbcs, mbcs);
+ add_2tup(hpp, szp, &res, am.sbmbcs, sbmbcs);
add_2tup(hpp, szp, &res, am.options, sett);
add_3tup(hpp, szp, &res,
am.versions,
@@ -2485,6 +2717,7 @@ erts_alcu_info(Allctr_t *allctr,
}
if (begin_max_period) {
+ reset_max_values(&allctr->sbmbcs);
reset_max_values(&allctr->mbcs);
reset_max_values(&allctr->sbcs);
}
@@ -2508,12 +2741,14 @@ erts_alcu_current_size(Allctr_t *allctr, AllctrSize_t *size)
erts_mtx_lock(&allctr->mutex);
#endif
- size->carriers = allctr->mbcs.curr_mseg.size;
- size->carriers += allctr->mbcs.curr_sys_alloc.size;
- size->carriers += allctr->sbcs.curr_mseg.size;
- size->carriers += allctr->sbcs.curr_sys_alloc.size;
+ size->carriers = allctr->mbcs.curr.norm.mseg.size;
+ size->carriers += allctr->mbcs.curr.norm.sys_alloc.size;
+ size->carriers += allctr->sbmbcs.curr.small_block.size;
+ size->carriers += allctr->sbcs.curr.norm.mseg.size;
+ size->carriers += allctr->sbcs.curr.norm.sys_alloc.size;
size->blocks = allctr->mbcs.blocks.curr.size;
+ size->blocks += allctr->sbmbcs.blocks.curr.size;
size->blocks += allctr->sbcs.blocks.curr.size;
#ifdef USE_THREADS
@@ -2725,7 +2960,7 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
void *extra,
void *p,
Uint size,
- UWord flgs)
+ Uint32 alcu_flgs)
{
Allctr_t *allctr = (Allctr_t *) extra;
Block_t *blk;
@@ -2758,9 +2993,32 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
blk = UMEM2BLK(p);
+ if (allctr->sbmbc_threshold > 0) {
+ Uint old_sz, new_sz, lim;
+ lim = allctr->sbmbc_threshold;
+ old_sz = BLK_SZ(blk);
+ new_sz = UMEMSZ2BLKSZ(allctr, size);
+ if ((old_sz < lim && lim <= new_sz)
+ || (new_sz < lim && lim <= old_sz)) {
+ /* *Need* to move it... */
+
+ INC_CC(allctr->calls.this_realloc);
+ res = do_erts_alcu_alloc(type, extra, size);
+ DEC_CC(allctr->calls.this_alloc);
+
+ sys_memcpy(res, p, MIN(size, old_sz - ABLK_HDR_SZ));
+
+ do_erts_alcu_free(type, extra, p);
+ DEC_CC(allctr->calls.this_free);
+ return res;
+ }
+ if (old_sz < lim)
+ alcu_flgs |= ERTS_ALCU_FLG_SBMBC;
+ }
+
if (size < allctr->sbc_threshold) {
if (IS_MBC_BLK(blk))
- res = mbc_realloc(allctr, p, size, flgs);
+ res = mbc_realloc(allctr, p, size, alcu_flgs);
else {
Uint used_sz = allctr->sbc_header_size + ABLK_HDR_SZ + size;
Uint crr_sz;
@@ -2791,7 +3049,7 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
if (100*diff_sz_val < allctr->sbc_move_threshold*crr_sz_val)
/* Data won't be copied into a new carrier... */
goto do_carrier_resize;
- else if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ else if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
res = mbc_alloc(allctr, size);
@@ -2814,7 +3072,7 @@ do_erts_alcu_realloc(ErtsAlcType_t type,
#endif
res = new_blk ? BLK2UMEM(new_blk) : NULL;
}
- else if (flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
+ else if (alcu_flgs & ERTS_ALCU_FLG_FAIL_REALLOC_MOVE)
return NULL;
else {
#if HALFWORD_HEAP
@@ -3174,6 +3432,26 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init)
allctr->min_block_size = UNIT_CEILING(allctr->min_block_size
+ sizeof(UWord));
+
+ allctr->sbmbc_threshold = init->sbmbct;
+
+ if (!erts_have_sbmbc_alloc
+ || ERTS_IS_SBMBC_ALLOCATOR_NO__(allctr->alloc_no))
+ allctr->sbmbc_threshold = 0;
+
+ if (!allctr->sbmbc_threshold)
+ allctr->sbmbc_size = 0;
+ else {
+ Uint min_size;
+ allctr->sbmbc_size = init->sbmbcs;
+ min_size = allctr->sbmbc_threshold;
+ min_size += allctr->min_block_size;
+ min_size += allctr->mbc_header_size;
+ if (allctr->sbmbc_size < min_size)
+ allctr->sbmbc_size = min_size;
+ }
+
+
#if HAVE_ERTS_MSEG
if (allctr->mseg_opt.abs_shrink_th > ~((UWord) 0) / 100)
allctr->mseg_opt.abs_shrink_th = ~((UWord) 0) / 100;
@@ -3185,12 +3463,16 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init)
#ifdef ERTS_ENABLE_LOCK_COUNT
erts_mtx_init_x_opt(&allctr->mutex,
- "alcu_allocator",
- make_small(allctr->alloc_no),
- ERTS_LCNT_LT_ALLOC);
+ ERTS_IS_SBMBC_ALLOCATOR_NO__(allctr->alloc_no)
+ ? "sbmbc_alloc"
+ : "alcu_allocator",
+ make_small(allctr->alloc_no),
+ ERTS_LCNT_LT_ALLOC);
#else
erts_mtx_init_x(&allctr->mutex,
- "alcu_allocator",
+ ERTS_IS_SBMBC_ALLOCATOR_NO__(allctr->alloc_no)
+ ? "sbmbc_alloc"
+ : "alcu_allocator",
make_small(allctr->alloc_no));
#endif /*ERTS_ENABLE_LOCK_COUNT*/
@@ -3260,7 +3542,7 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init)
if (!blk)
goto error;
- (*allctr->link_free_block)(allctr, blk);
+ (*allctr->link_free_block)(allctr, blk, 0);
HARD_CHECK_BLK_CARRIER(allctr, blk);
@@ -3290,6 +3572,8 @@ erts_alcu_stop(Allctr_t *allctr)
destroy_carrier(allctr, SBC2BLK(allctr, allctr->sbc_list.first));
while (allctr->mbc_list.first)
destroy_carrier(allctr, MBC2FBLK(allctr, allctr->mbc_list.first));
+ while (allctr->sbmbc_list.first)
+ destroy_sbmbc(allctr, MBC2FBLK(allctr, allctr->sbmbc_list.first));
#ifdef USE_THREADS
if (allctr->thread_safe)
@@ -3387,13 +3671,15 @@ erts_alcu_verify_unused(Allctr_t *allctr)
{
UWord no;
- no = allctr->sbcs.curr_mseg.no;
- no += allctr->sbcs.curr_sys_alloc.no;
+ no = allctr->sbcs.curr.norm.mseg.no;
+ no += allctr->sbcs.curr.norm.sys_alloc.no;
no += allctr->mbcs.blocks.curr.no;
+ no += allctr->sbmbcs.blocks.curr.no;
if (no) {
UWord sz = allctr->sbcs.blocks.curr.size;
sz += allctr->mbcs.blocks.curr.size;
+ sz += allctr->sbmbcs.blocks.curr.size;
erl_exit(ERTS_ABORT_EXIT,
"%salloc() used when expected to be unused!\n"
"Total amount of blocks allocated: %bpu\n"
@@ -3492,7 +3778,7 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
(*allctr->check_block)(allctr, blk, (int) is_free_blk);
if (IS_LAST_BLK(blk)) {
- carrier_end = ((char *) NXT_BLK(blk)) + sizeof(UWord);
+ carrier_end = ((char *) NXT_BLK(blk));
mbc = *((Carrier_t **) NXT_BLK(blk));
prev_blk = NULL;
blk = MBC2FBLK(allctr, mbc);
@@ -3507,9 +3793,9 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
ASSERT(IS_MB_CARRIER(mbc));
ASSERT((((char *) mbc)
+ allctr->mbc_header_size
- + tot_blk_sz
- + sizeof(UWord)) == carrier_end);
- ASSERT(((char *) mbc) + CARRIER_SZ(mbc) == carrier_end);
+ + tot_blk_sz) == carrier_end);
+ ASSERT(((char *) mbc) + CARRIER_SZ(mbc) - sizeof(Unit_t) <= carrier_end
+ && carrier_end <= ((char *) mbc) + CARRIER_SZ(mbc));
if (allctr->check_mbc)
(*allctr->check_mbc)(allctr, mbc);
@@ -3523,6 +3809,7 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
cl = &allctr->mbc_list;
}
+#if 0 /* FIXIT sbmbc */
if (cl->first == crr) {
ASSERT(!crr->prev);
}
@@ -3537,6 +3824,7 @@ check_blk_carrier(Allctr_t *allctr, Block_t *iblk)
ASSERT(crr->next);
ASSERT(crr->next->prev == crr);
}
+#endif
}
#endif
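
The realloc hunk above adds a routing step for the new small-block multi-block carriers (sbmbc): when the old and new block sizes fall on opposite sides of sbmbc_threshold, the block cannot stay in its current carrier type, so it is copied via a plain alloc/copy/free sequence (with the call counters adjusted so the operation is still accounted as a single realloc). The following is a minimal sketch of that placement test only; the function and constant names are hypothetical stand-ins, not ERTS APIs.

enum { KEEP_IN_PLACE, MUST_MOVE };

/* Decide whether a realloc may stay in its current carrier type.
 * A block may live in an sbmbc only while its size is below the
 * threshold; crossing the limit in either direction forces a copy. */
static int sbmbc_realloc_placement(unsigned old_blk_sz,
                                   unsigned new_blk_sz,
                                   unsigned sbmbc_threshold)
{
    if (sbmbc_threshold == 0)
        return KEEP_IN_PLACE;                 /* sbmbc disabled */
    if ((old_blk_sz < sbmbc_threshold && sbmbc_threshold <= new_blk_sz)
        || (new_blk_sz < sbmbc_threshold && sbmbc_threshold <= old_blk_sz))
        return MUST_MOVE;                     /* carrier type changes */
    return KEEP_IN_PLACE;                     /* same carrier type */
}
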
diff --git a/erts/emulator/beam/erl_alloc_util.h b/erts/emulator/beam/erl_alloc_util.h
index ddf84c086c..fed4d3dbe6 100644
--- a/erts/emulator/beam/erl_alloc_util.h
+++ b/erts/emulator/beam/erl_alloc_util.h
@@ -34,6 +34,7 @@ typedef struct {
typedef struct {
char *name_prefix;
ErtsAlcType_t alloc_no;
+ int force;
int ts;
int tspec;
int tpref;
@@ -50,6 +51,8 @@ typedef struct {
UWord lmbcs;
UWord smbcs;
UWord mbcgs;
+ UWord sbmbct;
+ UWord sbmbcs;
} AllctrInit_t;
typedef struct {
@@ -67,6 +70,7 @@ typedef struct {
#define ERTS_DEFAULT_ALLCTR_INIT { \
NULL, \
ERTS_ALC_A_INVALID, /* (number) alloc_no: allocator number */\
+ 0, /* (bool) force: force enabled */\
1, /* (bool) ts: thread safe */\
0, /* (bool) tspec: thread specific */\
0, /* (bool) tpref: thread preferred */\
@@ -82,7 +86,9 @@ typedef struct {
10, /* (amount) mmmbc: max mseg mbcs */\
10*1024*1024, /* (bytes) lmbcs: largest mbc size */\
1024*1024, /* (bytes) smbcs: smallest mbc size */\
- 10 /* (amount) mbcgs: mbc growth stages */\
+ 10, /* (amount) mbcgs: mbc growth stages */\
+ 256, /* (bytes) sbmbct: small block mbc threshold */\
+ 8*1024 /* (bytes) sbmbcs: small block mbc size */\
}
#else /* if SMALL_MEMORY */
@@ -95,6 +101,7 @@ typedef struct {
#define ERTS_DEFAULT_ALLCTR_INIT { \
NULL, \
ERTS_ALC_A_INVALID, /* (number) alloc_no: allocator number */\
+ 0, /* (bool) force: force enabled */\
1, /* (bool) ts: thread safe */\
0, /* (bool) tspec: thread specific */\
0, /* (bool) tpref: thread preferred */\
@@ -109,7 +116,9 @@ typedef struct {
10, /* (amount) mmmbc: max mseg mbcs */\
1024*1024, /* (bytes) lmbcs: largest mbc size */\
128*1024, /* (bytes) smbcs: smallest mbc size */\
- 10 /* (amount) mbcgs: mbc growth stages */\
+ 10, /* (amount) mbcgs: mbc growth stages */\
+ 256, /* (bytes) sbmbct: small block mbc threshold */\
+ 8*1024 /* (bytes) sbmbcs: small block mbc size */\
}
#endif
@@ -144,6 +153,9 @@ void erts_alcu_current_size(Allctr_t *, AllctrSize_t *);
#if defined(GET_ERL_ALLOC_UTIL_IMPL) && !defined(ERL_ALLOC_UTIL_IMPL__)
#define ERL_ALLOC_UTIL_IMPL__
+#define ERTS_ALCU_FLG_FAIL_REALLOC_MOVE (((Uint32) 1) << 0)
+#define ERTS_ALCU_FLG_SBMBC (((Uint32) 1) << 1)
+
#ifdef USE_THREADS
#define ERL_THREADS_EMU_INTERNAL__
#include "erl_threads.h"
@@ -188,6 +200,8 @@ void erts_alcu_current_size(Allctr_t *, AllctrSize_t *);
#define CARRIER_SZ(C) \
((C)->chdr & SZ_MASK)
+extern int erts_have_sbmbc_alloc;
+
typedef union {char c[8]; long l; double d;} Unit_t;
typedef struct Carrier_t_ Carrier_t;
@@ -216,8 +230,13 @@ typedef struct {
} StatValues_t;
typedef struct {
- StatValues_t curr_mseg;
- StatValues_t curr_sys_alloc;
+ union {
+ struct {
+ StatValues_t mseg;
+ StatValues_t sys_alloc;
+ } norm;
+ StatValues_t small_block;
+ } curr;
StatValues_t max;
StatValues_t max_ever;
struct {
@@ -257,6 +276,8 @@ struct Allctr_t_ {
Uint largest_mbc_size;
Uint smallest_mbc_size;
Uint mbc_growth_stages;
+ Uint sbmbc_threshold;
+ Uint sbmbc_size;
#if HAVE_ERTS_MSEG
ErtsMsegOpt_t mseg_opt;
#endif
@@ -269,6 +290,7 @@ struct Allctr_t_ {
Uint min_block_size;
/* Carriers */
+ CarrierList_t sbmbc_list;
CarrierList_t mbc_list;
CarrierList_t sbc_list;
@@ -277,15 +299,15 @@ struct Allctr_t_ {
/* Callback functions (first 4 are mandatory) */
Block_t * (*get_free_block) (Allctr_t *, Uint,
- Block_t *, Uint);
- void (*link_free_block) (Allctr_t *, Block_t *);
- void (*unlink_free_block) (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+ void (*link_free_block) (Allctr_t *, Block_t *, Uint32);
+ void (*unlink_free_block) (Allctr_t *, Block_t *, Uint32);
Eterm (*info_options) (Allctr_t *, char *, int *,
void *, Uint **, Uint *);
Uint (*get_next_mbc_size) (Allctr_t *);
- void (*creating_mbc) (Allctr_t *, Carrier_t *);
- void (*destroying_mbc) (Allctr_t *, Carrier_t *);
+ void (*creating_mbc) (Allctr_t *, Carrier_t *, Uint32);
+ void (*destroying_mbc) (Allctr_t *, Carrier_t *, Uint32);
void (*init_atoms) (void);
#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
@@ -312,6 +334,8 @@ struct Allctr_t_ {
CallCounter_t this_alloc;
CallCounter_t this_free;
CallCounter_t this_realloc;
+ CallCounter_t sbmbc_alloc;
+ CallCounter_t sbmbc_free;
CallCounter_t mseg_alloc;
CallCounter_t mseg_dealloc;
CallCounter_t mseg_realloc;
@@ -322,6 +346,7 @@ struct Allctr_t_ {
CarriersStats_t sbcs;
CarriersStats_t mbcs;
+ CarriersStats_t sbmbcs;
#ifdef DEBUG
#ifdef USE_THREADS
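
The header changes thread a Uint32 flags argument through every block and carrier callback so a placement strategy can distinguish operations on the new small-block MBCs (ERTS_ALCU_FLG_SBMBC) from ordinary MBC operations. Both erl_bestfit_alloc.c and the new erl_ao_firstfit_alloc.c apply the flag the same way: they keep two tree roots and pick one per call. A reduced sketch of that pattern follows, with simplified stand-in types rather than the real ERTS declarations.

#define FLG_SBMBC (((unsigned) 1) << 1)  /* mirrors ERTS_ALCU_FLG_SBMBC */

typedef struct tree_node tree_node;      /* free-block tree node (opaque here) */

typedef struct {
    tree_node *mbc_root;                 /* free blocks in ordinary MBCs */
    tree_node *sbmbc_root;               /* free blocks in small-block MBCs */
} strategy_state;

/* Every callback starts by selecting the tree that matches the carrier
 * type encoded in the flags argument. */
static tree_node **select_root(strategy_state *st, unsigned flags)
{
    return (flags & FLG_SBMBC) ? &st->sbmbc_root : &st->mbc_root;
}
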
diff --git a/erts/emulator/beam/erl_ao_firstfit_alloc.c b/erts/emulator/beam/erl_ao_firstfit_alloc.c
new file mode 100644
index 0000000000..002852cdad
--- /dev/null
+++ b/erts/emulator/beam/erl_ao_firstfit_alloc.c
@@ -0,0 +1,972 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2003-2009. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+
+/*
+ * Description: An "address order first fit" allocator
+ * based on a Red-Black (binary search) Tree. The search,
+ * insert, and delete operations are all O(log n) operations
+ * on a Red-Black Tree.
+ * Red-Black Trees are described in "Introduction to Algorithms",
+ * by Thomas H. Cormen, Charles E. Leiserson, and Ronald L. Rivest.
+ *
+ * This module is a callback-module for erl_alloc_util.c
+ *
+ * Algorithm: The tree nodes are free-blocks ordered in address order.
+ * Every node also keeps the size of the largest block in its
+ * sub-tree ('max_size'). This lets the search start at the root and
+ * keep going left (toward lower addresses) while dismissing entire
+ * sub-trees whose blocks are all too small.
+ *
+ * Authors: Rickard Green/Sverker Eriksson
+ */
+
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+#include "global.h"
+#define GET_ERL_AOFF_ALLOC_IMPL
+#include "erl_ao_firstfit_alloc.h"
+
+#ifdef DEBUG
+#if 0
+#define HARD_DEBUG
+#endif
+#else
+#undef HARD_DEBUG
+#endif
+
+#define MIN_MBC_SZ (16*1024)
+#define MIN_MBC_FIRST_FREE_SZ (4*1024)
+
+#define TREE_NODE_FLG (((Uint) 1) << 0)
+#define RED_FLG (((Uint) 1) << 1)
+#ifdef HARD_DEBUG
+# define LEFT_VISITED_FLG (((Uint) 1) << 2)
+# define RIGHT_VISITED_FLG (((Uint) 1) << 3)
+#endif
+
+#define IS_RED(N) (((AOFF_RBTree_t *) (N)) \
+ && ((AOFF_RBTree_t *) (N))->flags & RED_FLG)
+#define IS_BLACK(N) (!IS_RED(((AOFF_RBTree_t *) (N))))
+
+#define SET_RED(N) (((AOFF_RBTree_t *) (N))->flags |= RED_FLG)
+#define SET_BLACK(N) (((AOFF_RBTree_t *) (N))->flags &= ~RED_FLG)
+
+#undef ASSERT
+#define ASSERT ASSERT_EXPR
+
+#if 1
+#define RBT_ASSERT ASSERT
+#else
+#define RBT_ASSERT(x)
+#endif
+
+
+/* Types... */
+typedef struct AOFF_RBTree_t_ AOFF_RBTree_t;
+
+struct AOFF_RBTree_t_ {
+ Block_t hdr;
+ Uint flags;
+ AOFF_RBTree_t *parent;
+ AOFF_RBTree_t *left;
+ AOFF_RBTree_t *right;
+ Uint max_sz; /* of all blocks in this sub-tree */
+};
+
+#ifdef HARD_DEBUG
+static AOFF_RBTree_t * check_tree(AOFF_RBTree_t* root, Uint);
+#endif
+
+
+/* Calculate 'max_size' of tree node x by looking only at x itself and
+ * its direct children.
+ */
+static ERTS_INLINE Uint node_max_size(AOFF_RBTree_t *x)
+{
+ Uint sz = BLK_SZ(x);
+ if (x->left && x->left->max_sz > sz) {
+ sz = x->left->max_sz;
+ }
+ if (x->right && x->right->max_sz > sz) {
+ sz = x->right->max_sz;
+ }
+ return sz;
+}
+
+/* Set a new, possibly lower, 'max_size' for the node and propagate the
+ * change toward the root. */
+static ERTS_INLINE void lower_max_size(AOFF_RBTree_t *node,
+ AOFF_RBTree_t* stop_at)
+{
+ AOFF_RBTree_t* x = node;
+ Uint old_max = x->max_sz;
+ Uint new_max = node_max_size(x);
+
+ if (new_max < old_max) {
+ x->max_sz = new_max;
+ while ((x=x->parent) != stop_at && x->max_sz == old_max) {
+ x->max_sz = node_max_size(x);
+ }
+ ASSERT(x == stop_at || x->max_sz > old_max);
+ }
+ else ASSERT(new_max == old_max);
+}
+
+
+/* Prototypes of callback functions */
+static Block_t* aoff_get_free_block(Allctr_t *, Uint, Block_t *, Uint, Uint32 flags);
+static void aoff_link_free_block(Allctr_t *, Block_t*, Uint32 flags);
+static void aoff_unlink_free_block(Allctr_t *allctr, Block_t *del, Uint32 flags);
+
+static Eterm info_options(Allctr_t *, char *, int *, void *, Uint **, Uint *);
+static void init_atoms(void);
+
+
+
+#ifdef DEBUG
+
+/* Destroy all tree fields */
+#define DESTROY_TREE_NODE(N) \
+ sys_memset((void *) (((Block_t *) (N)) + 1), \
+ 0xff, \
+ (sizeof(AOFF_RBTree_t) - sizeof(Block_t)))
+
+#else
+
+#define DESTROY_TREE_NODE(N)
+
+#endif
+
+
+static int atoms_initialized = 0;
+
+void
+erts_aoffalc_init(void)
+{
+ atoms_initialized = 0;
+}
+
+Allctr_t *
+erts_aoffalc_start(AOFFAllctr_t *alc,
+ AOFFAllctrInit_t* aoffinit,
+ AllctrInit_t *init)
+{
+ AOFFAllctr_t nulled_state = {{0}};
+ /* {{0}} is used instead of {0}, in order to avoid (an incorrect) gcc
+ warning. gcc warns if {0} is used as initializer of a struct when
+ the first member is a struct (not if, for example, the third member
+ is a struct). */
+ Allctr_t *allctr = (Allctr_t *) alc;
+
+ sys_memcpy((void *) alc, (void *) &nulled_state, sizeof(AOFFAllctr_t));
+
+ allctr->mbc_header_size = sizeof(Carrier_t);
+ allctr->min_mbc_size = MIN_MBC_SZ;
+ allctr->min_mbc_first_free_size = MIN_MBC_FIRST_FREE_SZ;
+ allctr->min_block_size = sizeof(AOFF_RBTree_t);
+
+ allctr->vsn_str = ERTS_ALC_AOFF_ALLOC_VSN_STR;
+
+
+ /* Callback functions */
+
+ allctr->get_free_block = aoff_get_free_block;
+ allctr->link_free_block = aoff_link_free_block;
+ allctr->unlink_free_block = aoff_unlink_free_block;
+ allctr->info_options = info_options;
+
+ allctr->get_next_mbc_size = NULL;
+ allctr->creating_mbc = NULL;
+ allctr->destroying_mbc = NULL;
+ allctr->init_atoms = init_atoms;
+
+#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG
+ allctr->check_block = NULL;
+ allctr->check_mbc = NULL;
+#endif
+
+ allctr->atoms_initialized = 0;
+
+ if (!erts_alcu_start(allctr, init))
+ return NULL;
+
+ return allctr;
+}
+
+/*
+ * Red-Black Tree operations needed
+ */
+
+static ERTS_INLINE void
+left_rotate(AOFF_RBTree_t **root, AOFF_RBTree_t *x)
+{
+ AOFF_RBTree_t *y = x->right;
+ x->right = y->left;
+ if (y->left)
+ y->left->parent = x;
+ y->parent = x->parent;
+ if (!y->parent) {
+ RBT_ASSERT(*root == x);
+ *root = y;
+ }
+ else if (x == x->parent->left)
+ x->parent->left = y;
+ else {
+ RBT_ASSERT(x == x->parent->right);
+ x->parent->right = y;
+ }
+ y->left = x;
+ x->parent = y;
+
+ y->max_sz = x->max_sz;
+ x->max_sz = node_max_size(x);
+ ASSERT(y->max_sz >= x->max_sz);
+}
+
+static ERTS_INLINE void
+right_rotate(AOFF_RBTree_t **root, AOFF_RBTree_t *x)
+{
+ AOFF_RBTree_t *y = x->left;
+ x->left = y->right;
+ if (y->right)
+ y->right->parent = x;
+ y->parent = x->parent;
+ if (!y->parent) {
+ RBT_ASSERT(*root == x);
+ *root = y;
+ }
+ else if (x == x->parent->right)
+ x->parent->right = y;
+ else {
+ RBT_ASSERT(x == x->parent->left);
+ x->parent->left = y;
+ }
+ y->right = x;
+ x->parent = y;
+ y->max_sz = x->max_sz;
+ x->max_sz = node_max_size(x);
+ ASSERT(y->max_sz >= x->max_sz);
+}
+
+
+/*
+ * Replace node x with node y
+ * NOTE: block header of y is not changed
+ */
+static ERTS_INLINE void
+replace(AOFF_RBTree_t **root, AOFF_RBTree_t *x, AOFF_RBTree_t *y)
+{
+
+ if (!x->parent) {
+ RBT_ASSERT(*root == x);
+ *root = y;
+ }
+ else if (x == x->parent->left)
+ x->parent->left = y;
+ else {
+ RBT_ASSERT(x == x->parent->right);
+ x->parent->right = y;
+ }
+ if (x->left) {
+ RBT_ASSERT(x->left->parent == x);
+ x->left->parent = y;
+ }
+ if (x->right) {
+ RBT_ASSERT(x->right->parent == x);
+ x->right->parent = y;
+ }
+
+ y->flags = x->flags;
+ y->parent = x->parent;
+ y->right = x->right;
+ y->left = x->left;
+
+ y->max_sz = x->max_sz;
+ lower_max_size(y, NULL);
+ DESTROY_TREE_NODE(x);
+}
+
+static void
+tree_insert_fixup(AOFF_RBTree_t** root, AOFF_RBTree_t *blk)
+{
+ AOFF_RBTree_t *x = blk, *y;
+
+ /*
+ * Rearrange the tree so that it satisfies the Red-Black Tree properties
+ */
+
+ RBT_ASSERT(x != *root && IS_RED(x->parent));
+ do {
+
+ /*
+ * x and its parent are both red. Move the red pair up the tree
+ * until we get to the root or until we can separate them.
+ */
+
+ RBT_ASSERT(IS_RED(x));
+ RBT_ASSERT(IS_BLACK(x->parent->parent));
+ RBT_ASSERT(x->parent->parent);
+
+ if (x->parent == x->parent->parent->left) {
+ y = x->parent->parent->right;
+ if (IS_RED(y)) {
+ SET_BLACK(y);
+ x = x->parent;
+ SET_BLACK(x);
+ x = x->parent;
+ SET_RED(x);
+ }
+ else {
+
+ if (x == x->parent->right) {
+ x = x->parent;
+ left_rotate(root, x);
+ }
+
+ RBT_ASSERT(x == x->parent->parent->left->left);
+ RBT_ASSERT(IS_RED(x));
+ RBT_ASSERT(IS_RED(x->parent));
+ RBT_ASSERT(IS_BLACK(x->parent->parent));
+ RBT_ASSERT(IS_BLACK(y));
+
+ SET_BLACK(x->parent);
+ SET_RED(x->parent->parent);
+ right_rotate(root, x->parent->parent);
+
+ RBT_ASSERT(x == x->parent->left);
+ RBT_ASSERT(IS_RED(x));
+ RBT_ASSERT(IS_RED(x->parent->right));
+ RBT_ASSERT(IS_BLACK(x->parent));
+ break;
+ }
+ }
+ else {
+ RBT_ASSERT(x->parent == x->parent->parent->right);
+ y = x->parent->parent->left;
+ if (IS_RED(y)) {
+ SET_BLACK(y);
+ x = x->parent;
+ SET_BLACK(x);
+ x = x->parent;
+ SET_RED(x);
+ }
+ else {
+
+ if (x == x->parent->left) {
+ x = x->parent;
+ right_rotate(root, x);
+ }
+
+ RBT_ASSERT(x == x->parent->parent->right->right);
+ RBT_ASSERT(IS_RED(x));
+ RBT_ASSERT(IS_RED(x->parent));
+ RBT_ASSERT(IS_BLACK(x->parent->parent));
+ RBT_ASSERT(IS_BLACK(y));
+
+ SET_BLACK(x->parent);
+ SET_RED(x->parent->parent);
+ left_rotate(root, x->parent->parent);
+
+ RBT_ASSERT(x == x->parent->right);
+ RBT_ASSERT(IS_RED(x));
+ RBT_ASSERT(IS_RED(x->parent->left));
+ RBT_ASSERT(IS_BLACK(x->parent));
+ break;
+ }
+ }
+ } while (x != *root && IS_RED(x->parent));
+
+ SET_BLACK(*root);
+}
+
+static void
+aoff_unlink_free_block(Allctr_t *allctr, Block_t *del, Uint32 flags)
+{
+ AOFFAllctr_t *alc = (AOFFAllctr_t *) allctr;
+ AOFF_RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &alc->sbmbc_root : &alc->mbc_root);
+ Uint spliced_is_black;
+ AOFF_RBTree_t *x, *y, *z = (AOFF_RBTree_t *) del;
+ AOFF_RBTree_t null_x; /* null_x is used to get the fixup started when we
+ splice out a node without children. */
+
+ null_x.parent = NULL;
+
+#ifdef HARD_DEBUG
+ check_tree(*root, 0);
+#endif
+
+ /* Remove node from tree... */
+
+ /* Find node to splice out */
+ if (!z->left || !z->right)
+ y = z;
+ else
+ /* Set y to z's successor */
+ for(y = z->right; y->left; y = y->left);
+ /* splice out y */
+ x = y->left ? y->left : y->right;
+ spliced_is_black = IS_BLACK(y);
+ if (x) {
+ x->parent = y->parent;
+ }
+ else if (spliced_is_black) {
+ x = &null_x;
+ x->flags = 0;
+ SET_BLACK(x);
+ x->right = x->left = NULL;
+ x->max_sz = 0;
+ x->parent = y->parent;
+ y->left = x;
+ }
+
+ if (!y->parent) {
+ RBT_ASSERT(*root == y);
+ *root = x;
+ }
+ else {
+ if (y == y->parent->left) {
+ y->parent->left = x;
+ }
+ else {
+ RBT_ASSERT(y == y->parent->right);
+ y->parent->right = x;
+ }
+ if (y->parent != z) {
+ lower_max_size(y->parent, (y==z ? NULL : z));
+ }
+ }
+ if (y != z) {
+ /* We spliced out the successor of z; replace z by the successor */
+ replace(root, z, y);
+ }
+
+ if (spliced_is_black) {
+ /* We removed a black node which makes the resulting tree
+ violate the Red-Black Tree properties. Fixup tree... */
+
+ while (IS_BLACK(x) && x->parent) {
+
+ /*
+ * x has an "extra black" which we move up the tree
+ * until we reach the root or until we can get rid of it.
+ *
+ * y is the sibling of x
+ */
+
+ if (x == x->parent->left) {
+ y = x->parent->right;
+ RBT_ASSERT(y);
+ if (IS_RED(y)) {
+ RBT_ASSERT(y->right);
+ RBT_ASSERT(y->left);
+ SET_BLACK(y);
+ RBT_ASSERT(IS_BLACK(x->parent));
+ SET_RED(x->parent);
+ left_rotate(root, x->parent);
+ y = x->parent->right;
+ }
+ RBT_ASSERT(y);
+ RBT_ASSERT(IS_BLACK(y));
+ if (IS_BLACK(y->left) && IS_BLACK(y->right)) {
+ SET_RED(y);
+ x = x->parent;
+ }
+ else {
+ if (IS_BLACK(y->right)) {
+ SET_BLACK(y->left);
+ SET_RED(y);
+ right_rotate(root, y);
+ y = x->parent->right;
+ }
+ RBT_ASSERT(y);
+ if (IS_RED(x->parent)) {
+
+ SET_BLACK(x->parent);
+ SET_RED(y);
+ }
+ RBT_ASSERT(y->right);
+ SET_BLACK(y->right);
+ left_rotate(root, x->parent);
+ x = *root;
+ break;
+ }
+ }
+ else {
+ RBT_ASSERT(x == x->parent->right);
+ y = x->parent->left;
+ RBT_ASSERT(y);
+ if (IS_RED(y)) {
+ RBT_ASSERT(y->right);
+ RBT_ASSERT(y->left);
+ SET_BLACK(y);
+ RBT_ASSERT(IS_BLACK(x->parent));
+ SET_RED(x->parent);
+ right_rotate(root, x->parent);
+ y = x->parent->left;
+ }
+ RBT_ASSERT(y);
+ RBT_ASSERT(IS_BLACK(y));
+ if (IS_BLACK(y->right) && IS_BLACK(y->left)) {
+ SET_RED(y);
+ x = x->parent;
+ }
+ else {
+ if (IS_BLACK(y->left)) {
+ SET_BLACK(y->right);
+ SET_RED(y);
+ left_rotate(root, y);
+ y = x->parent->left;
+ }
+ RBT_ASSERT(y);
+ if (IS_RED(x->parent)) {
+ SET_BLACK(x->parent);
+ SET_RED(y);
+ }
+ RBT_ASSERT(y->left);
+ SET_BLACK(y->left);
+ right_rotate(root, x->parent);
+ x = *root;
+ break;
+ }
+ }
+ }
+ SET_BLACK(x);
+
+ if (null_x.parent) {
+ if (null_x.parent->left == &null_x)
+ null_x.parent->left = NULL;
+ else {
+ RBT_ASSERT(null_x.parent->right == &null_x);
+ null_x.parent->right = NULL;
+ }
+ RBT_ASSERT(!null_x.left);
+ RBT_ASSERT(!null_x.right);
+ }
+ else if (*root == &null_x) {
+ *root = NULL;
+ RBT_ASSERT(!null_x.left);
+ RBT_ASSERT(!null_x.right);
+ }
+ }
+
+ DESTROY_TREE_NODE(del);
+
+#ifdef HARD_DEBUG
+ check_tree(*root, 0);
+#endif
+}
+
+static void
+aoff_link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
+{
+ AOFFAllctr_t *alc = (AOFFAllctr_t *) allctr;
+ AOFF_RBTree_t *blk = (AOFF_RBTree_t *) block;
+ AOFF_RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &alc->sbmbc_root : &alc->mbc_root);
+ Uint blk_sz = BLK_SZ(blk);
+
+#ifdef HARD_DEBUG
+ check_tree(*root, 0);
+#endif
+
+ blk->flags = 0;
+ blk->left = NULL;
+ blk->right = NULL;
+ blk->max_sz = blk_sz;
+
+ if (!*root) {
+ blk->parent = NULL;
+ SET_BLACK(blk);
+ *root = blk;
+ }
+ else {
+ AOFF_RBTree_t *x = *root;
+ while (1) {
+ if (x->max_sz < blk_sz) {
+ x->max_sz = blk_sz;
+ }
+ if (blk < x) {
+ if (!x->left) {
+ blk->parent = x;
+ x->left = blk;
+ break;
+ }
+ x = x->left;
+ }
+ else {
+ if (!x->right) {
+ blk->parent = x;
+ x->right = blk;
+ break;
+ }
+ x = x->right;
+ }
+
+ }
+
+ /* Insert block into size tree */
+ RBT_ASSERT(blk->parent);
+
+ SET_RED(blk);
+ if (IS_RED(blk->parent))
+ tree_insert_fixup(root, blk);
+ }
+
+#ifdef HARD_DEBUG
+ check_tree(*root, 0);
+#endif
+}
+
+static Block_t *
+aoff_get_free_block(Allctr_t *allctr, Uint size,
+ Block_t *cand_blk, Uint cand_size, Uint32 flags)
+{
+ AOFFAllctr_t *alc = (AOFFAllctr_t *) allctr;
+ AOFF_RBTree_t *x = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? alc->sbmbc_root : alc->mbc_root);
+ AOFF_RBTree_t *blk = NULL;
+#ifdef HARD_DEBUG
+ AOFF_RBTree_t* dbg_blk = check_tree(x, size);
+#endif
+
+ ASSERT(!cand_blk || cand_size >= size);
+
+ while (x) {
+ if (x->left && x->left->max_sz >= size) {
+ x = x->left;
+ }
+ else if (BLK_SZ(x) >= size) {
+ blk = x;
+ break;
+ }
+ else {
+ x = x->right;
+ }
+ }
+
+#ifdef HARD_DEBUG
+ ASSERT(blk == dbg_blk);
+#endif
+
+ if (!blk)
+ return NULL;
+
+ if (cand_blk && cand_blk < &blk->hdr) {
+ return NULL; /* cand_blk was better */
+ }
+
+ aoff_unlink_free_block(allctr, (Block_t *) blk, flags);
+
+ return (Block_t *) blk;
+}
+
+
+/*
+ * info_options()
+ */
+
+static struct {
+ Eterm as;
+ Eterm aoff;
+#ifdef DEBUG
+ Eterm end_of_atoms;
+#endif
+} am;
+
+static void ERTS_INLINE atom_init(Eterm *atom, char *name)
+{
+ *atom = am_atom_put(name, strlen(name));
+}
+#define AM_INIT(AM) atom_init(&am.AM, #AM)
+
+static void
+init_atoms(void)
+{
+#ifdef DEBUG
+ Eterm *atom;
+#endif
+
+ if (atoms_initialized)
+ return;
+
+#ifdef DEBUG
+ for (atom = (Eterm *) &am; atom <= &am.end_of_atoms; atom++) {
+ *atom = THE_NON_VALUE;
+ }
+#endif
+ AM_INIT(as);
+ AM_INIT(aoff);
+
+#ifdef DEBUG
+ for (atom = (Eterm *) &am; atom < &am.end_of_atoms; atom++) {
+ ASSERT(*atom != THE_NON_VALUE);
+ }
+#endif
+
+ atoms_initialized = 1;
+}
+
+
+#define bld_uint erts_bld_uint
+#define bld_cons erts_bld_cons
+#define bld_tuple erts_bld_tuple
+
+static ERTS_INLINE void
+add_2tup(Uint **hpp, Uint *szp, Eterm *lp, Eterm el1, Eterm el2)
+{
+ *lp = bld_cons(hpp, szp, bld_tuple(hpp, szp, 2, el1, el2), *lp);
+}
+
+static Eterm
+info_options(Allctr_t *allctr,
+ char *prefix,
+ int *print_to_p,
+ void *print_to_arg,
+ Uint **hpp,
+ Uint *szp)
+{
+ Eterm res = THE_NON_VALUE;
+
+ if (print_to_p) {
+ erts_print(*print_to_p,
+ print_to_arg,
+ "%sas: %s\n",
+ prefix,
+ "aoff");
+ }
+
+ if (hpp || szp) {
+
+ if (!atoms_initialized)
+ erl_exit(1, "%s:%d: Internal error: Atoms not initialized",
+ __FILE__, __LINE__);
+
+ res = NIL;
+ add_2tup(hpp, szp, &res, am.as, am.aoff);
+ }
+
+ return res;
+}
+
+
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\
+ * NOTE: erts_aoffalc_test() is only supposed to be used for testing. *
+ * *
+ * Keep alloc_SUITE_data/allocator_test.h updated if changes are made *
+ * to erts_aoffalc_test() *
+\* */
+
+unsigned long
+erts_aoffalc_test(unsigned long op, unsigned long a1, unsigned long a2)
+{
+ switch (op) {
+ case 0x500: return (unsigned long) 0; /* IS_AOBF */
+ case 0x501: return (unsigned long) ((AOFFAllctr_t *) a1)->mbc_root;
+ case 0x502: return (unsigned long) ((AOFF_RBTree_t *) a1)->parent;
+ case 0x503: return (unsigned long) ((AOFF_RBTree_t *) a1)->left;
+ case 0x504: return (unsigned long) ((AOFF_RBTree_t *) a1)->right;
+ case 0x506: return (unsigned long) IS_BLACK((AOFF_RBTree_t *) a1);
+ case 0x508: return (unsigned long) 1; /* IS_AOFF */
+ case 0x509: return (unsigned long) ((AOFF_RBTree_t *) a1)->max_sz;
+ default: ASSERT(0); return ~((unsigned long) 0);
+ }
+}
+
+
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\
+ * Debug functions *
+\* */
+
+
+#ifdef HARD_DEBUG
+
+#define IS_LEFT_VISITED(FB) ((FB)->flags & LEFT_VISITED_FLG)
+#define IS_RIGHT_VISITED(FB) ((FB)->flags & RIGHT_VISITED_FLG)
+
+#define SET_LEFT_VISITED(FB) ((FB)->flags |= LEFT_VISITED_FLG)
+#define SET_RIGHT_VISITED(FB) ((FB)->flags |= RIGHT_VISITED_FLG)
+
+#define UNSET_LEFT_VISITED(FB) ((FB)->flags &= ~LEFT_VISITED_FLG)
+#define UNSET_RIGHT_VISITED(FB) ((FB)->flags &= ~RIGHT_VISITED_FLG)
+
+
+#if 0
+# define PRINT_TREE
+#else
+# undef PRINT_TREE
+#endif
+
+#ifdef PRINT_TREE
+static void print_tree(AOFF_RBTree_t*);
+#endif
+
+/*
+ * Checks that the ordering between parents and children is correct,
+ * and that the Red-Black Tree properties are satisfied. If size > 0,
+ * check_tree() returns the node that satisfies "address order first fit".
+ *
+ * The Red-Black Tree properties are:
+ * 1. Every node is either red or black.
+ * 2. Every leaf (NIL) is black.
+ * 3. If a node is red, then both its children are black.
+ * 4. Every simple path from a node to a descendant leaf
+ * contains the same number of black nodes.
+ *
+ * + own.max_size == MAX(own.size, left.max_size, right.max_size)
+ */
+
+static AOFF_RBTree_t *
+check_tree(AOFF_RBTree_t* root, Uint size)
+{
+ AOFF_RBTree_t *res = NULL;
+ Sint blacks;
+ Sint curr_blacks;
+ AOFF_RBTree_t *x;
+
+#ifdef PRINT_TREE
+ print_tree(root);
+#endif
+
+ if (!root)
+ return res;
+
+ x = root;
+ ASSERT(IS_BLACK(x));
+ ASSERT(!x->parent);
+ curr_blacks = 1;
+ blacks = -1;
+
+ while (x) {
+ if (!IS_LEFT_VISITED(x)) {
+ SET_LEFT_VISITED(x);
+ if (x->left) {
+ x = x->left;
+ if (IS_BLACK(x))
+ curr_blacks++;
+ continue;
+ }
+ else {
+ if (blacks < 0)
+ blacks = curr_blacks;
+ ASSERT(blacks == curr_blacks);
+ }
+ }
+
+ if (!IS_RIGHT_VISITED(x)) {
+ SET_RIGHT_VISITED(x);
+ if (x->right) {
+ x = x->right;
+ if (IS_BLACK(x))
+ curr_blacks++;
+ continue;
+ }
+ else {
+ if (blacks < 0)
+ blacks = curr_blacks;
+ ASSERT(blacks == curr_blacks);
+ }
+ }
+
+
+ if (IS_RED(x)) {
+ ASSERT(IS_BLACK(x->right));
+ ASSERT(IS_BLACK(x->left));
+ }
+
+ ASSERT(x->parent || x == root);
+
+ if (x->left) {
+ ASSERT(x->left->parent == x);
+ ASSERT(x->left < x);
+ ASSERT(x->left->max_sz <= x->max_sz);
+ }
+
+ if (x->right) {
+ ASSERT(x->right->parent == x);
+ ASSERT(x->right > x);
+ ASSERT(x->right->max_sz <= x->max_sz);
+ }
+ ASSERT(x->max_sz >= BLK_SZ(x));
+ ASSERT(x->max_sz == BLK_SZ(x)
+ || x->max_sz == (x->left ? x->left->max_sz : 0)
+ || x->max_sz == (x->right ? x->right->max_sz : 0));
+
+ if (size && BLK_SZ(x) >= size) {
+ if (!res || x < res) {
+ res = x;
+ }
+ }
+
+ UNSET_LEFT_VISITED(x);
+ UNSET_RIGHT_VISITED(x);
+ if (IS_BLACK(x))
+ curr_blacks--;
+ x = x->parent;
+
+ }
+
+ ASSERT(curr_blacks == 0);
+
+ UNSET_LEFT_VISITED(root);
+ UNSET_RIGHT_VISITED(root);
+
+ return res;
+
+}
+
+
+#ifdef PRINT_TREE
+#define INDENT_STEP 2
+
+#include <stdio.h>
+
+static void
+print_tree_aux(AOFF_RBTree_t *x, int indent)
+{
+ int i;
+
+ if (x) {
+ print_tree_aux(x->right, indent + INDENT_STEP);
+ for (i = 0; i < indent; i++) {
+ putc(' ', stderr);
+ }
+ fprintf(stderr, "%s: sz=%lu addr=0x%lx max_size=%lu\r\n",
+ IS_BLACK(x) ? "BLACK" : "RED",
+ BLK_SZ(x), (Uint)x, x->max_sz);
+ print_tree_aux(x->left, indent + INDENT_STEP);
+ }
+}
+
+
+static void
+print_tree(AOFF_RBTree_t* root)
+{
+ fprintf(stderr, " --- AOFF tree begin ---\r\n");
+ print_tree_aux(root, 0);
+ fprintf(stderr, " --- AOFF tree end ---\r\n");
+}
+
+#endif /* PRINT_TREE */
+
+#endif /* HARD_DEBUG */
+
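
The search in aoff_get_free_block() is where the max_sz augmentation pays off: since every node caches the largest free-block size in its subtree, the descent can commit to the left (lower-address) child whenever that subtree is known to hold a large enough block, and otherwise either take the current node or fall back to the right subtree. Below is a stand-alone sketch of the same descent over a simplified node type; the names are illustrative, not the ERTS structures.

/* Address-order first fit over a max_sz-augmented search tree:
 * returns the lowest-addressed free block of at least 'size'. */
typedef struct aoff_node {
    struct aoff_node *left, *right;  /* children, ordered by block address */
    unsigned long sz;                /* size of this free block */
    unsigned long max_sz;            /* largest sz anywhere in this subtree */
} aoff_node;

static aoff_node *aoff_search(aoff_node *x, unsigned long size)
{
    while (x) {
        if (x->left && x->left->max_sz >= size)
            x = x->left;       /* a fitting block exists at a lower address */
        else if (x->sz >= size)
            return x;          /* this is the lowest-addressed fit */
        else
            x = x->right;      /* only higher addresses can still fit */
    }
    return 0;                  /* no free block large enough */
}
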
diff --git a/erts/emulator/beam/erl_ao_firstfit_alloc.h b/erts/emulator/beam/erl_ao_firstfit_alloc.h
new file mode 100644
index 0000000000..0bf0ec8cee
--- /dev/null
+++ b/erts/emulator/beam/erl_ao_firstfit_alloc.h
@@ -0,0 +1,60 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2003-2009. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+
+#ifndef ERL_AO_FIRSTFIT_ALLOC__
+#define ERL_AO_FIRSTFIT_ALLOC__
+
+#include "erl_alloc_util.h"
+
+#define ERTS_ALC_AOFF_ALLOC_VSN_STR "0.9"
+
+typedef struct AOFFAllctr_t_ AOFFAllctr_t;
+
+typedef struct {
+ int dummy;
+} AOFFAllctrInit_t;
+
+#define ERTS_DEFAULT_AOFF_ALLCTR_INIT {0/*dummy*/}
+
+void erts_aoffalc_init(void);
+Allctr_t *erts_aoffalc_start(AOFFAllctr_t *, AOFFAllctrInit_t*, AllctrInit_t *);
+
+#endif /* #ifndef ERL_AO_FIRSTFIT_ALLOC__ */
+
+
+
+#if defined(GET_ERL_AOFF_ALLOC_IMPL) && !defined(ERL_AOFF_ALLOC_IMPL__)
+#define ERL_AOFF_ALLOC_IMPL__
+
+#define GET_ERL_ALLOC_UTIL_IMPL
+#include "erl_alloc_util.h"
+
+
+struct AOFFAllctr_t_ {
+ Allctr_t allctr; /* Has to be first! */
+
+ struct AOFF_RBTree_t_* mbc_root;
+ struct AOFF_RBTree_t_* sbmbc_root;
+};
+
+unsigned long erts_aoffalc_test(unsigned long, unsigned long, unsigned long);
+
+#endif /* #if defined(GET_ERL_AOFF_ALLOC_IMPL)
+ && !defined(ERL_AOFF_ALLOC_IMPL__) */
diff --git a/erts/emulator/beam/erl_bestfit_alloc.c b/erts/emulator/beam/erl_bestfit_alloc.c
index 3035e5df16..5e3032ddaa 100644
--- a/erts/emulator/beam/erl_bestfit_alloc.c
+++ b/erts/emulator/beam/erl_bestfit_alloc.c
@@ -84,24 +84,24 @@
#ifdef HARD_DEBUG
-static RBTree_t * check_tree(BFAllctr_t *, Uint);
+static RBTree_t * check_tree(RBTree_t *, int, Uint);
#endif
-static void tree_delete(Allctr_t *allctr, Block_t *del);
+static void tree_delete(Allctr_t *allctr, Block_t *del, Uint32 flags);
/* Prototypes of callback functions */
/* "address order best fit" specific callback functions */
static Block_t * aobf_get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void aobf_link_free_block (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+static void aobf_link_free_block (Allctr_t *, Block_t *, Uint32);
#define aobf_unlink_free_block tree_delete
/* "best fit" specific callback functions */
static Block_t * bf_get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void bf_link_free_block (Allctr_t *, Block_t *);
-static ERTS_INLINE void bf_unlink_free_block (Allctr_t *, Block_t *);
+ Block_t *, Uint, Uint32);
+static void bf_link_free_block (Allctr_t *, Block_t *, Uint32);
+static ERTS_INLINE void bf_unlink_free_block (Allctr_t *, Block_t *, Uint32);
static Eterm info_options (Allctr_t *, char *, int *,
@@ -303,7 +303,7 @@ replace(RBTree_t **root, RBTree_t *x, RBTree_t *y)
}
static void
-tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
+tree_insert_fixup(RBTree_t **root, RBTree_t *blk)
{
RBTree_t *x = blk, *y;
@@ -311,7 +311,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
* Rearrange the tree so that it satisfies the Red-Black Tree properties
*/
- RBT_ASSERT(x != bfallctr->root && IS_RED(x->parent));
+ RBT_ASSERT(x != *root && IS_RED(x->parent));
do {
/*
@@ -336,7 +336,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
if (x == x->parent->right) {
x = x->parent;
- left_rotate(&bfallctr->root, x);
+ left_rotate(root, x);
}
RBT_ASSERT(x == x->parent->parent->left->left);
@@ -347,7 +347,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
SET_BLACK(x->parent);
SET_RED(x->parent->parent);
- right_rotate(&bfallctr->root, x->parent->parent);
+ right_rotate(root, x->parent->parent);
RBT_ASSERT(x == x->parent->left);
RBT_ASSERT(IS_RED(x));
@@ -370,7 +370,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
if (x == x->parent->left) {
x = x->parent;
- right_rotate(&bfallctr->root, x);
+ right_rotate(root, x);
}
RBT_ASSERT(x == x->parent->parent->right->right);
@@ -381,7 +381,7 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
SET_BLACK(x->parent);
SET_RED(x->parent->parent);
- left_rotate(&bfallctr->root, x->parent->parent);
+ left_rotate(root, x->parent->parent);
RBT_ASSERT(x == x->parent->right);
RBT_ASSERT(IS_RED(x));
@@ -390,9 +390,9 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
break;
}
}
- } while (x != bfallctr->root && IS_RED(x->parent));
+ } while (x != *root && IS_RED(x->parent));
- SET_BLACK(bfallctr->root);
+ SET_BLACK(*root);
}
@@ -402,18 +402,22 @@ tree_insert_fixup(BFAllctr_t *bfallctr, RBTree_t *blk)
* callback function in the address order case.
*/
static void
-tree_delete(Allctr_t *allctr, Block_t *del)
+tree_delete(Allctr_t *allctr, Block_t *del, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
Uint spliced_is_black;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *x, *y, *z = (RBTree_t *) del;
RBTree_t null_x; /* null_x is used to get the fixup started when we
splice out a node without children. */
null_x.parent = NULL;
+
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(*root, bfallctr->address_order, 0);
#endif
/* Remove node from tree... */
@@ -440,8 +444,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
if (!y->parent) {
- RBT_ASSERT(bfallctr->root == y);
- bfallctr->root = x;
+ RBT_ASSERT(*root == y);
+ *root = x;
}
else if (y == y->parent->left)
y->parent->left = x;
@@ -451,7 +455,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
if (y != z) {
/* We spliced out the successor of z; replace z by the successor */
- replace(&bfallctr->root, z, y);
+ replace(root, z, y);
}
if (spliced_is_black) {
@@ -476,7 +480,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
SET_BLACK(y);
RBT_ASSERT(IS_BLACK(x->parent));
SET_RED(x->parent);
- left_rotate(&bfallctr->root, x->parent);
+ left_rotate(root, x->parent);
y = x->parent->right;
}
RBT_ASSERT(y);
@@ -489,7 +493,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
if (IS_BLACK(y->right)) {
SET_BLACK(y->left);
SET_RED(y);
- right_rotate(&bfallctr->root, y);
+ right_rotate(root, y);
y = x->parent->right;
}
RBT_ASSERT(y);
@@ -500,8 +504,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
RBT_ASSERT(y->right);
SET_BLACK(y->right);
- left_rotate(&bfallctr->root, x->parent);
- x = bfallctr->root;
+ left_rotate(root, x->parent);
+ x = *root;
break;
}
}
@@ -515,7 +519,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
SET_BLACK(y);
RBT_ASSERT(IS_BLACK(x->parent));
SET_RED(x->parent);
- right_rotate(&bfallctr->root, x->parent);
+ right_rotate(root, x->parent);
y = x->parent->left;
}
RBT_ASSERT(y);
@@ -528,7 +532,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
if (IS_BLACK(y->left)) {
SET_BLACK(y->right);
SET_RED(y);
- left_rotate(&bfallctr->root, y);
+ left_rotate(root, y);
y = x->parent->left;
}
RBT_ASSERT(y);
@@ -538,8 +542,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
}
RBT_ASSERT(y->left);
SET_BLACK(y->left);
- right_rotate(&bfallctr->root, x->parent);
- x = bfallctr->root;
+ right_rotate(root, x->parent);
+ x = *root;
break;
}
}
@@ -556,8 +560,8 @@ tree_delete(Allctr_t *allctr, Block_t *del)
RBT_ASSERT(!null_x.left);
RBT_ASSERT(!null_x.right);
}
- else if (bfallctr->root == &null_x) {
- bfallctr->root = NULL;
+ else if (*root == &null_x) {
+ *root = NULL;
RBT_ASSERT(!null_x.left);
RBT_ASSERT(!null_x.right);
}
@@ -567,7 +571,7 @@ tree_delete(Allctr_t *allctr, Block_t *del)
DESTROY_TREE_NODE(del);
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, bfallctr->address_order, 0);
#endif
}
@@ -577,23 +581,28 @@ tree_delete(Allctr_t *allctr, Block_t *del)
\* */
static void
-aobf_link_free_block(Allctr_t *allctr, Block_t *block)
+aobf_link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *blk = (RBTree_t *) block;
Uint blk_sz = BLK_SZ(blk);
+
+
blk->flags = 0;
blk->left = NULL;
blk->right = NULL;
- if (!bfallctr->root) {
+ if (!*root) {
blk->parent = NULL;
SET_BLACK(blk);
- bfallctr->root = blk;
+ *root = blk;
}
else {
- RBTree_t *x = bfallctr->root;
+ RBTree_t *x = *root;
while (1) {
Uint size;
@@ -623,28 +632,32 @@ aobf_link_free_block(Allctr_t *allctr, Block_t *block)
SET_RED(blk);
if (IS_RED(blk->parent))
- tree_insert_fixup(bfallctr, blk);
+ tree_insert_fixup(root, blk);
}
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, 1, 0);
#endif
}
#if 0 /* tree_delete() is directly used instead */
static void
-aobf_unlink_free_block(Allctr_t *allctr, Block_t *block)
+aobf_unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
- tree_delete(allctr, block);
+ tree_delete(allctr, block, flags);
}
#endif
static Block_t *
aobf_get_free_block(Allctr_t *allctr, Uint size,
- Block_t *cand_blk, Uint cand_size)
+ Block_t *cand_blk, Uint cand_size,
+ Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
- RBTree_t *x = bfallctr->root;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
+ RBTree_t *x = *root;
RBTree_t *blk = NULL;
Uint blk_sz;
@@ -665,7 +678,7 @@ aobf_get_free_block(Allctr_t *allctr, Uint size,
return NULL;
#ifdef HARD_DEBUG
- ASSERT(blk == check_tree(bfallctr, size));
+ ASSERT(blk == check_tree(root, 1, size));
#endif
if (cand_blk) {
@@ -676,7 +689,7 @@ aobf_get_free_block(Allctr_t *allctr, Uint size,
return NULL; /* cand_blk was better */
}
- aobf_unlink_free_block(allctr, (Block_t *) blk);
+ aobf_unlink_free_block(allctr, (Block_t *) blk, flags);
return (Block_t *) blk;
}
@@ -687,9 +700,12 @@ aobf_get_free_block(Allctr_t *allctr, Uint size,
\* */
static void
-bf_link_free_block(Allctr_t *allctr, Block_t *block)
+bf_link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *blk = (RBTree_t *) block;
Uint blk_sz = BLK_SZ(blk);
@@ -700,13 +716,13 @@ bf_link_free_block(Allctr_t *allctr, Block_t *block)
blk->left = NULL;
blk->right = NULL;
- if (!bfallctr->root) {
+ if (!*root) {
blk->parent = NULL;
SET_BLACK(blk);
- bfallctr->root = blk;
+ *root = blk;
}
else {
- RBTree_t *x = bfallctr->root;
+ RBTree_t *x = *root;
while (1) {
Uint size;
@@ -745,7 +761,7 @@ bf_link_free_block(Allctr_t *allctr, Block_t *block)
SET_RED(blk);
if (IS_RED(blk->parent))
- tree_insert_fixup(bfallctr, blk);
+ tree_insert_fixup(root, blk);
}
@@ -753,14 +769,17 @@ bf_link_free_block(Allctr_t *allctr, Block_t *block)
LIST_NEXT(blk) = NULL;
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, 0, 0);
#endif
}
static ERTS_INLINE void
-bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
+bf_unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
RBTree_t *x = (RBTree_t *) block;
if (IS_LIST_ELEM(x)) {
@@ -778,9 +797,9 @@ bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
ASSERT(IS_LIST_ELEM(LIST_NEXT(x)));
#ifdef HARD_DEBUG
- check_tree(bfallctr, 0);
+ check_tree(root, 0, 0);
#endif
- replace(&bfallctr->root, x, LIST_NEXT(x));
+ replace(root, x, LIST_NEXT(x));
#ifdef HARD_DEBUG
check_tree(bfallctr, 0);
@@ -788,7 +807,7 @@ bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
}
else {
/* Remove from tree */
- tree_delete(allctr, block);
+ tree_delete(allctr, block, flags);
}
DESTROY_LIST_ELEM(x);
@@ -797,10 +816,14 @@ bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
static Block_t *
bf_get_free_block(Allctr_t *allctr, Uint size,
- Block_t *cand_blk, Uint cand_size)
+ Block_t *cand_blk, Uint cand_size,
+ Uint32 flags)
{
BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
- RBTree_t *x = bfallctr->root;
+ RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC)
+ ? &bfallctr->sbmbc_root
+ : &bfallctr->mbc_root);
+ RBTree_t *x = *root;
RBTree_t *blk = NULL;
Uint blk_sz;
@@ -827,7 +850,7 @@ bf_get_free_block(Allctr_t *allctr, Uint size,
#ifdef HARD_DEBUG
{
- RBTree_t *ct_blk = check_tree(bfallctr, size);
+ RBTree_t *ct_blk = check_tree(root, 0, size);
ASSERT(BLK_SZ(ct_blk) == BLK_SZ(blk));
}
#endif
@@ -839,7 +862,7 @@ bf_get_free_block(Allctr_t *allctr, Uint size,
the tree node */
blk = LIST_NEXT(blk) ? LIST_NEXT(blk) : blk;
- bf_unlink_free_block(allctr, (Block_t *) blk);
+ bf_unlink_free_block(allctr, (Block_t *) blk, flags);
return (Block_t *) blk;
}
@@ -949,13 +972,14 @@ erts_bfalc_test(unsigned long op, unsigned long a1, unsigned long a2)
{
switch (op) {
case 0x200: return (unsigned long) ((BFAllctr_t *) a1)->address_order;
- case 0x201: return (unsigned long) ((BFAllctr_t *) a1)->root;
+ case 0x201: return (unsigned long) ((BFAllctr_t *) a1)->mbc_root;
case 0x202: return (unsigned long) ((RBTree_t *) a1)->parent;
case 0x203: return (unsigned long) ((RBTree_t *) a1)->left;
case 0x204: return (unsigned long) ((RBTree_t *) a1)->right;
case 0x205: return (unsigned long) ((RBTreeList_t *) a1)->next;
case 0x206: return (unsigned long) IS_BLACK((RBTree_t *) a1);
case 0x207: return (unsigned long) IS_TREE_NODE((RBTree_t *) a1);
+ case 0x208: return (unsigned long) 0; /* IS_AOFF */
default: ASSERT(0); return ~((unsigned long) 0);
}
}
@@ -985,7 +1009,7 @@ erts_bfalc_test(unsigned long op, unsigned long a1, unsigned long a2)
#endif
#ifdef PRINT_TREE
-static void print_tree(BFAllctr_t *);
+static void print_tree(RBTree_t *, int);
#endif
/*
@@ -1003,7 +1027,7 @@ static void print_tree(BFAllctr_t *);
*/
static RBTree_t *
-check_tree(BFAllctr_t *bfallctr, Uint size)
+check_tree(RBTree_t *root, int ao, Uint size)
{
RBTree_t *res = NULL;
Sint blacks;
@@ -1011,13 +1035,13 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
RBTree_t *x;
#ifdef PRINT_TREE
- print_tree(bfallctr);
+ print_tree(root, ao);
#endif
- if (!bfallctr->root)
+ if (!root)
return res;
- x = bfallctr->root;
+ x = root;
ASSERT(IS_BLACK(x));
ASSERT(!x->parent);
curr_blacks = 1;
@@ -1060,11 +1084,11 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
ASSERT(IS_BLACK(x->left));
}
- ASSERT(x->parent || x == bfallctr->root);
+ ASSERT(x->parent || x == root);
if (x->left) {
ASSERT(x->left->parent == x);
- if (bfallctr->address_order) {
+ if (ao) {
ASSERT(BLK_SZ(x->left) < BLK_SZ(x)
|| (BLK_SZ(x->left) == BLK_SZ(x) && x->left < x));
}
@@ -1076,7 +1100,7 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
if (x->right) {
ASSERT(x->right->parent == x);
- if (bfallctr->address_order) {
+ if (ao) {
ASSERT(BLK_SZ(x->right) > BLK_SZ(x)
|| (BLK_SZ(x->right) == BLK_SZ(x) && x->right > x));
}
@@ -1087,7 +1111,7 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
}
if (size && BLK_SZ(x) >= size) {
- if (bfallctr->address_order) {
+ if (ao) {
if (!res
|| BLK_SZ(x) < BLK_SZ(res)
|| (BLK_SZ(x) == BLK_SZ(res) && x < res))
@@ -1109,8 +1133,8 @@ check_tree(BFAllctr_t *bfallctr, Uint size)
ASSERT(curr_blacks == 0);
- UNSET_LEFT_VISITED(bfallctr->root);
- UNSET_RIGHT_VISITED(bfallctr->root);
+ UNSET_LEFT_VISITED(root);
+ UNSET_RIGHT_VISITED(root);
return res;
@@ -1148,11 +1172,11 @@ print_tree_aux(RBTree_t *x, int indent)
static void
-print_tree(BFAllctr_t *bfallctr)
+print_tree(RBTree_t *root, int ao)
{
- char *type = bfallctr->address_order ? "Size-Adress" : "Size";
+ char *type = ao ? "Size-Address" : "Size";
fprintf(stderr, " --- %s tree begin ---\r\n", type);
- print_tree_aux(bfallctr->root, 0);
+ print_tree_aux(root, 0);
fprintf(stderr, " --- %s tree end ---\r\n", type);
}
diff --git a/erts/emulator/beam/erl_bestfit_alloc.h b/erts/emulator/beam/erl_bestfit_alloc.h
index cb35e21e57..faa2d9742e 100644
--- a/erts/emulator/beam/erl_bestfit_alloc.h
+++ b/erts/emulator/beam/erl_bestfit_alloc.h
@@ -54,7 +54,8 @@ typedef struct RBTree_t_ RBTree_t;
struct BFAllctr_t_ {
Allctr_t allctr; /* Has to be first! */
- RBTree_t * root;
+ RBTree_t * mbc_root;
+ RBTree_t * sbmbc_root;
int address_order;
};
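
The header change above pairs with the erl_bestfit_alloc.c hunks: the allocator now keeps two red-black trees, one rooted at mbc_root for ordinary multiblock carriers and one at sbmbc_root for small-block multiblock carriers, and each callback picks its root from the new flags argument. A minimal standalone sketch of that selection pattern follows; the types and names are simplified stand-ins, not the erts ones.

/* pick_root.c - flag-driven choice between two free-block trees */
#include <stddef.h>

typedef struct rbnode { struct rbnode *parent, *left, *right; } RBNode;

#define FLG_SBMBC (1u << 0)              /* block lives in a small-block MBC */

typedef struct {
    RBNode *mbc_root;                    /* free blocks in ordinary carriers */
    RBNode *sbmbc_root;                  /* free blocks in small-block carriers */
} BFState;

/* Resolve the flags to the matching root once per operation and return a
 * pointer to it, so insert/delete can update the chosen tree in place. */
static RBNode **pick_root(BFState *st, unsigned flags)
{
    return (flags & FLG_SBMBC) ? &st->sbmbc_root : &st->mbc_root;
}

static void link_free_block(BFState *st, RBNode *blk, unsigned flags)
{
    RBNode **root = pick_root(st, flags);
    if (!*root) {                        /* empty tree: the block becomes the root */
        blk->parent = NULL;
        *root = blk;
    }
    /* ...otherwise descend from *root and rebalance, as in the real code... */
}

int main(void)
{
    BFState st = { NULL, NULL };
    RBNode blk = { NULL, NULL, NULL };
    link_free_block(&st, &blk, FLG_SBMBC);
    return st.sbmbc_root != &blk;        /* 0 on success */
}
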
diff --git a/erts/emulator/beam/erl_bits.h b/erts/emulator/beam/erl_bits.h
index 0f67733fa4..3309ea706b 100644
--- a/erts/emulator/beam/erl_bits.h
+++ b/erts/emulator/beam/erl_bits.h
@@ -150,7 +150,7 @@ void erts_bits_destroy_state(ERL_BITS_PROTO_0);
* NBYTES(x) returns the number of bytes needed to store x bits.
*/
-#define NBYTES(x) (((x) + 7) >> 3)
+#define NBYTES(x) (((Uint64)(x) + (Uint64) 7) >> 3)
#define BYTE_OFFSET(ofs) ((Uint) (ofs) >> 3)
#define BIT_OFFSET(ofs) ((ofs) & 7)
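
The NBYTES change widens the bits-to-bytes rounding to 64 bits before adding 7, so a 32-bit bit count close to 2^32 no longer wraps to a tiny byte count; the new system_limit cases in bs_construct_SUITE further down exercise exactly those sizes. A small self-contained illustration of the difference, assuming a 32-bit input:

/* nbytes_overflow.c - why the cast to a 64-bit type matters */
#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

#define NBYTES_NARROW(x) (((uint32_t)(x) + 7u) >> 3)            /* can wrap */
#define NBYTES_WIDE(x)   (((uint64_t)(x) + (uint64_t)7) >> 3)   /* safe for 32-bit input */

int main(void)
{
    uint32_t bits = 0xFFFFFFFFu;                  /* (1 bsl 32) - 1 bits */
    /* 0xFFFFFFFF + 7 wraps to 6 in 32 bits, so the narrow macro yields 0 bytes. */
    printf("narrow: %" PRIu32 "\n", NBYTES_NARROW(bits));
    /* Widened first, the sum is 4294967302 and the result is 536870912 bytes. */
    printf("wide:   %" PRIu64 "\n", NBYTES_WIDE(bits));
    return 0;
}
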
diff --git a/erts/emulator/beam/erl_db.c b/erts/emulator/beam/erl_db.c
index e0a6aa05c6..eb50f56502 100644
--- a/erts/emulator/beam/erl_db.c
+++ b/erts/emulator/beam/erl_db.c
@@ -338,13 +338,13 @@ static ERTS_INLINE void db_unlock(DbTable* tb, db_lock_kind_t kind)
ASSERT(tb != meta_pid_to_tab && tb != meta_pid_to_fixed_tab);
if (tb->common.type & DB_FINE_LOCKED) {
- if (tb->common.is_thread_safe) {
- ASSERT(kind == LCK_WRITE);
+ if (kind == LCK_WRITE) {
+ ASSERT(tb->common.is_thread_safe);
tb->common.is_thread_safe = 0;
erts_smp_rwmtx_rwunlock(&tb->common.rwlock);
}
else {
- ASSERT(kind != LCK_WRITE);
+ ASSERT(!tb->common.is_thread_safe);
erts_smp_rwmtx_runlock(&tb->common.rwlock);
}
}
@@ -543,9 +543,9 @@ static int remove_named_tab(DbTable *tb, int have_lock)
* We keep our increased refc over this op in order to
* prevent the table from disapearing.
*/
- erts_smp_rwmtx_rwunlock(&tb->common.rwlock);
+ db_unlock(tb, LCK_WRITE);
erts_smp_rwmtx_rwlock(rwlock);
- erts_smp_rwmtx_rwlock(&tb->common.rwlock);
+ db_lock(tb, LCK_WRITE);
}
#endif
@@ -1650,24 +1650,6 @@ BIF_RETTYPE ets_delete_1(BIF_ALIST_1)
tb->common.status &= ~(DB_PROTECTED|DB_PUBLIC|DB_PRIVATE);
tb->common.status |= DB_DELETE;
- mmtl = get_meta_main_tab_lock(tb->common.slot);
-#ifdef ERTS_SMP
- if (erts_smp_rwmtx_tryrwlock(mmtl) == EBUSY) {
- /*
- * We keep our increased refc over this op in order to
- * prevent the table from disapearing.
- */
- erts_smp_rwmtx_rwunlock(&tb->common.rwlock);
- erts_smp_rwmtx_rwlock(mmtl);
- erts_smp_rwmtx_rwlock(&tb->common.rwlock);
- }
-#endif
- /* We must keep the slot, to be found by db_proc_dead() if process dies */
- MARK_SLOT_DEAD(tb->common.slot);
- erts_smp_rwmtx_rwunlock(mmtl);
- if (is_atom(tb->common.id))
- remove_named_tab(tb, 0);
-
if (tb->common.owner != BIF_P->id) {
DeclareTmpHeap(meta_tuple,3,BIF_P);
@@ -1691,6 +1673,25 @@ BIF_RETTYPE ets_delete_1(BIF_ALIST_1)
db_meta_unlock(meta_pid_to_tab, LCK_WRITE_REC);
UnUseTmpHeap(3,BIF_P);
}
+
+ mmtl = get_meta_main_tab_lock(tb->common.slot);
+#ifdef ERTS_SMP
+ if (erts_smp_rwmtx_tryrwlock(mmtl) == EBUSY) {
+ /*
+ * We keep our increased refc over this op in order to
+ * prevent the table from disappearing.
+ */
+ db_unlock(tb, LCK_WRITE);
+ erts_smp_rwmtx_rwlock(mmtl);
+ db_lock(tb, LCK_WRITE);
+ }
+#endif
+ /* We must keep the slot, to be found by db_proc_dead() if process dies */
+ MARK_SLOT_DEAD(tb->common.slot);
+ erts_smp_rwmtx_rwunlock(mmtl);
+ if (is_atom(tb->common.id))
+ remove_named_tab(tb, 0);
+
/* disable inheritance */
free_heir_data(tb);
tb->common.heir = am_none;
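
Both erl_db.c hunks swap the raw rwlock calls for the db_lock()/db_unlock() wrappers so is_thread_safe stays consistent, and the ets:delete/1 hunk additionally moves the meta-main-table lock handling to after the owner bookkeeping; the back-off shape stays the same: if the coarser lock cannot be taken with a trylock, release the table lock, take the coarser lock blocking, then re-take the table lock, with the held reference count keeping the table alive across the window. A standalone sketch of that back-off pattern with POSIX rwlocks follows; the names are illustrative, not the erts locking API.

/* lock_backoff.c - respecting lock order with trylock + back-off */
#include <pthread.h>

static pthread_rwlock_t meta_lock  = PTHREAD_RWLOCK_INITIALIZER; /* coarser, taken first */
static pthread_rwlock_t table_lock = PTHREAD_RWLOCK_INITIALIZER; /* finer */

/* Called while already holding table_lock for writing. */
static void lock_meta_keeping_order(void)
{
    if (pthread_rwlock_trywrlock(&meta_lock) != 0) {
        /* Blocking here would invert the documented order, so back off:
         * release the fine lock, take the coarse one blocking, re-acquire
         * the fine lock.  The caller must hold a reference that keeps the
         * protected object alive across this unlocked window. */
        pthread_rwlock_unlock(&table_lock);
        pthread_rwlock_wrlock(&meta_lock);
        pthread_rwlock_wrlock(&table_lock);
    }
}

int main(void)
{
    pthread_rwlock_wrlock(&table_lock);
    lock_meta_keeping_order();
    pthread_rwlock_unlock(&table_lock);
    pthread_rwlock_unlock(&meta_lock);
    return 0;
}
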
diff --git a/erts/emulator/beam/erl_db_util.c b/erts/emulator/beam/erl_db_util.c
index c3b074f782..e5be1f253a 100644
--- a/erts/emulator/beam/erl_db_util.c
+++ b/erts/emulator/beam/erl_db_util.c
@@ -1731,6 +1731,7 @@ Eterm db_prog_match(Process *c_p, Binary *bprog,
#define BEGIN_ATOMIC_TRACE(p) \
do { \
if (! atomic_trace) { \
+ erts_refc_inc(&bprog->refc, 2); \
erts_smp_proc_unlock((p), ERTS_PROC_LOCK_MAIN); \
erts_smp_block_system(0); \
atomic_trace = !0; \
@@ -1741,6 +1742,9 @@ Eterm db_prog_match(Process *c_p, Binary *bprog,
if (atomic_trace) { \
erts_smp_release_system(); \
erts_smp_proc_lock((p), ERTS_PROC_LOCK_MAIN); \
+ if (erts_refc_dectest(&bprog->refc, 0) == 0) {\
+ erts_bin_free(bprog); \
+ } \
atomic_trace = 0; \
} \
} while (0)
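
The added erts_refc_inc/erts_refc_dectest pair pins the match program binary for the duration of the blocked-system section, so another thread dropping the last reference while tracing is in progress cannot free it underneath db_prog_match; whichever side sees the count reach zero frees it. A minimal sketch of the idiom with C11 atomics; the type and function names are illustrative, not the erts refc API.

/* refc_pin.c - pinning an object across a blocking section */
#include <stdatomic.h>
#include <stdlib.h>

typedef struct {
    atomic_int refc;
    /* ...payload... */
} Prog;

static void prog_ref(Prog *p)            /* take an extra reference */
{
    atomic_fetch_add(&p->refc, 1);
}

static void prog_unref(Prog *p)          /* drop a reference, free on last */
{
    if (atomic_fetch_sub(&p->refc, 1) == 1)
        free(p);
}

int main(void)
{
    Prog *p = malloc(sizeof *p);
    atomic_init(&p->refc, 1);            /* owner's reference */

    prog_ref(p);                         /* like BEGIN_ATOMIC_TRACE: pin before unlocking */
    /* ...system blocked, traced work runs, other threads may unref... */
    prog_unref(p);                       /* like END_ATOMIC_TRACE: unpin, free if last */

    prog_unref(p);                       /* owner drops the last reference: freed here */
    return 0;
}
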
diff --git a/erts/emulator/beam/erl_goodfit_alloc.c b/erts/emulator/beam/erl_goodfit_alloc.c
index 76b206d76f..1cc508ac5a 100644
--- a/erts/emulator/beam/erl_goodfit_alloc.c
+++ b/erts/emulator/beam/erl_goodfit_alloc.c
@@ -163,10 +163,10 @@ BKT_MIN_SZ(GFAllctr_t *gfallctr, int ix)
/* Prototypes of callback functions */
static Block_t * get_free_block (Allctr_t *, Uint,
- Block_t *, Uint);
-static void link_free_block (Allctr_t *, Block_t *);
-static void unlink_free_block (Allctr_t *, Block_t *);
-static void update_last_aux_mbc (Allctr_t *, Carrier_t *);
+ Block_t *, Uint, Uint32);
+static void link_free_block (Allctr_t *, Block_t *, Uint32);
+static void unlink_free_block (Allctr_t *, Block_t *, Uint32);
+static void update_last_aux_mbc (Allctr_t *, Carrier_t *, Uint32);
static Eterm info_options (Allctr_t *, char *, int *,
void *, Uint **, Uint *);
static void init_atoms (void);
@@ -197,6 +197,8 @@ erts_gfalc_start(GFAllctr_t *gfallctr,
is a struct). */
Allctr_t *allctr = (Allctr_t *) gfallctr;
+ init->sbmbct = 0; /* Small mbc not yet supported by goodfit */
+
sys_memcpy((void *) gfallctr, (void *) &nulled_state, sizeof(GFAllctr_t));
allctr->mbc_header_size = sizeof(Carrier_t);
@@ -379,7 +381,7 @@ search_bucket(Allctr_t *allctr, int ix, Uint size)
static Block_t *
get_free_block(Allctr_t *allctr, Uint size,
- Block_t *cand_blk, Uint cand_size)
+ Block_t *cand_blk, Uint cand_size, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
int unsafe_bi, min_bi;
@@ -398,7 +400,7 @@ get_free_block(Allctr_t *allctr, Uint size,
if (blk) {
if (cand_blk && cand_size <= BLK_SZ(blk))
return NULL; /* cand_blk was better */
- unlink_free_block(allctr, blk);
+ unlink_free_block(allctr, blk, flags);
return blk;
}
if (min_bi < NO_OF_BKTS - 1) {
@@ -418,14 +420,14 @@ get_free_block(Allctr_t *allctr, Uint size,
ASSERT(blk);
if (cand_blk && cand_size <= BLK_SZ(blk))
return NULL; /* cand_blk was better */
- unlink_free_block(allctr, blk);
+ unlink_free_block(allctr, blk, flags);
return blk;
}
static void
-link_free_block(Allctr_t *allctr, Block_t *block)
+link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
GFFreeBlock_t *blk = (GFFreeBlock_t *) block;
@@ -446,7 +448,7 @@ link_free_block(Allctr_t *allctr, Block_t *block)
}
static void
-unlink_free_block(Allctr_t *allctr, Block_t *block)
+unlink_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
GFFreeBlock_t *blk = (GFFreeBlock_t *) block;
@@ -467,7 +469,7 @@ unlink_free_block(Allctr_t *allctr, Block_t *block)
}
static void
-update_last_aux_mbc(Allctr_t *allctr, Carrier_t *mbc)
+update_last_aux_mbc(Allctr_t *allctr, Carrier_t *mbc, Uint32 flags)
{
GFAllctr_t *gfallctr = (GFAllctr_t *) allctr;
diff --git a/erts/emulator/beam/erl_instrument.c b/erts/emulator/beam/erl_instrument.c
index f3f3c22933..04ea004ef7 100644
--- a/erts/emulator/beam/erl_instrument.c
+++ b/erts/emulator/beam/erl_instrument.c
@@ -1186,6 +1186,8 @@ erts_instr_init(int stat, int map_stat)
sys_memzero((void *) stats->n, sizeof(Stat_t)*(ERTS_ALC_N_MAX+1));
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
+ if (ERTS_IS_SBMBC_ALLOCATOR_NO__(i))
+ continue;
if (erts_allctrs_info[i].enabled)
stats->ap[i] = &stats->a[i];
else
@@ -1199,6 +1201,8 @@ erts_instr_init(int stat, int map_stat)
erts_instr_memory_map = 1;
erts_instr_stat = 1;
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
+ if (ERTS_IS_SBMBC_ALLOCATOR_NO__(i))
+ continue;
erts_allctrs[i].alloc = map_stat_alloc;
erts_allctrs[i].realloc = map_stat_realloc;
erts_allctrs[i].free = map_stat_free;
@@ -1209,6 +1213,8 @@ erts_instr_init(int stat, int map_stat)
else {
erts_instr_stat = 1;
for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) {
+ if (ERTS_IS_SBMBC_ALLOCATOR_NO__(i))
+ continue;
erts_allctrs[i].alloc = stat_alloc;
erts_allctrs[i].realloc = stat_realloc;
erts_allctrs[i].free = stat_free;
diff --git a/erts/emulator/beam/erl_lock_check.c b/erts/emulator/beam/erl_lock_check.c
index 9180508a49..587d82f2bb 100644
--- a/erts/emulator/beam/erl_lock_check.c
+++ b/erts/emulator/beam/erl_lock_check.c
@@ -153,6 +153,7 @@ static erts_lc_lock_order_t erts_lock_order[] = {
{ "instr", NULL },
{ "fix_alloc", "index" },
{ "alcu_allocator", "index" },
+ { "sbmbc_alloc", "index" },
{ "alcu_delayed_free", "index" },
{ "mseg", NULL },
#if HALFWORD_HEAP
diff --git a/erts/emulator/beam/erl_nif.c b/erts/emulator/beam/erl_nif.c
index 68421b4387..d9b1a8e89d 100644
--- a/erts/emulator/beam/erl_nif.c
+++ b/erts/emulator/beam/erl_nif.c
@@ -833,8 +833,11 @@ ERL_NIF_TERM enif_make_uint(ErlNifEnv* env, unsigned i)
ERL_NIF_TERM enif_make_long(ErlNifEnv* env, long i)
{
+ if (IS_SSMALL(i)) {
+ return make_small(i);
+ }
#if SIZEOF_LONG == ERTS_SIZEOF_ETERM
- return IS_SSMALL(i) ? make_small(i) : small_to_big(i, alloc_heap(env,2));
+ return small_to_big(i, alloc_heap(env,2));
#elif SIZEOF_LONG == 8
ensure_heap(env,3);
return erts_sint64_to_big(i, &env->hp);
@@ -843,8 +846,11 @@ ERL_NIF_TERM enif_make_long(ErlNifEnv* env, long i)
ERL_NIF_TERM enif_make_ulong(ErlNifEnv* env, unsigned long i)
{
+ if (IS_USMALL(0,i)) {
+ return make_small(i);
+ }
#if SIZEOF_LONG == ERTS_SIZEOF_ETERM
- return IS_USMALL(0,i) ? make_small(i) : uint_to_big(i,alloc_heap(env,2));
+ return uint_to_big(i,alloc_heap(env,2));
#elif SIZEOF_LONG == 8
ensure_heap(env,3);
return erts_uint64_to_big(i, &env->hp);
diff --git a/erts/emulator/beam/ops.tab b/erts/emulator/beam/ops.tab
index 8a5763b4bb..304ce22ef2 100644
--- a/erts/emulator/beam/ops.tab
+++ b/erts/emulator/beam/ops.tab
@@ -1236,7 +1236,7 @@ i_bs_init_heap I I I d
i_bs_init_heap_bin_heap I I I d
-bs_init_bits Fail Sz Words Regs Flags Dst | binary_too_big_bits(Sz) => system_limit Fail
+bs_init_bits Fail Sz=o Words Regs Flags Dst => system_limit Fail
bs_init_bits Fail Sz=u Words=u==0 Regs Flags Dst => i_bs_init_bits Sz Regs Dst
bs_init_bits Fail Sz=u Words Regs Flags Dst => i_bs_init_bits_heap Sz Words Regs Dst
diff --git a/erts/emulator/drivers/common/inet_drv.c b/erts/emulator/drivers/common/inet_drv.c
index 40c4a0df08..ebc4469a23 100644
--- a/erts/emulator/drivers/common/inet_drv.c
+++ b/erts/emulator/drivers/common/inet_drv.c
@@ -3709,6 +3709,8 @@ static int inet_ctl_fdopen(inet_descriptor* desc, int domain, int type,
/* check that it is a socket and that the socket is bound */
if (IS_SOCKET_ERROR(sock_name(s, (struct sockaddr*) &name, &sz)))
return ctl_error(sock_errno(), rbuf, rsize);
+ if (name.sa.sa_family != domain)
+ return ctl_error(EINVAL, rbuf, rsize);
desc->s = s;
if ((desc->event = sock_create_event(desc)) == INVALID_EVENT)
return ctl_error(sock_errno(), rbuf, rsize);
@@ -9739,7 +9741,7 @@ static int packet_inet_ctl(ErlDrvData e, unsigned int cmd, char* buf, int len,
if (desc->active || (len != 8))
return ctl_error(EINVAL, rbuf, rsize);
timeout = get_int32(buf);
- /* The 2nd arg, Length(4), is ignored for both UDP ans SCTP protocols,
+ /* The 2nd arg, Length(4), is ignored for both UDP and SCTP protocols,
since they are msg-oriented. */
if (enq_async(desc, tbuf, PACKET_REQ_RECV) < 0)
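
The inet_ctl_fdopen hunk rejects a passed-in descriptor whose actual address family, as reported by the sock_name() call a few lines above, differs from the requested domain. A standalone sketch of the same validation using plain POSIX sockets; the helper name is made up for the example.

/* fd_family_check.c - only accept an fd of the expected address family */
#include <sys/socket.h>
#include <netinet/in.h>
#include <errno.h>
#include <unistd.h>
#include <stdio.h>

static int check_fd_domain(int fd, int expected_family)
{
    struct sockaddr_storage name;
    socklen_t len = sizeof(name);

    if (getsockname(fd, (struct sockaddr *)&name, &len) < 0)
        return -errno;                   /* not a socket at all */
    if (name.ss_family != expected_family)
        return -EINVAL;                  /* e.g. an AF_INET6 fd handed to an inet port */
    return 0;
}

int main(void)
{
    int fd = socket(AF_INET, SOCK_STREAM, 0);
    struct sockaddr_in a = { 0 };
    a.sin_family = AF_INET;              /* bind to an ephemeral port */
    bind(fd, (struct sockaddr *)&a, sizeof(a));

    printf("AF_INET  check: %d\n", check_fd_domain(fd, AF_INET));   /* 0 */
    printf("AF_INET6 check: %d\n", check_fd_domain(fd, AF_INET6));  /* -EINVAL */
    close(fd);
    return 0;
}
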
diff --git a/erts/emulator/hipe/hipe_mode_switch.c b/erts/emulator/hipe/hipe_mode_switch.c
index 16f8fb1347..e3e8367b62 100644
--- a/erts/emulator/hipe/hipe_mode_switch.c
+++ b/erts/emulator/hipe/hipe_mode_switch.c
@@ -346,7 +346,12 @@ Process *hipe_mode_switch(Process *p, unsigned cmd, Eterm reg[])
p->arity = callee_arity;
}
- /* If process is in P_WAITING state, we schedule the next process */
+ /* Schedule next process if current process was hibernated or is waiting
+ for messages */
+ if (p->flags & F_HIBERNATE_SCHED) {
+ p->flags &= ~F_HIBERNATE_SCHED;
+ goto do_schedule;
+ }
if (p->status == P_WAITING) {
goto do_schedule;
}
diff --git a/erts/emulator/test/alloc_SUITE_data/allocator_test.h b/erts/emulator/test/alloc_SUITE_data/allocator_test.h
index b869a4079c..8b34375980 100644
--- a/erts/emulator/test/alloc_SUITE_data/allocator_test.h
+++ b/erts/emulator/test/alloc_SUITE_data/allocator_test.h
@@ -82,15 +82,17 @@ typedef void* erts_cond;
#define NO_OF_BKTS ((Ulong) ALC_TEST0(0x102))
#define FIND_BKT(A, I) ((int) ALC_TEST2(0x103, (A), (I)))
-/* From erl_bestfit_alloc.c */
-#define IS_AOBF(A) ((Ulong) ALC_TEST1(0x200, (A)))
-#define RBT_ROOT(A) ((RBT_t *) ALC_TEST1(0x201, (A)))
-#define RBT_PARENT(T) ((RBT_t *) ALC_TEST1(0x202, (T)))
-#define RBT_LEFT(T) ((RBT_t *) ALC_TEST1(0x203, (T)))
-#define RBT_RIGHT(T) ((RBT_t *) ALC_TEST1(0x204, (T)))
-#define RBT_NEXT(T) ((RBTL_t *) ALC_TEST1(0x205, (T)))
-#define RBT_IS_BLACK(T) ((Ulong) ALC_TEST1(0x206, (T)))
-#define RBT_IS_TREE(T) ((Ulong) ALC_TEST1(0x207, (T)))
+/* From erl_bestfit_alloc.c and erl_ao_firstfit_alloc.c */
+#define IS_AOBF(A) ((Ulong) ALC_TEST1(RBT_OP(0), (A)))
+#define RBT_ROOT(A) ((RBT_t *) ALC_TEST1(RBT_OP(1), (A)))
+#define RBT_PARENT(T) ((RBT_t *) ALC_TEST1(RBT_OP(2), (T)))
+#define RBT_LEFT(T) ((RBT_t *) ALC_TEST1(RBT_OP(3), (T)))
+#define RBT_RIGHT(T) ((RBT_t *) ALC_TEST1(RBT_OP(4), (T)))
+#define RBT_NEXT(T) ((RBTL_t *) ALC_TEST1(RBT_OP(5), (T)))
+#define RBT_IS_BLACK(T) ((Ulong) ALC_TEST1(RBT_OP(6), (T)))
+#define RBT_IS_TREE(T) ((Ulong) ALC_TEST1(RBT_OP(7), (T)))
+#define IS_AOFF(A) ((Ulong) ALC_TEST1(RBT_OP(8), (A)))
+#define RBT_MAX_SZ(T) ((Ulong) ALC_TEST1(RBT_OP(9), (T)))
/* From erl_mseg.c */
#define HAVE_MSEG() ((int) ALC_TEST0(0x400))
diff --git a/erts/emulator/test/alloc_SUITE_data/coalesce.c b/erts/emulator/test/alloc_SUITE_data/coalesce.c
index c84da97d35..6f35d3279b 100644
--- a/erts/emulator/test/alloc_SUITE_data/coalesce.c
+++ b/erts/emulator/test/alloc_SUITE_data/coalesce.c
@@ -267,7 +267,7 @@ void
testcase_run(TestCaseState_t *tcs)
{
char *argv_org[] = {"-tmmbcs1024", "-tsbct2048", "-trmbcmt100", "-tas", NULL, NULL};
- char *alg[] = {"af", "gf", "bf", "aobf", NULL};
+ char *alg[] = {"af", "gf", "bf", "aobf", "aoff", NULL};
int i;
for (i = 0; alg[i]; i++) {
diff --git a/erts/emulator/test/alloc_SUITE_data/rbtree.c b/erts/emulator/test/alloc_SUITE_data/rbtree.c
index c97e0aac1a..4e7f821baf 100644
--- a/erts/emulator/test/alloc_SUITE_data/rbtree.c
+++ b/erts/emulator/test/alloc_SUITE_data/rbtree.c
@@ -34,6 +34,14 @@ typedef struct {
#define PRINT_TREE
#endif
+/* Ugly hack to steer the test code towards the right allocator */
+#define RBT_OP(CMD) (current_rbt_type_op_base + (CMD))
+static enum {
+ BESTFIT_OP_BASE = 0x200,
+ AO_FIRSTFIT_OP_BASE = 0x500
+}current_rbt_type_op_base;
+
+
#ifdef PRINT_TREE
#define INDENT_STEP 5
@@ -65,12 +73,11 @@ print_tree_aux(TestCaseState_t *tcs, RBT_t *x, int indent)
static void
-print_tree(TestCaseState_t *tcs, RBT_t *root, int aobf)
+print_tree(TestCaseState_t *tcs, RBT_t *root)
{
- char *type = aobf ? "Size-Adress" : "Size";
- testcase_printf(tcs, " --- %s tree begin ---\r\n", type);
+ testcase_printf(tcs, " --- Tree begin ---\r\n");
print_tree_aux(tcs, root, 0);
- testcase_printf(tcs, " --- %s tree end ---\r\n", type);
+ testcase_printf(tcs, " --- Tree end ---\r\n");
}
#endif
@@ -78,7 +85,8 @@ print_tree(TestCaseState_t *tcs, RBT_t *root, int aobf)
static RBT_t *
check_tree(TestCaseState_t *tcs, Allctr_t *alc, Ulong size)
{
- int i, max_i, address_order;
+ enum { BF, AOBF, AOFF }type;
+ int i, max_i;
char stk[128];
RBT_t *root, *x, *y, *res;
Ulong x_sz, y_sz, is_x_black;
@@ -86,11 +94,14 @@ check_tree(TestCaseState_t *tcs, Allctr_t *alc, Ulong size)
res = NULL;
- address_order = IS_AOBF(alc);
+ if (IS_AOBF(alc)) type = AOBF;
+ else if (IS_AOFF(alc)) type = AOFF;
+ else type = BF;
+
root = RBT_ROOT(alc);
#ifdef PRINT_TREE
- print_tree(tcs, root, address_order);
+ print_tree(tcs, root);
#endif
max_i = i = -1;
@@ -165,12 +176,18 @@ check_tree(TestCaseState_t *tcs, Allctr_t *alc, Ulong size)
if (y) {
y_sz = BLK_SZ(y);
ASSERT(tcs, RBT_PARENT(y) == x);
- if (address_order) {
+ switch (type) {
+ case AOBF:
ASSERT(tcs, y_sz < x_sz || (y_sz == x_sz && y < x));
- }
- else {
+ break;
+ case BF:
ASSERT(tcs, RBT_IS_TREE(y));
ASSERT(tcs, y_sz < x_sz);
+ break;
+ case AOFF:
+ ASSERT(tcs, y < x);
+ ASSERT(tcs, RBT_MAX_SZ(y) <= RBT_MAX_SZ(x));
+ break;
}
}
@@ -178,16 +195,22 @@ check_tree(TestCaseState_t *tcs, Allctr_t *alc, Ulong size)
if (y) {
y_sz = BLK_SZ(y);
ASSERT(tcs, RBT_PARENT(y) == x);
- if (address_order) {
+ switch (type) {
+ case AOBF:
ASSERT(tcs, y_sz > x_sz || (y_sz == x_sz && y > x));
- }
- else {
+ break;
+ case BF:
ASSERT(tcs, RBT_IS_TREE(y));
ASSERT(tcs, y_sz > x_sz);
+ break;
+ case AOFF:
+ ASSERT(tcs, y > x);
+ ASSERT(tcs, RBT_MAX_SZ(y) <= RBT_MAX_SZ(x));
+ break;
}
}
- if (!address_order) {
+ if (type == BF) {
Ulong l_sz;
RBTL_t *l = RBT_NEXT(x);
for (l = RBT_NEXT(x); l; l = RBT_NEXT(l)) {
@@ -202,13 +225,20 @@ check_tree(TestCaseState_t *tcs, Allctr_t *alc, Ulong size)
res = x;
else {
y_sz = BLK_SZ(res);
- if (address_order) {
+ switch (type) {
+ case AOBF:
if (x_sz < y_sz || (x_sz == y_sz && x < res))
res = x;
- }
- else {
- if (!res || x_sz < y_sz)
+ break;
+ case BF:
+ if (x_sz < y_sz)
res = x;
+ break;
+ case AOFF:
+ if (x < res) {
+ res = x;
+ }
+ break;
}
}
}
@@ -257,7 +287,7 @@ static void
test_it(TestCaseState_t *tcs)
{
int i;
- Allctr_t a = ((rbtree_test_data *) tcs->extra)->allocator;
+ Allctr_t* a = ((rbtree_test_data *) tcs->extra)->allocator;
void **blk = ((rbtree_test_data *) tcs->extra)->blk;
void **fence = ((rbtree_test_data *) tcs->extra)->fence;
Ulong min_blk_sz;
@@ -338,6 +368,7 @@ testcase_run(TestCaseState_t *tcs)
{
char *argv1[] = {"-tasbf", NULL};
char *argv2[] = {"-tasaobf", NULL};
+ char *argv3[] = {"-tasaoff", NULL};
Allctr_t *a;
rbtree_test_data *td;
@@ -355,6 +386,7 @@ testcase_run(TestCaseState_t *tcs)
testcase_printf(tcs, "Starting test of best fit...\n");
+ current_rbt_type_op_base = BESTFIT_OP_BASE;
td->allocator = a = START_ALC("rbtree_bf_", 0, argv1);
ASSERT(tcs, a);
@@ -371,6 +403,7 @@ testcase_run(TestCaseState_t *tcs)
testcase_printf(tcs, "Starting test of address order best fit...\n");
+ current_rbt_type_op_base = BESTFIT_OP_BASE;
td->allocator = a = START_ALC("rbtree_aobf_", 0, argv2);
ASSERT(tcs, a);
@@ -383,4 +416,19 @@ testcase_run(TestCaseState_t *tcs)
testcase_printf(tcs, "Address order best fit test succeeded!\n");
+ /* Address order first fit... */
+
+ testcase_printf(tcs, "Starting test of address order first fit...\n");
+
+ current_rbt_type_op_base = AO_FIRSTFIT_OP_BASE;
+ td->allocator = a = START_ALC("rbtree_aoff_", 0, argv3);
+
+ ASSERT(tcs, a);
+
+ test_it(tcs);
+
+ STOP_ALC(a);
+ td->allocator = NULL;
+
+ testcase_printf(tcs, "Address order first fit test succeeded!\n");
}
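
For the new address-order-first-fit (AOFF) case the test checks two invariants per edge: children are ordered by node address rather than by block size, and the cached maximum block size never increases when walking down the tree. A tiny standalone sketch of just those checks, with illustrative node fields rather than the real RBT accessors.

/* aoff_invariants.c - the two AOFF per-edge checks in isolation */
#include <assert.h>
#include <stddef.h>

typedef struct aoff_node {
    struct aoff_node *left, *right;
    unsigned long blk_sz;                /* size of this free block */
    unsigned long max_sz;                /* largest blk_sz in this subtree */
} AoffNode;

static void check_aoff(const AoffNode *x)
{
    if (!x)
        return;
    assert(x->max_sz >= x->blk_sz);
    if (x->left) {
        assert(x->left < x);                    /* address order */
        assert(x->left->max_sz <= x->max_sz);   /* cached max shrinks downwards */
        check_aoff(x->left);
    }
    if (x->right) {
        assert(x->right > x);
        assert(x->right->max_sz <= x->max_sz);
        check_aoff(x->right);
    }
}

int main(void)
{
    AoffNode n[3] = { {0}, {0}, {0} };   /* array order gives ascending addresses */
    n[1].left = &n[0];  n[1].right = &n[2];
    n[0].blk_sz = n[0].max_sz = 8;
    n[2].blk_sz = n[2].max_sz = 32;
    n[1].blk_sz = 16;   n[1].max_sz = 32;
    check_aoff(&n[1]);
    return 0;
}
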
diff --git a/erts/emulator/test/bs_construct_SUITE.erl b/erts/emulator/test/bs_construct_SUITE.erl
index 1959803385..7fdf36711b 100644
--- a/erts/emulator/test/bs_construct_SUITE.erl
+++ b/erts/emulator/test/bs_construct_SUITE.erl
@@ -553,6 +553,11 @@ huge_float_check({'EXIT',{badarg,_}}) -> ok.
huge_binary(Config) when is_list(Config) ->
?line 16777216 = size(<<0:(id(1 bsl 26)),(-1):(id(1 bsl 26))>>),
+ ?line garbage_collect(),
+ ?line id(<<0:((1 bsl 32)-1)>>),
+ ?line garbage_collect(),
+ ?line id(<<0:(id((1 bsl 32)-1))>>),
+ ?line garbage_collect(),
ok.
system_limit(Config) when is_list(Config) ->
@@ -565,6 +570,10 @@ system_limit(Config) when is_list(Config) ->
?line {'EXIT',{system_limit,_}} =
(catch <<(id(<<>>))/binary,0:(id(1 bsl 100))>>),
+ %% Would fail to load.
+ ?line {'EXIT',{system_limit,_}} = (catch <<0:(1 bsl 67)>>),
+ ?line {'EXIT',{system_limit,_}} = (catch <<0:((1 bsl 64)+1)>>),
+
case WordSize of
4 ->
system_limit_32();
@@ -581,6 +590,14 @@ system_limit_32() ->
?line {'EXIT',{system_limit,_}} = (catch <<0:(id(8)),42:536870912/unit:8>>),
?line {'EXIT',{system_limit,_}} =
(catch <<0:(id(8)),42:(id(536870912))/unit:8>>),
+
+ %% The size would be silently truncated, resulting in a crash.
+ ?line {'EXIT',{system_limit,_}} = (catch <<0:(1 bsl 35)>>),
+ ?line {'EXIT',{system_limit,_}} = (catch <<0:((1 bsl 32)+1)>>),
+
+ %% Would fail to load.
+ ?line {'EXIT',{system_limit,_}} = (catch <<0:(1 bsl 43)>>),
+ ?line {'EXIT',{system_limit,_}} = (catch <<0:((1 bsl 40)+1)>>),
ok.
badarg(Config) when is_list(Config) ->
diff --git a/erts/emulator/test/fun_SUITE.erl b/erts/emulator/test/fun_SUITE.erl
index 7795efe57e..559e540016 100644
--- a/erts/emulator/test/fun_SUITE.erl
+++ b/erts/emulator/test/fun_SUITE.erl
@@ -647,17 +647,11 @@ refc_dist_1() ->
%% Fun is passed in an exit signal. Wait until it is gone.
?line wait_until(fun () -> 4 =/= fun_refc(F2) end),
?line 3 = fun_refc(F2),
- erts_debug:set_internal_state(available_internal_state, true),
- ?line F_refc = case erts_debug:get_internal_state(force_heap_frags) of
- false -> 3;
- true -> 2 % GC after bif already decreased it
- end,
- ?line F_refc = fun_refc(F),
- erts_debug:set_internal_state(available_internal_state, false),
+ ?line true = erlang:garbage_collect(),
+ ?line 2 = fun_refc(F),
refc_dist_send(Node, F).
refc_dist_send(Node, F) ->
- ?line true = erlang:garbage_collect(),
?line Pid = spawn_link(Node,
fun() -> receive
{To,Fun} when is_function(Fun) ->
diff --git a/erts/emulator/test/hibernate_SUITE.erl b/erts/emulator/test/hibernate_SUITE.erl
index 203fa6b48e..82a0aad189 100644
--- a/erts/emulator/test/hibernate_SUITE.erl
+++ b/erts/emulator/test/hibernate_SUITE.erl
@@ -25,16 +25,16 @@
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
basic/1,dynamic_call/1,min_heap_size/1,bad_args/1,
- messages_in_queue/1,undefined_mfa/1, no_heap/1]).
+ messages_in_queue/1,undefined_mfa/1,no_heap/1,wake_up_and_bif_trap/1]).
%% Used by test cases.
--export([basic_hibernator/1,dynamic_call_hibernator/2,messages_in_queue_restart/2, no_heap_loop/0]).
+-export([basic_hibernator/1,dynamic_call_hibernator/2,messages_in_queue_restart/2, no_heap_loop/0,characters_to_list_trap/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[basic, dynamic_call, min_heap_size, bad_args, messages_in_queue,
- undefined_mfa, no_heap].
+ undefined_mfa, no_heap, wake_up_and_bif_trap].
groups() ->
[].
@@ -384,6 +384,31 @@ clean_dict() ->
lists:foreach(fun ({Key, _}) -> erase(Key) end, Dict).
%%
+%% Wake up and then immediately bif trap with a lengthy computation.
+%%
+
+wake_up_and_bif_trap(doc) -> [];
+wake_up_and_bif_trap(suite) -> [];
+wake_up_and_bif_trap(Config) when is_list(Config) ->
+ ?line Self = self(),
+ ?line Pid = spawn_link(fun() -> erlang:hibernate(?MODULE, characters_to_list_trap, [Self]) end),
+ ?line Pid ! wakeup,
+ ?line receive
+ {ok, Pid0} when Pid0 =:= Pid -> ok
+ after 5000 ->
+ ?line ?t:fail(process_blocked)
+ end,
+ ?line unlink(Pid),
+ ?line exit(Pid, bye).
+
+%% Lengthy computation that traps (in characters_to_list_trap_3).
+characters_to_list_trap(Parent) ->
+ Bin0 = <<"abcdefghijklmnopqrstuvwxz0123456789">>,
+ Bin = binary:copy(Bin0, 1500),
+ unicode:characters_to_list(Bin),
+ Parent ! {ok, self()}.
+
+%%
%% Misc
%%
diff --git a/erts/emulator/test/match_spec_SUITE.erl b/erts/emulator/test/match_spec_SUITE.erl
index 2b21fa58f4..461773114e 100644
--- a/erts/emulator/test/match_spec_SUITE.erl
+++ b/erts/emulator/test/match_spec_SUITE.erl
@@ -27,8 +27,9 @@
destructive_in_test_bif/1, guard_exceptions/1,
unary_plus/1, unary_minus/1, moving_labels/1]).
-export([fpe/1]).
+-export([otp_9422/1]).
--export([runner/2]).
+-export([runner/2, loop_runner/3]).
-export([f1/1, f2/2, f3/2, fn/1, fn/2, fn/3]).
-export([do_boxed_and_small/0]).
@@ -57,7 +58,8 @@ all() ->
trace_control_word, silent, silent_no_ms, ms_trace2,
ms_trace3, boxed_and_small, destructive_in_test_bif,
guard_exceptions, unary_plus, unary_minus, fpe,
- moving_labels];
+ moving_labels,
+ otp_9422];
true -> [not_run]
end.
@@ -208,6 +210,43 @@ test_3(Config) when is_list(Config) ->
?line collect(P1, [{trace, P1, call, {?MODULE, f2, [a, b]}, [true]}]),
?line ok.
+otp_9422(doc) -> [];
+otp_9422(Config) when is_list(Config) ->
+ Laps = 1000,
+ ?line Fun1 = fun() -> otp_9422_tracee() end,
+ ?line P1 = spawn_link(?MODULE, loop_runner, [self(), Fun1, Laps]),
+ io:format("spawned ~p as tracee\n", [P1]),
+
+ ?line erlang:trace(P1, true, [call, silent]),
+
+ ?line Fun2 = fun() -> otp_9422_trace_changer() end,
+ ?line P2 = spawn_link(?MODULE, loop_runner, [self(), Fun2, Laps]),
+ io:format("spawned ~p as trace_changer\n", [P2]),
+
+ start_collect(P1),
+ start_collect(P2),
+
+ %%receive after 10*1000 -> ok end,
+
+ stop_collect(P1),
+ stop_collect(P2),
+ ok.
+
+otp_9422_tracee() ->
+ ?MODULE:f1(a),
+ ?MODULE:f1(b),
+ ?MODULE:f1(c).
+
+otp_9422_trace_changer() ->
+ Pat1 = [{[a], [], [{enable_trace, arity}]}],
+ ?line erlang:trace_pattern({?MODULE, f1, 1}, Pat1),
+ Pat2 = [{[b], [], [{disable_trace, arity}]}],
+ ?line erlang:trace_pattern({?MODULE, f1, 1}, Pat2).
+
+
+
+
+
bad_match_spec_bin(Config) when is_list(Config) ->
{'EXIT',{badarg,_}} = (catch ets:match_spec_run([1], <<>>)),
B0 = <<1,2>>,
@@ -932,6 +971,24 @@ runner(Collector, Fun) ->
Collector ! {gone, self()}
end.
+loop_runner(Collector, Fun, Laps) ->
+ receive
+ {go, Collector} ->
+ go
+ end,
+ loop_runner_cont(Collector, Fun, 0, Laps).
+
+loop_runner_cont(_Collector, _Fun, Laps, Laps) ->
+ receive
+ {done, Collector} ->
+ io:format("loop_runner ~p exit after ~p laps\n", [self(), Laps]),
+ Collector ! {gone, self()}
+ end;
+loop_runner_cont(Collector, Fun, N, Laps) ->
+ Fun(),
+ loop_runner_cont(Collector, Fun, N+1, Laps).
+
+
f1(X) ->
{X}.
diff --git a/erts/emulator/test/nif_SUITE.erl b/erts/emulator/test/nif_SUITE.erl
index 91d695d979..2867e8e2e4 100644
--- a/erts/emulator/test/nif_SUITE.erl
+++ b/erts/emulator/test/nif_SUITE.erl
@@ -257,10 +257,48 @@ types(Config) when is_list(Config) ->
end,
[{},{ok},{{}},{[],{}},{1,2,3,4,5}]),
Stuff = [[],{},0,0.0,(1 bsl 100),(fun()-> ok end),make_ref(),self()],
- [eq_cmp(A,clone(B)) || A<-Stuff, B<-Stuff],
+ [eq_cmp(A,clone(B)) || A<-Stuff, B<-Stuff],
+
+ {IntSz, LongSz} = type_sizes(),
+ UintMax = (1 bsl (IntSz*8)) - 1,
+ IntMax = UintMax bsr 1,
+ IntMin = -(IntMax+1),
+ UlongMax = (1 bsl (LongSz*8)) - 1,
+ LongMax = UlongMax bsr 1,
+ LongMin = -(LongMax+1),
+ Uint64Max = (1 bsl 64) - 1,
+ Int64Max = Uint64Max bsr 1,
+ Int64Min = -(Int64Max+1),
+ Limits = [{IntMin,IntMax},{0,UintMax},{LongMin,LongMax},{0,UlongMax},{Int64Min,Int64Max},{0,Uint64Max}],
+ io:format("Limits = ~p\n", [Limits]),
+ lists:foreach(fun(I) ->
+ R1 = echo_int(I),
+ %%io:format("echo_int(~p) -> ~p\n", [I, R1]),
+ R2 = my_echo_int(I, Limits),
+ ?line R1 = R2,
+ ?line true = (R1 =:= R2),
+ ?line true = (R1 == R2)
+ end, int_list()),
+
?line verify_tmpmem(TmpMem),
ok.
+int_list() ->
+ Start = 1 bsl 200,
+ int_list([Start], -Start).
+int_list([N | _]=List, End) when N<End ->
+ List;
+int_list([N | _]=List, End) ->
+ int_list([N - (1 + (abs(N) div 3)) | List], End).
+
+my_echo_int(I, Limits) ->
+ lists:map(fun({Min,Max}) ->
+ if I < Min -> false;
+ I > Max -> false;
+ true -> I
+ end
+ end, Limits).
+
clone(X) ->
binary_to_term(term_to_binary(X)).
@@ -1170,9 +1208,10 @@ tmpmem() ->
MemInfo ->
MSBCS = lists:foldl(
fun ({instance, _, L}, Acc) ->
+ {value,{_,SBMBCS}} = lists:keysearch(sbmbcs, 1, L),
{value,{_,MBCS}} = lists:keysearch(mbcs, 1, L),
{value,{_,SBCS}} = lists:keysearch(sbcs, 1, L),
- [MBCS,SBCS | Acc]
+ [SBMBCS,MBCS,SBCS | Acc]
end,
[],
MemInfo),
@@ -1269,6 +1308,8 @@ send_blob_thread(_,_,_) -> ?nif_stub.
join_send_thread(_) -> ?nif_stub.
copy_blob(_) -> ?nif_stub.
send_term(_,_) -> ?nif_stub.
+echo_int(_) -> ?nif_stub.
+type_sizes() -> ?nif_stub.
nif_stub_error(Line) ->
exit({nif_not_loaded,module,?MODULE,line,Line}).
diff --git a/erts/emulator/test/nif_SUITE_data/nif_SUITE.c b/erts/emulator/test/nif_SUITE_data/nif_SUITE.c
index 0bb93daa33..00a1365bc3 100644
--- a/erts/emulator/test/nif_SUITE_data/nif_SUITE.c
+++ b/erts/emulator/test/nif_SUITE_data/nif_SUITE.c
@@ -28,6 +28,7 @@
static int static_cntA; /* zero by default */
static int static_cntB = NIF_SUITE_LIB_VER * 100;
+static ERL_NIF_TERM atom_false;
static ERL_NIF_TERM atom_self;
static ERL_NIF_TERM atom_ok;
static ERL_NIF_TERM atom_join;
@@ -103,7 +104,7 @@ static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
msgenv_resource_type = enif_open_resource_type(env,NULL,"nif_SUITE.msgenv",
msgenv_dtor,
ERL_NIF_RT_CREATE, NULL);
-
+ atom_false = enif_make_atom(env,"false");
atom_self = enif_make_atom(env,"self");
atom_ok = enif_make_atom(env,"ok");
atom_join = enif_make_atom(env,"join");
@@ -481,6 +482,45 @@ error:
return enif_make_atom(env,"error");
}
+static ERL_NIF_TERM echo_int(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+ int sint;
+ unsigned uint;
+ long slong;
+ unsigned long ulong;
+ ErlNifSInt64 sint64;
+ ErlNifUInt64 uint64;
+ ERL_NIF_TERM sint_term = atom_false, uint_term = atom_false;
+ ERL_NIF_TERM slong_term = atom_false, ulong_term = atom_false;
+ ERL_NIF_TERM sint64_term = atom_false, uint64_term = atom_false;
+
+ if (enif_get_int(env, argv[0], &sint)) {
+ sint_term = enif_make_int(env, sint);
+ }
+ if (enif_get_uint(env, argv[0], &uint)) {
+ uint_term = enif_make_uint(env, uint);
+ }
+ if (enif_get_long(env, argv[0], &slong)) {
+ slong_term = enif_make_long(env, slong);
+ }
+ if (enif_get_ulong(env, argv[0], &ulong)) {
+ ulong_term = enif_make_ulong(env, ulong);
+ }
+ if (enif_get_int64(env, argv[0], &sint64)) {
+ sint64_term = enif_make_int64(env, sint64);
+ }
+ if (enif_get_uint64(env, argv[0], &uint64)) {
+ uint64_term = enif_make_uint64(env, uint64);
+ }
+ return enif_make_list6(env, sint_term, uint_term, slong_term, ulong_term, sint64_term, uint64_term);
+}
+
+static ERL_NIF_TERM type_sizes(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+ return enif_make_tuple2(env, enif_make_int(env, sizeof(int)),
+ enif_make_int(env, sizeof(long)));
+}
+
static ERL_NIF_TERM tuple_2_list(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
int arity = -1;
@@ -1417,7 +1457,9 @@ static ErlNifFunc nif_funcs[] =
{"send_blob_thread", 3, send_blob_thread},
{"join_send_thread", 1, join_send_thread},
{"copy_blob", 1, copy_blob},
- {"send_term", 2, send_term}
+ {"send_term", 2, send_term},
+ {"echo_int", 1, echo_int},
+ {"type_sizes", 0, type_sizes}
};
ERL_NIF_INIT(nif_SUITE,nif_funcs,load,reload,upgrade,unload)
diff --git a/erts/emulator/test/send_term_SUITE.erl b/erts/emulator/test/send_term_SUITE.erl
index 6615873392..ba0ba804ca 100644
--- a/erts/emulator/test/send_term_SUITE.erl
+++ b/erts/emulator/test/send_term_SUITE.erl
@@ -175,6 +175,10 @@ chk_temp_alloc() ->
%% Verify that we havn't got anything allocated by temp_alloc
lists:foreach(
fun ({instance, _, TI}) ->
+ ?line {value, {sbmbcs, SBMBCInfo}}
+ = lists:keysearch(sbmbcs, 1, TI),
+ ?line {value, {blocks, 0, _, _}}
+ = lists:keysearch(blocks, 1, SBMBCInfo),
?line {value, {mbcs, MBCInfo}}
= lists:keysearch(mbcs, 1, TI),
?line {value, {blocks, 0, _, _}}
diff --git a/erts/emulator/utils/beam_makeops b/erts/emulator/utils/beam_makeops
index e7c57142c0..354439b5e3 100755
--- a/erts/emulator/utils/beam_makeops
+++ b/erts/emulator/utils/beam_makeops
@@ -67,6 +67,10 @@ my $max_gen_operands = 8;
# Must be even. The beam_load.c file must be updated, too.
my $max_spec_operands = 6;
+# The maximum number of primitive genop_types.
+
+my $max_genop_types = 16;
+
my %gen_opnum;
my %num_specific;
my %gen_to_spec;
@@ -106,7 +110,7 @@ my @pred_table;
# Operand types for generic instructions.
my $compiler_types = "uiaxyfhz";
-my $loader_types = "nprvlq";
+my $loader_types = "nprvlqo";
my $genop_types = $compiler_types . $loader_types;
#
@@ -142,34 +146,61 @@ my %arg_size = ('r' => 0, # x(0) - x register zero
my %type_bit;
my @tag_type;
+sub define_type_bit {
+ my($tag,$val) = @_;
+ defined $type_bit{$tag} and
+ sanity("the tag '$tag' has already been defined with the value ",
+ $type_bit{$tag});
+ $type_bit{$tag} = $val;
+}
+
{
my($bit) = 1;
my(%bit);
foreach (split('', $genop_types)) {
push(@tag_type, $_);
- $type_bit{$_} = $bit;
+ define_type_bit($_, $bit);
$bit{$_} = $bit;
$bit *= 2;
}
# Composed types.
- $type_bit{'d'} = $type_bit{'x'} | $type_bit{'y'} | $type_bit{'r'};
- $type_bit{'c'} = $type_bit{'i'} | $type_bit{'a'} | $type_bit{'n'} | $type_bit{'q'};
- $type_bit{'s'} = $type_bit{'d'} | $type_bit{'i'} | $type_bit{'a'} | $type_bit{'n'};
- $type_bit{'j'} = $type_bit{'f'} | $type_bit{'p'};
+ define_type_bit('d', $type_bit{'x'} | $type_bit{'y'} | $type_bit{'r'});
+ define_type_bit('c', $type_bit{'i'} | $type_bit{'a'} |
+ $type_bit{'n'} | $type_bit{'q'});
+ define_type_bit('s', $type_bit{'d'} | $type_bit{'i'} |
+ $type_bit{'a'} | $type_bit{'n'});
+ define_type_bit('j', $type_bit{'f'} | $type_bit{'p'});
# Aliases (for matching purposes).
- $type_bit{'I'} = $type_bit{'u'};
- $type_bit{'t'} = $type_bit{'u'};
- $type_bit{'A'} = $type_bit{'u'};
- $type_bit{'L'} = $type_bit{'u'};
- $type_bit{'b'} = $type_bit{'u'};
- $type_bit{'N'} = $type_bit{'u'};
- $type_bit{'U'} = $type_bit{'u'};
- $type_bit{'e'} = $type_bit{'u'};
- $type_bit{'P'} = $type_bit{'u'};
- $type_bit{'Q'} = $type_bit{'u'};
+ define_type_bit('I', $type_bit{'u'});
+ define_type_bit('t', $type_bit{'u'});
+ define_type_bit('A', $type_bit{'u'});
+ define_type_bit('L', $type_bit{'u'});
+ define_type_bit('b', $type_bit{'u'});
+ define_type_bit('N', $type_bit{'u'});
+ define_type_bit('U', $type_bit{'u'});
+ define_type_bit('e', $type_bit{'u'});
+ define_type_bit('P', $type_bit{'u'});
+ define_type_bit('Q', $type_bit{'u'});
+}
+
+#
+# Sanity checks.
+#
+
+{
+ if (@tag_type > $max_genop_types) {
+ sanity("\$max_genop_types is $max_genop_types, ",
+ "but there are ", scalar(@tag_type),
+ " primitive tags defined\n");
+ }
+
+ foreach my $tag (@tag_type) {
+ sanity("tag '$tag': primitive tags must be named with lowercase letters")
+ unless $tag =~ /^[a-z]$/;
+ }
}
#
@@ -436,12 +467,12 @@ sub emulator_output {
#
my(@bits) = (0) x ($max_spec_operands/2);
- my($shift) = 16;
my($i);
for ($i = 0; $i < $max_spec_operands && defined $args[$i]; $i++) {
my $t = $args[$i];
if (defined $type_bit{$t}) {
- $bits[int($i/2)] |= $type_bit{$t} << (16*($i%2));
+ my $shift = $max_genop_types * ($i % 2);
+ $bits[int($i/2)] |= $type_bit{$t} << $shift;
}
}
@@ -753,6 +784,10 @@ sub error {
die $where, @message, "\n";
}
+sub sanity {
+ die "internal error: ", @_, "\n";
+}
+
sub comment {
my($lang, @comments) = @_;
my($prefix);
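
The beam_makeops changes replace the hard-coded shift of 16 with $max_genop_types and add a sanity check that no more than 16 primitive tags exist, so two per-operand type masks keep fitting side by side in each 32-bit entry of @bits. The script itself is Perl; the following C sketch only illustrates the packing arithmetic it performs.

/* typebits_pack.c - two 16-bit operand type masks per 32-bit word */
#include <stdint.h>
#include <assert.h>

#define MAX_GENOP_TYPES 16               /* each operand's type set fits in 16 bits */

static void pack_type_mask(uint32_t bits[], int i, uint16_t mask)
{
    int shift = MAX_GENOP_TYPES * (i % 2);       /* mirrors the Perl: 16 * ($i % 2) */
    bits[i / 2] |= (uint32_t)mask << shift;
}

static uint16_t unpack_type_mask(const uint32_t bits[], int i)
{
    int shift = MAX_GENOP_TYPES * (i % 2);
    return (uint16_t)(bits[i / 2] >> shift);
}

int main(void)
{
    uint32_t bits[3] = { 0, 0, 0 };      /* room for 6 specific operands */
    pack_type_mask(bits, 0, 0x0003);     /* operand 0: some pair of primitive tags */
    pack_type_mask(bits, 1, 0x0100);     /* operand 1: a single loader tag */
    assert(unpack_type_mask(bits, 0) == 0x0003);
    assert(unpack_type_mask(bits, 1) == 0x0100);
    return 0;
}
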
diff --git a/erts/epmd/src/epmd_int.h b/erts/epmd/src/epmd_int.h
index 2a0de4df9c..a2d7559f9d 100644
--- a/erts/epmd/src/epmd_int.h
+++ b/erts/epmd/src/epmd_int.h
@@ -240,6 +240,14 @@
#define put_int16(i, s) {((unsigned char*)(s))[0] = ((i) >> 8) & 0xff; \
((unsigned char*)(s))[1] = (i) & 0xff;}
+#if defined(__GNUC__)
+# define EPMD_INLINE __inline__
+#elif defined(__WIN32__)
+# define EPMD_INLINE __inline
+#else
+# define EPMD_INLINE
+#endif
+
/* ************************************************************************ */
/* Stuctures used by server */
@@ -295,6 +303,7 @@ typedef struct {
unsigned delay_write;
int max_conn;
int active_conn;
+ int select_fd_top;
char *progname;
Connection *conn;
Nodes nodes;
diff --git a/erts/epmd/src/epmd_srv.c b/erts/epmd/src/epmd_srv.c
index 4d9b454f97..5debae26b6 100644
--- a/erts/epmd/src/epmd_srv.c
+++ b/erts/epmd/src/epmd_srv.c
@@ -80,6 +80,13 @@ static int reply(EpmdVars*,int,char *,int);
static void dbg_print_buf(EpmdVars*,char *,int);
static void print_names(EpmdVars*);
+static EPMD_INLINE void select_fd_set(EpmdVars* g, int fd)
+{
+ FD_SET(fd, &g->orig_read_mask);
+ if (fd >= g->select_fd_top) {
+ g->select_fd_top = fd + 1;
+ }
+}
void run(EpmdVars *g)
{
@@ -171,6 +178,7 @@ void run(EpmdVars *g)
g->max_conn -= num_sockets;
FD_ZERO(&g->orig_read_mask);
+ g->select_fd_top = 0;
for (i = 0; i < num_sockets; i++)
{
@@ -232,14 +240,14 @@ void run(EpmdVars *g)
dbg_perror(g,"failed to listen on socket");
epmd_cleanup_exit(g,1);
}
- FD_SET(listensock[i],&g->orig_read_mask);
+ select_fd_set(g, listensock[i]);
}
dbg_tty_printf(g,2,"entering the main select() loop");
select_again:
while(1)
- {
+ {
fd_set read_mask = g->orig_read_mask;
struct timeval timeout;
int ret;
@@ -251,7 +259,8 @@ void run(EpmdVars *g)
timeout.tv_sec = (g->packet_timeout < IDLE_TIMEOUT) ? 1 : IDLE_TIMEOUT;
timeout.tv_usec = 0;
- if ((ret = select(g->max_conn,&read_mask,(fd_set *)0,(fd_set *)0,&timeout)) < 0) {
+ if ((ret = select(g->select_fd_top,
+ &read_mask, (fd_set *)0,(fd_set *)0,&timeout)) < 0) {
dbg_perror(g,"error in select ");
switch (errno) {
case EAGAIN:
@@ -821,7 +830,7 @@ static int conn_open(EpmdVars *g,int fd)
s = &g->conn[i];
/* From now on we want to know if there are data to be read */
- FD_SET(fd, &g->orig_read_mask);
+ select_fd_set(g, fd);
s->fd = fd;
s->open = EPMD_TRUE;
@@ -886,6 +895,7 @@ int epmd_conn_close(EpmdVars *g,Connection *s)
dbg_tty_printf(g,2,"closing connection on file descriptor %d",s->fd);
FD_CLR(s->fd,&g->orig_read_mask);
+ /* we don't bother lowering g->select_fd_top */
close(s->fd); /* Sometimes already closed but close anyway */
s->open = EPMD_FALSE;
if (s->buf != NULL) { /* Should never be NULL but test anyway */
@@ -1115,7 +1125,7 @@ static Node *node_reg2(EpmdVars *g,
node->extralen = extralen;
memcpy(node->extra,extra,extralen);
strcpy(node->symname,name);
- FD_SET(fd,&g->orig_read_mask);
+ select_fd_set(g, fd);
if (highvsn == 0) {
dbg_tty_printf(g,1,"registering '%s:%d', port %d",
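
Every spot that previously did FD_SET directly now calls select_fd_set(), which also remembers one past the highest descriptor seen, and the main loop passes that value as the first argument to select() instead of max_conn. A standalone sketch of the same bookkeeping around a single select() call, assuming plain POSIX select().

/* select_top.c - tracking the nfds bound while building an fd_set */
#include <sys/select.h>
#include <unistd.h>
#include <stdio.h>

static fd_set orig_read_mask;
static int    select_fd_top;             /* one past the highest fd in the set */

static void select_fd_set(int fd)
{
    FD_SET(fd, &orig_read_mask);
    if (fd >= select_fd_top)
        select_fd_top = fd + 1;
}

int main(void)
{
    FD_ZERO(&orig_read_mask);
    select_fd_top = 0;

    select_fd_set(STDIN_FILENO);

    fd_set read_mask = orig_read_mask;   /* select() modifies its argument */
    struct timeval timeout = { 0, 0 };   /* poll once, do not block */

    /* The first argument is the highest fd plus one, not a count of fds. */
    int ret = select(select_fd_top, &read_mask, NULL, NULL, &timeout);
    printf("select() returned %d\n", ret);
    return 0;
}
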
diff --git a/erts/etc/common/erlexec.c b/erts/etc/common/erlexec.c
index 7e04724f5a..2bd576d8e8 100644
--- a/erts/etc/common/erlexec.c
+++ b/erts/etc/common/erlexec.c
@@ -64,6 +64,7 @@
static const char plusM_au_allocs[]= {
'u', /* all alloc_util allocators */
'B', /* binary_alloc */
+ 'C', /* sbmbc_alloc */
'D', /* std_alloc */
'E', /* ets_alloc */
'H', /* eheap_alloc */
@@ -93,6 +94,8 @@ static char *plusM_au_alloc_switches[] = {
"rsbcst",
"sbct",
"smbcs",
+ "sbmbcs",
+ "sbmbct",
NULL
};
diff --git a/erts/lib_src/common/erl_misc_utils.c b/erts/lib_src/common/erl_misc_utils.c
index 5dbf98c7d1..35b148990a 100644
--- a/erts/lib_src/common/erl_misc_utils.c
+++ b/erts/lib_src/common/erl_misc_utils.c
@@ -1511,7 +1511,7 @@ const char* parse_topology_spec_group(erts_cpu_info_t *cpuinfo, const char* xml,
}
}
- if (cacheLevel == 0) {
+ if (parentCacheLevel == 0) {
*core_p = 0;
*processor_p = (*processor_p)++;
} else {
diff --git a/erts/lib_src/common/erl_printf_format.c b/erts/lib_src/common/erl_printf_format.c
index fba3fd723c..473791dce4 100644
--- a/erts/lib_src/common/erl_printf_format.c
+++ b/erts/lib_src/common/erl_printf_format.c
@@ -388,7 +388,7 @@ static int fmt_double(fmtfn_t fn,void*arg,double val,
max_size++;
if (precision)
max_size += precision;
- else if (fmt && FMTF_alt)
+ else if (fmt & FMTF_alt)
max_size++;
break;
case FMTC_E:
@@ -402,7 +402,7 @@ static int fmt_double(fmtfn_t fn,void*arg,double val,
max_size += 4;
if (precision)
max_size += precision;
- else if (fmt && FMTF_alt)
+ else if (fmt & FMTF_alt)
max_size++;
aexp = exp >= 0 ? exp : -exp;
if (aexp < 100)
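
The two erl_printf_format.c hunks change a logical AND into the intended bitwise AND: "fmt && FMTF_alt" is true whenever fmt has any flag set at all, while "fmt & FMTF_alt" tests only the alternate-form bit that was meant. A two-line illustration of the difference; the flag's numeric value here is made up for the example.

/* and_vs_bitand.c - testing a flag bit with & instead of && */
#include <stdio.h>

#define FMTF_alt (1u << 4)               /* illustrative bit value for the '#' flag */

int main(void)
{
    unsigned fmt = 1u;                   /* some other flag set, alt flag clear */
    printf("fmt && FMTF_alt -> %d\n", fmt && FMTF_alt);         /* 1: any non-zero fmt */
    printf("fmt &  FMTF_alt -> %d\n", (fmt & FMTF_alt) != 0);   /* 0: the alt bit is clear */
    return 0;
}
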