Diffstat (limited to 'erts/emulator/beam')

-rw-r--r--  erts/emulator/beam/beam_bif_load.c          |  30
-rw-r--r--  erts/emulator/beam/beam_emu.c               |   2
-rw-r--r--  erts/emulator/beam/beam_load.c              | 181
-rw-r--r--  erts/emulator/beam/bif.tab                  |   6
-rw-r--r--  erts/emulator/beam/erl_alloc.c              |  33
-rw-r--r--  erts/emulator/beam/erl_alloc.h              |   8
-rw-r--r--  erts/emulator/beam/erl_alloc_util.c         |   9
-rw-r--r--  erts/emulator/beam/erl_ao_firstfit_alloc.c  | 972
-rw-r--r--  erts/emulator/beam/erl_ao_firstfit_alloc.h  |  60
-rw-r--r--  erts/emulator/beam/erl_bestfit_alloc.c      |   1
-rw-r--r--  erts/emulator/beam/erl_bits.h               |   2
-rw-r--r--  erts/emulator/beam/erl_gc.c                 |  40
-rw-r--r--  erts/emulator/beam/erl_instrument.c         |   8
-rw-r--r--  erts/emulator/beam/ops.tab                  |   2

14 files changed, 1208 insertions, 146 deletions
diff --git a/erts/emulator/beam/beam_bif_load.c b/erts/emulator/beam/beam_bif_load.c index d76a7d8e9f..2561d7a630 100644 --- a/erts/emulator/beam/beam_bif_load.c +++ b/erts/emulator/beam/beam_bif_load.c @@ -162,6 +162,23 @@ BIF_RETTYPE code_make_stub_module_3(BIF_ALIST_3) return res; } +BIF_RETTYPE +check_old_code_1(BIF_ALIST_1) +{ + Module* modp; + + if (is_not_atom(BIF_ARG_1)) { + BIF_ERROR(BIF_P, BADARG); + } + modp = erts_get_module(BIF_ARG_1); + if (modp == NULL) { /* Doesn't exist. */ + BIF_RET(am_false); + } else if (modp->old_code == NULL) { /* No old code. */ + BIF_RET(am_false); + } + BIF_RET(am_true); +} + Eterm check_process_code_2(BIF_ALIST_2) { @@ -175,6 +192,13 @@ check_process_code_2(BIF_ALIST_2) Eterm res; if (internal_pid_index(BIF_ARG_1) >= erts_max_processes) goto error; + modp = erts_get_module(BIF_ARG_2); + if (modp == NULL) { /* Doesn't exist. */ + return am_false; + } else if (modp->old_code == NULL) { /* No old code. */ + return am_false; + } + #ifdef ERTS_SMP rp = erts_pid2proc_suspend(BIF_P, ERTS_PROC_LOCK_MAIN, BIF_ARG_1, ERTS_PROC_LOCK_MAIN); @@ -188,7 +212,6 @@ check_process_code_2(BIF_ALIST_2) ERTS_BIF_YIELD2(bif_export[BIF_check_process_code_2], BIF_P, BIF_ARG_1, BIF_ARG_2); } - modp = erts_get_module(BIF_ARG_2); res = check_process_code(rp, modp); #ifdef ERTS_SMP if (BIF_P != rp) { @@ -412,11 +435,6 @@ check_process_code(Process* rp, Module* modp) #endif #define INSIDE(a) (start <= (a) && (a) < end) - if (modp == NULL) { /* Doesn't exist. */ - return am_false; - } else if (modp->old_code == NULL) { /* No old code. */ - return am_false; - } /* * Pick up limits for the module. diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index fb90a7d4f7..937b3d9e53 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -3561,7 +3561,7 @@ void process_main(void) * Operands: NotUsed Live Dst */ do_bs_init_bits_known: - num_bytes = (num_bits+7) >> 3; + num_bytes = ((Uint64)num_bits+(Uint64)7) >> 3; if (num_bits & 7) { alloc += ERL_SUB_BIN_SIZE; } diff --git a/erts/emulator/beam/beam_load.c b/erts/emulator/beam/beam_load.c index 57fe25453d..eb10ae59a8 100644 --- a/erts/emulator/beam/beam_load.c +++ b/erts/emulator/beam/beam_load.c @@ -332,20 +332,22 @@ typedef struct { Eterm* func_tab[1]; /* Pointers to each function. 
*/ } LoadedCode; -#define GetTagAndValue(Stp, Tag, Val) \ - do { \ - BeamInstr __w; \ - GetByte(Stp, __w); \ - Tag = __w & 0x07; \ - if ((__w & 0x08) == 0) { \ - Val = __w >> 4; \ - } else if ((__w & 0x10) == 0) { \ - Val = ((__w >> 5) << 8); \ - GetByte(Stp, __w); \ - Val |= __w; \ - } else { \ - if (!get_int_val(Stp, __w, &(Val))) goto load_error; \ - } \ +#define GetTagAndValue(Stp, Tag, Val) \ + do { \ + BeamInstr __w; \ + GetByte(Stp, __w); \ + Tag = __w & 0x07; \ + if ((__w & 0x08) == 0) { \ + Val = __w >> 4; \ + } else if ((__w & 0x10) == 0) { \ + Val = ((__w >> 5) << 8); \ + GetByte(Stp, __w); \ + Val |= __w; \ + } else { \ + int __res = get_tag_and_value(Stp, __w, (Tag), &(Val)); \ + if (__res < 0) goto load_error; \ + Tag = (unsigned) __res; \ + } \ } while (0) @@ -489,8 +491,8 @@ static void load_printf(int line, LoaderState* context, char *fmt, ...); static int transform_engine(LoaderState* st); static void id_to_string(Uint id, char* s); static void new_genop(LoaderState* stp); -static int get_int_val(LoaderState* stp, Uint len_code, BeamInstr* result); -static int get_erlang_integer(LoaderState* stp, Uint len_code, BeamInstr* result); +static int get_tag_and_value(LoaderState* stp, Uint len_code, + unsigned tag, BeamInstr* result); static int new_label(LoaderState* stp); static void new_literal_patch(LoaderState* stp, int pos); static void new_string_patch(LoaderState* stp, int pos); @@ -1470,46 +1472,15 @@ load_code(LoaderState* stp) last_op->arity = 0; ASSERT(arity <= MAX_OPARGS); -#define GetValue(Stp, First, Val) \ - do { \ - if (((First) & 0x08) == 0) { \ - Val = (First) >> 4; \ - } else if (((First) & 0x10) == 0) { \ - BeamInstr __w; \ - GetByte(Stp, __w); \ - Val = (((First) >> 5) << 8) | __w; \ - } else { \ - if (!get_int_val(Stp, (First), &(Val))) goto load_error; \ - } \ - } while (0) - for (arg = 0; arg < arity; arg++) { - BeamInstr first; - - GetByte(stp, first); - last_op->a[arg].type = first & 0x07; + GetTagAndValue(stp, last_op->a[arg].type, last_op->a[arg].val); switch (last_op->a[arg].type) { case TAG_i: - if ((first & 0x08) == 0) { - last_op->a[arg].val = first >> 4; - } else if ((first & 0x10) == 0) { - BeamInstr w; - GetByte(stp, w); - ASSERT(first < 0x800); - last_op->a[arg].val = ((first >> 5) << 8) | w; - } else { - int i = get_erlang_integer(stp, first, &(last_op->a[arg].val)); - if (i < 0) { - goto load_error; - } - last_op->a[arg].type = i; - } - break; case TAG_u: - GetValue(stp, first, last_op->a[arg].val); + case TAG_q: + case TAG_o: break; case TAG_x: - GetValue(stp, first, last_op->a[arg].val); if (last_op->a[arg].val == 0) { last_op->a[arg].type = TAG_r; } else if (last_op->a[arg].val >= MAX_REG) { @@ -1518,7 +1489,6 @@ load_code(LoaderState* stp) } break; case TAG_y: - GetValue(stp, first, last_op->a[arg].val); if (last_op->a[arg].val >= MAX_REG) { LoadError1(stp, "invalid y register number: %u", last_op->a[arg].val); @@ -1526,7 +1496,6 @@ load_code(LoaderState* stp) last_op->a[arg].val += CP_SIZE; break; case TAG_a: - GetValue(stp, first, last_op->a[arg].val); if (last_op->a[arg].val == 0) { last_op->a[arg].type = TAG_n; } else if (last_op->a[arg].val >= stp->num_atoms) { @@ -1536,7 +1505,6 @@ load_code(LoaderState* stp) } break; case TAG_f: - GetValue(stp, first, last_op->a[arg].val); if (last_op->a[arg].val == 0) { last_op->a[arg].type = TAG_p; } else if (last_op->a[arg].val >= stp->num_labels) { @@ -1544,7 +1512,6 @@ load_code(LoaderState* stp) } break; case TAG_h: - GetValue(stp, first, last_op->a[arg].val); if (last_op->a[arg].val > 
65535) { LoadError1(stp, "invalid range for character data type: %u", last_op->a[arg].val); @@ -1552,11 +1519,9 @@ load_code(LoaderState* stp) break; case TAG_z: { - BeamInstr ext_tag; unsigned tag; - GetValue(stp, first, ext_tag); - switch (ext_tag) { + switch (last_op->a[arg].val) { case 0: /* Floating point number */ { Eterm* hp; @@ -1648,7 +1613,8 @@ load_code(LoaderState* stp) break; } default: - LoadError1(stp, "invalid extended tag %d", ext_tag); + LoadError1(stp, "invalid extended tag %d", + last_op->a[arg].val); break; } } @@ -1659,7 +1625,6 @@ load_code(LoaderState* stp) } last_op->arity++; } -#undef GetValue ASSERT(arity == last_op->arity); @@ -2562,13 +2527,8 @@ should_gen_heap_bin(LoaderState* stp, GenOpArg Src) static int binary_too_big(LoaderState* stp, GenOpArg Size) { - return Size.type == TAG_u && ((Size.val >> (8*sizeof(Uint)-3)) != 0); -} - -static int -binary_too_big_bits(LoaderState* stp, GenOpArg Size) -{ - return Size.type == TAG_u && (((Size.val+7)/8) >> (8*sizeof(Uint)-3) != 0); + return Size.type == TAG_o || + (Size.type == TAG_u && ((Size.val >> (8*sizeof(Uint)-3)) != 0)); } static GenOp* @@ -4317,41 +4277,9 @@ load_printf(int line, LoaderState* context, char *fmt,...) erts_send_error_to_logger(context->group_leader, dsbufp); } - -static int -get_int_val(LoaderState* stp, Uint len_code, BeamInstr* result) -{ - Uint count; - Uint val; - - len_code >>= 5; - ASSERT(len_code < 8); - if (len_code == 7) { - LoadError0(stp, "can't load integers bigger than 8 bytes yet\n"); - } - count = len_code + 2; - if (count == 5) { - Uint msb; - GetByte(stp, msb); - if (msb == 0) { - count--; - } - GetInt(stp, 4, *result); - } else if (count <= 4) { - GetInt(stp, count, val); - *result = ((val << 8*(sizeof(val)-count)) >> 8*(sizeof(val)-count)); - } else { - LoadError1(stp, "too big integer; %d bytes\n", count); - } - return 1; - - load_error: - return 0; -} - - static int -get_erlang_integer(LoaderState* stp, Uint len_code, BeamInstr* result) +get_tag_and_value(LoaderState* stp, Uint len_code, + unsigned tag, BeamInstr* result) { Uint count; Sint val; @@ -4371,17 +4299,62 @@ get_erlang_integer(LoaderState* stp, Uint len_code, BeamInstr* result) if (len_code < 7) { count = len_code + 2; } else { - Uint tag; + unsigned sztag; UWord len_word; ASSERT(len_code == 7); - GetTagAndValue(stp, tag, len_word); - VerifyTag(stp, TAG_u, tag); + GetTagAndValue(stp, sztag, len_word); + VerifyTag(stp, sztag, TAG_u); count = len_word + 9; } /* - * Handle values up to the size of an int, meaning either a small or bignum. + * The value for tags except TAG_i must be an unsigned integer + * fitting in an Uint. If it does not fit, we'll indicate overflow + * by changing the tag to TAG_o. + */ + + if (tag != TAG_i) { + if (count == sizeof(Uint)+1) { + Uint msb; + + /* + * The encoded value has one more byte than an Uint. + * It will still fit in an Uint if the most significant + * byte is 0. + */ + GetByte(stp, msb); + GetInt(stp, sizeof(Uint), *result); + if (msb != 0) { + /* Overflow: Negative or too big. */ + return TAG_o; + } + } else if (count == sizeof(Uint)) { + /* + * The value must be positive (or the encoded value would + * have been one byte longer). + */ + GetInt(stp, count, *result); + } else if (count < sizeof(Uint)) { + GetInt(stp, count, *result); + + /* + * If the sign bit is set, the value is negative + * (not allowed). 
+ */ + if (*result & ((Uint)1 << (count*8-1))) { + return TAG_o; + } + } else { + GetInt(stp, count, *result); + return TAG_o; + } + return tag; + } + + /* + * TAG_i: First handle values up to the size of an Uint (i.e. either + * a small or a bignum). */ if (count <= sizeof(val)) { diff --git a/erts/emulator/beam/bif.tab b/erts/emulator/beam/bif.tab index d9dd80fa8b..b171e99e03 100644 --- a/erts/emulator/beam/bif.tab +++ b/erts/emulator/beam/bif.tab @@ -802,6 +802,12 @@ bif prim_file:internal_name2native/1 bif prim_file:internal_native2name/1 bif prim_file:internal_normalize_utf8/1 bif file:native_name_encoding/0 + +# +# New in R14B04. +# +bif erlang:check_old_code/1 + # # Obsolete # diff --git a/erts/emulator/beam/erl_alloc.c b/erts/emulator/beam/erl_alloc.c index 840534ec5e..bbc8a445a7 100644 --- a/erts/emulator/beam/erl_alloc.c +++ b/erts/emulator/beam/erl_alloc.c @@ -50,6 +50,9 @@ #include "erl_bestfit_alloc.h" #define GET_ERL_AF_ALLOC_IMPL #include "erl_afit_alloc.h" +#define GET_ERL_AOFF_ALLOC_IMPL +#include "erl_ao_firstfit_alloc.h" + #define ERTS_ALC_DEFAULT_MAX_THR_PREF 16 @@ -85,6 +88,8 @@ typedef union { char align_bfa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(BFAllctr_t))]; AFAllctr_t afa; char align_afa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(AFAllctr_t))]; + AOFFAllctr_t aoffa; + char align_aoffa[ERTS_ALC_CACHE_LINE_ALIGN_SIZE(sizeof(AOFFAllctr_t))]; } ErtsAllocatorState_t; static ErtsAllocatorState_t sbmbc_alloc_state; @@ -122,7 +127,8 @@ static void *fix_core_alloc(Uint size) enum allctr_type { GOODFIT, BESTFIT, - AFIT + AFIT, + AOFIRSTFIT }; struct au_init { @@ -134,6 +140,7 @@ struct au_init { GFAllctrInit_t gf; BFAllctrInit_t bf; AFAllctrInit_t af; + AOFFAllctrInit_t aoff; } init; struct { int mmbcs; @@ -147,7 +154,8 @@ struct au_init { ERTS_DEFAULT_ALLCTR_INIT, \ ERTS_DEFAULT_GF_ALLCTR_INIT, \ ERTS_DEFAULT_BF_ALLCTR_INIT, \ - ERTS_DEFAULT_AF_ALLCTR_INIT \ + ERTS_DEFAULT_AF_ALLCTR_INIT, \ + ERTS_DEFAULT_AOFF_ALLCTR_INIT \ } typedef struct { @@ -562,6 +570,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop) erts_afalc_init(); erts_bfalc_init(); erts_gfalc_init(); + erts_aoffalc_init(); for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) { erts_allctrs[i].alloc = NULL; @@ -597,7 +606,7 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop) /* Init low memory variants by cloning */ init.sbmbc_low_alloc = init.sbmbc_alloc; init.sbmbc_low_alloc.init.util.name_prefix = "sbmbc_low_"; - init.sbmbc_low_alloc.init.util.alloc_no = ERTS_ALC_A_STANDARD_LOW; + init.sbmbc_low_alloc.init.util.alloc_no = ERTS_ALC_A_SBMBC_LOW; init.sbmbc_low_alloc.init.util.low_mem = 1; init.std_low_alloc = init.std_alloc; @@ -903,6 +912,12 @@ start_au_allocator(ErtsAlcType_t alctr_n, &init->init.af, &init->init.util); break; + case AOFIRSTFIT: + as = (void *) erts_aoffalc_start((AOFFAllctr_t *) as0, + &init->init.aoff, + &init->init.util); + break; + default: as = NULL; ASSERT(0); @@ -1097,6 +1112,9 @@ handle_au_arg(struct au_init *auip, else if (strcmp("af", alg) == 0) { auip->atype = AFIT; } + else if (strcmp("aoff", alg) == 0) { + auip->atype = AOFIRSTFIT; + } else { bad_value(param, sub_param + 1, alg); } @@ -2982,6 +3000,7 @@ unsigned long erts_alc_test(unsigned long op, case 0x2: return erts_bfalc_test(op, a1, a2); case 0x3: return erts_afalc_test(op, a1, a2); case 0x4: return erts_mseg_test(op, a1, a2, a3); + case 0x5: return erts_aoffalc_test(op, a1, a2); case 0xf: switch (op) { case 0xf00: @@ -3061,6 +3080,14 @@ unsigned long erts_alc_test(unsigned long op, 
&init.init.af, &init.init.util); break; + case AOFIRSTFIT: + allctr = erts_aoffalc_start((AOFFAllctr_t *) + erts_alloc(ERTS_ALC_T_UNDEF, + sizeof(AOFFAllctr_t)), + &init.init.aoff, + &init.init.util); + break; + default: ASSERT(0); allctr = NULL; diff --git a/erts/emulator/beam/erl_alloc.h b/erts/emulator/beam/erl_alloc.h index ce792d4d17..c35a60da22 100644 --- a/erts/emulator/beam/erl_alloc.h +++ b/erts/emulator/beam/erl_alloc.h @@ -99,6 +99,14 @@ unsigned long erts_alc_test(unsigned long, #define ERTS_ALC_MIN_LONG_LIVED_TIME (10*60*1000) +#if HALFWORD_HEAP +#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \ + ((NO) == ERTS_ALC_A_SBMBC || (NO) == ERTS_ALC_A_SBMBC_LOW) +#else +#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \ + ((NO) == ERTS_ALC_A_SBMBC) +#endif + typedef struct { int alloc_util; int enabled; diff --git a/erts/emulator/beam/erl_alloc_util.c b/erts/emulator/beam/erl_alloc_util.c index 19c552d8cd..d51ed0c36d 100644 --- a/erts/emulator/beam/erl_alloc_util.c +++ b/erts/emulator/beam/erl_alloc_util.c @@ -3436,10 +3436,7 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init) allctr->sbmbc_threshold = init->sbmbct; if (!erts_have_sbmbc_alloc -#if HALFWORD_HEAP - || allctr->alloc_no == ERTS_ALC_A_SBMBC_LOW -#endif - || allctr->alloc_no == ERTS_ALC_A_SBMBC) + || ERTS_IS_SBMBC_ALLOCATOR_NO__(allctr->alloc_no)) allctr->sbmbc_threshold = 0; if (!allctr->sbmbc_threshold) @@ -3466,14 +3463,14 @@ erts_alcu_start(Allctr_t *allctr, AllctrInit_t *init) #ifdef ERTS_ENABLE_LOCK_COUNT erts_mtx_init_x_opt(&allctr->mutex, - allctr->alloc_no == ERTS_ALC_A_SBMBC + ERTS_IS_SBMBC_ALLOCATOR_NO__(allctr->alloc_no) ? "sbmbc_alloc" : "alcu_allocator", make_small(allctr->alloc_no), ERTS_LCNT_LT_ALLOC); #else erts_mtx_init_x(&allctr->mutex, - allctr->alloc_no == ERTS_ALC_A_SBMBC + ERTS_IS_SBMBC_ALLOCATOR_NO__(allctr->alloc_no) ? "sbmbc_alloc" : "alcu_allocator", make_small(allctr->alloc_no)); diff --git a/erts/emulator/beam/erl_ao_firstfit_alloc.c b/erts/emulator/beam/erl_ao_firstfit_alloc.c new file mode 100644 index 0000000000..002852cdad --- /dev/null +++ b/erts/emulator/beam/erl_ao_firstfit_alloc.c @@ -0,0 +1,972 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2003-2009. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + + +/* + * Description: An "address order first fit" allocator + * based on a Red-Black (binary search) Tree. The search, + * insert, and delete operations are all O(log n) operations + * on a Red-Black Tree. + * Red-Black Trees are described in "Introduction to Algorithms", + * by Thomas H. Cormen, Charles E. Leiserson, and Ronald L. Riverest. + * + * This module is a callback-module for erl_alloc_util.c + * + * Algorithm: The tree nodes are free-blocks ordered in address order. + * Every node also keeps the size of the largest block in its + * sub-tree ('max_size'). By that we can start from root and keep + * left (for low addresses) while dismissing entire sub-trees with + * too small blocks. 
+ * + * Authors: Rickard Green/Sverker Eriksson + */ + + +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif +#include "global.h" +#define GET_ERL_AOFF_ALLOC_IMPL +#include "erl_ao_firstfit_alloc.h" + +#ifdef DEBUG +#if 0 +#define HARD_DEBUG +#endif +#else +#undef HARD_DEBUG +#endif + +#define MIN_MBC_SZ (16*1024) +#define MIN_MBC_FIRST_FREE_SZ (4*1024) + +#define TREE_NODE_FLG (((Uint) 1) << 0) +#define RED_FLG (((Uint) 1) << 1) +#ifdef HARD_DEBUG +# define LEFT_VISITED_FLG (((Uint) 1) << 2) +# define RIGHT_VISITED_FLG (((Uint) 1) << 3) +#endif + +#define IS_RED(N) (((AOFF_RBTree_t *) (N)) \ + && ((AOFF_RBTree_t *) (N))->flags & RED_FLG) +#define IS_BLACK(N) (!IS_RED(((AOFF_RBTree_t *) (N)))) + +#define SET_RED(N) (((AOFF_RBTree_t *) (N))->flags |= RED_FLG) +#define SET_BLACK(N) (((AOFF_RBTree_t *) (N))->flags &= ~RED_FLG) + +#undef ASSERT +#define ASSERT ASSERT_EXPR + +#if 1 +#define RBT_ASSERT ASSERT +#else +#define RBT_ASSERT(x) +#endif + + +/* Types... */ +typedef struct AOFF_RBTree_t_ AOFF_RBTree_t; + +struct AOFF_RBTree_t_ { + Block_t hdr; + Uint flags; + AOFF_RBTree_t *parent; + AOFF_RBTree_t *left; + AOFF_RBTree_t *right; + Uint max_sz; /* of all blocks in this sub-tree */ +}; + +#ifdef HARD_DEBUG +static AOFF_RBTree_t * check_tree(AOFF_RBTree_t* root, Uint); +#endif + + +/* Calculate 'max_size' of tree node x by only looking at the direct children + * of x and x itself. + */ +static ERTS_INLINE Uint node_max_size(AOFF_RBTree_t *x) +{ + Uint sz = BLK_SZ(x); + if (x->left && x->left->max_sz > sz) { + sz = x->left->max_sz; + } + if (x->right && x->right->max_sz > sz) { + sz = x->right->max_sz; + } + return sz; +} + +/* Set new possibly lower 'max_size' of node and propagate change toward root +*/ +static ERTS_INLINE void lower_max_size(AOFF_RBTree_t *node, + AOFF_RBTree_t* stop_at) +{ + AOFF_RBTree_t* x = node; + Uint old_max = x->max_sz; + Uint new_max = node_max_size(x); + + if (new_max < old_max) { + x->max_sz = new_max; + while ((x=x->parent) != stop_at && x->max_sz == old_max) { + x->max_sz = node_max_size(x); + } + ASSERT(x == stop_at || x->max_sz > old_max); + } + else ASSERT(new_max == old_max); +} + + +/* Prototypes of callback functions */ +static Block_t* aoff_get_free_block(Allctr_t *, Uint, Block_t *, Uint, Uint32 flags); +static void aoff_link_free_block(Allctr_t *, Block_t*, Uint32 flags); +static void aoff_unlink_free_block(Allctr_t *allctr, Block_t *del, Uint32 flags); + +static Eterm info_options(Allctr_t *, char *, int *, void *, Uint **, Uint *); +static void init_atoms(void); + + + +#ifdef DEBUG + +/* Destroy all tree fields */ +#define DESTROY_TREE_NODE(N) \ + sys_memset((void *) (((Block_t *) (N)) + 1), \ + 0xff, \ + (sizeof(AOFF_RBTree_t) - sizeof(Block_t))) + +#else + +#define DESTROY_TREE_NODE(N) + +#endif + + +static int atoms_initialized = 0; + +void +erts_aoffalc_init(void) +{ + atoms_initialized = 0; +} + +Allctr_t * +erts_aoffalc_start(AOFFAllctr_t *alc, + AOFFAllctrInit_t* aoffinit, + AllctrInit_t *init) +{ + AOFFAllctr_t nulled_state = {{0}}; + /* {{0}} is used instead of {0}, in order to avoid (an incorrect) gcc + warning. gcc warns if {0} is used as initializer of a struct when + the first member is a struct (not if, for example, the third member + is a struct). 
*/ + Allctr_t *allctr = (Allctr_t *) alc; + + sys_memcpy((void *) alc, (void *) &nulled_state, sizeof(AOFFAllctr_t)); + + allctr->mbc_header_size = sizeof(Carrier_t); + allctr->min_mbc_size = MIN_MBC_SZ; + allctr->min_mbc_first_free_size = MIN_MBC_FIRST_FREE_SZ; + allctr->min_block_size = sizeof(AOFF_RBTree_t); + + allctr->vsn_str = ERTS_ALC_AOFF_ALLOC_VSN_STR; + + + /* Callback functions */ + + allctr->get_free_block = aoff_get_free_block; + allctr->link_free_block = aoff_link_free_block; + allctr->unlink_free_block = aoff_unlink_free_block; + allctr->info_options = info_options; + + allctr->get_next_mbc_size = NULL; + allctr->creating_mbc = NULL; + allctr->destroying_mbc = NULL; + allctr->init_atoms = init_atoms; + +#ifdef ERTS_ALLOC_UTIL_HARD_DEBUG + allctr->check_block = NULL; + allctr->check_mbc = NULL; +#endif + + allctr->atoms_initialized = 0; + + if (!erts_alcu_start(allctr, init)) + return NULL; + + return allctr; +} + +/* + * Red-Black Tree operations needed + */ + +static ERTS_INLINE void +left_rotate(AOFF_RBTree_t **root, AOFF_RBTree_t *x) +{ + AOFF_RBTree_t *y = x->right; + x->right = y->left; + if (y->left) + y->left->parent = x; + y->parent = x->parent; + if (!y->parent) { + RBT_ASSERT(*root == x); + *root = y; + } + else if (x == x->parent->left) + x->parent->left = y; + else { + RBT_ASSERT(x == x->parent->right); + x->parent->right = y; + } + y->left = x; + x->parent = y; + + y->max_sz = x->max_sz; + x->max_sz = node_max_size(x); + ASSERT(y->max_sz >= x->max_sz); +} + +static ERTS_INLINE void +right_rotate(AOFF_RBTree_t **root, AOFF_RBTree_t *x) +{ + AOFF_RBTree_t *y = x->left; + x->left = y->right; + if (y->right) + y->right->parent = x; + y->parent = x->parent; + if (!y->parent) { + RBT_ASSERT(*root == x); + *root = y; + } + else if (x == x->parent->right) + x->parent->right = y; + else { + RBT_ASSERT(x == x->parent->left); + x->parent->left = y; + } + y->right = x; + x->parent = y; + y->max_sz = x->max_sz; + x->max_sz = node_max_size(x); + ASSERT(y->max_sz >= x->max_sz); +} + + +/* + * Replace node x with node y + * NOTE: block header of y is not changed + */ +static ERTS_INLINE void +replace(AOFF_RBTree_t **root, AOFF_RBTree_t *x, AOFF_RBTree_t *y) +{ + + if (!x->parent) { + RBT_ASSERT(*root == x); + *root = y; + } + else if (x == x->parent->left) + x->parent->left = y; + else { + RBT_ASSERT(x == x->parent->right); + x->parent->right = y; + } + if (x->left) { + RBT_ASSERT(x->left->parent == x); + x->left->parent = y; + } + if (x->right) { + RBT_ASSERT(x->right->parent == x); + x->right->parent = y; + } + + y->flags = x->flags; + y->parent = x->parent; + y->right = x->right; + y->left = x->left; + + y->max_sz = x->max_sz; + lower_max_size(y, NULL); + DESTROY_TREE_NODE(x); +} + +static void +tree_insert_fixup(AOFF_RBTree_t** root, AOFF_RBTree_t *blk) +{ + AOFF_RBTree_t *x = blk, *y; + + /* + * Rearrange the tree so that it satisfies the Red-Black Tree properties + */ + + RBT_ASSERT(x != *root && IS_RED(x->parent)); + do { + + /* + * x and its parent are both red. Move the red pair up the tree + * until we get to the root or until we can separate them. 
+ */ + + RBT_ASSERT(IS_RED(x)); + RBT_ASSERT(IS_BLACK(x->parent->parent)); + RBT_ASSERT(x->parent->parent); + + if (x->parent == x->parent->parent->left) { + y = x->parent->parent->right; + if (IS_RED(y)) { + SET_BLACK(y); + x = x->parent; + SET_BLACK(x); + x = x->parent; + SET_RED(x); + } + else { + + if (x == x->parent->right) { + x = x->parent; + left_rotate(root, x); + } + + RBT_ASSERT(x == x->parent->parent->left->left); + RBT_ASSERT(IS_RED(x)); + RBT_ASSERT(IS_RED(x->parent)); + RBT_ASSERT(IS_BLACK(x->parent->parent)); + RBT_ASSERT(IS_BLACK(y)); + + SET_BLACK(x->parent); + SET_RED(x->parent->parent); + right_rotate(root, x->parent->parent); + + RBT_ASSERT(x == x->parent->left); + RBT_ASSERT(IS_RED(x)); + RBT_ASSERT(IS_RED(x->parent->right)); + RBT_ASSERT(IS_BLACK(x->parent)); + break; + } + } + else { + RBT_ASSERT(x->parent == x->parent->parent->right); + y = x->parent->parent->left; + if (IS_RED(y)) { + SET_BLACK(y); + x = x->parent; + SET_BLACK(x); + x = x->parent; + SET_RED(x); + } + else { + + if (x == x->parent->left) { + x = x->parent; + right_rotate(root, x); + } + + RBT_ASSERT(x == x->parent->parent->right->right); + RBT_ASSERT(IS_RED(x)); + RBT_ASSERT(IS_RED(x->parent)); + RBT_ASSERT(IS_BLACK(x->parent->parent)); + RBT_ASSERT(IS_BLACK(y)); + + SET_BLACK(x->parent); + SET_RED(x->parent->parent); + left_rotate(root, x->parent->parent); + + RBT_ASSERT(x == x->parent->right); + RBT_ASSERT(IS_RED(x)); + RBT_ASSERT(IS_RED(x->parent->left)); + RBT_ASSERT(IS_BLACK(x->parent)); + break; + } + } + } while (x != *root && IS_RED(x->parent)); + + SET_BLACK(*root); +} + +static void +aoff_unlink_free_block(Allctr_t *allctr, Block_t *del, Uint32 flags) +{ + AOFFAllctr_t *alc = (AOFFAllctr_t *) allctr; + AOFF_RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC) + ? &alc->sbmbc_root : &alc->mbc_root); + Uint spliced_is_black; + AOFF_RBTree_t *x, *y, *z = (AOFF_RBTree_t *) del; + AOFF_RBTree_t null_x; /* null_x is used to get the fixup started when we + splice out a node without children. */ + + null_x.parent = NULL; + +#ifdef HARD_DEBUG + check_tree(*root, 0); +#endif + + /* Remove node from tree... */ + + /* Find node to splice out */ + if (!z->left || !z->right) + y = z; + else + /* Set y to z:s successor */ + for(y = z->right; y->left; y = y->left); + /* splice out y */ + x = y->left ? y->left : y->right; + spliced_is_black = IS_BLACK(y); + if (x) { + x->parent = y->parent; + } + else if (spliced_is_black) { + x = &null_x; + x->flags = 0; + SET_BLACK(x); + x->right = x->left = NULL; + x->max_sz = 0; + x->parent = y->parent; + y->left = x; + } + + if (!y->parent) { + RBT_ASSERT(*root == y); + *root = x; + } + else { + if (y == y->parent->left) { + y->parent->left = x; + } + else { + RBT_ASSERT(y == y->parent->right); + y->parent->right = x; + } + if (y->parent != z) { + lower_max_size(y->parent, (y==z ? NULL : z)); + } + } + if (y != z) { + /* We spliced out the successor of z; replace z by the successor */ + replace(root, z, y); + } + + if (spliced_is_black) { + /* We removed a black node which makes the resulting tree + violate the Red-Black Tree properties. Fixup tree... */ + + while (IS_BLACK(x) && x->parent) { + + /* + * x has an "extra black" which we move up the tree + * until we reach the root or until we can get rid of it. 
+ * + * y is the sibbling of x + */ + + if (x == x->parent->left) { + y = x->parent->right; + RBT_ASSERT(y); + if (IS_RED(y)) { + RBT_ASSERT(y->right); + RBT_ASSERT(y->left); + SET_BLACK(y); + RBT_ASSERT(IS_BLACK(x->parent)); + SET_RED(x->parent); + left_rotate(root, x->parent); + y = x->parent->right; + } + RBT_ASSERT(y); + RBT_ASSERT(IS_BLACK(y)); + if (IS_BLACK(y->left) && IS_BLACK(y->right)) { + SET_RED(y); + x = x->parent; + } + else { + if (IS_BLACK(y->right)) { + SET_BLACK(y->left); + SET_RED(y); + right_rotate(root, y); + y = x->parent->right; + } + RBT_ASSERT(y); + if (IS_RED(x->parent)) { + + SET_BLACK(x->parent); + SET_RED(y); + } + RBT_ASSERT(y->right); + SET_BLACK(y->right); + left_rotate(root, x->parent); + x = *root; + break; + } + } + else { + RBT_ASSERT(x == x->parent->right); + y = x->parent->left; + RBT_ASSERT(y); + if (IS_RED(y)) { + RBT_ASSERT(y->right); + RBT_ASSERT(y->left); + SET_BLACK(y); + RBT_ASSERT(IS_BLACK(x->parent)); + SET_RED(x->parent); + right_rotate(root, x->parent); + y = x->parent->left; + } + RBT_ASSERT(y); + RBT_ASSERT(IS_BLACK(y)); + if (IS_BLACK(y->right) && IS_BLACK(y->left)) { + SET_RED(y); + x = x->parent; + } + else { + if (IS_BLACK(y->left)) { + SET_BLACK(y->right); + SET_RED(y); + left_rotate(root, y); + y = x->parent->left; + } + RBT_ASSERT(y); + if (IS_RED(x->parent)) { + SET_BLACK(x->parent); + SET_RED(y); + } + RBT_ASSERT(y->left); + SET_BLACK(y->left); + right_rotate(root, x->parent); + x = *root; + break; + } + } + } + SET_BLACK(x); + + if (null_x.parent) { + if (null_x.parent->left == &null_x) + null_x.parent->left = NULL; + else { + RBT_ASSERT(null_x.parent->right == &null_x); + null_x.parent->right = NULL; + } + RBT_ASSERT(!null_x.left); + RBT_ASSERT(!null_x.right); + } + else if (*root == &null_x) { + *root = NULL; + RBT_ASSERT(!null_x.left); + RBT_ASSERT(!null_x.right); + } + } + + DESTROY_TREE_NODE(del); + +#ifdef HARD_DEBUG + check_tree(*root, 0); +#endif +} + +static void +aoff_link_free_block(Allctr_t *allctr, Block_t *block, Uint32 flags) +{ + AOFFAllctr_t *alc = (AOFFAllctr_t *) allctr; + AOFF_RBTree_t *blk = (AOFF_RBTree_t *) block; + AOFF_RBTree_t **root = ((flags & ERTS_ALCU_FLG_SBMBC) + ? &alc->sbmbc_root : &alc->mbc_root); + Uint blk_sz = BLK_SZ(blk); + +#ifdef HARD_DEBUG + check_tree(*root, 0); +#endif + + blk->flags = 0; + blk->left = NULL; + blk->right = NULL; + blk->max_sz = blk_sz; + + if (!*root) { + blk->parent = NULL; + SET_BLACK(blk); + *root = blk; + } + else { + AOFF_RBTree_t *x = *root; + while (1) { + if (x->max_sz < blk_sz) { + x->max_sz = blk_sz; + } + if (blk < x) { + if (!x->left) { + blk->parent = x; + x->left = blk; + break; + } + x = x->left; + } + else { + if (!x->right) { + blk->parent = x; + x->right = blk; + break; + } + x = x->right; + } + + } + + /* Insert block into size tree */ + RBT_ASSERT(blk->parent); + + SET_RED(blk); + if (IS_RED(blk->parent)) + tree_insert_fixup(root, blk); + } + +#ifdef HARD_DEBUG + check_tree(*root, 0); +#endif +} + +static Block_t * +aoff_get_free_block(Allctr_t *allctr, Uint size, + Block_t *cand_blk, Uint cand_size, Uint32 flags) +{ + AOFFAllctr_t *alc = (AOFFAllctr_t *) allctr; + AOFF_RBTree_t *x = ((flags & ERTS_ALCU_FLG_SBMBC) + ? 
alc->sbmbc_root : alc->mbc_root); + AOFF_RBTree_t *blk = NULL; +#ifdef HARD_DEBUG + AOFF_RBTree_t* dbg_blk = check_tree(x, size); +#endif + + ASSERT(!cand_blk || cand_size >= size); + + while (x) { + if (x->left && x->left->max_sz >= size) { + x = x->left; + } + else if (BLK_SZ(x) >= size) { + blk = x; + break; + } + else { + x = x->right; + } + } + +#ifdef HARD_DEBUG + ASSERT(blk == dbg_blk); +#endif + + if (!blk) + return NULL; + + if (cand_blk && cand_blk < &blk->hdr) { + return NULL; /* cand_blk was better */ + } + + aoff_unlink_free_block(allctr, (Block_t *) blk, flags); + + return (Block_t *) blk; +} + + +/* + * info_options() + */ + +static struct { + Eterm as; + Eterm aoff; +#ifdef DEBUG + Eterm end_of_atoms; +#endif +} am; + +static void ERTS_INLINE atom_init(Eterm *atom, char *name) +{ + *atom = am_atom_put(name, strlen(name)); +} +#define AM_INIT(AM) atom_init(&am.AM, #AM) + +static void +init_atoms(void) +{ +#ifdef DEBUG + Eterm *atom; +#endif + + if (atoms_initialized) + return; + +#ifdef DEBUG + for (atom = (Eterm *) &am; atom <= &am.end_of_atoms; atom++) { + *atom = THE_NON_VALUE; + } +#endif + AM_INIT(as); + AM_INIT(aoff); + +#ifdef DEBUG + for (atom = (Eterm *) &am; atom < &am.end_of_atoms; atom++) { + ASSERT(*atom != THE_NON_VALUE); + } +#endif + + atoms_initialized = 1; +} + + +#define bld_uint erts_bld_uint +#define bld_cons erts_bld_cons +#define bld_tuple erts_bld_tuple + +static ERTS_INLINE void +add_2tup(Uint **hpp, Uint *szp, Eterm *lp, Eterm el1, Eterm el2) +{ + *lp = bld_cons(hpp, szp, bld_tuple(hpp, szp, 2, el1, el2), *lp); +} + +static Eterm +info_options(Allctr_t *allctr, + char *prefix, + int *print_to_p, + void *print_to_arg, + Uint **hpp, + Uint *szp) +{ + Eterm res = THE_NON_VALUE; + + if (print_to_p) { + erts_print(*print_to_p, + print_to_arg, + "%sas: %s\n", + prefix, + "aoff"); + } + + if (hpp || szp) { + + if (!atoms_initialized) + erl_exit(1, "%s:%d: Internal error: Atoms not initialized", + __FILE__, __LINE__);; + + res = NIL; + add_2tup(hpp, szp, &res, am.as, am.aoff); + } + + return res; +} + + +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\ + * NOTE: erts_aoffalc_test() is only supposed to be used for testing. 
* + * * + * Keep alloc_SUITE_data/allocator_test.h updated if changes are made * + * to erts_aoffalc_test() * +\* */ + +unsigned long +erts_aoffalc_test(unsigned long op, unsigned long a1, unsigned long a2) +{ + switch (op) { + case 0x500: return (unsigned long) 0; /* IS_AOBF */ + case 0x501: return (unsigned long) ((AOFFAllctr_t *) a1)->mbc_root; + case 0x502: return (unsigned long) ((AOFF_RBTree_t *) a1)->parent; + case 0x503: return (unsigned long) ((AOFF_RBTree_t *) a1)->left; + case 0x504: return (unsigned long) ((AOFF_RBTree_t *) a1)->right; + case 0x506: return (unsigned long) IS_BLACK((AOFF_RBTree_t *) a1); + case 0x508: return (unsigned long) 1; /* IS_AOFF */ + case 0x509: return (unsigned long) ((AOFF_RBTree_t *) a1)->max_sz; + default: ASSERT(0); return ~((unsigned long) 0); + } +} + + +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\ + * Debug functions * +\* */ + + +#ifdef HARD_DEBUG + +#define IS_LEFT_VISITED(FB) ((FB)->flags & LEFT_VISITED_FLG) +#define IS_RIGHT_VISITED(FB) ((FB)->flags & RIGHT_VISITED_FLG) + +#define SET_LEFT_VISITED(FB) ((FB)->flags |= LEFT_VISITED_FLG) +#define SET_RIGHT_VISITED(FB) ((FB)->flags |= RIGHT_VISITED_FLG) + +#define UNSET_LEFT_VISITED(FB) ((FB)->flags &= ~LEFT_VISITED_FLG) +#define UNSET_RIGHT_VISITED(FB) ((FB)->flags &= ~RIGHT_VISITED_FLG) + + +#if 0 +# define PRINT_TREE +#else +# undef PRINT_TREE +#endif + +#ifdef PRINT_TREE +static void print_tree(AOFF_RBTree_t*); +#endif + +/* + * Checks that the order between parent and children are correct, + * and that the Red-Black Tree properies are satisfied. if size > 0, + * check_tree() returns the node that satisfies "address order first fit" + * + * The Red-Black Tree properies are: + * 1. Every node is either red or black. + * 2. Every leaf (NIL) is black. + * 3. If a node is red, then both its children are black. + * 4. Every simple path from a node to a descendant leaf + * contains the same number of black nodes. + * + * + own.max_size == MAX(own.size, left.max_size, right.max_size) + */ + +static AOFF_RBTree_t * +check_tree(AOFF_RBTree_t* root, Uint size) +{ + AOFF_RBTree_t *res = NULL; + Sint blacks; + Sint curr_blacks; + AOFF_RBTree_t *x; + +#ifdef PRINT_TREE + print_tree(root); +#endif + + if (!root) + return res; + + x = root; + ASSERT(IS_BLACK(x)); + ASSERT(!x->parent); + curr_blacks = 1; + blacks = -1; + + while (x) { + if (!IS_LEFT_VISITED(x)) { + SET_LEFT_VISITED(x); + if (x->left) { + x = x->left; + if (IS_BLACK(x)) + curr_blacks++; + continue; + } + else { + if (blacks < 0) + blacks = curr_blacks; + ASSERT(blacks == curr_blacks); + } + } + + if (!IS_RIGHT_VISITED(x)) { + SET_RIGHT_VISITED(x); + if (x->right) { + x = x->right; + if (IS_BLACK(x)) + curr_blacks++; + continue; + } + else { + if (blacks < 0) + blacks = curr_blacks; + ASSERT(blacks == curr_blacks); + } + } + + + if (IS_RED(x)) { + ASSERT(IS_BLACK(x->right)); + ASSERT(IS_BLACK(x->left)); + } + + ASSERT(x->parent || x == root); + + if (x->left) { + ASSERT(x->left->parent == x); + ASSERT(x->left < x); + ASSERT(x->left->max_sz <= x->max_sz); + } + + if (x->right) { + ASSERT(x->right->parent == x); + ASSERT(x->right > x); + ASSERT(x->right->max_sz <= x->max_sz); + } + ASSERT(x->max_sz >= BLK_SZ(x)); + ASSERT(x->max_sz == BLK_SZ(x) + || x->max_sz == (x->left ? x->left->max_sz : 0) + || x->max_sz == (x->right ? 
x->right->max_sz : 0)); + + if (size && BLK_SZ(x) >= size) { + if (!res || x < res) { + res = x; + } + } + + UNSET_LEFT_VISITED(x); + UNSET_RIGHT_VISITED(x); + if (IS_BLACK(x)) + curr_blacks--; + x = x->parent; + + } + + ASSERT(curr_blacks == 0); + + UNSET_LEFT_VISITED(root); + UNSET_RIGHT_VISITED(root); + + return res; + +} + + +#ifdef PRINT_TREE +#define INDENT_STEP 2 + +#include <stdio.h> + +static void +print_tree_aux(AOFF_RBTree_t *x, int indent) +{ + int i; + + if (x) { + print_tree_aux(x->right, indent + INDENT_STEP); + for (i = 0; i < indent; i++) { + putc(' ', stderr); + } + fprintf(stderr, "%s: sz=%lu addr=0x%lx max_size=%lu\r\n", + IS_BLACK(x) ? "BLACK" : "RED", + BLK_SZ(x), (Uint)x, x->max_sz); + print_tree_aux(x->left, indent + INDENT_STEP); + } +} + + +static void +print_tree(AOFF_RBTree_t* root) +{ + fprintf(stderr, " --- AOFF tree begin ---\r\n"); + print_tree_aux(root, 0); + fprintf(stderr, " --- AOFF tree end ---\r\n"); +} + +#endif /* PRINT_TREE */ + +#endif /* HARD_DEBUG */ + diff --git a/erts/emulator/beam/erl_ao_firstfit_alloc.h b/erts/emulator/beam/erl_ao_firstfit_alloc.h new file mode 100644 index 0000000000..0bf0ec8cee --- /dev/null +++ b/erts/emulator/beam/erl_ao_firstfit_alloc.h @@ -0,0 +1,60 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2003-2009. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + + +#ifndef ERL_AO_FIRSTFIT_ALLOC__ +#define ERL_AO_FIRSTFIT_ALLOC__ + +#include "erl_alloc_util.h" + +#define ERTS_ALC_AOFF_ALLOC_VSN_STR "0.9" + +typedef struct AOFFAllctr_t_ AOFFAllctr_t; + +typedef struct { + int dummy; +} AOFFAllctrInit_t; + +#define ERTS_DEFAULT_AOFF_ALLCTR_INIT {0/*dummy*/} + +void erts_aoffalc_init(void); +Allctr_t *erts_aoffalc_start(AOFFAllctr_t *, AOFFAllctrInit_t*, AllctrInit_t *); + +#endif /* #ifndef ERL_AO_FIRSTFIT_ALLOC__ */ + + + +#if defined(GET_ERL_AOFF_ALLOC_IMPL) && !defined(ERL_AOFF_ALLOC_IMPL__) +#define ERL_AOFF_ALLOC_IMPL__ + +#define GET_ERL_ALLOC_UTIL_IMPL +#include "erl_alloc_util.h" + + +struct AOFFAllctr_t_ { + Allctr_t allctr; /* Has to be first! 
*/ + + struct AOFF_RBTree_t_* mbc_root; + struct AOFF_RBTree_t_* sbmbc_root; +}; + +unsigned long erts_aoffalc_test(unsigned long, unsigned long, unsigned long); + +#endif /* #if defined(GET_ERL_AOFF_ALLOC_IMPL) + && !defined(ERL_AOFF_ALLOC_IMPL__) */ diff --git a/erts/emulator/beam/erl_bestfit_alloc.c b/erts/emulator/beam/erl_bestfit_alloc.c index d9b1170a3d..5e3032ddaa 100644 --- a/erts/emulator/beam/erl_bestfit_alloc.c +++ b/erts/emulator/beam/erl_bestfit_alloc.c @@ -979,6 +979,7 @@ erts_bfalc_test(unsigned long op, unsigned long a1, unsigned long a2) case 0x205: return (unsigned long) ((RBTreeList_t *) a1)->next; case 0x206: return (unsigned long) IS_BLACK((RBTree_t *) a1); case 0x207: return (unsigned long) IS_TREE_NODE((RBTree_t *) a1); + case 0x208: return (unsigned long) 0; /* IS_AOFF */ default: ASSERT(0); return ~((unsigned long) 0); } } diff --git a/erts/emulator/beam/erl_bits.h b/erts/emulator/beam/erl_bits.h index 0f67733fa4..3309ea706b 100644 --- a/erts/emulator/beam/erl_bits.h +++ b/erts/emulator/beam/erl_bits.h @@ -150,7 +150,7 @@ void erts_bits_destroy_state(ERL_BITS_PROTO_0); * NBYTES(x) returns the number of bytes needed to store x bits. */ -#define NBYTES(x) (((x) + 7) >> 3) +#define NBYTES(x) (((Uint64)(x) + (Uint64) 7) >> 3) #define BYTE_OFFSET(ofs) ((Uint) (ofs) >> 3) #define BIT_OFFSET(ofs) ((ofs) & 7) diff --git a/erts/emulator/beam/erl_gc.c b/erts/emulator/beam/erl_gc.c index 5edcd667e7..e3445bcdc5 100644 --- a/erts/emulator/beam/erl_gc.c +++ b/erts/emulator/beam/erl_gc.c @@ -100,14 +100,14 @@ static Uint combined_message_size(Process* p); static void remove_message_buffers(Process* p); static int major_collection(Process* p, int need, Eterm* objv, int nobj, Uint *recl); static int minor_collection(Process* p, int need, Eterm* objv, int nobj, Uint *recl); -static void do_minor(Process *p, int new_sz, Eterm* objv, int nobj); +static void do_minor(Process *p, Uint new_sz, Eterm* objv, int nobj); static Eterm* sweep_rootset(Rootset *rootset, Eterm* htop, char* src, Uint src_size); static Eterm* sweep_one_area(Eterm* n_hp, Eterm* n_htop, char* src, Uint src_size); static Eterm* sweep_one_heap(Eterm* heap_ptr, Eterm* heap_end, Eterm* htop, char* src, Uint src_size); static Eterm* collect_heap_frags(Process* p, Eterm* heap, Eterm* htop, Eterm* objv, int nobj); -static Uint adjust_after_fullsweep(Process *p, int size_before, +static Uint adjust_after_fullsweep(Process *p, Uint size_before, int need, Eterm *objv, int nobj); static void shrink_new_heap(Process *p, Uint new_sz, Eterm *objv, int nobj); static void grow_new_heap(Process *p, Uint new_sz, Eterm* objv, int nobj); @@ -441,7 +441,15 @@ erts_garbage_collect(Process* p, int need, Eterm* objv, int nobj) p->last_old_htop = p->old_htop; #endif - return ((int) (HEAP_TOP(p) - HEAP_START(p)) / 10); + /* FIXME: This function should really return an Sint, i.e., a possibly + 64 bit wide signed integer, but that requires updating all the code + that calls it. For now, we just return INT_MAX if the result is too + large for an int. */ + { + Sint result = (HEAP_TOP(p) - HEAP_START(p)) / 10; + if (result >= INT_MAX) return INT_MAX; + else return (int) result; + } } /* @@ -599,7 +607,7 @@ erts_garbage_collect_literals(Process* p, Eterm* literals, Uint lit_size) char* area; Uint area_size; Eterm* old_htop; - int n; + Uint n; /* * Set GC state. @@ -731,7 +739,7 @@ minor_collection(Process* p, int need, Eterm* objv, int nobj, Uint *recl) * This improved Estone by more than 1200 estones on my computer * (Ultra Sparc 10). 
*/ - size_t new_sz = erts_next_heap_size(HEAP_TOP(p) - HEAP_START(p), 1); + Uint new_sz = erts_next_heap_size(HEAP_TOP(p) - HEAP_START(p), 1); /* Create new, empty old_heap */ n_old = (Eterm *) ERTS_HEAP_ALLOC(ERTS_ALC_T_OLD_HEAP, @@ -871,12 +879,12 @@ minor_collection(Process* p, int need, Eterm* objv, int nobj, Uint *recl) #endif /* HIPE */ static void -do_minor(Process *p, int new_sz, Eterm* objv, int nobj) +do_minor(Process *p, Uint new_sz, Eterm* objv, int nobj) { Rootset rootset; /* Rootset for GC (stack, dictionary, etc). */ Roots* roots; Eterm* n_htop; - int n; + Uint n; Eterm* ptr; Eterm val; Eterm gval; @@ -1079,14 +1087,14 @@ major_collection(Process* p, int need, Eterm* objv, int nobj, Uint *recl) { Rootset rootset; Roots* roots; - int size_before; + Uint size_before; Eterm* n_heap; Eterm* n_htop; char* src = (char *) HEAP_START(p); Uint src_size = (char *) HEAP_TOP(p) - src; char* oh = (char *) OLD_HEAP(p); Uint oh_size = (char *) OLD_HTOP(p) - oh; - int n; + Uint n; Uint new_sz; Uint fragments = MBUF_SIZE(p) + combined_message_size(p); ErlMessage *msgp; @@ -1312,10 +1320,10 @@ major_collection(Process* p, int need, Eterm* objv, int nobj, Uint *recl) } static Uint -adjust_after_fullsweep(Process *p, int size_before, int need, Eterm *objv, int nobj) +adjust_after_fullsweep(Process *p, Uint size_before, int need, Eterm *objv, int nobj) { - int wanted, sz, size_after, need_after; - int stack_size = STACK_SZ_ON_HEAP(p); + Uint wanted, sz, size_after, need_after; + Uint stack_size = STACK_SZ_ON_HEAP(p); Uint reclaimed_now; size_after = (HEAP_TOP(p) - HEAP_START(p)); @@ -1915,8 +1923,8 @@ static void grow_new_heap(Process *p, Uint new_sz, Eterm* objv, int nobj) { Eterm* new_heap; - int heap_size = HEAP_TOP(p) - HEAP_START(p); - int stack_size = p->hend - p->stop; + Uint heap_size = HEAP_TOP(p) - HEAP_START(p); + Uint stack_size = p->hend - p->stop; Sint offs; ASSERT(HEAP_SIZE(p) < new_sz); @@ -1954,10 +1962,10 @@ static void shrink_new_heap(Process *p, Uint new_sz, Eterm *objv, int nobj) { Eterm* new_heap; - int heap_size = HEAP_TOP(p) - HEAP_START(p); + Uint heap_size = HEAP_TOP(p) - HEAP_START(p); Sint offs; - int stack_size = p->hend - p->stop; + Uint stack_size = p->hend - p->stop; ASSERT(new_sz < p->heap_sz); sys_memmove(p->heap + new_sz - stack_size, p->stop, stack_size * diff --git a/erts/emulator/beam/erl_instrument.c b/erts/emulator/beam/erl_instrument.c index c5615818f2..04ea004ef7 100644 --- a/erts/emulator/beam/erl_instrument.c +++ b/erts/emulator/beam/erl_instrument.c @@ -1152,14 +1152,6 @@ erts_instr_get_type_info(Process *proc) return res; } -#if HALFWORD_HEAP -#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \ - ((NO) == ERTS_ALC_A_SBMBC || (NO) == ERTS_ALC_A_SBMBC_LOW) -#else -#define ERTS_IS_SBMBC_ALLOCATOR_NO__(NO) \ - ((NO) == ERTS_ALC_A_SBMBC) -#endif - Uint erts_instr_init(int stat, int map_stat) { diff --git a/erts/emulator/beam/ops.tab b/erts/emulator/beam/ops.tab index 8a5763b4bb..304ce22ef2 100644 --- a/erts/emulator/beam/ops.tab +++ b/erts/emulator/beam/ops.tab @@ -1236,7 +1236,7 @@ i_bs_init_heap I I I d i_bs_init_heap_bin_heap I I I d -bs_init_bits Fail Sz Words Regs Flags Dst | binary_too_big_bits(Sz) => system_limit Fail +bs_init_bits Fail Sz=o Words Regs Flags Dst => system_limit Fail bs_init_bits Fail Sz=u Words=u==0 Regs Flags Dst => i_bs_init_bits Sz Regs Dst bs_init_bits Fail Sz=u Words Regs Flags Dst => i_bs_init_bits_heap Sz Words Regs Dst |
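
The beam_load.c hunks above replace the separate GetValue() macro and the get_int_val()/get_erlang_integer() helpers with a single GetTagAndValue()/get_tag_and_value() path, and let the helper return the new TAG_o tag when an operand value does not fit in an Uint. A minimal sketch of the two short operand forms that the macro still decodes inline is shown below; the names decode_short_arg, SHORT_4BIT and SHORT_11BIT are invented for the example, and the multi-byte form handled by get_tag_and_value() is deliberately left out.

    #include <stdint.h>

    enum { SHORT_4BIT, SHORT_11BIT, NEEDS_HELPER };

    /* Decode one compact operand: the low three bits are the tag, bit 3
     * selects a 4-bit value in the same byte, and bit 4 selects an
     * 11-bit value spread over this byte and the next one. */
    static int
    decode_short_arg(const uint8_t *p, unsigned *tag, unsigned *val)
    {
        uint8_t w = p[0];

        *tag = w & 0x07;
        if ((w & 0x08) == 0) {
            *val = w >> 4;                  /* values 0..15   */
            return SHORT_4BIT;
        }
        if ((w & 0x10) == 0) {
            *val = ((w >> 5) << 8) | p[1];  /* values 0..2047 */
            return SHORT_11BIT;
        }
        return NEEDS_HELPER;                /* longer encodings */
    }

For the longer encodings, get_tag_and_value() re-tags any non-TAG_i operand that would overflow an Uint as TAG_o, which is what the changed binary_too_big() test and the new "bs_init_bits Fail Sz=o ... => system_limit Fail" rule in ops.tab key on.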
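
Both the do_bs_init_bits_known code in beam_emu.c and the NBYTES() macro in erl_bits.h now widen the bit count to Uint64 before adding the rounding constant, so a bit count close to the top of a 32-bit Uint can no longer wrap around and produce a far too small byte count. A self-contained illustration, assuming 32-bit arithmetic for the old form and using made-up macro names, could look like this:

    #include <stdint.h>
    #include <stdio.h>

    #define NBYTES_NARROW(x) (((uint32_t)(x) + 7u) >> 3)          /* old behaviour on 32-bit */
    #define NBYTES_WIDE(x)   (((uint64_t)(x) + (uint64_t)7) >> 3) /* patched behaviour       */

    int main(void)
    {
        uint32_t num_bits = 0xFFFFFFFAu;   /* a huge (bogus) bit count */

        /* The narrow form wraps: 0xFFFFFFFA + 7 overflows to 1, giving 0 bytes. */
        printf("narrow: %u bytes\n", (unsigned) NBYTES_NARROW(num_bits));

        /* The wide form yields the real size, which the size checks can then reject. */
        printf("wide:   %llu bytes\n", (unsigned long long) NBYTES_WIDE(num_bits));
        return 0;
    }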
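
erl_ao_firstfit_alloc.c adds the new "aoff" strategy described in its header comment: free blocks live in a red-black tree ordered by address, and every node caches the largest block size found in its subtree (max_sz), so the search can walk toward low addresses while discarding whole subtrees that are too small. The stripped-down sketch below shows just that search step; the struct and function names are invented for the example, and the real aoff_get_free_block() additionally handles carrier flags and unlinks the chosen block afterwards.

    #include <stddef.h>

    struct free_node {
        size_t blk_sz;            /* size of this free block                 */
        size_t max_sz;            /* largest blk_sz anywhere in this subtree */
        struct free_node *left;   /* free blocks at lower addresses          */
        struct free_node *right;  /* free blocks at higher addresses         */
    };

    /* Return the lowest-addressed free block of at least 'want' bytes,
     * or NULL if no such block exists. */
    static struct free_node *
    lowest_fit(struct free_node *x, size_t want)
    {
        while (x) {
            if (x->left && x->left->max_sz >= want)
                x = x->left;        /* a fit exists at a lower address  */
            else if (x->blk_sz >= want)
                return x;           /* lowest-addressed block that fits */
            else
                x = x->right;       /* only higher addresses can fit    */
        }
        return NULL;
    }

Linking and unlinking keep max_sz consistent on the way back up toward the root, so search, insert and delete all stay O(log n), matching the module description in the diff.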