162 files changed, 7044 insertions, 2588 deletions
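A large share of the C changes in this diff fold the old `HASHMAP_SUBTAG` into a single `MAP_SUBTAG` and distinguish the two map layouts by the boxed header instead (`is_flatmap_header` vs. the HAMT head tags), with `MAP_SMALL_MAP_LIMIT` (32 outside DEBUG builds) deciding when a flat map is turned into a HAMT. The sketch below is not ERTS code — the enum, struct, and function names are simplified stand-ins invented for illustration — it only shows that one-subtag, header-dispatch pattern.

    /* Illustrative sketch only: simplified stand-ins for the ERTS terms in this
     * diff. The real code tags a boxed header word; a plain enum plays that role
     * here. The point is the dispatch pattern: one MAP subtag, two layouts,
     * chosen by header type and a small-map size limit (32 in non-DEBUG). */
    #include <stdio.h>

    #define SMALL_MAP_LIMIT 32          /* mirrors MAP_SMALL_MAP_LIMIT */

    typedef enum { TAG_FLATMAP_HEAD, TAG_HAMT_HEAD } map_header_type;

    typedef struct {                    /* stand-in for flatmap_t / hashmap_head_t */
        map_header_type type;
        unsigned size;                  /* number of key-value pairs */
    } map_head;

    /* One switch on the header type replaces the old separate HASHMAP_SUBTAG case. */
    static const char *describe_map(const map_head *m)
    {
        switch (m->type) {
        case TAG_FLATMAP_HEAD: return "flatmap (sorted key tuple + value array)";
        case TAG_HAMT_HEAD:    return "hashmap (HAMT nodes)";
        }
        return "unknown";
    }

    /* Pick the representation from the pair count, as the diff's flatmap_merge
     * and erts_maps_put paths do when a map grows past the limit. */
    static map_head make_map(unsigned pairs)
    {
        map_head m;
        m.type = (pairs > SMALL_MAP_LIMIT) ? TAG_HAMT_HEAD : TAG_FLATMAP_HEAD;
        m.size = pairs;
        return m;
    }

    int main(void)
    {
        map_head small = make_map(3), big = make_map(100);
        printf("%u pairs -> %s\n", small.size, describe_map(&small));
        printf("%u pairs -> %s\n", big.size, describe_map(&big));
        return 0;
    }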
diff --git a/HOWTO/INSTALL-WIN32.md b/HOWTO/INSTALL-WIN32.md index 0387572dd3..3039a5efea 100644 --- a/HOWTO/INSTALL-WIN32.md +++ b/HOWTO/INSTALL-WIN32.md @@ -639,30 +639,25 @@ OpenSSL. Well' here's the list: The installation locations chosen are where configure will look for OpenSSL, so try to keep them as is. -* Building with wxWidgets. Download wxWidgets-2.8.9 or higher patch - release (2.9.\* is a developer release which currently does not work - with wxErlang). +* Building with wxWidgets. Download wxWidgets-3.0.2 or higher patch + release. Install or unpack it to `DRIVE:/PATH/cygwin/opt/local/pgm`. - edit: `C:\cygwin\opt\local\pgm\wxMSW-2.8.11\include\wx\msw\setup.h` - enable `wxUSE_GLCANVAS`, `wxUSE_POSTSCRIPT` and `wxUSE_GRAPHICS_CONTEXT` + edit: `C:\cygwin\opt\local\pgm\wxMSW-3.0.2\include\wx\msw\setup.h` + enable `wxUSE_POSTSCRIPT` build: From a command prompt with the VC tools available (See the instructions for OpenSSL build above for help on starting the proper command prompt in RELEASE mode): - C:\...\> cd C:\cygwin\opt\local\pgm\wxMSW-2.8.11\build\msw - C:\...\> nmake BUILD=release SHARED=0 UNICODE=1 USE_OPENGL=1 USE_GDIPLUS=1 DIR_SUFFIX_CPU= -f makefile.vc - C:\...\> cd C:\cygwin\opt\local\pgm\wxMSW-2.8.11\contrib\build\stc - C:\...\> nmake BUILD=release SHARED=0 UNICODE=1 USE_OPENGL=1 USE_GDIPLUS=1 DIR_SUFFIX_CPU= -f makefile.vc + C:\...\> cd C:\cygwin\opt\local\pgm\wxMSW-3.0.2\build\msw + C:\...\> nmake BUILD=release SHARED=0 DIR_SUFFIX_CPU= -f makefile.vc Or - if building a 64bit version: - C:\...\> cd C:\cygwin\opt\local\pgm\wxMSW-2.8.11\build\msw - C:\...\> nmake TARGET_CPU=amd64 BUILD=release SHARED=0 UNICODE=1 USE_OPENGL=1 USE_GDIPLUS=1 DIR_SUFFIX_CPU= -f makefile.vc - C:\...\> cd C:\cygwin\opt\local\pgm\wxMSW-2.8.11\contrib\build\stc - C:\...\> nmake TARGET_CPU=amd64 BUILD=release SHARED=0 UNICODE=1 USE_OPENGL=1 USE_GDIPLUS=1 DIR_SUFFIX_CPU= -f makefile.vc + C:\...\> cd C:\cygwin\opt\local\pgm\wxMSW-3.0.2\build\msw + C:\...\> nmake TARGET_CPU=amd64 BUILD=release SHARED=0 DIR_SUFFIX_CPU= -f makefile.vc * The Erlang source distribution (from <http://www.erlang.org/download.html>). The same as for Unix platforms. 
Preferably use tar from within Cygwin to diff --git a/OTP_VERSION b/OTP_VERSION index 1fbc0d2431..9d7cce5373 100644 --- a/OTP_VERSION +++ b/OTP_VERSION @@ -1 +1 @@ -18.0-rc0 +18.0-rc1 diff --git a/bootstrap/lib/kernel/ebin/inet_dns.beam b/bootstrap/lib/kernel/ebin/inet_dns.beam Binary files differindex 618f9c407a..0c5b6c73e1 100644 --- a/bootstrap/lib/kernel/ebin/inet_dns.beam +++ b/bootstrap/lib/kernel/ebin/inet_dns.beam diff --git a/erts/doc/src/notes.xml b/erts/doc/src/notes.xml index af0d4d7377..a2b4ae49a4 100644 --- a/erts/doc/src/notes.xml +++ b/erts/doc/src/notes.xml @@ -30,6 +30,98 @@ </header> <p>This document describes the changes made to the ERTS application.</p> +<section><title>Erts 6.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix missing quotation in the <c>LM_FIND_EMU_CC</c> + <c>autoconf</c> macro which could cause build failures.</p> + <p> + Own Id: OTP-12388</p> + </item> + <item> + <p> + Fix erroneous printout of monitors in crashdump file.</p> + <p> + Own Id: OTP-12537</p> + </item> + <item> + <p> + The runtime system without SMP support could crash in the + BIF <c>port_control/3</c> if the port that was being + accessed died during the call to the BIF.</p> + <p> + Own Id: OTP-12544 Aux Id: Seq12777 </p> + </item> + <item> + <p> + Avoid corrupt oversized integer to be created from binary + matching. Instead throw system_limit exception which is + the correct behavior. A peculiar symptom of this bug was + that bitwise operations (band, bor, bxor) on such + oversized integers could return the empty list []. + Credit: Mikael Pettersson, Nico Kruber</p> + <p> + Own Id: OTP-12556</p> + </item> + <item> + <p> + A race condition when calling <c>port_info/1</c> could + cause a memory fault has been fixed.</p> + <p> + Own Id: OTP-12587</p> + </item> + <item> + <p> + Fix comparison of exact terms. An overflow that could + cause faulty comparisons has been fixed. 
Comparison of + exact terms is exclusively used within Maps.</p> + <p> + Own Id: OTP-12623</p> + </item> + <item> + <p> + Fix bug in <c>list_to_integer/1</c> for very long lists + that could cause VM crash.</p> + <p> + Own Id: OTP-12624</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Introduced a runtime system internal 64-bit API for + atomic memory operations.</p> + <p> + Own Id: OTP-12351</p> + </item> + <item> + <p> + Add command line argument option for the initial size of + process dictionaries.</p> + <p> + Use '+hpds <size>' to set initial process + dictionary size for spawned processes.</p> + <p> + Own Id: OTP-12535 Aux Id: seq12809 </p> + </item> + <item> + <p> + Fix documentation on $char for Unicode</p> + <p> + Own Id: OTP-12545</p> + </item> + </list> + </section> + +</section> + <section><title>Erts 6.3.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 6526e87e4c..fdb84aae42 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -6573,6 +6573,7 @@ new_map(Process* p, Eterm* reg, BeamInstr* I) ptr = &Arg(4); if (n > 2*MAP_SMALL_MAP_LIMIT) { + Eterm res; if (HeapWordsLeft(p) < n) { erts_garbage_collect(p, n, reg, Arg(2)); } @@ -6589,7 +6590,15 @@ new_map(Process* p, Eterm* reg, BeamInstr* I) p->htop = mhp; factory.p = p; - return erts_hashmap_from_array(&factory, thp, n/2, 0); + res = erts_hashmap_from_array(&factory, thp, n/2, 0); + if (p->mbuf) { + Uint live = Arg(2); + reg[live] = res; + erts_garbage_collect(p, 0, reg, live+1); + res = reg[live]; + E = p->stop; + } + return res; } if (HeapWordsLeft(p) < need) { @@ -6602,8 +6611,8 @@ new_map(Process* p, Eterm* reg, BeamInstr* I) keys = make_tuple(thp); *thp++ = make_arityval(n/2); - mp = (flatmap_t *)mhp; mhp += MAP_HEADER_SIZE; - mp->thing_word = MAP_HEADER; + mp = (flatmap_t *)mhp; mhp += MAP_HEADER_FLATMAP_SZ; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = n/2; mp->keys = keys; @@ -6684,7 +6693,7 @@ update_map_assoc(Process* p, Eterm* reg, Eterm map, BeamInstr* I) * update list are new). */ - need = 2*(num_old+num_updates) + 1 + MAP_HEADER_SIZE; + need = 2*(num_old+num_updates) + 1 + MAP_HEADER_FLATMAP_SZ; if (HeapWordsLeft(p) < need) { Uint live = Arg(3); reg[live] = map; @@ -6723,8 +6732,8 @@ update_map_assoc(Process* p, Eterm* reg, Eterm map, BeamInstr* I) res = make_flatmap(hp); mp = (flatmap_t *)hp; - hp += MAP_HEADER_SIZE; - mp->thing_word = MAP_HEADER; + hp += MAP_HEADER_FLATMAP_SZ; + mp->thing_word = MAP_HEADER_FLATMAP; mp->keys = make_tuple(kp-1); old_vals = flatmap_get_values(old_mp); @@ -6906,7 +6915,7 @@ update_map_exact(Process* p, Eterm* reg, Eterm map, BeamInstr* I) * Allocate the exact heap space needed. 
*/ - need = num_old + MAP_HEADER_SIZE; + need = num_old + MAP_HEADER_FLATMAP_SZ; if (HeapWordsLeft(p) < need) { Uint live = Arg(3); reg[live] = map; @@ -6927,8 +6936,8 @@ update_map_exact(Process* p, Eterm* reg, Eterm map, BeamInstr* I) res = make_flatmap(hp); mp = (flatmap_t *)hp; - hp += MAP_HEADER_SIZE; - mp->thing_word = MAP_HEADER; + hp += MAP_HEADER_FLATMAP_SZ; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = num_old; mp->keys = old_mp->keys; diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c index cc20ec7440..022150da55 100644 --- a/erts/emulator/beam/bif.c +++ b/erts/emulator/beam/bif.c @@ -2930,7 +2930,7 @@ static int do_list_to_integer(Process *p, Eterm orig_list, Uint ui = 0; int skip = 0; int neg = 0; - int n = 0; + Sint n = 0; int m; int lg2; Eterm res; @@ -3010,7 +3010,9 @@ static int do_list_to_integer(Process *p, Eterm orig_list, else i = (Sint)ui; res = make_small(i); } else { - lg2 = (n+1)*230/69+1; + /* Convert from log10 to log2 by multiplying with 1/log10(2)=3.3219 + which we round up to (3 + 1/3) */ + lg2 = (n+1)*3 + (n+1)/3 + 1; m = (lg2+D_EXP-1)/D_EXP; /* number of digits */ m = BIG_NEED_SIZE(m); /* number of words + thing */ diff --git a/erts/emulator/beam/copy.c b/erts/emulator/beam/copy.c index 027b85b079..4d12dae787 100644 --- a/erts/emulator/beam/copy.c +++ b/erts/emulator/beam/copy.c @@ -127,8 +127,25 @@ Uint size_object(Eterm obj) obj = *bptr; break; } - case HASHMAP_SUBTAG: + case MAP_SUBTAG: switch (MAP_HEADER_TYPE(hdr)) { + case MAP_HEADER_TAG_FLATMAP_HEAD : + { + Uint n; + flatmap_t *mp; + mp = (flatmap_t*)flatmap_val_rel(obj,base); + ptr = (Eterm *)mp; + n = flatmap_get_size(mp) + 1; + sum += n + 2; + ptr += 2; /* hdr + size words */ + while (n--) { + obj = *ptr++; + if (!IS_CONST(obj)) { + ESTACK_PUSH(s, obj); + } + } + goto pop_next; + } case MAP_HEADER_TAG_HAMT_HEAD_BITMAP : case MAP_HEADER_TAG_HAMT_HEAD_ARRAY : case MAP_HEADER_TAG_HAMT_NODE_BITMAP : @@ -183,25 +200,7 @@ Uint size_object(Eterm obj) goto pop_next; } break; - case MAP_SUBTAG: - { - Uint n; - flatmap_t *mp; - mp = (flatmap_t*)flatmap_val_rel(obj,base); - ptr = (Eterm *)mp; - n = flatmap_get_size(mp) + 1; - sum += n + 2; - ptr += 2; /* hdr + size words */ - while (n--) { - obj = *ptr++; - if (!IS_CONST(obj)) { - ESTACK_PUSH(s, obj); - } - } - goto pop_next; - } - break; - case BIN_MATCHSTATE_SUBTAG: + case BIN_MATCHSTATE_SUBTAG: erl_exit(ERTS_ABORT_EXIT, "size_object: matchstate term not allowed"); default: @@ -369,15 +368,6 @@ Eterm copy_struct(Eterm obj, Uint sz, Eterm** hpp, ErlOffHeap* off_heap) } } break; - case MAP_SUBTAG: - { - i = flatmap_get_size(objp) + 3; - *argp = make_flatmap_rel(htop, dst_base); - while (i--) { - *htop++ = *objp++; - } - } - break; case REFC_BINARY_SUBTAG: { ProcBin* pb; @@ -502,9 +492,16 @@ Eterm copy_struct(Eterm obj, Uint sz, Eterm** hpp, ErlOffHeap* off_heap) *argp = make_external_rel(tp, dst_base); } break; - case HASHMAP_SUBTAG: + case MAP_SUBTAG: tp = htop; switch (MAP_HEADER_TYPE(hdr)) { + case MAP_HEADER_TAG_FLATMAP_HEAD : + i = flatmap_get_size(objp) + 3; + *argp = make_flatmap_rel(htop, dst_base); + while (i--) { + *htop++ = *objp++; + } + break; case MAP_HEADER_TAG_HAMT_HEAD_BITMAP : case MAP_HEADER_TAG_HAMT_HEAD_ARRAY : *htop++ = *objp++; diff --git a/erts/emulator/beam/dist.c b/erts/emulator/beam/dist.c index bfecac1612..32f3cda4f5 100644 --- a/erts/emulator/beam/dist.c +++ b/erts/emulator/beam/dist.c @@ -712,7 +712,7 @@ void erts_dsend_context_dtor(Binary* ctx_bin) ErtsSendContext* ctx = ERTS_MAGIC_BIN_DATA(ctx_bin); switch 
(ctx->dss.phase) { case ERTS_DSIG_SEND_PHASE_MSG_SIZE: - DESTROY_SAVED_ESTACK(&ctx->dss.u.sc.estack); + DESTROY_SAVED_WSTACK(&ctx->dss.u.sc.wstack); break; case ERTS_DSIG_SEND_PHASE_MSG_ENCODE: DESTROY_SAVED_WSTACK(&ctx->dss.u.ec.wstack); @@ -1800,7 +1800,7 @@ erts_dsig_send(ErtsDSigData *dsdp, struct erts_dsig_send_context* ctx) erts_encode_dist_ext_size(ctx->ctl, ctx->flags, ctx->acmp, &ctx->data_size); if (is_value(ctx->msg)) { - ctx->u.sc.estack.start = NULL; + ctx->u.sc.wstack.wstart = NULL; ctx->u.sc.flags = ctx->flags; ctx->u.sc.level = 0; ctx->phase = ERTS_DSIG_SEND_PHASE_MSG_SIZE; diff --git a/erts/emulator/beam/dist.h b/erts/emulator/beam/dist.h index 2a2ba0c83f..cd2cc0ef4a 100644 --- a/erts/emulator/beam/dist.h +++ b/erts/emulator/beam/dist.h @@ -282,7 +282,7 @@ typedef struct TTBSizeContext_ { int level; Uint result; Eterm obj; - ErtsEStack estack; + ErtsWStack wstack; } TTBSizeContext; typedef struct TTBEncodeContext_ { diff --git a/erts/emulator/beam/erl_bif_info.c b/erts/emulator/beam/erl_bif_info.c index b2658a1fd6..fa7de23f00 100644 --- a/erts/emulator/beam/erl_bif_info.c +++ b/erts/emulator/beam/erl_bif_info.c @@ -3747,6 +3747,20 @@ BIF_RETTYPE erts_internal_is_system_process_1(BIF_ALIST_1) static erts_smp_atomic_t hipe_test_reschedule_flag; +#if defined(VALGRIND) && defined(__GNUC__) +/* Force noinline for valgrind suppression */ +static void broken_halt_test(Eterm bif_arg_2) __attribute__((noinline)); +#endif + +static void broken_halt_test(Eterm bif_arg_2) +{ + /* Ugly ugly code used by bif_SUITE:erlang_halt/1 */ +#if defined(ERTS_HAVE_TRY_CATCH) + erts_get_scheduler_data()->run_queue = NULL; +#endif + erl_exit(ERTS_DUMP_EXIT, "%T", bif_arg_2); +} + BIF_RETTYPE erts_debug_set_internal_state_2(BIF_ALIST_2) { @@ -4040,11 +4054,7 @@ BIF_RETTYPE erts_debug_set_internal_state_2(BIF_ALIST_2) } } else if (ERTS_IS_ATOM_STR("broken_halt", BIF_ARG_1)) { - /* Ugly ugly code used by bif_SUITE:erlang_halt/1 */ -#if defined(ERTS_HAVE_TRY_CATCH) - erts_get_scheduler_data()->run_queue = NULL; -#endif - erl_exit(ERTS_DUMP_EXIT, "%T", BIF_ARG_2); + broken_halt_test(BIF_ARG_2); } else if (ERTS_IS_ATOM_STR("unique_monotonic_integer_state", BIF_ARG_1)) { int res = erts_debug_set_unique_monotonic_integer_state(BIF_ARG_2); diff --git a/erts/emulator/beam/erl_bif_lists.c b/erts/emulator/beam/erl_bif_lists.c index 820ed2385d..e006d57124 100644 --- a/erts/emulator/beam/erl_bif_lists.c +++ b/erts/emulator/beam/erl_bif_lists.c @@ -390,7 +390,7 @@ keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List) Eterm *tuple_ptr = tuple_val(term); if (pos <= arityval(*tuple_ptr)) { Eterm element = tuple_ptr[pos]; - if (CMP(Key, element) == 0) { + if (CMP_EQ(Key, element)) { return term; } } diff --git a/erts/emulator/beam/erl_db_hash.c b/erts/emulator/beam/erl_db_hash.c index 045c8ae135..383ee7c430 100644 --- a/erts/emulator/beam/erl_db_hash.c +++ b/erts/emulator/beam/erl_db_hash.c @@ -2471,10 +2471,10 @@ static int alloc_seg(DbTableHash *tb) */ static int free_seg(DbTableHash *tb, int free_records) { - int seg_ix = (tb->nslots >> SEGSZ_EXP) - 1; + const int seg_ix = (tb->nslots >> SEGSZ_EXP) - 1; + struct segment** const segtab = SEGTAB(tb); + struct ext_segment* const top = (struct ext_segment*) segtab[seg_ix]; int bytes; - struct segment** segtab = SEGTAB(tb); - struct ext_segment* top = (struct ext_segment*) segtab[seg_ix]; int nrecords = 0; ASSERT(top != NULL); @@ -2537,7 +2537,7 @@ static int free_seg(DbTableHash *tb, int free_records) (void*)top, bytes); #ifdef DEBUG if (seg_ix > 0) { - if 
(seg_ix < tb->nsegs) SEGTAB(tb)[seg_ix] = NULL; + segtab[seg_ix] = NULL; } else { SET_SEGTAB(tb, NULL); } diff --git a/erts/emulator/beam/erl_db_tree.c b/erts/emulator/beam/erl_db_tree.c index 577da35b75..d90af46659 100644 --- a/erts/emulator/beam/erl_db_tree.c +++ b/erts/emulator/beam/erl_db_tree.c @@ -1116,7 +1116,7 @@ static int db_select_tree(Process *p, DbTable *tbl, sc.all_objects = mpi.all_objects; if (!mpi.got_partial && mpi.some_limitation && - CMP(mpi.least,mpi.most) == 0) { + CMP_EQ(mpi.least,mpi.most)) { doit_select(tb,mpi.save_term,&sc,0 /* direction doesn't matter */); RET_TO_BIF(sc.accum,DB_ERROR_NONE); } @@ -1324,7 +1324,7 @@ static int db_select_count_tree(Process *p, DbTable *tbl, sc.all_objects = mpi.all_objects; if (!mpi.got_partial && mpi.some_limitation && - CMP(mpi.least,mpi.most) == 0) { + CMP_EQ(mpi.least,mpi.most)) { doit_select_count(tb,mpi.save_term,&sc,0 /* dummy */); RET_TO_BIF(erts_make_integer(sc.got,p),DB_ERROR_NONE); } @@ -1429,7 +1429,7 @@ static int db_select_chunk_tree(Process *p, DbTable *tbl, sc.all_objects = mpi.all_objects; if (!mpi.got_partial && mpi.some_limitation && - CMP(mpi.least,mpi.most) == 0) { + CMP_EQ(mpi.least,mpi.most)) { doit_select(tb,mpi.save_term,&sc, 0 /* direction doesn't matter */); if (sc.accum != NIL) { hp=HAlloc(p, 3); @@ -1673,7 +1673,7 @@ static int db_select_delete_tree(Process *p, DbTable *tbl, sc.mp = mpi.mp; if (!mpi.got_partial && mpi.some_limitation && - CMP(mpi.least,mpi.most) == 0) { + CMP_EQ(mpi.least,mpi.most)) { doit_select_delete(tb,mpi.save_term,&sc, 0 /* direction doesn't matter */); RET_TO_BIF(erts_make_integer(sc.accum,p),DB_ERROR_NONE); diff --git a/erts/emulator/beam/erl_db_util.c b/erts/emulator/beam/erl_db_util.c index 9d699d4b22..0bf562d937 100644 --- a/erts/emulator/beam/erl_db_util.c +++ b/erts/emulator/beam/erl_db_util.c @@ -2153,16 +2153,16 @@ restart: break; case matchMkFlatMap: n = *pc++; - ehp = HAllocX(build_proc, 1 + MAP_HEADER_SIZE + n, HEAP_XTRA); + ehp = HAllocX(build_proc, 1 + MAP_HEADER_FLATMAP_SZ + n, HEAP_XTRA); t = *ehp++ = *--esp; { flatmap_t *m = (flatmap_t *)ehp; - m->thing_word = MAP_HEADER; + m->thing_word = MAP_HEADER_FLATMAP; m->size = n; m->keys = t; } t = make_flatmap(ehp); - ehp += MAP_HEADER_SIZE; + ehp += MAP_HEADER_FLATMAP_SZ; while (n--) { *ehp++ = *--esp; } @@ -3540,14 +3540,10 @@ static DMCRet dmc_one_term(DMCContext *context, DMC_PUSH(*stack, c); break; case (_TAG_HEADER_MAP >> _TAG_PRIMARY_SIZE): - n = flatmap_get_size(flatmap_val(c)); - DMC_PUSH(*text, matchPushM); - ++(context->stack_used); - DMC_PUSH(*text, n); - DMC_PUSH(*stack, c); - break; - case (_TAG_HEADER_HASHMAP >> _TAG_PRIMARY_SIZE): - n = hashmap_size(c); + if (is_flatmap(c)) + n = flatmap_get_size(flatmap_val(c)); + else + n = hashmap_size(c); DMC_PUSH(*text, matchPushM); ++(context->stack_used); DMC_PUSH(*text, n); diff --git a/erts/emulator/beam/erl_drv_thread.c b/erts/emulator/beam/erl_drv_thread.c index 31b05d22af..240faa823d 100644 --- a/erts/emulator/beam/erl_drv_thread.c +++ b/erts/emulator/beam/erl_drv_thread.c @@ -604,10 +604,12 @@ erl_drv_thread_create(char *name, ethr_thr_opts ethr_opts = ETHR_THR_OPTS_DEFAULT_INITER; ethr_thr_opts *use_opts; - if (!opts) + if (!opts && !name) use_opts = NULL; else { - ethr_opts.suggested_stack_size = opts->suggested_stack_size; + if(opts) + ethr_opts.suggested_stack_size = opts->suggested_stack_size; + ethr_opts.name = name; use_opts = ðr_opts; } diff --git a/erts/emulator/beam/erl_gc.h b/erts/emulator/beam/erl_gc.h index 8afcb060a1..bd6dcc9078 100644 --- 
a/erts/emulator/beam/erl_gc.h +++ b/erts/emulator/beam/erl_gc.h @@ -55,9 +55,10 @@ do { \ nelts = header_arity(HDR); \ switch ((HDR) & _HEADER_SUBTAG_MASK) { \ case SUB_BINARY_SUBTAG: nelts++; break; \ - case MAP_SUBTAG: nelts+=flatmap_get_size(PTR) + 1; break; \ - case HASHMAP_SUBTAG: nelts=hashmap_bitcount(MAP_HEADER_VAL(HDR)); \ - nelts += is_hashmap_header_head(HDR) ? 1 : 0; break; \ + case MAP_SUBTAG: \ + if (is_flatmap_header(HDR)) nelts+=flatmap_get_size(PTR) + 1; \ + else nelts += hashmap_bitcount(MAP_HEADER_VAL(HDR)); \ + break; \ case FUN_SUBTAG: nelts+=((ErlFunThing*)(PTR))->num_free+1; break; \ } \ gval = make_boxed(HTOP); \ diff --git a/erts/emulator/beam/erl_map.c b/erts/emulator/beam/erl_map.c index 35446501d4..d1df737723 100644 --- a/erts/emulator/beam/erl_map.c +++ b/erts/emulator/beam/erl_map.c @@ -89,7 +89,7 @@ static Eterm flatmap_from_validated_list(Process *p, Eterm list, Uint size); static Eterm hashmap_from_validated_list(Process *p, Eterm list, Uint size); static Eterm hashmap_from_unsorted_array(ErtsHeapFactory*, hxnode_t *hxns, Uint n, int reject_dupkeys); static Eterm hashmap_from_sorted_unique_array(ErtsHeapFactory*, hxnode_t *hxns, Uint n, int is_root); -static Eterm hashmap_from_chunked_array(ErtsHeapFactory*, hxnode_t *hxns, Uint n, int is_root); +static Eterm hashmap_from_chunked_array(ErtsHeapFactory*, hxnode_t *hxns, Uint n, Uint size, int is_root); static Eterm hashmap_info(Process *p, Eterm node); static Eterm hashmap_bld_tuple_uint(Uint **hpp, Uint *szp, Uint n, Uint nums[]); static int hxnodecmp(hxnode_t* a, hxnode_t* b); @@ -303,10 +303,10 @@ static Eterm flatmap_from_validated_list(Process *p, Eterm list, Uint size) { hp += size; mp = (flatmap_t*)hp; res = make_flatmap(mp); - hp += MAP_HEADER_SIZE; + hp += MAP_HEADER_FLATMAP_SZ; vs = hp; - mp->thing_word = MAP_HEADER; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = size; /* set later, might shrink*/ mp->keys = keys; @@ -392,12 +392,11 @@ static Eterm hashmap_from_validated_list(Process *p, Eterm list, Uint size) { Eterm item = list; Eterm *hp; Eterm *kv, res; - Eterm tmp[2]; Uint32 sw, hx; Uint ix = 0; hxnode_t *hxns; ErtsHeapFactory factory; - + DeclareTmpHeap(tmp,2,p); ASSERT(size > 0); hp = HAlloc(p, (2 * size)); @@ -405,6 +404,7 @@ static Eterm hashmap_from_validated_list(Process *p, Eterm list, Uint size) { /* create tmp hx values and leaf ptrs */ hxns = (hxnode_t *)erts_alloc(ERTS_ALC_T_TMP, size * sizeof(hxnode_t)); + UseTmpHeap(2,p); while(is_list(item)) { res = CAR(list_val(item)); kv = tuple_val(res); @@ -417,6 +417,7 @@ static Eterm hashmap_from_validated_list(Process *p, Eterm list, Uint size) { ix++; item = CDR(list_val(item)); } + UnUseTmpHeap(2,p); factory.p = p; res = hashmap_from_unsorted_array(&factory, hxns, size, 0); @@ -438,10 +439,10 @@ static Eterm hashmap_from_validated_list(Process *p, Eterm list, Uint size) { ks = hp; hp += n; mp = (flatmap_t*)hp; - hp += MAP_HEADER_SIZE; + hp += MAP_HEADER_FLATMAP_SZ; vs = hp; - mp->thing_word = MAP_HEADER; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = n; mp->keys = keys; @@ -567,14 +568,14 @@ static Eterm hashmap_from_unsorted_array(ErtsHeapFactory* factory, while(ix < jx) { lx = ix; - while(ix < jx && EQ(CAR(list_val(hxns[ix].val)), CAR(list_val(hxns[lx].val)))) { + while(++ix < jx && EQ(CAR(list_val(hxns[ix].val)), + CAR(list_val(hxns[lx].val)))) { if (reject_dupkeys) return THE_NON_VALUE; if (hxns[ix].i > hxns[lx].i) { lx = ix; } - ix++; } hxns[cx].hx = hxns[lx].hx; hxns[cx].val = hxns[lx].val; @@ -625,9 +626,9 @@ static Eterm 
hashmap_from_sorted_unique_array(ErtsHeapFactory* factory, Uint i,ix,jx,elems; Uint32 sw, hx; Eterm val; - Eterm th[2]; hxnode_t *tmp; - + DeclareTmpHeapNoproc(th,2); + UseTmpHeapNoproc(2); ASSERT(lvl < 32); ix = 0; elems = 1; @@ -661,16 +662,17 @@ static Eterm hashmap_from_sorted_unique_array(ErtsHeapFactory* factory, ix++; } - res = hashmap_from_chunked_array(factory, hxns, elems, !lvl); + res = hashmap_from_chunked_array(factory, hxns, elems, n, !lvl); ERTS_FACTORY_HOLE_CHECK(factory); + UnUseTmpHeapNoproc(2); return res; } #define HALLOC_EXTRA 200 -static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, - hxnode_t *hxns, Uint n, int is_root) { +static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, hxnode_t *hxns, Uint n, + Uint size, int is_root) { Uint ix, d, dn, dc, slot, elems; Uint32 v, vp, vn, hdr; Uint bp, sz; @@ -710,8 +712,7 @@ static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, } } - ESTACK_PUSH(stack, res); - ESTACK_PUSH(stack, 1 << slot); + ESTACK_PUSH2(stack,res,1 << slot); /* all of the other nodes .. */ elems = n - 2; /* remove first and last elements */ @@ -755,14 +756,12 @@ static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, slot = maskval(v, d); bp = 1 << slot; /* no more collisions */ - ESTACK_PUSH(stack,res); - ESTACK_PUSH(stack,bp); + ESTACK_PUSH2(stack,res,bp); } else if (d == dn) { /* no collisions at all */ slot = maskval(v, d); bp = 1 << slot; - ESTACK_PUSH(stack,res); - ESTACK_PUSH(stack,hdr | bp); + ESTACK_PUSH2(stack,res,hdr | bp); } else { /* dn < n, we have a drop and we are done * build nodes and subtree */ @@ -786,8 +785,7 @@ static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, hdr = ESTACK_POP(stack); d--; } - ESTACK_PUSH(stack, res); - ESTACK_PUSH(stack, hdr); + ESTACK_PUSH2(stack,res,hdr); } vp = v; @@ -844,7 +842,7 @@ static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, if (is_root) { *hp++ = (hdr == 0xffff) ? MAP_HEADER_HAMT_HEAD_ARRAY : MAP_HEADER_HAMT_HEAD_BITMAP(hdr); - *hp++ = n; + *hp++ = size; } else { *hp++ = MAP_HEADER_HAMT_NODE_BITMAP(hdr); } @@ -862,7 +860,12 @@ static Eterm hashmap_from_chunked_array(ErtsHeapFactory *factory, #undef HALLOC_EXTRA static int hxnodecmpkey(hxnode_t *a, hxnode_t *b) { - return CMP_TERM(CAR(list_val(a->val)), CAR(list_val(b->val))); + Sint c = CMP_TERM(CAR(list_val(a->val)), CAR(list_val(b->val))); +#if ERTS_SIZEOF_ETERM <= SIZEOF_INT + return c; +#else + return c > 0 ? 1 : (c < 0 ? 
-1 : 0); +#endif } static int hxnodecmp(hxnode_t *a, hxnode_t *b) { @@ -944,16 +947,16 @@ static Eterm flatmap_merge(Process *p, Eterm nodeA, Eterm nodeB) { n1 = flatmap_get_size(mp1); n2 = flatmap_get_size(mp2); - need = MAP_HEADER_SIZE + 1 + 2*(n1 + n2); + need = MAP_HEADER_FLATMAP_SZ + 1 + 2 * (n1 + n2); hp = HAlloc(p, need); thp = hp; tup = make_tuple(thp); ks = hp + 1; hp += 1 + n1 + n2; - mp_new = (flatmap_t*)hp; hp += MAP_HEADER_SIZE; + mp_new = (flatmap_t*)hp; hp += MAP_HEADER_FLATMAP_SZ; vs = hp; hp += n1 + n2; - mp_new->thing_word = MAP_HEADER; + mp_new->thing_word = MAP_HEADER_FLATMAP; mp_new->size = 0; mp_new->keys = tup; @@ -1008,6 +1011,7 @@ static Eterm flatmap_merge(Process *p, Eterm nodeA, Eterm nodeB) { n = n1 + n2 - unused_size; *thp = make_arityval(n); + mp_new->size = n; /* Reshape map to a hashmap if the map exceeds the limit */ @@ -1043,8 +1047,6 @@ static Eterm flatmap_merge(Process *p, Eterm nodeA, Eterm nodeB) { return res; } - mp_new->size = n; - return make_flatmap(mp_new); } @@ -1104,12 +1106,13 @@ static Eterm hashmap_merge(Process *p, Eterm nodeA, Eterm nodeB) { struct HashmapMergePStackType* sp = PSTACK_PUSH(s); Eterm *hp, *nhp; Eterm hdrA, hdrB; - Eterm th[2]; Uint32 ahx, bhx; Uint size; /* total key-value counter */ int keepA = 0; - unsigned lvl = 0; + unsigned int lvl = 0; + DeclareTmpHeap(th,2,p); Eterm res = THE_NON_VALUE; + UseTmpHeap(2,p); /* * Strategy: Do depth-first traversal of both trees (at the same time) @@ -1302,6 +1305,7 @@ recurse: res = make_boxed(nhp); } PSTACK_DESTROY(s); + UnUseTmpHeap(2,p); return res; } @@ -1320,8 +1324,9 @@ static int hash_cmp(Uint32 ha, Uint32 hb) int hashmap_key_hash_cmp(Eterm* ap, Eterm* bp) { - Eterm th[2]; - unsigned lvl = 0; + unsigned int lvl = 0; + DeclareTmpHeapNoproc(th,2); + UseTmpHeapNoproc(2); if (ap && bp) { ASSERT(CMP_TERM(CAR(ap), CAR(bp)) != 0); @@ -1329,11 +1334,14 @@ int hashmap_key_hash_cmp(Eterm* ap, Eterm* bp) Uint32 ha = hashmap_restore_hash(th, lvl, CAR(ap)); Uint32 hb = hashmap_restore_hash(th, lvl, CAR(bp)); int cmp = hash_cmp(ha, hb); - if (cmp) + if (cmp) { + UnUseTmpHeapNoproc(2); return cmp; + } lvl += 8; } } + UnUseTmpHeapNoproc(2); return ap ? 
-1 : 1; } @@ -1344,12 +1352,12 @@ BIF_RETTYPE maps_new_0(BIF_ALIST_0) { Eterm tup; flatmap_t *mp; - hp = HAlloc(BIF_P, (MAP_HEADER_SIZE + 1)); + hp = HAlloc(BIF_P, (MAP_HEADER_FLATMAP_SZ + 1)); tup = make_tuple(hp); *hp++ = make_arityval(0); mp = (flatmap_t*)hp; - mp->thing_word = MAP_HEADER; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = 0; mp->keys = tup; @@ -1401,7 +1409,7 @@ int erts_maps_remove(Process *p, Eterm key, Eterm map, Eterm *res) { *thp++ = make_arityval(n - 1); *res = make_flatmap(mhp); - *mhp++ = MAP_HEADER; + *mhp++ = MAP_HEADER_FLATMAP; *mhp++ = n - 1; *mhp++ = tup; @@ -1478,9 +1486,9 @@ int erts_maps_update(Process *p, Eterm key, Eterm value, Eterm map, Eterm *res) * assume key-tuple will be intact */ - hp = HAlloc(p, MAP_HEADER_SIZE + n); + hp = HAlloc(p, MAP_HEADER_FLATMAP_SZ + n); shp = hp; - *hp++ = MAP_HEADER; + *hp++ = MAP_HEADER_FLATMAP; *hp++ = n; *hp++ = mp->keys; @@ -1502,7 +1510,7 @@ int erts_maps_update(Process *p, Eterm key, Eterm value, Eterm map, Eterm *res) } } - HRelease(p, shp + MAP_HEADER_SIZE + n, shp); + HRelease(p, shp + MAP_HEADER_FLATMAP_SZ + n, shp); return 0; found_key: @@ -1536,12 +1544,12 @@ Eterm erts_maps_put(Process *p, Eterm key, Eterm value, Eterm map) { n = flatmap_get_size(mp); if (n == 0) { - hp = HAlloc(p, MAP_HEADER_SIZE + 1 + 2); + hp = HAlloc(p, MAP_HEADER_FLATMAP_SZ + 1 + 2); tup = make_tuple(hp); *hp++ = make_arityval(1); *hp++ = key; res = make_flatmap(hp); - *hp++ = MAP_HEADER; + *hp++ = MAP_HEADER_FLATMAP; *hp++ = 1; *hp++ = tup; *hp++ = value; @@ -1556,10 +1564,10 @@ Eterm erts_maps_put(Process *p, Eterm key, Eterm value, Eterm map) { * assume key-tuple will be intact */ - hp = HAlloc(p, MAP_HEADER_SIZE + n); + hp = HAlloc(p, MAP_HEADER_FLATMAP_SZ + n); shp = hp; /* save hp, used if optimistic update fails */ res = make_flatmap(hp); - *hp++ = MAP_HEADER; + *hp++ = MAP_HEADER_FLATMAP; *hp++ = n; *hp++ = mp->keys; @@ -1591,7 +1599,7 @@ Eterm erts_maps_put(Process *p, Eterm key, Eterm value, Eterm map) { /* the map will grow */ if (n >= MAP_SMALL_MAP_LIMIT) { - HRelease(p, shp + MAP_HEADER_SIZE + n, shp); + HRelease(p, shp + MAP_HEADER_FLATMAP_SZ + n, shp); ks = flatmap_get_keys(mp); vs = flatmap_get_values(mp); @@ -1608,7 +1616,7 @@ Eterm erts_maps_put(Process *p, Eterm key, Eterm value, Eterm map) { hp = HAlloc(p, 3 + n + 1); res = make_flatmap(hp); - *hp++ = MAP_HEADER; + *hp++ = MAP_HEADER_FLATMAP; *hp++ = n + 1; *hp++ = tup; @@ -1906,13 +1914,14 @@ int erts_hashmap_insert_down(Uint32 hx, Eterm key, Eterm node, Uint *sz, Uint *update_size, ErtsEStack *sp, int is_update) { Eterm *ptr; Eterm hdr, ckey; - Eterm th[2]; Uint32 ix, cix, bp, hval, chx; Uint slot, lvl = 0, clvl; Uint size = 0, n = 0; + DeclareTmpHeapNoproc(th,2); *update_size = 1; + UseTmpHeapNoproc(2); for (;;) { switch(primary_tag(node)) { case TAG_PRIMARY_LIST: /* LEAF NODE [K|V] */ @@ -1923,6 +1932,7 @@ int erts_hashmap_insert_down(Uint32 hx, Eterm key, Eterm node, Uint *sz, goto unroll; } if (is_update) { + UnUseTmpHeapNoproc(2); return 0; } goto insert_subnodes; @@ -1942,27 +1952,32 @@ int erts_hashmap_insert_down(Uint32 hx, Eterm key, Eterm node, Uint *sz, case HAMT_SUBTAG_NODE_BITMAP: hval = MAP_HEADER_VAL(hdr); ix = hashmap_index(hx); - bp = 1 << ix; - slot = hashmap_bitcount(hval & (bp - 1)); - n = hashmap_bitcount(hval); - - ESTACK_PUSH(*sp, n); - ESTACK_PUSH3(*sp, bp, slot, node); + bp = 1 << ix; + if (hval == 0xffff) { + slot = ix; + n = 16; + } else { + slot = hashmap_bitcount(hval & (bp - 1)); + n = hashmap_bitcount(hval); + } + + 
ESTACK_PUSH4(*sp, n, bp, slot, node); + + if (!(bp & hval)) { /* not occupied */ + if (is_update) { + UnUseTmpHeapNoproc(2); + return 0; + } + size += HAMT_NODE_BITMAP_SZ(n+1); + goto unroll; + } + + hx = hashmap_shift_hash(th,hx,lvl,key); + node = ptr[slot+1]; + ASSERT(HAMT_NODE_BITMAP_SZ(n) <= 17); + size += HAMT_NODE_BITMAP_SZ(n); + break; - /* occupied */ - if (bp & hval) { - hx = hashmap_shift_hash(th,hx,lvl,key); - node = ptr[slot+1]; - ASSERT(HAMT_NODE_BITMAP_SZ(n) <= 17); - size += HAMT_NODE_BITMAP_SZ(n); - break; - } - /* not occupied */ - if (is_update) { - return 0; - } - size += HAMT_NODE_BITMAP_SZ(n+1); - goto unroll; case HAMT_SUBTAG_HEAD_BITMAP: hval = MAP_HEADER_VAL(hdr); ix = hashmap_index(hx); @@ -1970,8 +1985,7 @@ int erts_hashmap_insert_down(Uint32 hx, Eterm key, Eterm node, Uint *sz, slot = hashmap_bitcount(hval & (bp - 1)); n = hashmap_bitcount(hval); - ESTACK_PUSH(*sp, n); - ESTACK_PUSH3(*sp, bp, slot, node); + ESTACK_PUSH4(*sp, n, bp, slot, node); /* occupied */ if (bp & hval) { @@ -1983,6 +1997,7 @@ int erts_hashmap_insert_down(Uint32 hx, Eterm key, Eterm node, Uint *sz, } /* not occupied */ if (is_update) { + UnUseTmpHeapNoproc(2); return 0; } size += HAMT_HEAD_BITMAP_SZ(n+1); @@ -2005,8 +2020,7 @@ insert_subnodes: cix = hashmap_index(chx); while (cix == ix) { - ESTACK_PUSH(*sp, 0); - ESTACK_PUSH3(*sp, 1 << ix, 0, MAP_HEADER_HAMT_NODE_BITMAP(0)); + ESTACK_PUSH4(*sp, 0, 1 << ix, 0, MAP_HEADER_HAMT_NODE_BITMAP(0)); size += HAMT_NODE_BITMAP_SZ(1); hx = hashmap_shift_hash(th,hx,lvl,key); chx = hashmap_shift_hash(th,chx,clvl,ckey); @@ -2017,6 +2031,7 @@ insert_subnodes: unroll: *sz = size + /* res cons */ 2; + UnUseTmpHeapNoproc(2); return 1; } @@ -2159,13 +2174,14 @@ static Eterm hashmap_values(Process* p, Eterm node) { static Eterm hashmap_delete(Process *p, Uint32 hx, Eterm key, Eterm map) { Eterm *hp = NULL, *nhp = NULL, *hp_end = NULL; - Eterm th[2]; Eterm *ptr; Eterm hdr, res = map, node = map; Uint32 ix, bp, hval; Uint slot, lvl = 0; Uint size = 0, n = 0; DECLARE_ESTACK(stack); + DeclareTmpHeapNoproc(th,2); + UseTmpHeapNoproc(2); for (;;) { switch(primary_tag(node)) { @@ -2191,22 +2207,25 @@ static Eterm hashmap_delete(Process *p, Uint32 hx, Eterm key, Eterm map) { hval = MAP_HEADER_VAL(hdr); ix = hashmap_index(hx); bp = 1 << ix; - slot = hashmap_bitcount(hval & (bp - 1)); - n = hashmap_bitcount(hval); - - ESTACK_PUSH(stack, n); - ESTACK_PUSH3(stack, bp, slot, node); + if (hval == 0xffff) { + slot = ix; + n = 16; + } else if (bp & hval) { + slot = hashmap_bitcount(hval & (bp - 1)); + n = hashmap_bitcount(hval); + } else { + /* not occupied */ + goto not_found; + } + + ESTACK_PUSH4(stack, n, bp, slot, node); + + hx = hashmap_shift_hash(th,hx,lvl,key); + node = ptr[slot+1]; + ASSERT(HAMT_NODE_BITMAP_SZ(n) <= 17); + size += HAMT_NODE_BITMAP_SZ(n); + break; - /* occupied */ - if (bp & hval) { - hx = hashmap_shift_hash(th,hx,lvl,key); - node = ptr[slot+1]; - ASSERT(HAMT_NODE_BITMAP_SZ(n) <= 17); - size += HAMT_NODE_BITMAP_SZ(n); - break; - } - /* not occupied */ - goto not_found; case HAMT_SUBTAG_HEAD_BITMAP: hval = MAP_HEADER_VAL(hdr); ix = hashmap_index(hx); @@ -2214,8 +2233,7 @@ static Eterm hashmap_delete(Process *p, Uint32 hx, Eterm key, Eterm map) { slot = hashmap_bitcount(hval & (bp - 1)); n = hashmap_bitcount(hval); - ESTACK_PUSH(stack, n); - ESTACK_PUSH3(stack, bp, slot, node); + ESTACK_PUSH4(stack, n, bp, slot, node); /* occupied */ if (bp & hval) { @@ -2258,10 +2276,10 @@ unroll: ks = hp; hp += n; mp = (flatmap_t*)hp; - hp += MAP_HEADER_SIZE; + hp += 
MAP_HEADER_FLATMAP_SZ; vs = hp; - mp->thing_word = MAP_HEADER; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = n; mp->keys = keys; @@ -2278,6 +2296,7 @@ unroll: erts_validate_and_sort_flatmap(mp); DESTROY_WSTACK(wstack); + UnUseTmpHeapNoproc(2); return make_flatmap(mp); } @@ -2418,6 +2437,7 @@ not_found: DESTROY_ESTACK(stack); ERTS_VERIFY_UNUSED_TEMP_ALLOC(p); ERTS_HOLE_CHECK(p); + UnUseTmpHeapNoproc(2); return res; } diff --git a/erts/emulator/beam/erl_map.h b/erts/emulator/beam/erl_map.h index 9fc1a72b68..2cc6768bfc 100644 --- a/erts/emulator/beam/erl_map.h +++ b/erts/emulator/beam/erl_map.h @@ -55,9 +55,9 @@ typedef struct flatmap_s { /* the head-node is a bitmap or array with an untagged size */ -#define hashmap_size(x) (((hashmap_head_t*) hashmap_val(x))->size) +#define hashmap_size(x) (((hashmap_head_t*) hashmap_val(x))->size) #define hashmap_size_rel(RTERM, BASE) hashmap_size(rterm2wterm(RTERM, BASE)) -#define hashmap_make_hash(Key) make_internal_hash(Key) +#define hashmap_make_hash(Key) make_internal_hash(Key) #define hashmap_restore_hash(Heap,Lvl,Key) \ (((Lvl) < 8) ? hashmap_make_hash(Key) >> (4*(Lvl)) : hashmap_make_hash(CONS(Heap, make_small((Lvl)>>3), (Key))) >> (4*((Lvl) & 7))) @@ -66,27 +66,15 @@ typedef struct flatmap_s { /* erl_term.h stuff */ -#define make_flatmap(x) make_boxed((Eterm*)(x)) -#define make_flatmap_rel(x, BASE) make_boxed_rel((Eterm*)(x),(BASE)) -#define is_flatmap(x) (is_boxed((x)) && is_flatmap_header(*boxed_val((x)))) -#define is_flatmap_rel(RTERM,BASE) is_flatmap(rterm2wterm(RTERM,BASE)) -#define is_not_flatmap(x) (!is_flatmap((x))) -#define is_flatmap_header(x) (((x) & (_TAG_HEADER_MASK)) == _TAG_HEADER_MAP) -#define header_is_flatmap(x) ((((x) & (_HEADER_SUBTAG_MASK)) == MAP_SUBTAG)) -#define flatmap_val(x) (_unchecked_boxed_val((x))) -#define flatmap_val_rel(RTERM, BASE) flatmap_val(rterm2wterm(RTERM, BASE)) - -#define flatmap_get_values(x) (((Eterm *)(x)) + 3) -#define flatmap_get_keys(x) (((Eterm *)tuple_val(((flatmap_t *)(x))->keys)) + 1) -#define flatmap_get_size(x) (((flatmap_t*)(x))->size) +#define flatmap_get_values(x) (((Eterm *)(x)) + 3) +#define flatmap_get_keys(x) (((Eterm *)tuple_val(((flatmap_t *)(x))->keys)) + 1) +#define flatmap_get_size(x) (((flatmap_t*)(x))->size) #ifdef DEBUG #define MAP_SMALL_MAP_LIMIT (3) #else #define MAP_SMALL_MAP_LIMIT (32) #endif -#define MAP_HEADER _make_header(1,_TAG_HEADER_MAP) -#define MAP_HEADER_SIZE (sizeof(flatmap_t) / sizeof(Eterm)) struct ErtsWStack_; struct ErtsEStack_; @@ -170,7 +158,10 @@ typedef struct hashmap_head_s { #define is_hashmap_header_head(x) ((MAP_HEADER_TYPE(x) & (0x2))) #define MAKE_MAP_HEADER(Type,Arity,Val) \ - (_make_header(((((Uint16)(Val)) << MAP_HEADER_ARITY_SZ) | (Arity)) << MAP_HEADER_TAG_SZ | (Type) , _TAG_HEADER_HASHMAP)) + (_make_header(((((Uint16)(Val)) << MAP_HEADER_ARITY_SZ) | (Arity)) << MAP_HEADER_TAG_SZ | (Type) , _TAG_HEADER_MAP)) + +#define MAP_HEADER_FLATMAP \ + MAKE_MAP_HEADER(MAP_HEADER_TAG_FLATMAP_HEAD,0x1,0x0) #define MAP_HEADER_HAMT_HEAD_ARRAY \ MAKE_MAP_HEADER(MAP_HEADER_TAG_HAMT_HEAD_ARRAY,0x1,0xffff) @@ -181,15 +172,22 @@ typedef struct hashmap_head_s { #define MAP_HEADER_HAMT_NODE_BITMAP(Bmp) \ MAKE_MAP_HEADER(MAP_HEADER_TAG_HAMT_NODE_BITMAP,0x0,Bmp) -#define HAMT_HEAD_EMPTY_SZ (2) -#define HAMT_HEAD_ARRAY_SZ (18) -#define HAMT_NODE_BITMAP_SZ(n) (1 + n) -#define HAMT_HEAD_BITMAP_SZ(n) (2 + n) +#define MAP_HEADER_FLATMAP_SZ (sizeof(flatmap_t) / sizeof(Eterm)) + +#define HAMT_NODE_ARRAY_SZ (17) +#define HAMT_HEAD_ARRAY_SZ (18) +#define 
HAMT_NODE_BITMAP_SZ(n) (1 + n) +#define HAMT_HEAD_BITMAP_SZ(n) (2 + n) + +/* 2 bits maps tag + 4 bits subtag + 2 ignore bits */ +#define _HEADER_MAP_SUBTAG_MASK (0xfc) +/* 1 bit map tag + 1 ignore bit + 4 bits subtag + 2 ignore bits */ +#define _HEADER_MAP_HASHMAP_HEAD_MASK (0xbc) -#define _HEADER_MAP_SUBTAG_MASK (0xfc) /* 2 bits maps tag + 4 bits subtag + 2 ignore bits */ -#define HAMT_SUBTAG_NODE_BITMAP ((MAP_HEADER_TAG_HAMT_NODE_BITMAP << _HEADER_ARITY_OFFS) | HASHMAP_SUBTAG) -#define HAMT_SUBTAG_HEAD_ARRAY ((MAP_HEADER_TAG_HAMT_HEAD_ARRAY << _HEADER_ARITY_OFFS) | HASHMAP_SUBTAG) -#define HAMT_SUBTAG_HEAD_BITMAP ((MAP_HEADER_TAG_HAMT_HEAD_BITMAP << _HEADER_ARITY_OFFS) | HASHMAP_SUBTAG) +#define HAMT_SUBTAG_NODE_BITMAP ((MAP_HEADER_TAG_HAMT_NODE_BITMAP << _HEADER_ARITY_OFFS) | MAP_SUBTAG) +#define HAMT_SUBTAG_HEAD_ARRAY ((MAP_HEADER_TAG_HAMT_HEAD_ARRAY << _HEADER_ARITY_OFFS) | MAP_SUBTAG) +#define HAMT_SUBTAG_HEAD_BITMAP ((MAP_HEADER_TAG_HAMT_HEAD_BITMAP << _HEADER_ARITY_OFFS) | MAP_SUBTAG) +#define HAMT_SUBTAG_HEAD_FLATMAP ((MAP_HEADER_TAG_FLATMAP_HEAD << _HEADER_ARITY_OFFS) | MAP_SUBTAG) #define hashmap_index(hash) (((Uint32)hash) & 0xf) diff --git a/erts/emulator/beam/erl_nif.c b/erts/emulator/beam/erl_nif.c index c7c8b3fee3..660f446a52 100644 --- a/erts/emulator/beam/erl_nif.c +++ b/erts/emulator/beam/erl_nif.c @@ -1926,14 +1926,14 @@ int enif_get_map_size(ErlNifEnv* env, ERL_NIF_TERM term, size_t *size) ERL_NIF_TERM enif_make_new_map(ErlNifEnv* env) { - Eterm* hp = alloc_heap(env,MAP_HEADER_SIZE+1); + Eterm* hp = alloc_heap(env,MAP_HEADER_FLATMAP_SZ+1); Eterm tup; flatmap_t *mp; tup = make_tuple(hp); *hp++ = make_arityval(0); mp = (flatmap_t*)hp; - mp->thing_word = MAP_HEADER; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = 0; mp->keys = tup; diff --git a/erts/emulator/beam/erl_printf_term.c b/erts/emulator/beam/erl_printf_term.c index ac5b139f8d..e0dfbd31b8 100644 --- a/erts/emulator/beam/erl_printf_term.c +++ b/erts/emulator/beam/erl_printf_term.c @@ -565,77 +565,74 @@ print_term(fmtfn_t fn, void* arg, Eterm obj, long *dcount, } break; case MAP_DEF: - { - Uint n; - Eterm *ks, *vs; - flatmap_t *mp = (flatmap_t *)flatmap_val(wobj); - n = flatmap_get_size(mp); - ks = flatmap_get_keys(mp); - vs = flatmap_get_values(mp); - - PRINT_CHAR(res, fn, arg, '#'); - PRINT_CHAR(res, fn, arg, '{'); - WSTACK_PUSH(s, PRT_CLOSE_TUPLE); - if (n > 0) { - n--; - WSTACK_PUSH5(s, vs[n], PRT_TERM, PRT_ASSOC, ks[n], PRT_TERM); - while (n--) { - WSTACK_PUSH6(s, PRT_COMMA, vs[n], PRT_TERM, PRT_ASSOC, - ks[n], PRT_TERM); - } - } - } - break; - case HASHMAP_DEF: - { - Uint n,mapval; - Eterm *head; - head = hashmap_val(wobj); - mapval = MAP_HEADER_VAL(*head); - switch (MAP_HEADER_TYPE(*head)) { - case MAP_HEADER_TAG_HAMT_HEAD_ARRAY: - case MAP_HEADER_TAG_HAMT_HEAD_BITMAP: - PRINT_STRING(res, fn, arg, "#<"); - PRINT_UWORD(res, fn, arg, 'x', 0, 1, mapval); - PRINT_STRING(res, fn, arg, ">{"); - WSTACK_PUSH(s,PRT_CLOSE_TUPLE); - n = hashmap_bitcount(mapval); - ASSERT(n < 17); - head += 2; - if (n > 0) { - n--; - WSTACK_PUSH(s, head[n]); - WSTACK_PUSH(s, PRT_TERM); - while (n--) { - WSTACK_PUSH(s, PRT_COMMA); - WSTACK_PUSH(s, head[n]); - WSTACK_PUSH(s, PRT_TERM); - } - } - break; - case MAP_HEADER_TAG_HAMT_NODE_BITMAP: - n = hashmap_bitcount(mapval); - head++; - PRINT_CHAR(res, fn, arg, '<'); - PRINT_UWORD(res, fn, arg, 'x', 0, 1, mapval); - PRINT_STRING(res, fn, arg, ">{"); - WSTACK_PUSH(s,PRT_CLOSE_TUPLE); - ASSERT(n < 17); - if (n > 0) { - n--; - WSTACK_PUSH(s, head[n]); - WSTACK_PUSH(s, PRT_TERM); - while 
(n--) { - WSTACK_PUSH(s, PRT_COMMA); - WSTACK_PUSH(s, head[n]); - WSTACK_PUSH(s, PRT_TERM); - } - } - break; - } - } - break; - default: + if (is_flatmap(wobj)) { + Uint n; + Eterm *ks, *vs; + flatmap_t *mp = (flatmap_t *)flatmap_val(wobj); + n = flatmap_get_size(mp); + ks = flatmap_get_keys(mp); + vs = flatmap_get_values(mp); + + PRINT_CHAR(res, fn, arg, '#'); + PRINT_CHAR(res, fn, arg, '{'); + WSTACK_PUSH(s, PRT_CLOSE_TUPLE); + if (n > 0) { + n--; + WSTACK_PUSH5(s, vs[n], PRT_TERM, PRT_ASSOC, ks[n], PRT_TERM); + while (n--) { + WSTACK_PUSH6(s, PRT_COMMA, vs[n], PRT_TERM, PRT_ASSOC, + ks[n], PRT_TERM); + } + } + } else { + Uint n, mapval; + Eterm *head; + head = hashmap_val(wobj); + mapval = MAP_HEADER_VAL(*head); + switch (MAP_HEADER_TYPE(*head)) { + case MAP_HEADER_TAG_HAMT_HEAD_ARRAY: + case MAP_HEADER_TAG_HAMT_HEAD_BITMAP: + PRINT_STRING(res, fn, arg, "#<"); + PRINT_UWORD(res, fn, arg, 'x', 0, 1, mapval); + PRINT_STRING(res, fn, arg, ">{"); + WSTACK_PUSH(s,PRT_CLOSE_TUPLE); + n = hashmap_bitcount(mapval); + ASSERT(n < 17); + head += 2; + if (n > 0) { + n--; + WSTACK_PUSH(s, head[n]); + WSTACK_PUSH(s, PRT_TERM); + while (n--) { + WSTACK_PUSH(s, PRT_COMMA); + WSTACK_PUSH(s, head[n]); + WSTACK_PUSH(s, PRT_TERM); + } + } + break; + case MAP_HEADER_TAG_HAMT_NODE_BITMAP: + n = hashmap_bitcount(mapval); + head++; + PRINT_CHAR(res, fn, arg, '<'); + PRINT_UWORD(res, fn, arg, 'x', 0, 1, mapval); + PRINT_STRING(res, fn, arg, ">{"); + WSTACK_PUSH(s,PRT_CLOSE_TUPLE); + ASSERT(n < 17); + if (n > 0) { + n--; + WSTACK_PUSH(s, head[n]); + WSTACK_PUSH(s, PRT_TERM); + while (n--) { + WSTACK_PUSH(s, PRT_COMMA); + WSTACK_PUSH(s, head[n]); + WSTACK_PUSH(s, PRT_TERM); + } + } + break; + } + } + break; + default: PRINT_STRING(res, fn, arg, "<unknown:"); PRINT_POINTER(res, fn, arg, wobj); PRINT_CHAR(res, fn, arg, '>'); diff --git a/erts/emulator/beam/erl_term.c b/erts/emulator/beam/erl_term.c index d6fb88ea61..bc04d7b78e 100644 --- a/erts/emulator/beam/erl_term.c +++ b/erts/emulator/beam/erl_term.c @@ -90,7 +90,6 @@ unsigned tag_val_def(Wterm x) case (_TAG_HEADER_REFC_BIN >> _TAG_PRIMARY_SIZE): return BINARY_DEF; case (_TAG_HEADER_HEAP_BIN >> _TAG_PRIMARY_SIZE): return BINARY_DEF; case (_TAG_HEADER_SUB_BIN >> _TAG_PRIMARY_SIZE): return BINARY_DEF; - case (_TAG_HEADER_HASHMAP >> _TAG_PRIMARY_SIZE): return HASHMAP_DEF; } break; diff --git a/erts/emulator/beam/erl_term.h b/erts/emulator/beam/erl_term.h index 1625a4ec15..095aa54ddd 100644 --- a/erts/emulator/beam/erl_term.h +++ b/erts/emulator/beam/erl_term.h @@ -141,29 +141,27 @@ struct erl_node_; /* Declared in erl_node_tables.h */ #define HEAP_BINARY_SUBTAG (0x9 << _TAG_PRIMARY_SIZE) /* BINARY */ #define SUB_BINARY_SUBTAG (0xA << _TAG_PRIMARY_SIZE) /* BINARY */ /* _BINARY_XXX_MASK depends on 0xB being unused */ -#define HASHMAP_SUBTAG (0xB << _TAG_PRIMARY_SIZE) /* HASHMAP */ #define EXTERNAL_PID_SUBTAG (0xC << _TAG_PRIMARY_SIZE) /* EXTERNAL_PID */ #define EXTERNAL_PORT_SUBTAG (0xD << _TAG_PRIMARY_SIZE) /* EXTERNAL_PORT */ #define EXTERNAL_REF_SUBTAG (0xE << _TAG_PRIMARY_SIZE) /* EXTERNAL_REF */ #define MAP_SUBTAG (0xF << _TAG_PRIMARY_SIZE) /* MAP */ -#define _TAG_HEADER_ARITYVAL (TAG_PRIMARY_HEADER|ARITYVAL_SUBTAG) -#define _TAG_HEADER_FUN (TAG_PRIMARY_HEADER|FUN_SUBTAG) -#define _TAG_HEADER_POS_BIG (TAG_PRIMARY_HEADER|POS_BIG_SUBTAG) -#define _TAG_HEADER_NEG_BIG (TAG_PRIMARY_HEADER|NEG_BIG_SUBTAG) -#define _TAG_HEADER_FLOAT (TAG_PRIMARY_HEADER|FLOAT_SUBTAG) -#define _TAG_HEADER_EXPORT (TAG_PRIMARY_HEADER|EXPORT_SUBTAG) -#define _TAG_HEADER_REF 
(TAG_PRIMARY_HEADER|REF_SUBTAG) -#define _TAG_HEADER_REFC_BIN (TAG_PRIMARY_HEADER|REFC_BINARY_SUBTAG) -#define _TAG_HEADER_HEAP_BIN (TAG_PRIMARY_HEADER|HEAP_BINARY_SUBTAG) -#define _TAG_HEADER_SUB_BIN (TAG_PRIMARY_HEADER|SUB_BINARY_SUBTAG) -#define _TAG_HEADER_EXTERNAL_PID (TAG_PRIMARY_HEADER|EXTERNAL_PID_SUBTAG) -#define _TAG_HEADER_EXTERNAL_PORT (TAG_PRIMARY_HEADER|EXTERNAL_PORT_SUBTAG) -#define _TAG_HEADER_EXTERNAL_REF (TAG_PRIMARY_HEADER|EXTERNAL_REF_SUBTAG) +#define _TAG_HEADER_ARITYVAL (TAG_PRIMARY_HEADER|ARITYVAL_SUBTAG) +#define _TAG_HEADER_FUN (TAG_PRIMARY_HEADER|FUN_SUBTAG) +#define _TAG_HEADER_POS_BIG (TAG_PRIMARY_HEADER|POS_BIG_SUBTAG) +#define _TAG_HEADER_NEG_BIG (TAG_PRIMARY_HEADER|NEG_BIG_SUBTAG) +#define _TAG_HEADER_FLOAT (TAG_PRIMARY_HEADER|FLOAT_SUBTAG) +#define _TAG_HEADER_EXPORT (TAG_PRIMARY_HEADER|EXPORT_SUBTAG) +#define _TAG_HEADER_REF (TAG_PRIMARY_HEADER|REF_SUBTAG) +#define _TAG_HEADER_REFC_BIN (TAG_PRIMARY_HEADER|REFC_BINARY_SUBTAG) +#define _TAG_HEADER_HEAP_BIN (TAG_PRIMARY_HEADER|HEAP_BINARY_SUBTAG) +#define _TAG_HEADER_SUB_BIN (TAG_PRIMARY_HEADER|SUB_BINARY_SUBTAG) +#define _TAG_HEADER_EXTERNAL_PID (TAG_PRIMARY_HEADER|EXTERNAL_PID_SUBTAG) +#define _TAG_HEADER_EXTERNAL_PORT (TAG_PRIMARY_HEADER|EXTERNAL_PORT_SUBTAG) +#define _TAG_HEADER_EXTERNAL_REF (TAG_PRIMARY_HEADER|EXTERNAL_REF_SUBTAG) #define _TAG_HEADER_BIN_MATCHSTATE (TAG_PRIMARY_HEADER|BIN_MATCHSTATE_SUBTAG) -#define _TAG_HEADER_MAP (TAG_PRIMARY_HEADER|MAP_SUBTAG) -#define _TAG_HEADER_HASHMAP (TAG_PRIMARY_HEADER|HASHMAP_SUBTAG) +#define _TAG_HEADER_MAP (TAG_PRIMARY_HEADER|MAP_SUBTAG) #define _TAG_HEADER_MASK 0x3F @@ -302,7 +300,7 @@ _ET_DECLARE_CHECKED(Uint,atom_val,Eterm) #define is_header(x) (((x) & _TAG_PRIMARY_MASK) == TAG_PRIMARY_HEADER) //#define _unchecked_header_arity(x) ((x) >> _HEADER_ARITY_OFFS) #define _unchecked_header_arity(x) \ - (is_hashmap_header(x) ? MAP_HEADER_ARITY(x) : ((x) >> _HEADER_ARITY_OFFS)) + (is_map_header(x) ? 
MAP_HEADER_ARITY(x) : ((x) >> _HEADER_ARITY_OFFS)) _ET_DECLARE_CHECKED(Uint,header_arity,Eterm) #define header_arity(x) _ET_APPLY(header_arity,(x)) @@ -1001,7 +999,7 @@ _ET_DECLARE_CHECKED(struct erl_node_*,external_ref_node,Eterm) #define MAP_HEADER_ARITY_SZ (8) #define MAP_HEADER_VAL_SZ (16) -#define MAP_HEADER_TAG_FLAT (0x0) +#define MAP_HEADER_TAG_FLATMAP_HEAD (0x0) #define MAP_HEADER_TAG_HAMT_NODE_BITMAP (0x1) #define MAP_HEADER_TAG_HAMT_HEAD_ARRAY (0x2) #define MAP_HEADER_TAG_HAMT_HEAD_BITMAP (0x3) @@ -1010,17 +1008,27 @@ _ET_DECLARE_CHECKED(struct erl_node_*,external_ref_node,Eterm) #define MAP_HEADER_ARITY(Hdr) (((Hdr) >> (_HEADER_ARITY_OFFS + MAP_HEADER_TAG_SZ)) & (0xff)) #define MAP_HEADER_VAL(Hdr) (((Hdr) >> (_HEADER_ARITY_OFFS + MAP_HEADER_TAG_SZ + MAP_HEADER_ARITY_SZ)) & (0xffff)) -#define make_hashmap(x) make_boxed((Eterm*)(x)) -#define make_hashmap_rel make_boxed_rel -#define is_hashmap(x) (is_boxed((x)) && is_hashmap_header(*boxed_val((x)))) -#define is_not_hashmap(x) (!is_hashmap(x)) -#define is_hashmap_rel(RTERM,BASE) is_hashmap(rterm2wterm(RTERM,BASE)) -#define is_hashmap_header(x) (((x) & (_TAG_HEADER_MASK)) == _TAG_HEADER_HASHMAP) -#define hashmap_val(x) _unchecked_boxed_val((x)) -#define hashmap_val_rel(RTERM, BASE) hashmap_val(rterm2wterm(RTERM, BASE)) - -#define is_map(x) (is_flatmap(x) || is_hashmap(x)) -#define is_map_rel(x,BASE) (is_flatmap_rel(x,BASE) || is_hashmap_rel(x,BASE)) +#define make_hashmap(x) make_boxed((Eterm*)(x)) +#define make_hashmap_rel make_boxed_rel +#define is_hashmap(x) (is_boxed((x)) && is_hashmap_header(*boxed_val((x)))) +#define is_not_hashmap(x) (!is_hashmap(x)) +#define is_hashmap_rel(RTERM,BASE) is_hashmap(rterm2wterm(RTERM,BASE)) +#define is_hashmap_header(x) (((x) & (_HEADER_MAP_HASHMAP_HEAD_MASK)) == HAMT_SUBTAG_HEAD_ARRAY) +#define hashmap_val(x) _unchecked_boxed_val((x)) +#define hashmap_val_rel(RTERM, BASE) hashmap_val(rterm2wterm(RTERM, BASE)) + +#define make_flatmap(x) make_boxed((Eterm*)(x)) +#define make_flatmap_rel(x, BASE) make_boxed_rel((Eterm*)(x),(BASE)) +#define is_flatmap(x) (is_boxed((x)) && is_flatmap_header(*boxed_val((x)))) +#define is_flatmap_rel(RTERM,BASE) is_flatmap(rterm2wterm(RTERM,BASE)) +#define is_not_flatmap(x) (!is_flatmap((x))) +#define is_flatmap_header(x) (((x) & (_HEADER_MAP_SUBTAG_MASK)) == HAMT_SUBTAG_HEAD_FLATMAP) +#define flatmap_val(x) (_unchecked_boxed_val((x))) +#define flatmap_val_rel(RTERM, BASE) flatmap_val(rterm2wterm(RTERM, BASE)) + +#define is_map_header(x) (((x) & (_TAG_HEADER_MASK)) == _TAG_HEADER_MAP) +#define is_map(x) (is_boxed((x)) && is_map_header(*boxed_val(x))) +#define is_map_rel(RTERM,BASE) is_map(rterm2wterm(RTERM,BASE)) /* number tests */ @@ -1113,23 +1121,22 @@ _ET_DECLARE_CHECKED(Uint,y_reg_index,Uint) #define BINARY_DEF 0x0 #define LIST_DEF 0x1 #define NIL_DEF 0x2 -#define HASHMAP_DEF 0x3 -#define MAP_DEF 0x4 -#define TUPLE_DEF 0x5 -#define PID_DEF 0x6 -#define EXTERNAL_PID_DEF 0x7 -#define PORT_DEF 0x8 -#define EXTERNAL_PORT_DEF 0x9 -#define EXPORT_DEF 0xa -#define FUN_DEF 0xb -#define REF_DEF 0xc -#define EXTERNAL_REF_DEF 0xd -#define ATOM_DEF 0xe -#define FLOAT_DEF 0xf -#define BIG_DEF 0x10 -#define SMALL_DEF 0x11 - -#define FIRST_VACANT_TAG_DEF 0x12 +#define MAP_DEF 0x3 +#define TUPLE_DEF 0x4 +#define PID_DEF 0x5 +#define EXTERNAL_PID_DEF 0x6 +#define PORT_DEF 0x7 +#define EXTERNAL_PORT_DEF 0x8 +#define EXPORT_DEF 0x9 +#define FUN_DEF 0xa +#define REF_DEF 0xb +#define EXTERNAL_REF_DEF 0xc +#define ATOM_DEF 0xd +#define FLOAT_DEF 0xe +#define BIG_DEF 0xf +#define 
SMALL_DEF 0x10 + +#define FIRST_VACANT_TAG_DEF 0x11 #if ET_DEBUG extern unsigned tag_val_def_debug(Wterm, const char*, unsigned); diff --git a/erts/emulator/beam/erl_time_sup.c b/erts/emulator/beam/erl_time_sup.c index 8203436c85..bbdedcc128 100644 --- a/erts/emulator/beam/erl_time_sup.c +++ b/erts/emulator/beam/erl_time_sup.c @@ -1693,7 +1693,7 @@ static void send_time_offset_changed_notifications(void *new_offsetp) { ErtsMonotonicTime new_offset; - ErtsTimeOffsetMonitorInfo *to_mon_info; + ErtsTimeOffsetMonitorInfo *to_mon_info = NULL; /* Shut up faulty warning */ Uint no_monitors; char *tmp = NULL; diff --git a/erts/emulator/beam/erl_utils.h b/erts/emulator/beam/erl_utils.h index 7cb8972e29..6a28105cb9 100644 --- a/erts/emulator/beam/erl_utils.h +++ b/erts/emulator/beam/erl_utils.h @@ -167,24 +167,26 @@ int eq(Eterm, Eterm); #define EQ(x,y) (((x) == (y)) || (is_not_both_immed((x),(y)) && eq((x),(y)))) #if HALFWORD_HEAP -Sint erts_cmp_rel_opt(Eterm, Eterm*, Eterm, Eterm*, int); -#define cmp_rel(A,A_BASE,B,B_BASE) erts_cmp_rel_opt(A,A_BASE,B,B_BASE,0) -#define cmp_rel_term(A,A_BASE,B,B_BASE) erts_cmp_rel_opt(A,A_BASE,B,B_BASE,1) -#define CMP(A,B) erts_cmp_rel_opt(A,NULL,B,NULL,0) -#define CMP_TERM(A,B) erts_cmp_rel_opt(A,NULL,B,NULL,1) +Sint erts_cmp_rel_opt(Eterm, Eterm*, Eterm, Eterm*, int, int); +#define cmp_rel(A,A_BASE,B,B_BASE) erts_cmp_rel_opt(A,A_BASE,B,B_BASE,0,0) +#define cmp_rel_term(A,A_BASE,B,B_BASE) erts_cmp_rel_opt(A,A_BASE,B,B_BASE,1,0) +#define CMP(A,B) erts_cmp_rel_opt(A,NULL,B,NULL,0,0) +#define CMP_TERM(A,B) erts_cmp_rel_opt(A,NULL,B,NULL,1,0) +#define CMP_EQ_ONLY(A,B) erts_cmp_rel_opt(A,NULL,B,NULL,0,1) #else -Sint cmp(Eterm, Eterm); -Sint erts_cmp(Eterm, Eterm, int); -#define cmp_rel(A,A_BASE,B,B_BASE) erts_cmp(A,B,0) -#define cmp_rel_term(A,A_BASE,B,B_BASE) erts_cmp(A,B,1) -#define CMP(A,B) erts_cmp(A,B,0) -#define CMP_TERM(A,B) erts_cmp(A,B,1) +Sint erts_cmp(Eterm, Eterm, int, int); +Sint cmp(Eterm a, Eterm b); +#define cmp_rel(A,A_BASE,B,B_BASE) erts_cmp(A,B,0,0) +#define cmp_rel_term(A,A_BASE,B,B_BASE) erts_cmp(A,B,1,0) +#define CMP(A,B) erts_cmp(A,B,0,0) +#define CMP_TERM(A,B) erts_cmp(A,B,1,0) +#define CMP_EQ_ONLY(A,B) erts_cmp(A,B,0,1) #endif #define cmp_lt(a,b) (CMP((a),(b)) < 0) #define cmp_le(a,b) (CMP((a),(b)) <= 0) -#define cmp_eq(a,b) (CMP((a),(b)) == 0) -#define cmp_ne(a,b) (CMP((a),(b)) != 0) +#define cmp_eq(a,b) (CMP_EQ_ONLY((a),(b)) == 0) +#define cmp_ne(a,b) (CMP_EQ_ONLY((a),(b)) != 0) #define cmp_ge(a,b) (CMP((a),(b)) >= 0) #define cmp_gt(a,b) (CMP((a),(b)) > 0) diff --git a/erts/emulator/beam/external.c b/erts/emulator/beam/external.c index b0b232f185..fe48298155 100644 --- a/erts/emulator/beam/external.c +++ b/erts/emulator/beam/external.c @@ -1779,7 +1779,7 @@ static void ttb_context_destructor(Binary *context_bin) context->alive = 0; switch (context->state) { case TTBSize: - DESTROY_SAVED_ESTACK(&context->s.sc.estack); + DESTROY_SAVED_WSTACK(&context->s.sc.wstack); break; case TTBEncode: DESTROY_SAVED_WSTACK(&context->s.ec.wstack); @@ -1847,7 +1847,7 @@ static Eterm erts_term_to_binary_int(Process* p, Eterm Term, int level, Uint fla /* Setup enough to get started */ context->state = TTBSize; context->alive = 1; - context->s.sc.estack.start = NULL; + context->s.sc.wstack.wstart = NULL; context->s.sc.flags = flags; context->s.sc.level = level; } else { @@ -2604,7 +2604,7 @@ enc_term_int(TTBEncodeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, break; case MAP_DEF: - { + if (is_flatmap(obj)) { flatmap_t *mp = 
(flatmap_t*)flatmap_val(obj); Uint size = flatmap_get_size(mp); @@ -2618,11 +2618,7 @@ enc_term_int(TTBEncodeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, WSTACK_PUSH4(s, (UWord)kptr, (UWord)vptr, ENC_MAP_PAIR, size); } - } - break; - - case HASHMAP_DEF: - { + } else { Eterm hdr; Uint node_sz; ptr = boxed_val(obj); @@ -2656,7 +2652,6 @@ enc_term_int(TTBEncodeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, } } break; - case FLOAT_DEF: GET_DOUBLE(obj, f); if (dflags & DFLAG_NEW_FLOATS) { @@ -3607,7 +3602,7 @@ dec_term_atom_common: kptr = hp - 1; mp = (flatmap_t*)hp; - hp += MAP_HEADER_SIZE; + hp += MAP_HEADER_FLATMAP_SZ; hp += size; vptr = hp - 1; @@ -3893,7 +3888,7 @@ dec_term_atom_common: while (maps_list) { next = (Eterm *)(EXPAND_POINTER(*maps_list)); - *maps_list = MAP_HEADER; + *maps_list = MAP_HEADER_FLATMAP; if (!erts_validate_and_sort_flatmap((flatmap_t*)maps_list)) goto error; maps_list = next; @@ -3967,51 +3962,35 @@ static int encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, unsigned dflags, Sint *reds, Uint *res) { - DECLARE_ESTACK(s); + DECLARE_WSTACK(s); Uint m, i, arity; Uint result = 0; Sint r = 0; if (ctx) { - ESTACK_CHANGE_ALLOCATOR(s, ERTS_ALC_T_SAVED_ESTACK); + WSTACK_CHANGE_ALLOCATOR(s, ERTS_ALC_T_SAVED_ESTACK); r = *reds; - if (ctx->estack.start) { /* restore saved stack */ - ESTACK_RESTORE(s, &ctx->estack); + if (ctx->wstack.wstart) { /* restore saved stack */ + WSTACK_RESTORE(s, &ctx->wstack); result = ctx->result; obj = ctx->obj; } } - goto L_jump_start; +#define LIST_TAIL_OP ((0 << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER) +#define TERM_ARRAY_OP(N) (((N) << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER) +#define TERM_ARRAY_OP_DEC(OP) ((OP) - (1 << _TAG_PRIMARY_SIZE)) + + + for (;;) { + ASSERT(!is_header(obj)); - outer_loop: - while (!ESTACK_ISEMPTY(s)) { - obj = ESTACK_POP(s); - handle_popped_obj: - if (is_list(obj)) { - Eterm* cons = list_val(obj); - Eterm tl; - - tl = CDR(cons); - obj = CAR(cons); - ESTACK_PUSH(s, tl); - } else if (is_nil(obj)) { - result++; - goto outer_loop; - } else { - /* - * Other term (in the tail of a non-proper list or - * in a fun's environment). 
- */ - } - - L_jump_start: if (ctx && --r == 0) { *reds = r; ctx->obj = obj; ctx->result = result; - ESTACK_SAVE(s, &ctx->estack); + WSTACK_SAVE(s, &ctx->wstack); return -1; } switch (tag_val_def(obj)) { @@ -4094,75 +4073,44 @@ encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, result += m + 2 + 1; } else { result += 5; - goto handle_popped_obj; + WSTACK_PUSH2(s, (UWord)CDR(list_val(obj)), (UWord)LIST_TAIL_OP); + obj = CAR(list_val(obj)); + continue; /* big loop */ } break; case TUPLE_DEF: { Eterm* ptr = tuple_val(obj); - Uint i; arity = arityval(*ptr); if (arity <= 0xff) { result += 1 + 1; } else { result += 1 + 4; } - for (i = 1; i <= arity; ++i) { - if (is_list(ptr[i])) { - if ((m = is_string(obj)) && (m < MAX_STRING_LEN)) { - result += m + 2 + 1; - } else { - result += 5; - } - } - ESTACK_PUSH(s,ptr[i]); + if (arity > 1) { + WSTACK_PUSH2(s, (UWord) (ptr + 2), + (UWord) TERM_ARRAY_OP(arity-1)); } - goto outer_loop; + else if (arity == 0) { + break; + } + obj = ptr[1]; + continue; /* big loop */ } - break; case MAP_DEF: - { + if (is_flatmap(obj)) { flatmap_t *mp = (flatmap_t*)flatmap_val(obj); Uint size = flatmap_get_size(mp); - Uint i; - Eterm *ptr; result += 1 + 4; /* tag + 4 bytes size */ - /* push values first */ - ptr = flatmap_get_values(mp); - i = size; - while(i--) { - if (is_list(*ptr)) { - if ((m = is_string(*ptr)) && (m < MAX_STRING_LEN)) { - result += m + 2 + 1; - } else { - result += 5; - } - } - ESTACK_PUSH(s,*ptr); - ++ptr; + if (size) { + WSTACK_PUSH4(s, (UWord) flatmap_get_values(mp), + (UWord) TERM_ARRAY_OP(size), + (UWord) flatmap_get_keys(mp), + (UWord) TERM_ARRAY_OP(size)); } - - ptr = flatmap_get_keys(mp); - i = size; - while(i--) { - if (is_list(*ptr)) { - if ((m = is_string(*ptr)) && (m < MAX_STRING_LEN)) { - result += m + 2 + 1; - } else { - result += 5; - } - } - ESTACK_PUSH(s,*ptr); - ++ptr; - } - goto outer_loop; - } - break; - - case HASHMAP_DEF: - { + } else { Eterm *ptr; Eterm hdr; Uint node_sz; @@ -4173,8 +4121,12 @@ encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, case HAMT_SUBTAG_HEAD_ARRAY: ptr++; node_sz = 16; + result += 1 + 4; /* tag + 4 bytes size */ break; - case HAMT_SUBTAG_HEAD_BITMAP: ptr++; + case HAMT_SUBTAG_HEAD_BITMAP: + ptr++; + result += 1 + 4; /* tag + 4 bytes size */ + /*fall through*/ case HAMT_SUBTAG_NODE_BITMAP: node_sz = hashmap_bitcount(MAP_HEADER_VAL(hdr)); ASSERT(node_sz < 17); @@ -4184,13 +4136,17 @@ encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, } ptr++; - ESTACK_RESERVE(s, node_sz); + WSTACK_RESERVE(s, node_sz*2); while(node_sz--) { - ESTACK_FAST_PUSH(s, *ptr++); + if (is_list(*ptr)) { + WSTACK_FAST_PUSH(s, CAR(list_val(*ptr))); + WSTACK_FAST_PUSH(s, CDR(list_val(*ptr))); + } else { + WSTACK_FAST_PUSH(s, *ptr); + } + ptr++; } - result += 1 + 4; /* tag + 4 bytes size */ } - break; case FLOAT_DEF: if (dflags & DFLAG_NEW_FLOATS) { @@ -4242,25 +4198,13 @@ encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, result += 2 * (1 + 4); /* Index + Uniq */ result += 1 + (funp->num_free < 0x100 ? 1 : 4); } - for (i = 1; i < funp->num_free; i++) { - obj = funp->env[i]; - - if (is_not_list(obj)) { - /* Push any non-list terms on the stack */ - ESTACK_PUSH(s, obj); - } else { - /* Lists must be handled specially. 
*/ - if ((m = is_string(obj)) && (m < MAX_STRING_LEN)) { - result += m + 2 + 1; - } else { - result += 5; - ESTACK_PUSH(s, obj); - } - } + if (funp->num_free > 1) { + WSTACK_PUSH2(s, (UWord) (funp->env + 1), + (UWord) TERM_ARRAY_OP(funp->num_free-1)); } if (funp->num_free != 0) { obj = funp->env[0]; - goto L_jump_start; + continue; /* big loop */ } break; } @@ -4283,11 +4227,40 @@ encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, erl_exit(1,"Internal data structure error (in encode_size_struct2)%x\n", obj); } + + if (WSTACK_ISEMPTY(s)) { + break; + } + obj = (Eterm) WSTACK_POP(s); + + if (is_header(obj)) { + switch (obj) { + case LIST_TAIL_OP: + obj = (Eterm) WSTACK_POP(s); + if (is_list(obj)) { + Eterm* cons = list_val(obj); + + WSTACK_PUSH2(s, (UWord)CDR(cons), (UWord)LIST_TAIL_OP); + obj = CAR(cons); + } + break; + + case TERM_ARRAY_OP(1): + obj = *(Eterm*)WSTACK_POP(s); + break; + default: { /* TERM_ARRAY_OP(N) when N > 1 */ + Eterm* ptr = (Eterm*) WSTACK_POP(s); + WSTACK_PUSH2(s, (UWord) (ptr+1), + (UWord) TERM_ARRAY_OP_DEC(obj)); + obj = *ptr; + } + } + } } - DESTROY_ESTACK(s); + WSTACK_DESTROY(s); if (ctx) { - ASSERT(ctx->estack.start == NULL); + ASSERT(ctx->wstack.wstart == NULL); *reds = r; } *res = result; diff --git a/erts/emulator/beam/io.c b/erts/emulator/beam/io.c index 46f0eba5e0..1db3a9fba7 100644 --- a/erts/emulator/beam/io.c +++ b/erts/emulator/beam/io.c @@ -5354,7 +5354,7 @@ driver_deliver_term(Eterm to, ErlDrvTermData* data, int len) if (ptr[0] > MAP_SMALL_MAP_LIMIT) { need += hashmap_over_estimated_heap_size(ptr[0]); } else { - need += MAP_HEADER_SIZE + 1 + 2*ptr[0]; + need += MAP_HEADER_FLATMAP_SZ + 1 + 2*ptr[0]; } depth -= 2*ptr[0]; if (depth < 0) ERTS_DDT_FAIL; @@ -5627,12 +5627,12 @@ driver_deliver_term(Eterm to, ErlDrvTermData* data, int len) hp += 1 + size; mp = (flatmap_t*)hp; - mp->thing_word = MAP_HEADER; + mp->thing_word = MAP_HEADER_FLATMAP; mp->size = size; mp->keys = make_tuple(tp); mess = make_flatmap(mp); - hp += MAP_HEADER_SIZE + size; /* advance "heap" pointer */ + hp += MAP_HEADER_FLATMAP_SZ + size; tp += size; /* point at last key */ vp = hp - 1; /* point at last value */ diff --git a/erts/emulator/beam/time.c b/erts/emulator/beam/time.c index 3dfd3f79d4..2bdda6c8af 100644 --- a/erts/emulator/beam/time.c +++ b/erts/emulator/beam/time.c @@ -576,7 +576,7 @@ erts_cancel_timer(ErlTimer *p) { ErtsTimerWheel *tiw; ErlCancelProc cancel; - void *arg; + void *arg = NULL; /* Shut up faulty warning... */ tiw = get_timer_wheel(p); if (!tiw) diff --git a/erts/emulator/beam/utils.c b/erts/emulator/beam/utils.c index 6edb466a36..bf6d9fff50 100644 --- a/erts/emulator/beam/utils.c +++ b/erts/emulator/beam/utils.c @@ -1025,11 +1025,8 @@ tail_recur: break; } case MAP_DEF: - case HASHMAP_DEF: - { - hash = hash*FUNNY_NUMBER13 + FUNNY_NUMBER14 + make_hash2(term); - break; - } + hash = hash*FUNNY_NUMBER13 + FUNNY_NUMBER14 + make_hash2(term); + break; case TUPLE_DEF: { Eterm* ptr = tuple_val(term); @@ -1273,41 +1270,40 @@ make_hash2(Eterm term) } } break; - case MAP_SUBTAG: - { - flatmap_t *mp = (flatmap_t *)flatmap_val(term); - int i; - int size = flatmap_get_size(mp); - Eterm *ks = flatmap_get_keys(mp); - Eterm *vs = flatmap_get_values(mp); - UINT32_HASH(size, HCONST_16); - if (size == 0) { - goto hash2_common; - } - /* We want a portable hash function that is *independent* of - * the order in which keys and values are encountered. - * We therefore calculate context independent hashes for all . - * key-value pairs and then xor them together. 
- */ - ESTACK_PUSH(s, hash_xor_pairs); - ESTACK_PUSH(s, hash); - ESTACK_PUSH(s, HASH_MAP_TAIL); - hash = 0; - hash_xor_pairs = 0; - for (i = size - 1; i >= 0; i--) { - ESTACK_PUSH(s, HASH_MAP_PAIR); - ESTACK_PUSH(s, vs[i]); - ESTACK_PUSH(s, ks[i]); - } - goto hash2_common; - } - break; - case HASHMAP_SUBTAG: + case MAP_SUBTAG: { Eterm* ptr = boxed_val(term) + 1; Uint size; int i; switch (hdr & _HEADER_MAP_SUBTAG_MASK) { + case HAMT_SUBTAG_HEAD_FLATMAP: + { + flatmap_t *mp = (flatmap_t *)flatmap_val(term); + Eterm *ks = flatmap_get_keys(mp); + Eterm *vs = flatmap_get_values(mp); + size = flatmap_get_size(mp); + UINT32_HASH(size, HCONST_16); + if (size == 0) + goto hash2_common; + + /* We want a portable hash function that is *independent* of + * the order in which keys and values are encountered. + * We therefore calculate context independent hashes for all . + * key-value pairs and then xor them together. + */ + ESTACK_PUSH(s, hash_xor_pairs); + ESTACK_PUSH(s, hash); + ESTACK_PUSH(s, HASH_MAP_TAIL); + hash = 0; + hash_xor_pairs = 0; + for (i = size - 1; i >= 0; i--) { + ESTACK_PUSH(s, HASH_MAP_PAIR); + ESTACK_PUSH(s, vs[i]); + ESTACK_PUSH(s, ks[i]); + } + goto hash2_common; + } + case HAMT_SUBTAG_HEAD_ARRAY: case HAMT_SUBTAG_HEAD_BITMAP: size = *ptr++; @@ -1618,7 +1614,6 @@ make_internal_hash(Eterm term) Eterm tmp; DECLARE_ESTACK(s); - UseTmpHeapNoproc(2); hash = 0; for (;;) { switch (primary_tag(term)) { @@ -1675,41 +1670,40 @@ make_internal_hash(Eterm term) } } break; - case MAP_SUBTAG: - { - flatmap_t *mp = (flatmap_t *)flatmap_val(term); - int i; - int size = flatmap_get_size(mp); - Eterm *ks = flatmap_get_keys(mp); - Eterm *vs = flatmap_get_values(mp); - UINT32_HASH(size, HCONST_16); - if (size == 0) { - goto pop_next; - } - /* We want a hash function that is *independent* of - * the order in which keys and values are encountered. - * We therefore calculate context independent hashes for all . - * key-value pairs and then xor them together. - */ - ESTACK_PUSH(s, hash_xor_pairs); - ESTACK_PUSH(s, hash); - ESTACK_PUSH(s, HASH_MAP_TAIL); - hash = 0; - hash_xor_pairs = 0; - for (i = size - 1; i >= 0; i--) { - ESTACK_PUSH(s, HASH_MAP_PAIR); - ESTACK_PUSH(s, vs[i]); - ESTACK_PUSH(s, ks[i]); - } - goto pop_next; - } - break; - case HASHMAP_SUBTAG: + + case MAP_SUBTAG: { Eterm* ptr = boxed_val(term) + 1; Uint size; int i; switch (hdr & _HEADER_MAP_SUBTAG_MASK) { + case HAMT_SUBTAG_HEAD_FLATMAP: + { + flatmap_t *mp = (flatmap_t *)flatmap_val(term); + Eterm *ks = flatmap_get_keys(mp); + Eterm *vs = flatmap_get_values(mp); + size = flatmap_get_size(mp); + UINT32_HASH(size, HCONST_16); + if (size == 0) + goto pop_next; + + /* We want a hash function that is *independent* of + * the order in which keys and values are encountered. + * We therefore calculate context independent hashes for all . + * key-value pairs and then xor them together. + */ + ESTACK_PUSH(s, hash_xor_pairs); + ESTACK_PUSH(s, hash); + ESTACK_PUSH(s, HASH_MAP_TAIL); + hash = 0; + hash_xor_pairs = 0; + for (i = size - 1; i >= 0; i--) { + ESTACK_PUSH(s, HASH_MAP_PAIR); + ESTACK_PUSH(s, vs[i]); + ESTACK_PUSH(s, ks[i]); + } + goto pop_next; + } case HAMT_SUBTAG_HEAD_ARRAY: case HAMT_SUBTAG_HEAD_BITMAP: size = *ptr++; @@ -1899,10 +1893,12 @@ make_internal_hash(Eterm term) { FloatDef ff; GET_DOUBLE(term, ff); + if (ff.fd == 0.0) { + ff.fd = 0.0; /* ensure pos. 
0.0 */ + } UINT32_HASH_2(ff.fw[0], ff.fw[1], HCONST_12); goto pop_next; } - default: erl_exit(1, "Invalid tag in make_hash2(0x%X)\n", term); } @@ -1922,7 +1918,6 @@ make_internal_hash(Eterm term) pop_next: if (ESTACK_ISEMPTY(s)) { DESTROY_ESTACK(s); - UnUseTmpHeapNoproc(2); return hash; } @@ -2170,11 +2165,8 @@ tail_recur: break; case MAP_DEF: - case HASHMAP_DEF: - { - hash = hash*FUNNY_NUMBER13 + FUNNY_NUMBER14 + make_hash2(term); - break; - } + hash = hash*FUNNY_NUMBER13 + FUNNY_NUMBER14 + make_hash2(term); + break; case TUPLE_DEF: { Eterm* ptr = tuple_val(term); @@ -2579,22 +2571,6 @@ tailrecur_ne: ++bb; goto term_array; } - case MAP_SUBTAG: - { - aa = flatmap_val_rel(a, a_base); - if (!is_boxed(b) || *boxed_val_rel(b,b_base) != *aa) - goto not_equal; - bb = flatmap_val_rel(b,b_base); - sz = flatmap_get_size((flatmap_t*)aa); - - if (sz != flatmap_get_size((flatmap_t*)bb)) goto not_equal; - if (sz == 0) goto pop_next; - - aa += 2; - bb += 2; - sz += 1; /* increment for tuple-keys */ - goto term_array; - } case REFC_BINARY_SUBTAG: case HEAP_BINARY_SUBTAG: case SUB_BINARY_SUBTAG: @@ -2786,8 +2762,23 @@ tailrecur_ne: } break; /* not equal */ } - case HASHMAP_SUBTAG: - { + case MAP_SUBTAG: + if (is_flatmap_rel(a, a_base)) { + aa = flatmap_val_rel(a, a_base); + if (!is_boxed(b) || *boxed_val_rel(b,b_base) != *aa) + goto not_equal; + bb = flatmap_val_rel(b,b_base); + sz = flatmap_get_size((flatmap_t*)aa); + + if (sz != flatmap_get_size((flatmap_t*)bb)) goto not_equal; + if (sz == 0) goto pop_next; + + aa += 2; + bb += 2; + sz += 1; /* increment for tuple-keys */ + goto term_array; + + } else { if (!is_boxed(b) || *boxed_val_rel(b,b_base) != hdr) goto not_equal; @@ -2920,7 +2911,7 @@ static int cmp_atoms(Eterm a, Eterm b) */ Sint cmp(Eterm a, Eterm b) { - return erts_cmp(a, b, 0); + return erts_cmp(a, b, 0, 0); } #endif @@ -2929,9 +2920,10 @@ Sint cmp(Eterm a, Eterm b) * exact = 0 -> arith-based compare */ #if HALFWORD_HEAP -Sint erts_cmp_rel_opt(Eterm a, Eterm* a_base, Eterm b, Eterm* b_base, int exact) +Sint erts_cmp_rel_opt(Eterm a, Eterm* a_base, Eterm b, Eterm* b_base, + int exact, int eq_only) #else -Sint erts_cmp(Eterm a, Eterm b, int exact) +Sint erts_cmp(Eterm a, Eterm b, int exact, int eq_only) #endif { #define PSTACK_TYPE struct erts_cmp_hashmap_state @@ -3124,44 +3116,56 @@ tailrecur_ne: ++aa; ++bb; goto term_array; - case (_TAG_HEADER_MAP >> _TAG_PRIMARY_SIZE) : - if (!is_flatmap_rel(b,b_base)) { - a_tag = MAP_DEF; - goto mixed_types; - } - aa = (Eterm *)flatmap_val_rel(a,a_base); - bb = (Eterm *)flatmap_val_rel(b,b_base); - - i = flatmap_get_size((flatmap_t*)aa); - if (i != flatmap_get_size((flatmap_t*)bb)) { - RETURN_NEQ((int)(i - flatmap_get_size((flatmap_t*)bb))); - } - if (i == 0) { - goto pop_next; - } - aa += 2; - bb += 2; - if (exact) { - i += 1; /* increment for tuple-keys */ - goto term_array; - } - else { - /* Value array */ - WSTACK_PUSH3(stack, (UWord)(bb+1), (UWord)(aa+1), TERM_ARRAY_OP_WORD(i)); - /* Switch back from 'exact' key compare */ - WSTACK_PUSH(stack, OP_WORD(SWITCH_EXACT_OFF_OP)); - /* Now do 'exact' compare of key tuples */ - a = *aa; - b = *bb; - exact = 1; - goto bodyrecur; - } - - case (_TAG_HEADER_HASHMAP >> _TAG_PRIMARY_SIZE) : + case (_TAG_HEADER_MAP >> _TAG_PRIMARY_SIZE) : { struct erts_cmp_hashmap_state* sp; + if (is_flatmap_header(ahdr)) { + if (!is_flatmap_rel(b,b_base)) { + if (is_hashmap_rel(b,b_base)) { + aa = (Eterm *)flatmap_val_rel(a,a_base); + i = flatmap_get_size((flatmap_t*)aa) - hashmap_size_rel(b,b_base); + ASSERT(i != 0); + 
RETURN_NEQ(i); + } + a_tag = MAP_DEF; + goto mixed_types; + } + aa = (Eterm *)flatmap_val_rel(a,a_base); + bb = (Eterm *)flatmap_val_rel(b,b_base); + + i = flatmap_get_size((flatmap_t*)aa); + if (i != flatmap_get_size((flatmap_t*)bb)) { + RETURN_NEQ((int)(i - flatmap_get_size((flatmap_t*)bb))); + } + if (i == 0) { + goto pop_next; + } + aa += 2; + bb += 2; + if (exact) { + i += 1; /* increment for tuple-keys */ + goto term_array; + } + else { + /* Value array */ + WSTACK_PUSH3(stack,(UWord)(bb+1),(UWord)(aa+1),TERM_ARRAY_OP_WORD(i)); + /* Switch back from 'exact' key compare */ + WSTACK_PUSH(stack,OP_WORD(SWITCH_EXACT_OFF_OP)); + /* Now do 'exact' compare of key tuples */ + a = *aa; + b = *bb; + exact = 1; + goto bodyrecur; + } + } if (!is_hashmap_rel(b,b_base)) { - a_tag = HASHMAP_DEF; + if (is_flatmap_rel(b,b_base)) { + bb = (Eterm *)flatmap_val_rel(b,b_base); + i = hashmap_size_rel(a,a_base) - flatmap_get_size((flatmap_t*)bb); + ASSERT(i != 0); + RETURN_NEQ(i); + } + a_tag = MAP_DEF; goto mixed_types; } i = hashmap_size_rel(a,a_base) - hashmap_size_rel(b,b_base); @@ -3739,7 +3743,7 @@ pop_next: return 0; not_equal: - if (!PSTACK_IS_EMPTY(hmap_stack)) { + if (!PSTACK_IS_EMPTY(hmap_stack) && !eq_only) { WSTACK_ROLLBACK(stack, PSTACK_TOP(hmap_stack)->wstack_rollback); goto pop_next; } diff --git a/erts/emulator/sys/unix/sys_time.c b/erts/emulator/sys/unix/sys_time.c index e764607c25..d535457977 100644 --- a/erts/emulator/sys/unix/sys_time.c +++ b/erts/emulator/sys/unix/sys_time.c @@ -410,11 +410,7 @@ erts_os_system_time(void) stime = (ErtsSystemTime) posix_clock_gettime(WALL_CLOCK_ID, WALL_CLOCK_ID_STR); -#if defined(OS_MONOTONIC_TIME_USING_CLOCK_GETTIME) - return stime; -#else return adj_stime_time_unit(stime, (Uint32) 1000*1000*1000); -#endif } #endif /* defined(OS_SYSTEM_TIME_USING_CLOCK_GETTIME) */ @@ -516,8 +512,7 @@ ErtsMonotonicTime erts_os_monotonic_time(void) #if defined(OS_SYSTEM_TIME_USING_CLOCK_GETTIME) -static void erts_os_times(ErtsMonotonicTime *mtimep, - ErtsSystemTime *stimep) +void erts_os_times(ErtsMonotonicTime *mtimep, ErtsSystemTime *stimep) { posix_clock_gettime_times(mtimep, stimep); } @@ -627,11 +622,7 @@ erts_os_system_time(void) ErtsSystemTime stime; stime = (ErtsSystemTime) mach_clock_gettime(WALL_CLOCK_ID, WALL_CLOCK_ID_STR); -#if defined(OS_MONOTONIC_TIME_USING_MACH_CLOCK_GET_TIME) - return stime; -#else return adj_stime_time_unit(stime, (Uint32) 1000*1000*1000); -#endif } #endif /* defined(OS_SYSTEM_TIME_USING_MACH_CLOCK_GET_TIME) */ @@ -790,7 +781,7 @@ erts_os_monotonic_time(void) ErtsSysHrTime erts_sys_hrtime(void) { - return (ErtsSysHrTime) erts_os_system_time(); + return (ErtsSysHrTime) ERTS_MONOTONIC_TO_NSEC(erts_os_system_time()); } #endif diff --git a/erts/emulator/sys/win32/sys_time.c b/erts/emulator/sys/win32/sys_time.c index 7fe61084ce..b292d9279e 100644 --- a/erts/emulator/sys/win32/sys_time.c +++ b/erts/emulator/sys/win32/sys_time.c @@ -302,7 +302,7 @@ sys_init_time(ErtsSysInitTimeResult *init_resp) internal_state.w.f.last_tick_count = 0; init_resp->os_monotonic_time_info.func = "GetTickCount"; - init_resp->os_monotonic_time_info.locked_use = 1; + init_resp->os_monotonic_time_info.locked_use = 0; /* 10-16 ms resolution according to MicroSoft documentation */ init_resp->os_monotonic_time_info.resolution = 100; /* 10 ms */ time_unit = (ErtsMonotonicTime) 1000; diff --git a/erts/emulator/test/map_SUITE.erl b/erts/emulator/test/map_SUITE.erl index 228832ac0a..dc6286fdb6 100644 --- a/erts/emulator/test/map_SUITE.erl +++ 
b/erts/emulator/test/map_SUITE.erl @@ -21,12 +21,18 @@ ]). -export([ - t_build_and_match_literals/1, - t_update_literals/1,t_match_and_update_literals/1, + t_build_and_match_literals/1, t_build_and_match_literals_large/1, + t_update_literals/1, t_update_literals_large/1, + t_match_and_update_literals/1, t_match_and_update_literals_large/1, t_update_map_expressions/1, - t_update_assoc/1,t_update_exact/1, - t_guard_bifs/1, t_guard_sequence/1, t_guard_update/1, - t_guard_receive/1, t_guard_fun/1, + t_update_assoc/1, t_update_assoc_large/1, + t_update_exact/1, t_update_exact_large/1, + t_guard_bifs/1, + t_guard_sequence/1, t_guard_sequence_large/1, + t_guard_update/1, t_guard_update_large/1, + t_guard_receive/1, t_guard_receive_large/1, + t_guard_fun/1, + t_update_deep/1, t_list_comprehension/1, t_map_sort_literals/1, t_map_equal/1, @@ -64,6 +70,7 @@ %% misc t_hashmap_balance/1, t_erts_internal_order/1, + t_erts_internal_hash/1, t_pdict/1, t_ets/1, t_dets/1, @@ -82,12 +89,18 @@ suite() -> []. all() -> [ - t_build_and_match_literals, - t_update_literals, t_match_and_update_literals, + t_build_and_match_literals, t_build_and_match_literals_large, + t_update_literals, t_update_literals_large, + t_match_and_update_literals, t_match_and_update_literals_large, t_update_map_expressions, - t_update_assoc,t_update_exact, - t_guard_bifs, t_guard_sequence, t_guard_update, - t_guard_receive,t_guard_fun, t_list_comprehension, + t_update_assoc, t_update_assoc_large, + t_update_exact, t_update_exact_large, + t_guard_bifs, + t_guard_sequence, t_guard_sequence_large, + t_guard_update, t_guard_update_large, + t_guard_receive, t_guard_receive_large, + t_guard_fun, t_list_comprehension, + t_update_deep, t_map_equal, t_map_compare, t_map_sort_literals, @@ -115,6 +128,7 @@ all() -> [ %% Other functions t_hashmap_balance, t_erts_internal_order, + t_erts_internal_hash, t_pdict, t_ets, t_tracing @@ -164,6 +178,461 @@ t_build_and_match_literals(Config) when is_list(Config) -> {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id(#{x=>"three"}))), ok. 
+t_build_and_match_literals_large(Config) when is_list(Config) -> + % normal non-repeating + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M0, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M0, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M0, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M0, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M0, + + 60 = map_size(M0), + 60 = maps:size(M0), + + % with repeating + M1 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 10=>na0,20=>nb0,30=>"nc0","40"=>"nd0",<<"50">>=>"ne0",{["00"]}=>"n10", + 11=>na1,21=>nb1,31=>"nc1","41"=>"nd1",<<"51">>=>"ne1",{["01"]}=>"n11", + 12=>na2,22=>nb2,32=>"nc2","42"=>"nd2",<<"52">>=>"ne2",{["02"]}=>"n12", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + + 13=>na3,23=>nb3,33=>"nc3","43"=>"nd3",<<"53">>=>"ne3",{["03"]}=>"n13", + 14=>na4,24=>nb4,34=>"nc4","44"=>"nd4",<<"54">>=>"ne4",{["04"]}=>"n14", + + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }), + + #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1, + #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1, + #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1, + #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1, + #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1, + + 60 = map_size(M1), + 60 = maps:size(M1), + + % with floats + + M2 = id(#{ 
10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9"}), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + 90 = map_size(M2), + 90 = maps:size(M2), + + % with bignums + M3 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 
15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + 36893488147419103232=>big1, 73786976294838206464=>big2, + 147573952589676412928=>big3, 18446744073709551616=>big4, + 4294967296=>big5, 8589934592=>big6, + 4294967295=>big7, 67108863=>big8 + }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + + 98 = map_size(M3), + 98 = maps:size(M3), + + %% with maps + + M4 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => 
"small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M4, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M4, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M4, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M4, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M4, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M4, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M4, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M4, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M4, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M4, + + #{ #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := "small map key 2", + #{ third => small, map => key } := "small map key 3" } = M4, + + #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 
k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M4, + + + #{ 15:=V1,25:=b5,35:=V2,"45":="d5",<<"55">>:=V3,{["05"]}:="15", + #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := V4, + #{ third => small, map => key } := "small map key 3", + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := V5 } = M4, + + a5 = V1, + "c5" = V2, + "e5" = V3, + "small map key 2" = V4, + "large map key 1" = V5, + + 95 = map_size(M4), + 95 = maps:size(M4), + + % call for value + + M5 = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + #{ one => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 
11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M5, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M5, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M5, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M5, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M5, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M5, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M5, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M5, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M5, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M5, + + #{ #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := "small map key 2", + #{ third => small, map => key } := "small map key 3" } = M5, + + #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M5, + + 95 = map_size(M5), + 95 = maps:size(M5), + + %% remember + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0, + + #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1, + 
#{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1, + #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1, + #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1, + #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + + ok. 
+ t_map_size(Config) when is_list(Config) -> 0 = map_size(id(#{})), @@ -183,6 +652,17 @@ t_map_size(Config) when is_list(Config) -> Ks = [build_key(fun(K) -> <<1,K:32,1>> end,I)||I<-lists:seq(1,100)], ok = build_and_check_size(Ks,0,#{}), + %% try deep collisions + %% statistically we get another subtree at 50k -> 100k elements + %% Try to be nice and don't use too much memory in the testcase, + + N = 500000, + Is = lists:seq(1,N), + N = map_size(maps:from_list([{I,I}||I<-Is])), + N = map_size(maps:from_list([{<<I:32>>,I}||I<-Is])), + N = map_size(maps:from_list([{integer_to_list(I),I}||I<-Is])), + N = map_size(maps:from_list([{float(I),I}||I<-Is])), + %% Error cases. {'EXIT',{badarg,_}} = (catch map_size([])), {'EXIT',{badarg,_}} = (catch map_size(<<1,2,3>>)), @@ -201,6 +681,64 @@ map_is_size(M,N) when map_size(M) =:= N -> true; map_is_size(_,_) -> false. % test map updates without matching +t_update_literals_large(Config) when is_list(Config) -> + Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + #{ one => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 
17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [ + {"a","1"},{"b","2"},{"c","3"},{"d","4"} + ]), + ok. + t_update_literals(Config) when is_list(Config) -> Map = #{x=>1,y=>2,z=>3,q=>4}, #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [ @@ -208,13 +746,15 @@ t_update_literals(Config) when is_list(Config) -> ]), ok. + loop_update_literals_x_q(Map, []) -> Map; loop_update_literals_x_q(Map, [{X,Q}|Vs]) -> loop_update_literals_x_q(Map#{q=>Q,x=>X},Vs). % test map updates with matching t_match_and_update_literals(Config) when is_list(Config) -> - Map = #{x=>0,y=>"untouched",z=>"also untouched",q=>1}, + Map = #{ x=>0,y=>"untouched",z=>"also untouched",q=>1, + #{ "one" => small, map => key } => "small map key 1" }, #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [ {1,2},{3,4},{5,6},{7,8} ]), @@ -228,8 +768,78 @@ t_match_and_update_literals(Config) when is_list(Config) -> #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number }, ok. +t_match_and_update_literals_large(Config) when is_list(Config) -> + Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + x=>0,y=>"untouched",z=>"also untouched",q=>1, + + #{ "one" => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 
11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [ + {1,2},{3,4},{5,6},{7,8} + ]), + M0 = id(Map#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, + 4 => number, 18446744073709551629 => wat}), + M1 = id(Map#{}), + M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, + 4 => number, 18446744073709551629 => wat}, + M0 = M2, + + #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number }, + ok. + + loop_match_and_update_literals_x_q(Map, []) -> Map; -loop_match_and_update_literals_x_q(#{q:=Q0,x:=X0} = Map, [{X,Q}|Vs]) -> +loop_match_and_update_literals_x_q(#{ q:=Q0, x:=X0, + #{ "one" => small, map => key } := "small map key 1" } = Map, [{X,Q}|Vs]) -> loop_match_and_update_literals_x_q(Map#{q=>Q0+Q,x=>X0+X},Vs). @@ -241,13 +851,15 @@ t_update_map_expressions(Config) when is_list(Config) -> #{ a :=42, b:=42, c:=42 } = (maps:from_list([{a,1},{b,2},{c,3}]))#{ a := 42, b := 42, c := 42 }, #{ "a" :=1, "b":=42, "c":=42 } = (maps:from_list([{"a",1},{"b",2}]))#{ "b" := 42, "c" => 42 }, + Ks = lists:seq($a,$z), + #{ "aa" := {$a,$a}, "ac":=41, "dc":=42 } = + (maps:from_list([{[K1,K2],{K1,K2}}|| K1 <- Ks, K2 <- Ks]))#{ "ac" := 41, "dc" => 42 }, %% Error cases, FIXME: should be 'badmap'? {'EXIT',{badarg,_}} = (catch (id(<<>>))#{ a := 42, b => 2 }), {'EXIT',{badarg,_}} = (catch (id([]))#{ a := 42, b => 2 }), ok. - t_update_assoc(Config) when is_list(Config) -> M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}), @@ -262,7 +874,75 @@ t_update_assoc(Config) when is_list(Config) -> %% Errors cases. BadMap = id(badmap), {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>val}), + ok. 
+ +t_update_assoc_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + M1 = M0#{1=>42,2=>100,4=>[a,b,c]}, + #{1:=42,2:=100,10.0:=fa0,4:=[a,b,c],25:=b5} = M1, + #{ 10:=43, 24:=b4, 15:=a5, 35:="c5", 2.0:=100, 13.0:=fa3, 4.0:=[a,b,c]} = + M0#{1.0=>float,10:=43,2.0=>wrong,2.0=>100,4.0=>[a,b,c]}, + + M2 = M0#{13.0=>new}, + #{10:=a0,20:=b0,13.0:=new,"40":="d0",<<"50">>:="e0"} = M2, + M2 = M0#{13.0:=wrong,13.0=>new}, + + %% Errors cases. + BadMap = id(badmap), + {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>M0}), ok. t_update_exact(Config) when is_list(Config) -> @@ -293,6 +973,110 @@ t_update_exact(Config) when is_list(Config) -> ok. 
+t_update_exact_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + M1 = M0#{10:=42,<<"55">>:=100,10.0:=[a,b,c]}, + #{ 10:=42,<<"55">>:=100,{["05"]}:="15",10.0:=[a,b,c], + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1" } = M1, + + M1 = M0#{10:=wrong,10=>42,<<"55">>=>wrong,<<"55">>:=100,10.0:=[a,b,c]}, 
+ + M2 = M0#{13.0:=new}, + #{10:=a0,20:=b0,13.0:=new} = M2, + M2 = M0#{13.0=>wrong,13.0:=new}, + + %% Errors cases. + {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}), + {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}), + {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}), + {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), + + ok. + +t_update_deep(Config) when is_list(Config) -> + N = 250000, + M0 = maps:from_list([{integer_to_list(I),a}||I<-lists:seq(1,N)]), + #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0, + + M1 = M0#{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b }, + #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0, + #{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b } = M1, + + M2 = M0#{ "1" => c, "10" => c, "100" => c, "1000" => c, "10000" => c }, + #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0, + #{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b } = M1, + #{ "1" := c, "10" := c, "100" := c, "1000" := c, "10000" := c } = M2, + + M3 = M2#{ "n1" => d, "n10" => d, "n100" => d, "n1000" => d, "n10000" => d }, + #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0, + #{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b } = M1, + #{ "1" := c, "10" := c, "100" := c, "1000" := c, "10000" := c } = M2, + #{ "1" := c, "10" := c, "100" := c, "1000" := c, "10000" := c } = M3, + #{ "n1" := d, "n10" := d, "n100" := d, "n1000" := d, "n10000" := d } = M3, + ok. + t_guard_bifs(Config) when is_list(Config) -> true = map_guard_head(#{a=>1}), false = map_guard_head([]), @@ -322,12 +1106,82 @@ t_guard_sequence(Config) when is_list(Config) -> {3,gg,M3} = map_guard_sequence_2(M3 = id(#{a=>gg, b=>4})), {4,sc,sc,M4} = map_guard_sequence_2(M4 = id(#{a=>sc, b=>3, c=>sc2})), {5,kk,kk,M5} = map_guard_sequence_2(M5 = id(#{a=>kk, b=>other, c=>sc2})), - + %% error case {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(#{seq=>6,val=>id("e")})), {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(#{b=>5})), ok. 
+t_guard_sequence_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + {1, "a"} = map_guard_sequence_1(M0#{seq=>1,val=>id("a")}), + {2, "b"} = map_guard_sequence_1(M0#{seq=>2,val=>id("b")}), + {3, "c"} = map_guard_sequence_1(M0#{seq=>3,val=>id("c")}), + {4, "d"} = map_guard_sequence_1(M0#{seq=>4,val=>id("d")}), + {5, "e"} = map_guard_sequence_1(M0#{seq=>5,val=>id("e")}), + + {1,M1} = map_guard_sequence_2(M1 = id(M0#{a=>3})), + {2,M2} = map_guard_sequence_2(M2 = id(M0#{a=>4, b=>4})), + {3,gg,M3} = map_guard_sequence_2(M3 = id(M0#{a=>gg, b=>4})), + {4,sc,sc,M4} = map_guard_sequence_2(M4 = id(M0#{a=>sc, b=>3, c=>sc2})), + {5,kk,kk,M5} = map_guard_sequence_2(M5 = id(M0#{a=>kk, b=>other, c=>sc2})), + + {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(M0#{seq=>6,val=>id("e")})), + {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(M0#{b=>5})), + ok. 
+ + map_guard_sequence_1(#{seq:=1=Seq, val:=Val}) -> {Seq,Val}; map_guard_sequence_1(#{seq:=2=Seq, val:=Val}) -> {Seq,Val}; map_guard_sequence_1(#{seq:=3=Seq, val:=Val}) -> {Seq,Val}; @@ -347,6 +1201,66 @@ t_guard_update(Config) when is_list(Config) -> second = map_guard_update(#{y=>old}, #{x=>second,y=>old}), ok. +t_guard_update_large(Config) when is_list(Config) -> + M0 = id(#{ 70=>a0,80=>b0,90=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10", + 71=>a1,81=>b1,91=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11", + 72=>a2,82=>b2,92=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12", + 73=>a3,83=>b3,93=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13", + 74=>a4,84=>b4,94=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14", + + 75=>a5,85=>b5,95=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15", + 76=>a6,86=>b6,96=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16", + 77=>a7,87=>b7,97=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17", + 78=>a8,88=>b8,98=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18", + 79=>a9,89=>b9,99=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19", + + 70.0=>fa0,80.0=>fb0,90.0=>"fc0", + 71.0=>fa1,81.0=>fb1,91.0=>"fc1", + 72.0=>fa2,82.0=>fb2,92.0=>"fc2", + 73.0=>fa3,83.0=>fb3,93.0=>"fc3", + 74.0=>fa4,84.0=>fb4,94.0=>"fc4", + + 75.0=>fa5,85.0=>fb5,95.0=>"fc5", + 76.0=>fa6,86.0=>fb6,96.0=>"fc6", + 77.0=>fa7,87.0=>fb7,97.0=>"fc7", + 78.0=>fa8,88.0=>fb8,98.0=>"fc8", + 79.0=>fa9,89.0=>fb9,99.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + error = map_guard_update(M0#{},M0#{}), + first = map_guard_update(M0#{},M0#{x=>first}), + second = map_guard_update(M0#{y=>old}, M0#{x=>second,y=>old}), + ok. + + map_guard_update(M1, M2) when M1#{x=>first} =:= M2 -> first; map_guard_update(M1, M2) when M1#{x=>second} =:= M2 -> second; map_guard_update(_, _) -> error. @@ -376,6 +1290,42 @@ t_guard_receive(Config) when is_list(Config) -> done = call(Pid, done), ok. +-define(t_guard_receive_large_procs, 1500). 
+ +t_guard_receive_large(Config) when is_list(Config) -> + M = lists:foldl(fun(_,#{procs := Ps } = M) -> + M#{ procs := Ps#{ spawn_link(fun() -> grecv_loop() end) => 0 }} + end, #{procs => #{}, done => 0}, lists:seq(1,?t_guard_receive_large_procs)), + lists:foreach(fun(Pid) -> + Pid ! {self(), hello} + end, maps:keys(maps:get(procs,M))), + ok = guard_receive_large_loop(M), + ok. + +guard_receive_large_loop(#{done := ?t_guard_receive_large_procs}) -> + ok; +guard_receive_large_loop(M) -> + receive + #{pid := Pid, msg := hello} -> + case M of + #{done := Count, procs := #{Pid := 150}} -> + Pid ! {self(), done}, + guard_receive_large_loop(M#{done := Count + 1}); + #{procs := #{Pid := Count} = Ps} -> + Pid ! {self(), hello}, + guard_receive_large_loop(M#{procs := Ps#{Pid := Count + 1}}) + end + end. + +grecv_loop() -> + receive + {_, done} -> + ok; + {Pid, hello} -> + Pid ! #{pid=>self(), msg=>hello}, + grecv_loop() + end. + call(Pid, M) -> Pid ! {self(), M}, receive {Pid, Res} -> Res end. @@ -406,6 +1356,11 @@ guard_receive_loop() -> t_list_comprehension(Config) when is_list(Config) -> [#{k:=1},#{k:=2},#{k:=3}] = [#{k=>I} || I <- [1,2,3]], + + Ks = lists:seq($a,$z), + Ms = [#{[K1,K2]=>{K1,K2}} || K1 <- Ks, K2 <- Ks], + [#{"aa" := {$a,$a}},#{"ab":={$a,$b}}|_] = Ms, + [#{"zz" := {$z,$z}},#{"zy":={$z,$y}}|_] = lists:reverse(Ms), ok. t_guard_fun(Config) when is_list(Config) -> @@ -899,6 +1854,40 @@ t_bif_map_merge(Config) when is_list(Config) -> #{4 := integer, 18446744073709551629 := wat, float := 3.3, int := 3, {1,2} := "tuple", "hi" := "hello again", <<"key">> := <<"value">>} = maps:merge(M0,M1), + %% try deep collisions + N = 150000, + Is = lists:seq(1,N), + M2 = maps:from_list([{I,I}||I<-Is]), + 150000 = maps:size(M2), + M3 = maps:from_list([{<<I:32>>,I}||I<-Is]), + 150000 = maps:size(M3), + M4 = maps:merge(M2,M3), + 300000 = maps:size(M4), + M5 = maps:from_list([{integer_to_list(I),I}||I<-Is]), + 150000 = maps:size(M5), + M6 = maps:merge(M4,M5), + 450000 = maps:size(M6), + M7 = maps:from_list([{float(I),I}||I<-Is]), + 150000 = maps:size(M7), + M8 = maps:merge(M7,M6), + 600000 = maps:size(M8), + + #{ 1 := 1, "1" := 1, <<1:32>> := 1 } = M8, + #{ 10 := 10, "10" := 10, <<10:32>> := 10 } = M8, + #{ 100 := 100, "100" := 100, <<100:32>> := 100 } = M8, + #{ 1000 := 1000, "1000" := 1000, <<1000:32>> := 1000 } = M8, + #{ 10000 := 10000, "10000" := 10000, <<10000:32>> := 10000 } = M8, + #{ 100000 := 100000, "100000" := 100000, <<100000:32>> := 100000 } = M8, + + %% overlapping + M8 = maps:merge(M2,M8), + M8 = maps:merge(M3,M8), + M8 = maps:merge(M4,M8), + M8 = maps:merge(M5,M8), + M8 = maps:merge(M6,M8), + M8 = maps:merge(M7,M8), + M8 = maps:merge(M8,M8), + %% error case {'EXIT',{badarg,[{maps,merge,_,_}|_]}} = (catch maps:merge((1 bsl 65 + 3), <<>>)), {'EXIT',{badarg,[{maps,merge,_,_}|_]}} = (catch maps:merge(<<>>, id(#{ a => 1}))), @@ -1301,19 +2290,23 @@ build_and_check(0, M0, _, Ks) -> {M0, Ks}; build_and_check(N, M0, F, Ks) -> K = build_key(F,N), M1 = maps:put(K,K,M0), - ok = check_keys_exist([K|Ks], M1), + ok = check_keys_exist([I||{I,_} <- [{K,M1}|Ks]], M1), M2 = maps:update(K,v,M1), v = maps:get(K,M2), - build_and_check(N-1,M1,F,[K|Ks]). + build_and_check(N-1,M1,F,[{K,M1}|Ks]). 
remove_and_check([],_) -> ok; -remove_and_check([K|Ks], M0) -> +remove_and_check([{K,Mc}|Ks], M0) -> K = maps:get(K,M0), true = maps:is_key(K,M0), + true = Mc =:= M0, + true = M0 == Mc, M1 = maps:remove(K,M0), + false = M1 =:= Mc, + false = Mc == M1, false = maps:is_key(K,M1), true = maps:is_key(K,M0), - ok = check_keys_exist(Ks,M1), + ok = check_keys_exist([I||{I,_} <- Ks],M1), error = maps:find(K,M1), remove_and_check(Ks, M1). @@ -1549,6 +2542,18 @@ t_erts_internal_order(_Config) when is_list(_Config) -> 1 = maps:get(0,M1), ok. +t_erts_internal_hash(_Config) when is_list(_Config) -> + K1 = 0.0, + K2 = 0.0/-1, + + M1 = (maps:from_list([{I,I}||I<-lists:seq(1,32)]))#{ K1 => a, K2 => b }, + b = maps:get(K2,M1), + + M2 = (maps:from_list([{I,I}||I<-lists:seq(1,32)]))#{ K2 => a, K1 => b }, + b = maps:get(K1,M2), + + ok. + t_pdict(_Config) -> put(#{ a => b, b => a},#{ c => d}), diff --git a/erts/emulator/test/num_bif_SUITE.erl b/erts/emulator/test/num_bif_SUITE.erl index 8cf8377c30..abe5b8eb91 100644 --- a/erts/emulator/test/num_bif_SUITE.erl +++ b/erts/emulator/test/num_bif_SUITE.erl @@ -441,7 +441,11 @@ t_string_to_integer(Config) when is_list(Config) -> {"1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111z",16}, {"1z111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111",16}, {"111z11111111",16}]), - + + %% log2 calculation overflow bug in do_integer_to_list (OTP-12624) + %% Would crash with segv + 0 = list_to_integer(lists:duplicate(10000000,$0)), + ok. test_sti(Num) -> diff --git a/erts/emulator/valgrind/suppress.patched.3.6.0 b/erts/emulator/valgrind/suppress.patched.3.6.0 index f79e3ff634..16cecf2dba 100644 --- a/erts/emulator/valgrind/suppress.patched.3.6.0 +++ b/erts/emulator/valgrind/suppress.patched.3.6.0 @@ -362,3 +362,15 @@ fun:async_main ... } +{ +Deliberate invalid read by test case bif_SUITE:erlang_halt +Memcheck:Addr4 +... +fun:erts_print_scheduler_info +... +fun:erl_exit +fun:broken_halt_test +fun:erts_debug_set_internal_state_2 +fun:process_main +} + diff --git a/erts/emulator/valgrind/suppress.standard b/erts/emulator/valgrind/suppress.standard index b3c77119fb..a1f3f82364 100644 --- a/erts/emulator/valgrind/suppress.standard +++ b/erts/emulator/valgrind/suppress.standard @@ -330,3 +330,15 @@ fun:async_main ... } +{ +Deliberate invalid read by test case bif_SUITE:erlang_halt +Memcheck:Addr4 +... +fun:erts_print_scheduler_info +... 
+fun:erl_exit
+fun:broken_halt_test
+fun:erts_debug_set_internal_state_2
+fun:process_main
+}
+
diff --git a/erts/preloaded/ebin/erl_prim_loader.beam b/erts/preloaded/ebin/erl_prim_loader.beam
Binary files differ
index c8ec111e57..df768f9ed6 100644
--- a/erts/preloaded/ebin/erl_prim_loader.beam
+++ b/erts/preloaded/ebin/erl_prim_loader.beam
diff --git a/erts/preloaded/ebin/erlang.beam b/erts/preloaded/ebin/erlang.beam
Binary files differ
index 7e7ac99b1c..3478a80dd4 100644
--- a/erts/preloaded/ebin/erlang.beam
+++ b/erts/preloaded/ebin/erlang.beam
diff --git a/erts/preloaded/ebin/erts_internal.beam b/erts/preloaded/ebin/erts_internal.beam
Binary files differ
index ad43962b8b..9ed45b34bf 100644
--- a/erts/preloaded/ebin/erts_internal.beam
+++ b/erts/preloaded/ebin/erts_internal.beam
diff --git a/erts/preloaded/ebin/init.beam b/erts/preloaded/ebin/init.beam
Binary files differ
index f196952ef2..7361139cde 100644
--- a/erts/preloaded/ebin/init.beam
+++ b/erts/preloaded/ebin/init.beam
diff --git a/erts/preloaded/ebin/otp_ring0.beam b/erts/preloaded/ebin/otp_ring0.beam
Binary files differ
index 4d22d8bace..4af9d233b5 100644
--- a/erts/preloaded/ebin/otp_ring0.beam
+++ b/erts/preloaded/ebin/otp_ring0.beam
diff --git a/erts/preloaded/ebin/prim_eval.beam b/erts/preloaded/ebin/prim_eval.beam
Binary files differ
index efc8347b6e..7c0b49235e 100644
--- a/erts/preloaded/ebin/prim_eval.beam
+++ b/erts/preloaded/ebin/prim_eval.beam
diff --git a/erts/preloaded/ebin/prim_file.beam b/erts/preloaded/ebin/prim_file.beam
Binary files differ
index 6c49b5185e..00babefbb4 100644
--- a/erts/preloaded/ebin/prim_file.beam
+++ b/erts/preloaded/ebin/prim_file.beam
diff --git a/erts/preloaded/ebin/prim_inet.beam b/erts/preloaded/ebin/prim_inet.beam
Binary files differ
index f58ee4b4d5..6640a29c62 100644
--- a/erts/preloaded/ebin/prim_inet.beam
+++ b/erts/preloaded/ebin/prim_inet.beam
diff --git a/erts/preloaded/ebin/prim_zip.beam b/erts/preloaded/ebin/prim_zip.beam
Binary files differ
index 73be297bbb..3d6f1548d0 100644
--- a/erts/preloaded/ebin/prim_zip.beam
+++ b/erts/preloaded/ebin/prim_zip.beam
diff --git a/erts/preloaded/ebin/zlib.beam b/erts/preloaded/ebin/zlib.beam
Binary files differ
index 9eaf8b9e59..3224546179 100644
--- a/erts/preloaded/ebin/zlib.beam
+++ b/erts/preloaded/ebin/zlib.beam
diff --git a/erts/preloaded/src/erlang.erl b/erts/preloaded/src/erlang.erl
index 4bbad4df99..fd11c101bc 100644
--- a/erts/preloaded/src/erlang.erl
+++ b/erts/preloaded/src/erlang.erl
@@ -1738,7 +1738,7 @@ start_timer(0, Dest, Msg) ->
 		orelse (erlang:is_atom(Dest) andalso Dest /= undefined)),
 	  TimerRef = erlang:make_ref(),
-	  try Dest ! {timeout, TimerRef, Msg} catch _:_ -> ok end,
+	  _ = try Dest ! {timeout, TimerRef, Msg} catch _:_ -> ok end,
 	  TimerRef
     catch
 	_:_ ->
diff --git a/erts/preloaded/src/erts_internal.erl b/erts/preloaded/src/erts_internal.erl
index e2c3d40f61..e489001532 100644
--- a/erts/preloaded/src/erts_internal.erl
+++ b/erts/preloaded/src/erts_internal.erl
@@ -249,7 +249,7 @@ create_bif_timer() ->
 -spec erts_internal:access_bif_timer(Ref) -> Res when
       Ref :: reference(),
-      Res :: {reference(), pid(), reference()}.
+      Res :: {reference(), pid()} | 'undefined'.
 access_bif_timer(_Ref) ->
     erlang:nif_error(undefined).
@@ -369,7 +369,7 @@ tsrv_handle_msg({cancel_timeout, BTR, From, Reply, Req, TRef},
 		false ->
 		    ok;
 		_ ->
-		    try From ! {cancel_timer, Req, false} catch _:_ -> ok end
+		    _ = try From !
{cancel_timer, Req, false} catch _:_ -> ok end end, Nxt; [{Time, TRef} = TKey] -> @@ -387,7 +387,7 @@ tsrv_handle_msg({cancel_timeout, BTR, From, Reply, Req, TRef}, false -> ((1000*(Time - RcvTime)) div Unit) end, - try From ! {cancel_timer, Req, RT} catch _:_ -> ok end + _ = try From ! {cancel_timer, Req, RT} catch _:_ -> ok end end, case Time =:= Nxt of false -> @@ -406,14 +406,14 @@ tsrv_handle_msg({read_timeout, BTR, From, Req, TRef}, Nxt) -> case ets:lookup(RTab, TRef) of [] -> - try From ! {read_timer, Req, false} catch _:_ -> ok end; + _ = try From ! {read_timer, Req, false} catch _:_ -> ok end; [{Time, TRef}] -> RcvTime = erlang:monotonic_time(), RT = case Time =< RcvTime of true -> 0; false -> (1000*(Time - RcvTime)) div Unit end, - try From ! {read_timer, Req, RT} catch _:_ -> ok end + _ = try From ! {read_timer, Req, RT} catch _:_ -> ok end end, Nxt; tsrv_handle_msg({'DOWN', TRef, process, _, _}, @@ -473,6 +473,6 @@ tsrv_handle_timeout(CallTime, #tsrv_state{rtab = RTab, end, ets:delete(TTab, TKey), ets:delete(RTab, TRef), - try Proc ! Msg catch _:_ -> ok end, + _ = try Proc ! Msg catch _:_ -> ok end, tsrv_handle_timeout(CallTime, S) end. diff --git a/lib/asn1/doc/src/notes.xml b/lib/asn1/doc/src/notes.xml index cf87c01658..9feb673c04 100644 --- a/lib/asn1/doc/src/notes.xml +++ b/lib/asn1/doc/src/notes.xml @@ -31,6 +31,21 @@ <p>This document describes the changes made to the asn1 application.</p> +<section><title>Asn1 3.0.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p>The ASN.1 compiler would crash if a SEQUENCE ended + with a double set of ellipses (<c>...</c>).</p> + <p> + Own Id: OTP-12546 Aux Id: seq12815 </p> + </item> + </list> + </section> + +</section> + <section><title>Asn1 3.0.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/asn1/vsn.mk b/lib/asn1/vsn.mk index daaf26a17f..d1c364c34a 100644 --- a/lib/asn1/vsn.mk +++ b/lib/asn1/vsn.mk @@ -1,2 +1,2 @@ #next version number to use is 2.0 -ASN1_VSN = 3.0.3 +ASN1_VSN = 3.0.4 diff --git a/lib/common_test/doc/src/notes.xml b/lib/common_test/doc/src/notes.xml index 94738d2eff..822ebf146e 100644 --- a/lib/common_test/doc/src/notes.xml +++ b/lib/common_test/doc/src/notes.xml @@ -32,6 +32,166 @@ <file>notes.xml</file> </header> +<section><title>Common_Test 1.10</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + The tests overview file, index.html, did not always get + updated correctly after a new test run. This was because + of a bug in the Common Test log cache mechanism which has + now been corrected.</p> + <p> + Own Id: OTP-11400</p> + </item> + <item> + <p> + When a successful test case returns, Common Test should, + according to the documentation, send a tc_done event to + the event handlers with Result = ok in the data field. + However, Common Test sets Result to the return value of + the test case instead. Common Test has been modified now + to comply with the documentation.</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-12279 Aux Id: seq12737, OTP-12531 </p> + </item> + <item> + <p> + A ct_telnet:expect/3 call could never be aborted before + an idle_timeout, even if total_timeout had been set to a + lower value (i.e. a shorter time). This problem has been + fixed.</p> + <p> + Own Id: OTP-12335</p> + </item> + <item> + <p> + The undocumented return value {skipped,Reason} from + config functions and test cases was handled + inconsistently. Test cases were e.g. 
reported as + "skipped" to CT Hook functions, but "successful" to event + handlers. Now, the above return value is consistently + handled the same way as {skip,Reason} and this has also + been documented.</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-12359 Aux Id: seq12760 </p> + </item> + <item> + <p> + The Erlang source code to HTML generator would sometimes + fail because epp:parse_erl_form/1 could not find and + expand required macros in included header files. The + problem has been solved by making sure common_test always + passes the full include path to epp. Also, a bug that + could cause erl_syntax:revert/1 to fail because of a + badly formed syntax tree has been corrected.</p> + <p> + Own Id: OTP-12419</p> + </item> + <item> + <p> + A missing group option in the ct_run help text has been + added.</p> + <p> + Own Id: OTP-12433 Aux Id: seq12788 </p> + </item> + <item> + <p> + Printouts by means of ct:log/2/3 or ct:pal/2/3 from the + hook functions on_tc_fail/2 and on_tc_skip/2 would (quite + unexpectedly) end up in the "unexpected i/o" log file + instead of in the test case log file. This behaviour has + been changed so that now, all printouts (including stdio + printouts) from these hook functions will be routed to + the test case log file.</p> + <p> + Own Id: OTP-12468</p> + </item> + <item> + <p> + ct_netconfc:action/3 will now - if the return type is + void - accept an RPC reply on the form + {ok,[simple_xml()]}, and in this event return only the + atom ok.</p> + <p> + Own Id: OTP-12491 Aux Id: seq12797 </p> + </item> + <item> + <p> + OTP-11971 erroneously changed the handling of relative + paths for incl_dirs specified in the cover spec file. + This is now corrected so these are expected to be + relative to the directory where the cover spec file + itself is stored</p> + <p> + Own Id: OTP-12498 Aux Id: OTP-11971 </p> + </item> + <item> + <p> + Some test cases have been updated to use ct:sleep/1 + instead of timer:sleep/1. The reason being that the sleep + times need to be scaled to compensate for slow execution + (e.g. when cover is running).</p> + <p> + Own Id: OTP-12574</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Common Test now exports a function, + ct:get_event_mgr_ref/0, that returns the name of the + Common Test event manager. This makes it possible to plug + in event handlers to the event manager while tests are + running (using the gen_event API).</p> + <p> + Own Id: OTP-12506 Aux Id: seq12802 </p> + </item> + <item> + <p> + When a test case (or configuration function) fails + because of an exit signal from a linked process, Common + Test previously passed only the reason for process + termination to the CT post hook functions and the event + handlers (in the tc_done event). This has been changed so + that now the tuple {'EXIT',ReasonForProcessTermination} + is passed instead. This makes it much easier in the CT + post hook functions to distinguish a failure of this sort + from other types of errors and from the return value of a + successful test case.</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-12531 Aux Id: OTP-12279 </p> + </item> + <item> + <p> + A new feature has been introduced in ct_telnet:get_data/1 + that makes it possible to automatically poll the telnet + connection in case an incomplete string (one that has not + yet been terminated by a newline) remains in the receive + buffer. 
The polling is controlled by two new telnet + config values, which are documented in the ct_telnet + reference manual. The polling mechanism is disabled by + default (making the get_data/1 function backwards + compatible).</p> + <p> + Own Id: OTP-12627</p> + </item> + </list> + </section> + +</section> + <section><title>Common_Test 1.9</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/common_test/src/ct_telnet.erl b/lib/common_test/src/ct_telnet.erl index 4e03bf8630..d906a267a1 100644 --- a/lib/common_test/src/ct_telnet.erl +++ b/lib/common_test/src/ct_telnet.erl @@ -29,7 +29,9 @@ %% Command timeout = 10 sec (time to wait for a command to return) %% Max no of reconnection attempts = 3 %% Reconnection interval = 5 sek (time to wait in between reconnection attempts) -%% Keep alive = true (will send NOP to the server every 10 sec if connection is idle)</pre> +%% Keep alive = true (will send NOP to the server every 10 sec if connection is idle) +%% Polling limit = 0 (max number of times to poll to get a remaining string terminated) +%% Polling interval = 1 sec (sleep time between polls)</pre> %% <p>These parameters can be altered by the user with the following %% configuration term:</p> %% <pre> @@ -37,7 +39,9 @@ %% {command_timeout,Millisec}, %% {reconnection_attempts,N}, %% {reconnection_interval,Millisec}, -%% {keep_alive,Bool}]}.</pre> +%% {keep_alive,Bool}, +%% {poll_limit,N}, +%% {poll_interval,Millisec}]}.</pre> %% <p><code>Millisec = integer(), N = integer()</code></p> %% <p>Enter the <code>telnet_settings</code> term in a configuration %% file included in the test and ct_telnet will retrieve the information @@ -156,6 +160,8 @@ -define(RECONN_TIMEOUT,5000). -define(DEFAULT_TIMEOUT,10000). -define(DEFAULT_PORT,23). +-define(POLL_LIMIT,0). +-define(POLL_INTERVAL,1000). -include("ct_util.hrl"). @@ -169,6 +175,8 @@ type, target_mod, keep_alive, + poll_limit=?POLL_LIMIT, + poll_interval=?POLL_INTERVAL, extra, conn_to=?DEFAULT_TIMEOUT, com_to=?DEFAULT_TIMEOUT, @@ -379,8 +387,15 @@ cmdf(Connection,CmdFormat,Args,Opts) when is_list(Args) -> %%% Connection = ct_telnet:connection() %%% Data = [string()] %%% Reason = term() -%%% @doc Get all data which has been received by the telnet client -%%% since last command was sent. +%%% @doc Get all data that has been received by the telnet client +%%% since the last command was sent. Note that only newline terminated +%%% strings are returned. If the last string received has not yet +%%% been terminated, the connection may be polled automatically until +%%% the string is complete. The polling feature is controlled +%%% by the `poll_limit' and `poll_interval' config values and is +%%% by default disabled (meaning the function will immediately +%%% return all complete strings received and save a remaining +%%% non-terminated string for a later `get_data' call). 
get_data(Connection) -> case get_handle(Connection) of {ok,Pid} -> @@ -596,9 +611,12 @@ init(Name,{Ip,Port,Type},{TargetMod,KeepAlive,Extra}) -> "Reconnection attempts: ~p\n" "Reconnection interval: ~p\n" "Connection timeout: ~p\n" - "Keep alive: ~w", + "Keep alive: ~w\n" + "Poll limit: ~w\n" + "Poll interval: ~w", [Ip,Port,S1#state.com_to,S1#state.reconns, - S1#state.reconn_int,S1#state.conn_to,KeepAlive]), + S1#state.reconn_int,S1#state.conn_to,KeepAlive, + S1#state.poll_limit,S1#state.poll_interval]), {ok,TelnPid,S1}; {'EXIT',Reason} -> {error,Reason}; @@ -619,6 +637,10 @@ set_telnet_defaults([{reconnection_interval,RInt}|Ss],S) -> set_telnet_defaults(Ss,S#state{reconn_int=RInt}); set_telnet_defaults([{keep_alive,_}|Ss],S) -> set_telnet_defaults(Ss,S); +set_telnet_defaults([{poll_limit,PL}|Ss],S) -> + set_telnet_defaults(Ss,S#state{poll_limit=PL}); +set_telnet_defaults([{poll_interval,PI}|Ss],S) -> + set_telnet_defaults(Ss,S#state{poll_interval=PI}); set_telnet_defaults([Unknown|Ss],S) -> force_log(S,error, "Bad element in telnet_settings: ~p",[Unknown]), @@ -706,10 +728,8 @@ handle_msg({send,Cmd,Opts},State) -> handle_msg(get_data,State) -> start_gen_log(heading(get_data,State#state.name)), log(State,cmd,"Reading data...",[]), - {ok,Data,Buffer} = teln_get_all_data(State#state.teln_pid, - State#state.prx, - State#state.buffer, - [],[]), + {ok,Data,Buffer} = teln_get_all_data(State,State#state.buffer,[],[], + State#state.poll_limit), log(State,recv,"Return: ~p",[{ok,Data}]), end_gen_log(), {{ok,Data},State#state{buffer=Buffer}}; @@ -944,16 +964,25 @@ teln_cmd(Pid,Cmd,Prx,Newline,Timeout) -> ct_telnet_client:send_data(Pid,Cmd,Newline), teln_receive_until_prompt(Pid,Prx,Timeout). -teln_get_all_data(Pid,Prx,Data,Acc,LastLine) -> +teln_get_all_data(State=#state{teln_pid=Pid,prx=Prx},Data,Acc,LastLine,Polls) -> case check_for_prompt(Prx,LastLine++Data) of {prompt,Lines,_PromptType,Rest} -> - teln_get_all_data(Pid,Prx,Rest,[Lines|Acc],[]); + teln_get_all_data(State,Rest,[Lines|Acc],[],State#state.poll_limit); {noprompt,Lines,LastLine1} -> case ct_telnet_client:get_data(Pid) of + {ok,[]} when LastLine1 /= [], Polls > 0 -> + %% No more data from server but the last string is not + %% a complete line (maybe because of a slow connection), + timer:sleep(State#state.poll_interval), + NewPolls = if Polls == infinity -> infinity; + true -> Polls-1 + end, + teln_get_all_data(State,[],[Lines|Acc],LastLine1,NewPolls); {ok,[]} -> {ok,lists:reverse(lists:append([Lines|Acc])),LastLine1}; {ok,Data1} -> - teln_get_all_data(Pid,Prx,Data1,[Lines|Acc],LastLine1) + teln_get_all_data(State,Data1,[Lines|Acc],LastLine1, + State#state.poll_limit) end end. 
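%% Editor's note (not part of the patch): with the new poll_limit and
%% poll_interval options implemented above, a telnet_settings entry in a
%% Common Test configuration file could look as follows; the values shown
%% here are the ones used by the telnet_config in ct_telnet_SUITE further
%% down in this diff.
{telnet_settings, [{command_timeout,10000},
                   {reconnection_attempts,0},
                   {reconnection_interval,0},
                   {keep_alive,true},
                   {poll_limit,10},
                   {poll_interval,1000}]}.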
@@ -1106,12 +1135,18 @@ repeat_expect(Name,Pid,Data,Pattern,Acc,EO) -> teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO, total_timeout=TotalTO}) -> - ExpectFun = case EO#eo.seq of + %% TotalTO is a float value in this loop (unless it's 'infinity'), + %% but an integer value will be passed to the other functions + EOMod = if TotalTO /= infinity -> EO#eo{total_timeout=trunc(TotalTO)}; + true -> EO + end, + + ExpectFun = case EOMod#eo.seq of true -> fun() -> - seq_expect(Name,Pid,Data,Pattern,Acc,EO) + seq_expect(Name,Pid,Data,Pattern,Acc,EOMod) end; false -> fun() -> - one_expect(Name,Pid,Data,Pattern,EO) + one_expect(Name,Pid,Data,Pattern,EOMod) end end, case ExpectFun() of @@ -1121,9 +1156,14 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO, {halt,Why,Rest}; NotFinished -> %% Get more data - Fun = fun() -> get_data1(EO#eo.teln_pid) end, - BreakAfter = if TotalTO < IdleTO -> TotalTO; true -> IdleTO end, - case timer:tc(ct_gen_conn, do_within_time, [Fun, BreakAfter]) of + Fun = fun() -> get_data1(EOMod#eo.teln_pid) end, + BreakAfter = if TotalTO < IdleTO -> + %% use the integer value + EOMod#eo.total_timeout; + true -> + IdleTO + end, + case timer:tc(ct_gen_conn, do_within_time, [Fun,BreakAfter]) of {_,{error,Reason}} -> %% A timeout will occur when the telnet connection %% is idle for EO#eo.idle_timeout milliseconds. @@ -1132,13 +1172,15 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO, case NotFinished of {nomatch,Rest} -> %% One expect - teln_expect1(Name,Pid,Rest++Data1,Pattern,[],EO); + teln_expect1(Name,Pid,Rest++Data1, + Pattern,[],EOMod); {continue,Patterns1,Acc1,Rest} -> %% Sequence - teln_expect1(Name,Pid,Rest++Data1,Patterns1,Acc1,EO) + teln_expect1(Name,Pid,Rest++Data1, + Patterns1,Acc1,EOMod) end; {Elapsed,{ok,Data1}} -> - TVal = trunc(TotalTO - (Elapsed/1000)), + TVal = TotalTO - (Elapsed/1000), if TVal =< 0 -> {error,timeout}; true -> @@ -1146,10 +1188,12 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO, case NotFinished of {nomatch,Rest} -> %% One expect - teln_expect1(Name,Pid,Rest++Data1,Pattern,[],EO1); + teln_expect1(Name,Pid,Rest++Data1, + Pattern,[],EO1); {continue,Patterns1,Acc1,Rest} -> %% Sequence - teln_expect1(Name,Pid,Rest++Data1,Patterns1,Acc1,EO1) + teln_expect1(Name,Pid,Rest++Data1, + Patterns1,Acc1,EO1) end end end @@ -1430,8 +1474,10 @@ check_for_prompt(Prx,Data) -> split_lines(String) -> split_lines(String,[],[]). -split_lines([$\n|Rest],Line,Lines) -> +split_lines([$\n|Rest],Line,Lines) when Line /= [] -> split_lines(Rest,[],[lists:reverse(Line)|Lines]); +split_lines([$\n|Rest],[],Lines) -> + split_lines(Rest,[],Lines); split_lines([$\r|Rest],Line,Lines) -> split_lines(Rest,Line,Lines); split_lines([0|Rest],Line,Lines) -> diff --git a/lib/common_test/src/ct_telnet_client.erl b/lib/common_test/src/ct_telnet_client.erl index f39863824c..b0734d8d65 100644 --- a/lib/common_test/src/ct_telnet_client.erl +++ b/lib/common_test/src/ct_telnet_client.erl @@ -32,7 +32,7 @@ -module(ct_telnet_client). -%% -define(debug, true). +%%-define(debug, true). -export([open/2, open/3, open/4, open/5, close/1]). -export([send_data/2, send_data/3, get_data/1]). @@ -111,7 +111,6 @@ get_data(Pid) -> {ok,Data} end. 
- %%%----------------------------------------------------------------- %%% Internal functions init(Parent, Server, Port, Timeout, KeepAlive, ConnName) -> @@ -146,7 +145,7 @@ loop(State, Sock, Acc) -> ok end; {tcp,_,Msg0} -> - dbg("tcp msg: ~tp~n",[Msg0]), + dbg("rcv tcp msg: ~tp~n",[Msg0]), Msg = check_msg(Sock,Msg0,[]), loop(State, Sock, [Msg | Acc]); {send_data,Data} -> @@ -180,6 +179,7 @@ loop(State, Sock, Acc) -> NewState = case State of #state{keep_alive = true, get_data = 0} -> + dbg("sending NOP\n",[]), if Acc == [] -> send([?IAC,?NOP], Sock, State#state.conn_name); true -> ok @@ -225,15 +225,17 @@ loop(State, Sock, Acc) -> gen_tcp:close(Sock), Pid ! closed after wait(State#state.keep_alive,?IDLE_TIMEOUT) -> + dbg("idle timeout\n",[]), Data = lists:reverse(lists:append(Acc)), case Data of [] -> + dbg("sending NOP\n",[]), send([?IAC,?NOP], Sock, State#state.conn_name), loop(State, Sock, Acc); _ when State#state.log_pos == length(Data)+1 -> loop(State, Sock, Acc); _ -> - dbg("Idle timeout, printing ~tp\n",[Data]), + dbg("idle timeout, printing ~tp\n",[Data]), Len = length(Data), ct_telnet:log(State#state.conn_name, general_io, "~ts", diff --git a/lib/common_test/test/ct_telnet_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE.erl index 84e69c2b54..62cb821ede 100644 --- a/lib/common_test/test/ct_telnet_SUITE.erl +++ b/lib/common_test/test/ct_telnet_SUITE.erl @@ -203,7 +203,9 @@ telnet_config(_, LogType) -> {command_timeout,10000}, {reconnection_attempts,0}, {reconnection_interval,0}, - {keep_alive,true}]} | + {keep_alive,true}, + {poll_limit,10}, + {poll_interval,1000}]} | if LogType == legacy -> [{ct_conn_log,[]}]; true -> diff --git a/lib/common_test/vsn.mk b/lib/common_test/vsn.mk index 849edc15e1..d654a8afb3 100644 --- a/lib/common_test/vsn.mk +++ b/lib/common_test/vsn.mk @@ -1 +1 @@ -COMMON_TEST_VSN = 1.9 +COMMON_TEST_VSN = 1.10 diff --git a/lib/compiler/doc/src/notes.xml b/lib/compiler/doc/src/notes.xml index 84ebd2f210..9b5b44f3e1 100644 --- a/lib/compiler/doc/src/notes.xml +++ b/lib/compiler/doc/src/notes.xml @@ -31,6 +31,58 @@ <p>This document describes the changes made to the Compiler application.</p> +<section><title>Compiler 5.0.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Matching out a map from a record and then updating the + record could cause a 'badarg' exception at run-time. + (Thanks to Dmitry Aleksandrov for reporting this bug.)</p> + <p> + Own Id: OTP-12402</p> + </item> + <item> + <p>The compiler would crash when compiling some complex, + nonsensical guards such as:</p> + <p> ... <c>when {{X}}, -X</c>...</p> + <p> + Own Id: OTP-12410</p> + </item> + <item> + <p> + In rare circumstances, using binary pattern in the value + part of a map pattern would cause the compiler to crash.</p> + <p> + Own Id: OTP-12414</p> + </item> + <item> + <p>Case expressions where a map was wrapped in a tuple or + list such as:</p> + <p><c>case {a,Map} of</c><br/> <c>{a,#{k:=_}}=Tuple -> + Tuple</c><br/> <c>end.</c></p> + <p>would be unsafely "optimized" to either cause an + exception at run-time or would return an empty map.</p> + <p> + Own Id: OTP-12451</p> + </item> + <item> + <p>When a variable was compared to a literal map using + the '<c>==</c>' operator, the compiler would change the + operator to '<c>=:=</c>' since it is more efficient. + However, this optimization is not safe if the map literal + has numeric keys or values. 
The compiler will now only do + the optimization if all keys and values are + non-numeric.</p> + <p> + Own Id: OTP-12456</p> + </item> + </list> + </section> + +</section> + <section><title>Compiler 5.0.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl index 1c9574c2a2..8870315084 100644 --- a/lib/compiler/test/map_SUITE.erl +++ b/lib/compiler/test/map_SUITE.erl @@ -22,12 +22,17 @@ -export([ %% literals - t_build_and_match_literals/1, - t_update_literals/1,t_match_and_update_literals/1, + t_build_and_match_literals/1, t_build_and_match_literals_large/1, + t_update_literals/1, t_update_literals_large/1, + t_match_and_update_literals/1, t_match_and_update_literals_large/1, t_update_map_expressions/1, - t_update_assoc/1,t_update_exact/1, - t_guard_bifs/1, t_guard_sequence/1, t_guard_update/1, - t_guard_receive/1, t_guard_fun/1, + t_update_assoc/1, t_update_assoc_large/1, + t_update_exact/1, t_update_exact_large/1, + t_guard_bifs/1, + t_guard_sequence/1, t_guard_sequence_large/1, + t_guard_update/1, t_guard_update_large/1, + t_guard_receive/1, t_guard_receive_large/1, + t_guard_fun/1, t_list_comprehension/1, t_map_sort_literals/1, t_map_size/1, @@ -65,12 +70,17 @@ all() -> test_lib:recompile(?MODULE), [ %% literals - t_build_and_match_literals, - t_update_literals, t_match_and_update_literals, + t_build_and_match_literals, t_build_and_match_literals_large, + t_update_literals, t_update_literals_large, + t_match_and_update_literals, t_match_and_update_literals_large, t_update_map_expressions, - t_update_assoc,t_update_exact, - t_guard_bifs, t_guard_sequence, t_guard_update, - t_guard_receive,t_guard_fun, t_list_comprehension, + t_update_assoc, t_update_assoc_large, + t_update_exact, t_update_exact_large, + t_guard_bifs, + t_guard_sequence, t_guard_sequence_large, + t_guard_update, t_guard_update_large, + t_guard_receive, t_guard_receive_large, + t_guard_fun, t_list_comprehension, t_map_sort_literals, t_map_size, t_build_and_match_aliasing, @@ -157,6 +167,461 @@ t_build_and_match_literals(Config) when is_list(Config) -> {'EXIT',{{badmatch,_},_}} = (catch (#{#{"a"=>42} := 3}=id(#{#{"a"=>3}=>42}))), ok. 
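%% Editor's note (not part of the patch): the mixed integer/float keys and
%% values in the large-literal cases below are the kind of terms behind the
%% OTP-12456 note above -- '==' and '=:=' can disagree on maps that contain
%% numbers, which is why the compiler only rewrites '==' on a map literal to
%% '=:=' when all keys and values are non-numeric. A hedged standalone
%% illustration (the function name is hypothetical):
numeric_map_compare_sketch() ->
    true  = #{key => 1} ==  #{key => 1.0},  %% values are compared arithmetically
    false = #{key => 1} =:= #{key => 1.0},  %% exact comparison keeps 1 and 1.0 apart
    ok.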
+t_build_and_match_literals_large(Config) when is_list(Config) -> + % normal non-repeating + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M0, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M0, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M0, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M0, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M0, + + 60 = map_size(M0), + 60 = maps:size(M0), + + % with repeating + M1 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 10=>na0,20=>nb0,30=>"nc0","40"=>"nd0",<<"50">>=>"ne0",{["00"]}=>"n10", + 11=>na1,21=>nb1,31=>"nc1","41"=>"nd1",<<"51">>=>"ne1",{["01"]}=>"n11", + 12=>na2,22=>nb2,32=>"nc2","42"=>"nd2",<<"52">>=>"ne2",{["02"]}=>"n12", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + + 13=>na3,23=>nb3,33=>"nc3","43"=>"nd3",<<"53">>=>"ne3",{["03"]}=>"n13", + 14=>na4,24=>nb4,34=>"nc4","44"=>"nd4",<<"54">>=>"ne4",{["04"]}=>"n14", + + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }), + + #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1, + #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1, + #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1, + #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1, + #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1, + + 60 = map_size(M1), + 60 = maps:size(M1), + + % with floats + + M2 = id(#{ 
10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9"}), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + 90 = map_size(M2), + 90 = maps:size(M2), + + % with bignums + M3 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 
15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + 36893488147419103232=>big1, 73786976294838206464=>big2, + 147573952589676412928=>big3, 18446744073709551616=>big4, + 4294967296=>big5, 8589934592=>big6, + 4294967295=>big7, 67108863=>big8 + }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + + 98 = map_size(M3), + 98 = maps:size(M3), + + %% with maps + + M4 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => 
"small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M4, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M4, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M4, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M4, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M4, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M4, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M4, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M4, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M4, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M4, + + #{ #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := "small map key 2", + #{ third => small, map => key } := "small map key 3" } = M4, + + #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 
k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M4, + + + #{ 15:=V1,25:=b5,35:=V2,"45":="d5",<<"55">>:=V3,{["05"]}:="15", + #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := V4, + #{ third => small, map => key } := "small map key 3", + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := V5 } = M4, + + a5 = V1, + "c5" = V2, + "e5" = V3, + "small map key 2" = V4, + "large map key 1" = V5, + + 95 = map_size(M4), + 95 = maps:size(M4), + + % call for value + + M5 = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + #{ one => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 
11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M5, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M5, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M5, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M5, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M5, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M5, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M5, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M5, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M5, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M5, + + #{ #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := "small map key 2", + #{ third => small, map => key } := "small map key 3" } = M5, + + #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M5, + + 95 = map_size(M5), + 95 = maps:size(M5), + + %% remember + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0, + + #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1, + 
#{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1, + #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1, + #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1, + #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + + ok. + t_build_and_match_aliasing(Config) when is_list(Config) -> M1 = id(#{a=>1,b=>2,c=>3,d=>4}), @@ -218,13 +683,75 @@ t_update_literals(Config) when is_list(Config) -> ]), ok. 
+t_update_literals_large(Config) when is_list(Config) -> + Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + #{ one => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [ + {"a","1"},{"b","2"},{"c","3"},{"d","4"} + ]), + ok. + + + loop_update_literals_x_q(Map, []) -> Map; loop_update_literals_x_q(Map, [{X,Q}|Vs]) -> loop_update_literals_x_q(Map#{q=>Q,x=>X},Vs). 
% test map updates with matching t_match_and_update_literals(Config) when is_list(Config) -> - Map = #{x=>0,y=>"untouched",z=>"also untouched",q=>1}, + Map = #{ x=>0,y=>"untouched",z=>"also untouched",q=>1, + #{ "one" => small, map => key } => "small map key 1" }, + #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [ {1,2},{3,4},{5,6},{7,8} ]), @@ -238,8 +765,77 @@ t_match_and_update_literals(Config) when is_list(Config) -> #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number }, ok. +t_match_and_update_literals_large(Config) when is_list(Config) -> + Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + x=>0,y=>"untouched",z=>"also untouched",q=>1, + + #{ "one" => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{x:=16,q:=21,y:="untouched",z:="also untouched"} = 
loop_match_and_update_literals_x_q(Map, [ + {1,2},{3,4},{5,6},{7,8} + ]), + M0 = id(Map#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, + 4 => number, 18446744073709551629 => wat}), + M1 = id(Map#{}), + M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, + 4 => number, 18446744073709551629 => wat}, + M0 = M2, + + #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number }, + ok. + loop_match_and_update_literals_x_q(Map, []) -> Map; -loop_match_and_update_literals_x_q(#{q:=Q0,x:=X0} = Map, [{X,Q}|Vs]) -> +loop_match_and_update_literals_x_q(#{ q:=Q0, x:=X0, + #{ "one" => small, map => key } := "small map key 1" } = Map, [{X,Q}|Vs]) -> loop_match_and_update_literals_x_q(Map#{q=>Q0+Q,x=>X0+X},Vs). @@ -292,6 +888,76 @@ t_update_assoc(Config) when is_list(Config) -> {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting=>val}), ok. +t_update_assoc_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + M1 = 
M0#{1=>42,2=>100,4=>[a,b,c]}, + #{1:=42,2:=100,10.0:=fa0,4:=[a,b,c],25:=b5} = M1, + #{ 10:=43, 24:=b4, 15:=a5, 35:="c5", 2.0:=100, 13.0:=fa3, 4.0:=[a,b,c]} = + M0#{1.0=>float,10:=43,2.0=>wrong,2.0=>100,4.0=>[a,b,c]}, + + M2 = M0#{13.0=>new}, + #{10:=a0,20:=b0,13.0:=new,"40":="d0",<<"50">>:="e0"} = M2, + M2 = M0#{13.0:=wrong,13.0=>new}, + + %% Errors cases. + BadMap = id(badmap), + {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>M0}), + ok. + + + t_update_exact(Config) when is_list(Config) -> M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}), @@ -327,6 +993,88 @@ t_update_exact(Config) when is_list(Config) -> {'EXIT',{badarg,_}} = (catch Empty#{a:=1}), ok. +t_update_exact_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + M1 = M0#{10:=42,<<"55">>:=100,10.0:=[a,b,c]}, + #{ 10:=42,<<"55">>:=100,{["05"]}:="15",10.0:=[a,b,c], + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 
11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1" } = M1, + + M1 = M0#{10:=wrong,10=>42,<<"55">>=>wrong,<<"55">>:=100,10.0:=[a,b,c]}, + + M2 = M0#{13.0:=new}, + #{10:=a0,20:=b0,13.0:=new} = M2, + M2 = M0#{13.0=>wrong,13.0:=new}, + + %% Errors cases. + {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}), + {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}), + {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}), + {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), + + ok. + t_update_values(Config) when is_list(Config) -> V0 = id(1337), M0 = #{ a => 1, val => V0}, @@ -411,6 +1159,75 @@ t_guard_sequence(Config) when is_list(Config) -> {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(#{b=>5})), ok. +t_guard_sequence_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 
13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + {1, "a"} = map_guard_sequence_1(M0#{seq=>1,val=>id("a")}), + {2, "b"} = map_guard_sequence_1(M0#{seq=>2,val=>id("b")}), + {3, "c"} = map_guard_sequence_1(M0#{seq=>3,val=>id("c")}), + {4, "d"} = map_guard_sequence_1(M0#{seq=>4,val=>id("d")}), + {5, "e"} = map_guard_sequence_1(M0#{seq=>5,val=>id("e")}), + + {1,M1} = map_guard_sequence_2(M1 = id(M0#{a=>3})), + {2,M2} = map_guard_sequence_2(M2 = id(M0#{a=>4, b=>4})), + {3,gg,M3} = map_guard_sequence_2(M3 = id(M0#{a=>gg, b=>4})), + {4,sc,sc,M4} = map_guard_sequence_2(M4 = id(M0#{a=>sc, b=>3, c=>sc2})), + {5,kk,kk,M5} = map_guard_sequence_2(M5 = id(M0#{a=>kk, b=>other, c=>sc2})), + + {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(M0#{seq=>6,val=>id("e")})), + {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(M0#{b=>5})), + ok. + map_guard_sequence_1(#{seq:=1=Seq, val:=Val}) -> {Seq,Val}; map_guard_sequence_1(#{seq:=2=Seq, val:=Val}) -> {Seq,Val}; map_guard_sequence_1(#{seq:=3=Seq, val:=Val}) -> {Seq,Val}; @@ -431,6 +1248,65 @@ t_guard_update(Config) when is_list(Config) -> third = map_guard_update(#{x=>old,y=>old}, #{x=>third,y=>old}), ok. +t_guard_update_large(Config) when is_list(Config) -> + M0 = id(#{ 70=>a0,80=>b0,90=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10", + 71=>a1,81=>b1,91=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11", + 72=>a2,82=>b2,92=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12", + 73=>a3,83=>b3,93=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13", + 74=>a4,84=>b4,94=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14", + + 75=>a5,85=>b5,95=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15", + 76=>a6,86=>b6,96=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16", + 77=>a7,87=>b7,97=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17", + 78=>a8,88=>b8,98=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18", + 79=>a9,89=>b9,99=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19", + + 70.0=>fa0,80.0=>fb0,90.0=>"fc0", + 71.0=>fa1,81.0=>fb1,91.0=>"fc1", + 72.0=>fa2,82.0=>fb2,92.0=>"fc2", + 73.0=>fa3,83.0=>fb3,93.0=>"fc3", + 74.0=>fa4,84.0=>fb4,94.0=>"fc4", + + 75.0=>fa5,85.0=>fb5,95.0=>"fc5", + 76.0=>fa6,86.0=>fb6,96.0=>"fc6", + 77.0=>fa7,87.0=>fb7,97.0=>"fc7", + 78.0=>fa8,88.0=>fb8,98.0=>"fc8", + 79.0=>fa9,89.0=>fb9,99.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 
19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + error = map_guard_update(M0#{},M0#{}), + first = map_guard_update(M0#{},M0#{x=>first}), + second = map_guard_update(M0#{y=>old}, M0#{x=>second,y=>old}), + ok. + map_guard_update(M1, M2) when M1#{x=>first} =:= M2 -> first; map_guard_update(M1, M2) when M1#{x=>second} =:= M2 -> second; map_guard_update(M1, M2) when M1#{x:=third} =:= M2 -> third; @@ -461,6 +1337,43 @@ t_guard_receive(Config) when is_list(Config) -> done = call(Pid, done), ok. +-define(t_guard_receive_large_procs, 1500). + +t_guard_receive_large(Config) when is_list(Config) -> + M = lists:foldl(fun(_,#{procs := Ps } = M) -> + M#{ procs := Ps#{ spawn_link(fun() -> grecv_loop() end) => 0 }} + end, #{procs => #{}, done => 0}, lists:seq(1,?t_guard_receive_large_procs)), + lists:foreach(fun(Pid) -> + Pid ! {self(), hello} + end, maps:keys(maps:get(procs,M))), + ok = guard_receive_large_loop(M), + ok. + +guard_receive_large_loop(#{done := ?t_guard_receive_large_procs}) -> + ok; +guard_receive_large_loop(M) -> + receive + #{pid := Pid, msg := hello} -> + case M of + #{done := Count, procs := #{Pid := 150}} -> + Pid ! {self(), done}, + guard_receive_large_loop(M#{done := Count + 1}); + #{procs := #{Pid := Count} = Ps} -> + Pid ! {self(), hello}, + guard_receive_large_loop(M#{procs := Ps#{Pid := Count + 1}}) + end + end. + +grecv_loop() -> + receive + {_, done} -> + ok; + {Pid, hello} -> + Pid ! #{pid=>self(), msg=>hello}, + grecv_loop() + end. + + call(Pid, M) -> Pid ! {self(), M}, receive {Pid, Res} -> Res end. @@ -494,6 +1407,11 @@ t_list_comprehension(Config) when is_list(Config) -> Ls = id([#{<<2:301>> => I, "wat" => I + 1} || I <- [1,2,3]]), [#{<<2:301>>:=1,"wat":=2},#{<<2:301>>:=2,"wat":=3},#{<<2:301>>:=3,"wat":=4}] = Ls, [{1,2},{2,3},{3,4}] = id([{I2,I1} || #{"wat" := I1, <<2:301>> := I2} <- Ls]), + + Ks = lists:seq($a,$z), + Ms = [#{[K1,K2]=>{K1,K2}} || K1 <- Ks, K2 <- Ks], + [#{"aa" := {$a,$a}},#{"ab":={$a,$b}}|_] = Ms, + [#{"zz" := {$z,$z}},#{"zy":={$z,$y}}|_] = lists:reverse(Ms), ok. t_guard_fun(Config) when is_list(Config) -> diff --git a/lib/compiler/vsn.mk b/lib/compiler/vsn.mk index b1a6c15ac9..05e682c893 100644 --- a/lib/compiler/vsn.mk +++ b/lib/compiler/vsn.mk @@ -1 +1 @@ -COMPILER_VSN = 5.0.3 +COMPILER_VSN = 5.0.4 diff --git a/lib/crypto/doc/src/notes.xml b/lib/crypto/doc/src/notes.xml index 605d61e8e4..a0ebc4b3dd 100644 --- a/lib/crypto/doc/src/notes.xml +++ b/lib/crypto/doc/src/notes.xml @@ -30,6 +30,23 @@ </header> <p>This document describes the changes made to the Crypto application.</p> +<section><title>Crypto 3.5</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Extend block_encrypt/decrypt for aes_cfb8 and aes_cfb128 + to accept keys of length 128, 192 and 256 bits. 
Before + only 128 bit keys were accepted.</p> + <p> + Own Id: OTP-12467</p> + </item> + </list> + </section> + +</section> + <section><title>Crypto 3.4.2</title> <section><title>Improvements and New Features</title> diff --git a/lib/crypto/vsn.mk b/lib/crypto/vsn.mk index b87685cb3f..8489b59562 100644 --- a/lib/crypto/vsn.mk +++ b/lib/crypto/vsn.mk @@ -1 +1 @@ -CRYPTO_VSN = 3.4.2 +CRYPTO_VSN = 3.5 diff --git a/lib/debugger/doc/src/notes.xml b/lib/debugger/doc/src/notes.xml index b4baa2a1cd..7384189a6f 100644 --- a/lib/debugger/doc/src/notes.xml +++ b/lib/debugger/doc/src/notes.xml @@ -32,6 +32,21 @@ <p>This document describes the changes made to the Debugger application.</p> +<section><title>Debugger 4.0.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix save state which did not work on Mac.</p> + <p> + Own Id: OTP-12378</p> + </item> + </list> + </section> + +</section> + <section><title>Debugger 4.0.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/debugger/test/map_SUITE.erl b/lib/debugger/test/map_SUITE.erl index b114d29f44..ac2e16f00a 100644 --- a/lib/debugger/test/map_SUITE.erl +++ b/lib/debugger/test/map_SUITE.erl @@ -24,12 +24,17 @@ ]). -export([ - t_build_and_match_literals/1, - t_update_literals/1,t_match_and_update_literals/1, + t_build_and_match_literals/1, t_build_and_match_literals_large/1, + t_update_literals/1, t_update_literals_large/1, + t_match_and_update_literals/1, t_match_and_update_literals_large/1, t_update_map_expressions/1, - t_update_assoc/1,t_update_exact/1, - t_guard_bifs/1, t_guard_sequence/1, t_guard_update/1, - t_guard_receive/1, t_guard_fun/1, + t_update_assoc/1, t_update_assoc_large/1, + t_update_exact/1, t_update_exact_large/1, + t_guard_bifs/1, + t_guard_sequence/1, t_guard_sequence_large/1, + t_guard_update/1, t_guard_update_large/1, + t_guard_receive/1, t_guard_receive_large/1, + t_guard_fun/1, t_list_comprehension/1, t_map_sort_literals/1, t_map_size/1, @@ -90,12 +95,17 @@ suite() -> []. all() -> [ - t_build_and_match_literals, - t_update_literals, t_match_and_update_literals, + t_build_and_match_literals, t_build_and_match_literals_large, + t_update_literals, t_update_literals_large, + t_match_and_update_literals, t_match_and_update_literals_large, t_update_map_expressions, - t_update_assoc,t_update_exact, - t_guard_bifs, t_guard_sequence, t_guard_update, - t_guard_receive,t_guard_fun, t_list_comprehension, + t_update_assoc, t_update_assoc_large, + t_update_exact, t_update_exact_large, + t_guard_bifs, + t_guard_sequence, t_guard_sequence_large, + t_guard_update, t_guard_update_large, + t_guard_receive, t_guard_receive_large, + t_guard_fun, t_list_comprehension, t_map_sort_literals, t_build_and_match_aliasing, @@ -189,6 +199,462 @@ t_build_and_match_literals(Config) when is_list(Config) -> {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id(#{x=>"three"}))), ok. 
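A minimal usage sketch for the Crypto 3.5 change noted above (OTP-12467): block_encrypt/4 and block_decrypt/4 should now accept 192- and 256-bit keys for aes_cfb8 and aes_cfb128, where earlier releases only took 128-bit keys. The key, IV and plaintext values below are illustrative assumptions, not taken from the patch:

    %% Assumes crypto >= 3.5 and an OpenSSL build with AES-256 support.
    Key    = crypto:strong_rand_bytes(32),   %% 256-bit key; rejected before crypto 3.5
    IVec   = crypto:strong_rand_bytes(16),   %% 128-bit initialisation vector
    Plain  = <<"some plaintext">>,
    Cipher = crypto:block_encrypt(aes_cfb128, Key, IVec, Plain),
    Plain  = crypto:block_decrypt(aes_cfb128, Key, IVec, Cipher).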
+t_build_and_match_literals_large(Config) when is_list(Config) -> + % normal non-repeating + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M0, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M0, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M0, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M0, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M0, + + 60 = map_size(M0), + 60 = maps:size(M0), + + % with repeating + M1 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 10=>na0,20=>nb0,30=>"nc0","40"=>"nd0",<<"50">>=>"ne0",{["00"]}=>"n10", + 11=>na1,21=>nb1,31=>"nc1","41"=>"nd1",<<"51">>=>"ne1",{["01"]}=>"n11", + 12=>na2,22=>nb2,32=>"nc2","42"=>"nd2",<<"52">>=>"ne2",{["02"]}=>"n12", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + + 13=>na3,23=>nb3,33=>"nc3","43"=>"nd3",<<"53">>=>"ne3",{["03"]}=>"n13", + 14=>na4,24=>nb4,34=>"nc4","44"=>"nd4",<<"54">>=>"ne4",{["04"]}=>"n14", + + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }), + + #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1, + #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1, + #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1, + #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1, + #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1, + + 60 = map_size(M1), + 60 = maps:size(M1), + + % with floats + + M2 = id(#{ 
10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9"}), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + 90 = map_size(M2), + 90 = maps:size(M2), + + % with bignums + M3 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 
15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + 36893488147419103232=>big1, 73786976294838206464=>big2, + 147573952589676412928=>big3, 18446744073709551616=>big4, + 4294967296=>big5, 8589934592=>big6, + 4294967295=>big7, 67108863=>big8 + }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + + 98 = map_size(M3), + 98 = maps:size(M3), + + %% with maps + + M4 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => 
"small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M4, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M4, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M4, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M4, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M4, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M4, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M4, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M4, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M4, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M4, + + #{ #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := "small map key 2", + #{ third => small, map => key } := "small map key 3" } = M4, + + #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 
k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M4, + + + #{ 15:=V1,25:=b5,35:=V2,"45":="d5",<<"55">>:=V3,{["05"]}:="15", + #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := V4, + #{ third => small, map => key } := "small map key 3", + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := V5 } = M4, + + a5 = V1, + "c5" = V2, + "e5" = V3, + "small map key 2" = V4, + "large map key 1" = V5, + + 95 = map_size(M4), + 95 = maps:size(M4), + + % call for value + + M5 = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + #{ one => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 
11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M5, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M5, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M5, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M5, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M5, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M5, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M5, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M5, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M5, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M5, + + #{ #{ one => small, map => key } := "small map key 1", + #{ second => small, map => key } := "small map key 2", + #{ third => small, map => key } := "small map key 3" } = M5, + + #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M5, + + 95 = map_size(M5), + 95 = maps:size(M5), + + %% remember + + #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0, + #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0, + #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0, + #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0, + #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0, + + #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1, + 
#{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1, + #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1, + #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1, + #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2, + + #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3, + #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3, + #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3, + #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3, + #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3, + + #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3, + + #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3, + #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3, + #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3, + #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3, + + ok. + + t_map_size(Config) when is_list(Config) -> 0 = map_size(id(#{})), 1 = map_size(id(#{a=>1})), @@ -221,13 +687,72 @@ t_update_literals(Config) when is_list(Config) -> ]), ok. 
+t_update_literals_large(Config) when is_list(Config) -> + Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + #{ one => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [ + {"a","1"},{"b","2"},{"c","3"},{"d","4"} + ]), + ok. + loop_update_literals_x_q(Map, []) -> Map; loop_update_literals_x_q(Map, [{X,Q}|Vs]) -> loop_update_literals_x_q(Map#{q=>Q,x=>X},Vs). 
% test map updates with matching t_match_and_update_literals(Config) when is_list(Config) -> - Map = #{x=>0,y=>"untouched",z=>"also untouched",q=>1}, + Map = #{ x=>0,y=>"untouched",z=>"also untouched",q=>1, + #{ "one" => small, map => key } => "small map key 1" }, #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [ {1,2},{3,4},{5,6},{7,8} ]), @@ -241,8 +766,77 @@ t_match_and_update_literals(Config) when is_list(Config) -> #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number }, ok. +t_match_and_update_literals_large(Config) when is_list(Config) -> + Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10", + 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11", + 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12", + 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13", + 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14", + + 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15", + 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16", + 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17", + 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18", + 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19", + + 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"), + 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"), + 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"), + 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"), + 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"), + + 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"), + 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"), + 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"), + 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"), + 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"), + + x=>0,y=>"untouched",z=>"also untouched",q=>1, + + #{ "one" => small, map => key } => id("small map key 1"), + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }), + + #{x:=16,q:=21,y:="untouched",z:="also untouched"} = 
loop_match_and_update_literals_x_q(Map, [ + {1,2},{3,4},{5,6},{7,8} + ]), + M0 = id(Map#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, + 4 => number, 18446744073709551629 => wat}), + M1 = id(Map#{}), + M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, + 4 => number, 18446744073709551629 => wat}, + M0 = M2, + + #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number }, + ok. + loop_match_and_update_literals_x_q(Map, []) -> Map; -loop_match_and_update_literals_x_q(#{q:=Q0,x:=X0} = Map, [{X,Q}|Vs]) -> +loop_match_and_update_literals_x_q(#{ q:=Q0, x:=X0, + #{ "one" => small, map => key } := "small map key 1" } = Map, [{X,Q}|Vs]) -> loop_match_and_update_literals_x_q(Map#{q=>Q0+Q,x=>X0+X},Vs). @@ -278,6 +872,75 @@ t_update_assoc(Config) when is_list(Config) -> ok. +t_update_assoc_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + M1 = M0#{1=>42,2=>100,4=>[a,b,c]}, + #{1:=42,2:=100,10.0:=fa0,4:=[a,b,c],25:=b5} = 
M1, + #{ 10:=43, 24:=b4, 15:=a5, 35:="c5", 2.0:=100, 13.0:=fa3, 4.0:=[a,b,c]} = + M0#{1.0=>float,10:=43,2.0=>wrong,2.0=>100,4.0=>[a,b,c]}, + + M2 = M0#{13.0=>new}, + #{10:=a0,20:=b0,13.0:=new,"40":="d0",<<"50">>:="e0"} = M2, + M2 = M0#{13.0:=wrong,13.0=>new}, + + %% Errors cases. + BadMap = id(badmap), + {'EXIT',{{badarg,BadMap},_}} = (catch BadMap#{nonexisting=>M0}), + ok. + + t_update_exact(Config) when is_list(Config) -> M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}), @@ -295,9 +958,91 @@ t_update_exact(Config) when is_list(Config) -> {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}), {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}), {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), + ok. + +t_update_exact_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + M1 = M0#{10:=42,<<"55">>:=100,10.0:=[a,b,c]}, + #{ 10:=42,<<"55">>:=100,{["05"]}:="15",10.0:=[a,b,c], + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 
11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1" } = M1, + + M1 = M0#{10:=wrong,10=>42,<<"55">>=>wrong,<<"55">>:=100,10.0:=[a,b,c]}, + + M2 = M0#{13.0:=new}, + #{10:=a0,20:=b0,13.0:=new} = M2, + M2 = M0#{13.0=>wrong,13.0:=new}, + + %% Errors cases. + {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}), + {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}), + {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}), + {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), ok. + t_update_values(Config) when is_list(Config) -> V0 = id(1337), M0 = #{ a => 1, val => V0}, @@ -371,6 +1116,75 @@ t_guard_sequence(Config) when is_list(Config) -> {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(#{b=>5})), ok. +t_guard_sequence_large(Config) when is_list(Config) -> + M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19", + + 10.0=>fa0,20.0=>fb0,30.0=>"fc0", + 11.0=>fa1,21.0=>fb1,31.0=>"fc1", + 12.0=>fa2,22.0=>fb2,32.0=>"fc2", + 13.0=>fa3,23.0=>fb3,33.0=>"fc3", + 14.0=>fa4,24.0=>fb4,34.0=>"fc4", + + 15.0=>fa5,25.0=>fb5,35.0=>"fc5", + 16.0=>fa6,26.0=>fb6,36.0=>"fc6", + 17.0=>fa7,27.0=>fb7,37.0=>"fc7", + 18.0=>fa8,28.0=>fb8,38.0=>"fc8", + 19.0=>fa9,29.0=>fb9,39.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", 
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + {1, "a"} = map_guard_sequence_1(M0#{seq=>1,val=>id("a")}), + {2, "b"} = map_guard_sequence_1(M0#{seq=>2,val=>id("b")}), + {3, "c"} = map_guard_sequence_1(M0#{seq=>3,val=>id("c")}), + {4, "d"} = map_guard_sequence_1(M0#{seq=>4,val=>id("d")}), + {5, "e"} = map_guard_sequence_1(M0#{seq=>5,val=>id("e")}), + + {1,M1} = map_guard_sequence_2(M1 = id(M0#{a=>3})), + {2,M2} = map_guard_sequence_2(M2 = id(M0#{a=>4, b=>4})), + {3,gg,M3} = map_guard_sequence_2(M3 = id(M0#{a=>gg, b=>4})), + {4,sc,sc,M4} = map_guard_sequence_2(M4 = id(M0#{a=>sc, b=>3, c=>sc2})), + {5,kk,kk,M5} = map_guard_sequence_2(M5 = id(M0#{a=>kk, b=>other, c=>sc2})), + + {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(M0#{seq=>6,val=>id("e")})), + {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(M0#{b=>5})), + ok. + map_guard_sequence_1(#{seq:=1=Seq, val:=Val}) -> {Seq,Val}; map_guard_sequence_1(#{seq:=2=Seq, val:=Val}) -> {Seq,Val}; map_guard_sequence_1(#{seq:=3=Seq, val:=Val}) -> {Seq,Val}; @@ -390,6 +1204,65 @@ t_guard_update(Config) when is_list(Config) -> second = map_guard_update(#{y=>old}, #{x=>second,y=>old}), ok. +t_guard_update_large(Config) when is_list(Config) -> + M0 = id(#{ 70=>a0,80=>b0,90=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10", + 71=>a1,81=>b1,91=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11", + 72=>a2,82=>b2,92=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12", + 73=>a3,83=>b3,93=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13", + 74=>a4,84=>b4,94=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14", + + 75=>a5,85=>b5,95=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15", + 76=>a6,86=>b6,96=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16", + 77=>a7,87=>b7,97=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17", + 78=>a8,88=>b8,98=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18", + 79=>a9,89=>b9,99=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19", + + 70.0=>fa0,80.0=>fb0,90.0=>"fc0", + 71.0=>fa1,81.0=>fb1,91.0=>"fc1", + 72.0=>fa2,82.0=>fb2,92.0=>"fc2", + 73.0=>fa3,83.0=>fb3,93.0=>"fc3", + 74.0=>fa4,84.0=>fb4,94.0=>"fc4", + + 75.0=>fa5,85.0=>fb5,95.0=>"fc5", + 76.0=>fa6,86.0=>fb6,96.0=>"fc6", + 77.0=>fa7,87.0=>fb7,97.0=>"fc7", + 78.0=>fa8,88.0=>fb8,98.0=>"fc8", + 79.0=>fa9,89.0=>fb9,99.0=>"fc9", + + #{ one => small, map => key } => "small map key 1", + #{ second => small, map => key } => "small map key 2", + #{ third => small, map => key } => "small map key 3", + + #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1", + + #{ 
10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10", + 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11", + 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12", + 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13", + 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14", + + 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15", + k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16", + 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17", + 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18", + 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }), + + + error = map_guard_update(M0#{},M0#{}), + first = map_guard_update(M0#{},M0#{x=>first}), + second = map_guard_update(M0#{y=>old}, M0#{x=>second,y=>old}), + ok. + map_guard_update(M1, M2) when M1#{x=>first} =:= M2 -> first; map_guard_update(M1, M2) when M1#{x=>second} =:= M2 -> second; map_guard_update(_, _) -> error. @@ -419,6 +1292,42 @@ t_guard_receive(Config) when is_list(Config) -> done = call(Pid, done), ok. +-define(t_guard_receive_large_procs, 150). + +t_guard_receive_large(Config) when is_list(Config) -> + M = lists:foldl(fun(_,#{procs := Ps } = M) -> + M#{ procs := Ps#{ spawn_link(fun() -> grecv_loop() end) => 0 }} + end, #{procs => #{}, done => 0}, lists:seq(1,?t_guard_receive_large_procs)), + lists:foreach(fun(Pid) -> + Pid ! {self(), hello} + end, maps:keys(maps:get(procs,M))), + ok = guard_receive_large_loop(M), + ok. + +guard_receive_large_loop(#{done := ?t_guard_receive_large_procs}) -> + ok; +guard_receive_large_loop(M) -> + receive + #{pid := Pid, msg := hello} -> + case M of + #{done := Count, procs := #{Pid := 150}} -> + Pid ! {self(), done}, + guard_receive_large_loop(M#{done := Count + 1}); + #{procs := #{Pid := Count} = Ps} -> + Pid ! {self(), hello}, + guard_receive_large_loop(M#{procs := Ps#{Pid := Count + 1}}) + end + end. + +grecv_loop() -> + receive + {_, done} -> + ok; + {Pid, hello} -> + Pid ! #{pid=>self(), msg=>hello}, + grecv_loop() + end. + call(Pid, M) -> Pid ! {self(), M}, receive {Pid, Res} -> Res end. @@ -449,6 +1358,14 @@ guard_receive_loop() -> t_list_comprehension(Config) when is_list(Config) -> [#{k:=1},#{k:=2},#{k:=3}] = [#{k=>I} || I <- [1,2,3]], + Ls = id([#{<<2:301>> => I, "wat" => I + 1} || I <- [1,2,3]]), + [#{<<2:301>>:=1,"wat":=2},#{<<2:301>>:=2,"wat":=3},#{<<2:301>>:=3,"wat":=4}] = Ls, + [{1,2},{2,3},{3,4}] = id([{I2,I1} || #{"wat" := I1, <<2:301>> := I2} <- Ls]), + + Ks = lists:seq($a,$z), + Ms = [#{[K1,K2]=>{K1,K2}} || K1 <- Ks, K2 <- Ks], + [#{"aa" := {$a,$a}},#{"ab":={$a,$b}}|_] = Ms, + [#{"zz" := {$z,$z}},#{"zy":={$z,$y}}|_] = lists:reverse(Ms), ok. t_guard_fun(Config) when is_list(Config) -> diff --git a/lib/debugger/vsn.mk b/lib/debugger/vsn.mk index 38c19be93e..b82f0f4e37 100644 --- a/lib/debugger/vsn.mk +++ b/lib/debugger/vsn.mk @@ -1 +1 @@ -DEBUGGER_VSN = 4.0.2 +DEBUGGER_VSN = 4.0.3 diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml index 4020165697..8976679c1d 100644 --- a/lib/dialyzer/doc/src/notes.xml +++ b/lib/dialyzer/doc/src/notes.xml @@ -31,6 +31,21 @@ <p>This document describes the changes made to the Dialyzer application.</p> +<section><title>Dialyzer 2.7.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> A bug concerning <c>map()</c> types has been fixed. 
+ </p> + <p> + Own Id: OTP-12472</p> + </item> + </list> + </section> + +</section> + <section><title>Dialyzer 2.7.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl index debb78bd0b..fe4ec41359 100644 --- a/lib/dialyzer/src/dialyzer_cl.erl +++ b/lib/dialyzer/src/dialyzer_cl.erl @@ -2,7 +2,7 @@ %%------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -469,7 +469,7 @@ expand_dependent_modules(Md5, DiffMd5, ModDeps) -> Mod = list_to_atom(filename:basename(File, ".beam")), sets:is_element(Mod, AnalyzeMods) end, - {[F || {F, _} <- Md5, FilterFun(F)], RemovedMods, NewModDeps}. + {[F || {F, _} <- Md5, FilterFun(F)], BigSet, NewModDeps}. expand_dependent_modules_1([Mod|Mods], Included, ModDeps) -> case dict:find(Mod, ModDeps) of diff --git a/lib/dialyzer/test/dialyzer_SUITE.erl b/lib/dialyzer/test/dialyzer_SUITE.erl index 8507525597..f625d12b45 100644 --- a/lib/dialyzer/test/dialyzer_SUITE.erl +++ b/lib/dialyzer/test/dialyzer_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2014. All Rights Reserved. +%% Copyright Ericsson AB 2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -30,12 +30,12 @@ -export([init_per_testcase/2, end_per_testcase/2]). %% Test cases must be exported. --export([app_test/1, appup_test/1, beam_tests/1]). +-export([app_test/1, appup_test/1]). suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [app_test, appup_test, beam_tests]. + [app_test, appup_test]. groups() -> []. @@ -75,38 +75,3 @@ app_test(Config) when is_list(Config) -> %% Test that the .appup file does not contain any `basic' errors appup_test(Config) when is_list(Config) -> ok = ?t:appup_test(dialyzer). - -beam_tests(Config) when is_list(Config) -> - Prog = <<" - -module(no_auto_import). - - %% Copied from erl_lint_SUITE.erl, clash6 - - -export([size/1]). - - size([]) -> - 0; - size({N,_}) -> - N; - size([_|T]) -> - 1+size(T). - ">>, - Opts = [no_auto_import], - {ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts), - [] = run_dialyzer([BeamFile]), - ok. - -compile(Config, Prog, Module, CompileOpts) -> - Source = lists:concat([Module, ".erl"]), - PrivDir = ?config(priv_dir,Config), - Filename = filename:join([PrivDir, Source]), - ok = file:write_file(Filename, Prog), - Opts = [{outdir, PrivDir}, debug_info | CompileOpts], - {ok, Module} = compile:file(Filename, Opts), - {ok, filename:join([PrivDir, lists:concat([Module, ".beam"])])}. - -run_dialyzer(Files) -> - dialyzer:run([{analysis_type, plt_build}, - {files, Files}, - {from, byte_code}, - {check_plt, false}]). diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl index aee9f449a6..ef4cdc57f0 100644 --- a/lib/dialyzer/test/plt_SUITE.erl +++ b/lib/dialyzer/test/plt_SUITE.erl @@ -1,17 +1,17 @@ %% This suite is the only hand made and simply -%% checks if we can build a plt. +%% checks if we can build and update a plt. -module(plt_SUITE). -include_lib("common_test/include/ct.hrl"). -include("dialyzer_test_constants.hrl"). --export([suite/0, all/0, build_plt/1]). 
+-export([suite/0, all/0, build_plt/1, beam_tests/1, update_plt/1]). suite() -> [{timetrap, ?plt_timeout}]. -all() -> [build_plt]. +all() -> [build_plt, beam_tests, update_plt]. build_plt(Config) -> OutDir = ?config(priv_dir, Config), @@ -19,3 +19,87 @@ build_plt(Config) -> ok -> ok; fail -> ct:fail(plt_build_fail) end. + +beam_tests(Config) when is_list(Config) -> + Prog = <<" + -module(no_auto_import). + + %% Copied from erl_lint_SUITE.erl, clash6 + + -export([size/1]). + + size([]) -> + 0; + size({N,_}) -> + N; + size([_|T]) -> + 1+size(T). + ">>, + Opts = [no_auto_import], + {ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts), + [] = run_dialyzer([BeamFile]), + ok. + +run_dialyzer(Files) -> + dialyzer:run([{analysis_type, plt_build}, + {files, Files}, + {from, byte_code}, + {check_plt, false}]). + +%%% [James Fish:] +%%% If a function is removed from a module and the module has previously +%%% been added to a PLT, the function will not be removed from PLT when +%%% the PLT is checked. This results in dialyzer failing to produce a +%%% callgraph warning when doing success typings analysis if the remove +%%% function is still called in another module +%%% As the function is not removed from the PLT a prior warning, such as a +%%% contract types warning, might be emitted when the removed function +%%% nolonger exists. +update_plt(Config) -> + PrivDir = ?config(priv_dir, Config), + Prog1 = <<"-module(plt_gc). + -export([one/0]). + one() -> + one.">>, + {ok, Beam} = compile(Config, Prog1, plt_gc, []), + + ErlangBeam = case code:where_is_file("erlang.beam") of + non_existing -> + filename:join([code:root_dir(), + "erts", "preloaded", "ebin", + "erlang.beam"]); + EBeam -> + EBeam + end, + Plt = filename:join(PrivDir, "plt_gc.plt"), + Opts = [{check_plt, true}, {from, byte_code}], + [] = dialyzer:run([{analysis_type, plt_build}, + {files, [Beam, ErlangBeam]}, + {output_plt, Plt}] ++ Opts), + + Prog2 = <<"-module(plt_gc). + -export([two/0]). + two() -> + two.">>, + {ok, Beam} = compile(Config, Prog2, plt_gc, []), + + Test = <<"-module(test). + -export([test/0]). + -spec test() -> test. + test() -> + plt_gc:one().">>, + {ok, TestBeam} = compile(Config, Test, test, []), + [{warn_callgraph, _, {call_to_missing, [plt_gc, one, 0]}}] = + dialyzer:run([{analysis_type, succ_typings}, + {files, [TestBeam]}, + {init_plt, Plt}] ++ Opts), + ok. + +compile(Config, Prog, Module, CompileOpts) -> + Source = lists:concat([Module, ".erl"]), + PrivDir = ?config(priv_dir,Config), + Filename = filename:join([PrivDir, Source]), + ok = file:write_file(Filename, Prog), + Opts = [{outdir, PrivDir}, debug_info | CompileOpts], + {ok, Module} = compile:file(Filename, Opts), + {ok, filename:join([PrivDir, lists:concat([Module, ".beam"])])}. diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk index e7c13f04ad..527afaf4ef 100644 --- a/lib/dialyzer/vsn.mk +++ b/lib/dialyzer/vsn.mk @@ -1 +1 @@ -DIALYZER_VSN = 2.7.3 +DIALYZER_VSN = 2.7.4 diff --git a/lib/diameter/doc/src/diameter.xml b/lib/diameter/doc/src/diameter.xml index 37e67d8630..6e41b01c44 100644 --- a/lib/diameter/doc/src/diameter.xml +++ b/lib/diameter/doc/src/diameter.xml @@ -783,6 +783,27 @@ be matched by corresponding &capability; configuration, of </item> +<marker id="incoming_maxlen"/> +<tag><c>{incoming_maxlen, 0..16777215}</c></tag> +<item> +<p> +Bound on the expected size of incoming Diameter messages. 
+Messages larger than the specified number of bytes are discarded.</p> + +<p> +Defaults to <c>16777215</c>, the maximum value of the 24-bit Message +Length field in a Diameter Header.</p> + +<warning> +<p> +This option should be set to as low a value as is sufficient for the +Diameter applications and peers in question, since decoding incoming +messages from a malicious peer can otherwise generate significant +load.</p> +</warning> + +</item> + <tag><c>{restrict_connections, false | node | nodes diff --git a/lib/diameter/doc/src/diameter_dict.xml b/lib/diameter/doc/src/diameter_dict.xml index 9db9bcffde..5cf1b174a0 100644 --- a/lib/diameter/doc/src/diameter_dict.xml +++ b/lib/diameter/doc/src/diameter_dict.xml @@ -529,7 +529,7 @@ answer record and passed to a &app_handle_request; callback upon reception of an incoming request.</p> <p> -In cases in which there is a choice between list() and binary() types +In cases in which there is a choice between string() and binary() types for OctetString() and derived types, the representation is determined by the value of &mod_string_decode;.</p> diff --git a/lib/diameter/doc/src/notes.xml b/lib/diameter/doc/src/notes.xml index e6ac332c10..479fab21b2 100644 --- a/lib/diameter/doc/src/notes.xml +++ b/lib/diameter/doc/src/notes.xml @@ -42,6 +42,189 @@ first.</p> <!-- ===================================================================== --> +<section><title>diameter 1.9</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Don't discard outgoing answers unnecessarily.</p> + <p> + Answers missing a Result-Code AVP or setting an E-bit + inappropriately were discarded even if encode was + successful.</p> + <p> + Own Id: OTP-11492</p> + </item> + <item> + <p> + Increase supervision timeouts.</p> + <p> + At diameter application shutdown, DPR could be omitted on + open peer connections because of short supervision + timeouts.</p> + <p> + Own Id: OTP-12412</p> + </item> + <item> + <p> + Fix retransmission of messages sent as header/avps list.</p> + <p> + Extracting End-to-End and Hop-by-Hop Identifiers resulted + in a function clause error, resulting in a handle_error + callback.</p> + <p> + Own Id: OTP-12415</p> + </item> + <item> + <p> + Fix diameter_avp decode of Grouped AVPs having decode + errors.</p> + <p> + Components of such an AVP were not extracted, causing it + to be represented by a single diameter_avp record instead + of the intended list.</p> + <p> + Dictionary files must be recompiled for the fix to have + effect.</p> + <p> + Own Id: OTP-12475</p> + </item> + <item> + <p> + Fix ordering of AVPs in relayed messages.</p> + <p> + The order was reversed relative to the received order, + with a Route-Record AVP prepended.</p> + <p> + Thanks to Andrzej TrawiÅ„ski.</p> + <p> + Own Id: OTP-12551</p> + </item> + <item> + <p> + Fix issues with DiameterURI encode/decode.</p> + <p> + RFC 6773 changed the default port and transport, but the + RFC 3588 defaults were used even if the RFC 6733 common + dictionary was in use. The RFC 3588 defaults are now only + used when the common dictionary is + diameter_gen_base_rfc3588.</p> + <p> + Both RFC 3588 and 6733 disallow + transport=udp;protocol=diameter. Encode of the + combination now fails.</p> + <p> + Decode of ports numbers outside the range 0-65535 and + fully qualified domain names longer than 255 octets now + fails.</p> + <p> + Note that RFC 3588 is obsolete, and that there is a + diameter_gen_base_rfc6733. 
The change in defaults is a + potential interoperability problem when moving to RFC + 6733 with peers that do not send all URI components. The + fact that 6733 allows 5xxx result codes in answer + messages setting the E-bit, which RFC 3588 doesn't, is + another.</p> + <p> + Own Id: OTP-12589</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Add service_opt() string_decode.</p> + <p> + To disable the decode of potentially large binaries to + string. This prevents large strings from being copied + when incoming Diameter messages are passed between + processes, a vulnerability that can lead to memory being + exhausted given sufficiently malicious peers.</p> + <p> + The value is a boolean(), true being the default for + backwards compatibility. Setting false causes both + diameter_caps records and decoded messages to contain + binary() in relevant places that previously had string(): + diameter_app(3) callbacks need to be prepared for the + change.</p> + <p> + The Diameter types affected are OctetString and the + derived types UTF8String, DiameterIdentity, DiameterURI, + IPFilterRule, and QoSFilterRule. Time and Address are + unaffected.</p> + <p> + Own Id: OTP-11952</p> + </item> + <item> + <p> + Add transport_opt() pool_size.</p> + <p> + To allow for pools of accepting transport processes, + which can better service multiple simultaneous peer + connections. The option can also be used with connecting + transports, to establish multiple connections to the same + peer without having to configure multiple transports.</p> + <p> + Own Id: OTP-12428</p> + </item> + <item> + <p> + Allow DPR to be sent with diameter:call/4.</p> + <p> + It has been possible to send, but the answer was regarded + as unsolicited and discarded. DPA now causes the + transport process in question to be terminated, as for + DPR that diameter itself sends.</p> + <p> + Own Id: OTP-12542</p> + </item> + <item> + <p> + Discard requests after DPR.</p> + <p> + RFC 6733 is imprecise, but the tone is that messages + received after DPR are an exception to be dealt with only + because of the possibility of unordered delivery over + SCTP. As a consequence, and because a request following + DPR is unlikely to be answered due to the impending loss + of the peer connection, discard outgoing requests + following an outgoing or incoming DPR. Incoming requests + are also discarded, with the exception of DPR itself. + Answers are sent and received as usual.</p> + <p> + Own Id: OTP-12543</p> + </item> + <item> + <p> + Add transport_opt() dpr_timeout.</p> + <p> + To cause a peer connection to be closed following an + outgoing DPA when the peer fails to do so. It is the + recipient of DPA that should close the connection + according to RFC 6733.</p> + <p> + Own Id: OTP-12609</p> + </item> + <item> + <p> + Add service_opt() incoming_maxlen.</p> + <p> + To bound the expected size of incoming Diameter messages. 
+ Messages larger than the specified number of bytes are + discarded, to prevent a malicious peer from generating + excessive load.</p> + <p> + Own Id: OTP-12628</p> + </item> + </list> + </section> + +</section> + <section><title>diameter 1.8</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl index 8272904856..0eef218a07 100644 --- a/lib/diameter/include/diameter_gen.hrl +++ b/lib/diameter/include/diameter_gen.hrl @@ -395,7 +395,7 @@ d(false, Reason, Name, Avp, {Avps, Acc}) -> diameter_lib:log(decode_error, ?MODULE, ?LINE, - {Reason, Name, Avp#diameter_avp.name, Stack}), + {Name, Avp#diameter_avp.name, Stack}), {Rec, Failed} = Acc, {[Avp|Avps], {Rec, [rc(Reason, Avp) | Failed]}}. diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl index 67dfc7bdbf..010f977b97 100644 --- a/lib/diameter/src/base/diameter.erl +++ b/lib/diameter/src/base/diameter.erl @@ -45,6 +45,7 @@ -export_type([evaluable/0, restriction/0, + message_length/0, remotes/0, sequence/0, app_alias/0, @@ -298,6 +299,9 @@ call(SvcName, App, Message) -> | [node()] | evaluable(). +-type message_length() + :: 0..16#FFFFFF. + %% Options passed to start_service/2 -type service_opt() @@ -307,6 +311,7 @@ call(SvcName, App, Message) -> | {sequence, sequence() | evaluable()} | {share_peers, remotes()} | {string_decode, boolean()} + | {incoming_maxlen, message_length()} | {use_shared_peers, remotes()} | {spawn_opt, list()}. diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl index 0d0304bf33..8ac3b9d6ca 100644 --- a/lib/diameter/src/base/diameter_config.erl +++ b/lib/diameter/src/base/diameter_config.erl @@ -159,7 +159,8 @@ stop_service(SvcName) -> %% # add_transport/2 %% -------------------------------------------------------------------------- --spec add_transport(diameter:service_name(), {connect|listen, [diameter:transport_opt()]}) +-spec add_transport(diameter:service_name(), + {connect|listen, [diameter:transport_opt()]}) -> {ok, diameter:transport_ref()} | {error, term()}. @@ -645,6 +646,7 @@ make_config(SvcName, Opts) -> {false, monitor}, {?NOMASK, sequence}, {nodes, restrict_connections}, + {16#FFFFFF, incoming_maxlen}, {true, string_decode}, {[], spawn_opt}]), @@ -670,12 +672,20 @@ make_opts(Opts, Defs) -> [{K, opt(K,V)} || {K,V} <- Known]. +opt(incoming_maxlen, N) + when 0 =< N, N < 1 bsl 24 -> + N; + opt(spawn_opt, L) when is_list(L) -> L; opt(K, false = B) - when K /= sequence -> + when K == share_peers; + K == use_shared_peers; + K == monitor; + K == restrict_connections; + K == string_decode -> B; opt(K, true = B) diff --git a/lib/diameter/src/base/diameter_peer_fsm.erl b/lib/diameter/src/base/diameter_peer_fsm.erl index aac2685514..2255d0a76b 100644 --- a/lib/diameter/src/base/diameter_peer_fsm.erl +++ b/lib/diameter/src/base/diameter_peer_fsm.erl @@ -125,7 +125,8 @@ %% outgoing DPR; boolean says whether or not %% the request was sent explicitly with %% diameter:call/4. - length_errors :: exit | handle | discard}). + length_errors :: exit | handle | discard, + incoming_maxlen :: integer() | infinity}). 
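The new incoming_maxlen field caps the size of messages accepted from a peer; anything larger is discarded in recv1/3 below. A hypothetical configuration sketch follows: the service name, callback module and capability values are invented, only the incoming_maxlen option itself comes from the patch.

    %% Start a service that drops incoming messages larger than 2 MB,
    %% the same bound used by diameter_traffic_SUITE.
    SvcOpts = [{'Origin-Host', "client.example.com"},
               {'Origin-Realm', "example.com"},
               {'Vendor-Id', 0},
               {'Product-Name', "example"},
               {'Auth-Application-Id', [0]},
               {application, [{alias, common},
                              {dictionary, diameter_gen_base_rfc6733},
                              {module, example_cb}]},
               {incoming_maxlen, 1 bsl 21}],
    ok = diameter:start_service(example_svc, SvcOpts).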
%% There are non-3588 states possible as a consequence of 5.6.1 of the %% standard and the corresponding problem for incoming CEA's: we don't @@ -203,6 +204,7 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) -> diameter_stats:reg(Ref), diameter_codec:setopts([{common_dictionary, Dict0} | SvcOpts]), {_,_} = Mask = proplists:get_value(sequence, SvcOpts), + Maxlen = proplists:get_value(incoming_maxlen, SvcOpts, 16#FFFFFF), {[Cs,Ds], Rest} = proplists:split(Opts, [capabilities_cb, disconnect_cb]), putr(?CB_KEY, {Ref, [F || {_,F} <- Cs]}), putr(?DPR_KEY, [F || {_, F} <- Ds]), @@ -223,7 +225,8 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) -> dictionary = Dict0, mode = M, service = svc(Svc, Addrs), - length_errors = OnLengthErr}. + length_errors = OnLengthErr, + incoming_maxlen = Maxlen}. %% The transport returns its local ip addresses so that different %% transports on the same service can use different local addresses. %% The local addresses are put into Host-IP-Address avps here when @@ -326,11 +329,14 @@ handle_info(T, #state{} = State) -> {?MODULE, Tag, Reason} -> ?LOG(stop, Tag), {stop, {shutdown, Reason}, State} - end. + end; %% The form of the throw caught here is historical. It's %% significant that it's not a 2-tuple, as in ?FAILURE(Reason), %% since these are caught elsewhere. +handle_info(T, S) -> %% started in old code + handle_info(T, #state{} = erlang:append_element(S, infinity)). + %% Note that there's no guarantee that the service and transport %% capabilities are good enough to build a CER/CEA that can be %% succesfully encoded. It's not checked at diameter:add_transport/2 @@ -561,6 +567,12 @@ recv(Bin, S) -> %% recv1/3 +recv1(_, + #diameter_packet{header = H, bin = Bin}, + #state{incoming_maxlen = M}) + when M < size(Bin) -> + invalid(false, incoming_maxlen_exceeded, {size(Bin), H}); + %% Incoming request after outgoing DPR: discard. Don't discard DPR, so %% both ends don't do so when sending simultaneously. recv1(Name, diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl index a01bcdd4e7..86e744dfbe 100644 --- a/lib/diameter/src/base/diameter_service.erl +++ b/lib/diameter/src/base/diameter_service.erl @@ -131,7 +131,8 @@ | {share_peers, diameter:remotes()} %% broadcast to | {use_shared_peers, diameter:remotes()} %% use from | {restrict_connections, diameter:restriction()} - | {string_decode, boolean()}]}). + | {string_decode, boolean()} + | {incoming_maxlen, diameter:message_length()}]}). %% shared_peers reflects the peers broadcast from remote nodes. %% Record representing an RFC 3539 watchdog process implemented by @@ -698,7 +699,8 @@ service_options(Opts) -> Opts, ?RESTRICT)}, {spawn_opt, proplists:get_value(spawn_opt, Opts, [])}, - {string_decode, proplists:get_value(string_decode, Opts, true)}]. + {string_decode, proplists:get_value(string_decode, Opts, true)}, + {incoming_maxlen, proplists:get_value(incoming_maxlen, Opts, 16#FFFFFF)}]. %% The order of options is significant since we match against the list. mref(false = No) -> diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl index 784f9ca08f..538ebeeeba 100644 --- a/lib/diameter/src/base/diameter_traffic.erl +++ b/lib/diameter/src/base/diameter_traffic.erl @@ -78,7 +78,11 @@ service_name :: diameter:service_name(), apps :: [#diameter_app{}], sequence :: diameter:sequence(), - codec :: list()}). 
+ codec :: [{string_decode, boolean()} + | {incoming_maxlen, diameter:message_length()}]}). +%% Note that incoming_maxlen is currently handled in diameter_peer_fsm, +%% so that any message exceeding the maximum is discarded. Retain the +%% option in case we want to extend the values and semantics. %% Record stored in diameter_request for each outgoing request. -record(request, @@ -102,7 +106,9 @@ make_recvdata([SvcName, PeerT, Apps, SvcOpts | _]) -> peerT = PeerT, apps = Apps, sequence = Mask, - codec = [T || {K,_} = T <- SvcOpts, K == string_decode]}. + codec = [T || {K,_} = T <- SvcOpts, + lists:member(K, [string_decode, + incoming_maxlen])]}. %% --------------------------------------------------------------------------- %% peer_up/1 @@ -233,6 +239,8 @@ receive_message(TPid, Pkt, Dict0, RecvData) Dict0, RecvData). +%% recv/6 + %% Incoming request ... recv(true, false, TPid, Pkt, Dict0, T) -> spawn_request(TPid, Pkt, Dict0, T); @@ -240,6 +248,7 @@ recv(true, false, TPid, Pkt, Dict0, T) -> %% ... answer to known request ... recv(false, #request{ref = Ref, handler = Pid} = Req, _, Pkt, Dict0, _) -> Pid ! {answer, Ref, Req, Dict0, Pkt}; + %% Note that failover could have happened prior to this message being %% received and triggering failback. That is, both a failover message %% and answer may be on their way to the handler process. In the worst @@ -1693,6 +1702,8 @@ send({TPid, Pkt, #request{handler = Pid} = Req0, SvcName, Timeout, TRef}) -> end. %% recv/4 +%% +%% Relay an answer from a remote node. recv(TPid, Pid, TRef, Ref) -> receive diff --git a/lib/diameter/src/base/diameter_types.erl b/lib/diameter/src/base/diameter_types.erl index fe7613541c..87a0f0663d 100644 --- a/lib/diameter/src/base/diameter_types.erl +++ b/lib/diameter/src/base/diameter_types.erl @@ -93,7 +93,7 @@ case diameter_codec:getopt(string_decode) of true -> binary_to_list(Bin); - _ -> + false -> Bin end; @@ -565,18 +565,29 @@ msb(false) -> ?TIME_2036. scan_uri(Bin) -> RE = "^(aaas?)://" - "([-a-zA-Z0-9.]+)" - "(:([0-9]+))?" + "([-a-zA-Z0-9.]{1,255})" + "(:0{0,5}([0-9]{1,5}))?" "(;transport=(tcp|sctp|udp))?" "(;protocol=(diameter|radius|tacacs\\+))?$", + %% A port number is 16-bit, so an arbitrary number of digits is + %% just a vulnerability, but provide a little slack with leading + %% zeros in a port number just because the regexp was previously + %% [0-9]+ and it's not inconceivable that a value might be padded. + %% Don't fantasize about this padding being more than the number + %% of digits in the port number proper. + %% + %% Similarly, a FQDN can't be arbitrarily long: at most 255 + %% octets. {match, [A, DN, PN, T, P]} = re:run(Bin, RE, [{capture, [1,2,4,6,8], binary}]), Type = to_atom(A), {PN0, T0} = defaults(diameter_codec:getopt(rfc), Type), + PortNr = to_int(PN, PN0), + 0 = PortNr bsr 16, %% assert #diameter_uri{type = Type, - fqdn = from_bin(DN), - port = to_int(PN, PN0), + fqdn = 'OctetString'(decode, DN), + port = PortNr, transport = to_atom(T, T0), protocol = to_atom(P, diameter)}. @@ -588,14 +599,6 @@ defaults(6733, aaa) -> defaults(6733, aaas) -> {5658, tcp}. -from_bin(B) -> - case diameter_codec:getopt(string_decode) of - true -> - binary_to_list(B); - false -> - B - end. 
- to_int(<<>>, N) -> N; to_int(B, _) -> diff --git a/lib/diameter/test/diameter_codec_test.erl b/lib/diameter/test/diameter_codec_test.erl index 854b71ba93..5f1dbfbd61 100644 --- a/lib/diameter/test/diameter_codec_test.erl +++ b/lib/diameter/test/diameter_codec_test.erl @@ -352,15 +352,19 @@ values('DiameterURI') -> {[], ["aaa" ++ S ++ "://diameter.se" ++ P ++ Tr ++ Pr || S <- ["", "s"], - P <- ["", ":1234"], + P <- ["", ":1234", ":0", ":65535"], Tr <- ["" | [";transport=" ++ X || X <- ["tcp", "sctp", "udp"]]], Pr <- ["" | [";protocol=" ++ X || X <- ["diameter","radius","tacacs+"]]], Tr /= ";transport=udp" - orelse (Pr /= ";protocol=diameter" andalso Pr /= "")], - ["aaa://diameter.se;transport=udp;protocol=diameter", + orelse (Pr /= ";protocol=diameter" andalso Pr /= "")] + ++ ["aaa://" ++ lists:duplicate(255, $x)], + ["aaa://diameter.se:65536", + "aaa://diameter.se:-1", + "aaa://diameter.se;transport=udp;protocol=diameter", "aaa://diameter.se;transport=udp", + "aaa://" ++ lists:duplicate(256, $x), "aaa://:3868", "aaax://diameter.se", "aaa://diameter.se;transport=tcpx", diff --git a/lib/diameter/test/diameter_config_SUITE.erl b/lib/diameter/test/diameter_config_SUITE.erl index 77f7aace1b..bbdf672291 100644 --- a/lib/diameter/test/diameter_config_SUITE.erl +++ b/lib/diameter/test/diameter_config_SUITE.erl @@ -85,6 +85,12 @@ {string_decode, [[true], [false]], [[0], [x]]}, + {incoming_maxlen, + [[0], [65536], [16#FFFFFF]], + [[-1], [1 bsl 24], [infinity], [false]]}, + {spawn_opt, + [[[]], [[monitor, link]]], + [[false]]}, {invalid_option, %% invalid service options are rejected [], [[x], @@ -186,6 +192,9 @@ {private, [[x]], []}, + {spawn_opt, + [[[]], [[monitor, link]]], + [[false]]}, {invalid_option, %% invalid transport options are silently ignored [[x], [x,x]], diff --git a/lib/diameter/test/diameter_traffic_SUITE.erl b/lib/diameter/test/diameter_traffic_SUITE.erl index 10c58ab6e7..7dd9f39f85 100644 --- a/lib/diameter/test/diameter_traffic_SUITE.erl +++ b/lib/diameter/test/diameter_traffic_SUITE.erl @@ -59,6 +59,7 @@ send_unexpected_mandatory_decode/1, send_unexpected_mandatory/1, send_long/1, + send_maxlen/1, send_nopeer/1, send_noapp/1, send_discard/1, @@ -182,6 +183,7 @@ {'Acct-Application-Id', [?DIAMETER_APP_ID_ACCOUNTING]}, {restrict_connections, false}, {string_decode, Decode}, + {incoming_maxlen, 1 bsl 21}, {spawn_opt, [{min_heap_size, 5000}]} | [{application, [{dictionary, D}, {module, ?MODULE}, @@ -317,6 +319,7 @@ tc() -> send_unexpected_mandatory_decode, send_unexpected_mandatory, send_long, + send_maxlen, send_nopeer, send_noapp, send_discard, @@ -635,6 +638,12 @@ send_long(Config) -> ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _] = call(Config, Req). +%% Send something longer than the configure incoming_maxlen. +send_maxlen(Config) -> + Req = ['STR', {'Termination-Cause', ?LOGOUT}, + {'User-Name', [lists:duplicate(1 bsl 21, $X)]}], + {timeout, _} = call(Config, Req). + %% Send something for which pick_peer finds no suitable peer. 
send_nopeer(Config) -> Req = ['STR', {'Termination-Cause', ?LOGOUT}], diff --git a/lib/eldap/doc/src/notes.xml b/lib/eldap/doc/src/notes.xml index e5cbcb26ff..e76101c30e 100644 --- a/lib/eldap/doc/src/notes.xml +++ b/lib/eldap/doc/src/notes.xml @@ -30,6 +30,42 @@ </header> <p>This document describes the changes made to the Eldap application.</p> +<section><title>Eldap 1.1.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Corrects that <c>eldap:close/1</c> returned a tuple + instead of the specified atom <c>ok</c>.</p> + <p> + Own Id: OTP-12349</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Clarification in the reference manual for + <c>eldap:modify_dn/5</c>, <c>eldap:search/2</c> and + <c>eldap:start_tls/3</c>.</p> + <p> + Own Id: OTP-12354</p> + </item> + <item> + <p> + The eldap test suites are extended and re-organized.</p> + <p> + Own Id: OTP-12355</p> + </item> + </list> + </section> + +</section> + <section><title>Eldap 1.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/hipe/doc/src/notes.xml b/lib/hipe/doc/src/notes.xml index 2d6fd245f7..8d3358533b 100644 --- a/lib/hipe/doc/src/notes.xml +++ b/lib/hipe/doc/src/notes.xml @@ -30,6 +30,55 @@ </header> <p>This document describes the changes made to HiPE.</p> +<section><title>Hipe 3.11.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix HiPE for ARM when Erlang VM is compiled for Thumb + execution mode. This was a problem on e.g. Ubuntu which + configures its system GCC to generate Thumb by default.</p> + <p> + Own Id: OTP-12405</p> + </item> + <item> + <p> + Reduced lock contention of dynamic function lookups (like + apply) from hipe compiled code.</p> + <p> + Own Id: OTP-12557</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Fix two bugs in HiPE compiler regarding floating-points, + both leading to crash during compilation. The + target-specific code generators failed to handle integer + to floating-point conversion instructions with constant + operands. The middle-end could use an incorrect + representation for copies between floating-point + registers.</p> + <p> + Own Id: OTP-12413</p> + </item> + <item> + <p> + Improved error handling when memory allocation for HiPE + code fails.</p> + <p> + Own Id: OTP-12448</p> + </item> + </list> + </section> + +</section> + <section><title>Hipe 3.11.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/hipe/rtl/hipe_tagscheme.erl b/lib/hipe/rtl/hipe_tagscheme.erl index 990d01e190..c27c682915 100644 --- a/lib/hipe/rtl/hipe_tagscheme.erl +++ b/lib/hipe/rtl/hipe_tagscheme.erl @@ -109,7 +109,6 @@ -define(TAG_HEADER_REFC_BIN,((16#8 bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). -define(TAG_HEADER_HEAP_BIN,((16#9 bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). -define(TAG_HEADER_SUB_BIN, ((16#A bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). --define(TAG_HEADER_HASHMAP, ((16#B bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). -define(TAG_HEADER_EXTERNAL_PID, ((16#C bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). -define(TAG_HEADER_EXTERNAL_PORT,((16#D bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). -define(TAG_HEADER_EXTERNAL_REF, ((16#E bsl ?TAG_PRIMARY_SIZE) bor ?TAG_PRIMARY_HEADER)). 
@@ -258,16 +257,11 @@ test_tuple_N(X, N, TrueLab, FalseLab, Pred) -> test_map(X, TrueLab, FalseLab, Pred) -> Tmp = hipe_rtl:mk_new_reg_gcsafe(), HalfTrueLab = hipe_rtl:mk_new_label(), - OrHashmapLab = hipe_rtl:mk_new_label(), MapMask = ?TAG_HEADER_MASK, [test_is_boxed(X, hipe_rtl:label_name(HalfTrueLab), FalseLab, Pred), HalfTrueLab, get_header(Tmp, X), - mask_and_compare(Tmp, MapMask, ?TAG_HEADER_MAP, - TrueLab, hipe_rtl:label_name(OrHashmapLab), Pred), - OrHashmapLab, - mask_and_compare(Tmp, MapMask, ?TAG_HEADER_HASHMAP, TrueLab, FalseLab, Pred) - ]. + mask_and_compare(Tmp, MapMask, ?TAG_HEADER_MAP, TrueLab, FalseLab, Pred)]. test_ref(X, TrueLab, FalseLab, Pred) -> Hdr = hipe_rtl:mk_new_reg_gcsafe(), @@ -366,22 +360,16 @@ test_matchstate(X, TrueLab, FalseLab, Pred) -> test_bitstr(X, TrueLab, FalseLab, Pred) -> Tmp = hipe_rtl:mk_new_reg_gcsafe(), HalfTrueLab = hipe_rtl:mk_new_label(), - AndNotHashmapLab = hipe_rtl:mk_new_label(), Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK, [test_is_boxed(X, hipe_rtl:label_name(HalfTrueLab), FalseLab, Pred), HalfTrueLab, get_header(Tmp, X), - mask_and_compare(Tmp, Mask, ?TAG_HEADER_REFC_BIN, - hipe_rtl:label_name(AndNotHashmapLab), FalseLab, Pred), - AndNotHashmapLab, - mask_and_compare(Tmp, ?TAG_HEADER_MASK, ?TAG_HEADER_HASHMAP, FalseLab, TrueLab, Pred) - ]. + mask_and_compare(Tmp, Mask, ?TAG_HEADER_REFC_BIN, TrueLab, FalseLab, Pred)]. test_binary(X, TrueLab, FalseLab, Pred) -> Tmp1 = hipe_rtl:mk_new_reg_gcsafe(), Tmp2 = hipe_rtl:mk_new_reg_gcsafe(), IsBoxedLab = hipe_rtl:mk_new_label(), - AndNotHashmapLab = hipe_rtl:mk_new_label(), IsBitStrLab = hipe_rtl:mk_new_label(), IsSubBinLab = hipe_rtl:mk_new_label(), Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK, @@ -389,10 +377,7 @@ test_binary(X, TrueLab, FalseLab, Pred) -> IsBoxedLab, get_header(Tmp1, X), mask_and_compare(Tmp1, Mask, ?TAG_HEADER_REFC_BIN, - hipe_rtl:label_name(AndNotHashmapLab), FalseLab, Pred), - AndNotHashmapLab, - mask_and_compare(Tmp1, ?TAG_HEADER_MASK, ?TAG_HEADER_HASHMAP, - FalseLab, hipe_rtl:label_name(IsBitStrLab), Pred), + hipe_rtl:label_name(IsBitStrLab), FalseLab, Pred), IsBitStrLab, mask_and_compare(Tmp1, ?TAG_HEADER_MASK, ?TAG_HEADER_SUB_BIN, hipe_rtl:label_name(IsSubBinLab), TrueLab, 0.5), diff --git a/lib/hipe/vsn.mk b/lib/hipe/vsn.mk index 4cf09830cb..60b4e0559b 100644 --- a/lib/hipe/vsn.mk +++ b/lib/hipe/vsn.mk @@ -1 +1 @@ -HIPE_VSN = 3.11.2 +HIPE_VSN = 3.11.3 diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml index 7f73aa5e7b..2c3ee79f31 100644 --- a/lib/inets/doc/src/notes.xml +++ b/lib/inets/doc/src/notes.xml @@ -32,7 +32,47 @@ <file>notes.xml</file> </header> - <section><title>Inets 5.10.5</title> + <section><title>Inets 5.10.6</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + inets: parse correctly 'Set-Cookie' header with empty + value</p> + <p> + httpc_cookie should parse cookies with empty values and + no attributes set in the 'Set-Cookie' headers.</p> + <p> + Own Id: OTP-12455</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Add parsing of URI fragments to http_uri:parse</p> + <p> + This fixes a bug in httpc where redirection URIs could + lead to bad requests if they contained fragments.</p> + <p> + Own Id: OTP-12398</p> + </item> + <item> + <p> + httpc: http client now ignores invalid set-cookie headers</p> + <p> + Own Id: OTP-12430</p> + </item> + </list> + </section> + +</section> + +<section><title>Inets 5.10.5</title> 
<section><title>Fixed Bugs and Malfunctions</title> <list> diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk index 7d11916454..e5b63a6446 100644 --- a/lib/inets/vsn.mk +++ b/lib/inets/vsn.mk @@ -18,6 +18,6 @@ # %CopyrightEnd% APPLICATION = inets -INETS_VSN = 5.10.5 +INETS_VSN = 5.10.6 PRE_VSN = APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)" diff --git a/lib/kernel/doc/src/notes.xml b/lib/kernel/doc/src/notes.xml index 1ef106e17a..6f7f18a8e7 100644 --- a/lib/kernel/doc/src/notes.xml +++ b/lib/kernel/doc/src/notes.xml @@ -30,6 +30,54 @@ </header> <p>This document describes the changes made to the Kernel application.</p> +<section><title>Kernel 3.2</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + A bug causing an infinite loop in hostname resolving has + been corrected. To trigger this bug you would have to + enter an bogus search method from a configuration file + e.g .inetrc.</p> + <p> + Bug pinpointed by Emil Holmström</p> + <p> + Own Id: OTP-12133</p> + </item> + <item> + <p> + The standard_error process now handles the getopts I/O + protocol request correctly and stores its encoding in the + same way as standard_io.</p> + <p> + Also, io:put_chars(standard_error, [oops]) could + previously crash the standard_error process. This is now + corrected.</p> + <p> + Own Id: OTP-12424</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Configuration parameters for the Kernel application that + allows setting socket options for the distribution + sockets have been added. See the application Kernel + documentation; parameters 'inet_dist_listen_options' and + 'inet_dist_connect_options'.</p> + <p> + Own Id: OTP-12476 Aux Id: OTP-12476 </p> + </item> + </list> + </section> + +</section> + <section><title>Kernel 3.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/kernel/vsn.mk b/lib/kernel/vsn.mk index 15820a0182..e1d447a465 100644 --- a/lib/kernel/vsn.mk +++ b/lib/kernel/vsn.mk @@ -1 +1 @@ -KERNEL_VSN = 3.1 +KERNEL_VSN = 3.2 diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml index 18f72f4faf..dc98efbff3 100644 --- a/lib/mnesia/doc/src/notes.xml +++ b/lib/mnesia/doc/src/notes.xml @@ -38,7 +38,34 @@ thus constitutes one section in this document. The title of each section is the version number of Mnesia.</p> - <section><title>Mnesia 4.12.4</title> + <section><title>Mnesia 4.12.5</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fixed race condition in protocol negotiation.</p> + <p> + Own Id: OTP-12473</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Grammar corrections. (Thanks to Derek Brown)</p> + <p> + Own Id: OTP-12400</p> + </item> + </list> + </section> + +</section> + +<section><title>Mnesia 4.12.4</title> <section><title>Fixed Bugs and Malfunctions</title> <list> diff --git a/lib/mnesia/src/mnesia.erl b/lib/mnesia/src/mnesia.erl index 792b6c4a73..f501a4485b 100644 --- a/lib/mnesia/src/mnesia.erl +++ b/lib/mnesia/src/mnesia.erl @@ -145,7 +145,7 @@ %% Local function in order to avoid external function call val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason); + {'EXIT', _} -> mnesia_lib:other_val(Var); Value -> Value end. 
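val/1 above now relies on ?catch_val, which (as redefined in mnesia.hrl further down) wraps the ets lookup in try...catch instead of a plain catch. A sketch of roughly what the macro expands to after this change, assuming only that the value lives in position 2 of the mnesia_gvar table:

    %% A value, or {'EXIT', {badarg, []}} when the key is absent, without
    %% building the stacktrace-carrying term a plain 'catch' would produce
    %% on this hot path.
    catch_val(Var) ->
        try ets:lookup_element(mnesia_gvar, Var, 2)
        catch error:_ -> {'EXIT', {badarg, []}}
        end.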
@@ -807,7 +807,7 @@ next(Tid,Ts,Tab,Key) tid -> lock_table(Tid, Ts, Tab, read), do_fixtable(Tab,Ts), - New = (catch dirty_next(Tab,Key)), + New = ?CATCH(dirty_next(Tab,Key)), stored_keys(Tab,New,Key,Ts,next, val({Tab, setorbag})); _Protocol -> @@ -833,7 +833,7 @@ prev(Tid,Ts,Tab,Key) tid -> lock_table(Tid, Ts, Tab, read), do_fixtable(Tab,Ts), - New = (catch dirty_prev(Tab,Key)), + New = ?CATCH(dirty_prev(Tab,Key)), stored_keys(Tab,New,Key,Ts,prev, val({Tab, setorbag})); _Protocol -> @@ -965,7 +965,7 @@ foldl(Fun, Acc, Tab, LockKind) when is_function(Fun) -> foldl(ActivityId, Opaque, Fun, Acc, Tab, LockKind) -> {Type, Prev} = init_iteration(ActivityId, Opaque, Tab, LockKind), - Res = (catch do_foldl(ActivityId, Opaque, Tab, dirty_first(Tab), Fun, Acc, Type, Prev)), + Res = ?CATCH(do_foldl(ActivityId, Opaque, Tab, dirty_first(Tab), Fun, Acc, Type, Prev)), close_iteration(Res, Tab). do_foldl(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) -> @@ -1011,7 +1011,7 @@ foldr(ActivityId, Opaque, Fun, Acc, Tab, LockKind) -> true -> %% Order doesn't matter for set and bag TempPrev %% Keep the order so we can use ordsets:del_element end, - Res = (catch do_foldr(ActivityId, Opaque, Tab, dirty_last(Tab), Fun, Acc, Type, Prev)), + Res = ?CATCH(do_foldr(ActivityId, Opaque, Tab, dirty_last(Tab), Fun, Acc, Type, Prev)), close_iteration(Res, Tab). do_foldr(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) -> @@ -1905,21 +1905,21 @@ any_table_info(Tab, _Item) -> abort({bad_type, Tab}). raw_table_info(Tab, Item) -> - case ?catch_val({Tab, storage_type}) of - ram_copies -> - info_reply(catch ?ets_info(Tab, Item), Tab, Item); - disc_copies -> - info_reply(catch ?ets_info(Tab, Item), Tab, Item); - disc_only_copies -> - info_reply(catch dets:info(Tab, Item), Tab, Item); - unknown -> - bad_info_reply(Tab, Item); - {'EXIT', _} -> + try + case ?ets_lookup_element(mnesia_gvar, {Tab, storage_type}, 2) of + ram_copies -> + info_reply(?ets_info(Tab, Item), Tab, Item); + disc_copies -> + info_reply(?ets_info(Tab, Item), Tab, Item); + disc_only_copies -> + info_reply(dets:info(Tab, Item), Tab, Item); + unknown -> + bad_info_reply(Tab, Item) + end + catch error:_ -> bad_info_reply(Tab, Item) end. -info_reply({'EXIT', _Reason}, Tab, Item) -> - bad_info_reply(Tab, Item); info_reply({error, _Reason}, Tab, Item) -> bad_info_reply(Tab, Item); info_reply(Val, _Tab, _Item) -> @@ -2063,9 +2063,8 @@ storage_count(T, {U, R, D, DO}) -> end. system_info(Item) -> - case catch system_info2(Item) of - {'EXIT',Error} -> abort(Error); - Other -> Other + try system_info2(Item) + catch _:Error -> abort(Error) end. system_info2(all) -> @@ -2381,11 +2380,10 @@ del_table_index(Tab, Ix) -> mnesia_schema:del_table_index(Tab, Ix). transform_table(Tab, Fun, NewA) -> - case catch val({Tab, record_name}) of - {'EXIT', Reason} -> - mnesia:abort(Reason); - OldRN -> - mnesia_schema:transform_table(Tab, Fun, NewA, OldRN) + try val({Tab, record_name}) of + OldRN -> mnesia_schema:transform_table(Tab, Fun, NewA, OldRN) + catch exit:Reason -> + mnesia:abort(Reason) end. transform_table(Tab, Fun, NewA, NewRN) -> @@ -2796,7 +2794,7 @@ pre_qlc(Opts, Tab) -> end. post_qlc(Tab) -> - case catch get(mnesia_activity_state) of + case get(mnesia_activity_state) of {_,#tid{},_} -> ok; _ -> case ?catch_val({Tab, setorbag}) of diff --git a/lib/mnesia/src/mnesia.hrl b/lib/mnesia/src/mnesia.hrl index 9a58ea581a..86b6fd908f 100644 --- a/lib/mnesia/src/mnesia.hrl +++ b/lib/mnesia/src/mnesia.hrl @@ -39,7 +39,12 @@ -define(ets_delete_table(Tab), ets:delete(Tab)). 
-define(ets_fixtable(Tab, Bool), ets:fixtable(Tab, Bool)). --define(catch_val(Var), (catch ?ets_lookup_element(mnesia_gvar, Var, 2))). + +-define(SAFE(OP), try (OP) catch error:_ -> ok end). +-define(CATCH(OP), try (OP) catch _:_Reason -> {'EXIT', _Reason} end). + +-define(catch_val(Var), (try ?ets_lookup_element(mnesia_gvar, Var, 2) + catch error:_ -> {'EXIT', {badarg, []}} end)). %% It's important that counter is first, since we compare tid's diff --git a/lib/mnesia/src/mnesia_bup.erl b/lib/mnesia/src/mnesia_bup.erl index 71e610bd63..3fee952d77 100644 --- a/lib/mnesia/src/mnesia_bup.erl +++ b/lib/mnesia/src/mnesia_bup.erl @@ -78,24 +78,21 @@ %% BunchOfRecords will be [] when the iteration is done. iterate(Mod, Fun, Opaque, Acc) -> R = #restore{bup_module = Mod, bup_data = Opaque}, - case catch read_schema_section(R) of - {error, Reason} -> - {error, Reason}; - {R2, {Header, Schema, Rest}} -> - case catch iter(R2, Header, Schema, Fun, Acc, Rest) of - {ok, R3, Res} -> - catch safe_apply(R3, close_read, [R3#restore.bup_data]), - {ok, Res}; - {error, Reason} -> - catch safe_apply(R2, close_read, [R2#restore.bup_data]), - {error, Reason}; - {'EXIT', Pid, Reason} -> - catch safe_apply(R2, close_read, [R2#restore.bup_data]), - {error, {'EXIT', Pid, Reason}}; - {'EXIT', Reason} -> - catch safe_apply(R2, close_read, [R2#restore.bup_data]), - {error, {'EXIT', Reason}} - end + try read_schema_section(R) of + {R2, {Header, Schema, Rest}} -> + try iter(R2, Header, Schema, Fun, Acc, Rest) of + {ok, R3, Res} -> + close_read(R3), + {ok, Res} + catch throw:Err -> + close_read(R2), + Err; + _:Reason -> + close_read(R2), + {error, {Reason, erlang:get_stacktrace()}} + end + catch throw:{error,_} = Err -> + Err end. iter(R, Header, Schema, Fun, Acc, []) -> @@ -116,7 +113,7 @@ safe_apply(R, write, [_, Items]) when Items =:= [] -> safe_apply(R, What, Args) -> Abort = fun(Re) -> abort_restore(R, What, Args, Re) end, Mod = R#restore.bup_module, - case catch apply(Mod, What, Args) of + try apply(Mod, What, Args) of {ok, Opaque, Items} when What =:= read -> {R#restore{bup_data = Opaque}, Items}; {ok, Opaque} when What =/= read-> @@ -125,16 +122,19 @@ safe_apply(R, What, Args) -> Abort(Re); Re -> Abort(Re) + catch _:Re -> + Abort(Re) end. -abort_restore(R, What, Args, Reason) -> - Mod = R#restore.bup_module, - Opaque = R#restore.bup_data, +abort_restore(R = #restore{bup_module=Mod}, What, Args, Reason) -> dbg_out("Restore aborted. ~p:~p~p -> ~p~n", [Mod, What, Args, Reason]), - catch apply(Mod, close_read, [Opaque]), + close_read(R), throw({error, Reason}). +close_read(#restore{bup_module=Mod, bup_data=Opaque}) -> + ?SAFE(Mod:close_read(Opaque)). + fallback_to_schema() -> Fname = fallback_bup(), fallback_to_schema(Fname). @@ -145,40 +145,30 @@ fallback_to_schema(Fname) -> {error, Reason} -> {error, Reason}; Schema -> - case catch lookup_schema(schema, Schema) of - {error, _} -> - {error, "No schema in fallback"}; - List -> - {ok, fallback, List} + try lookup_schema(schema, Schema) of + List -> {ok, fallback, List} + catch throw:_ -> + {error, "No schema in fallback"} end end. %% Opens Opaque reads schema and then close read_schema(Mod, Opaque) -> R = #restore{bup_module = Mod, bup_data = Opaque}, - case catch read_schema_section(R) of - {error, Reason} -> - {error, Reason}; - {R2, {_Header, Schema, _}} -> - catch safe_apply(R2, close_read, [R2#restore.bup_data]), - Schema + try read_schema_section(R) of + {_, {_Header, Schema, _}} -> Schema + catch throw:{error,_} = Error -> + Error + after close_read(R) end. 
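The ?SAFE and ?CATCH macros added to mnesia.hrl above, together with the reworked ?catch_val, carry most of the catch-to-try conversion in this patch. A minimal sketch of their semantics as ordinary functions, in a hypothetical demo module that is not part of the diff:

    %% catch_demo.erl -- illustrative only; the bodies mirror the macros.
    -module(catch_demo).
    -export([safe/1, catch_wrap/1]).

    %% ?SAFE(Op): evaluate Op for its side effect and swallow
    %% error-class exceptions, returning ok instead.
    safe(Fun) ->
        try Fun() catch error:_ -> ok end.

    %% ?CATCH(Op): evaluate Op and turn any exception into an
    %% {'EXIT', Reason} tuple, the shape callers of the old
    %% (catch Op) expression already pattern match on.
    catch_wrap(Fun) ->
        try Fun() catch _:Reason -> {'EXIT', Reason} end.

For example, catch_demo:catch_wrap(fun() -> hd([]) end) returns {'EXIT', badarg}, while a successful call returns the value unchanged.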
%% Open backup media and extract schema %% rewind backup media and leave it open %% Returns {R, {Header, Schema}} read_schema_section(R) -> - case catch do_read_schema_section(R) of - {'EXIT', Reason} -> - catch safe_apply(R, close_read, [R#restore.bup_data]), - {error, {'EXIT', Reason}}; - {error, Reason} -> - catch safe_apply(R, close_read, [R#restore.bup_data]), - {error, Reason}; - {R2, {H, Schema, Rest}} -> - Schema2 = convert_schema(H#log_header.log_version, Schema), - {R2, {H, Schema2, Rest}} - end. + {R2, {H, Schema, Rest}} = do_read_schema_section(R), + Schema2 = convert_schema(H#log_header.log_version, Schema), + {R2, {H, Schema2, Rest}}. do_read_schema_section(R) -> R2 = safe_apply(R, open_read, [R#restore.bup_data]), @@ -201,7 +191,7 @@ do_read_schema_section(R, {ok, B, _C, Rest}, Acc) -> {R, {B, Acc, Rest}}; do_read_schema_section(_R, {error, Reason}, _Acc) -> - {error, Reason}. + throw({error, Reason}). verify_header([H | RawSchema]) when is_record(H, log_header) -> Current = mnesia_log:backup_log_header(), @@ -218,7 +208,7 @@ verify_header([H | RawSchema]) when is_record(H, log_header) -> {error, {"Bad kind of header. Cannot be used as backup.", H}} end; verify_header(RawSchema) -> - {error, {"Missing header. Cannot be used as backup.", catch hd(RawSchema)}}. + {error, {"Missing header. Cannot be used as backup.", ?CATCH(hd(RawSchema))}}. refresh_cookie(Schema, NewCookie) -> case lists:keysearch(schema, 2, Schema) of @@ -345,7 +335,7 @@ create_schema(Ns, ok) -> Str = mk_str(), File = mnesia_lib:dir(Str), file:delete(File), - case catch make_initial_backup(Ns, File, Mod) of + try make_initial_backup(Ns, File, Mod) of {ok, _Res} -> case do_install_fallback(File, Mod) of ok -> @@ -353,8 +343,8 @@ create_schema(Ns, ok) -> ok; {error, Reason} -> {error, Reason} - end; - {error, Reason} -> + end + catch throw:{error, Reason} -> {error, Reason} end end @@ -384,10 +374,11 @@ make_initial_backup(Ns, Opaque, Mod) -> do_apply(_, write, [_, Items], Opaque) when Items =:= [] -> Opaque; do_apply(Mod, What, Args, _Opaque) -> - case catch apply(Mod, What, Args) of + try apply(Mod, What, Args) of {ok, Opaque2} -> Opaque2; - {error, Reason} -> throw({error, Reason}); - {'EXIT', Reason} -> throw({error, {'EXIT', Reason}}) + {error, Reason} -> throw({error, Reason}) + catch _:Reason -> + throw({error, {'EXIT', Reason}}) end. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -425,11 +416,11 @@ do_install_fallback(_Opaque, Args) -> {error, {badarg, Args}}. check_fallback_args([Arg | Tail], FA) -> - case catch check_fallback_arg_type(Arg, FA) of - {'EXIT', _Reason} -> - {error, {badarg, Arg}}; + try check_fallback_arg_type(Arg, FA) of FA2 -> check_fallback_args(Tail, FA2) + catch error:_ -> + {error, {badarg, Arg}} end; check_fallback_args([], FA) -> {ok, FA}. @@ -484,7 +475,7 @@ install_fallback_master(ClientPid, FA) -> State = {start, FA}, Opaque = FA#fallback_args.opaque, Mod = FA#fallback_args.module, - Res = (catch iterate(Mod, fun restore_recs/4, Opaque, State)), + Res = iterate(Mod, fun restore_recs/4, Opaque, State), unlink(ClientPid), ClientPid ! {self(), Res}, exit(shutdown). 
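The reworked mnesia_bup code above propagates failures by throwing {error, Reason} from inner helpers such as do_read_schema_section/3 and converting the throw back into a return value at the outer boundary. A minimal sketch of that pattern, with hypothetical function and file names:

    %% Illustrative only; not part of the patch.
    read_or_error(File) ->
        try do_read(File)                     % may throw {error, Reason}
        catch throw:{error, _} = Err -> Err   % boundary: throw -> return value
        end.

    do_read(File) ->
        case file:read_file(File) of
            {ok, Bin}       -> {ok, Bin};
            {error, Reason} -> throw({error, Reason})
        end.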
@@ -496,9 +487,7 @@ restore_recs(Recs, Header, Schema, {start, FA}) -> %% No records in backup Schema2 = convert_schema(Header#log_header.log_version, Schema), CreateList = lookup_schema(schema, Schema2), - case catch mnesia_schema:list2cs(CreateList) of - {'EXIT', Reason} -> - throw({error, {"Bad schema in restore_recs", Reason}}); + try mnesia_schema:list2cs(CreateList) of Cs -> Ns = get_fallback_nodes(FA, Cs#cstruct.disc_copies), global:set_lock({{mnesia_table_lock, schema}, self()}, Ns, infinity), @@ -508,6 +497,8 @@ restore_recs(Recs, Header, Schema, {start, FA}) -> Res = restore_recs(Recs, Header, Schema2, Pids), global:del_lock({{mnesia_table_lock, schema}, self()}, Ns), Res + catch _:Reason -> + throw({error, {"Bad schema in restore_recs", Reason}}) end; restore_recs([], _Header, _Schema, Pids) -> @@ -579,45 +570,46 @@ fallback_tmp_name() -> "FALLBACK.TMP". fallback_receiver(Master, FA) -> process_flag(trap_exit, true), - case catch register(mnesia_fallback, self()) of - {'EXIT', _} -> - Reason = {already_exists, node()}, - local_fallback_error(Master, Reason); - true -> - FA2 = check_fallback_dir(Master, FA), - Bup = FA2#fallback_args.fallback_bup, - case mnesia_lib:exists(Bup) of - true -> - Reason2 = {already_exists, node()}, - local_fallback_error(Master, Reason2); - false -> - Mod = mnesia_backup, - Tmp = FA2#fallback_args.fallback_tmp, - R = #restore{mode = replace, - bup_module = Mod, - bup_data = Tmp}, - file:delete(Tmp), - case catch fallback_receiver_loop(Master, R, FA2, schema) of - {error, Reason} -> - local_fallback_error(Master, Reason); - Other -> - exit(Other) - end - end - end. + Res = try + register(mnesia_fallback, self()), + FA2 = check_fallback_dir(FA), + Bup = FA2#fallback_args.fallback_bup, + false = mnesia_lib:exists(Bup), + Mod = mnesia_backup, + Tmp = FA2#fallback_args.fallback_tmp, + R = #restore{mode = replace, + bup_module = Mod, + bup_data = Tmp}, + file:delete(Tmp), + fallback_receiver_loop(Master, R, FA2, schema) + catch + error:_ -> + Reason = {already_exists, node()}, + local_fallback_error(Master, Reason); + throw:{error, Reason} -> + local_fallback_error(Master, Reason) + end, + exit(Res). local_fallback_error(Master, Reason) -> Master ! {self(), {error, Reason}}, unlink(Master), exit(Reason). + check_fallback_dir(Master, FA) -> + try check_fallback_dir(FA) + catch throw:{error,Reason} -> + local_fallback_error(Master, Reason) + end. + +check_fallback_dir(FA) -> case mnesia:system_info(schema_location) of ram -> Reason = {has_no_disc, node()}, - local_fallback_error(Master, Reason); + throw({error, Reason}); _ -> - Dir = check_fallback_dir_arg(Master, FA), + Dir = check_fallback_dir_arg(FA), Bup = filename:join([Dir, fallback_name()]), Tmp = filename:join([Dir, fallback_tmp_name()]), FA#fallback_args{fallback_bup = Bup, @@ -625,22 +617,20 @@ check_fallback_dir(Master, FA) -> mnesia_dir = Dir} end. -check_fallback_dir_arg(Master, FA) -> +check_fallback_dir_arg(FA) -> case FA#fallback_args.use_default_dir of true -> mnesia_lib:dir(); false when FA#fallback_args.scope =:= local -> Dir = FA#fallback_args.mnesia_dir, - case catch mnesia_monitor:do_check_type(dir, Dir) of - {'EXIT', _R} -> + try mnesia_monitor:do_check_type(dir, Dir) + catch _:_ -> Reason = {badarg, {dir, Dir}, node()}, - local_fallback_error(Master, Reason); - AbsDir-> - AbsDir - end; + throw({error, Reason}) + end; false when FA#fallback_args.scope =:= global -> Reason = {combine_error, global, dir, node()}, - local_fallback_error(Master, Reason) + throw({error, Reason}) end. 
fallback_receiver_loop(Master, R, FA, State) -> @@ -666,7 +656,7 @@ fallback_receiver_loop(Master, R, FA, State) -> Bup = FA#fallback_args.fallback_bup, Tmp = FA#fallback_args.fallback_tmp, throw_bad_res(ok, file:rename(Tmp, Bup)), - catch mnesia_lib:set(active_fallback, true), + ?SAFE(mnesia_lib:set(active_fallback, true)), ?eval_debug_fun({?MODULE, fallback_receiver_loop, post_swap}, []), Master ! {self(), ok}, fallback_receiver_loop(Master, R, FA, stop); @@ -697,7 +687,7 @@ throw_bad_res(_Expected, Actual) -> throw({error, Actual}). tm_fallback_start(IgnoreFallback) -> mnesia_schema:lock_schema(), Res = do_fallback_start(fallback_exists(), IgnoreFallback), - mnesia_schema: unlock_schema(), + mnesia_schema:unlock_schema(), case Res of ok -> ok; {error, Reason} -> exit(Reason) @@ -715,9 +705,9 @@ do_fallback_start(true, false) -> BupFile = fallback_bup(), Mod = mnesia_backup, LocalTabs = ?ets_new_table(mnesia_local_tables, [set, public, {keypos, 2}]), - case catch iterate(Mod, fun restore_tables/4, BupFile, {start, LocalTabs}) of + case iterate(Mod, fun restore_tables/4, BupFile, {start, LocalTabs}) of {ok, _Res} -> - catch dets:close(schema), + ?SAFE(dets:close(schema)), TmpSchema = mnesia_lib:tab2tmp(schema), DatSchema = mnesia_lib:tab2dat(schema), AllLT = ?ets_match_object(LocalTabs, '_'), @@ -733,8 +723,6 @@ do_fallback_start(true, false) -> {error, {"Cannot start from fallback. Rename error.", Reason}} end; {error, Reason} -> - {error, {"Cannot start from fallback", Reason}}; - {'EXIT', Reason} -> {error, {"Cannot start from fallback", Reason}} end. @@ -996,10 +984,10 @@ uninstall_fallback_master(ClientPid, FA) -> case fallback_to_schema(Bup) of {ok, fallback, List} -> Cs = mnesia_schema:list2cs(List), - case catch get_fallback_nodes(FA, Cs#cstruct.disc_copies) of + try get_fallback_nodes(FA, Cs#cstruct.disc_copies) of Ns when is_list(Ns) -> - do_uninstall(ClientPid, Ns, FA); - {error, Reason} -> + do_uninstall(ClientPid, Ns, FA) + catch throw:{error, Reason} -> local_fallback_error(ClientPid, Reason) end; {error, Reason} -> @@ -1042,13 +1030,13 @@ local_uninstall_fallback(Master, FA) -> %% Don't trap exit register(mnesia_fallback, self()), % May exit - FA2 = check_fallback_dir(Master, FA), % May exit + FA2 = check_fallback_dir(Master, FA), % May exit Master ! {self(), started}, receive {Master, do_uninstall} -> ?eval_debug_fun({?MODULE, uninstall_fallback2, pre_delete}, []), - catch mnesia_lib:set(active_fallback, false), + ?SAFE(mnesia_lib:set(active_fallback, false)), Tmp = FA2#fallback_args.fallback_tmp, Bup = FA2#fallback_args.fallback_bup, file:delete(Tmp), @@ -1071,10 +1059,8 @@ rec_uninstall(ClientPid, [Pid | Pids], AccRes) -> {Pid, BadRes} -> rec_uninstall(ClientPid, Pids, BadRes) end; -rec_uninstall(ClientPid, [], Res) -> - ClientPid ! {self(), Res}, - unlink(ClientPid), - exit(normal). +rec_uninstall(_, [], Res) -> + Res. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Backup traversal @@ -1125,12 +1111,11 @@ do_traverse_backup(ClientPid, Source, SourceMod, Target, TargetMod, Fun, Acc) -> Iter = if TargetMod =/= read_only -> - case catch do_apply(TargetMod, open_write, [Target], Target) of - {error, Error} -> + try do_apply(TargetMod, open_write, [Target], Target) + catch throw:{error, Error} -> unlink(ClientPid), ClientPid ! 
{iter_done, self(), {error, Error}}, - exit(Error); - Else -> Else + exit(Error) end; true -> ignore @@ -1139,16 +1124,16 @@ do_traverse_backup(ClientPid, Source, SourceMod, Target, TargetMod, Fun, Acc) -> Res = case iterate(SourceMod, fun trav_apply/4, Source, A) of {ok, {iter, _, Acc2, _, Iter2}} when TargetMod =/= read_only -> - case catch do_apply(TargetMod, commit_write, [Iter2], Iter2) of - {error, Reason} -> - {error, Reason}; - _ -> - {ok, Acc2} + try + do_apply(TargetMod, commit_write, [Iter2], Iter2), + {ok, Acc2} + catch throw:{error, Reason} -> + {error, Reason} end; {ok, {iter, _, Acc2, _, _}} -> {ok, Acc2}; {error, Reason} when TargetMod =/= read_only-> - catch do_apply(TargetMod, abort_write, [Iter], Iter), + ?CATCH(do_apply(TargetMod, abort_write, [Iter], Iter)), {error, {"Backup traversal failed", Reason}}; {error, Reason} -> {error, {"Backup traversal failed", Reason}} diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl index 442e68a13d..0a3ea8d769 100644 --- a/lib/mnesia/src/mnesia_checkpoint.erl +++ b/lib/mnesia/src/mnesia_checkpoint.erl @@ -128,7 +128,7 @@ tm_enter_pending([], Pending) -> Pending; tm_enter_pending([Tab | Tabs], Pending) -> %% io:format("Add ~p ~p ~p~n",[Tab, Pending, hd(tl(element(2, process_info(self(), current_stacktrace))))]), - catch ?ets_insert(Tab, Pending), + ?SAFE(?ets_insert(Tab, Pending)), tm_enter_pending(Tabs, Pending). tm_exit_pending(Tid) -> @@ -427,22 +427,22 @@ check_tables(Cp) -> arrange_retainers(Cp, Overriders, AllTabs) -> R = #retainer{cp_name = Cp#checkpoint_args.name}, - case catch [R#retainer{tab_name = Tab, - writers = select_writers(Cp, Tab)} - || Tab <- AllTabs] of - {'EXIT', Reason} -> - {error, Reason}; + try [R#retainer{tab_name = Tab, + writers = select_writers(Cp, Tab)} + || Tab <- AllTabs] of Retainers -> {ok, Cp#checkpoint_args{ram_overrides_dump = Overriders, - retainers = Retainers, - nodes = writers(Retainers)}} + retainers = Retainers, + nodes = writers(Retainers)}} + catch throw:Reason -> + {error, Reason} end. 
select_writers(Cp, Tab) -> case filter_remote(Cp, val({Tab, active_replicas})) of [] -> - exit({"Cannot prepare checkpoint (replica not available)", - [Tab, Cp#checkpoint_args.name]}); + throw({"Cannot prepare checkpoint (replica not available)", + [Tab, Cp#checkpoint_args.name]}); Writers -> This = node(), case {lists:member(Tab, Cp#checkpoint_args.max), @@ -492,12 +492,12 @@ check_prep([], Name, Nodes, IgnoreNew) -> collect_pending(Name, Nodes, IgnoreNew) -> case rpc:multicall(Nodes, ?MODULE, call, [Name, collect_pending]) of {Replies, []} -> - case catch ?ets_new_table(mnesia_union, [bag]) of - {'EXIT', Reason} -> %% system limit + try + UnionTab = ?ets_new_table(mnesia_union, [bag]), + compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew) + catch error:Reason -> %% system limit Msg = "Cannot create an ets table pending union", - {error, {system_limit, Msg, Reason}}; - UnionTab -> - compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew) + {error, {system_limit, Msg, Reason}} end; {_, BadNodes} -> deactivate(Nodes, Name), @@ -1170,7 +1170,7 @@ iterate(Name, Tab, Fun, Acc, Source, Val) -> {error, Reason}; {ok, Iter, Pid} -> link(Pid), % We don't want any pending fixtable's - Res = (catch iter(Fun, Acc, Iter)), + Res = ?CATCH(iter(Fun, Acc, Iter)), unlink(Pid), call(Name, {iter_end, Iter}), case Res of @@ -1246,7 +1246,7 @@ system_code_change(Cp, _Module, _OldVsn, _Extra) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_); - _VaLuE_ -> _VaLuE_ + {'EXIT', _} -> mnesia_lib:other_val(Var); + _VaLuE_ -> _VaLuE_ end. diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl index aa72de7594..b9d3779e9a 100644 --- a/lib/mnesia/src/mnesia_controller.erl +++ b/lib/mnesia/src/mnesia_controller.erl @@ -186,7 +186,7 @@ max_loaders() -> val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason); + {'EXIT', _} -> mnesia_lib:other_val(Var); Value -> Value end. @@ -241,9 +241,7 @@ do_wait_for_tables(Tabs, Timeout) -> end. reply_wait(Tabs) -> - case catch mnesia_lib:active_tables() of - {'EXIT', _} -> - {error, {node_not_running, node()}}; + try mnesia_lib:active_tables() of Active when is_list(Active) -> case Tabs -- Active of [] -> @@ -251,6 +249,7 @@ reply_wait(Tabs) -> BadTabs -> {timeout, BadTabs} end + catch exit:_ -> {error, {node_not_running, node()}} end. wait_for_tables_init(From, Tabs) -> @@ -261,13 +260,12 @@ wait_for_tables_init(From, Tabs) -> exit(normal). wait_for_init(From, Tabs, Init) -> - case catch link(Init) of - {'EXIT', _} -> - %% Mnesia is not started - {error, {node_not_running, node()}}; + try link(Init) of true when is_pid(Init) -> cast({sync_tabs, Tabs, self()}), rec_tabs(Tabs, Tabs, From, Init) + catch error:_ -> %% Mnesia is not started + {error, {node_not_running, node()}} end. sync_reply(Waiter, Tab) -> @@ -343,7 +341,7 @@ get_network_copy(Tab, Cs) -> %% might be solved by using monitor in subscr instead. process_flag(trap_exit, true), Load = load_table_fun(Work), - Res = (catch Load()), + Res = ?CATCH(Load()), process_flag(trap_exit, false), call({del_other, self()}), case Res of @@ -592,11 +590,8 @@ call(Msg) -> end. remote_call(Node, Func, Args) -> - case catch gen_server:call({?MODULE, Node}, {Func, Args, self()}, infinity) of - {'EXIT', Error} -> - {error, Error}; - Else -> - Else + try gen_server:call({?MODULE, Node}, {Func, Args, self()}, infinity) + catch exit:Error -> {error, Error} end. 
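remote_call/3 just above traps only exit exceptions from gen_server:call/3 (the class raised when the target process is down or the call times out) and maps them to {error, Reason}. A minimal sketch of the same idiom against a hypothetical registered server:

    %% Illustrative only; my_server is an assumed name, not an OTP one.
    safe_call(Node, Request) ->
        try gen_server:call({my_server, Node}, Request, 5000)
        catch exit:Reason -> {error, Reason}
        end.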
multicall(Nodes, Msg) -> @@ -677,7 +672,7 @@ handle_call(block_controller, From, State) -> noreply(State2); handle_call({update,Fun}, From, State) -> - Res = (catch Fun()), + Res = ?CATCH(Fun()), reply(From, Res), noreply(State); @@ -1256,7 +1251,7 @@ handle_info(#sender_done{worker_pid=Pid, worker_res=Res}, State) -> end; handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor -> - catch set(mnesia_status, stopping), + ?SAFE(set(mnesia_status, stopping)), case State#state.dumper_pid of undefined -> dbg_out("~p was ~p~n", [?SERVER_NAME, R]), @@ -1480,9 +1475,9 @@ orphan_tables([], _, _, LocalOrphans, RemoteMasters) -> node_has_tabs([Tab | Tabs], Node, State) when Node /= node() -> State2 = - case catch update_whereabouts(Tab, Node, State) of - State1 = #state{} -> State1; - {'EXIT', R} -> %% Tab was just deleted? + try update_whereabouts(Tab, Node, State) of + State1 = #state{} -> State1 + catch exit:R -> %% Tab was just deleted? case ?catch_val({Tab, cstruct}) of {'EXIT', _} -> State; % yes _ -> erlang:error(R) @@ -1768,22 +1763,17 @@ change_table_majority(Cs) -> update_where_to_wlock(Tab) -> WNodes = val({Tab, where_to_write}), - Majority = case catch val({Tab, majority}) of - true -> true; - _ -> false - end, + Majority = ?catch_val({Tab, majority}) == true, set({Tab, where_to_wlock}, {WNodes, Majority}). %% node To now has tab loaded, but this must be undone %% This code is rpc:call'ed from the tab_copier process %% when it has *not* released it's table lock unannounce_add_table_copy(Tab, To) -> - catch del_active_replica(Tab, To), - case catch val({Tab , where_to_read}) of - To -> - mnesia_lib:set_remote_where_to_read(Tab); - _ -> - ignore + ?SAFE(del_active_replica(Tab, To)), + try To = val({Tab , where_to_read}), + mnesia_lib:set_remote_where_to_read(Tab) + catch _:_ -> ignore end. user_sync_tab(Tab) -> diff --git a/lib/mnesia/src/mnesia_dumper.erl b/lib/mnesia/src/mnesia_dumper.erl index 509b765dee..693f20dbc2 100644 --- a/lib/mnesia/src/mnesia_dumper.erl +++ b/lib/mnesia/src/mnesia_dumper.erl @@ -137,7 +137,7 @@ perform_dump(InitBy, Regulator) -> U = mnesia_monitor:get_env(dump_log_update_in_place), Cont = mnesia_log:init_log_dump(), mnesia_recover:sync(), - case catch do_perform_dump(Cont, U, InitBy, Regulator, undefined) of + try do_perform_dump(Cont, U, InitBy, Regulator, undefined) of ok -> ?eval_debug_fun({?MODULE, post_dump}, [InitBy]), case mnesia_monitor:use_dir() of @@ -148,17 +148,15 @@ perform_dump(InitBy, Regulator) -> end, mnesia_recover:allow_garb(), %% And now to the crucial point... - mnesia_log:confirm_log_dump(Diff); - {error, Reason} -> - {error, Reason}; - {'EXIT', {Desc, Reason}} -> + mnesia_log:confirm_log_dump(Diff) + catch exit:Reason when Reason =/= fatal -> case mnesia_monitor:get_env(auto_repair) of true -> - mnesia_lib:important(Desc, Reason), + mnesia_lib:important(error, Reason), %% Ignore rest of the log mnesia_log:confirm_log_dump(Diff); false -> - fatal(Desc, Reason) + fatal(error, Reason) end end; {error, Reason} -> @@ -176,24 +174,25 @@ scan_decisions(Fname, InitBy, Regulator) -> mnesia_log:open_log(Name, Header, Fname, Exists, mnesia_monitor:get_env(auto_repair), read_only), Cont = start, - Res = (catch do_perform_dump(Cont, false, InitBy, Regulator, undefined)), - mnesia_log:close_log(Name), - case Res of - ok -> ok; - {'EXIT', Reason} -> {error, Reason} + try + do_perform_dump(Cont, false, InitBy, Regulator, undefined) + catch exit:Reason when Reason =/= fatal -> + {error, Reason} + after mnesia_log:close_log(Name) end end. 
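perform_dump/2 and scan_decisions/3 above guard their catch clauses with Reason =/= fatal, so a fatal exit is never swallowed locally and keeps propagating, while ordinary failures become {error, Reason}; the dump helpers in this patch also capture the stacktrace with erlang:get_stacktrace/0 (the API this code base uses) for the error report. A minimal sketch of that shape, with a hypothetical wrapper name:

    %% Illustrative only.
    run_protected(Fun) ->
        try Fun()
        catch _:Reason when Reason =/= fatal ->
            Stack = erlang:get_stacktrace(),   % stacktrace of the exception just caught
            {error, {Reason, Stack}}
        end.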
do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) -> case mnesia_log:chunk_log(Cont) of {C2, Recs} -> - case catch insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of - {'EXIT', R} -> - Reason = {"Transaction log dump error: ~p~n", [R]}, - close_files(InPlace, {error, Reason}, InitBy), - exit(Reason); + try insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of Version -> do_perform_dump(C2, InPlace, InitBy, Regulator, Version) + catch _:R when R =/= fatal -> + ST = erlang:get_stacktrace(), + Reason = {"Transaction log dump error: ~p~n", [{R, ST}]}, + close_files(InPlace, {error, Reason}, InitBy), + exit(Reason) end; eof -> close_files(InPlace, ok, InitBy), @@ -303,17 +302,16 @@ perform_update(Tid, SchemaOps, _DumperMode, _UseDir) -> InitBy = fast_schema_update, InPlace = mnesia_monitor:get_env(dump_log_update_in_place), - ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]), - case catch insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, - mnesia_log:version()) of - {'EXIT', Reason} -> - Error = {error, {"Schema update error", Reason}}, + try insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, + mnesia_log:version()), + ?eval_debug_fun({?MODULE, post_dump}, [InitBy]), + close_files(InPlace, ok, InitBy), + ok + catch _:Reason when Reason =/= fatal -> + ST = erlang:get_stacktrace(), + Error = {error, {"Schema update error", {Reason, ST}}}, close_files(InPlace, Error, InitBy), - fatal("Schema update error ~p ~p", [Reason, SchemaOps]); - _ -> - ?eval_debug_fun({?MODULE, post_dump}, [InitBy]), - close_files(InPlace, ok, InitBy), - ok + fatal("Schema update error ~p ~p", [{Reason,ST}, SchemaOps]) end. insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok; @@ -362,13 +360,11 @@ dets_insert(Op,Tab,Key,Val) -> case dets_incr_counter(Tab,Key) of true -> {RecName, Incr} = Val, - case catch dets:update_counter(Tab, Key, Incr) of - CounterVal when is_integer(CounterVal) -> - ok; - _ when Incr < 0 -> + try _ = dets:update_counter(Tab, Key, Incr) + catch error:_ when Incr < 0 -> Zero = {RecName, Key, 0}, ok = dets:insert(Tab, Zero); - _ -> + error:_ -> Init = {RecName, Key, Incr}, ok = dets:insert(Tab, Init) end; @@ -786,7 +782,7 @@ insert_op(Tid, _, {op, clear_table, TabDef}, InPlace, InitBy) -> end, %% Need to catch this, it crashes on ram_copies if %% the op comes before table is loaded at startup. - catch insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy) + ?CATCH(insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy)) end; insert_op(Tid, _, {op, merge_schema, TabDef}, InPlace, InitBy) -> @@ -1060,14 +1056,13 @@ prepare_open(Tab, UpdateInPlace) -> Dat; false -> Tmp = mnesia_lib:tab2tmp(Tab), - case catch mnesia_lib:copy_file(Dat, Tmp) of - ok -> - Tmp; - Error -> + try ok = mnesia_lib:copy_file(Dat, Tmp) + catch error:Error -> fatal("Cannot copy dets file ~p to ~p: ~p~n", [Dat, Tmp, Error]) - end - end. + end, + Tmp + end. del_opened_tab(Tab) -> erase({?MODULE, Tab}). 
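The dets_insert/4 change above keeps the old fallback for missing counter keys: dets:update_counter/3 fails when no counter object exists for the key, and the guarded catch clauses then insert an initial record instead. A standalone sketch of that fallback, with hypothetical names:

    %% Illustrative only; not part of the patch.
    bump_counter(Tab, RecName, Key, Incr) ->
        try
            dets:update_counter(Tab, Key, Incr)          % fails if Key is absent
        catch
            error:_ when Incr < 0 ->
                ok = dets:insert(Tab, {RecName, Key, 0}),    % decrement of a missing key starts at 0
                0;
            error:_ ->
                ok = dets:insert(Tab, {RecName, Key, Incr}),
                Incr
        end.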
@@ -1189,18 +1184,16 @@ raw_named_dump_table(Tab, Ftype) -> Storage = ram_copies, mnesia_lib:db_fixtable(Storage, Tab, true), - case catch raw_dump_table(TabRef, Tab) of - {'EXIT', Reason} -> - mnesia_lib:db_fixtable(Storage, Tab, false), - mnesia_lib:dets_sync_close(Tab), - file:delete(TmpFname), - mnesia_lib:unlock_table(Tab), - exit({"Dump of table to disc failed", Reason}); - ok -> - mnesia_lib:db_fixtable(Storage, Tab, false), - mnesia_lib:dets_sync_close(Tab), - mnesia_lib:unlock_table(Tab), - ok = file:rename(TmpFname, Fname) + try + ok = raw_dump_table(TabRef, Tab), + ok = file:rename(TmpFname, Fname) + catch _:Reason -> + ?SAFE(file:delete(TmpFname)), + exit({"Dump of table to disc failed", Reason}) + after + mnesia_lib:db_fixtable(Storage, Tab, false), + mnesia_lib:dets_sync_close(Tab), + mnesia_lib:unlock_table(Tab) end; {error, Reason} -> mnesia_lib:unlock_table(Tab), @@ -1266,6 +1259,6 @@ regulate(RegulatorPid) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason); + {'EXIT', _} -> mnesia_lib:other_val(Var); Value -> Value end. diff --git a/lib/mnesia/src/mnesia_frag.erl b/lib/mnesia/src/mnesia_frag.erl index 66fc20913c..6036ac4e8f 100644 --- a/lib/mnesia/src/mnesia_frag.erl +++ b/lib/mnesia/src/mnesia_frag.erl @@ -406,10 +406,11 @@ verify_numbers(FH,MatchSpec) -> VerifyFun = fun(F) when is_integer(F), F >= 1, F =< N -> false; (_F) -> true end, - case catch lists:filter(VerifyFun, FragNumbers) of - [] -> - FragNumbers; - BadFrags -> + try + Frags = lists:filter(VerifyFun, FragNumbers), + Frags == [] orelse error(Frags), + FragNumbers + catch error:BadFrags -> mnesia:abort({"match_spec_to_frag_numbers: Fragment numbers out of range", BadFrags, {range, 1, N}}) end. @@ -437,7 +438,7 @@ remote_select(ReplyTo, Ref, NameNodes, MatchSpec) -> do_remote_select(ReplyTo, Ref, [{Name, Node} | NameNodes], MatchSpec) -> if Node == node() -> - Res = (catch {ok, mnesia:dirty_select(Name, MatchSpec)}), + Res = ?CATCH({ok, mnesia:dirty_select(Name, MatchSpec)}), ReplyTo ! {remote_select, Ref, Node, Res}, do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec); true -> @@ -886,17 +887,19 @@ adjust_before_split(FH) -> HashMod:add_frag(HashState) end, N = FH#frag_state.n_fragments + 1, - FromFrags2 = (catch lists:sort(FromFrags)), - UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))), VerifyFun = fun(F) when is_integer(F), F >= 1, F =< N -> false; (_F) -> true end, - case catch lists:filter(VerifyFun, UnionFrags) of - [] -> - FH2 = FH#frag_state{n_fragments = N, - hash_state = HashState2}, - {FH2, FromFrags2, UnionFrags}; - BadFrags -> + try + FromFrags2 = lists:sort(FromFrags), + UnionFrags = lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags)), + + Frags = lists:filter(VerifyFun, UnionFrags), + Frags == [] orelse error(Frags), + FH2 = FH#frag_state{n_fragments = N, + hash_state = HashState2}, + {FH2, FromFrags2, UnionFrags} + catch error:BadFrags -> mnesia:abort({"add_frag: Fragment numbers out of range", BadFrags, {range, 1, N}}) end. 
@@ -981,22 +984,24 @@ adjust_before_merge(FH) -> HashMod:del_frag(HashState) end, N = FH#frag_state.n_fragments, - FromFrags2 = (catch lists:sort(FromFrags)), - UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))), VerifyFun = fun(F) when is_integer(F), F >= 1, F =< N -> false; (_F) -> true end, - case catch lists:filter(VerifyFun, UnionFrags) of - [] -> - case lists:member(N, FromFrags2) of - true -> - FH2 = FH#frag_state{n_fragments = N - 1, - hash_state = HashState2}, - {FH2, FromFrags2, UnionFrags}; + try + FromFrags2 = lists:sort(FromFrags), + UnionFrags = lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags)), + + Frags = lists:filter(VerifyFun, UnionFrags), + [] == Frags orelse error(Frags), + case lists:member(N, FromFrags2) of + true -> + FH2 = FH#frag_state{n_fragments = N - 1, + hash_state = HashState2}, + {FH2, FromFrags2, UnionFrags}; false -> - mnesia:abort({"del_frag: Last fragment number not included", N}) - end; - BadFrags -> + mnesia:abort({"del_frag: Last fragment number not included", N}) + end + catch error:BadFrags -> mnesia:abort({"del_frag: Fragment numbers out of range", BadFrags, {range, 1, N}}) end. @@ -1141,8 +1146,8 @@ remove_node(Node, Cs) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason); - Value -> Value + {'EXIT', _} -> mnesia_lib:other_val(Var); + Value -> Value end. set_frag_hash(Tab, Props) -> diff --git a/lib/mnesia/src/mnesia_index.erl b/lib/mnesia/src/mnesia_index.erl index 87cb58dae1..6a7c964fce 100644 --- a/lib/mnesia/src/mnesia_index.erl +++ b/lib/mnesia/src/mnesia_index.erl @@ -45,21 +45,11 @@ del_transient/3, del_index_table/3]). --import(mnesia_lib, [verbose/2]). +-import(mnesia_lib, [val/1, verbose/2]). -include("mnesia.hrl"). -record(index, {setorbag, pos_list}). -val(Var) -> - case ?catch_val(Var) of - {'EXIT', _ReASoN_} -> - case mnesia_lib:other_val(Var) of - error -> mnesia_lib:pr_other(Var, _ReASoN_); - Val -> Val - end; - _VaLuE_ -> _VaLuE_ - end. - %% read an object list throuh its index table %% we assume that table Tab has index on attribute number Pos diff --git a/lib/mnesia/src/mnesia_late_loader.erl b/lib/mnesia/src/mnesia_late_loader.erl index d09de3ca66..9a113c6306 100644 --- a/lib/mnesia/src/mnesia_late_loader.erl +++ b/lib/mnesia/src/mnesia_late_loader.erl @@ -36,17 +36,19 @@ -define(SERVER_NAME, ?MODULE). +-include("mnesia.hrl"). + -record(state, {supervisor}). async_late_disc_load(_, [], _) -> ok; async_late_disc_load(Node, Tabs, Reason) -> Msg = {async_late_disc_load, Tabs, Reason}, - catch ({?SERVER_NAME, Node} ! {self(), Msg}). + ?SAFE({?SERVER_NAME, Node} ! {self(), Msg}). maybe_async_late_disc_load(_, [], _) -> ok; maybe_async_late_disc_load(Node, Tabs, Reason) -> Msg = {maybe_async_late_disc_load, Tabs, Reason}, - catch ({?SERVER_NAME, Node} ! {self(), Msg}). + ?SAFE({?SERVER_NAME, Node} ! {self(), Msg}). start() -> mnesia_monitor:start_proc(?SERVER_NAME, ?MODULE, init, [self()]). diff --git a/lib/mnesia/src/mnesia_lib.erl b/lib/mnesia/src/mnesia_lib.erl index fc41cbedcb..7bd207f816 100644 --- a/lib/mnesia/src/mnesia_lib.erl +++ b/lib/mnesia/src/mnesia_lib.erl @@ -114,9 +114,7 @@ lock_table/1, mkcore/1, not_active_here/1, - other_val/2, other_val/1, - pr_other/2, overload_read/0, overload_read/1, overload_set/2, @@ -380,8 +378,8 @@ search_key(_Key, []) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_); - _VaLuE_ -> _VaLuE_ + {'EXIT', _} -> other_val(Var); + _VaLuE_ -> _VaLuE_ end. 
set(Var, Val) -> @@ -390,13 +388,13 @@ set(Var, Val) -> unset(Var) -> ?ets_delete(mnesia_gvar, Var). -other_val(Var, Other) -> - case other_val(Var) of - error -> pr_other(Var, Other); +other_val(Var) -> + case other_val_1(Var) of + error -> pr_other(Var); Val -> Val end. -other_val(Var) -> +other_val_1(Var) -> case Var of {_, where_to_read} -> nowhere; {_, where_to_write} -> []; @@ -404,21 +402,16 @@ other_val(Var) -> _ -> error end. -pr_other(Var, Other) -> - Why = +pr_other(Var) -> + Why = case is_running() of no -> {node_not_running, node()}; _ -> {no_exists, Var} end, - verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~p ~n", + verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~n", [self(), process_info(self(), registered_name), - Var, Other, Why]), - case Other of - {badarg, [{ets, lookup_element, _, _}|_]} -> - exit(Why); - _ -> - erlang:error(Why) - end. + Var, Why]), + exit(Why). %% Some functions for list valued variables add(Var, Val) -> @@ -905,7 +898,7 @@ dirty_rpc_error_tag(Reason) -> end. fatal(Format, Args) -> - catch set(mnesia_status, stopping), + ?SAFE(catch set(mnesia_status, stopping)), Core = mkcore({crashinfo, {Format, Args}}), report_fatal(Format, Args, Core), timer:sleep(10000), % Enough to write the core dump to disc? @@ -917,7 +910,7 @@ report_fatal(Format, Args) -> report_fatal(Format, Args, Core) -> report_system_event({mnesia_fatal, Format, Args, Core}), - catch exit(whereis(mnesia_monitor), fatal). + ?SAFE(exit(whereis(mnesia_monitor), fatal)). %% We sleep longer and longer the more we try %% Made some testing and came up with the following constants @@ -1014,7 +1007,7 @@ dbg_out(Format, Args) -> %% Keep the last 10 debug print outs save(DbgInfo) -> - catch save2(DbgInfo). + ?SAFE(save2(DbgInfo)). save2(DbgInfo) -> Key = {'$$$_report', current_pos}, @@ -1090,35 +1083,29 @@ db_match_object(Tab, Pat) -> db_match_object(val({Tab, storage_type}), Tab, Pat). db_match_object(Storage, Tab, Pat) -> db_fixtable(Storage, Tab, true), - Res = catch_match_object(Storage, Tab, Pat), - db_fixtable(Storage, Tab, false), - case Res of - {'EXIT', Reason} -> exit(Reason); - _ -> Res + try + case Storage of + disc_only_copies -> dets:match_object(Tab, Pat); + _ -> ets:match_object(Tab, Pat) + end + after + db_fixtable(Storage, Tab, false) end. -catch_match_object(disc_only_copies, Tab, Pat) -> - catch dets:match_object(Tab, Pat); -catch_match_object(_, Tab, Pat) -> - catch ets:match_object(Tab, Pat). - db_select(Tab, Pat) -> db_select(val({Tab, storage_type}), Tab, Pat). db_select(Storage, Tab, Pat) -> db_fixtable(Storage, Tab, true), - Res = catch_select(Storage, Tab, Pat), - db_fixtable(Storage, Tab, false), - case Res of - {'EXIT', Reason} -> exit(Reason); - _ -> Res + try + case Storage of + disc_only_copies -> dets:select(Tab, Pat); + _ -> ets:select(Tab, Pat) + end + after + db_fixtable(Storage, Tab, false) end. -catch_select(disc_only_copies, Tab, Pat) -> - catch dets:select(Tab, Pat); -catch_select(_, Tab, Pat) -> - catch ets:select(Tab, Pat). - db_select_init(disc_only_copies, Tab, Pat, Limit) -> dets:select(Tab, Pat, Limit); db_select_init(_, Tab, Pat, Limit) -> @@ -1262,7 +1249,7 @@ dets_sync_open(Tab, Args) -> end. dets_sync_close(Tab) -> - catch dets:close(Tab), + ?SAFE(dets:close(Tab)), unlock_table(Tab), ok. 
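db_match_object/3 and db_select/3 above now wrap the match in try ... after so the table is always unfixed, even when the lookup itself raises. A minimal sketch of the same fix/unfix discipline for a plain ets table (hypothetical code, not from the patch):

    %% Illustrative only.
    select_fixed(Tab, MatchSpec) ->
        ets:safe_fixtable(Tab, true),        % keep the traversal stable
        try
            ets:select(Tab, MatchSpec)
        after
            ets:safe_fixtable(Tab, false)    % always release the fix
        end.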
@@ -1298,7 +1285,7 @@ readable_indecies(Tab) -> scratch_debug_fun() -> dbg_out("scratch_debug_fun(): ~p~n", [?DEBUG_TAB]), - (catch ?ets_delete_table(?DEBUG_TAB)), + ?SAFE(?ets_delete_table(?DEBUG_TAB)), ?ets_new_table(?DEBUG_TAB, [set, public, named_table, {keypos, 2}]). activate_debug_fun(FunId, Fun, InitialContext, File, Line) -> @@ -1311,43 +1298,45 @@ activate_debug_fun(FunId, Fun, InitialContext, File, Line) -> update_debug_info(Info). update_debug_info(Info) -> - case catch ?ets_insert(?DEBUG_TAB, Info) of - {'EXIT', _} -> + try ?ets_insert(?DEBUG_TAB, Info), + ok + catch error:_ -> scratch_debug_fun(), - ?ets_insert(?DEBUG_TAB, Info); - _ -> - ok + ?ets_insert(?DEBUG_TAB, Info) end, dbg_out("update_debug_info(~p)~n", [Info]), ok. deactivate_debug_fun(FunId, _File, _Line) -> - catch ?ets_delete(?DEBUG_TAB, FunId), + ?SAFE(?ets_delete(?DEBUG_TAB, FunId)), ok. eval_debug_fun(FunId, EvalContext, EvalFile, EvalLine) -> - case catch ?ets_lookup(?DEBUG_TAB, FunId) of - [] -> - ok; - [Info] -> - OldContext = Info#debug_info.context, - dbg_out("~s(~p): ~w " - "activated in ~s(~p)~n " - "eval_debug_fun(~w, ~w)~n", - [filename:basename(EvalFile), EvalLine, Info#debug_info.id, - filename:basename(Info#debug_info.file), Info#debug_info.line, - OldContext, EvalContext]), - Fun = Info#debug_info.function, - NewContext = Fun(OldContext, EvalContext), - - case catch ?ets_lookup(?DEBUG_TAB, FunId) of - [Info] when NewContext /= OldContext -> - NewInfo = Info#debug_info{context = NewContext}, - update_debug_info(NewInfo); - _ -> - ok - end; - {'EXIT', _} -> ok + try + case ?ets_lookup(?DEBUG_TAB, FunId) of + [] -> + ok; + [Info] -> + OldContext = Info#debug_info.context, + dbg_out("~s(~p): ~w " + "activated in ~s(~p)~n " + "eval_debug_fun(~w, ~w)~n", + [filename:basename(EvalFile), EvalLine, Info#debug_info.id, + filename:basename(Info#debug_info.file), Info#debug_info.line, + OldContext, EvalContext]), + Fun = Info#debug_info.function, + NewContext = Fun(OldContext, EvalContext), + + case ?ets_lookup(?DEBUG_TAB, FunId) of + [Info] when NewContext /= OldContext -> + NewInfo = Info#debug_info{context = NewContext}, + update_debug_info(NewInfo); + _ -> + ok + end + end + catch error -> + ok end. -ifdef(debug). diff --git a/lib/mnesia/src/mnesia_loader.erl b/lib/mnesia/src/mnesia_loader.erl index cbb3d7e430..65ea743fd3 100644 --- a/lib/mnesia/src/mnesia_loader.erl +++ b/lib/mnesia/src/mnesia_loader.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1998-2013. All Rights Reserved. +%% Copyright Ericsson AB 1998-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -35,7 +35,7 @@ val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason); + {'EXIT', _} -> mnesia_lib:other_val(Var); Value -> Value end. @@ -332,7 +332,7 @@ wait_on_load_complete(Pid) -> {Pid, Res} -> Res; {'EXIT', Pid, Reason} -> - exit(Reason); + error(Reason); Else -> Pid ! Else, wait_on_load_complete(Pid) @@ -442,18 +442,18 @@ init_table(Tab, disc_only_copies, Fun, DetsInfo,Sender) -> ErtsVer = erlang:system_info(version), case DetsInfo of {ErtsVer, DetsData} -> - Res = (catch dets:is_compatible_bchunk_format(Tab, DetsData)), - case Res of - {'EXIT',{undef,[{dets,_,_,_}|_]}} -> - Sender ! 
{self(), {old_protocol, Tab}}, - dets:init_table(Tab, Fun); %% Old dets version - {'EXIT', What} -> - exit(What); + try dets:is_compatible_bchunk_format(Tab, DetsData) of false -> Sender ! {self(), {old_protocol, Tab}}, dets:init_table(Tab, Fun); %% Old dets version true -> dets:init_table(Tab, Fun, [{format, bchunk}]) + catch + error:{undef,[{dets,_,_,_}|_]} -> + Sender ! {self(), {old_protocol, Tab}}, + dets:init_table(Tab, Fun); %% Old dets version + error:What -> + What end; Old when Old /= false -> Sender ! {self(), {old_protocol, Tab}}, @@ -462,10 +462,10 @@ init_table(Tab, disc_only_copies, Fun, DetsInfo,Sender) -> dets:init_table(Tab, Fun) end; init_table(Tab, _, Fun, _DetsInfo,_) -> - case catch ets:init_table(Tab, Fun) of - true -> - ok; - {'EXIT', Else} -> Else + try + true = ets:init_table(Tab, Fun), + ok + catch _:Else -> {Else, erlang:get_stacktrace()} end. @@ -572,9 +572,9 @@ handle_last({ram_copies, Tab}, _Type, DatBin) -> down(Tab, Storage) -> case Storage of ram_copies -> - catch ?ets_delete_table(Tab); + ?SAFE(?ets_delete_table(Tab)); disc_copies -> - catch ?ets_delete_table(Tab); + ?SAFE(?ets_delete_table(Tab)); disc_only_copies -> TmpFile = mnesia_lib:tab2tmp(Tab), mnesia_lib:dets_sync_close(Tab), @@ -658,26 +658,23 @@ send_table(Pid, Tab, RemoteS) -> {Init, Chunk} = reader_funcs(UseDetsChunk, Tab, Storage, KeysPerTransfer), SendIt = fun() -> - prepare_copy(Pid, Tab, Storage), + {atomic, ok} = prepare_copy(Pid, Tab, Storage), send_more(Pid, 1, Chunk, Init(), Tab), finish_copy(Pid, Tab, Storage, RemoteS) end, - case catch SendIt() of - receiver_died -> + try SendIt() of + {_, receiver_died} -> ok; + {atomic, no_more} -> ok + catch + throw:receiver_died -> cleanup_tab_copier(Pid, Storage, Tab), - unlink(whereis(mnesia_tm)), ok; - {_, receiver_died} -> - unlink(whereis(mnesia_tm)), - ok; - {atomic, no_more} -> - unlink(whereis(mnesia_tm)), - ok; - Reason -> + error:Reason -> %% Prepare failed cleanup_tab_copier(Pid, Storage, Tab), - unlink(whereis(mnesia_tm)), - {error, Reason} + {error, {tab_copier, Tab, {Reason, erlang:get_stacktrace()}}} + after + unlink(whereis(mnesia_tm)) end end. @@ -690,12 +687,7 @@ prepare_copy(Pid, Tab, Storage) -> mnesia_lib:db_fixtable(Storage, Tab, true), ok end, - case mnesia:transaction(Trans) of - {atomic, ok} -> - ok; - {aborted, Reason} -> - exit({tab_copier_prepare, Tab, Reason}) - end. + mnesia:transaction(Trans). update_where_to_write(Tab, Node) -> case val({Tab, access_mode}) of @@ -828,6 +820,6 @@ dat2bin(_Tab, _LocalS, _RemoteS) -> nobin. handle_exit(Pid, Reason) when node(Pid) == node() -> - exit(Reason); + error(Reason); handle_exit(_Pid, _Reason) -> %% Not from our node, this will be handled by ignore. %% mnesia_down soon. diff --git a/lib/mnesia/src/mnesia_locker.erl b/lib/mnesia/src/mnesia_locker.erl index 1efb939e00..6658dbeacb 100644 --- a/lib/mnesia/src/mnesia_locker.erl +++ b/lib/mnesia/src/mnesia_locker.erl @@ -98,7 +98,7 @@ init(Parent) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_); + {'EXIT', _} -> mnesia_lib:other_val(Var); _VaLuE_ -> _VaLuE_ end. @@ -1001,13 +1001,11 @@ flush_remaining(Ns=[Node | Tail], SkipNode, Res) -> opt_lookup_in_client(lookup_in_client, Oid, Lock) -> {Tab, Key} = Oid, - case catch mnesia_lib:db_get(Tab, Key) of - {'EXIT', _} -> + try mnesia_lib:db_get(Tab, Key) + catch error:_ -> %% Table has been deleted from this node, %% restart the transaction. 
- #cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere}; - Val -> - Val + #cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere} end; opt_lookup_in_client(Val, _Oid, _Lock) -> Val. @@ -1139,11 +1137,10 @@ send_requests([], _X) -> rec_requests([Node | Nodes], Oid, Store) -> Res = l_req_rec(Node, Store), - case catch rlock_get_reply(Node, Store, Oid, Res) of - {'EXIT', Reason} -> - flush_remaining(Nodes, Node, Reason); - _ -> - rec_requests(Nodes, Oid, Store) + try rlock_get_reply(Node, Store, Oid, Res) of + _ -> rec_requests(Nodes, Oid, Store) + catch _:Reason -> + flush_remaining(Nodes, Node, Reason) end; rec_requests([], _Oid, _Store) -> ok. diff --git a/lib/mnesia/src/mnesia_log.erl b/lib/mnesia/src/mnesia_log.erl index 6206a8bdb9..21ad0ffdb6 100644 --- a/lib/mnesia/src/mnesia_log.erl +++ b/lib/mnesia/src/mnesia_log.erl @@ -462,7 +462,7 @@ chunk_log(Cont) -> chunk_log(_Log, eof) -> eof; chunk_log(Log, Cont) -> - case catch disk_log:chunk(Log, Cont) of + case disk_log:chunk(Log, Cont) of {error, Reason} -> fatal("Possibly truncated ~p file: ~p~n", [Log, Reason]); @@ -647,11 +647,11 @@ backup_checkpoint(Name, Opaque, Args) when is_list(Args) -> end. check_backup_args([Arg | Tail], B) -> - case catch check_backup_arg_type(Arg, B) of - {'EXIT', _Reason} -> - {error, {badarg, Arg}}; + try check_backup_arg_type(Arg, B) of B2 -> check_backup_args(Tail, B2) + catch error:_ -> + {error, {badarg, Arg}} end; check_backup_args([], B) -> @@ -674,11 +674,11 @@ check_backup_arg_type(Arg, B) -> backup_master(ClientPid, B) -> process_flag(trap_exit, true), - case catch do_backup_master(B) of - {'EXIT', Reason} -> - ClientPid ! {self(), ClientPid, {error, {'EXIT', Reason}}}; + try do_backup_master(B) of Res -> ClientPid ! {self(), ClientPid, Res} + catch _:Reason -> + ClientPid ! {self(), ClientPid, {error, {'EXIT', Reason}}} end, unlink(ClientPid), exit(normal). @@ -736,10 +736,10 @@ safe_apply(B, What, Args) -> {'EXIT', Pid, R} -> Abort({'EXIT', Pid, R}) after 0 -> Mod = B#backup_args.module, - case catch apply(Mod, What, Args) of + try apply(Mod, What, Args) of {ok, Opaque} -> B#backup_args{opaque=Opaque}; - {error, R} -> Abort(R); - R -> Abort(R) + {error, R} -> Abort(R) + catch _:R -> Abort(R) end end. @@ -748,10 +748,9 @@ abort_write(B, What, Args, Reason) -> Opaque = B#backup_args.opaque, dbg_out("Failed to perform backup. M=~p:F=~p:A=~p -> ~p~n", [Mod, What, Args, Reason]), - case catch apply(Mod, abort_write, [Opaque]) of - {ok, _Res} -> - throw({error, Reason}); - Other -> + try apply(Mod, abort_write, [Opaque]) of + {ok, _Res} -> throw({error, Reason}) + catch _:Other -> error("Failed to abort backup. ~p:~p~p -> ~p~n", [Mod, abort_write, [Opaque], Other]), throw({error, Reason}) @@ -892,10 +891,8 @@ tab_receiver(Pid, B, Tab, RecName, Slot) -> end. 
rec_filter(B, schema, _RecName, Recs) -> - case catch mnesia_bup:refresh_cookie(Recs, B#backup_args.cookie) of - Recs2 when is_list(Recs2) -> - Recs2; - {error, _Reason} -> + try mnesia_bup:refresh_cookie(Recs, B#backup_args.cookie) + catch throw:{error, _Reason} -> %% No schema table cookie Recs end; @@ -1006,13 +1003,14 @@ add_recs([{{Tab, _Key}, Val, delete_object} | Rest], N) -> add_recs(Rest, N+1); add_recs([{{Tab, Key}, Val, update_counter} | Rest], N) -> {RecName, Incr} = Val, - case catch ets:update_counter(Tab, Key, Incr) of - CounterVal when is_integer(CounterVal) -> - ok; - _ when Incr < 0 -> + try + CounterVal = ets:update_counter(Tab, Key, Incr), + true = (CounterVal >= 0) + catch + error:_ when Incr < 0 -> Zero = {RecName, Key, 0}, true = ets:insert(Tab, Zero); - _ -> + error:_ -> Zero = {RecName, Key, Incr}, true = ets:insert(Tab, Zero) end, diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl index a0e0e630ec..14b1ab5c1a 100644 --- a/lib/mnesia/src/mnesia_monitor.erl +++ b/lib/mnesia/src/mnesia_monitor.erl @@ -268,7 +268,7 @@ init([Parent]) -> set(version, Version), dbg_out("Version: ~p~n", [Version]), - case catch process_config_args(env()) of + try process_config_args(env()) of ok -> mnesia_lib:set({'$$$_report', current_pos}, 0), Level = mnesia_lib:val(debug), @@ -288,8 +288,8 @@ init([Parent]) -> set(pending_checkpoints, []), set(pending_checkpoint_pids, []), - {ok, #state{supervisor = Parent}}; - {'EXIT', Reason} -> + {ok, #state{supervisor = Parent}} + catch _:Reason -> mnesia_lib:report_fatal("Bad configuration: ~p~n", [Reason]), {stop, {bad_config, Reason}} end. @@ -323,25 +323,24 @@ non_empty_dir() -> %%---------------------------------------------------------------------- handle_call({mktab, Tab, Args}, _From, State) -> - case catch ?ets_new_table(Tab, Args) of - {'EXIT', ExitReason} -> + try ?ets_new_table(Tab, Args) of + Reply -> + {reply, Reply, State} + catch error:ExitReason -> Msg = "Cannot create ets table", Reason = {system_limit, Msg, Tab, Args, ExitReason}, fatal("~p~n", [Reason]), - {noreply, State}; - Reply -> - {reply, Reply, State} + {noreply, State} end; handle_call({unsafe_mktab, Tab, Args}, _From, State) -> - case catch ?ets_new_table(Tab, Args) of - {'EXIT', ExitReason} -> - {reply, {error, ExitReason}, State}; + try ?ets_new_table(Tab, Args) of Reply -> {reply, Reply, State} + catch error:ExitReason -> + {reply, {error, ExitReason}, State} end; - handle_call({open_dets, Tab, Args}, _From, State) -> case mnesia_lib:dets_sync_open(Tab, Args) of {ok, Tab} -> @@ -546,7 +545,7 @@ handle_info({'EXIT', Pid, fatal}, State) when node(Pid) == node() -> %% is in progress %% exit(State#state.supervisor, shutdown), %% It is better to kill an innocent process - catch exit(whereis(mnesia_locker), kill), + ?SAFE(exit(whereis(mnesia_locker), kill)), {noreply, State}; handle_info(Msg = {'EXIT',Pid,_}, State) -> @@ -727,11 +726,8 @@ default_env(send_compressed) -> 0. check_type(Env, Val) -> - case catch do_check_type(Env, Val) of - {'EXIT', _Reason} -> - exit({bad_config, Env, Val}); - NewVal -> - NewVal + try do_check_type(Env, Val) + catch error:_ -> exit({bad_config, Env, Val}) end. do_check_type(access_module, A) when is_atom(A) -> A; @@ -781,12 +777,12 @@ media(opt_disc) -> opt_disc; media(ram) -> ram. 
patch_env(Env, Val) -> - case catch do_check_type(Env, Val) of - {'EXIT', _Reason} -> - {error, {bad_type, Env, Val}}; + try do_check_type(Env, Val) of NewVal -> application_controller:set_env(mnesia, Env, NewVal), NewVal + catch error:_ -> + {error, {bad_type, Env, Val}} end. detect_partitioned_network(Mon, Node) -> diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl index eeb4fa0ced..aa567a23cb 100644 --- a/lib/mnesia/src/mnesia_recover.erl +++ b/lib/mnesia/src/mnesia_recover.erl @@ -178,11 +178,8 @@ log_decision(D) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> - case mnesia_lib:other_val(Var) of - error -> mnesia_lib:pr_other(Var, Reason); - Val -> Val - end; + {'EXIT', _Reason} -> + mnesia_lib:other_val(Var); Value -> Value end. @@ -373,11 +370,8 @@ log_master_nodes2([], _UseDir, IsRunning, WorstRes) -> get_master_node_info() -> Tab = mnesia_decision, Pat = {master_nodes, '_', '_'}, - case catch mnesia_lib:db_match_object(ram_copies,Tab, Pat) of - {'EXIT', _} -> - []; - Masters -> - Masters + try mnesia_lib:db_match_object(ram_copies,Tab, Pat) + catch error:_ -> [] end. get_master_node_tables() -> @@ -385,9 +379,8 @@ get_master_node_tables() -> [Tab || {master_nodes, Tab, _Nodes} <- Masters]. get_master_nodes(Tab) -> - case catch ?ets_lookup_element(mnesia_decision, Tab, 3) of - {'EXIT', _} -> []; - Nodes -> Nodes + try ?ets_lookup_element(mnesia_decision, Tab, 3) + catch error:_ -> [] end. %% Determine what has happened to the transaction @@ -485,8 +478,6 @@ load_decision_tab() -> load_decision_tab(Cont, load_decision_tab), mnesia_log:close_decision_tab(). -load_decision_tab(eof, _InitBy) -> - ok; load_decision_tab(Cont, InitBy) -> case mnesia_log:chunk_decision_tab(Cont) of {Cont2, Decisions} -> @@ -519,8 +510,6 @@ dump_decision_log(InitBy) -> Cont = mnesia_log:prepare_decision_log_dump(), perform_dump_decision_log(Cont, InitBy). -perform_dump_decision_log(eof, _InitBy) -> - confirm_decision_log_dump(); perform_dump_decision_log(Cont, InitBy) when InitBy == startup -> case mnesia_log:chunk_decision_log(Cont) of {Cont2, Decisions} -> @@ -1024,7 +1013,7 @@ decision(Tid) -> decision(Tid, tabs()). decision(Tid, [Tab | Tabs]) -> - case catch ?ets_lookup(Tab, Tid) of + try ?ets_lookup(Tab, Tid) of [D] when is_record(D, decision) -> D; [C] when is_record(C, transient_decision) -> @@ -1034,8 +1023,8 @@ decision(Tid, [Tab | Tabs]) -> ram_nodes = [] }; [] -> - decision(Tid, Tabs); - {'EXIT', _} -> + decision(Tid, Tabs) + catch error:_ -> %% Recently switched transient decision table decision(Tid, Tabs) end; @@ -1046,11 +1035,8 @@ outcome(Tid, Default) -> outcome(Tid, Default, tabs()). outcome(Tid, Default, [Tab | Tabs]) -> - case catch ?ets_lookup_element(Tab, Tid, 3) of - {'EXIT', _} -> - outcome(Tid, Default, Tabs); - Val -> - Val + try ?ets_lookup_element(Tab, Tid, 3) + catch error:_ -> outcome(Tid, Default, Tabs) end; outcome(_Tid, Default, []) -> Default. diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl index 52180cfcf5..4c8234bbc7 100644 --- a/lib/mnesia/src/mnesia_schema.erl +++ b/lib/mnesia/src/mnesia_schema.erl @@ -151,7 +151,7 @@ exit_on_error(GoodRes) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason); + {'EXIT', _} -> mnesia_lib:other_val(Var); Value -> Value end. 
@@ -2151,14 +2151,14 @@ prepare_op(_Tid, {op, transform, Fun, TabDef}, _WaitFor) -> mnesia_lib:db_fixtable(Storage, Tab, true), Key = mnesia_lib:db_first(Tab), Op = {op, transform, Fun, TabDef}, - case catch transform_objs(Fun, Tab, RecName, - Key, NewArity, Storage, Type, [Op]) of - {'EXIT', Reason} -> - mnesia_lib:db_fixtable(Storage, Tab, false), - exit({"Bad transform function", Tab, Fun, node(), Reason}); + try transform_objs(Fun, Tab, RecName, Key, + NewArity, Storage, Type, [Op]) of Objs -> mnesia_lib:db_fixtable(Storage, Tab, false), {true, Objs, mandatory} + catch _:Reason -> + mnesia_lib:db_fixtable(Storage, Tab, false), + exit({"Bad transform function", Tab, Fun, node(), Reason}) end end; @@ -2342,7 +2342,7 @@ undo_prepare_commit(Tid, Commit) -> ignore; Ops -> %% Catch to allow failure mnesia_controller may not be started - catch mnesia_controller:release_schema_commit_lock(), + ?SAFE(mnesia_controller:release_schema_commit_lock()), undo_prepare_ops(Tid, Ops) end, Commit. @@ -2489,14 +2489,14 @@ ram_delete_table(Tab, Storage) -> %% delete possible index files and data ..... %% Got to catch this since if no info has been set in the %% mnesia_gvar it will crash - catch mnesia_index:del_transient(Tab, Storage), + ?CATCH(mnesia_index:del_transient(Tab, Storage)), case ?catch_val({Tab, {index, snmp}}) of {'EXIT', _} -> ignore; Etab -> - catch mnesia_snmp_hook:delete_table(Tab, Etab) + ?SAFE(mnesia_snmp_hook:delete_table(Tab, Etab)) end, - catch ?ets_delete_table(Tab) + ?SAFE(?ets_delete_table(Tab)) end. purge_dir(Dir, KeepFiles) -> @@ -2584,10 +2584,7 @@ info2(_, []) -> io:format("~n", []). get_table_properties(Tab) -> - case catch mnesia_lib:db_match_object(ram_copies, - mnesia_gvar, {{Tab, '_'}, '_'}) of - {'EXIT', _} -> - mnesia:abort({no_exists, Tab, all}); + try mnesia_lib:db_match_object(ram_copies, mnesia_gvar, {{Tab, '_'}, '_'}) of RawGvar -> case [{Item, Val} || {{_Tab, Item}, Val} <- RawGvar] of [] -> @@ -2598,6 +2595,8 @@ get_table_properties(Tab) -> Master = {master_nodes, mnesia:table_info(Tab, master_nodes)}, lists:sort([Size, Memory, Master | Gvar]) end + catch error:_ -> + mnesia:abort({no_exists, Tab, all}) end. %%%%%%%%%%% RESTORE %%%%%%%%%%% @@ -2620,15 +2619,15 @@ restore(_Opaque, BadArg) -> {aborted, {badarg, BadArg}}. restore(Opaque, Args, Module) when is_list(Args), is_atom(Module) -> InitR = #r{opaque = Opaque, module = Module}, - case catch lists:foldl(fun check_restore_arg/2, InitR, Args) of + try lists:foldl(fun check_restore_arg/2, InitR, Args) of R when is_record(R, r) -> case mnesia_bup:read_schema(R#r.module, Opaque) of {error, Reason} -> {aborted, Reason}; BupSchema -> schema_transaction(fun() -> do_restore(R, BupSchema) end) - end; - {'EXIT', Reason} -> + end + catch exit:Reason -> {aborted, Reason} end; restore(_Opaque, Args, Module) -> @@ -3073,15 +3072,13 @@ do_make_merge_schema(Node, NeedsConv, RemoteCs = #cstruct{}) -> %% Returns a new cstruct or issues a fatal error merge_cstructs(Cs, RemoteCs, Force) -> verify_cstruct(Cs), - case catch do_merge_cstructs(Cs, RemoteCs, Force) of - {'EXIT', {aborted, _Reason}} when Force == true -> - Cs; - {'EXIT', Reason} -> - exit(Reason); + try do_merge_cstructs(Cs, RemoteCs, Force) of MergedCs when is_record(MergedCs, cstruct) -> - MergedCs; - Other -> - throw(Other) + MergedCs + catch exit:{aborted, _Reason} when Force == true -> + Cs; + exit:Reason -> exit(Reason); + error:Reason -> exit(Reason) end. 
do_merge_cstructs(Cs, RemoteCs, Force) -> diff --git a/lib/mnesia/src/mnesia_snmp_hook.erl b/lib/mnesia/src/mnesia_snmp_hook.erl index 256f83b029..c76cf89ebb 100644 --- a/lib/mnesia/src/mnesia_snmp_hook.erl +++ b/lib/mnesia/src/mnesia_snmp_hook.erl @@ -30,15 +30,6 @@ -include("mnesia.hrl"). -val(Var) -> - case ?catch_val(Var) of - {'EXIT', _ReASoN_} -> - case mnesia_lib:other_val(Var) of - error -> mnesia_lib:pr_other(Var, _ReASoN_); - Val -> Val - end; - _VaLuE_ -> _VaLuE_ - end. check_ustruct([]) -> true; %% default value, not SNMP'ified @@ -85,12 +76,12 @@ delete_table(_MnesiaTab, Tree) -> %%----------------------------------------------------------------- update({clear_table, MnesiaTab}) -> - Tree = val({MnesiaTab, {index, snmp}}), + Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}), b_clear(Tree), ok; update({Op, MnesiaTab, MnesiaKey, SnmpKey}) -> - Tree = val({MnesiaTab, {index, snmp}}), + Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}), update(Op, Tree, MnesiaKey, SnmpKey). update(Op, Tree, MnesiaKey, SnmpKey) -> @@ -120,7 +111,7 @@ update(Op, Tree, MnesiaKey, SnmpKey) -> %%----------------------------------------------------------------- key_to_oid(Tab,Key) -> - Types = val({Tab,snmp}), + Types = mnesia_lib:val({Tab,snmp}), key_to_oid(Tab, Key, Types). key_to_oid(Tab, Key, [{key, Types}]) -> @@ -144,7 +135,7 @@ keys_to_oid(N, Key, Oid, Types) -> %% This can be lookup up in tree but that might be on a remote node. %% It's probably faster to look it up, but use when it migth be remote oid_to_key(Oid, Tab) -> - [{key, Types}] = val({Tab,snmp}), + [{key, Types}] = mnesia_lib:val({Tab,snmp}), oid_to_key_1(Types, Oid). oid_to_key_1(integer, [Key]) -> Key; diff --git a/lib/mnesia/src/mnesia_subscr.erl b/lib/mnesia/src/mnesia_subscr.erl index 866a57e370..c39edea9e3 100644 --- a/lib/mnesia/src/mnesia_subscr.erl +++ b/lib/mnesia/src/mnesia_subscr.erl @@ -186,11 +186,11 @@ patch_record(Tab, Obj) -> end. what(Tab, Tid, {RecName, Key}, delete, undefined) -> - case catch mnesia_lib:db_get(Tab, Key) of - Old when is_list(Old) -> %% Op only allowed for set table. - {mnesia_table_event, {delete, Tab, {RecName, Key}, Old, Tid}}; - _ -> - %% Record just deleted by a dirty_op or + try mnesia_lib:db_get(Tab, Key) of + Old -> %% Op only allowed for set table. 
+ {mnesia_table_event, {delete, Tab, {RecName, Key}, Old, Tid}} + catch error:_ -> + %% Record just deleted by a dirty_op or %% the whole table has been deleted ignore end; @@ -199,10 +199,10 @@ what(Tab, Tid, Obj, delete, Old) -> what(Tab, Tid, Obj, delete_object, _Old) -> {mnesia_table_event, {delete, Tab, Obj, [Obj], Tid}}; what(Tab, Tid, Obj, write, undefined) -> - case catch mnesia_lib:db_get(Tab, element(2, Obj)) of - Old when is_list(Old) -> - {mnesia_table_event, {write, Tab, Obj, Old, Tid}}; - {'EXIT', _} -> + try mnesia_lib:db_get(Tab, element(2, Obj)) of + Old -> + {mnesia_table_event, {write, Tab, Obj, Old, Tid}} + catch error:_ -> ignore end; what(Tab, Tid, Obj, write, Old) -> @@ -386,12 +386,12 @@ activate(ClientPid, What, Var, OldSubscribers, SubscrTab) -> case lists:member(ClientPid, Old) of false -> %% Don't care about checking old links - case catch link(ClientPid) of + try link(ClientPid) of true -> ?ets_insert(SubscrTab, {ClientPid, What}), add_subscr(Var, What, ClientPid), - {ok, node()}; - {'EXIT', _Reason} -> + {ok, node()} + catch error:_ -> {error, {no_exists, ClientPid}} end; true -> @@ -443,11 +443,10 @@ add_subscr({Tab, commit_work}, What, Pid) -> deactivate(ClientPid, What, Var, SubscrTab) -> ?ets_match_delete(SubscrTab, {ClientPid, What}), - case catch ?ets_lookup_element(SubscrTab, ClientPid, 1) of - List when is_list(List) -> - ignore; - {'EXIT', _} -> - unlink(ClientPid) + try + ?ets_lookup_element(SubscrTab, ClientPid, 1), + ignore + catch error:_ -> unlink(ClientPid) end, try del_subscr(Var, What, ClientPid), diff --git a/lib/mnesia/src/mnesia_text.erl b/lib/mnesia/src/mnesia_text.erl index 0906d18da9..794e633238 100644 --- a/lib/mnesia/src/mnesia_text.erl +++ b/lib/mnesia/src/mnesia_text.erl @@ -84,8 +84,12 @@ validate_tab({Tabname, RecName, List}) -> validate_tab(_) -> error(badtab). make_tabs([{Tab, Def} | Tail]) -> - case catch mnesia:table_info(Tab, where_to_read) of - {'EXIT', _} -> %% non-existing table + try mnesia:table_info(Tab, where_to_read) of + Node -> + io:format("** Table ~w already exists on ~p, just entering data~n", + [Tab, Node]), + make_tabs(Tail) + catch exit:_ -> %% non-existing table case mnesia:create_table(Tab, Def) of {aborted, Reason} -> io:format("** Failed to create table ~w ~n" @@ -95,11 +99,7 @@ make_tabs([{Tab, Def} | Tail]) -> _ -> io:format("New table ~w~n", [Tab]), make_tabs(Tail) - end; - Node -> - io:format("** Table ~w already exists on ~p, just entering data~n", - [Tab, Node]), - make_tabs(Tail) + end end; make_tabs([]) -> @@ -118,11 +118,9 @@ load_data(L) -> parse(File) -> case file(File) of {ok, Terms} -> - case catch collect(Terms) of - {error, X} -> - {error, X}; - Other -> - {ok, Other} + try collect(Terms) of + Other -> {ok, Other} + catch throw:Error -> Error end; Other -> Other diff --git a/lib/mnesia/src/mnesia_tm.erl b/lib/mnesia/src/mnesia_tm.erl index af658150da..b4b46228e9 100644 --- a/lib/mnesia/src/mnesia_tm.erl +++ b/lib/mnesia/src/mnesia_tm.erl @@ -51,6 +51,7 @@ ]). -include("mnesia.hrl"). + -import(mnesia_lib, [set/2]). -import(mnesia_lib, [fatal/2, verbose/2, dbg_out/2]). @@ -119,7 +120,7 @@ init(Parent) -> val(Var) -> case ?catch_val(Var) of - {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_); + {'EXIT', _} -> mnesia_lib:other_val(Var); _VaLuE_ -> _VaLuE_ end. 
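Several hunks in mnesia_schema above and in mnesia_tm below replace a bare catch with the ?SAFE and ?CATCH macros. Their definitions live in mnesia.hrl, which is not part of this diff; the shape assumed here, shown only to make the call sites readable, is a thin try/catch wrapper:

    %% Assumed definitions (not quoted from this diff):
    -define(SAFE(Expr),  try Expr catch _:_ -> ok end).                       %% ignore any failure
    -define(CATCH(Expr), try Expr catch _:_Reason -> {'EXIT', _Reason} end).  %% keep the old catch-style result

    %% Under that reading, ?SAFE(unlink(whereis(?MODULE))) never raises even
    %% if the process is gone, while ?CATCH(Fun()) still yields {'EXIT', Reason}
    %% to callers that inspect the result.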
@@ -224,11 +225,7 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor= end; {From, start_outer} -> %% Create and associate ets_tab with Tid - case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of - {'EXIT', Reason} -> %% system limit - Msg = "Cannot create an ets table for the " - "local transaction store", - reply(From, {error, {system_limit, Msg, Reason}}, State); + try ?ets_new_table(mnesia_trans_store, [bag, public]) of Etab -> tmlink(From), C = mnesia_recover:incr_trans_tid_serial(), @@ -237,6 +234,10 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor= A2 = gb_trees:insert(Tid,[Etab],Coordinators), S2 = State#state{coordinators = A2}, reply(From, {new_tid, Tid, Etab}, S2) + catch error:Reason -> %% system limit + Msg = "Cannot create an ets table for the " + "local transaction store", + reply(From, {error, {system_limit, Msg, Reason}}, State) end; {From, {ask_commit, Protocol, Tid, Commit, DiscNs, RamNs}} -> @@ -339,15 +340,15 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor= end; {From, {add_store, Tid}} -> %% new store for nested transaction - case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of - {'EXIT', Reason} -> %% system limit - Msg = "Cannot create an ets table for a nested " - "local transaction store", - reply(From, {error, {system_limit, Msg, Reason}}, State); + try ?ets_new_table(mnesia_trans_store, [bag, public]) of Etab -> A2 = add_coord_store(Coordinators, Tid, Etab), reply(From, {new_store, Etab}, State#state{coordinators = A2}) + catch error:Reason -> %% system limit + Msg = "Cannot create an ets table for a nested " + "local transaction store", + reply(From, {error, {system_limit, Msg, Reason}}, State) end; {From, {del_store, Tid, Current, Obsolete, PropagateStore}} -> @@ -471,13 +472,13 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor= do_sync_dirty(From, Tid, Commit, _Tab) -> ?eval_debug_fun({?MODULE, sync_dirty, pre}, [{tid, Tid}]), - Res = (catch do_dirty(Tid, Commit)), + Res = do_dirty(Tid, Commit), ?eval_debug_fun({?MODULE, sync_dirty, post}, [{tid, Tid}]), From ! {?MODULE, node(), {dirty_res, Res}}. do_async_dirty(Tid, Commit, _Tab) -> ?eval_debug_fun({?MODULE, async_dirty, pre}, [{tid, Tid}]), - catch do_dirty(Tid, Commit), + do_dirty(Tid, Commit), ?eval_debug_fun({?MODULE, async_dirty, post}, [{tid, Tid}]). @@ -501,7 +502,7 @@ process_dirty_queue(_Tab, []) -> []. 
prepare_pending_coordinators([{Tid, [Store | _Etabs]} | Coords], IgnoreNew) -> - case catch ?ets_lookup(Store, pending) of + try ?ets_lookup(Store, pending) of [] -> prepare_pending_coordinators(Coords, IgnoreNew); [Pending] -> @@ -511,8 +512,8 @@ prepare_pending_coordinators([{Tid, [Store | _Etabs]} | Coords], IgnoreNew) -> true -> ignore end, - prepare_pending_coordinators(Coords, IgnoreNew); - {'EXIT', _} -> + prepare_pending_coordinators(Coords, IgnoreNew) + catch error:_ -> prepare_pending_coordinators(Coords, IgnoreNew) end; prepare_pending_coordinators([], _IgnoreNew) -> @@ -573,11 +574,7 @@ recover_coordinator(Tid, Etabs) -> Store = hd(Etabs), CheckNodes = get_elements(nodes,Store), TellNodes = CheckNodes -- [node()], - case catch arrange(Tid, Store, async) of - {'EXIT', Reason} -> - dbg_out("Recovery of coordinator ~p failed:~n", [Tid, Reason]), - Protocol = asym_trans, - tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes); + try arrange(Tid, Store, async) of {_N, Prep} -> %% Tell the participants about the outcome Protocol = Prep#prep.protocol, @@ -596,6 +593,11 @@ recover_coordinator(Tid, Etabs) -> false -> %% When killed before store havn't been copied to ok %% to the new nested trans store. end + catch _:Reason -> + dbg_out("Recovery of coordinator ~p failed:~n", + [Tid, {Reason, erlang:get_stacktrace()}]), + Protocol = asym_trans, + tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes) end, erase_ets_tabs(Etabs), transaction_terminated(Tid), @@ -724,33 +726,25 @@ non_transaction(OldState={_,_,Trans}, Fun, Args, ActivityKind, Mod) _ -> async end, case transaction(OldState, Fun, Args, infinity, Mod, Kind) of - {atomic, Res} -> - Res; - {aborted,Res} -> - exit(Res) + {atomic, Res} -> Res; + {aborted,Res} -> exit(Res) end; non_transaction(OldState, Fun, Args, ActivityKind, Mod) -> Id = {ActivityKind, self()}, NewState = {Mod, Id, non_transaction}, put(mnesia_activity_state, NewState), - %% I Want something uniqe here, references are expensive - Ref = mNeSia_nOn_TrAnSacTioN, - RefRes = (catch {Ref, apply(Fun, Args)}), - case OldState of - undefined -> erase(mnesia_activity_state); - _ -> put(mnesia_activity_state, OldState) - end, - case RefRes of - {Ref, Res} -> - case Res of - {'EXIT', Reason} -> exit(Reason); - {aborted, Reason} -> mnesia:abort(Reason); - _ -> Res - end; - {'EXIT', Reason} -> - exit(Reason); - Throw -> - throw(Throw) + try apply(Fun, Args) of + {'EXIT', Reason} -> exit(Reason); + {aborted, Reason} -> mnesia:abort(Reason); + Res -> Res + catch + throw:Throw -> throw(Throw); + _:Reason -> exit(Reason) + after + case OldState of + undefined -> erase(mnesia_activity_state); + _ -> put(mnesia_activity_state, OldState) + end end. transaction(OldTidTs, Fun, Args, Retries, Mod, Type) -> @@ -810,23 +804,28 @@ insert_objs([], _Tab) -> ok. execute_transaction(Fun, Args, Factor, Retries, Type) -> - case catch apply_fun(Fun, Args, Type) of - {'EXIT', Reason} -> - check_exit(Fun, Args, Factor, Retries, Reason, Type); + try apply_fun(Fun, Args, Type) of {atomic, Value} -> mnesia_lib:incr_counter(trans_commits), erase(mnesia_activity_state), %% no need to clear locks, already done by commit ... 
%% Flush any un processed mnesia_down messages we might have flush_downs(), - catch unlink(whereis(?MODULE)), + ?SAFE(unlink(whereis(?MODULE))), {atomic, Value}; + {do_abort, Reason} -> + check_exit(Fun, Args, Factor, Retries, {aborted, Reason}, Type); {nested_atomic, Value} -> mnesia_lib:incr_counter(trans_commits), - {atomic, Value}; - Value -> %% User called throw + {atomic, Value} + catch throw:Value -> %% User called throw Reason = {aborted, {throw, Value}}, - return_abort(Fun, Args, Reason) + return_abort(Fun, Args, Reason); + error:Reason -> + ST = erlang:get_stacktrace(), + check_exit(Fun, Args, Factor, Retries, {Reason,ST}, Type); + _:Reason -> + check_exit(Fun, Args, Factor, Retries, Reason, Type) end. apply_fun(Fun, Args, Type) -> @@ -836,10 +835,10 @@ apply_fun(Fun, Args, Type) -> {atomic, Result}; do_commit_nested -> {nested_atomic, Result}; - {do_abort, {aborted, Reason}} -> - {'EXIT', {aborted, Reason}}; - {do_abort, Reason} -> - {'EXIT', {aborted, Reason}} + {do_abort, {aborted, Reason}} -> + {do_abort, Reason}; + {do_abort, _} = Abort -> + Abort end. check_exit(Fun, Args, Factor, Retries, Reason, Type) -> @@ -943,7 +942,7 @@ return_abort(Fun, Args, Reason) -> OldStore = Ts#tidstore.store, Nodes = get_elements(nodes, OldStore), intercept_friends(Tid, Ts), - catch mnesia_lib:incr_counter(trans_failures), + ?SAFE(mnesia_lib:incr_counter(trans_failures)), Level = Ts#tidstore.level, if Level == 1 -> @@ -951,7 +950,7 @@ return_abort(Fun, Args, Reason) -> ?MODULE ! {delete_transaction, Tid}, erase(mnesia_activity_state), flush_downs(), - catch unlink(whereis(?MODULE)), + ?SAFE(unlink(whereis(?MODULE))), {aborted, mnesia_lib:fix_error(Reason)}; true -> %% Nested transaction @@ -1005,11 +1004,11 @@ erase_activity_id() -> erase(mnesia_activity_state). get_elements(Type,Store) -> - case catch ?ets_lookup(Store, Type) of + try ?ets_lookup(Store, Type) of [] -> []; [{_,Val}] -> [Val]; - {'EXIT', _} -> []; Vals -> [Val|| {_,Val} <- Vals] + catch error:_ -> [] end. opt_propagate_store(_Current, _Obsolete, false) -> @@ -1032,7 +1031,7 @@ intercept_friends(_Tid, Ts) -> intercept_best_friend([],_) -> ok; intercept_best_friend([{stop,Fun} | R],Ignore) -> - catch Fun(), + ?CATCH(Fun()), intercept_best_friend(R,Ignore); intercept_best_friend([Pid | R],false) -> Pid ! {activity_ended, undefined, self()}, @@ -1046,25 +1045,12 @@ wait_for_best_friend(Pid, Timeout) -> {'EXIT', Pid, _} -> ok; {activity_ended, _, Pid} -> ok after Timeout -> - case my_process_is_alive(Pid) of + case erlang:is_process_alive(Pid) of true -> wait_for_best_friend(Pid, 1000); false -> ok end end. -my_process_is_alive(Pid) -> - case catch erlang:is_process_alive(Pid) of % New BIF in R5 - true -> - true; - false -> - false; - {'EXIT', _} -> % Pre R5 backward compatibility - case process_info(Pid, message_queue_len) of - undefined -> false; - _ -> true - end - end. - dirty(Protocol, Item) -> {{Tab, Key}, _Val, _Op} = Item, Tid = {dirty, self()}, @@ -1144,18 +1130,8 @@ arrange(Tid, Store, Type) -> async -> #prep{protocol = sym_trans, records = Recs}; sync -> #prep{protocol = sync_sym_trans, records = Recs} end, - case catch do_arrange(Tid, Store, Key, Prep, N) of - {'EXIT', Reason} -> - dbg_out("do_arrange failed ~p ~p~n", [Reason, Tid]), - case Reason of - {aborted, R} -> - mnesia:abort(R); - _ -> - mnesia:abort(Reason) - end; - {New, Prepared} -> - {New, Prepared#prep{records = reverse(Prepared#prep.records)}} - end. 
+ {New, Prepared} = do_arrange(Tid, Store, Key, Prep, N), + {New, Prepared#prep{records = reverse(Prepared#prep.records)}}. reverse([]) -> []; @@ -1522,7 +1498,7 @@ multi_commit(asym_trans, Majority, Tid, CR, Store) -> Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs), ?ets_insert(Store, Pending), {WaitFor, Local} = ask_commit(asym_trans, Tid, CR2, DiscNs, RamNs), - SchemaPrep = (catch mnesia_schema:prepare_commit(Tid, Local, {coord, WaitFor})), + SchemaPrep = ?CATCH(mnesia_schema:prepare_commit(Tid, Local, {coord, WaitFor})), {Votes, Pids} = rec_all(WaitFor, Tid, do_commit, []), ?eval_debug_fun({?MODULE, multi_commit_asym_got_votes}, @@ -1589,7 +1565,7 @@ rec_acc_pre_commit([Pid | Tail], Tid, Store, Commit, Res, DumperMode, GoodPids, SchemaAckPids); {mnesia_down, Node} when Node == node(Pid) -> AbortRes = {do_abort, {bad_commit, Node}}, - catch Pid ! {Tid, AbortRes}, %% Tell him that he has died + ?SAFE(Pid ! {Tid, AbortRes}), %% Tell him that he has died rec_acc_pre_commit(Tail, Tid, Store, Commit, AbortRes, DumperMode, GoodPids, SchemaAckPids) end; @@ -1666,7 +1642,7 @@ commit_participant(Coord, Tid, C = #commit{}, DiscNs, RamNs) -> commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) -> ?eval_debug_fun({?MODULE, commit_participant, pre}, [{tid, Tid}]), - case catch mnesia_schema:prepare_commit(Tid, C0, {part, Coord}) of + try mnesia_schema:prepare_commit(Tid, C0, {part, Coord}) of {Modified, C = #commit{}, DumperMode} -> %% If we can not find any local unclear decision %% we should presume abort at startup recovery @@ -1742,9 +1718,8 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) -> reply(Coord, {do_abort, Tid, self(), {bad_commit,internal}}), verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n", [Tid, Msg]) - end; - - {'EXIT', Reason} -> + end + catch _:Reason -> ?eval_debug_fun({?MODULE, commit_participant, vote_no}, [{tid, Tid}]), reply(Coord, {vote_no, Tid, Reason}), @@ -1790,22 +1765,20 @@ do_commit(Tid, C, DumperMode) -> %% Update the items do_update(Tid, Storage, [Op | Ops], OldRes) -> - case catch do_update_op(Tid, Storage, Op) of - ok -> - do_update(Tid, Storage, Ops, OldRes); - {'EXIT', Reason} -> + try do_update_op(Tid, Storage, Op) of + ok -> do_update(Tid, Storage, Ops, OldRes); + NewRes -> do_update(Tid, Storage, Ops, NewRes) + catch _:Reason -> %% This may only happen when we recently have %% deleted our local replica, changed storage_type %% or transformed table %% BUGBUG: Updates may be lost if storage_type is changed. %% Determine actual storage type and try again. %% BUGBUG: Updates may be lost if table is transformed. - + ST = erlang:get_stacktrace(), verbose("do_update in ~w failed: ~p -> {'EXIT', ~p}~n", - [Tid, Op, Reason]), - do_update(Tid, Storage, Ops, OldRes); - NewRes -> - do_update(Tid, Storage, Ops, NewRes) + [Tid, Op, {Reason, ST}]), + do_update(Tid, Storage, Ops, OldRes) end; do_update(_Tid, _Storage, [], Res) -> Res. 
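do_update/4 above, like recover_coordinator/2 and execute_transaction/5 earlier, now logs the stacktrace together with the reason when its catch clause fires. erlang:get_stacktrace/0 is read inside the catch clause, which is the idiom of this OTP generation (later releases bind the stacktrace directly in the catch pattern instead). A small self-contained sketch of the pattern, with a hypothetical module and function name:

    -module(stacktrace_demo).
    -export([log_failure/1]).

    log_failure(Fun) when is_function(Fun, 0) ->
        try Fun()
        catch _:Reason ->
            ST = erlang:get_stacktrace(),   %% read inside the catch clause
            error_logger:error_msg("call failed: ~p~n~p~n", [Reason, ST]),
            {error, Reason}
        end.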
@@ -1821,14 +1794,15 @@ do_update_op(Tid, Storage, {{Tab, K}, Val, delete}) -> do_update_op(Tid, Storage, {{Tab, K}, {RecName, Incr}, update_counter}) -> {NewObj, OldObjs} = - case catch mnesia_lib:db_update_counter(Storage, Tab, K, Incr) of - NewVal when is_integer(NewVal), NewVal >= 0 -> - {{RecName, K, NewVal}, [{RecName, K, NewVal - Incr}]}; - _ when Incr > 0 -> + try + NewVal = mnesia_lib:db_update_counter(Storage, Tab, K, Incr), + true = is_integer(NewVal) andalso (NewVal >= 0), + {{RecName, K, NewVal}, [{RecName, K, NewVal - Incr}]} + catch error:_ when Incr > 0 -> New = {RecName, K, Incr}, mnesia_lib:db_put(Storage, Tab, New), {New, []}; - _ -> + error:_ -> Zero = {RecName, K, 0}, mnesia_lib:db_put(Storage, Tab, Zero), {Zero, []} @@ -1913,16 +1887,14 @@ commit_clear([H|R], Tid, Tab, K, Obj) do_snmp(_, []) -> ok; do_snmp(Tid, [Head | Tail]) -> - case catch mnesia_snmp_hook:update(Head) of - {'EXIT', Reason} -> + try mnesia_snmp_hook:update(Head) + catch _:Reason -> %% This should only happen when we recently have %% deleted our local replica or recently deattached %% the snmp table - + ST = erlang:get_stacktrace(), verbose("do_snmp in ~w failed: ~p -> {'EXIT', ~p}~n", - [Tid, Head, Reason]); - ok -> - ignore + [Tid, Head, {Reason, ST}]) end, do_snmp(Tid, Tail). @@ -2093,7 +2065,7 @@ rec_all([Node | Tail], Tid, Res, Pids) -> %% Make sure that mnesia_tm knows it has died %% it may have been restarted Abort = {do_abort, {bad_commit, Node}}, - catch {?MODULE, Node} ! {Tid, Abort}, + ?SAFE({?MODULE, Node} ! {Tid, Abort}), rec_all(Tail, Tid, Abort, Pids) end; rec_all([], _Tid, Res, Pids) -> diff --git a/lib/mnesia/test/mnesia_config_backup.erl b/lib/mnesia/test/mnesia_config_backup.erl index 0916e255e2..a7d8c04a45 100644 --- a/lib/mnesia/test/mnesia_config_backup.erl +++ b/lib/mnesia/test/mnesia_config_backup.erl @@ -90,7 +90,8 @@ open_read(Name) -> List = lists:reverse(ReverseList), {ok, #backup{name = Name, mode = read, items = List}}; {error, Reason} -> - {error, {open_read, Reason}} + %% {error, {open_read, Reason}} + {Reason, error} %% Testing error handling in mnesia end. read(Opaque) when Opaque#backup.mode == read -> diff --git a/lib/mnesia/test/mnesia_config_test.erl b/lib/mnesia/test/mnesia_config_test.erl index c495bce63f..a8fb93b28e 100644 --- a/lib/mnesia/test/mnesia_config_test.erl +++ b/lib/mnesia/test/mnesia_config_test.erl @@ -37,7 +37,6 @@ dump_log_update_in_place/1, event_module/1, - ignore_fallback_at_startup/1, inconsistent_database/1, max_wait_for_decision/1, send_compressed/1, @@ -104,7 +103,7 @@ all() -> [access_module, auto_repair, backup_module, debug, dir, dump_log_load_regulation, {group, dump_log_thresholds}, dump_log_update_in_place, - event_module, ignore_fallback_at_startup, + event_module, inconsistent_database, max_wait_for_decision, send_compressed, app_test, {group, schema_config}, unknown_config]. 
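Further up, the update_counter clause of do_update_op/3 in mnesia_tm was rewritten from a guarded case catch into a try; the true = is_integer(NewVal) andalso (NewVal >= 0) assertion is what preserves the old guard, since any unexpected counter value becomes a badmatch error and falls into the same error:_ clauses as a missing counter. Worked by hand, not part of the patch:

    %% is_integer(ok) andalso (ok >= 0) evaluates to false (andalso
    %% short-circuits), so true = false raises error:{badmatch, false},
    %% which the error:_ clauses of the try then handle.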
@@ -317,11 +316,17 @@ backup_module(Config) when is_list(Config) -> ?match([], mnesia_test_lib:start_mnesia(Nodes, [test_table, test_table2])), %% Now check newly started tables - ?match({atomic, [1,2]}, + ?match({atomic, [1,2]}, mnesia:transaction(fun() -> lists:sort(mnesia:all_keys(test_table)) end)), - ?match({atomic, [3,4]}, + ?match({atomic, [3,4]}, mnesia:transaction(fun() -> lists:sort(mnesia:all_keys(test_table2)) end)), - + + %% Test some error cases + mnesia:set_debug_level(debug), + ?match({error, _}, mnesia:install_fallback("NonExisting.FILE")), + ?match({error, _}, mnesia:install_fallback(filename:join(mnesia_lib:dir(), "LATEST.LOG"))), + + %% Cleanup file:delete(File), ?verify_mnesia(Nodes, []), ?cleanup(1, Config), @@ -609,13 +614,6 @@ dump_log_load_regulation(Config) when is_list(Config) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -ignore_fallback_at_startup(doc) -> - ["Start Mnesia without rollback of the database to the fallback. ", - "Once Mnesia has been (re)started the installed fallback should", - "be handled as a normal active fallback.", - "Install a customized event module which disables the termination", - "of Mnesia when mnesia_down occurrs with an active fallback."]. - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% max_wait_for_decision(doc) -> diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl index 9e0a8db1ae..68efa3f6ea 100644 --- a/lib/mnesia/test/mnesia_evil_backup.erl +++ b/lib/mnesia/test/mnesia_evil_backup.erl @@ -142,6 +142,9 @@ restore_errors(Config) when is_list(Config) -> ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{skip_tables, xxx}])), ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{recreate_tables, [schema]}])), ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{default_op, asdklasd}])), + MnesiaDir = mnesia_lib:dir(), + ?match({aborted, {not_a_log_file, _}}, mnesia:restore(filename:join(MnesiaDir, "schema.DAT"), [])), + ?match({aborted, _}, mnesia:restore(filename:join(MnesiaDir, "LATEST.LOG"), [])), ok. restore_clear(suite) -> []; @@ -488,6 +491,14 @@ install_fallback(Config) when is_list(Config) -> mnesia_test_lib:kill_mnesia([Node1, Node2]), timer:sleep(timer:seconds(1)), % Let it die! + ok = mnesia:start([{ignore_fallback_at_startup, true}]), + ok = mnesia:wait_for_tables([Tab, Tab2, Tab3], 10000), + ?match([{Tab, 6, test_nok}], mnesia:dirty_read({Tab, 6})), + mnesia_test_lib:kill_mnesia([Node1]), + application:set_env(mnesia, ignore_fallback_at_startup, false), + + timer:sleep(timer:seconds(1)), % Let it die! 
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [Tab, Tab2, Tab3])), % Verify @@ -510,6 +521,13 @@ install_fallback(Config) when is_list(Config) -> file:delete(File3), ?match({error, _}, mnesia:install_fallback(File3)), ?match({error, _}, mnesia:install_fallback(File2, mnesia_badmod)), + ?match({error, _}, mnesia:install_fallback(File2, {foo, foo})), + ?match({error, _}, mnesia:install_fallback(File2, [{foo, foo}])), + ?match({error, {badarg, {skip_tables, _}}}, + mnesia:install_fallback(File2, [{default_op, skip_tables}, + {default_op, keep_tables}, + {keep_tables, [Tab, Tab2, Tab3]}, + {skip_tables, [foo,{asd}]}])), ?match(ok, mnesia:install_fallback(File2, mnesia_backup)), ?match(ok, file:delete(File)), ?match(ok, file:delete(File2)), @@ -535,6 +553,7 @@ uninstall_fallback(Config) when is_list(Config) -> ?match(ok, mnesia:install_fallback(File2)), ?match(ok, file:delete(File)), ?match(ok, file:delete(File2)), + ?match({error, _}, mnesia:uninstall_fallback([foobar])), ?match(ok, mnesia:uninstall_fallback()), mnesia_test_lib:kill_mnesia([Node1, Node2]), diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk index 94eb360591..b23339e408 100644 --- a/lib/mnesia/vsn.mk +++ b/lib/mnesia/vsn.mk @@ -1 +1 @@ -MNESIA_VSN = 4.12.4 +MNESIA_VSN = 4.12.5 diff --git a/lib/observer/doc/src/notes.xml b/lib/observer/doc/src/notes.xml index 11729078c2..a9ec68fc9e 100644 --- a/lib/observer/doc/src/notes.xml +++ b/lib/observer/doc/src/notes.xml @@ -31,6 +31,21 @@ <p>This document describes the changes made to the Observer application.</p> +<section><title>Observer 2.0.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix crash when opening a process information window.</p> + <p> + Own Id: OTP-12634</p> + </item> + </list> + </section> + +</section> + <section><title>Observer 2.0.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/observer/src/Makefile b/lib/observer/src/Makefile index c120865213..a42967644a 100644 --- a/lib/observer/src/Makefile +++ b/lib/observer/src/Makefile @@ -61,6 +61,7 @@ MODULES= \ etop_txt \ observer \ observer_app_wx \ + observer_alloc_wx \ observer_html_lib \ observer_lib \ observer_perf_wx \ diff --git a/lib/observer/src/observer.app.src b/lib/observer/src/observer.app.src index 197d0a2a95..e293990d64 100644 --- a/lib/observer/src/observer.app.src +++ b/lib/observer/src/observer.app.src @@ -44,6 +44,7 @@ etop_tr, etop_txt, observer, + observer_alloc_wx, observer_app_wx, observer_html_lib, observer_lib, diff --git a/lib/observer/src/observer_alloc_wx.erl b/lib/observer/src/observer_alloc_wx.erl new file mode 100644 index 0000000000..0c4bc9ee4b --- /dev/null +++ b/lib/observer/src/observer_alloc_wx.erl @@ -0,0 +1,256 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +-module(observer_alloc_wx). + +-export([start_link/2]). 
+ +%% wx_object callbacks +-export([init/1, handle_info/2, terminate/2, code_change/3, handle_call/3, + handle_event/2, handle_sync_event/3, handle_cast/2]). + +-behaviour(wx_object). +-include_lib("wx/include/wx.hrl"). +-include("observer_defs.hrl"). + +-record(state, + { + offset = 0.0, + active = false, + parent, + windows, + data = {0, queue:new()}, + panel, + paint, + appmon, + async + }). + +-define(ALLOC_W, 1). +-define(UTIL_W, 2). + +start_link(Notebook, Parent) -> + wx_object:start_link(?MODULE, [Notebook, Parent], []). + +init([Notebook, Parent]) -> + try + Panel = wxPanel:new(Notebook), + Main = wxBoxSizer:new(?wxVERTICAL), + Style = ?wxFULL_REPAINT_ON_RESIZE bor ?wxCLIP_CHILDREN, + Carrier = wxPanel:new(Panel, [{winid, ?ALLOC_W}, {style,Style}]), + Utilz = wxPanel:new(Panel, [{winid, ?UTIL_W}, {style,Style}]), + BorderFlags = ?wxLEFT bor ?wxRIGHT, + wxSizer:add(Main, Carrier, [{flag, ?wxEXPAND bor BorderFlags bor ?wxTOP}, + {proportion, 1}, {border, 5}]), + + wxSizer:add(Main, Utilz, [{flag, ?wxEXPAND bor BorderFlags}, + {proportion, 1}, {border, 5}]), + + MemWin = {MemPanel,_} = create_mem_info(Panel), + wxSizer:add(Main, MemPanel, [{flag, ?wxEXPAND bor BorderFlags bor ?wxBOTTOM}, + {proportion, 1}, {border, 5}]), + wxWindow:setSizer(Panel, Main), + + PaintInfo = observer_perf_wx:setup_graph_drawing([Carrier, Utilz]), + {Panel, #state{parent=Parent, + panel =Panel, + windows = {Carrier, Utilz, MemWin}, + paint=PaintInfo} + } + catch _:Err -> + io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]), + {stop, Err} + end. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +handle_event(#wx{event=#wxCommand{type=command_menu_selected}}, + State = #state{}) -> + {noreply, State}; + +handle_event(Event, _State) -> + error({unhandled_event, Event}). + +%%%%%%%%%% +handle_sync_event(#wx{obj=Panel, event = #wxPaint{}},_, + #state{active=Active, offset=Offset, paint=Paint, + windows=Windows, data=Data}) -> + %% Sigh workaround bug on MacOSX (Id in paint event is always 0) + Id = if Panel =:= element(?ALLOC_W, Windows) -> alloc; + Panel =:= element(?UTIL_W, Windows) -> utilz + end, + observer_perf_wx:refresh_panel(Panel, Id, Offset, Data, Active, Paint), + ok. +%%%%%%%%%% +handle_call(Event, From, _State) -> + error({unhandled_call, Event, From}). + +handle_cast(Event, _State) -> + error({unhandled_cast, Event}). 
+%%%%%%%%%% + +handle_info({Key, {promise_reply, {badrpc, _}}}, #state{async=Key} = State) -> + {noreply, State#state{active=false, appmon=undefined}}; + +handle_info({Key, {promise_reply, SysInfo}}, #state{async=Key, data=Data} = State) -> + Info = alloc_info(SysInfo), + update_alloc(State, Info), + {noreply, State#state{offset=0.0, data = add_data(Info, Data), async=undefined}}; + +handle_info({refresh, Seq, Freq, Node}, #state{panel=Panel, appmon=Node, async=Key} = State) -> + wxWindow:refresh(Panel), + Next = Seq+1, + if + Next > Freq, Key =:= undefined -> + erlang:send_after(trunc(1000 / Freq), self(), {refresh, 1, Freq, Node}), + Req = rpc:async_call(Node, observer_backend, sys_info, []), + {noreply, State#state{offset=Seq/Freq, async=Req}}; + true -> + erlang:send_after(trunc(1000 / Freq), self(), {refresh, Next, Freq, Node}), + {noreply, State#state{offset=Seq/Freq}} + end; +handle_info({refresh, _Seq, _Freq, _Node}, State) -> + {noreply, State}; + +handle_info({active, Node}, State = #state{parent=Parent, panel=Panel, appmon=Old}) -> + create_menus(Parent, []), + try + Node = Old, + wxWindow:refresh(Panel), + {noreply, State#state{active=true}} + catch _:_ -> + SysInfo = observer_wx:try_rpc(Node, observer_backend, sys_info, []), + Info = alloc_info(SysInfo), + Freq = 6, + erlang:send_after(trunc(1000 / Freq), self(), {refresh, 1, Freq, Node}), + wxWindow:refresh(Panel), + {noreply, State#state{active=true, appmon=Node, offset=0.0, + data = add_data(Info, {0, queue:new()})}} + end; + +handle_info(not_active, State = #state{appmon=_Pid}) -> + {noreply, State#state{active=false}}; + +handle_info({'EXIT', Old, _}, State = #state{appmon=Old}) -> + {noreply, State#state{active=false, appmon=undefined}}; + +handle_info(_Event, State) -> + %% io:format("~p:~p: ~p~n",[?MODULE,?LINE,_Event]), + {noreply, State}. + +terminate(_Event, #state{}) -> + ok. +code_change(_, _, State) -> + State. + +%%%%%%%%%% + +add_data(Stats, {N, Q}) when N > 60 -> + {N, queue:drop(queue:in(Stats, Q))}; +add_data(Stats, {N, Q}) -> + {N+1, queue:in(Stats, Q)}. + +update_alloc(#state{windows={_, _, {_, Grid}}}, Fields) -> + Max = wxListCtrl:getItemCount(Grid), + Update = fun({Name, BS, CS}, Row) -> + (Row >= Max) andalso wxListCtrl:insertItem(Grid, Row, ""), + wxListCtrl:setItem(Grid, Row, 0, observer_lib:to_str(Name)), + wxListCtrl:setItem(Grid, Row, 1, observer_lib:to_str(BS div 1024)), + wxListCtrl:setItem(Grid, Row, 2, observer_lib:to_str(CS div 1024)), + Row + 1 + end, + lists:foldl(Update, 0, Fields), + Fields. + +alloc_info(SysInfo) -> + AllocInfo = proplists:get_value(alloc_info, SysInfo, []), + alloc_info(AllocInfo, [], 0, 0, true). + +alloc_info([{Type,Instances}|Allocators],TypeAcc,TotalBS,TotalCS,IncludeTotal) -> + {BS,CS,NewTotalBS,NewTotalCS,NewIncludeTotal} = + sum_alloc_instances(Instances,0,0,TotalBS,TotalCS), + alloc_info(Allocators,[{Type,BS,CS}|TypeAcc],NewTotalBS,NewTotalCS, + IncludeTotal andalso NewIncludeTotal); +alloc_info([],TypeAcc,TotalBS,TotalCS,IncludeTotal) -> + Types = [X || X={_,BS,CS} <- TypeAcc, (BS>0 orelse CS>0)], + case IncludeTotal of + true -> + [{total,TotalBS,TotalCS} | lists:reverse(Types)]; + false -> + lists:reverse(Types) + end. 
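The new panel samples allocator data on each refresh and stores it in the data field of its state record through add_data/2 above. A quick standalone check (hypothetical module name, add_data clauses copied from the new module) of how that keeps the history bounded:

    -module(alloc_window_demo).
    -export([run/0]).

    %% Same clauses as add_data/2 in observer_alloc_wx above.
    add_data(Stats, {N, Q}) when N > 60 -> {N, queue:drop(queue:in(Stats, Q))};
    add_data(Stats, {N, Q})             -> {N + 1, queue:in(Stats, Q)}.

    run() ->
        {N, Q} = lists:foldl(fun add_data/2, {0, queue:new()}, lists:seq(1, 500)),
        {N, queue:len(Q)}.   %% => {61,61}; older samples are dropped as new ones arrive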
+ +sum_alloc_instances(false,BS,CS,TotalBS,TotalCS) -> + {BS,CS,TotalBS,TotalCS,false}; +sum_alloc_instances([{_,_,Data}|Instances],BS,CS,TotalBS,TotalCS) -> + {NewBS,NewCS,NewTotalBS,NewTotalCS} = + sum_alloc_one_instance(Data,BS,CS,TotalBS,TotalCS), + sum_alloc_instances(Instances,NewBS,NewCS,NewTotalBS,NewTotalCS); +sum_alloc_instances([],BS,CS,TotalBS,TotalCS) -> + {BS,CS,TotalBS,TotalCS,true}. + +sum_alloc_one_instance([{sbmbcs,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}| + Rest],OldBS,OldCS,TotalBS,TotalCS) -> + sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS,TotalCS); +sum_alloc_one_instance([{_,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}| + Rest],OldBS,OldCS,TotalBS,TotalCS) -> + sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS); +sum_alloc_one_instance([{_,[{blocks_size,BS},{carriers_size,CS}]}| + Rest],OldBS,OldCS,TotalBS,TotalCS) -> + sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS); +sum_alloc_one_instance([_|Rest],BS,CS,TotalBS,TotalCS) -> + sum_alloc_one_instance(Rest,BS,CS,TotalBS,TotalCS); +sum_alloc_one_instance([],BS,CS,TotalBS,TotalCS) -> + {BS,CS,TotalBS,TotalCS}. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +create_mem_info(Parent) -> + Panel = wxPanel:new(Parent), + wxWindow:setBackgroundColour(Panel, {255,255,255}), + Style = ?wxLC_REPORT bor ?wxLC_SINGLE_SEL bor ?wxLC_HRULES bor ?wxLC_VRULES, + Grid = wxListCtrl:new(Panel, [{style, Style}]), + Li = wxListItem:new(), + AddListEntry = fun({Name, Align, DefSize}, Col) -> + wxListItem:setText(Li, Name), + wxListItem:setAlign(Li, Align), + wxListCtrl:insertColumn(Grid, Col, Li), + wxListCtrl:setColumnWidth(Grid, Col, DefSize), + Col + 1 + end, + ListItems = [{"Allocator Type", ?wxLIST_FORMAT_LEFT, 200}, + {"Block size (kB)", ?wxLIST_FORMAT_RIGHT, 150}, + {"Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150}], + lists:foldl(AddListEntry, 0, ListItems), + wxListItem:destroy(Li), + + Sizer = wxBoxSizer:new(?wxVERTICAL), + wxSizer:add(Sizer, Grid, [{flag, ?wxEXPAND bor ?wxLEFT bor ?wxRIGHT}, + {border, 5}, {proportion, 1}]), + wxWindow:setSizerAndFit(Panel, Sizer), + {Panel, Grid}. + + +create_menus(Parent, _) -> + MenuEntries = + [{"File", + [ + ]} + ], + observer_wx:create_menus(Parent, MenuEntries). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl index 34c7b127ff..9592ab5977 100644 --- a/lib/observer/src/observer_lib.erl +++ b/lib/observer/src/observer_lib.erl @@ -493,8 +493,11 @@ link_entry2(Panel,{Target,Str},Cursor) -> wxWindow:setToolTip(TC, ToolTip), TC. -to_link(Tuple = {_Target, _Str}) -> - Tuple; +to_link(RegName={Name, Node}) when is_atom(Name), is_atom(Node) -> + Str = io_lib:format("{~p,~p}", [Name, Node]), + {RegName, Str}; +to_link(TI = {_Target, _Identifier}) -> + TI; to_link(Target0) -> Target=to_str(Target0), {Target, Target}. diff --git a/lib/observer/src/observer_perf_wx.erl b/lib/observer/src/observer_perf_wx.erl index 8173349ed7..4df9218087 100644 --- a/lib/observer/src/observer_perf_wx.erl +++ b/lib/observer/src/observer_perf_wx.erl @@ -24,7 +24,8 @@ handle_event/2, handle_sync_event/3, handle_cast/2]). %% Drawing wrappers for DC and GC areas --export([haveGC/0, +-export([setup_graph_drawing/1, refresh_panel/6, + haveGC/0, setPen/2, setFont/3, setBrush/2, strokeLine/5, strokeLines/2, drawRoundedRectangle/6, drawText/4, getTextExtent/2]). 
@@ -42,13 +43,12 @@ data = {0, queue:new()}, panel, paint, - appmon, - usegc = false + appmon }). -define(wxGC, wxGraphicsContext). --record(paint, {font, small, pen, pen2, pens}). +-record(paint, {font, small, pen, pen2, pens, usegc = false}). -define(RQ_W, 1). -define(MEM_W, 2). @@ -63,14 +63,11 @@ init([Notebook, Parent]) -> Main = wxBoxSizer:new(?wxVERTICAL), Style = ?wxFULL_REPAINT_ON_RESIZE bor ?wxCLIP_CHILDREN, CPU = wxPanel:new(Panel, [{winid, ?RQ_W}, {style,Style}]), - wxWindow:setBackgroundColour(CPU, ?wxWHITE), wxSizer:add(Main, CPU, [{flag, ?wxEXPAND bor ?wxALL}, {proportion, 1}, {border, 5}]), MemIO = wxBoxSizer:new(?wxHORIZONTAL), MEM = wxPanel:new(Panel, [{winid, ?MEM_W}, {style,Style}]), - wxWindow:setBackgroundColour(MEM, ?wxWHITE), IO = wxPanel:new(Panel, [{winid, ?IO_W}, {style,Style}]), - wxWindow:setBackgroundColour(IO, ?wxWHITE), wxSizer:add(MemIO, MEM, [{flag, ?wxEXPAND bor ?wxLEFT}, {proportion, 1}, {border, 5}]), wxSizer:add(MemIO, IO, [{flag, ?wxEXPAND bor ?wxLEFT bor ?wxRIGHT}, @@ -79,53 +76,56 @@ init([Notebook, Parent]) -> {proportion, 1}, {border, 5}]), wxWindow:setSizer(Panel, Main), - wxPanel:connect(CPU, paint, [callback]), - wxPanel:connect(IO, paint, [callback]), - wxPanel:connect(MEM, paint, [callback]), - case os:type() of - {win32, _} -> %% Ignore erase on windows - wxPanel:connect(CPU, erase_background, [{callback, fun(_,_) -> ok end}]), - wxPanel:connect(IO, erase_background, [{callback, fun(_,_) -> ok end}]), - wxPanel:connect(MEM, erase_background, [{callback, fun(_,_) -> ok end}]); - _ -> ok - end, + PaintInfo = setup_graph_drawing([CPU, MEM, IO]), + process_flag(trap_exit, true), + {Panel, #state{parent=Parent, + panel =Panel, + windows = {CPU, MEM, IO}, + paint=PaintInfo + }} + catch _:Err -> + io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]), + {stop, Err} + end. 
+ +setup_graph_drawing(Panels) -> + IsWindows = element(1, os:type()) =:= win32, + IgnoreCB = {callback, fun(_,_) -> ok end}, + Do = fun(Panel) -> + wxWindow:setBackgroundColour(Panel, ?wxWHITE), + wxPanel:connect(Panel, paint, [callback]), + IsWindows andalso + wxPanel:connect(Panel, erase_background, [IgnoreCB]) + end, + _ = [Do(Panel) || Panel <- Panels], UseGC = haveGC(), Version28 = ?wxMAJOR_VERSION =:= 2 andalso ?wxMINOR_VERSION =:= 8, {Font, SmallFont} - = case os:type() of - {unix, _} when UseGC, Version28 -> + = if UseGC, Version28 -> %% Def font is really small when using Graphics contexts in 2.8 %% Hardcode it F = wxFont:new(12,?wxFONTFAMILY_DECORATIVE,?wxFONTSTYLE_NORMAL,?wxFONTWEIGHT_BOLD), SF = wxFont:new(10, ?wxFONTFAMILY_DECORATIVE, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_NORMAL), {F, SF}; - _ -> + true -> DefFont = wxSystemSettings:getFont(?wxSYS_DEFAULT_GUI_FONT), DefSize = wxFont:getPointSize(DefFont), DefFamily = wxFont:getFamily(DefFont), - F = wxFont:new(DefSize, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_BOLD), - SF = wxFont:new(DefSize-1, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_NORMAL), + F = wxFont:new(DefSize-1, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_BOLD), + SF = wxFont:new(DefSize-2, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_NORMAL), {F, SF} end, BlackPen = wxPen:new({0,0,0}, [{width, 2}]), - Pens = [wxPen:new(Col, [{width, 2}]) || Col <- tuple_to_list(colors())], - process_flag(trap_exit, true), - {Panel, #state{parent=Parent, - panel =Panel, - windows = {CPU, MEM, IO}, - usegc=UseGC, - paint=#paint{font = Font, - small = SmallFont, - pen = ?wxGREY_PEN, - pen2 = BlackPen, - pens = list_to_tuple(Pens) - } - }} - catch _:Err -> - io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]), - {stop, Err} - end. + Pens = [wxPen:new(Col, [{width, 3}]) || Col <- tuple_to_list(colors())], + #paint{usegc = UseGC, + font = Font, + small = SmallFont, + pen = ?wxGREY_PEN, + pen2 = BlackPen, + pens = list_to_tuple(Pens) + }. + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -139,21 +139,25 @@ handle_event(Event, _State) -> %%%%%%%%%% handle_sync_event(#wx{obj=Panel, event = #wxPaint{}},_, #state{active=Active, offset=Offset, paint=Paint, - windows=Windows, data=Data, usegc=UseGC}) -> - %% PaintDC must be created in a callback to work on windows. + windows=Windows, data=Data}) -> %% Sigh workaround bug on MacOSX (Id in paint event is always 0) %% Panel = element(Id, Windows), - Id = if Panel =:= element(?RQ_W, Windows) -> ?RQ_W; - Panel =:= element(?MEM_W, Windows) -> ?MEM_W; - Panel =:= element(?IO_W, Windows) -> ?IO_W + Id = if Panel =:= element(?RQ_W, Windows) -> runq; + Panel =:= element(?MEM_W, Windows) -> memory; + Panel =:= element(?IO_W, Windows) -> io end, - IsWindows = element(1, os:type()) =:= win32, - DC = if IsWindows -> + refresh_panel(Panel, Id, Offset, Data, Active, Paint), + ok. + +refresh_panel(Panel, Id, Offset, Data, Active, #paint{usegc=UseGC} = Paint) -> + %% PaintDC must be created in a callback to work on windows. 
+ IsWindows = element(1, os:type()) =:= win32, + DC = if IsWindows -> %% Ugly hack to aviod flickering on windows, works on windows only %% But the other platforms are doublebuffered by default wx:typeCast(wxBufferedPaintDC:new(Panel), wxPaintDC); - true -> + true -> wxPaintDC:new(Panel) end, IsWindows andalso wxDC:clear(DC), @@ -167,8 +171,9 @@ handle_sync_event(#wx{obj=Panel, event = #wxPaint{}},_, io:format("Internal error ~p ~p~n",[Err, erlang:get_stacktrace()]) end, UseGC andalso ?wxGC:destroy(GC), - wxPaintDC:destroy(DC), - ok. + wxPaintDC:destroy(DC). + + %%%%%%%%%% handle_call(Event, From, _State) -> error({unhandled_call, Event, From}). @@ -247,10 +252,10 @@ create_menus(Parent, _) -> observer_wx:create_menus(Parent, MenuEntries). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -collect_data(?RQ_W, {N, Q}) -> +collect_data(runq, {N, Q}) -> case queue:to_list(Q) of - [] -> {0, 0, []}; - [_] -> {0, 0, []}; + [] -> {0, 0, [], []}; + [_] -> {0, 0, [], []}; [{stats, _Ver, Init0, _IO, _Mem}|Data0] -> Init = lists:sort(Init0), [_|Data=[First|_]] = lists:foldl(fun({stats, _, T0, _, _}, [Prev|Acc]) -> @@ -258,25 +263,46 @@ collect_data(?RQ_W, {N, Q}) -> Delta = calc_delta(TN, Prev), [TN, list_to_tuple(Delta)|Acc] end, [Init], Data0), - {N, lmax(Data), lists:reverse([First|Data])} + NoGraphs = tuple_size(First), + {N, lmax(Data), lists:reverse([First|Data]), lists:seq(1, NoGraphs)} end; -collect_data(?MEM_W, {N, Q}) -> +collect_data(memory, {N, Q}) -> MemT = mem_types(), Data = [list_to_tuple([Value || {Type,Value} <- MemInfo, lists:member(Type, MemT)]) || {stats, _Ver, _RQ, _IO, MemInfo} <- queue:to_list(Q)], - {N, lmax(Data), Data}; -collect_data(?IO_W, {N, Q}) -> + {N, lmax(Data), Data, MemT}; +collect_data(io, {N, Q}) -> case queue:to_list(Q) of - [] -> {0, 0, []}; - [_] -> {0, 0, []}; + [] -> {0, 0, [], []}; + [_] -> {0, 0, [], []}; [{stats, _Ver, _RQ, {{_,In0}, {_,Out0}}, _Mem}|Data0] -> [_,_|Data=[First|_]] = lists:foldl(fun({stats, _, _, {{_,In}, {_,Out}}, _}, [PIn,Pout|Acc]) -> [In,Out,{In-PIn,Out-Pout}|Acc] end, [In0,Out0], Data0), - {N, lmax(Data), lists:reverse([First|Data])} - end. + {N, lmax(Data), lists:reverse([First|Data]), [input, output]} + end; +collect_data(alloc, {N, Q}) -> + List = queue:to_list(Q), + Data = [list_to_tuple([Carrier || {_Type,_Block,Carrier} <- MemInfo]) + || MemInfo <- List], + Info = case List of %% Varies depending on erlang build config/platform + [MInfo|_] -> [Type || {Type, _, _} <- MInfo]; + _ -> [] + end, + {N, lmax(Data), Data, Info}; + +collect_data(utilz, {N, Q}) -> + List = queue:to_list(Q), + Data = [list_to_tuple([round(100*Block/Carrier) || {_Type,Block,Carrier} <- MemInfo]) + || MemInfo <- List], + Info = case List of %% Varies depending on erlang build config/platform + [MInfo|_] -> [Type || {Type, _, _} <- MInfo]; + _ -> [] + end, + {N, lmax(Data), Data, Info}. + mem_types() -> [total, processes, atom, binary, code, ets]. @@ -299,14 +325,14 @@ draw(Offset, Id, DC, Panel, Paint=#paint{pens=Pens, small=Small}, Data, Active) %% This can be optimized a lot by collecting data once %% and draw to memory and then blit memory and only draw new entries in new memory %% area. Hmm now rewritten to use ?wxGC I don't now if it is feasable. 
- {Len, Max0, Hs} = collect_data(Id, Data), - Max = calc_max(Max0), - NoGraphs = try tuple_size(hd(Hs)) catch _:_ -> 0 end, + {Len, Max0, Hs, Info} = collect_data(Id, Data), + {Max,_,_} = MaxDisp = calc_max(Id, Max0), Size = wxWindow:getClientSize(Panel), - {X0,Y0,WS,HS} = draw_borders(Id, NoGraphs, DC, Size, Max, Paint), + {X0,Y0,WS,HS, DrawBs} = draw_borders(Id, Info, DC, Size, MaxDisp, Paint), Last = 60*WS+X0-1, Start = max(61-Len, 0)*WS+X0 - Offset*WS, Samples = length(Hs), + NoGraphs = try tuple_size(hd(Hs)) catch _:_ -> 0 end, case Active andalso Samples > 1 andalso NoGraphs > 0 of true -> Draw = fun(N) -> @@ -315,14 +341,16 @@ draw(Offset, Id, DC, Panel, Paint=#paint{pens=Pens, small=Small}, Data, Active) strokeLines(DC, Lines), N+1 end, - [Draw(I) || I <- lists:seq(NoGraphs, 1, -1)]; + [Draw(I) || I <- lists:seq(NoGraphs, 1, -1)], + DrawBs(); false -> - Info = case Active andalso Samples =< 1 of - true -> "Waiting on data"; + DrawBs(), + Text = case Active andalso Samples =< 1 of + true -> "Waiting for data"; false -> "Information not available" end, setFont(DC, Small, {0,0,0}), - drawText(DC, Info, X0 + 100, element(2,Size) div 2) + drawText(DC, Text, X0 + 100, element(2,Size) div 2) end, ok. @@ -397,9 +425,8 @@ spline_tan(Y0, Y1, Y2, Y3) -> -define(BW, 5). -define(BH, 5). -draw_borders(Type, NoGraphs, DC, {W,H}, Max, +draw_borders(Type, Info, DC, {W,H}, {Max, Unit, MaxUnit}, #paint{pen=Pen, pen2=Pen2, font=Font, small=Small}) -> - {Unit, MaxUnit} = bytes(Type, Max), Str1 = observer_lib:to_str(MaxUnit), Str2 = observer_lib:to_str(MaxUnit div 2), Str3 = observer_lib:to_str(0), @@ -410,10 +437,10 @@ draw_borders(Type, NoGraphs, DC, {W,H}, Max, GraphX0 = ?BW+TW+?BW, GraphX1 = W-?BW*4, - TopTextX = ?BW+TW+?BW, - MaxTextY = ?BH+TH+?BH, + TopTextX = ?BW*3+TW, + MaxTextY = TH+?BH, BottomTextY = H-?BH-TH, - SecondsY = BottomTextY - ?BH - TH, + SecondsY = BottomTextY - TH, GraphY0 = MaxTextY + (TH / 2), GraphY1 = SecondsY - ?BH, GraphW = GraphX1-GraphX0-1, @@ -447,17 +474,7 @@ draw_borders(Type, NoGraphs, DC, {W,H}, Max, strokeLine(DC, GraphX0-3, GraphY50, GraphX1, GraphY50), strokeLine(DC, GraphX0-3, GraphY75, GraphX1, GraphY75), - setPen(DC, Pen2), - strokeLines(DC, [{GraphX0, GraphY0-1}, {GraphX0, GraphY1+1}, - {GraphX1, GraphY1+1}, {GraphX1, GraphY0-1}, - {GraphX0, GraphY0-1}]), - setFont(DC, Font, {0,0,0}), - case Type of - ?RQ_W -> drawText(DC, "Scheduler Utilization (%) ", TopTextX,?BH); - ?MEM_W -> drawText(DC, "Memory Usage " ++ Unit, TopTextX,?BH); - ?IO_W -> drawText(DC, "IO Usage " ++ Unit, TopTextX,?BH) - end, Text = fun(X,Y, Str, PenId) -> if PenId == 0 -> @@ -468,32 +485,65 @@ draw_borders(Type, NoGraphs, DC, {W,H}, Max, end, drawText(DC, Str, X, Y), {StrW, _} = getTextExtent(DC, Str), - StrW + X + SpaceW + StrW + X + ?BW*2 end, + case Type of - ?RQ_W -> - TN0 = Text(?BW, BottomTextY, "Scheduler: ", 0), + runq -> + drawText(DC, "Scheduler Utilization (%) ", TopTextX, ?BH), + TN0 = Text(TopTextX, BottomTextY, "Scheduler: ", 0), lists:foldl(fun(Id, Pos0) -> Text(Pos0, BottomTextY, integer_to_list(Id), Id) - end, TN0, lists:seq(1, NoGraphs)); - ?MEM_W -> + end, TN0, Info); + memory -> + drawText(DC, "Memory Usage " ++ Unit, TopTextX,?BH), + lists:foldl(fun(MType, {PenId, Pos0}) -> + Str = to_string(MType), + Pos = Text(Pos0, BottomTextY, Str, PenId), + {PenId+1, Pos} + end, {1, TopTextX}, Info); + io -> + drawText(DC, "IO Usage " ++ Unit, TopTextX,?BH), + lists:foldl(fun(MType, {PenId, Pos0}) -> + Str = to_string(MType), + Pos = Text(Pos0, BottomTextY, Str, PenId), + {PenId+1, 
Pos} + end, {1, TopTextX}, Info); + alloc -> + drawText(DC, "Carrier Size " ++ Unit, TopTextX,?BH); + utilz -> + drawText(DC, "Carrier Utilization (%)" ++ Unit, TopTextX,?BH), lists:foldl(fun(MType, {PenId, Pos0}) -> - Str = uppercase(atom_to_list(MType)), + Str = to_string(MType), Pos = Text(Pos0, BottomTextY, Str, PenId), {PenId+1, Pos} - end, {1, ?BW}, mem_types()); - ?IO_W -> - TN0 = Text(?BW, BottomTextY, "Input", 1), - Text(TN0, BottomTextY, "Output", 2) + end, {1, TopTextX}, Info) end, - {GraphX0+1, GraphY1, ScaleW, ScaleH}. + DrawBorder = fun() -> + setPen(DC, Pen2), + strokeLines(DC, [{GraphX0, GraphY0-1}, {GraphX0, GraphY1+1}, + {GraphX1, GraphY1+1}, {GraphX1, GraphY0-1}, + {GraphX0, GraphY0-1}]) + end, + {GraphX0+1, GraphY1, ScaleW, ScaleH, DrawBorder}. + +to_string(Atom) -> + Name = atom_to_list(Atom), + case lists:reverse(Name) of + "colla_" ++ Rev -> + uppercase(lists:reverse(Rev)); + _ -> + uppercase(Name) + end. uppercase([C|Rest]) -> [C-$a+$A|Rest]. -calc_max(Max) when Max < 10 -> 10; -calc_max(Max) -> calc_max1(Max). +calc_max(Type, Max) -> + bytes(Type, Max). +calc_max1(Max) when Max < 10 -> + 10; calc_max1(Max) -> case Max div 10 of X when X < 10 -> @@ -506,23 +556,36 @@ calc_max1(Max) -> 10*calc_max1(X) end. -bytes(?RQ_W, Val) -> {"", Val}; +bytes(runq, Val) -> + Upper = calc_max1(Val), + {Upper, "", Upper}; +bytes(utilz, Val) -> + Upper = calc_max1(Val), + {Upper, "", Upper}; bytes(_, B) -> KB = B div 1024, MB = KB div 1024, GB = MB div 1024, if - GB > 10 -> {"(GB)", GB}; - MB > 10 -> {"(MB)", MB}; - KB > 0 -> {"(KB)", KB}; - true -> {"(B)", B} + GB > 10 -> + Upper = calc_max1(GB), + {Upper*1024*1024*1024, "(GB)", Upper}; + MB > 10 -> + Upper = calc_max1(MB), + {Upper*1024*1024, "(MB)", Upper}; + KB > 0 -> + Upper = calc_max1(KB), + {Upper*1024, "(KB)", Upper}; + true -> + Upper = calc_max1(B), + {Upper, "(B)", Upper} end. colors() -> - {{200, 50, 50}, {50, 200, 50}, {50, 50, 200}, - {255, 110, 0}, {50, 200, 200}, {200, 50, 200}, - {240, 200, 80}, {140, 2, 140}, - {100, 200, 240}, {100, 240, 100} + {{240, 100, 100}, {100, 240, 100}, {100, 100, 240}, + {220, 220, 80}, {100, 240, 240}, {240, 100, 240}, + {100, 25, 25}, {25, 100, 25}, {25, 25, 100}, + {120, 120, 0}, {25, 100, 100}, {100, 50, 100} }. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/lib/observer/src/observer_sys_wx.erl b/lib/observer/src/observer_sys_wx.erl index f989f9cf97..ea89590e84 100644 --- a/lib/observer/src/observer_sys_wx.erl +++ b/lib/observer/src/observer_sys_wx.erl @@ -37,7 +37,6 @@ parent_notebook, panel, sizer, menubar, - alloc, fields, timer}). 
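The reworked axis-scaling helpers in observer_perf_wx above now return the axis maximum in bytes together with the display unit and the maximum expressed in that unit. Hand-evaluating them with two arbitrary sample values shows the intent (worked examples, not calls appearing in the patch):

    %% bytes(memory, 2048): 2048 bytes is 2 KB, so the KB branch is taken and
    %% calc_max1(2) rounds the bound up to 10 -> {10240, "(KB)", 10},
    %% i.e. the graph is drawn against a 0..10 KB scale.
    %%
    %% bytes(runq, 7): run-queue lengths are unit-less, so only the upper
    %% bound is rounded -> {10, "", 10}.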
@@ -48,7 +47,6 @@ start_link(Notebook, Parent) -> init([Notebook, Parent]) -> SysInfo = observer_backend:sys_info(), - AllocInfo = proplists:get_value(alloc_info, SysInfo, []), {Info, Stat} = info_fields(), Panel = wxPanel:new(Notebook), Sizer = wxBoxSizer:new(?wxVERTICAL), @@ -60,16 +58,13 @@ init([Notebook, Parent]) -> wxSizer:add(TopSizer, FPanel0, [{flag, ?wxEXPAND}, {proportion, 1}]), wxSizer:add(TopSizer, FPanel1, [{flag, ?wxEXPAND}, {proportion, 1}]), BorderFlags = ?wxLEFT bor ?wxRIGHT, - {MemPanel, MemoryInfo} = create_mem_info(Panel, AllocInfo), wxSizer:add(Sizer, TopSizer, [{flag, ?wxEXPAND bor BorderFlags bor ?wxTOP}, {proportion, 0}, {border, 5}]), - wxSizer:add(Sizer, MemPanel, [{flag, ?wxEXPAND bor BorderFlags bor ?wxBOTTOM}, - {proportion, 1}, {border, 5}]), wxPanel:setSizer(Panel, Sizer), Timer = observer_lib:start_timer(10), {Panel, #sys_wx_state{parent=Parent, parent_notebook=Notebook, - panel=Panel, sizer=Sizer, alloc=MemoryInfo, + panel=Panel, sizer=Sizer, timer=Timer, fields=Fields0 ++ Fields1}}. create_sys_menu(Parent) -> @@ -77,91 +72,13 @@ create_sys_menu(Parent) -> #create_menu{id = ?ID_REFRESH_INTERVAL, text = "Refresh interval"}]}, observer_wx:create_menus(Parent, [View]). -update_syspage(#sys_wx_state{node = Node, fields=Fields, sizer=Sizer, alloc=AllocCtrl}) -> +update_syspage(#sys_wx_state{node = Node, fields=Fields, sizer=Sizer}) -> SysInfo = observer_wx:try_rpc(Node, observer_backend, sys_info, []), - AllocInfo = proplists:get_value(alloc_info, SysInfo, []), {Info, Stat} = info_fields(), observer_lib:update_info(Fields, observer_lib:fill_info(Info, SysInfo) ++ observer_lib:fill_info(Stat, SysInfo)), - update_alloc(AllocCtrl, AllocInfo), wxSizer:layout(Sizer). -create_mem_info(Parent, Fields) -> - Panel = wxPanel:new(Parent), - wxWindow:setBackgroundColour(Panel, {255,255,255}), - Style = ?wxLC_REPORT bor ?wxLC_SINGLE_SEL bor ?wxLC_HRULES bor ?wxLC_VRULES, - Grid = wxListCtrl:new(Panel, [{style, Style}]), - Li = wxListItem:new(), - AddListEntry = fun({Name, Align, DefSize}, Col) -> - wxListItem:setText(Li, Name), - wxListItem:setAlign(Li, Align), - wxListCtrl:insertColumn(Grid, Col, Li), - wxListCtrl:setColumnWidth(Grid, Col, DefSize), - Col + 1 - end, - ListItems = [{"Allocator Type", ?wxLIST_FORMAT_LEFT, 200}, - {"Block size (kB)", ?wxLIST_FORMAT_RIGHT, 150}, - {"Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150}], - lists:foldl(AddListEntry, 0, ListItems), - wxListItem:destroy(Li), - update_alloc(Grid, Fields), - - Sizer = wxBoxSizer:new(?wxVERTICAL), - wxSizer:add(Sizer, Grid, [{flag, ?wxEXPAND bor ?wxLEFT bor ?wxRIGHT}, - {border, 5}, {proportion, 1}]), - wxWindow:setSizerAndFit(Panel, Sizer), - {Panel, Grid}. - -update_alloc(Grid, AllocInfo) -> - Fields = alloc_info(AllocInfo, [], 0, 0, true), - wxListCtrl:deleteAllItems(Grid), - Update = fun({Name, BS, CS}, Row) -> - wxListCtrl:insertItem(Grid, Row, ""), - wxListCtrl:setItem(Grid, Row, 0, observer_lib:to_str(Name)), - wxListCtrl:setItem(Grid, Row, 1, observer_lib:to_str(BS div 1024)), - wxListCtrl:setItem(Grid, Row, 2, observer_lib:to_str(CS div 1024)), - Row + 1 - end, - lists:foldl(Update, 0, Fields), - Fields. 
- -alloc_info([{Type,Instances}|Allocators],TypeAcc,TotalBS,TotalCS,IncludeTotal) -> - {BS,CS,NewTotalBS,NewTotalCS,NewIncludeTotal} = - sum_alloc_instances(Instances,0,0,TotalBS,TotalCS), - alloc_info(Allocators,[{Type,BS,CS}|TypeAcc],NewTotalBS,NewTotalCS, - IncludeTotal andalso NewIncludeTotal); -alloc_info([],TypeAcc,TotalBS,TotalCS,IncludeTotal) -> - Types = [X || X={_,BS,CS} <- TypeAcc, (BS>0 orelse CS>0)], - case IncludeTotal of - true -> - [{total,TotalBS,TotalCS} | lists:reverse(Types)]; - false -> - lists:reverse(Types) - end. - -sum_alloc_instances(false,BS,CS,TotalBS,TotalCS) -> - {BS,CS,TotalBS,TotalCS,false}; -sum_alloc_instances([{_,_,Data}|Instances],BS,CS,TotalBS,TotalCS) -> - {NewBS,NewCS,NewTotalBS,NewTotalCS} = - sum_alloc_one_instance(Data,BS,CS,TotalBS,TotalCS), - sum_alloc_instances(Instances,NewBS,NewCS,NewTotalBS,NewTotalCS); -sum_alloc_instances([],BS,CS,TotalBS,TotalCS) -> - {BS,CS,TotalBS,TotalCS,true}. - -sum_alloc_one_instance([{sbmbcs,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}| - Rest],OldBS,OldCS,TotalBS,TotalCS) -> - sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS,TotalCS); -sum_alloc_one_instance([{_,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}| - Rest],OldBS,OldCS,TotalBS,TotalCS) -> - sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS); -sum_alloc_one_instance([{_,[{blocks_size,BS},{carriers_size,CS}]}| - Rest],OldBS,OldCS,TotalBS,TotalCS) -> - sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS); -sum_alloc_one_instance([_|Rest],BS,CS,TotalBS,TotalCS) -> - sum_alloc_one_instance(Rest,BS,CS,TotalBS,TotalCS); -sum_alloc_one_instance([],BS,CS,TotalBS,TotalCS) -> - {BS,CS,TotalBS,TotalCS}. - info_fields() -> Info = [{"System and Architecture", [{"System Version", otp_release}, diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl index 54c4092a78..cf602569aa 100644 --- a/lib/observer/src/observer_wx.erl +++ b/lib/observer/src/observer_wx.erl @@ -43,6 +43,7 @@ -define(LAST_NODES_MENU_ID, 2000). -define(TRACE_STR, "Trace Overview"). +-define(ALLOC_STR, "Memory Allocators"). %% Records -record(state, @@ -58,6 +59,7 @@ trace_panel, app_panel, perf_panel, + allc_panel, active_tab, node, nodes, @@ -149,6 +151,10 @@ setup(#state{frame = Frame} = State) -> PerfPanel = observer_perf_wx:start_link(Notebook, self()), wxNotebook:addPage(Notebook, PerfPanel, "Load Charts", []), + %% Memory Allocator Viewer Panel + AllcPanel = observer_alloc_wx:start_link(Notebook, self()), + wxNotebook:addPage(Notebook, AllcPanel, ?ALLOC_STR, []), + %% App Viewer Panel AppPanel = observer_app_wx:start_link(Notebook, self()), wxNotebook:addPage(Notebook, AppPanel, "Applications", []), @@ -184,6 +190,7 @@ setup(#state{frame = Frame} = State) -> trace_panel = TracePanel, app_panel = AppPanel, perf_panel = PerfPanel, + allc_panel = AllcPanel, active_tab = SysPid, node = node(), nodes = Nodes @@ -505,7 +512,7 @@ check_page_title(Notebook) -> get_active_pid(#state{notebook=Notebook, pro_panel=Pro, sys_panel=Sys, tv_panel=Tv, trace_panel=Trace, app_panel=App, - perf_panel=Perf + perf_panel=Perf, allc_panel=Alloc }) -> Panel = case check_page_title(Notebook) of "Processes" -> Pro; @@ -513,13 +520,14 @@ get_active_pid(#state{notebook=Notebook, pro_panel=Pro, sys_panel=Sys, "Table Viewer" -> Tv; ?TRACE_STR -> Trace; "Load Charts" -> Perf; - "Applications" -> App + "Applications" -> App; + ?ALLOC_STR -> Alloc end, wx_object:get_pid(Panel). 
pid2panel(Pid, #state{pro_panel=Pro, sys_panel=Sys, tv_panel=Tv, trace_panel=Trace, app_panel=App, - perf_panel=Perf}) -> + perf_panel=Perf, allc_panel=Alloc}) -> case Pid of Pro -> "Processes"; Sys -> "System"; @@ -527,6 +535,7 @@ pid2panel(Pid, #state{pro_panel=Pro, sys_panel=Sys, Trace -> ?TRACE_STR; Perf -> "Load Charts"; App -> "Applications"; + Alloc -> ?ALLOC_STR; _ -> "unknown" end. @@ -617,8 +626,8 @@ default_menus(NodesMenuItems) -> %% automagicly, so just add them to a menu that always exist. %% But not to the help menu for some reason - {Tag, Menus} = NodeMenu, - [{Tag, Menus ++ [Quit,About]}, LogMenu, {"&Help", [Help]}] + {Tag, Menus} = FileMenu, + [{Tag, Menus ++ [Quit,About]}, NodeMenu, LogMenu, {"&Help", [Help]}] end. clean_menus(Menus, MenuBar) -> diff --git a/lib/observer/vsn.mk b/lib/observer/vsn.mk index c8a6023b4f..10ed3bdfe5 100644 --- a/lib/observer/vsn.mk +++ b/lib/observer/vsn.mk @@ -1 +1 @@ -OBSERVER_VSN = 2.0.3 +OBSERVER_VSN = 2.0.4 diff --git a/lib/os_mon/doc/src/notes.xml b/lib/os_mon/doc/src/notes.xml index 6bc0cf7d43..d3acc1effc 100644 --- a/lib/os_mon/doc/src/notes.xml +++ b/lib/os_mon/doc/src/notes.xml @@ -30,6 +30,34 @@ </header> <p>This document describes the changes made to the OS_Mon application.</p> +<section><title>Os_Mon 2.3.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Do not crash with badmatch when integer part of loadavg + has more than 2 digits.</p> + <p> + Own Id: OTP-12581</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Fix compilation of memsup on OpenBSD.</p> + <p> + Own Id: OTP-12404</p> + </item> + </list> + </section> + +</section> + <section><title>Os_Mon 2.3</title> <section><title>Improvements and New Features</title> diff --git a/lib/os_mon/vsn.mk b/lib/os_mon/vsn.mk index f90cc306f0..833e855e0e 100644 --- a/lib/os_mon/vsn.mk +++ b/lib/os_mon/vsn.mk @@ -1 +1 @@ -OS_MON_VSN = 2.3 +OS_MON_VSN = 2.3.1 diff --git a/lib/public_key/doc/src/notes.xml b/lib/public_key/doc/src/notes.xml index fe4bf5ce2d..f241a91eb0 100644 --- a/lib/public_key/doc/src/notes.xml +++ b/lib/public_key/doc/src/notes.xml @@ -34,6 +34,21 @@ <file>notes.xml</file> </header> +<section><title>Public_Key 0.23</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Improve/extend support for CRL handling.</p> + <p> + Own Id: OTP-12547 Aux Id: OTP-10362 </p> + </item> + </list> + </section> + +</section> + <section><title>Public_Key 0.22.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/public_key/vsn.mk b/lib/public_key/vsn.mk index 2fa2d725c3..16794660a5 100644 --- a/lib/public_key/vsn.mk +++ b/lib/public_key/vsn.mk @@ -1 +1 @@ -PUBLIC_KEY_VSN = 0.22.1 +PUBLIC_KEY_VSN = 0.23 diff --git a/lib/runtime_tools/doc/src/notes.xml b/lib/runtime_tools/doc/src/notes.xml index 2877355718..1612c62c98 100644 --- a/lib/runtime_tools/doc/src/notes.xml +++ b/lib/runtime_tools/doc/src/notes.xml @@ -31,6 +31,22 @@ <p>This document describes the changes made to the Runtime_Tools application.</p> +<section><title>Runtime_Tools 1.8.16</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + The trace process started by <c>dbg</c> would not always + terminate when <c>dbg:stop/0</c> was called.</p> + <p> + Own Id: OTP-12517</p> + </item> + </list> + </section> + +</section> + <section><title>Runtime_Tools 1.8.15</title> <section><title>Fixed Bugs and Malfunctions</title> diff 
--git a/lib/runtime_tools/vsn.mk b/lib/runtime_tools/vsn.mk index c1df23d2a2..e9f43df1aa 100644 --- a/lib/runtime_tools/vsn.mk +++ b/lib/runtime_tools/vsn.mk @@ -1 +1 @@ -RUNTIME_TOOLS_VSN = 1.8.15 +RUNTIME_TOOLS_VSN = 1.8.16 diff --git a/lib/ssh/doc/src/notes.xml b/lib/ssh/doc/src/notes.xml index 3aa61aa9ec..f22bca36f4 100644 --- a/lib/ssh/doc/src/notes.xml +++ b/lib/ssh/doc/src/notes.xml @@ -29,6 +29,50 @@ <file>notes.xml</file> </header> +<section><title>Ssh 3.2</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + If a channel is closed by the peer while using a function + with call semantics in ssh_connection.erl return {error, + closed}. Document that the functions can return {error, + timeout | closed} and not only ssh_request_status()</p> + <p> + Own Id: OTP-12004</p> + </item> + <item> + <p> + Bug that causes ssh:connect to return + <c>{error,int()}</c> instead of <c>{error,timeout}</c> + when ssh handshake takes too long time.</p> + <p> + Own Id: OTP-12369</p> + </item> + <item> + <p> + Documentation corrections. (Thanks to Rabbe Fogelholm)</p> + <p> + Own Id: OTP-12399</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Example of ssh_connection:exec added.</p> + <p> + Own Id: OTP-12558</p> + </item> + </list> + </section> + +</section> + <section><title>Ssh 3.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/ssh/vsn.mk b/lib/ssh/vsn.mk index bfebe2c60b..0d90278977 100644 --- a/lib/ssh/vsn.mk +++ b/lib/ssh/vsn.mk @@ -1,5 +1,5 @@ #-*-makefile-*- ; force emacs to enter makefile-mode -SSH_VSN = 3.1.1 +SSH_VSN = 3.2 APP_VSN = "ssh-$(SSH_VSN)" diff --git a/lib/ssl/doc/src/notes.xml b/lib/ssl/doc/src/notes.xml index 4349e5a456..352563700b 100644 --- a/lib/ssl/doc/src/notes.xml +++ b/lib/ssl/doc/src/notes.xml @@ -25,7 +25,80 @@ <file>notes.xml</file> </header> <p>This document describes the changes made to the SSL application.</p> - <section><title>SSL 5.3.8</title> + <section><title>SSL 6.0</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Exclude self-signed trusted anchor certificates from + certificate prospective certification path according to + RFC 3280.</p> + <p> + This will avoid some unnecessary certificate processing.</p> + <p> + Own Id: OTP-12449</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Separate client and server session cache internally.</p> + <p> + Avoid session table growth when client starts many + connections in such a manner that many connections are + started before session reuse is possible. Only save a new + session in client if there is no equivalent session + already stored.</p> + <p> + Own Id: OTP-11365</p> + </item> + <item> + <p> + The PEM cache is now validated by a background process, + instead of always keeping it if it is small enough and + clearing it otherwise. That strategy required that small + caches where cleared by API function if a file changes on + disk.</p> + <p> + However export the API function to clear the cache as it + may still be useful.</p> + <p> + Own Id: OTP-12391</p> + </item> + <item> + <p> + Add padding check for TLS-1.0 to remove Poodle + vulnerability from TLS 1.0, also add the option + padding_check. 
This option only affects TLS-1.0 + connections and if set to false it disables the block + cipher padding check to be able to interoperate with + legacy software.</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-12420</p> + </item> + <item> + <p> + Add support for TLS_FALLBACK_SCSV used to prevent + undesired TLS version downgrades. If used by a client + that is vulnerable to the POODLE attack, and the server + also supports TLS_FALLBACK_SCSV, the attack can be + prevented.</p> + <p> + Own Id: OTP-12458</p> + </item> + </list> + </section> + +</section> + +<section><title>SSL 5.3.8</title> <section><title>Fixed Bugs and Malfunctions</title> <list> diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml index 47b0dbc206..d070cb4019 100644 --- a/lib/ssl/doc/src/ssl.xml +++ b/lib/ssl/doc/src/ssl.xml @@ -21,245 +21,273 @@ </legalnotice> <title>ssl</title> + <prepared></prepared> + <docno></docno> + <date></date> + <rev></rev> <file>ssl.xml</file> </header> <module>ssl</module> <modulesummary>Interface Functions for Secure Socket Layer</modulesummary> <description> - <p>This module contains interface functions to the Secure Socket - Layer. - </p> + <p>This module contains interface functions for the SSL.</p> </description> <section> <title>SSL</title> <list type="bulleted"> - <item>ssl requires the crypto and public_key applications.</item> + <item><c>ssl</c> requires the <c>crypto</c> and <c>public_key</c> + applications.</item> <item>Supported SSL/TLS-versions are SSL-3.0, TLS-1.0, - TLS-1.1 and TLS-1.2.</item> + TLS-1.1, and TLS-1.2.</item> <item>For security reasons SSL-2.0 is not supported.</item> <item>For security reasons SSL-3.0 is no longer supported by default, - but may be configured.</item> - <item>Ephemeral Diffie-Hellman cipher suites are supported + but can be configured.</item> + <item>Ephemeral Diffie-Hellman cipher suites are supported, but not Diffie Hellman Certificates cipher suites.</item> - <item>Elliptic Curve cipher suites are supported if crypto - supports it and named curves are used. + <item>Elliptic Curve cipher suites are supported if the <c>crypto</c> + application supports it and named curves are used. </item> <item>Export cipher suites are not supported as the U.S. lifted its export restrictions in early 2000.</item> <item>IDEA cipher suites are not supported as they have - become deprecated by the latest TLS spec so there is not any - real motivation to implement them.</item> + become deprecated by the latest TLS specification so it is not + motivated to implement them.</item> <item>CRL validation is supported.</item> - <item>Policy certificate extensions are not supported - yet. </item> - <item>Support for 'Server Name Indication' extension client side - (RFC 6066 section 3).</item> + <item>Policy certificate extensions are not supported.</item> + <item>'Server Name Indication' extension client side + (RFC 6066, Section 3) is supported.</item> </list> </section> <section> - <title>COMMON DATA TYPES</title> - <p>The following data types are used in the functions below: - </p> + <title>DATA TYPES</title> + <p>The following data types are used in the functions for <c>ssl</c>:</p> - <p><c>boolean() = true | false</c></p> + <taglist> - <p><c>option() = socketoption() | ssloption() | transportoption()</c></p> + <tag><c>boolean()</c></tag> + <item><p><c>= true | false</c></p></item> - <p><c>socketoption() = proplists:property() - The default socket options are - [{mode,list},{packet, 0},{header, 0},{active, true}]. 
- </c></p> + <tag><c>option()</c></tag> + <item><p><c>= socketoption() | ssloption() | transportoption()</c></p> + </item> - <p>For valid options - see <seealso marker="kernel:inet">inet(3)</seealso> and - <seealso marker="kernel:gen_tcp">gen_tcp(3)</seealso>. - </p> - - <p><marker id="type-ssloption"></marker><c>ssloption() = {verify, verify_type()} | - {verify_fun, {fun(), term()}} | - {fail_if_no_peer_cert, boolean()} - {depth, integer()} | - {cert, der_encoded()}| {certfile, path()} | - {key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo', der_encoded()}} | - {keyfile, path()} | {password, string()} | - {cacerts, [der_encoded()]} | {cacertfile, path()} | - |{dh, der_encoded()} | {dhfile, path()} | {ciphers, ciphers()} | - {user_lookup_fun, {fun(), term()}}, {psk_identity, string()}, {srp_identity, {string(), string()}} | - {ssl_imp, ssl_imp()} | {reuse_sessions, boolean()} | {reuse_session, fun()} - {alpn_advertised_protocols, [binary()]} | - {alpn_preferred_protocols, [binary()]} | - {next_protocols_advertised, [binary()]} | - {client_preferred_next_protocols, {client | server, [binary()]} | {client | server, [binary()], binary()}} | - {log_alert, boolean()} | {server_name_indication, hostname() | disable} - </c></p> - - <p><c>transportoption() = {cb_info, {CallbackModule :: atom(), DataTag :: atom(), ClosedTag :: atom(), ErrTag:atom()}} - - defaults to {gen_tcp, tcp, tcp_closed, tcp_error}. Can be used to customize - the transport layer. The callback module must implement a reliable transport - protocol and behave as gen_tcp and in addition have functions corresponding to - inet:setopts/2, inet:getopts/2, inet:peername/1, inet:sockname/1 and inet:port/1. - The callback gen_tcp is treated specially and will call inet directly. 
- </c></p> - - <p><c> CallbackModule = - atom()</c> - </p> <p><c> DataTag = - atom() - tag used in socket data message.</c></p> - <p><c> ClosedTag = atom() - tag used in - socket close message.</c></p> - - <p><c>verify_type() = verify_none | verify_peer</c></p> - - <p><c>path() = string() - representing a file path.</c></p> + <tag><c>socketoption()</c></tag> + <item><p><c>= proplists:property()</c></p> + <p>The default socket options are + <c>[{mode,list},{packet, 0},{header, 0},{active, true}]</c>.</p> + <p>For valid options, see the + <seealso marker="kernel:inet">inet(3)</seealso> and + <seealso marker="kernel:gen_tcp">gen_tcp(3)</seealso> manual pages + in <c>kernel</c>.</p></item> + + <tag><c>ssloption()</c></tag> + <item><p><c>= {verify, verify_type()}</c></p> + <p><c>| {verify_fun, {fun(), term()}}</c></p> + <p><c>| {fail_if_no_peer_cert, boolean()} {depth, integer()}</c></p> + <p><c>| {cert, public_key:der_encoded()}</c></p> + <p><c>| {certfile, path()}</c></p> + <p><c>| {key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' + | 'PrivateKeyInfo', public_key:der_encoded()}}</c></p> + <p><c>| {keyfile, path()}</c></p> + <p><c>| {password, string()}</c></p> + <p><c>| {cacerts, [public_key:der_encoded()]}</c></p> + <p><c>| {cacertfile, path()}</c></p> + <p><c>| {dh, public_key:der_encoded()}</c></p> + <p><c>| {dhfile, path()}</c></p> + <p><c>| {ciphers, ciphers()}</c></p> + <p><c>| {user_lookup_fun, {fun(), term()}}, {psk_identity, string()}, + {srp_identity, {string(), string()}}</c></p> + <p><c>| {reuse_sessions, boolean()}</c></p> + <p><c>| {reuse_session, fun()} {next_protocols_advertised, [binary()]}</c></p> + <p><c>| {client_preferred_next_protocols, {client | server, + [binary()]} | {client | server, [binary()], binary()}}</c></p> + <p><c>| {log_alert, boolean()}</c></p> + <p><c>| {server_name_indication, hostname() | disable}</c></p></item> + + <tag><c>transportoption()</c></tag> + <item><p><c>= {cb_info, {CallbackModule::atom(), DataTag::atom(), + ClosedTag::atom(), ErrTag:atom()}}</c></p> + <p>Defaults to <c>{gen_tcp, tcp, tcp_closed, tcp_error}</c>. Can be used + to customize the transport layer. The callback module must implement a + reliable transport protocol, behave as <c>gen_tcp</c>, and have functions + corresponding to <c>inet:setopts/2</c>, <c>inet:getopts/2</c>, + <c>inet:peername/1</c>, <c>inet:sockname/1</c>, and <c>inet:port/1</c>. + The callback <c>gen_tcp</c> is treated specially and calls <c>inet</c> + directly.</p> + <taglist> + <tag><c>CallbackModule</c></tag> + <item><p><c>= atom()</c></p></item> + <tag><c>DataTag</c></tag> + <item><p><c>= atom()</c></p> + <p>Used in socket data message.</p></item> + <tag><c>ClosedTag</c></tag> + <item><p><c>= atom()</c></p> + <p>Used in socket close message.</p></item> + </taglist> + </item> - <p><c>der_encoded() = binary() -Asn1 DER encoded entity as an erlang binary.</c></p> - - <p><c>host() = hostname() | ipaddress()</c></p> - - <p><c>hostname() = string()</c></p> - - <p><c> - ip_address() = {N1,N2,N3,N4} % IPv4 - | {K1,K2,K3,K4,K5,K6,K7,K8} % IPv6 </c></p> + <tag><c>verify_type()</c></tag> + <item><p><c>= verify_none | verify_peer</c></p></item> - <p><c>sslsocket() - opaque to the user. 
</c></p> - - <p><c>protocol() = sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2' </c></p> - - <p><c>ciphers() = [ciphersuite()] | string() (according to old API)</c></p> - - <p><c>ciphersuite() = - {key_exchange(), cipher(), hash()}</c></p> - - <p><c>key_exchange() = rsa | dhe_dss | dhe_rsa | dh_anon - | psk | dhe_psk | rsa_psk | srp_anon | srp_dss | srp_rsa - | ecdh_anon | ecdh_ecdsa | ecdhe_ecdsa | ecdh_rsa | ecdhe_rsa - </c></p> + <tag><c>path()</c></tag> + <item><p><c>= string()</c></p> + <p>Represents a file path.</p></item> + + <tag><c>public_key:der_encoded()</c></tag> + <item><p><c>= binary()</c></p> + <p>ASN.1 DER-encoded entity as an Erlang binary.</p></item> + + <tag><c>host()</c></tag> + <item><p><c>= hostname() | ipaddress()</c></p></item> + + <tag><c>hostname()</c></tag> + <item><p><c>= string()</c></p></item> + + <tag><c>ip_address()</c></tag> + <item><p><c>= {N1,N2,N3,N4} % IPv4 | {K1,K2,K3,K4,K5,K6,K7,K8} % IPv6 + </c></p></item> + + <tag><c>sslsocket()</c></tag> + <item><p>Opaque to the user.</p></item> + + <tag><c>protocol()</c></tag> + <item><p><c>= sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2'</c></p></item> + + <tag><c>ciphers()</c></tag> + <item><p><c>= [ciphersuite()] | string()</c></p> + <p>According to old API.</p></item> - <p><c>cipher() = rc4_128 | des_cbc | '3des_ede_cbc' - | aes_128_cbc | aes_256_cbc | aes_128_gcm | aes_256_gcm </c></p> + <tag><c>ciphersuite()</c></tag> + <item><p><c>= {key_exchange(), cipher(), hash()}</c></p></item> - <p> <c>hash() = md5 | sha - </c></p> + <tag><c>key_exchange()</c></tag> + <item><p><c>= rsa | dhe_dss | dhe_rsa | dh_anon | psk | dhe_psk + | rsa_psk | srp_anon | srp_dss | srp_rsa | ecdh_anon | ecdh_ecdsa + | ecdhe_ecdsa | ecdh_rsa | ecdhe_rsa</c></p></item> - <p><c>prf_random() = client_random | server_random - </c></p> + <tag><c>cipher()</c></tag> + <item><p><c>= rc4_128 | des_cbc | '3des_ede_cbc' + | aes_128_cbc | aes_256_cbc | aes_128_gcm | aes_256_gcm</c></p></item> - <p><c>srp_param_type() = srp_1024 | srp_1536 | srp_2048 | srp_3072 - | srp_4096 | srp_6144 | srp_8192</c></p> + <tag><c>hash()</c></tag> + <item><p><c>= md5 | sha</c></p></item> + <tag><c>prf_random()</c></tag> + <item><p><c>= client_random | server_random</c></p></item> + + <tag><c>srp_param_type()</c></tag> + <item><p><c>= srp_1024 | srp_1536 | srp_2048 | srp_3072 + | srp_4096 | srp_6144 | srp_8192</c></p></item> + + </taglist> </section> <section> <title>SSL OPTION DESCRIPTIONS - COMMON for SERVER and CLIENT</title> - <p>Options described here are options that are have the same - meaning in the client and the server. - </p> + <p>The following options have the same meaning in the client and + the server:</p> <taglist> - <tag>{cert, der_encoded()}</tag> - <item> The DER encoded users certificate. If this option - is supplied it will override the certfile option.</item> + <tag><c>{cert, public_key:der_encoded()}</c></tag> + <item><p>The DER-encoded users certificate. If this option + is supplied, it overrides option <c>certfile</c>.</p></item> - <tag>{certfile, path()}</tag> - <item>Path to a file containing the user's PEM encoded certificate.</item> + <tag><c>{certfile, path()}</c></tag> + <item><p>Path to a file containing the user certificate.</p></item> - <tag>{key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo', der_encoded()}}</tag> - <item> The DER encoded users private key. 
If this option - is supplied it will override the keyfile option.</item> + <tag><c>{key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' + |'PrivateKeyInfo', public_key:der_encoded()}}</c></tag> + <item><p>The DER-encoded user's private key. If this option + is supplied, it overrides option <c>keyfile</c>.</p></item> - <tag>{keyfile, path()}</tag> - <item>Path to file containing user's - private PEM encoded key. As PEM-files may contain several - entries this option defaults to the same file as given by - certfile option.</item> - - <tag>{password, string()}</tag> - <item>String containing the user's password. - Only used if the private keyfile is password protected. - </item> - - <tag>{cacerts, [der_encoded()]}</tag> - <item> The DER encoded trusted certificates. If this option - is supplied it will override the cacertfile option.</item> - - <tag>{ciphers, ciphers()}</tag> - <item>The cipher suites that should be supported. The function + <tag><c>{keyfile, path()}</c></tag> + <item><p>Path to the file containing the user's + private PEM-encoded key. As PEM-files can contain several + entries, this option defaults to the same file as given by + option <c>certfile</c>.</p></item> + + <tag><c>{password, string()}</c></tag> + <item><p>String containing the user's password. Only used if the + private keyfile is password-protected.</p></item> + + <tag><c>{ciphers, ciphers()}</c></tag> + <item><p>Supported cipher suites. The function <c>cipher_suites/0</c> can be used to find all ciphers that are - supported by default. <c>cipher_suites(all)</c> may be called - to find all available cipher suites. - Pre-Shared Key (<url href="http://www.ietf.org/rfc/rfc4279.txt">RFC 4279</url> and + supported by default. <c>cipher_suites(all)</c> can be called + to find all available cipher suites. Pre-Shared Key + (<url href="http://www.ietf.org/rfc/rfc4279.txt">RFC 4279</url> and <url href="http://www.ietf.org/rfc/rfc5487.txt">RFC 5487</url>), - Secure Remote Password (<url href="http://www.ietf.org/rfc/rfc5054.txt">RFC 5054</url>) + Secure Remote Password + (<url href="http://www.ietf.org/rfc/rfc5054.txt">RFC 5054</url>), RC4 cipher suites, and anonymous cipher suites only work if explicitly enabled by - this option and they are supported/enabled by the peer also. - Note that anonymous cipher suites are supported for testing purposes - only and should not be used when security matters. - </item> - - <tag>{ssl_imp, new | old}</tag> - <item>No longer has any meaning as the old implementation has - been removed, it will be ignored. + this option; they must also be supported/enabled by the peer. + Anonymous cipher suites are supported for testing purposes + only and are not to be used when security matters.</p></item> + + <tag><c>{secure_renegotiate, boolean()}</c></tag> + <item><p>Specifies whether to reject a renegotiation attempt that does + not live up to + <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>. + By default <c>secure_renegotiate</c> is set to <c>false</c>, + that is, secure renegotiation is used if possible, + but it falls back to insecure renegotiation if the peer + does not support + <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>.</p> </item> - <tag>{secure_renegotiate, boolean()}</tag> - <item>Specifies if to reject renegotiation attempt that does - not live up to <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>. By default secure_renegotiate is - set to false i.e.
secure renegotiation will be used if possible - but it will fallback to unsecure renegotiation if the peer - does not support <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>. - </item> + <tag><c>{depth, integer()}</c></tag> + <item><p>Maximum number of non-self-issued + intermediate certificates that can follow the peer certificate + in a valid certification path. So, if depth is 0 the PEER must + be signed by the trusted ROOT-CA directly; if 1 the path can + be PEER, CA, ROOT-CA; if 2 the path can be PEER, CA, CA, + ROOT-CA, and so on. The default value is 1.</p></item> - <tag>{depth, integer()}</tag> - <item> - The depth is the maximum number of non-self-issued - intermediate certificates that may follow the peer certificate - in a valid certification path. So if depth is 0 the PEER must - be signed by the trusted ROOT-CA directly, if 1 the path can - be PEER, CA, ROOT-CA, if it is 2 PEER, CA, CA, ROOT-CA and so - on. The default value is 1. - </item> - - <tag>{verify_fun, {Verifyfun :: fun(), InitialUserState :: term()}}</tag> - <item> - <p>The verification fun should be defined as:</p> + <tag><c>{verify_fun, {Verifyfun :: fun(), InitialUserState :: + term()}}</c></tag> + <item><p>The verification fun is to be defined as follows:</p> <code> -fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revoked, atom()}} | +fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revoked, +atom()}} | {extension, #'Extension'{}}, InitialUserState :: term()) -> {valid, UserState :: term()} | {valid_peer, UserState :: term()} | {fail, Reason :: term()} | {unknown, UserState :: term()}. </code> - <p>The verify fun will be called during the X509-path - validation when an error or an extension unknown to the ssl - application is encountered. Additionally it will be called + <p>The verification fun is called during the X509-path + validation when an error or an extension unknown to the <c>ssl</c> + application is encountered. It is also called when a certificate is considered valid by the path validation to allow access to each certificate in the path to the user - application. Note that it will differentiate between the - peer certificate and CA certificates by using valid_peer or - valid as the second argument to the verify fun. See <seealso - marker="public_key:cert_records">the public_key User's - Guide</seealso> for definition of #'OTPCertificate'{} and - #'Extension'{}.</p> - - <p>If the verify callback fun returns {fail, Reason}, the - verification process is immediately stopped and an alert is - sent to the peer and the TLS/SSL handshake is terminated. If - the verify callback fun returns {valid, UserState}, the - verification process is continued. If the verify callback fun - always returns {valid, UserState}, the TLS/SSL handshake will - not be terminated with respect to verification failures and - the connection will be established. If called with an - extension unknown to the user application, the return value - {unknown, UserState} should be used.</p> - - <p>The default verify_fun option in verify_peer mode:</p> + application. It differentiates between the peer + certificate and the CA certificates by using <c>valid_peer</c> or + <c>valid</c> as second argument to the verification fun. 
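A concrete callback of this shape, for illustration only (the host and file name are placeholders, and the policy is invented; it is not one of the defaults quoted below), could accept a self-signed peer and unknown extensions while failing every other path-validation error:

    %% Illustrative verify fun: tolerate a self-signed peer certificate and
    %% unknown extensions, fail on any other path-validation error.
    VerifyFun =
        {fun(_OtpCert, {bad_cert, selfsigned_peer}, UserState) ->
                 {valid, UserState};
            (_OtpCert, {bad_cert, _} = Reason, _UserState) ->
                 {fail, Reason};
            (_OtpCert, {extension, _}, UserState) ->
                 {unknown, UserState};
            (_OtpCert, valid, UserState) ->
                 {valid, UserState};
            (_OtpCert, valid_peer, UserState) ->
                 {valid, UserState}
         end, []},
    {ok, TLSSocket} =
        ssl:connect("host.example", 443,
                    [{verify, verify_peer},
                     {cacertfile, "cacerts.pem"},
                     {verify_fun, VerifyFun}]).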
See the + <seealso marker="public_key:cert_records">public_key User's + Guide</seealso> for definition of <c>#'OTPCertificate'{}</c> and + <c>#'Extension'{}</c>.</p> + + <list type="bulleted"> + <item><p>If the verify callback fun returns <c>{fail, Reason}</c>, + the verification process is immediately stopped, an alert is + sent to the peer, and the TLS/SSL handshake terminates.</p></item> + <item><p>If the verify callback fun returns <c>{valid, UserState}</c>, + the verification process continues.</p></item> + <item><p>If the verify callback fun always returns + <c>{valid, UserState}</c>, the TLS/SSL handshake does not + terminate regarding verification failures and the connection is + established.</p></item> + <item><p>If called with an extension unknown to the user application, + return value <c>{unknown, UserState}</c> is to be used.</p></item> + </list> + + <p>Default option <c>verify_fun</c> in <c>verify_peer mode</c>:</p> <code> {fun(_,{bad_cert, _} = Reason, _) -> @@ -273,7 +301,7 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo end, []} </code> - <p>The default verify_fun option in verify_none mode:</p> + <p>Default option <c>verify_fun</c> in mode <c>verify_none</c>:</p> <code> {fun(_,{bad_cert, _}, UserState) -> @@ -287,25 +315,28 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo end, []} </code> - <p>Possible path validation errors are given on the form {bad_cert, Reason} where Reason is:</p> + <p>The possible path validation errors are given on form + <c>{bad_cert, Reason}</c> where <c>Reason</c> is:</p> <taglist> - <tag>unknown_ca</tag> - <item>No trusted CA was found in the trusted store. The trusted CA is - normally a so called ROOT CA that is a self-signed cert. Trust may - be claimed for an intermediat CA (trusted anchor does not have to be self signed - according to X-509) by using the option <c>partial_chain</c></item> - - <tag>selfsigned_peer</tag> - <item>The chain consisted only of one self-signed certificate.</item> - - <tag>PKIX X-509-path validation error</tag> - <item> Possible such reasons see <seealso - marker="public_key:public_key#pkix_path_validation-3"> public_key:pkix_path_validation/3 </seealso></item> + <tag><c>unknown_ca</c></tag> + <item><p>No trusted CA was found in the trusted store. The trusted CA is + normally a so called ROOT CA, which is a self-signed certificate. 
Trust can + be claimed for an intermediat CA (trusted anchor does not have to be + self-signed according to X-509) by using option <c>partial_chain</c>.</p> + </item> + + <tag><c>selfsigned_peer</c></tag> + <item><p>The chain consisted only of one self-signed certificate.</p></item> + + <tag><c>PKIX X-509-path validation error</c></tag> + <item><p>For possible reasons, see <seealso +marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3</seealso> + </p></item> </taglist> </item> - <tag>{crl_check, boolean() | peer | best_effort }</tag> + <tag><c>{crl_check, boolean() | peer | best_effort }</c></tag> <item> Perform CRL (Certificate Revocation List) verification <seealso marker="public_key:public_key#pkix_crl_validate-3"> @@ -324,16 +355,16 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo <p>The CA certificates specified for the connection will be used to construct the certificate chain validating the CRLs.</p> - <p>The CRLs will be fetched from a local or external cache + <p>The CRLs will be fetched from a local or external cache see <seealso marker="ssl:ssl_crl_cache_api">ssl_crl_cache_api(3)</seealso>.</p> </item> - <tag>{crl_cache, {Module :: atom(), {DbHandle :: internal | term(), Args :: list()}}}</tag> + <tag><c>{crl_cache, {Module :: atom(), {DbHandle :: internal | term(), Args :: list()}}}</c></tag> <item> <p>Module defaults to ssl_crl_cache with <c> DbHandle </c> internal and an empty argument list. The following arguments may be specified for the internal cache.</p> <taglist> - <tag>{http, timeout()}</tag> + <tag><c>{http, timeout()}</c></tag> <item> Enables fetching of CRLs specified as http URIs in<seealso marker="public_key:cert_records"> X509 cerificate extensions.</seealso> @@ -341,32 +372,30 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo </item> </taglist> </item> - - <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca </tag> - - <item> - Claim an intermediat CA in the chain as trusted. TLS will then perform the public_key:pkix_path_validation/3 - with the selected CA as trusted anchor and the rest of the chain. - </item> - <tag>{versions, [protocol()]}</tag> - <item>TLS protocol versions that will be supported by started clients and servers. - This option overrides the application environment option <c>protocol_version</c>. If the - environment option is not set it defaults to all versions, except SSL-3.0, supported by the SSL application. See also - <seealso marker="ssl:ssl_app">ssl(6)</seealso> - </item> - - <tag>{hibernate_after, integer()|undefined}</tag> - <item>When an integer-value is specified, the <c>ssl_connection</c> - will go into hibernation after the specified number of milliseconds - of inactivity, thus reducing its memory footprint. When - <c>undefined</c> is specified (this is the default), the process - will never go into hibernation. - </item> + <tag><c>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | + unknown_ca }</c></tag> + <item><p>Claim an intermediate CA in the chain as trusted. TLS then + performs <c>public_key:pkix_path_validation/3</c> + with the selected CA as trusted anchor and the rest of the chain.</p></item> + + <tag><c>{versions, [protocol()]}</c></tag> + <item><p>TLS protocol versions supported by started clients and servers. + This option overrides the application environment option + <c>protocol_version</c>. 
If the environment option is not set, it defaults + to all versions, except SSL-3.0, supported by the <c>ssl</c> application. + See also <seealso marker="ssl:ssl_app">ssl(6).</seealso></p></item> + + <tag><c>{hibernate_after, integer()|undefined}</c></tag> + <item><p>When an integer value is specified, <c>ssl_connection</c> + goes into hibernation after the specified number of milliseconds + of inactivity, thus reducing its memory footprint. When + <c>undefined</c> is specified (this is the default), the process + never goes into hibernation.</p></item> + + <tag><c>{user_lookup_fun, {Lookupfun :: fun(), UserState :: term()}}</c></tag> + <item><p>The lookup fun is to be defined as follows:</p> - <tag>{user_lookup_fun, {Lookupfun :: fun(), UserState :: term()}}</tag> - <item> - <p>The lookup fun should be defined as:</p> <code> fun(psk, PSKIdentity ::string(), UserState :: term()) -> {ok, SharedSecret :: binary()} | error; @@ -374,61 +403,61 @@ fun(srp, Username :: string(), UserState :: term()) -> {ok, {SRPParams :: srp_param_type(), Salt :: binary(), DerivedKey :: binary()}} | error. </code> - <p>For Pre-Shared Key (PSK) cipher suites, the lookup fun will - be called by the client and server to determine the shared - secret. When called by the client, PSKIdentity will be set to the - hint presented by the server or undefined. When called by the - server, PSKIdentity is the identity presented by the client. - </p> - - <p>For Secure Remote Password (SRP), the fun will only be used by the server to obtain - parameters that it will use to generate its session keys. <c>DerivedKey</c> should be - derived according to <url href="http://tools.ietf.org/html/rfc2945#section-3"> RFC 2945</url> and - <url href="http://tools.ietf.org/html/rfc5054#section-2.4"> RFC 5054</url>: - <c>crypto:sha([Salt, crypto:sha([Username, <<$:>>, Password])]) </c> + <p>For Pre-Shared Key (PSK) cipher suites, the lookup fun is + called by the client and server to determine the shared + secret. When called by the client, <c>PSKIdentity</c> is set to the + hint presented by the server or to undefined. When called by the + server, <c>PSKIdentity</c> is the identity presented by the client.</p> + + <p>For Secure Remote Password (SRP), the fun is only used by the server to + obtain parameters that it uses to generate its session keys. + <c>DerivedKey</c> is to be derived according to + <url href="http://tools.ietf.org/html/rfc2945#section-3"> RFC 2945</url> and + <url href="http://tools.ietf.org/html/rfc5054#section-2.4"> RFC 5054</url>: + <c>crypto:sha([Salt, crypto:sha([Username, <<$:>>, Password])])</c> </p> </item> - <tag>{padding_check, boolean()}</tag> - <item> - <p> This option only affects TLS-1.0 connections. - If set to false it disables the block cipher padding check - to be able to interoperate with legacy software. - </p> - - <warning><p> Using this option makes TLS vulnerable to - the Poodle attack</p></warning> - - </item> - + <tag><c>{padding_check, boolean()}</c></tag> + <item><p>Affects TLS-1.0 connections only.
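As an invented illustration of the user_lookup_fun contract just described, on the PSK side (module name, identity, secret, host, port, and cipher suite choice are all placeholders), a client could supply the lookup fun like this:

    -module(psk_client_sketch).
    -export([connect/0]).

    %% Resolve a PSK identity to its shared secret; returning 'error'
    %% for unknown identities makes the handshake fail.
    lookup(psk, PskIdentity, PskTable) ->
        case maps:find(PskIdentity, PskTable) of
            {ok, Secret} -> {ok, Secret};
            error        -> error
        end.

    connect() ->
        PskTable = #{"client-1" => <<"s3cr3t-key-material">>},
        ssl:connect("psk.example", 4433,
                    [{psk_identity, "client-1"},
                     {user_lookup_fun, {fun lookup/3, PskTable}},
                     {ciphers, [{psk, aes_128_cbc, sha}]}]).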
+ If set to <c>false</c>, it disables the block cipher padding check + to be able to interoperate with legacy software.</p></item> + </taglist> - + + <warning><p>Setting <c>{padding_check, false}</c> makes TLS + vulnerable to the Poodle attack.</p></warning> + </section> <section> <title>SSL OPTION DESCRIPTIONS - CLIENT SIDE</title> - <p>Options described here are client specific or has a slightly different - meaning in the client than in the server.</p> + <p>The following options are client-specific or have a slightly different + meaning in the client than in the server:</p> <taglist> - <tag>{verify, verify_type()}</tag> - <item> In verify_none mode the default behavior will be to - allow all x509-path validation errors. See also the verify_fun - option. - </item> - <tag>{reuse_sessions, boolean()}</tag> - <item>Specifies if client should try to reuse sessions - when possible. + + <tag><c>{verify, verify_type()}</c></tag> + <item><p>In mode <c>verify_none</c> the default behavior is to allow + all x509-path validation errors. See also option <c>verify_fun</c>.</p> </item> + + <tag><c>{reuse_sessions, boolean()}</c></tag> + <item><p>Specifies if the client is to try to reuse sessions + when possible.</p></item> + + <tag><c>{cacerts, [public_key:der_encoded()]}</c></tag> + <item><p>The DER-encoded trusted certificates. If this option + is supplied, it overrides option <c>cacertfile</c>.</p></item> - <tag>{cacertfile, path()}</tag> - <item>The path to a file containing PEM encoded CA certificates. The CA + <tag><c>{cacertfile, path()}</c></tag> + <item><p>Path to a file containing PEM-encoded CA certificates. The CA certificates are used during server authentication and when building the - client certificate chain. - </item> + client certificate chain.</p> + </item> - <tag>{alpn_advertised_protocols, [binary()]}</tag> + <tag><c>{alpn_advertised_protocols, [binary()]}</c></tag> <item> <p>The list of protocols supported by the client to be sent to the server to be used for an Application-Layer Protocol Negotiation (ALPN).
@@ -441,50 +470,54 @@ fun(srp, Username :: string(), UserState :: term()) -> <p>The negotiated protocol can be retrieved using the <c>negotiated_protocol/1</c> function.</p> </item> - <tag>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()]}}</tag> - <tag>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()], Default :: binary()}}</tag> + <tag><c>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()]}}</c></tag> + <tag><c>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()], Default :: binary()}}</c></tag> <item> - <p>Indicates the client will try to perform Next Protocol + <p>Indicates that the client is to try to perform Next Protocol Negotiation.</p> - <p>If precedence is server the negotiated protocol will be the - first protocol that appears on the server advertised list that is + <p>If precedence is server, the negotiated protocol is the + first protocol to be shown on the server advertised list, which is also on the client preference list.</p> - <p>If precedence is client the negotiated protocol will be the - first protocol that appears on the client preference list that is + <p>If precedence is client, the negotiated protocol is the + first protocol to be shown on the client preference list, which is also on the server advertised list.</p> <p>If the client does not support any of the server advertised - protocols or the server does not advertise any protocols the - client will fallback to the first protocol in its list or if a - default is supplied it will fallback to that instead. If the - server does not support Next Protocol Negotiation the - connection will be aborted if no default protocol is supplied.</p> + protocols or the server does not advertise any protocols, the + client falls back to the first protocol in its list or to the + default protocol (if a default is supplied). If the + server does not support Next Protocol Negotiation, the + connection terminates if no default protocol is supplied.</p> </item> - <tag>{psk_identity, string()}</tag> - <item>Specifies the identity the client presents to the server. The matching secret is - found by calling the user_look_fun. - </item> - <tag>{srp_identity, {Username :: string(), Password :: string()}</tag> - <item>Specifies the Username and Password to use to authenticate to the server. + <tag><c>{psk_identity, string()}</c></tag> + <item><p>Specifies the identity the client presents to the server. 
+ The matching secret is found by calling <c>user_lookup_fun</c>.</p> </item> - <tag>{server_name_indication, hostname()}</tag> - <tag>{server_name_indication, disable}</tag> + + <tag><c>{srp_identity, {Username :: string(), Password :: string()} + </c></tag> + <item><p>Specifies the username and password to use to authenticate + to the server.</p></item> + + <tag><c>{server_name_indication, hostname()}</c></tag> + <item><p>Can be specified when upgrading a TCP socket to a TLS + socket to use the TLS Server Name Indication extension.</p></item> + + <tag><c>{server_name_indication, disable}</c></tag> <item> - <p>This option can be specified when upgrading a TCP socket to a TLS - socket to use the TLS Server Name Indication extension.</p> - <p>When starting a TLS connection without upgrade the Server Name - Indication extension will be sent if possible, this option may also be + <p>When starting a TLS connection without upgrade, the Server Name + Indication extension is sent if possible. This option can be used to disable that behavior.</p> </item> - <tag>{fallback, boolean()}</tag> + <tag><c>{fallback, boolean()}</c></tag> <item> <p> Send special cipher suite TLS_FALLBACK_SCSV to avoid undesired TLS version downgrade. Defaults to false</p> <warning><p>Note this option is not needed in normal TLS usage and should not be used - to implement new clients. But legacy clients that that retries connections in the following manner</p> + to implement new clients. But legacy clients that retries connections in the following manner</p> <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv2', 'tlsv1.1', 'tlsv1', 'sslv3']}])</c></p> <p><c> ssl:connect(Host, Port, [...{versions, [tlsv1.1', 'tlsv1', 'sslv3']}, {fallback, true}])</c></p> @@ -502,65 +535,64 @@ fun(srp, Username :: string(), UserState :: term()) -> <section> <title>SSL OPTION DESCRIPTIONS - SERVER SIDE</title> - <p>Options described here are server specific or has a slightly different - meaning in the server than in the client.</p> + <p>The following options are server-specific or have a slightly different + meaning in the server than in the client:</p> <taglist> + + <tag><c>{cacerts, [public_key:der_encoded()]}</c></tag> + <item><p>The DER-encoded trusted certificates. If this option + is supplied it overrides option <c>cacertfile</c>.</p></item> - <tag>{cacertfile, path()}</tag> - <item>The path to a file containing PEM encoded CA + <tag><c>{cacertfile, path()}</c></tag> + <item><p>Path to a file containing PEM-encoded CA certificates. The CA certificates are used to build the server - certificate chain, and for client authentication. Also the CAs - are used in the list of acceptable client CAs passed to the - client when a certificate is requested. May be omitted if there - is no need to verify the client and if there are not any - intermediate CAs for the server certificate. - </item> + certificate chain and for client authentication. The CAs are + also used in the list of acceptable client CAs passed to the + client when a certificate is requested. Can be omitted if there + is no need to verify the client and if there are no + intermediate CAs for the server certificate.</p></item> - <tag>{dh, der_encoded()}</tag> - <item>The DER encoded Diffie Hellman parameters. If this option - is supplied it will override the dhfile option. - </item> - - <tag>{dhfile, path()}</tag> - <item>Path to file containing PEM encoded Diffie Hellman parameters, - for the server to use if a cipher suite using Diffie Hellman key exchange - is negotiated. 
If not specified default parameters will be used. - </item> - - <tag>{verify, verify_type()}</tag> - <item>Servers only do the x509-path validation in verify_peer - mode, as it then will send a certificate request to the client - (this message is not sent if the verify option is verify_none) - and you may then also want to specify the option - fail_if_no_peer_cert. - </item> - - <tag>{fail_if_no_peer_cert, boolean()}</tag> - <item>Used together with {verify, verify_peer} by an ssl server. - If set to true, the server will fail if the client does not have - a certificate to send, i.e. sends a empty certificate, if set to - false it will only fail if the client sends an invalid - certificate (an empty certificate is considered valid). - </item> - - <tag>{reuse_sessions, boolean()}</tag> - <item>Specifies if the server should agree to reuse sessions - when the clients request to do so. See also the reuse_session - option. + <tag><c>{dh, public_key:der_encoded()}</c></tag> + <item><p>The DER-encoded Diffie-Hellman parameters. If specified, + it overrides option <c>dhfile</c>.</p></item> + + <tag><c>{dhfile, path()}</c></tag> + <item><p>Path to a file containing PEM-encoded Diffie Hellman parameters + to be used by the server if a cipher suite using Diffie Hellman key + exchange is negotiated. If not specified, default parameters are used. + </p></item> + + <tag><c>{verify, verify_type()}</c></tag> + <item><p>A server only does x509-path validation in mode <c>verify_peer</c>, + as it then sends a certificate request to the client + (this message is not sent if the verify option is <c>verify_none</c>). + You may then also want to specify option <c>fail_if_no_peer_cert</c>. + </p></item> + + <tag><c>{fail_if_no_peer_cert, boolean()}</c></tag> + <item><p>Used together with <c>{verify, verify_peer}</c> by an SSL server. + If set to <c>true</c>, the server fails if the client does not have + a certificate to send, that is, sends an empty certificate. If set to + <c>false</c>, it fails only if the client sends an invalid + certificate (an empty certificate is considered valid). Defaults to <c>false</c>.</p> </item> - <tag>{reuse_session, fun(SuggestedSessionId, - PeerCert, Compression, CipherSuite) -> boolean()}</tag> - <item>Enables the ssl server to have a local policy - for deciding if a session should be reused or not, - only meaningful if <c>reuse_sessions</c> is set to true. - SuggestedSessionId is a binary(), PeerCert is a DER encoded - certificate, Compression is an enumeration integer - and CipherSuite is of type ciphersuite(). - </item> - - <tag>{alpn_preferred_protocols, [binary()]}</tag> + <tag><c>{reuse_sessions, boolean()}</c></tag> + <item><p>Specifies if the server is to agree to reuse sessions + when requested by the clients. See also option <c>reuse_session</c>. + </p></item> + + <tag><c>{reuse_session, fun(SuggestedSessionId, + PeerCert, Compression, CipherSuite) -> boolean()}</c></tag> + <item><p>Enables the SSL server to have a local policy + for deciding if a session is to be reused or not. + Meaningful only if <c>reuse_sessions</c> is set to <c>true</c>.
+ <c>SuggestedSessionId</c> is a <c>binary()</c>, <c>PeerCert</c> is + a DER-encoded certificate, <c>Compression</c> is an enumeration integer, + and <c>CipherSuite</c> is of type <c>ciphersuite()</c>.</p></item> + + <tag><c>{alpn_preferred_protocols, [binary()]}</c></tag> <item> <p>Indicates the server will try to perform Application-Layer Protocol Negotiation (ALPN).</p> @@ -573,65 +605,62 @@ fun(srp, Username :: string(), UserState :: term()) -> <p>The negotiated protocol can be retrieved using the <c>negotiated_protocol/1</c> function.</p> </item> - <tag>{next_protocols_advertised, Protocols :: [binary()]}</tag> - <item>The list of protocols to send to the client if the client indicates - it supports the Next Protocol extension. The client may select a protocol + <tag><c>{next_protocols_advertised, Protocols :: [binary()]}</c></tag> + <item><p>List of protocols to send to the client if the client indicates that + it supports the Next Protocol extension. The client can select a protocol that is not on this list. The list of protocols must not contain an empty - binary. If the server negotiates a Next Protocol it can be accessed - using <c>negotiated_protocol/1</c> function. - </item> + binary. If the server negotiates a Next Protocol, it can be accessed + using the <c>negotiated_next_protocol/1</c> method.</p></item> - <tag>{psk_identity, string()}</tag> - <item>Specifies the server identity hint the server presents to the client. - </item> - <tag>{log_alert, boolean()}</tag> - <item>If false, error reports will not be displayed.</item> - <tag>{honor_cipher_order, boolean()}</tag> - <item>If true, use the server's preference for cipher selection. If false - (the default), use the client's preference. - </item> + <tag><c>{psk_identity, string()}</c></tag> + <item><p>Specifies the server identity hint, which the server presents to + the client.</p></item> + + <tag><c>{log_alert, boolean()}</c></tag> + <item><p>If set to <c>false</c>, error reports are not displayed.</p></item> + + <tag><c>{honor_cipher_order, boolean()}</c></tag> + <item><p>If set to <c>true</c>, use the server preference for cipher + selection. If set to <c>false</c> (the default), use the client + preference.</p></item> + + </taglist> </section> <section> <title>General</title> - <p>When an ssl socket is in active mode (the default), data from the + <p>When an SSL socket is in active mode (the default), data from the socket is delivered to the owner of the socket in the form of - messages: - </p> + messages:</p> + <list type="bulleted"> - <item>{ssl, Socket, Data} - </item> - <item>{ssl_closed, Socket} - </item> - <item> - {ssl_error, Socket, Reason} - </item> + <item><p><c>{ssl, Socket, Data}</c></p></item> + <item><p><c>{ssl_closed, Socket}</c></p></item> + <item><p><c>{ssl_error, Socket, Reason}</c></p></item> </list> - - <p>A <c>Timeout</c> argument specifies a timeout in milliseconds. The - default value for a <c>Timeout</c> argument is <c>infinity</c>. - </p> + + <p>A <c>Timeout</c> argument specifies a time-out in milliseconds. The + default value for argument <c>Timeout</c> is <c>infinity</c>.</p> </section> <funcs> <func> <name>cipher_suites() -></name> <name>cipher_suites(Type) -> ciphers()</name> - <fsummary> Returns a list of supported cipher suites</fsummary> + <fsummary>Returns a list of supported cipher suites.</fsummary> <type> <v>Type = erlang | openssl | all</v> - </type> <desc><p>Returns a list of supported cipher suites. - cipher_suites() is equivalent to cipher_suites(erlang). 
- Type openssl is provided for backwards compatibility with - old ssl that used openssl. cipher_suites(all) returns + <c>cipher_suites()</c> is equivalent to <c>cipher_suites(erlang).</c> + Type <c>openssl</c> is provided for backwards compatibility with the + old SSL, which used OpenSSL. <c>cipher_suites(all)</c> returns all available cipher suites. The cipher suites not present - in cipher_suites(erlang) but in included in cipher_suites(all) - will not be used unless explicitly configured by the user. - </p> + in <c>cipher_suites(erlang)</c> but included in + <c>cipher_suites(all)</c> are not used unless explicitly configured + by the user.</p> </desc> </func> @@ -651,17 +680,17 @@ fun(srp, Username :: string(), UserState :: term()) -> <name>connect(Socket, SslOptions) -> </name> <name>connect(Socket, SslOptions, Timeout) -> {ok, SslSocket} | {error, Reason}</name> - <fsummary> Upgrades a gen_tcp, or - equivalent, connected socket to an ssl socket. </fsummary> + <fsummary>Upgrades a <c>gen_tcp</c>, or + equivalent, connected socket to an SSL socket.</fsummary> <type> - <v>Socket = socket()</v> - <v>SslOptions = [ssloption()]</v> + <v>Socket = socket()</v> + <v>SslOptions = [ssloption()]</v> <v>Timeout = integer() | infinity</v> <v>SslSocket = sslsocket()</v> <v>Reason = term()</v> </type> - <desc> <p>Upgrades a gen_tcp, or equivalent, - connected socket to an ssl socket i.e. performs the + <desc><p>Upgrades a <c>gen_tcp</c>, or equivalent, + connected socket to an SSL socket, that is, performs the client-side ssl handshake.</p> </desc> </func> @@ -670,7 +699,7 @@ fun(srp, Username :: string(), UserState :: term()) -> <name>connect(Host, Port, Options) -></name> <name>connect(Host, Port, Options, Timeout) -> {ok, SslSocket} | {error, Reason}</name> - <fsummary>Opens an ssl connection to Host, Port. </fsummary> + <fsummary>Opens an SSL connection to <c>Host</c>, <c>Port</c>.</fsummary> <type> <v>Host = host()</v> <v>Port = integer()</v> @@ -679,72 +708,70 @@ fun(srp, Username :: string(), UserState :: term()) -> <v>SslSocket = sslsocket()</v> <v>Reason = term()</v> </type> - <desc> <p>Opens an ssl connection to Host, Port.</p> </desc> + <desc><p>Opens an SSL connection to <c>Host</c>, <c>Port</c>.</p></desc> </func> <func> <name>close(SslSocket) -> ok | {error, Reason}</name> - <fsummary>Close an ssl connection</fsummary> + <fsummary>Closes an SSL connection.</fsummary> <type> <v>SslSocket = sslsocket()</v> <v>Reason = term()</v> </type> - <desc><p>Close an ssl connection.</p> + <desc><p>Closes an SSL connection.</p> + </desc> + </func> + + <func> + <name>connection_info(SslSocket) -> + {ok, {ProtocolVersion, CipherSuite}} | {error, Reason}</name> + <fsummary>Returns the Negotiated Protocol version and cipher suite. + </fsummary> + <type> + <v>CipherSuite = ciphersuite()</v> + <v>ProtocolVersion = protocol()</v> + </type> + <desc><p>Returns the Negotiated Protocol version and cipher suite.</p> </desc> </func> <func> <name>controlling_process(SslSocket, NewOwner) -> ok | {error, Reason}</name> - <fsummary>Assigns a new controlling process to the - ssl-socket.</fsummary> - + SSL socket.</fsummary> <type> <v>SslSocket = sslsocket()</v> <v>NewOwner = pid()</v> <v>Reason = term()</v> </type> - <desc><p>Assigns a new controlling process to the ssl-socket. A - controlling process is the owner of an ssl-socket, and receives - all messages from the socket.</p> + <desc><p>Assigns a new controlling process to the SSL socket. 
A + controlling process is the owner of an SSL socket, and receives + all messages from the socket.</p> </desc> </func> <func> - <name>connection_info(SslSocket) -> - {ok, {ProtocolVersion, CipherSuite}} | {error, Reason} </name> - <fsummary>Returns the negotiated protocol version and cipher suite. - </fsummary> - <type> - <v>CipherSuite = ciphersuite()</v> - <v>ProtocolVersion = protocol()</v> - </type> - <desc><p>Returns the negotiated protocol version and cipher suite.</p> - </desc> - </func> - - <func> <name>format_error(Reason) -> string()</name> - <fsummary>Return an error string.</fsummary> + <fsummary>Returns an error string.</fsummary> <type> <v>Reason = term()</v> </type> <desc> - <p>Presents the error returned by an ssl function as a printable string.</p> + <p>Presents the error returned by an SSL function as a printable string.</p> </desc> </func> <func> <name>getopts(Socket, OptionNames) -> {ok, [socketoption()]} | {error, Reason}</name> - <fsummary>Get the value of the specified options.</fsummary> + <fsummary>Gets the values of the specified options.</fsummary> <type> <v>Socket = sslsocket()</v> <v>OptionNames = [atom()]</v> </type> <desc> - <p>Get the value of the specified socket options. + <p>Gets the values of the specified socket options. </p> </desc> </func> @@ -752,34 +779,49 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>listen(Port, Options) -> {ok, ListenSocket} | {error, Reason}</name> - <fsummary>Creates an ssl listen socket.</fsummary> + <fsummary>Creates an SSL listen socket.</fsummary> <type> <v>Port = integer()</v> <v>Options = options()</v> <v>ListenSocket = sslsocket()</v> </type> <desc> - <p>Creates an ssl listen socket.</p> + <p>Creates an SSL listen socket.</p> </desc> </func> <func> + <name>negotiated_protocol(Socket) -> {ok, Protocol} | {error, protocol_not_negotiated}</name> + <fsummary>Returns the protocol negotiated through ALPN or NPN extensions.</fsummary> + <type> + <v>Socket = sslsocket()</v> + <v>Protocol = binary()</v> + </type> + <desc> + <p> + Returns the protocol negotiated through ALPN or NPN extensions. + </p> + </desc> + </func> + + <func> <name>peercert(Socket) -> {ok, Cert} | {error, Reason}</name> - <fsummary>Return the peer certificate.</fsummary> + <fsummary>Returns the peer certificate.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Cert = binary()</v> </type> <desc> - <p>The peer certificate is returned as a DER encoded binary. - The certificate can be decoded with <c>public_key:pkix_decode_cert/2</c>. - </p> + <p>The peer certificate is returned as a DER-encoded binary. 
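For illustration, a small helper (hypothetical module name) that combines this call with the decoding step described next might look like:

    -module(peer_subject).
    -export([subject/1]).

    -include_lib("public_key/include/public_key.hrl").

    %% Return the subject of the peer certificate of an established
    %% TLS connection.
    subject(SslSocket) ->
        {ok, Der} = ssl:peercert(SslSocket),
        #'OTPCertificate'{tbsCertificate = Tbs} =
            public_key:pkix_decode_cert(Der, otp),
        Tbs#'OTPTBSCertificate'.subject.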
+ The certificate can be decoded with + <c>public_key:pkix_decode_cert/2</c>.</p> </desc> </func> + <func> <name>peername(Socket) -> {ok, {Address, Port}} | {error, Reason}</name> - <fsummary>Return peer address and port.</fsummary> + <fsummary>Returns the peer address and port.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Address = ipaddress()</v> @@ -789,12 +831,32 @@ fun(srp, Username :: string(), UserState :: term()) -> <p>Returns the address and port number of the peer.</p> </desc> </func> + + <func> + <name>prf(Socket, Secret, Label, Seed, WantedLength) -> {ok, binary()} | {error, reason()}</name> + <fsummary>Uses a session Pseudo-Random Function to generate key material.</fsummary> + <type> + <v>Socket = sslsocket()</v> + <v>Secret = binary() | master_secret</v> + <v>Label = binary()</v> + <v>Seed = [binary() | prf_random()]</v> + <v>WantedLength = non_neg_integer()</v> + </type> + <desc> + <p>Uses the Pseudo-Random Function (PRF) of a TLS session to generate + extra key material. It either takes user-generated values for + <c>Secret</c> and <c>Seed</c> or atoms directing it to use a specific + value from the session security parameters.</p> + <p>Can only be used with TLS connections; <c>{error, undefined}</c> + is returned for SSLv3 connections.</p> + </desc> + </func> <func> <name>recv(Socket, Length) -> </name> <name>recv(Socket, Length, Timeout) -> {ok, Data} | {error, Reason}</name> - <fsummary>Receive data on a socket.</fsummary> + <fsummary>Receives data on a socket.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Length = integer()</v> @@ -802,63 +864,43 @@ fun(srp, Username :: string(), UserState :: term()) -> <v>Data = [char()] | binary()</v> </type> <desc> - <p>This function receives a packet from a socket in passive - mode. A closed socket is indicated by a return value + <p>Receives a packet from a socket in passive + mode. A closed socket is indicated by return value <c>{error, closed}</c>.</p> - <p>The <c>Length</c> argument is only meaningful when - the socket is in <c>raw</c> mode and denotes the number of + <p>Argument <c>Length</c> is meaningful only when + the socket is in mode <c>raw</c> and denotes the number of bytes to read. If <c>Length</c> = 0, all available bytes are returned. If <c>Length</c> > 0, exactly <c>Length</c> bytes are returned, or an error; possibly discarding less than <c>Length</c> bytes of data when the socket gets closed from the other side.</p> - <p>The optional <c>Timeout</c> parameter specifies a timeout in + <p>Optional argument <c>Timeout</c> specifies a time-out in milliseconds. The default value is <c>infinity</c>.</p> </desc> </func> <func> - <name>prf(Socket, Secret, Label, Seed, WantedLength) -> {ok, binary()} | {error, reason()}</name> - <fsummary>Use a sessions pseudo random function to generate key material.</fsummary> - <type> - <v>Socket = sslsocket()</v> - <v>Secret = binary() | master_secret</v> - <v>Label = binary()</v> - <v>Seed = [binary() | prf_random()]</v> - <v>WantedLength = non_neg_integer()</v> - </type> - <desc> - <p>Use the pseudo random function (PRF) of a TLS session to generate - additional key material. 
It either takes user generated values for - <c>Secret</c> and <c>Seed</c> or atoms directing it use a specific - value from the session security parameters.</p> - <p>This function can only be used with TLS connections, <c>{error, undefined}</c> - is returned for SSLv3 connections.</p> - </desc> - </func> - - <func> <name>renegotiate(Socket) -> ok | {error, Reason}</name> - <fsummary> Initiates a new handshake.</fsummary> + <fsummary>Initiates a new handshake.</fsummary> <type> <v>Socket = sslsocket()</v> </type> <desc><p>Initiates a new handshake. A notable return value is <c>{error, renegotiation_rejected}</c> indicating that the peer - refused to go through with the renegotiation but the connection + refused to go through with the renegotiation, but the connection is still active using the previously negotiated session.</p> </desc> </func> <func> <name>send(Socket, Data) -> ok | {error, Reason}</name> - <fsummary>Write data to a socket.</fsummary> + <fsummary>Writes data to a socket.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Data = iodata()</v> </type> <desc> - <p>Writes <c>Data</c> to <c>Socket</c>. </p> + <p>Writes <c>Data</c> to <c>Socket</c>.</p> <p>A notable return value is <c>{error, closed}</c> indicating that the socket is closed.</p> </desc> @@ -866,31 +908,31 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>setopts(Socket, Options) -> ok | {error, Reason}</name> - <fsummary>Set socket options.</fsummary> + <fsummary>Sets socket options.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Options = [socketoption]()</v> </type> <desc> - <p>Sets options according to <c>Options</c> for the socket - <c>Socket</c>. </p> + <p>Sets options according to <c>Options</c> for socket + <c>Socket</c>.</p> </desc> </func> <func> <name>shutdown(Socket, How) -> ok | {error, Reason}</name> - <fsummary>Immediately close a socket</fsummary> + <fsummary>Immediately closes a socket.</fsummary> <type> <v>Socket = sslsocket()</v> <v>How = read | write | read_write</v> <v>Reason = reason()</v> </type> <desc> - <p>Immediately close a socket in one or two directions.</p> + <p>Immediately closes a socket in one or two directions.</p> <p><c>How == write</c> means closing the socket for writing, reading from it is still possible.</p> <p>To be able to handle that the peer has done a shutdown on - the write side, the <c>{exit_on_close, false}</c> option + the write side, option <c>{exit_on_close, false}</c> is useful.</p> </desc> </func> @@ -898,16 +940,16 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>ssl_accept(Socket) -> </name> <name>ssl_accept(Socket, Timeout) -> ok | {error, Reason}</name> - <fsummary>Perform server-side SSL/TLS handshake</fsummary> + <fsummary>Performs server-side SSL/TLS handshake.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Timeout = integer()</v> <v>Reason = term()</v> </type> <desc> - <p> Performs the SSL/TLS server-side handshake <c>Socket</c> is a socket as returned - by <seealso - marker="#transport_accept-2">ssl:transport_accept/[1,2]</seealso> + <p>Performs the SSL/TLS server-side handshake.</p> + <p><c>Socket</c> is a socket as returned by + <seealso marker="#transport_accept-2">ssl:transport_accept/[1,2]</seealso> </p> </desc> </func> @@ -915,7 +957,7 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>ssl_accept(Socket, SslOptions) -> </name> <name>ssl_accept(Socket, SslOptions, Timeout) -> {ok, Socket} | ok | {error, Reason}</name> - <fsummary>Perform server-side SSL/TLS handshake</fsummary> + 
<fsummary>Performs server-side SSL/TLS handshake.</fsummary> <type> <v>Socket = socket() | sslsocket() </v> <v>SslOptions = ssloptions()</v> @@ -923,17 +965,19 @@ fun(srp, Username :: string(), UserState :: term()) -> <v>Reason = term()</v> </type> <desc> - <p> If <c>Socket</c> is a socket() - upgrades a gen_tcp, or equivalent, socket to an ssl socket - i.e. performs the SSL/TLS server-side handshake and returns the ssl socket. - </p> + <p>If <c>Socket</c> is a <c>socket()</c>: upgrades a <c>gen_tcp</c>, + or equivalent, socket to an SSL socket, that is, performs + the SSL/TLS server-side handshake and returns the SSL socket.</p> - <warning><p>Note that the listen socket should be in {active, false} mode + <warning><p>The listen socket is to be in mode <c>{active, false}</c> before telling the client that the server is ready to upgrade - by calling this function, otherwise the upgrade may - or may not succeed depending on timing.</p></warning> + by calling this function, else the upgrade succeeds or does not + succeed depending on timing.</p></warning> - <p> If <c>Socket</c> is an sslsocket() - provides additional SSL/TLS options to those specified in <seealso - marker="#listen-2">ssl:listen/2 </seealso> and then performs the SSL/TLS handshake. + <p>If <c>Socket</c> is an <c>sslsocket()</c>: provides extra SSL/TLS + options to those specified in + <seealso marker="#listen-2">ssl:listen/2 </seealso> and then performs + the SSL/TLS handshake. </p> </desc> </func> @@ -941,14 +985,14 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>sockname(Socket) -> {ok, {Address, Port}} | {error, Reason}</name> - <fsummary>Return the local address and port.</fsummary> + <fsummary>Returns the local address and port.</fsummary> <type> <v>Socket = sslsocket()</v> <v>Address = ipaddress()</v> <v>Port = integer()</v> </type> <desc> - <p>Returns the local address and port number of the socket + <p>Returns the local address and port number of socket <c>Socket</c>.</p> </desc> </func> @@ -956,22 +1000,21 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>start() -> </name> <name>start(Type) -> ok | {error, Reason}</name> - <fsummary>Starts the Ssl application. </fsummary> + <fsummary>Starts the <c>ssl</c>application.</fsummary> <type> - <v>Type = permanent | transient | temporary</v> + <v>Type = permanent | transient | temporary</v> </type> <desc> - <p>Starts the Ssl application. Default type - is temporary. - <seealso marker="kernel:application">application(3)</seealso></p> + <p>Starts the <c>ssl</c> application. Default type + is <c>temporary</c>.</p> </desc> </func> + <func> <name>stop() -> ok </name> - <fsummary>Stops the Ssl application.</fsummary> + <fsummary>Stops the <c>ssl</c> application.</fsummary> <desc> - <p>Stops the Ssl application. 
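As a hedged sketch of the upgrade variant of ssl_accept/2 described above, a server could turn an accepted gen_tcp socket into an SSL socket as follows (the PEM file names are placeholders):
<code type="erl">
%% Sketch: ListenSocket is a gen_tcp listen socket opened with
%% {active, false}; "cert.pem" and "key.pem" are example file names.
upgrade_to_tls(ListenSocket) ->
    {ok, TcpSocket} = gen_tcp:accept(ListenSocket),
    {ok, TlsSocket} = ssl:ssl_accept(TcpSocket,
                                     [{certfile, "cert.pem"},
                                      {keyfile, "key.pem"}]),
    TlsSocket.
</code>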
- <seealso marker="kernel:application">application(3)</seealso></p> + <p>Stops the <c>ssl</c> application.</p> </desc> </func> @@ -979,8 +1022,8 @@ fun(srp, Username :: string(), UserState :: term()) -> <name>transport_accept(ListenSocket) -></name> <name>transport_accept(ListenSocket, Timeout) -> {ok, NewSocket} | {error, Reason}</name> - <fsummary>Accept an incoming connection and - prepare for <c>ssl_accept</c></fsummary> + <fsummary>Accepts an incoming connection and + prepares for <c>ssl_accept</c>.</fsummary> <type> <v>ListenSocket = NewSocket = sslsocket()</v> <v>Timeout = integer()</v> @@ -989,23 +1032,22 @@ fun(srp, Username :: string(), UserState :: term()) -> <desc> <p>Accepts an incoming connection request on a listen socket. <c>ListenSocket</c> must be a socket returned from - <seealso - marker="#listen-2"> ssl:listen/2</seealso>. - The socket returned should be passed to + <seealso marker="#listen-2"> ssl:listen/2</seealso>. + The socket returned is to be passed to <seealso marker="#ssl_accept-2"> ssl:ssl_accept[2,3]</seealso> - to complete handshaking i.e + to complete handshaking, that is, establishing the SSL/TLS connection.</p> <warning> <p>The socket returned can only be used with - <seealso marker="#ssl_accept-2"> ssl:ssl_accept[2,3]</seealso> - no traffic can be sent or received before that call.</p> + <seealso marker="#ssl_accept-2"> ssl:ssl_accept[2,3]</seealso>. + No traffic can be sent or received before that call.</p> </warning> <p>The accepted socket inherits the options set for - <c>ListenSocket</c> in <seealso - marker="#listen-2"> ssl:listen/2</seealso>.</p> + <c>ListenSocket</c> in + <seealso marker="#listen-2"> ssl:listen/2</seealso>.</p> <p>The default value for <c>Timeout</c> is <c>infinity</c>. If - <c>Timeout</c> is specified, and no connection is accepted + <c>Timeout</c> is specified and no connection is accepted within the given time, <c>{error, timeout}</c> is returned.</p> </desc> @@ -1014,57 +1056,41 @@ fun(srp, Username :: string(), UserState :: term()) -> <func> <name>versions() -> [versions_info()]</name> <fsummary>Returns version information relevant for the - ssl application.</fsummary> + <c>ssl</c> application.</fsummary> <type> <v>versions_info() = {app_vsn, string()} | {supported | available, [protocol()] </v> </type> <desc> - <p> - Returns version information relevant for the - ssl application. - </p> + <p>Returns version information relevant for the <c>ssl</c> + application.</p> <taglist> - <tag>app_vsn</tag> - <item> The application version of the OTP ssl application.</item> - - <tag>supported</tag> + <tag><c>app_vsn</c></tag> + <item>The application version of the <c>ssl</c> application.</item> + <tag><c>supported</c></tag> <item>TLS/SSL versions supported by default. - Overridden by a versions option on - <seealso marker="#connect-2"> connect/[2,3,4]</seealso>, <seealso - marker="#listen-2"> listen/2</seealso> and <seealso - marker="#ssl_accept-2">ssl_accept/[1,2,3]</seealso>. For the - negotiated TLS/SSL version see <seealso + Overridden by a version option on + <seealso marker="#connect-2"> connect/[2,3,4]</seealso>, + <seealso marker="#listen-2"> listen/2</seealso>, and <seealso + marker="#ssl_accept-2">ssl_accept/[1,2,3]</seealso>. + For the negotiated TLS/SSL version, see <seealso marker="#connection_info-1">ssl:connection_info/1 - </seealso></item> - - <tag>available</tag> - <item>All TLS/SSL versions that the Erlang ssl application - can support. Note that TLS 1.2 requires sufficient support - from the crypto application. 
</item> + </seealso>.</item> + + <tag><c>available</c></tag> + <item>All TLS/SSL versions supported by the <c>ssl</c> application. + TLS 1.2 requires sufficient support from the <c>crypto</c> + application.</item> </taglist> </desc> </func> - <func> - <name>negotiated_protocol(Socket) -> {ok, Protocol} | {error, protocol_not_negotiated}</name> - <fsummary>Returns the protocol negotiated through ALPN or NPN extensions.</fsummary> - <type> - <v>Socket = sslsocket()</v> - <v>Protocol = binary()</v> - </type> - <desc> - <p> - Returns the protocol negotiated through ALPN or NPN extensions. - </p> - </desc> - </func> - + </funcs> <section> <title>SEE ALSO</title> - <p><seealso marker="kernel:inet">inet(3) </seealso> and - <seealso marker="kernel:gen_tcp">gen_tcp(3) </seealso> + <p><seealso marker="kernel:inet">inet(3)</seealso> and + <seealso marker="kernel:gen_tcp">gen_tcp(3)</seealso> </p> </section> diff --git a/lib/ssl/doc/src/ssl_app.xml b/lib/ssl/doc/src/ssl_app.xml index e3a3fc27f2..43c69ba377 100644 --- a/lib/ssl/doc/src/ssl_app.xml +++ b/lib/ssl/doc/src/ssl_app.xml @@ -22,66 +22,60 @@ </legalnotice> <title>ssl</title> + <prepared></prepared> + <docno></docno> + <date></date> + <rev></rev> <file>ssl_app.sgml</file> </header> <app>ssl</app> - <appsummary>The SSL application provides secure communication over + <appsummary>The ssl application provides secure communication over sockets.</appsummary> + <description></description> <section> <title>DEPENDENCIES</title> - <p>The ssl application uses the Erlang applications public_key and - crypto to handle public keys and encryption, hence these - applications needs to be loaded for the ssl application to work. In - an embedded environment that means they need to be started with - application:start/[1,2] before the ssl application is started. - </p> + <p>The <c>ssl</c> application uses the <c>public_key</c> and + <c>crypto</c> application to handle public keys and encryption, hence + these applications must be loaded for the <c>ssl</c> application to work. + In an embedded environment this means they must be started with + <c>application:start/[1,2]</c> before the <c>ssl</c> application is + started.</p> </section> <section> - <title>ENVIRONMENT</title> - <p>The following application environment configuration parameters - are defined for the SSL application. See <seealso - marker="kernel:application">application(3)</seealso>for more - information about configuration parameters. - </p> - <p>Note that the environment parameters can be set on the command line, - for instance,</p> - <p><c>erl ... -ssl protocol_version '[sslv3, tlsv1]' ...</c>. - </p> + <title>CONFIGURATION</title> + <p>The application environment configuration parameters in this section + are defined for the <c>ssl</c> application. For more information + about configuration parameters, see the + <seealso marker="kernel:application">application(3)</seealso> + manual page in <c>kernel</c>.</p> + + <p>The environment parameters can be set on the command line, + for example:</p> + + <p><c>erl -ssl protocol_version "['tlsv1.2', 'tlsv1.1']"</c></p> + <taglist> - <tag><c><![CDATA[protocol_version = [sslv3|tlsv1] <optional>]]></c>.</tag> - <item> - <p>Protocol that will be supported by started clients and - servers. If this option is not set it will default to all - protocols currently supported by the erlang ssl application. - Note that this option may be overridden by the version option - to ssl:connect/[2,3] and ssl:listen/2. 
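The configuration parameters in this section can also be given in a configuration file loaded with erl -config; the file name and the values below are examples only, not defaults taken from this manual:
<code type="erl">
%% sys.config sketch: offer only TLS 1.1/1.2 by default and shorten
%% the session lifetime to 600 seconds.
[{ssl, [{protocol_version, ['tlsv1.2', 'tlsv1.1']},
        {session_lifetime, 600}]}].
</code>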
- </p> - </item> + <tag><c><![CDATA[protocol_version = <seealso marker="kernel:error_logger">ssl:protocol()</seealso> <optional>]]></c>.</tag> + <item><p>Protocol supported by started clients and + servers. If this option is not set, it defaults to all + protocols currently supported by the <c>ssl</c> application. + This option can be overridden by the version option + to <c>ssl:connect/[2,3]</c> and <c>ssl:listen/2</c>.</p></item> <tag><c><![CDATA[session_lifetime = integer() <optional>]]></c></tag> - <item> - <p>The lifetime of session data in seconds. - </p> - </item> + <item><p>Lifetime of the session data in seconds.</p></item> - <tag><c><![CDATA[session_cb = atom() <optional>]]></c></tag> - <item> - <p> - Name of session cache callback module that implements - the ssl_session_cache_api behavior, defaults to - ssl_session_cache.erl. - </p> - </item> + <tag><c><![CDATA[session_cb = atom() <optional>]]></c></tag> + <item><p>Name of the session cache callback module that implements + the <c>ssl_session_cache_api</c> behavior. Defaults to + <c>ssl_session_cache.erl</c>.</p></item> <tag><c><![CDATA[session_cb_init_args = proplist:proplist() <optional>]]></c></tag> - <item> - <p> - List of additional user defined arguments to the init function in session cache - callback module, defaults to []. - </p> - </item> + + <item><p>List of extra user-defined arguments to the <c>init</c> function + in the session cache callback module. Defaults to <c>[]</c>.</p></item> <tag><c><![CDATA[ssl_pem_cache_clean = integer() <optional>]]></c></tag> <item> @@ -96,6 +90,11 @@ </section> <section> + <title>ERROR LOGGER AND EVENT HANDLERS</title> + <p>The <c>ssl</c> applications uses the default <seealso marker="kernel:error_logger">OTP error logger</seealso> to log unexpected errors and TLS alerts. The logging of TLS alerts may be turned off with the <c>log_alert</c> option. </p> + </section> + + <section> <title>SEE ALSO</title> <p><seealso marker="kernel:application">application(3)</seealso></p> </section> diff --git a/lib/ssl/doc/src/ssl_crl_cache.xml b/lib/ssl/doc/src/ssl_crl_cache.xml index b291c7b633..62bf2ea7b7 100644 --- a/lib/ssl/doc/src/ssl_crl_cache.xml +++ b/lib/ssl/doc/src/ssl_crl_cache.xml @@ -36,31 +36,30 @@ <funcs> <func> - <name>insert(CRLSrc) -> ok | {error, Reason}</name> - <name>insert(URI, CRLSrc) -> ok | {error, Reason}</name> - <fsummary> </fsummary> - <type> - <v> CRLSrc = {file, string()} | {der, [ <seealso - marker="public_key:public_key"> der_encoded() </seealso> ]}</v> - <v> URI = http_uri:uri()</v> - <v> Reason = term()</v> - </type> - <desc> + <name>delete(Entries) -> ok | {error, Reason} </name> + <fsummary> </fsummary> + <type> + <v> Entries = <seealso marker="inets:http_uri">http_uri:uri() </seealso> | {file, string()} | {der, [<seealso + marker="public_key:public_key"> public_key:der_encoded() </seealso>]}</v> + <v> Reason = term()</v> + </type> + <desc> + Delete CRLs from the ssl applications local cache. + </desc> + </func> + <func> + <name>insert(CRLSrc) -> ok | {error, Reason}</name> + <name>insert(URI, CRLSrc) -> ok | {error, Reason}</name> + <fsummary> </fsummary> + <type> + <v> CRLSrc = {file, string()} | {der, [ <seealso + marker="public_key:public_key"> public_key:der_encoded() </seealso> ]}</v> + <v> URI = <seealso marker="inets:http_uri">http_uri:uri() </seealso> </v> + <v> Reason = term()</v> + </type> + <desc> Insert CRLs into the ssl applications local cache. 
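A hedged sketch of the insert and delete calls documented here; the URI and the DER-encoded CRL binary are placeholders supplied by the caller:
<code type="erl">
%% Sketch: Uri is an http_uri:uri() for the distribution point and
%% CrlDer a DER-encoded CRL binary fetched from it.
update_cached_crl(Uri, CrlDer) ->
    _ = ssl_crl_cache:delete({der, [CrlDer]}),
    ssl_crl_cache:insert(Uri, {der, [CrlDer]}).
</code>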
- </desc> - </func> - - <func> - <name>delete(Entries) -> ok | {error, Reason} </name> - <fsummary> </fsummary> - <type> - <v> Entries = http_uri:uri() | {file, string()} | {der, [<seealso - marker="public_key:public_key"> der_encoded() </seealso>]}</v> - <v> Reason = term()</v> - </type> - <desc> - Delete CRLs from the ssl applications local cache. - </desc> - </func> + </desc> + </func> </funcs> </erlref>
\ No newline at end of file diff --git a/lib/ssl/doc/src/ssl_crl_cache_api.xml b/lib/ssl/doc/src/ssl_crl_cache_api.xml index 3f518496be..557b7814b8 100644 --- a/lib/ssl/doc/src/ssl_crl_cache_api.xml +++ b/lib/ssl/doc/src/ssl_crl_cache_api.xml @@ -40,17 +40,40 @@ </description> <section> - <title>Common Data Types</title> + <title>DATA TYPES</title> <p>The following data types are used in the functions below: </p> - <p><c>cache_ref() = opaque()</c></p> - <p> dist_point() = #'DistributionPoint'{} see <seealso - marker="public_key:cert_records"> X509 certificates records</seealso></p> + <taglist> + + <tag><c>cache_ref()</c></tag> + <item> = opaque()</item> + <tag><c>dist_point()</c></tag> + <item> = #'DistributionPoint'{} see <seealso + marker="public_key:cert_records"> X509 certificates records</seealso></item> + + </taglist> + </section> - <funcs> + <func> + <name>fresh_crl(DistributionPoint, CRL) -> FreshCRL</name> + <fsummary> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to + public_key:pkix_crls_validate/3 </fsummary> + <type> + <v> DistributionPoint = dist_point() </v> + <v> CRL = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + <v> FreshCRL = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + </type> + <desc> + <p> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to + <seealso marker="public_key#pkix_path_validation-3">public_key:pkix_crls_validate/3 </seealso> </p> + </desc> + </func> + <func> <name>lookup(DistributionPoint, DbHandle) -> not_available | CRLs </name> <fsummary> </fsummary> @@ -60,7 +83,7 @@ <v> CRLs = [<seealso marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> </type> - <desc> <p>Lookup the CRLs belonging to the distribution point <c> Distributionpoint </c> </p>. + <desc> <p>Lookup the CRLs belonging to the distribution point <c> Distributionpoint</c>. </p> This function may choose to only look in the cache or to follow distribution point links depending on how the cache is administrated. </desc> @@ -78,22 +101,5 @@ <p>Select the CRLs in the cache that are issued by <c>Issuer</c> </p> </desc> </func> - - <func> - <name>fresh_crl(DistributionPoint, CRL) -> FreshCRL</name> - <fsummary> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to - public_key:pkix_crls_validate/3 </fsummary> - <type> - <v> DistributionPoint = dist_point() </v> - <v> CRL = [<seealso - marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> - <v> FreshCRL = [<seealso - marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> - </type> - <desc> - <p> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to - <seealso marker="public_key#pkix_path_validation-3">public_key:pkix_crls_validate/3 </seealso> </p> - </desc> - </func> </funcs> </erlref>
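A callback module implementing this API could be structured as follows. This is a sketch only: the module name, the ETS-backed storage layout, and the decision not to refetch CRLs in fresh_crl/2 are assumptions, not requirements of the API.
<code type="erl">
-module(my_crl_cache).
-behaviour(ssl_crl_cache_api).

-export([lookup/2, select/2, fresh_crl/2]).

%% Db is assumed to be an ETS set whose objects are
%% {DistributionPoint, Issuer, CRLs}, where CRLs is a list of
%% DER-encoded CRLs.

%% Return the cached CRLs for a distribution point, or not_available.
lookup(DistributionPoint, Db) ->
    case ets:lookup(Db, DistributionPoint) of
        [{_, _, CRLs}] -> CRLs;
        []             -> not_available
    end.

%% Return all cached CRLs issued by Issuer.
select(Issuer, Db) ->
    lists:append([CRLs || {_, I, CRLs} <- ets:tab2list(Db), I =:= Issuer]).

%% Used as the update_crl option to public_key:pkix_crls_validate/3.
%% This sketch does no refetching and returns the CRLs unchanged.
fresh_crl(_DistributionPoint, CRL) ->
    CRL.
</code>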
\ No newline at end of file diff --git a/lib/ssl/doc/src/ssl_distribution.xml index 4b4d042f70..c9f7b1b27f 100644 --- a/lib/ssl/doc/src/ssl_distribution.xml +++ b/lib/ssl/doc/src/ssl_distribution.xml @@ -31,23 +31,20 @@ <rev>B</rev> <file>ssl_distribution.xml</file> </header> - <p>This chapter describes how the Erlang distribution can use - SSL to get additional verification and security. - </p> + <p>This section describes how the Erlang distribution can use + SSL to get extra verification and security.</p> - <section> - <title>Introduction</title> - <p>The Erlang distribution can in theory use almost any connection - based protocol as bearer. A module that implements the protocol - specific parts of the connection setup is however needed. The - default distribution module is <c>inet_tcp_dist</c> which is - included in the Kernel application. When starting an + <p>The Erlang distribution can in theory use almost any + connection-based protocol as bearer. However, a module that + implements the protocol-specific parts of the connection setup is + needed. The default distribution module is <c>inet_tcp_dist</c> + in the <c>kernel</c> application. When starting an Erlang node distributed, <c>net_kernel</c> uses this module to - setup listen ports and connections. </p> + set up listen ports and connections.</p> - <p>In the SSL application there is an additional distribution - module, <c>inet_tls_dist</c> which can be used as an - alternative. All distribution connections will be using SSL and + <p>In the <c>ssl</c> application, an extra distribution + module, <c>inet_tls_dist</c>, can be used as an + alternative. All distribution connections will use SSL and all participating Erlang nodes in a distributed system must use this distribution module.</p> @@ -55,35 +52,45 @@ SSL connection setup. Erlang node cookies are however always used, as they can be used to differentiate between two different Erlang networks.</p> - <p>Setting up Erlang distribution over SSL involves some simple but - necessary steps:</p> + + <p>To set up Erlang distribution over SSL:</p> <list type="bulleted"> - <item>Building boot scripts including the SSL application</item> - <item>Specifying the distribution module for net_kernel</item> - <item>Specifying security options and other SSL options</item> + <item><em>Step 1:</em> Build boot scripts including the + <c>ssl</c> application.</item> + <item><em>Step 2:</em> Specify the distribution module for + <c>net_kernel</c>.</item> + <item><em>Step 3:</em> Specify the security options and other + SSL options.</item> + <item><em>Step 4:</em> Set up the environment to always use SSL.</item> </list> - <p>The rest of this chapter describes the above mentioned steps in - more detail.</p> - </section> + + <p>The following sections describe these steps.</p> <section> - <title>Building boot scripts including the SSL application</title> + <title>Building Boot Scripts Including the ssl Application</title> <p>Boot scripts are built using the <c>systools</c> utility in the - SASL application. Refer to the SASL documentations - for more information on systools. This is only an example of + <c>sasl</c> application. For more information on <c>systools</c>, + see the <c>sasl</c> documentation. This is only an example of what can be done.</p> - <p>The simplest boot script possible includes only the Kernel - and STDLIB applications. Such a script is located in the - Erlang distributions bin directory. 
The source for the script - can be found under the Erlang installation top directory under - <c><![CDATA[releases/<OTP version>/start_clean.rel]]></c>. Copy that - script to another location (and preferably another name) - and add the applications crypto, public_key and SSL with their current version numbers - after the STDLIB application.</p> - <p>An example .rel file with SSL added may look like this:</p> + <p>The simplest boot script possible includes only the <c>kernel</c> + and <c>stdlib</c> applications. Such a script is located in the + <c>bin</c> directory of the Erlang distribution. The source for the + script is found under the Erlang installation top directory under + <c><![CDATA[releases/<OTP version>/start_clean.rel]]></c>.</p> + + <p>Do the following:</p> + <list type="bulleted"> + <item><p>Copy that script to another location (and preferably another + name).</p></item> + <item><p>Add the applications <c>crypto</c>, <c>public_key</c>, and + <c>ssl</c> with their current version numbers after the + <c>stdlib</c>application.</p></item> + </list> + <p>The following shows an example <c>.rel</c> file with <c>ssl</c> + added:</p> <code type="none"> {release, {"OTP APN 181 01","R15A"}, {erts, "5.9"}, [{kernel,"2.15"}, @@ -94,23 +101,29 @@ ]}. </code> - <p>Note that the version numbers surely will differ in your system. - Whenever one of the applications included in the script is - upgraded, the script has to be changed.</p> - <p>Assuming the above .rel file is stored in a file - <c>start_ssl.rel</c> in the current directory, a boot script - can be built like this:</p> + <p>The version numbers differ in your system. Whenever one of the + applications included in the script is upgraded, change the script.</p> + <p>Do the following:</p> + <list type="bulleted"> + <item><p>Build the boot script.</p> + <p>Assuming the <c>.rel file</c> is stored in a file + <c>start_ssl.rel</c> in the current directory, a boot script + can be built as follows:</p></item> + </list> <code type="none"> 1> systools:make_script("start_ssl",[]). </code> - <p>There will now be a file <c>start_ssl.boot</c> in the current - directory. To test the boot script, start Erlang with the - <c>-boot</c> command line parameter specifying this boot script - (with its full path but without the <c>.boot</c> suffix), in - Unix it could look like this:</p> - <p></p> + <p>There is now a <c>start_ssl.boot</c> file in the current + directory.</p> + <p>Do the following:</p> + <list type="bulleted"> + <item><p>Test the boot script. To do this, start Erlang with the + <c>-boot</c> command-line parameter specifying this boot script + (with its full path, but without the <c>.boot</c> suffix). In + UNIX it can look as follows:</p></item> + </list> <code type="none"><![CDATA[ $ erl -boot /home/me/ssl/start_ssl Erlang (BEAM) emulator version 5.0 @@ -118,86 +131,99 @@ Erlang (BEAM) emulator version 5.0 Eshell V5.0 (abort with ^G) 1> whereis(ssl_manager). <0.41.0> ]]></code> - <p>The <c>whereis</c> function call verifies that the SSL - application is really started.</p> - - <p>As an alternative to building a bootscript, one can explicitly - add the path to the SSL <c>ebin</c> directory on the command - line. This is done with the command line option <c>-pa</c>. This - works as the SSL application does not need to be started for the - distribution to come up, as a clone of the SSL application is - hooked into the kernel application, so as long as the - SSL applications code can be reached, the distribution will - start. 
The <c>-pa</c> method is only recommended for testing - purposes.</p> - - <note><p>Note that the clone of the SSL application is necessary to + + <p>The <c>whereis</c> function-call verifies that the <c>ssl</c> + application is started.</p> + + <p>As an alternative to building a bootscript, you can explicitly + add the path to the <c>ssl</c> <c>ebin</c> directory on the command + line. This is done with command-line option <c>-pa</c>. This + works as the <c>ssl</c> application does not need to be started for the + distribution to come up, as a clone of the <c>ssl</c> application is + hooked into the <c>kernel</c> application. So, as long as the + <c>ssl</c> application code can be reached, the distribution starts. + The <c>-pa</c> method is only recommended for testing purposes.</p> + + <note><p>The clone of the <c>ssl</c> application must enable the use of the SSL code in such an early bootstage as - needed to setup the distribution, however this will make it - impossible to soft upgrade the SSL application.</p></note> + needed to set up the distribution. However, this makes it + impossible to soft upgrade the <c>ssl</c> application.</p></note> </section> <section> - <title>Specifying distribution module for net_kernel</title> - <p>The distribution module for SSL is named <c>inet_tls_dist</c> - and is specified on the command line with the <c>-proto_dist</c> - option. The argument to <c>-proto_dist</c> should be the module - name without the <c>_dist</c> suffix, so this distribution + <title>Specifying Distribution Module for net_kernel</title> + <p>The distribution module for <c>ssl</c> is named <c>inet_tls_dist</c> + and is specified on the command line with option <c>-proto_dist</c>. + The argument to <c>-proto_dist</c> is to be the module + name without suffix <c>_dist</c>. So, this distribution module is specified with <c>-proto_dist inet_tls</c> on the command line.</p> - <p></p> - <p>Extending the command line from above gives us the following:</p> + <p>Extending the command line gives the following:</p> <code type="none"> $ erl -boot /home/me/ssl/start_ssl -proto_dist inet_tls </code> -<p>For the distribution to actually be started, we need to give -the emulator a name as well:</p> +<p>For the distribution to be started, give the emulator a name as well:</p> <code type="none"> $ erl -boot /home/me/ssl/start_ssl -proto_dist inet_tls -sname ssl_test Erlang (BEAM) emulator version 5.0 [source] Eshell V5.0 (abort with ^G) (ssl_test@myhost)1> </code> - <p>Note however that a node started in this way will refuse to talk - to other nodes, as no ssl parameters are supplied - (see below).</p> + + <p>However, a node started in this way refuses to talk + to other nodes, as no <c>ssl</c> parameters are supplied + (see the next section).</p> </section> <section> - <title>Specifying SSL options</title> <p>For SSL to work, at least - a public key and certificate needs to be specified for the server - side. In the following example the PEM-files consists of two - entries the servers certificate and its private key.</p> - - <p>On the <c>erl</c> command line one can specify options that the - SSL distribution will add when creating a socket.</p> - - <p>One can specify the simpler SSL options certfile, keyfile, - password, cacertfile, verify, reuse_sessions, - secure_renegotiate, depth, hibernate_after and ciphers (use old - string format) by adding the prefix server_ or client_ to the - option name. The server can also take the options dhfile and - fail_if_no_peer_cert (also prefixed). 
- <c>client_</c>-prfixed options are used when the distribution initiates a - connection to another node and the <c>server_</c>-prefixed options are used - when accepting a connection from a remote node. </p> - - <p> More complex options such as verify_fun are not available at - the moment but a mechanism to handle such options may be added in - a future release. </p> - - <p> Raw socket options such as packet and size must not be specified on - the command line</p>. - - <p>The command line argument for specifying the SSL options is named - <c>-ssl_dist_opt</c> and should be followed by pairs of - SSL options and their values. The <c>-ssl_dist_opt</c> argument can + <title>Specifying SSL Options</title> + <p>For SSL to work, at least + a public key and a certificate must be specified for the server + side. In the following example, the PEM-files consist of two + entries, the server certificate and its private key.</p> + + <p>On the <c>erl</c> command line you can specify options that the + SSL distribution adds when creating a socket.</p> + + <p>The simplest SSL options in the following list can be specified + by adding the + prefix <c>server_</c> or <c>client_</c> to the option name:</p> + <list type="bulleted"> + <item><c>certfile</c></item> + <item><c>keyfile</c></item> + <item><c>password</c></item> + <item><c>cacertfile</c></item> + <item><c>verify</c></item> + <item><c>reuse_sessions</c></item> + <item><c>secure_renegotiate</c></item> + <item><c>depth</c></item> + <item><c>hibernate_after</c></item> + <item><c>ciphers</c> (use old string format)</item> + </list> + + <p>The server can also take the options <c>dhfile</c> and + <c>fail_if_no_peer_cert</c> (also prefixed).</p> + + <p><c>client_</c>-prefixed options are used when the distribution + initiates a connection to another node. <c>server_</c>-prefixed + options are used when accepting a connection from a remote node.</p> + + <p>More complex options, such as <c>verify_fun</c>, are currently not + available, but a mechanism to handle such options may be added in + a future release.</p> + + <p>Raw socket options, such as <c>packet</c> and <c>size</c> must not + be specified on the command line.</p> + + <p>The command-line argument for specifying the SSL options is named + <c>-ssl_dist_opt</c> and is to be followed by pairs of + SSL options and their values. Argument <c>-ssl_dist_opt</c> can be repeated any number of times.</p> - <p>An example command line would now look something like this + <p>An example command line can now look as follows (line breaks in the command are for readability, - they should not be there when typed):</p> + and are not be there when typed):</p> <code type="none"> $ erl -boot /home/me/ssl/start_ssl -proto_dist inet_tls -ssl_dist_opt server_certfile "/home/me/ssl/erlserver.pem" @@ -207,20 +233,20 @@ Erlang (BEAM) emulator version 5.0 [source] Eshell V5.0 (abort with ^G) (ssl_test@myhost)1> </code> - <p>A node started in this way will be fully functional, using SSL + <p>A node started in this way is fully functional, using SSL as the distribution protocol.</p> </section> <section> - <title>Setting up environment to always use SSL</title> - <p>A convenient way to specify arguments to Erlang is to use the - <c>ERL_FLAGS</c> environment variable. 
All the flags needed to - use SSL distribution can be specified in that variable and will - then be interpreted as command line arguments for all + <title>Setting up Environment to Always Use SSL</title> + <p>A convenient way to specify arguments to Erlang is to use environment + variable <c>ERL_FLAGS</c>. All the flags needed to + use the SSL distribution can be specified in that variable and are + then interpreted as command-line arguments for all subsequent invocations of Erlang.</p> - <p></p> - <p>In a Unix (Bourne) shell it could look like this (line breaks for - readability, they should not be there when typed):</p> + + <p>In a Unix (Bourne) shell, it can look as follows (line breaks are for + readability, they are not to be there when typed):</p> <code type="none"> $ ERL_FLAGS="-boot /home/me/ssl/start_ssl -proto_dist inet_tls -ssl_dist_opt server_certfile /home/me/ssl/erlserver.pem @@ -240,7 +266,8 @@ Eshell V5.0 (abort with ^G) {ssl_dist_opt,["server_secure_renegotiate","true", "client_secure_renegotiate","true"] {home,["/home/me"]}] </code> + <p>The <c>init:get_arguments()</c> call verifies that the correct - arguments are supplied to the emulator. </p> + arguments are supplied to the emulator.</p> </section> </chapter> diff --git a/lib/ssl/doc/src/ssl_introduction.xml b/lib/ssl/doc/src/ssl_introduction.xml new file mode 100644 index 0000000000..64607a393a --- /dev/null +++ b/lib/ssl/doc/src/ssl_introduction.xml @@ -0,0 +1,53 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!DOCTYPE chapter SYSTEM "chapter.dtd"> + +<chapter> + <header> + <copyright> + <year>2015</year> + <year>2015</year> + <holder>Ericsson AB, All Rights Reserved</holder> + </copyright> + <legalnotice> + The contents of this file are subject to the Erlang Public License, + Version 1.1, (the "License"); you may not use this file except in + compliance with the License. You should have received a copy of the + Erlang Public License along with this software. If not, it can be + retrieved online at http://www.erlang.org/. + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + the License for the specific language governing rights and limitations + under the License. + + The Initial Developer of the Original Code is Ericsson AB. + </legalnotice> + + <title>Introduction</title> + <prepared>OTP team</prepared> + <docno></docno> + <date>2015-03-05</date> + <rev>A</rev> + <file>ssl_introduction.xml</file> + </header> + + <section> + <title>Purpose</title> + <p>Transport Layer Security (TLS) and its predecessor, the Secure + Sockets Layer (SSL), are cryptographic protocols designed to + provide communications security over a computer network. The protocols use + use X.509 certificates and hence public key (asymmetric) cryptography to + authenticate the counterpart with whom they communicate, + and to exchange a symmetric key for payload encryption. 
The protocol provides + data/message confidentiality (encryption), integrity (through message authentication code checks) + and host verification (through certificate path validation).</p> + </section> + + <section> + <title>Prerequisites</title> + <p>It is assumed that the reader is familiar with the Erlang + programming language, the concepts of OTP, and has a basic + understanding of SSL/TLS.</p> + </section> + +</chapter> diff --git a/lib/ssl/doc/src/ssl_protocol.xml b/lib/ssl/doc/src/ssl_protocol.xml index 80d9cc4ee8..20f53c98e1 100644 --- a/lib/ssl/doc/src/ssl_protocol.xml +++ b/lib/ssl/doc/src/ssl_protocol.xml @@ -4,7 +4,7 @@ <chapter> <header> <copyright> - <year>2003</year><year>2013</year> + <year>2003</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -21,33 +21,42 @@ </legalnotice> - <title>Transport Layer Security (TLS) and its predecessor, Secure Socket Layer (SSL)</title> + <title>TLS and its Predecessor, SSL</title> + <prepared></prepared> + <responsible></responsible> + <docno></docno> + <approved></approved> + <checked></checked> + <date></date> + <rev></rev> <file>ssl_protocol.xml</file> </header> - <p>The erlang SSL application currently implements the protocol SSL/TLS - for currently supported versions see <seealso marker="ssl">ssl(3)</seealso> + <p>The Erlang <c>ssl</c> application implements the SSL/TLS protocol + for the currently supported versions, see the + <seealso marker="ssl">ssl(3)</seealso> manual page. </p> - <p>By default erlang SSL is run over the TCP/IP protocol even - though you could plug in any other reliable transport protocol - with the same API as gen_tcp.</p> + <p>By default <c>ssl</c> is run over the TCP/IP protocol even + though you can plug in any other reliable transport protocol + with the same Application Programming Interface (API) as the + <c>gen_tcp</c> module in <c>kernel</c>.</p> - <p>If a client and server wants to use an upgrade mechanism, such as - defined by RFC2817, to upgrade a regular TCP/IP connection to an SSL - connection the erlang SSL API supports this. This can be useful for - things such as supporting HTTP and HTTPS on the same port and + <p>If a client and a server wants to use an upgrade mechanism, such as + defined by RFC 2817, to upgrade a regular TCP/IP connection to an SSL + connection, this is supported by the Erlang <c>ssl</c> API. This can be + useful for, for example, supporting HTTP and HTTPS on the same port and implementing virtual hosting. </p> <section> - <title>Security overview</title> + <title>Security Overview</title> - <p>To achieve authentication and privacy the client and server will - perform a TLS Handshake procedure before transmitting or receiving - any data. During the handshake they agree on a protocol version and - cryptographic algorithms, they generate shared secrets using public - key cryptographics and optionally authenticate each other with + <p>To achieve authentication and privacy, the client and server + perform a TLS handshake procedure before transmitting or receiving + any data. During the handshake, they agree on a protocol version and + cryptographic algorithms, generate shared secrets using public + key cryptographies, and optionally authenticate each other with digital certificates.</p> </section> @@ -55,20 +64,21 @@ <title>Data Privacy and Integrity</title> <p>A <em>symmetric key</em> algorithm has one key only. The key is - used for both encryption and decryption. 
These algorithms are fast - compared to public key algorithms (using two keys, a public and a - private one) and are therefore typically used for encrypting bulk + used for both encryption and decryption. These algorithms are fast, + compared to public key algorithms (using two keys, one public and one + private) and are therefore typically used for encrypting bulk data. </p> <p>The keys for the symmetric encryption are generated uniquely for each connection and are based on a secret negotiated - in the TLS handshake. </p> + in the TLS handshake.</p> - <p>The TLS handshake protocol and data transfer is run on top of - the TLS Record Protocol that uses a keyed-hash MAC (Message - Authenticity Code), or HMAC, to protect the message's data - integrity. From the TLS RFC "A Message Authentication Code is a + <p>The TLS handshake protocol and data transfer is run on top of + the TLS Record Protocol, which uses a keyed-hash Message + Authenticity Code (MAC), or a Hash-based MAC (HMAC), + to protect the message data + integrity. From the TLS RFC: "A Message Authentication Code is a one-way hash computed from a message and some secret data. It is difficult to forge without knowing the secret data. Its purpose is to detect if the message has been altered." @@ -82,40 +92,43 @@ passport. The holder of the certificate is called the <em>subject</em>. The certificate is signed with the private key of the issuer of the certificate. A chain - of trust is build by having the issuer in its turn being - certified by another certificate and so on until you reach the - so called root certificate that is self signed i.e. issued + of trust is built by having the issuer in its turn being + certified by another certificate, and so on, until you reach the + so called root certificate, which is self-signed, that is, issued by itself.</p> - <p>Certificates are issued by <em>certification - authorities</em> (<em>CA</em>s) only. There are a handful of - top CAs in the world that issue root certificates. You can - examine the certificates of several of them by clicking + <p>Certificates are issued by Certification Authorities (CAs) only. + A handful of top CAs in the world issue root certificates. You can + examine several of these certificates by clicking through the menus of your web browser. </p> </section> <section> - <title>Authentication of Sender</title> + <title>Peer Authentication</title> - <p>Authentication of the sender is done by public key path - validation as defined in RFC 3280. Simplified that means that - each certificate in the certificate chain is issued by the one - before, the certificates attributes are valid ones, and the - root cert is a trusted cert that is present in the trusted - certs database kept by the peer.</p> + <p>Authentication of the peer is done by public key path + validation as defined in RFC 3280. This means basically + the following:</p> + <list type="bulleted"> + <item>Each certificate in the certificate chain is issued by the + previous one.</item> + <item>The certificates attributes are valid.</item> + <item>The root certificate is a trusted certificate that is present + in the trusted certificate database kept by the peer.</item> + </list> - <p>The server will always send a certificate chain as part of - the TLS handshake, but the client will only send one if - the server requests it. 
If the client does not have - an appropriate certificate it may send an "empty" certificate + <p>The server always sends a certificate chain as part of + the TLS handshake, but the client only sends one if requested + by the server. If the client does not have + an appropriate certificate, it can send an "empty" certificate to the server.</p> - <p>The client may choose to accept some path evaluation errors - for instance a web browser may ask the user if they want to - accept an unknown CA root certificate. The server, if it request - a certificate, will on the other hand not accept any path validation - errors. It is configurable if the server should accept + <p>The client can choose to accept some path evaluation errors, + for example, a web browser can ask the user whether to + accept an unknown CA root certificate. The server, if it requests + a certificate, does however not accept any path validation + errors. It is configurable if the server is to accept or reject an "empty" certificate as response to a certificate request.</p> </section> @@ -123,25 +136,24 @@ <section> <title>TLS Sessions</title> - <p>From the TLS RFC "A TLS session is an association between a - client and a server. Sessions are created by the handshake + <p>From the TLS RFC: "A TLS session is an association between a + client and a server. Sessions are created by the handshake protocol. Sessions define a set of cryptographic security parameters, which can be shared among multiple connections. Sessions are used to avoid the expensive negotiation of new security parameters for each connection."</p> - <p>Session data is by default kept by the SSL application in a - memory storage hence session data will be lost at application - restart or takeover. Users may define their own callback module + <p>Session data is by default kept by the <c>ssl</c> application in a + memory storage, hence session data is lost at application + restart or takeover. Users can define their own callback module to handle session data storage if persistent data storage is - required. Session data will also be invalidated after 24 hours - from it was saved, for security reasons. It is of course - possible to configure the amount of time the session data should be - saved.</p> + required. Session data is also invalidated after 24 hours + from it was saved, for security reasons. 
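As a hedged sketch of the server-side configurability described in the peer-authentication text above, a listen socket that requests a peer certificate and rejects an "empty" one could be opened as follows (the PEM file names are placeholders):
<code type="erl">
%% Sketch: require and verify a client certificate.
listen_mutual_auth(Port) ->
    ssl:listen(Port, [{certfile, "server-cert.pem"},
                      {keyfile, "server-key.pem"},
                      {cacertfile, "cacerts.pem"},
                      {verify, verify_peer},
                      {fail_if_no_peer_cert, true}]).
</code>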
The amount of time the + session data is to be saved can be configured.</p> - <p>SSL clients will by default try to reuse an available session, - SSL servers will by default agree to reuse sessions when clients - ask to do so.</p> + <p>By default the SSL clients try to reuse an available session and + by default the SSL servers agree to reuse sessions when clients + ask for it.</p> </section> </chapter> diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml index 9f87d31e90..9cd16c5f58 100644 --- a/lib/ssl/doc/src/ssl_session_cache_api.xml +++ b/lib/ssl/doc/src/ssl_session_cache_api.xml @@ -21,42 +21,54 @@ </legalnotice> <title>ssl</title> + <prepared></prepared> + <docno></docno> + <date></date> + <rev></rev> <file>ssl_session_cache_api.xml</file> </header> <module>ssl_session_cache_api</module> - <modulesummary>Defines the API for the TLS session cache so - that the data storage scheme can be replaced by - defining a new callback module implementing this API.</modulesummary> + <modulesummary>TLS session cache API</modulesummary> + <description>Defines the API for the TLS session cache so + that the data storage scheme can be replaced by + defining a new callback module implementing this API.</description> <section> - <title>Common Data Types</title> + <title>DATA TYPES</title> - <p>The following data types are used in the functions below: - </p> + <p>The following data types are used in the functions for + <c>ssl_session_cache_api</c>:</p> - <p><c>cache_ref() = opaque()</c></p> - - <p><c>key() = {partialkey(), session_id()}</c></p> - - <p><c>partialkey() = opaque()</c></p> - - <p><c>session_id() = binary()</c></p> + <taglist> + <tag><c>cache_ref()</c></tag> + <item><p>= <c>opaque()</c></p></item> + + <tag><c>key()</c></tag> + <item><p>= <c>{partialkey(), session_id()}</c></p></item> + + <tag><c>partialkey()</c></tag> + <item><p>= <c>opaque()</c></p></item> + + <tag><c>session_id()</c></tag> + <item><p>= <c>binary()</c></p></item> + + <tag><c>session()</c></tag> + <item><p>= <c>opaque()</c></p></item> + </taglist> - <p><c>session() = opaque()</c></p> - </section> <funcs> <func> <name>delete(Cache, Key) -> _</name> - <fsummary></fsummary> + <fsummary>Deletes a cache entry.</fsummary> <type> - <v> Cache = cache_ref()</v> - <v> Key = key()</v> + <v>Cache = cache_ref()</v> + <v>Key = key()</v> </type> <desc> - <p> Deletes a cache entry. Will only be called from the cache + <p>Deletes a cache entry. Is only called from the cache handling process. </p> </desc> @@ -69,49 +81,50 @@ <v></v> </type> <desc> - <p>Calls Fun(Elem, AccIn) on successive elements of the - cache, starting with AccIn == Acc0. Fun/2 must return a new - accumulator which is passed to the next call. The function returns - the final value of the accumulator. Acc0 is returned if the cache is - empty. + <p>Calls <c>Fun(Elem, AccIn)</c> on successive elements of the + cache, starting with <c>AccIn == Acc0</c>. <c>Fun/2</c> must + return a new accumulator, which is passed to the next call. + The function returns the final value of the accumulator. + <c>Acc0</c> is returned if the cache is empty. </p> </desc> </func> <func> <name>init(Args) -> opaque() </name> - <fsummary>Return cache reference</fsummary> + <fsummary>Returns cache reference.</fsummary> <type> <v>Args = proplists:proplist()</v> - <d>Will always include the property {role, client | server}. Currently this - is the only predefined property, there may also be user defined properties. 
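As a small usage sketch of the foldl/3 callback described above, the number of cached sessions could be counted as follows (CacheMod and Cache stand for the configured callback module and its cache reference; both names are examples):
<code type="erl">
%% Sketch: fold over the cache, ignoring each element and counting.
count_sessions(CacheMod, Cache) ->
    CacheMod:foldl(fun(_Elem, N) -> N + 1 end, 0, Cache).
</code>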
- <seealso marker="ssl_app"> See also application environment variable - session_cb_init_args</seealso> - </d> </type> <desc> + <p>Includes property <c>{role, client | server}</c>. + Currently this is the only predefined property, + there can also be user-defined properties. See also + application environment variable + <seealso marker="ssl_app">session_cb_init_args</seealso>. + </p> <p>Performs possible initializations of the cache and returns - a reference to it that will be used as parameter to the other - API functions. Will be called by the cache handling processes - init function, hence putting the same requirements on it as a - normal process init function. Note that this function will be - called twice when starting the ssl application, once with the - role client and once with the role server, as the ssl application - must be prepared to take on both roles. + a reference to it that is used as parameter to the other + API functions. Is called by the cache handling processes + <c>init</c> function, hence putting the same requirements on it + as a normal process <c>init</c> function. This function is + called twice when starting the <c>ssl</c> application, once with + the role client and once with the role server, as the <c>ssl</c> + application must be prepared to take on both roles. </p> </desc> </func> <func> <name>lookup(Cache, Key) -> Entry</name> - <fsummary> Looks up a cache entry.</fsummary> + <fsummary>Looks up a cache entry.</fsummary> <type> - <v> Cache = cache_ref()</v> - <v> Key = key()</v> - <v> Entry = session() | undefined </v> + <v>Cache = cache_ref()</v> + <v>Key = key()</v> + <v>Entry = session() | undefined</v> </type> <desc> - <p>Looks up a cache entry. Should be callable from any + <p>Looks up a cache entry. Is to be callable from any process. </p> </desc> @@ -119,14 +132,14 @@ <func> <name>select_session(Cache, PartialKey) -> [session()]</name> - <fsummary>Selects a sessions that could be reused.</fsummary> + <fsummary>Selects sessions that can be reused.</fsummary> <type> - <v> Cache = cache_ref()</v> - <v> PartialKey = partialkey()</v> - <v> Session = session()</v> + <v>Cache = cache_ref()</v> + <v>PartialKey = partialkey()</v> + <v>Session = session()</v> </type> <desc> - <p>Selects a sessions that could be reused. Should be callable + <p>Selects sessions that can be reused. Is to be callable from any process. </p> </desc> @@ -137,7 +150,7 @@ <fsummary>Called by the process that handles the cache when it is about to terminate.</fsummary> <type> - <v>Cache = term() - as returned by init/0</v> + <v>Cache = term() - as returned by init/0</v> </type> <desc> <p>Takes care of possible cleanup that is needed when the @@ -148,15 +161,15 @@ <func> <name>update(Cache, Key, Session) -> _</name> - <fsummary> Caches a new session or updates a already cached one.</fsummary> + <fsummary>Caches a new session or updates an already cached one.</fsummary> <type> - <v> Cache = cache_ref()</v> - <v> Key = key()</v> - <v> Session = session()</v> + <v>Cache = cache_ref()</v> + <v>Key = key()</v> + <v>Session = session()</v> </type> <desc> - <p> Caches a new session or updates a already cached one. Will - only be called from the cache handling process. + <p>Caches a new session or updates an already cached one. Is + only called from the cache handling process. 
</p> </desc> </func> diff --git a/lib/ssl/doc/src/usersguide.xml b/lib/ssl/doc/src/usersguide.xml index b1c7190085..6fce022507 100644 --- a/lib/ssl/doc/src/usersguide.xml +++ b/lib/ssl/doc/src/usersguide.xml @@ -23,14 +23,17 @@ <title>SSL User's Guide</title> <prepared>OTP Team</prepared> + <docno></docno> <date>2003-05-26</date> + <rev></rev> <file>usersguide.sgml</file> </header> <description> - <p>The <em>SSL</em> application provides secure communication over + <p>The Secure Socket Layer (SSL) application provides secure communication over sockets. </p> </description> + <xi:include href="ssl_introduction.xml"/> <xi:include href="ssl_protocol.xml"/> <xi:include href="using_ssl.xml"/> <xi:include href="ssl_distribution.xml"/> diff --git a/lib/ssl/doc/src/using_ssl.xml b/lib/ssl/doc/src/using_ssl.xml index cce388d02a..01b7970fb6 100644 --- a/lib/ssl/doc/src/using_ssl.xml +++ b/lib/ssl/doc/src/using_ssl.xml @@ -21,126 +21,131 @@ </legalnotice> - <title>Using the SSL API</title> + <title>Using SSL API</title> + <prepared></prepared> + <responsible></responsible> + <docno></docno> + <approved></approved> + <checked></checked> + <date></date> + <rev></rev> <file>using_ssl.xml</file> </header> - - <section> - <title>General information</title> - <p>To see relevant version information for ssl you can - call ssl:versions/0</p> + <p>To see relevant version information for ssl, call + <seealso marker="ssl:versions-0"><c>ssl:versions/0</c></seealso> + .</p> - <p>To see all supported cipher suites - call ssl:cipher_suites/0. Note that available cipher suites - for a connection will depend on your certificate. It is also - possible to specify a specific cipher suite(s) that you - want your connection to use. Default is to use the strongest - available.</p> - - </section> + <p>To see all supported cipher suites, call <seealso marker="ssl:cipher_suites-1"><c>ssl:cipher_suites(all)</c> </seealso>. + The available cipher suites for a connection depend on your certificate. + Specific cipher suites that you want your connection to use can also be + specified. Default is to use the strongest available.</p> <section> - <title>Setting up connections</title> + <title>Setting up Connections</title> - <p>Here follows some small example of how to set up client/server connections - using the erlang shell. The returned value of the sslsocket has been abbreviated with - <c>[...]</c> as it can be fairly large and is opaque.</p> + <p>This section shows a small example of how to set up client/server connections + using the Erlang shell. The returned value of the <c>sslsocket</c> is abbreviated + with <c>[...]</c> as it can be fairly large and is opaque.</p> <section> - <title>Minmal example</title> + <title>Minimal Example</title> - <note><p> The minimal setup is not the most secure setup of ssl.</p> + <note><p> The minimal setup is not the most secure setup of SSL.</p> </note> - - <p> Start server side</p> + + <p>To set up client/server connections:</p> + + <p><em>Step 1:</em> Start the server side:</p> <code type="erl">1 server> ssl:start(). ok</code> - <p>Create an ssl listen socket</p> + <p><em>Step 2:</em> Create an SSL listen socket:</p> <code type="erl">2 server> {ok, ListenSocket} = ssl:listen(9999, [{certfile, "cert.pem"}, {keyfile, "key.pem"},{reuseaddr, true}]). {ok,{sslsocket, [...]}}</code> - <p>Do a transport accept on the ssl listen socket</p> + <p><em>Step 3:</em> Do a transport accept on the SSL listen socket:</p> <code type="erl">3 server> {ok, Socket} = ssl:transport_accept(ListenSocket). 
{ok,{sslsocket, [...]}}</code> - <p>Start client side</p> + <p><em>Step 4:</em> Start the client side:</p> <code type="erl">1 client> ssl:start(). ok</code> <code type="erl">2 client> {ok, Socket} = ssl:connect("localhost", 9999, [], infinity). {ok,{sslsocket, [...]}}</code> - <p>Do the ssl handshake</p> + <p><em>Step 5:</em> Do the SSL handshake:</p> <code type="erl">4 server> ok = ssl:ssl_accept(Socket). ok</code> - <p>Send a messag over ssl</p> + <p><em>Step 6:</em> Send a message over SSL:</p> <code type="erl">5 server> ssl:send(Socket, "foo"). ok</code> - <p>Flush the shell message queue to see that we got the message - sent on the server side</p> + <p><em>Step 7:</em> Flush the shell message queue to see that the message + was sent on the server side:</p> <code type="erl">3 client> flush(). Shell got {ssl,{sslsocket,[...]},"foo"} ok</code> </section> <section> - <title>Upgrade example</title> + <title>Upgrade Example</title> - <note><p> To upgrade a TCP/IP connection to an ssl connection the - client and server have to aggre to do so. Agreement - may be accompliced by using a protocol such the one used by HTTP - specified in RFC 2817.</p> </note> + <note><p>To upgrade a TCP/IP connection to an SSL connection, the + client and server must agree to do so. The agreement + can be accomplished by using a protocol, for example, the one used by HTTP + specified in RFC 2817.</p></note> + + <p>To upgrade to an SSL connection:</p> - <p>Start server side</p> + <p><em>Step 1:</em> Start the server side:</p> <code type="erl">1 server> ssl:start(). ok</code> - <p>Create a normal tcp listen socket</p> + <p><em>Step 2:</em> Create a normal TCP listen socket:</p> <code type="erl">2 server> {ok, ListenSocket} = gen_tcp:listen(9999, [{reuseaddr, true}]). {ok, #Port<0.475>}</code> - <p>Accept client connection</p> + <p><em>Step 3:</em> Accept client connection:</p> <code type="erl">3 server> {ok, Socket} = gen_tcp:accept(ListenSocket). {ok, #Port<0.476>}</code> - <p>Start client side</p> + <p><em>Step 4:</em> Start the client side:</p> <code type="erl">1 client> ssl:start(). ok</code> <code type="erl">2 client> {ok, Socket} = gen_tcp:connect("localhost", 9999, [], infinity).</code> - <p>Make sure active is set to false before trying - to upgrade a connection to an ssl connection, otherwhise - ssl handshake messages may be deliverd to the wrong process.</p> + <p><em>Step 5:</em> Ensure <c>active</c> is set to <c>false</c> before trying + to upgrade a connection to an SSL connection, otherwise + SSL handshake messages can be delivered to the wrong process:</p> <code type="erl">4 server> inet:setopts(Socket, [{active, false}]). ok</code> - <p>Do the ssl handshake.</p> + <p><em>Step 6:</em> Do the SSL handshake:</p> <code type="erl">5 server> {ok, SSLSocket} = ssl:ssl_accept(Socket, [{cacertfile, "cacerts.pem"}, {certfile, "cert.pem"}, {keyfile, "key.pem"}]). {ok,{sslsocket,[...]}}</code> - <p> Upgrade to an ssl connection. Note that the client and server - must agree upon the upgrade and the server must call - ssl:accept/2 before the client calls ssl:connect/3.</p> + <p><em>Step 7:</em> Upgrade to an SSL connection. The client and server + must agree upon the upgrade. The server must call + <c>ssl:accept/2</c> before the client calls <c>ssl:connect/3.</c></p> <code type="erl">3 client>{ok, SSLSocket} = ssl:connect(Socket, [{cacertfile, "cacerts.pem"}, {certfile, "cert.pem"}, {keyfile, "key.pem"}], infinity). 
{ok,{sslsocket,[...]}}</code> - <p>Send a messag over ssl</p> + <p><em>Step 8:</em> Send a message over SSL:</p> <code type="erl">4 client> ssl:send(SSLSocket, "foo"). ok</code> - <p>Set active true on the ssl socket</p> + <p><em>Step 9:</em> Set <c>active true</c> on the SSL socket:</p> <code type="erl">4 server> ssl:setopts(SSLSocket, [{active, true}]). ok</code> - <p>Flush the shell message queue to see that we got the message - sent on the client side</p> + <p><em>Step 10:</em> Flush the shell message queue to see that the message + was sent on the client side:</p> <code type="erl">5 server> flush(). Shell got {ssl,{sslsocket,[...]},"foo"} ok</code> diff --git a/lib/stdlib/doc/src/notes.xml b/lib/stdlib/doc/src/notes.xml index 8582bfc9f9..301a5ee2e8 100644 --- a/lib/stdlib/doc/src/notes.xml +++ b/lib/stdlib/doc/src/notes.xml @@ -30,6 +30,41 @@ </header> <p>This document describes the changes made to the STDLIB application.</p> +<section><title>STDLIB 2.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Behaviour of character types \d, \w and \s has always + been to not match characters with value above 255, not + 128, i.e. they are limited to ISO-Latin-1 and not ASCII</p> + <p> + Own Id: OTP-12521</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + c:m/1 now displays the module's MD5 sum.</p> + <p> + Own Id: OTP-12500</p> + </item> + <item> + <p> + Make ets:i/1 handle binary input from IO server.</p> + <p> + Own Id: OTP-12550</p> + </item> + </list> + </section> + +</section> + <section><title>STDLIB 2.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/stdlib/src/stdlib.appup.src b/lib/stdlib/src/stdlib.appup.src index 5900fd3ff3..ee87a8ddb2 100644 --- a/lib/stdlib/src/stdlib.appup.src +++ b/lib/stdlib/src/stdlib.appup.src @@ -17,9 +17,9 @@ %% %CopyrightEnd% {"%VSN%", %% Up from - max one major revision back - [{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3 + [{<<"2\\.[1-3](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3 {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], %% 17.0 %% Down to - max one major revision back - [{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3 + [{<<"2\\.[1-3](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3 {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}] %% 17.0 }. diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl index 5936fe9133..5774d774b5 100644 --- a/lib/stdlib/test/ets_SUITE.erl +++ b/lib/stdlib/test/ets_SUITE.erl @@ -5051,36 +5051,40 @@ colliding_names(Name) -> grow_shrink(Config) when is_list(Config) -> ?line EtsMem = etsmem(), - ?line grow_shrink_0(lists:seq(3071, 5000), EtsMem), - ?line verify_etsmem(EtsMem). -grow_shrink_0([N|Ns], EtsMem) -> - ?line grow_shrink_1(N, [set]), - ?line grow_shrink_1(N, [ordered_set]), - %% Verifying ets-memory here takes too long time, since - %% lock-free allocators were introduced... - %% ?line verify_etsmem(EtsMem), - grow_shrink_0(Ns, EtsMem); -grow_shrink_0([], _) -> ok. - -grow_shrink_1(N, Flags) -> - ?line T = ets_new(a, Flags), - ?line grow_shrink_2(N, N, T), - ?line ets:delete(T). 
+ Set = ets_new(a, [set]), + grow_shrink_0(0, 3071, 3000, 5000, Set), + ets:delete(Set), + + %OrdSet = ets_new(a, [ordered_set]), + %grow_shrink_0(0, lists:seq(3071, 5000), OrdSet), + %ets:delete(OrdSet), -grow_shrink_2(0, Orig, T) -> - List = [{I,a} || I <- lists:seq(1, Orig)], - List = lists:sort(ets:tab2list(T)), - grow_shrink_3(Orig, T); -grow_shrink_2(N, Orig, T) -> + ?line verify_etsmem(EtsMem). + +grow_shrink_0(N, _, _, Max, _) when N >= Max -> + ok; +grow_shrink_0(N0, GrowN, ShrinkN, Max, T) -> + N1 = grow_shrink_1(N0, GrowN, ShrinkN, T), + grow_shrink_0(N1, GrowN, ShrinkN, Max, T). + +grow_shrink_1(N0, GrowN, ShrinkN, T) -> + N1 = grow_shrink_2(N0+1, N0 + GrowN, T), + grow_shrink_3(N1, N1 - ShrinkN, T). + +grow_shrink_2(N, GrowTo, _) when N > GrowTo -> + %io:format("Grown to ~p\n", [GrowTo]), + GrowTo; +grow_shrink_2(N, GrowTo, T) -> true = ets:insert(T, {N,a}), - grow_shrink_2(N-1, Orig, T). + grow_shrink_2(N+1, GrowTo, T). -grow_shrink_3(0, T) -> - [] = ets:tab2list(T); -grow_shrink_3(N, T) -> +grow_shrink_3(N, ShrinkTo, _) when N =< ShrinkTo -> + %io:format("Shrunk to ~p\n", [ShrinkTo]), + ShrinkTo; +grow_shrink_3(N, ShrinkTo, T) -> true = ets:delete(T, N), - grow_shrink_3(N-1, T). + grow_shrink_3(N-1, ShrinkTo, T). grow_pseudo_deleted(doc) -> ["Grow a table that still contains pseudo-deleted objects"]; grow_pseudo_deleted(suite) -> []; diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl index 0b7b96da8e..0a1b6dd2ba 100644 --- a/lib/stdlib/test/qlc_SUITE.erl +++ b/lib/stdlib/test/qlc_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2014. All Rights Reserved. +%% Copyright Ericsson AB 2004-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -3021,8 +3021,9 @@ lookup2(Config) when is_list(Config) -> end, [{3,true},{4,true}])">>, <<"%% Only guards are inspected. No lookup. 
- E1 = create_ets(1, 10), - E2 = ets:new(join, []), + E1 = ets:new(e, [ordered_set]), + true = ets:insert(E1, [{1,1}, {2,2}, {3,3}, {4,4}, {5,5}]), + E2 = ets:new(join, [ordered_set]), true = ets:insert(E2, [{true,1},{false,2}]), Q = qlc:q([{X,Z} || {_,X} <- ets:table(E1), {Y,Z} <- ets:table(E2), diff --git a/lib/stdlib/vsn.mk b/lib/stdlib/vsn.mk index 5be130bac9..f57f31c8de 100644 --- a/lib/stdlib/vsn.mk +++ b/lib/stdlib/vsn.mk @@ -1 +1 @@ -STDLIB_VSN = 2.3 +STDLIB_VSN = 2.4 diff --git a/lib/syntax_tools/doc/src/notes.xml b/lib/syntax_tools/doc/src/notes.xml index b0f11bb243..408f6d5bac 100644 --- a/lib/syntax_tools/doc/src/notes.xml +++ b/lib/syntax_tools/doc/src/notes.xml @@ -31,6 +31,21 @@ <p>This document describes the changes made to the Syntax_Tools application.</p> +<section><title>Syntax_Tools 1.6.18</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix bad format of error in epp_dodger:parse_file/3</p> + <p> + Own Id: OTP-12406</p> + </item> + </list> + </section> + +</section> + <section><title>Syntax_Tools 1.6.17</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/syntax_tools/vsn.mk b/lib/syntax_tools/vsn.mk index 673362d01d..1c42ef0ddb 100644 --- a/lib/syntax_tools/vsn.mk +++ b/lib/syntax_tools/vsn.mk @@ -1 +1 @@ -SYNTAX_TOOLS_VSN = 1.6.17 +SYNTAX_TOOLS_VSN = 1.6.18 diff --git a/lib/test_server/doc/src/notes.xml b/lib/test_server/doc/src/notes.xml index 68dc1fec88..f21c32a304 100644 --- a/lib/test_server/doc/src/notes.xml +++ b/lib/test_server/doc/src/notes.xml @@ -32,6 +32,51 @@ <file>notes.xml</file> </header> +<section><title>Test_Server 3.8</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + When installing test suites in a cross compilation + environment, ts_install was not able to read the values + of the environment variables specified in the + configuration file. This has been fixed.</p> + <p> + Own Id: OTP-11441</p> + </item> + <item> + <p> + Printouts by means of ct:log/2/3 or ct:pal/2/3 from the + hook functions on_tc_fail/2 and on_tc_skip/2 would (quite + unexpectedly) end up in the "unexpected i/o" log file + instead of in the test case log file. 
This behaviour has + been changed so that now, all printouts (including stdio + printouts) from these hook functions will be routed to + the test case log file.</p> + <p> + Own Id: OTP-12468</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + The format of the information printed on top of the test + case (and configuration function) log file has been + slightly modified, mainly in order to make the start + configuration data easier to read and interpret.</p> + <p> + Own Id: OTP-12518 Aux Id: seq12808 </p> + </item> + </list> + </section> + +</section> + <section><title>Test_Server 3.7.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/test_server/vsn.mk b/lib/test_server/vsn.mk index 18d7583c35..77225b4cad 100644 --- a/lib/test_server/vsn.mk +++ b/lib/test_server/vsn.mk @@ -1 +1 @@ -TEST_SERVER_VSN = 3.7.2 +TEST_SERVER_VSN = 3.8 diff --git a/lib/tools/doc/src/notes.xml b/lib/tools/doc/src/notes.xml index 6f9563bb68..38b57b73a9 100644 --- a/lib/tools/doc/src/notes.xml +++ b/lib/tools/doc/src/notes.xml @@ -30,6 +30,32 @@ </header> <p>This document describes the changes made to the Tools application.</p> +<section><title>Tools 2.7.2</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix lcnt sorting and printout of histograms.</p> + <p> + Own Id: OTP-12364</p> + </item> + <item> + <p> Fix a Unicode bug in the <c>tags</c> module. </p> + <p> + Own Id: OTP-12567</p> + </item> + <item> + <p> + Fix tags completion in erlang.el for GNU Emacs 23+</p> + <p> + Own Id: OTP-12583</p> + </item> + </list> + </section> + +</section> + <section><title>Tools 2.7.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/tools/vsn.mk b/lib/tools/vsn.mk index d9651c30e3..3b3202d38b 100644 --- a/lib/tools/vsn.mk +++ b/lib/tools/vsn.mk @@ -1 +1 @@ -TOOLS_VSN = 2.7.1 +TOOLS_VSN = 2.7.2 diff --git a/lib/wx/api_gen/wx_gen_cpp.erl b/lib/wx/api_gen/wx_gen_cpp.erl index 107d064f4a..720526b3b9 100644 --- a/lib/wx/api_gen/wx_gen_cpp.erl +++ b/lib/wx/api_gen/wx_gen_cpp.erl @@ -975,6 +975,13 @@ build_ret(Name = "ev->m_scanCode",_,#type{base=bool,single=true,by_val=true}) -> w(" rt.addBool(~s);~n",[Name]), w("#else~n rt.addBool(false);~n",[]), w("#endif~n",[]); +build_ret(Name = "ev->m_metaDown",_,#type{base=bool,single=true,by_val=true}) -> + %% Hardcoded workaround for MAC on 2.9 and later + w("#if wxCHECK_VERSION(2,9,0) && defined(_MACOSX)~n", []), + w(" rt.addBool(ev->m_rawControlDown);~n",[]), + w("#else~n rt.addBool(~s);~n",[Name]), + w("#endif~n",[]); + build_ret(Name,_,#type{base=bool,single=true,by_val=true}) -> w(" rt.addBool(~s);~n",[Name]); build_ret(Name,{arg, both},#type{base=int,single=true,mod=M}) -> diff --git a/lib/wx/api_gen/wxapi.conf b/lib/wx/api_gen/wxapi.conf index 2e961cce98..09f21af0f3 100644 --- a/lib/wx/api_gen/wxapi.conf +++ b/lib/wx/api_gen/wxapi.conf @@ -32,7 +32,10 @@ wxALWAYS_NATIVE_DOUBLE_BUFFER, wxGAUGE_EMULATE_INDETERMINATE_MODE, wxTR_DEFAULT_STYLE, - wxSL_LABELS + wxSL_LABELS, + wxCURSOR_DEFAULT, + wxCURSOR_ARROWWAIT, + wxCURSOR_MAX ]}. {gvars, diff --git a/lib/wx/c_src/gen/wxe_events.cpp b/lib/wx/c_src/gen/wxe_events.cpp index 255b36c2fa..ae85931d8d 100644 --- a/lib/wx/c_src/gen/wxe_events.cpp +++ b/lib/wx/c_src/gen/wxe_events.cpp @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2008-2014. All Rights Reserved. + * Copyright Ericsson AB 2008-2015. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -394,7 +394,11 @@ case 167: {// wxMouseEvent rt.addBool(ev->m_controlDown); rt.addBool(ev->m_shiftDown); rt.addBool(ev->m_altDown); +#if wxCHECK_VERSION(2,9,0) && defined(_MACOSX) + rt.addBool(ev->m_rawControlDown); +#else rt.addBool(ev->m_metaDown); +#endif rt.addInt(ev->m_wheelRotation); rt.addInt(ev->m_wheelDelta); rt.addInt(ev->m_linesPerAction); @@ -419,7 +423,11 @@ case 169: {// wxKeyEvent rt.addBool(ev->m_controlDown); rt.addBool(ev->m_shiftDown); rt.addBool(ev->m_altDown); +#if wxCHECK_VERSION(2,9,0) && defined(_MACOSX) + rt.addBool(ev->m_rawControlDown); +#else rt.addBool(ev->m_metaDown); +#endif #if !wxCHECK_VERSION(2,9,0) rt.addBool(ev->m_scanCode); #else diff --git a/lib/wx/c_src/gen/wxe_init.cpp b/lib/wx/c_src/gen/wxe_init.cpp index 3a4bced790..1673f2a1b3 100644 --- a/lib/wx/c_src/gen/wxe_init.cpp +++ b/lib/wx/c_src/gen/wxe_init.cpp @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2008-2014. All Rights Reserved. + * Copyright Ericsson AB 2008-2015. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -56,6 +56,12 @@ void WxeApp::init_nonconsts(wxeMemEnv *memenv, ErlDrvTermData caller) { rt.addTupleCount(2); rt.addAtom("wxMOD_CMD"); rt.addInt(wxMOD_CMD); rt.addTupleCount(2); + rt.addAtom("wxCURSOR_ARROWWAIT"); rt.addInt(wxCURSOR_ARROWWAIT); + rt.addTupleCount(2); + rt.addAtom("wxCURSOR_DEFAULT"); rt.addInt(wxCURSOR_DEFAULT); + rt.addTupleCount(2); + rt.addAtom("wxCURSOR_MAX"); rt.addInt(wxCURSOR_MAX); + rt.addTupleCount(2); rt.addAtom("wxBLACK"); rt.add(*(wxBLACK)); rt.addTupleCount(2); rt.addAtom("wxBLACK_BRUSH"); rt.addRef(getRef((void *)wxBLACK_BRUSH,memenv),"wxBrush"); @@ -138,7 +144,7 @@ void WxeApp::init_nonconsts(wxeMemEnv *memenv, ErlDrvTermData caller) { rt.addTupleCount(2); rt.addAtom("wxWHITE_PEN"); rt.addRef(getRef((void *)wxWHITE_PEN,memenv),"wxPen"); rt.addTupleCount(2); - rt.endList(57); + rt.endList(60); rt.addTupleCount(2); rt.send(); } diff --git a/lib/wx/configure.in b/lib/wx/configure.in index 4c4d4f41a8..fbdddb9220 100644 --- a/lib/wx/configure.in +++ b/lib/wx/configure.in @@ -441,19 +441,26 @@ else else CWXWIN_PROG=`cygpath -d "$PROGRAMFILES" | cygpath -f - 2>/dev/null` fi - CWXWIN3=$CWXWIN_PROG/wxWidgets-?.*.* - CWXWIN4=$CWXWIN_PROG/wxMSW-?.*.* - CWX_DOCUMENTED="/opt/local/pgm/wxMSW-?.*.* /opt/local/pgm/wxWidgets-?.*.*" + + CWXWIN3="$CWXWIN_PROG/wxWidgets-3.*.* $CWXWIN_PROG/wxWidgets-2.*.*" + CWXWIN4="$CWXWIN_PROG/wxMSW-3.*.* $CWXWIN_PROG/wxMSW-2.*.*" + + DOC_OPT=/opt/local/pgm + CWX_DOCUMENTED="$DOC_OPT/wxWidgets-2.*.* $DOC_OPT/wxMSW-2.*.*" + CWX_DOCUMENTED="$DOC_OPT/wxWidgets-3.*.* $DOC_OPT/wxMSW-3.*.* $CWX_DOCUMENTED" + case $ac_cv_sizeof_void_p in 8) - CWX_DOCUMENTED="/opt/local64/pgm/wxMSW-?.*.* /opt/local64/pgm/wxWidgets-?.*.* $CWX_DOCUMENTED" + DOC_OPT64=/opt/local64/pgm + CWX_DOCUMENTED="$DOC_OPT64/wxWidgets-2.*.* $DOC_OPT64/wxMSW-2.*.* $CWX_DOCUMENTED" + CWX_DOCUMENTED="$DOC_OPT64/wxWidgets-3.*.* $DOC_OPT64/wxMSW-3.*.* $CWX_DOCUMENTED" ;; *) true ;; - esac - - CWXPATH="$CWXWIN0 $CWXWIN1 $CWXWIN2 $CWX_DOCUMENTED $CWXWIN3.* $CWXWIN4.*" + esac + + CWXPATH="$CWXWIN0 $CWXWIN1 $CWXWIN2 $CWX_DOCUMENTED $CWXWIN3 $CWXWIN4" for dir in $CWXPATH; do AC_MSG_NOTICE(Checking: [$dir]) diff --git a/lib/wx/doc/src/notes.xml b/lib/wx/doc/src/notes.xml index 52087398e7..682ab48ca0 100644 --- 
a/lib/wx/doc/src/notes.xml +++ b/lib/wx/doc/src/notes.xml @@ -31,6 +31,21 @@ <p>This document describes the changes made to the wxErlang application.</p> +<section><title>Wx 1.3.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix timing related crash during wx application stop.</p> + <p> + Own Id: OTP-12374</p> + </item> + </list> + </section> + +</section> + <section><title>Wx 1.3.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/wx/examples/demo/demo.erl b/lib/wx/examples/demo/demo.erl index 2f560096f5..65fb05cd94 100644 --- a/lib/wx/examples/demo/demo.erl +++ b/lib/wx/examples/demo/demo.erl @@ -256,9 +256,17 @@ handle_event(#wx{id = Id, wx_misc:launchDefaultBrowser("http://www.erlang.org/doc/apps/wx/part_frame.html"), {noreply, State}; ?wxID_ABOUT -> + WxWVer = io_lib:format("~p.~p.~p.~p", + [?wxMAJOR_VERSION, ?wxMINOR_VERSION, + ?wxRELEASE_NUMBER, ?wxSUBRELEASE_NUMBER]), + application:load(wx), + {ok, WxVsn} = application:get_key(wx, vsn), AboutString = "Demo of various widgets\n" - "Authors: Olle & Dan", + "Authors: Olle & Dan\n\n" ++ + "Frontend: wx-" ++ WxVsn ++ + "\nBackend: wxWidgets-" ++ lists:flatten(WxWVer), + wxMessageDialog:showModal(wxMessageDialog:new(State#state.win, AboutString, [{style, ?wxOK bor diff --git a/lib/wx/include/wx.hrl b/lib/wx/include/wx.hrl index 348daf64ce..9b913c7c00 100644 --- a/lib/wx/include/wx.hrl +++ b/lib/wx/include/wx.hrl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2014. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -1883,9 +1883,9 @@ -define(wxCURSOR_WAIT, 24). -define(wxCURSOR_WATCH, 25). -define(wxCURSOR_BLANK, 26). --define(wxCURSOR_DEFAULT, 27). --define(wxCURSOR_ARROWWAIT, 28). --define(wxCURSOR_MAX, 29). +-define(wxCURSOR_DEFAULT, wxe_util:get_const(wxCURSOR_DEFAULT)). +-define(wxCURSOR_ARROWWAIT, wxe_util:get_const(wxCURSOR_ARROWWAIT)). +-define(wxCURSOR_MAX, wxe_util:get_const(wxCURSOR_MAX)). % From "generic_2laywin.h" -define(wxLAYOUT_QUERY, 256). -define(wxLAYOUT_MRU_LENGTH, 16). 
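The wx.hrl hunk above turns ?wxCURSOR_DEFAULT, ?wxCURSOR_ARROWWAIT and ?wxCURSOR_MAX into runtime lookups via wxe_util:get_const/1, and wxe_init.cpp now sends those values from the C++ side at driver start, presumably because the cursor enumeration values differ between the wxWidgets versions the driver may be built against. Below is a minimal sketch of how application code consumes such a constant; the module name and window setup are illustrative only and not part of the patch, and the calls assume the ordinary wxErlang API (wx:new/0, wxFrame:new/3, wxCursor:new/1, wxWindow:setCursor/2):

    -module(cursor_demo).
    -export([busy_frame/0]).

    -include_lib("wx/include/wx.hrl").   %% provides the ?wxCURSOR_* macros

    %% Open a frame and give it a "busy" arrow cursor. Because
    %% ?wxCURSOR_ARROWWAIT now expands to wxe_util:get_const(wxCURSOR_ARROWWAIT),
    %% it must only be evaluated after wx:new() has started the driver.
    busy_frame() ->
        Wx = wx:new(),
        Frame = wxFrame:new(Wx, ?wxID_ANY, "Busy"),
        Cursor = wxCursor:new(?wxCURSOR_ARROWWAIT),
        wxWindow:setCursor(Frame, Cursor),
        wxFrame:show(Frame),
        Frame.

The practical effect of the change is that the same compiled beam code can be loaded against a driver built with either wxWidgets 2.8 or 3.x and still pick up the correct cursor values.
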
diff --git a/lib/wx/vsn.mk b/lib/wx/vsn.mk index 78c24ec093..942d4c0d6f 100644 --- a/lib/wx/vsn.mk +++ b/lib/wx/vsn.mk @@ -1 +1 @@ -WX_VSN = 1.3.2 +WX_VSN = 1.3.3 diff --git a/otp_versions.table b/otp_versions.table index 41c05e79d8..64ffd82809 100644 --- a/otp_versions.table +++ b/otp_versions.table @@ -1,3 +1,4 @@ +OTP-17.5 : asn1-3.0.4 common_test-1.10 compiler-5.0.4 crypto-3.5 debugger-4.0.3 dialyzer-2.7.4 diameter-1.9 eldap-1.1.1 erts-6.4 hipe-3.11.3 inets-5.10.6 kernel-3.2 mnesia-4.12.5 observer-2.0.4 os_mon-2.3.1 public_key-0.23 runtime_tools-1.8.16 ssh-3.2 ssl-6.0 stdlib-2.4 syntax_tools-1.6.18 test_server-3.8 tools-2.7.2 wx-1.3.3 # cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 edoc-0.7.16 erl_docgen-0.3.7 erl_interface-3.7.20 et-1.5 eunit-2.2.9 gs-1.5.16 ic-4.3.6 jinterface-1.5.12 megaco-3.17.3 odbc-2.10.22 orber-3.7.1 ose-1.0.2 otp_mibs-1.0.10 parsetools-2.0.12 percept-0.8.10 reltool-0.6.6 sasl-2.4.1 snmp-5.1.1 typer-0.9.8 webtool-0.8.10 xmerl-1.3.7 : OTP-17.4.1 : erts-6.3.1 inets-5.10.5 # asn1-3.0.3 common_test-1.9 compiler-5.0.3 cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 crypto-3.4.2 debugger-4.0.2 dialyzer-2.7.3 diameter-1.8 edoc-0.7.16 eldap-1.1 erl_docgen-0.3.7 erl_interface-3.7.20 et-1.5 eunit-2.2.9 gs-1.5.16 hipe-3.11.2 ic-4.3.6 jinterface-1.5.12 kernel-3.1 megaco-3.17.3 mnesia-4.12.4 observer-2.0.3 odbc-2.10.22 orber-3.7.1 os_mon-2.3 ose-1.0.2 otp_mibs-1.0.10 parsetools-2.0.12 percept-0.8.10 public_key-0.22.1 reltool-0.6.6 runtime_tools-1.8.15 sasl-2.4.1 snmp-5.1.1 ssh-3.1 ssl-5.3.8 stdlib-2.3 syntax_tools-1.6.17 test_server-3.7.2 tools-2.7.1 typer-0.9.8 webtool-0.8.10 wx-1.3.2 xmerl-1.3.7 : OTP-17.4 : asn1-3.0.3 common_test-1.9 compiler-5.0.3 crypto-3.4.2 debugger-4.0.2 dialyzer-2.7.3 diameter-1.8 edoc-0.7.16 eldap-1.1 erl_docgen-0.3.7 erl_interface-3.7.20 erts-6.3 eunit-2.2.9 hipe-3.11.2 inets-5.10.4 jinterface-1.5.12 kernel-3.1 megaco-3.17.3 mnesia-4.12.4 observer-2.0.3 odbc-2.10.22 otp_mibs-1.0.10 parsetools-2.0.12 percept-0.8.10 runtime_tools-1.8.15 snmp-5.1.1 ssh-3.1 ssl-5.3.8 stdlib-2.3 syntax_tools-1.6.17 test_server-3.7.2 tools-2.7.1 wx-1.3.2 # cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 et-1.5 gs-1.5.16 ic-4.3.6 orber-3.7.1 os_mon-2.3 ose-1.0.2 public_key-0.22.1 reltool-0.6.6 sasl-2.4.1 typer-0.9.8 webtool-0.8.10 xmerl-1.3.7 : OTP-17.3.4 : erts-6.2.1 # asn1-3.0.2 common_test-1.8.2 compiler-5.0.2 cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 crypto-3.4.1 debugger-4.0.1 dialyzer-2.7.2 diameter-1.7.1 edoc-0.7.15 eldap-1.0.4 erl_docgen-0.3.6 erl_interface-3.7.19 et-1.5 eunit-2.2.8 gs-1.5.16 hipe-3.11.1 ic-4.3.6 inets-5.10.3 jinterface-1.5.11 kernel-3.0.3 megaco-3.17.2 mnesia-4.12.3 observer-2.0.2 odbc-2.10.21 orber-3.7.1 os_mon-2.3 ose-1.0.2 otp_mibs-1.0.9 parsetools-2.0.11 percept-0.8.9 public_key-0.22.1 reltool-0.6.6 runtime_tools-1.8.14 sasl-2.4.1 snmp-5.1 ssh-3.0.8 ssl-5.3.7 stdlib-2.2 syntax_tools-1.6.16 test_server-3.7.1 tools-2.7 typer-0.9.8 webtool-0.8.10 wx-1.3.1 xmerl-1.3.7 : diff --git a/system/doc/reference_manual/typespec.xml b/system/doc/reference_manual/typespec.xml index 0891dbaa9b..53ef93b60f 100644 --- a/system/doc/reference_manual/typespec.xml +++ 
b/system/doc/reference_manual/typespec.xml @@ -4,7 +4,7 @@ <chapter> <header> <copyright> - <year>2003</year><year>2014</year> + <year>2003</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -226,7 +226,7 @@ <cell><c>module()</c></cell><cell><c>atom()</c></cell> </row> <row> - <cell><c>mfa()</c></cell><cell><c>{atom(),atom(),arity()}</c></cell> + <cell><c>mfa()</c></cell><cell><c>{module(),atom(),arity()}</c></cell> </row> <row> <cell><c>arity()</c></cell><cell><c>0..255</c></cell> </row>
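The final hunk corrects the documented definition of the built-in mfa() type to {module(), atom(), arity()}. A small sketch of the type in use follows; the module and function names are made up for illustration and are not taken from the patch:

    -module(mfa_demo).
    -export([where_am_i/0]).

    %% mfa() is the triple {module(), atom(), arity()},
    %% for example {lists, reverse, 1}.
    -spec where_am_i() -> mfa().
    where_am_i() ->
        {?MODULE, where_am_i, 0}.

The same triple shape is returned by, for example, process_info(Pid, current_function).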