Diffstat (limited to 'erts/emulator/beam')
-rw-r--r--   erts/emulator/beam/bif.tab          1
-rw-r--r--   erts/emulator/beam/erl_hashmap.c    415
-rw-r--r--   erts/emulator/beam/erl_hashmap.h    3
3 files changed, 419 insertions, 0 deletions
diff --git a/erts/emulator/beam/bif.tab b/erts/emulator/beam/bif.tab
index 27ae8adcec..5ffd5b37b5 100644
--- a/erts/emulator/beam/bif.tab
+++ b/erts/emulator/beam/bif.tab
@@ -621,6 +621,7 @@ bif hashmap:find/2
bif hashmap:update/3
bif hashmap:remove/2
bif hashmap:info/1
+bif hashmap:from_list/1
bif hashmap:to_list/1
bif hashmap:new/0
bif hashmap:is_key/2
diff --git a/erts/emulator/beam/erl_hashmap.c b/erts/emulator/beam/erl_hashmap.c
index 3da2a53333..ddb55c53ce 100644
--- a/erts/emulator/beam/erl_hashmap.c
+++ b/erts/emulator/beam/erl_hashmap.c
@@ -67,14 +67,28 @@ static char *format_binary(Uint64 x, char *b) {
#endif
static Eterm hashmap_insert(Process *p, Uint32 hx, Eterm key, Eterm value, Eterm node, int is_update);
+
+/* for hashmap_from_list/1 */
+typedef struct {
+ Uint32 hx;   /* swizzled hash value of the key */
+ Uint32 skip; /* number of adjacent array entries merged into this node */
+ Uint i;      /* original position in the input list (last duplicate wins) */
+ Eterm val;   /* cons cell [key|value], or a built subtree */
+} hxnode_t;
+
static const Eterm *hashmap_get(Uint32 hx, Eterm key, Eterm node);
static Eterm hashmap_delete(Process *p, Uint32 hx, Eterm key, Eterm node);
+static Eterm hashmap_from_list(Process *p, Eterm node);
static Eterm hashmap_to_list(Process *p, Eterm map);
static Eterm hashmap_keys(Process *p, Eterm map);
static Eterm hashmap_values(Process *p, Eterm map);
static Eterm hashmap_merge(Process *p, Eterm nodeA, Eterm nodeB);
static Eterm hashmap_bld_tuple_uint(Uint **hpp, Uint *szp, Uint n, Uint nums[]);
+static Eterm hashmap_from_sorted_unique_array(Process *p, hxnode_t *hxns, Uint n, int is_root);
+static Eterm hashmap_from_chunked_array(Process *p, hxnode_t *hxns, Uint n, int is_root);
+static int hxnodecmp(hxnode_t* a, hxnode_t* b);
+static int hxnodecmpkey(hxnode_t* a, hxnode_t* b);
/* hashmap:new/0 */
@@ -129,6 +143,15 @@ BIF_RETTYPE hashmap_to_list_1(BIF_ALIST_1) {
BIF_ERROR(BIF_P, BADARG);
}
+/* hashmap:from_list/1 */
+
+BIF_RETTYPE hashmap_from_list_1(BIF_ALIST_1) {
+ if (is_list(BIF_ARG_1) || is_nil(BIF_ARG_1)) {
+ return hashmap_from_list(BIF_P, BIF_ARG_1);
+ }
+
+ BIF_ERROR(BIF_P, BADARG);
+}
/* hashmap:get/2 */
BIF_RETTYPE hashmap_get_2(BIF_ALIST_2) {
@@ -794,6 +817,398 @@ not_found:
return res;
}
+#define swizzle32(D,S) \
+ do { \
+ (D) = ((S) & 0x0000000f) << 28 | ((S) & 0x000000f0) << 20 \
+ | ((S) & 0x00000f00) << 12 | ((S) & 0x0000f000) << 4 \
+ | ((S) & 0x000f0000) >> 4 | ((S) & 0x00f00000) >> 12 \
+ | ((S) & 0x0f000000) >> 20 | ((S) & 0xf0000000) >> 28; \
+ } while(0)
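
A minimal standalone sketch (assumed driver code, not part of the patch) of what swizzle32 does: it reverses the order of the eight 4-bit nibbles in a 32-bit hash, so that sorting on the swizzled value orders entries by the nibble the trie consumes first.

    /* editorial sketch: the nibble reversal performed by swizzle32 */
    #include <stdio.h>

    #define swizzle32(D,S) \
        do { \
            (D) = ((S) & 0x0000000f) << 28 | ((S) & 0x000000f0) << 20 \
                | ((S) & 0x00000f00) << 12 | ((S) & 0x0000f000) << 4  \
                | ((S) & 0x000f0000) >> 4  | ((S) & 0x00f00000) >> 12 \
                | ((S) & 0x0f000000) >> 20 | ((S) & 0xf0000000) >> 28; \
        } while(0)

    int main(void) {
        unsigned int hx = 0x12345678u, sw;
        swizzle32(sw, hx);
        /* prints "12345678 -> 87654321": the low nibble of the original
         * hash (the first 4 bits the trie consumes) becomes the most
         * significant nibble of the sort key */
        printf("%08x -> %08x\n", hx, sw);
        return 0;
    }
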
+
+
+static Eterm hashmap_from_list(Process *p, Eterm list) {
+ Eterm *kv, res, item = list;
+ Eterm *hp;
+ Eterm tmp[2];
+ Uint32 sw, hx;
+ Uint jx = 0, ix = 0, lx, cx, n = 0;
+ hxnode_t *hxns;
+
+ /* Calculate size and check validity */
+
+ if (is_nil(list)) {
+ hp = HAlloc(p, HAMT_HEAD_EMPTY_SZ);
+ hp[0] = MAP_HEADER_HAMT_HEAD_BITMAP(0);
+ hp[1] = 0;
+ return make_hashmap(hp);
+ }
+
+ while(is_list(item)) {
+ res = CAR(list_val(item));
+ if (is_not_tuple(res))
+ goto error;
+
+ kv = tuple_val(res);
+ if (*kv != make_arityval(2))
+ goto error;
+
+ n++;
+ item = CDR(list_val(item));
+ }
+
+ if (is_not_nil(item))
+ goto error;
+
+ hp = HAlloc(p, (2 * n));
+
+ /* create tmp hx values and leaf ptrs */
+ hxns = (hxnode_t *)erts_alloc(ERTS_ALC_T_TMP, n * sizeof(hxnode_t));
+ item = list;
+
+ while(is_list(item)) {
+ res = CAR(list_val(item));
+ kv = tuple_val(res);
+ hx = hashmap_restore_hash(tmp,0,kv[1]);
+ swizzle32(sw,hx);
+ hxns[ix].hx = sw;
+ hxns[ix].val = CONS(hp, kv[1], kv[2]); hp += 2;
+ hxns[ix].skip = 1; /* will be reassigned in from_array */
+ hxns[ix].i = ix;
+ ix++;
+ item = CDR(list_val(item));
+ }
+
+ ASSERT(n > 0);
+
+ /* sort and compact array (remove non-unique entries) */
+ qsort(hxns, n, sizeof(hxnode_t), (int (*)(const void *, const void *)) hxnodecmp);
+
+ ix = 0, cx = 0;
+ while(ix < n - 1) {
+ if (hxns[ix].hx == hxns[ix+1].hx) {
+
+ /* find region of equal hash values */
+ jx = ix + 1;
+ while(jx < n && hxns[ix].hx == hxns[jx].hx) { jx++; }
+ /* keep only the last list occurrence of each key in the region;
+ * the region is hash sorted now, so the highest original index (i) wins */
+
+ /* resort with keys instead of hash value within region */
+
+ qsort(&hxns[ix], jx - ix, sizeof(hxnode_t),
+ (int (*)(const void *, const void *)) hxnodecmpkey);
+
+ while(ix < jx) {
+ lx = ix;
+ while(ix < jx && EQ(CAR(list_val(hxns[ix].val)), CAR(list_val(hxns[lx].val)))) {
+ if (hxns[ix].i > hxns[lx].i) {
+ lx = ix;
+ }
+ ix++;
+ }
+ hxns[cx].hx = hxns[lx].hx;
+ hxns[cx].val = hxns[lx].val;
+ cx++;
+ }
+ ix = jx;
+ continue;
+ }
+ if (ix > cx) {
+ hxns[cx].hx = hxns[ix].hx;
+ hxns[cx].val = hxns[ix].val;
+ }
+ cx++;
+ ix++;
+ }
+
+ if (ix < n) {
+ hxns[cx].hx = hxns[ix].hx;
+ hxns[cx].val = hxns[ix].val;
+ cx++;
+ }
+
+ if (cx > 1) {
+ /* recursive decompose array */
+ res = hashmap_from_sorted_unique_array(p, hxns, cx, 0);
+ } else {
+ /* the hash value has been swizzled, so shift its top nibble
+ * down to obtain the correct root slot. */
+ hp = HAlloc(p, HAMT_HEAD_BITMAP_SZ(1));
+ hp[0] = MAP_HEADER_HAMT_HEAD_BITMAP(1 << ((hxns[0].hx >> 0x1c) & 0xf));
+ hp[1] = 1;
+ hp[2] = hxns[0].val;
+ res = make_hashmap(hp);
+ }
+
+ erts_free(ERTS_ALC_T_TMP, (void *) hxns);
+ ERTS_VERIFY_UNUSED_TEMP_ALLOC(p);
+
+ BIF_RET(res);
+error:
+ BIF_ERROR(p, BADARG);
+}
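
The sort-and-compact pass above implements "last occurrence wins" for duplicate keys: within each run of equal (hash, key) entries, the pair with the highest original index i is kept. A simplified standalone sketch of that rule, operating on a key-sorted array of plain integers instead of hash-sorted Eterms (all names and types here are illustrative assumptions):

    /* editorial sketch: "last occurrence wins" compaction, mirroring the
     * hx/key compaction in hashmap_from_list */
    #include <stdio.h>

    typedef struct { int key; int i; int val; } ent_t;

    /* entries must already be sorted by key; among equal keys, the entry
     * with the highest original index i (the latest list element) is kept */
    static int compact(ent_t *e, int n) {
        int ix = 0, cx = 0;
        while (ix < n) {
            int lx = ix;
            while (ix < n && e[ix].key == e[lx].key) {
                if (e[ix].i > e[lx].i) lx = ix;
                ix++;
            }
            e[cx++] = e[lx];
        }
        return cx; /* number of unique keys */
    }

    int main(void) {
        /* key 2 appears twice; i records the original list position */
        ent_t e[] = { {1,0,10}, {2,1,20}, {2,3,40}, {5,2,30} };
        int n = compact(e, 4);
        for (int k = 0; k < n; k++)
            printf("key %d -> val %d\n", e[k].key, e[k].val);
        /* prints: key 1 -> val 10, key 2 -> val 40, key 5 -> val 30 */
        return 0;
    }
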
+
+
+static Eterm hashmap_from_sorted_unique_array(Process *p, hxnode_t *hxns, Uint n, int lvl) {
+ Eterm res = NIL;
+ Uint i,ix,jx,elems;
+ Uint32 sw, hx;
+ Eterm val;
+ Eterm th[2];
+ hxnode_t *tmp;
+
+ ASSERT(lvl < 32);
+ ix = 0;
+ elems = 1;
+ while (ix < n - 1) {
+ if (hxns[ix].hx == hxns[ix+1].hx) {
+ jx = ix + 1;
+ while (jx < n && hxns[ix].hx == hxns[jx].hx) { jx++; }
+ tmp = (hxnode_t *)erts_alloc(ERTS_ALC_T_TMP, ((jx - ix)) * sizeof(hxnode_t));
+
+ for(i = 0; i < jx - ix; i++) {
+ val = hxns[i + ix].val;
+ hx = hashmap_restore_hash(th, lvl + 8, CAR(list_val(val)));
+ swizzle32(sw,hx);
+ tmp[i].hx = sw;
+ tmp[i].val = val;
+ tmp[i].i = i;
+ tmp[i].skip = 1;
+ }
+
+ qsort(tmp, jx - ix, sizeof(hxnode_t), (int (*)(const void *, const void *)) hxnodecmp);
+
+ hxns[ix].skip = jx - ix;
+ hxns[ix].val = hashmap_from_sorted_unique_array(p, tmp, jx - ix, lvl + 8);
+ erts_free(ERTS_ALC_T_TMP, (void *) tmp);
+ ix = jx;
+ if (ix < n) { elems++; }
+ continue;
+ }
+ hxns[ix].skip = 1;
+ elems++;
+ ix++;
+ }
+
+ res = hashmap_from_chunked_array(p, hxns, elems, !lvl);
+
+ ERTS_HOLE_CHECK(p);
+
+ return res;
+}
+
+#define maskval(V,L) (((V) >> ((7 - (L))*4)) & 0xf)
+#define cdepth(V1,V2) (hashmap_clz((V1) ^ (V2)) >> 2)
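
A standalone sketch of how maskval() and cdepth() behave (not part of the patch; __builtin_clz is assumed here as a stand-in for hashmap_clz, and the xor argument is nonzero because the hash values are unique at this point): maskval extracts the 4-bit slot a swizzled hash selects at a given trie level, and cdepth gives the number of leading nibbles two swizzled hashes share, i.e. the level at which their paths diverge.

    /* editorial sketch: slot extraction and common depth of two swizzled hashes */
    #include <stdio.h>

    #define maskval(V,L)  (((V) >> ((7 - (L))*4)) & 0xf)
    #define cdepth(V1,V2) (__builtin_clz((V1) ^ (V2)) >> 2)

    int main(void) {
        unsigned int a = 0x87654321u, b = 0x87604321u;
        /* maskval picks the 4-bit slot used at trie level L, starting
         * from the most significant nibble of the swizzled hash */
        printf("slot of a at level 0: %x\n", maskval(a, 0)); /* 8 */
        printf("slot of a at level 2: %x\n", maskval(a, 2)); /* 6 */
        /* a and b share their first three nibbles, so they diverge at level 3 */
        printf("common depth of a and b: %d\n", cdepth(a, b)); /* 3 */
        return 0;
    }
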
+
+/* n must be > 1
+ * and the hash values in hxns must be unique
+ */
+
+#define HALLOC_EXTRA 200
+static Eterm hashmap_from_chunked_array(Process *p, hxnode_t *hxns, Uint n, int is_root) {
+ Uint ix, d, dn, dc, slot, elems;
+ Uint32 v, vp, vn, hdr;
+ Uint bp, sz;
+ DECLARE_ESTACK(stack);
+ Eterm res = NIL, *hp = NULL, *nhp;
+
+ ASSERT(n > 1);
+
+ /* push initial nodes on the stack,
+ * this is the starting depth */
+
+ ix = 0;
+ d = 0;
+ vp = hxns[ix].hx;
+ v = hxns[ix + hxns[ix].skip].hx;
+
+ ASSERT(vp > v);
+ slot = maskval(vp,d);
+
+ while(slot == maskval(v,d)) {
+ ESTACK_PUSH(stack, 1 << slot);
+ d++;
+ slot = maskval(vp,d);
+ }
+
+ res = hxns[ix].val;
+
+ if (hxns[ix].skip > 1) {
+ dc = 7;
+ /* build collision nodes */
+ while (dc > d) {
+ hp = HAllocX(p, HAMT_NODE_BITMAP_SZ(1), HALLOC_EXTRA);
+ hp[0] = MAP_HEADER_HAMT_NODE_BITMAP(1 << maskval(vp,dc));
+ hp[1] = res;
+ res = make_hashmap(hp);
+ dc--;
+ }
+ }
+
+ ESTACK_PUSH(stack, res);
+ ESTACK_PUSH(stack, 1 << slot);
+
+ /* all of the other nodes .. */
+ elems = n - 2; /* the first and last elements are handled separately */
+ while(elems--) {
+ hdr = ESTACK_POP(stack);
+ ix = ix + hxns[ix].skip;
+
+ /* determine if node or subtree should be built by looking
+ * at the next value. */
+
+ vn = hxns[ix + hxns[ix].skip].hx;
+ dn = cdepth(v,vn);
+ ASSERT(v > vn);
+
+ res = hxns[ix].val;
+
+ if (hxns[ix].skip > 1) {
+ int wat = (d > dn) ? d : dn;
+ dc = 7;
+ /* build collision nodes */
+ while (dc > wat) {
+ hp = HAllocX(p, HAMT_NODE_BITMAP_SZ(1), HALLOC_EXTRA);
+ hp[0] = MAP_HEADER_HAMT_NODE_BITMAP(1 << maskval(v,dc));
+ hp[1] = res;
+ res = make_hashmap(hp);
+ dc--;
+ }
+ }
+
+ /* next depth is higher (implies collision) */
+ if (d < dn) {
+ /* hdr is the popped one initially */
+ while(d < dn) {
+ slot = maskval(v, d);
+ bp = 1 << slot;
+ ESTACK_PUSH(stack, hdr | bp);
+ d++;
+ hdr = 0; /* clear hdr for all other collisions */
+ }
+
+ slot = maskval(v, d);
+ bp = 1 << slot;
+ /* no more collisions */
+ ESTACK_PUSH(stack,res);
+ ESTACK_PUSH(stack,bp);
+ } else if (d == dn) {
+ /* no collisions at all */
+ slot = maskval(v, d);
+ bp = 1 << slot;
+ ESTACK_PUSH(stack,res);
+ ESTACK_PUSH(stack,hdr | bp);
+ } else {
+ /* dn < d, the depth drops, so we are done at this depth:
+ * close the finished nodes and build the subtree */
+ while (dn != d) {
+ slot = maskval(v, d);
+ bp = 1 << slot;
+ /* OR bitposition before sz calculation to handle
+ * redundant collisions */
+ hdr |= bp;
+ sz = hashmap_bitcount(hdr);
+ hp = HAllocX(p, HAMT_NODE_BITMAP_SZ(sz), HALLOC_EXTRA);
+ nhp = hp;
+ *hp++ = (hdr == 0xffff) ? MAP_HEADER_HAMT_NODE_ARRAY : MAP_HEADER_HAMT_NODE_BITMAP(hdr);
+ *hp++ = res; sz--;
+ while (sz--) { *hp++ = ESTACK_POP(stack); }
+ ASSERT((hp - nhp) < 18);
+ res = make_hashmap(nhp);
+
+ /* pop the next hdr here; it is pushed back after the loop if unused */
+
+ hdr = ESTACK_POP(stack);
+ d--;
+ }
+ ESTACK_PUSH(stack, res);
+ ESTACK_PUSH(stack, hdr);
+ }
+
+ vp = v;
+ v = vn;
+ d = dn;
+ ERTS_HOLE_CHECK(p);
+ }
+
+ /* v and vp are reused from above */
+ dn = cdepth(vp,v);
+ ix = ix + hxns[ix].skip;
+ res = hxns[ix].val;
+
+ if (hxns[ix].skip > 1) {
+ dc = 7;
+ /* build collision nodes */
+ while (dc > dn) {
+ hp = HAllocX(p, HAMT_NODE_BITMAP_SZ(1), HALLOC_EXTRA);
+ hp[0] = MAP_HEADER_HAMT_NODE_BITMAP(1 << maskval(v,dc));
+ hp[1] = res;
+ res = make_hashmap(hp);
+ dc--;
+ }
+ }
+
+ hdr = ESTACK_POP(stack);
+ /* pop remaining subtree if any */
+ while (dn) {
+ slot = maskval(v, dn);
+ bp = 1 << slot;
+ /* OR bitposition before sz calculation to handle
+ * redundant collisions */
+ hdr |= bp;
+ sz = hashmap_bitcount(hdr);
+ hp = HAllocX(p, HAMT_NODE_BITMAP_SZ(sz), HALLOC_EXTRA);
+ nhp = hp;
+ *hp++ = (hdr == 0xffff) ? MAP_HEADER_HAMT_NODE_ARRAY : MAP_HEADER_HAMT_NODE_BITMAP(hdr);
+ *hp++ = res; sz--;
+
+ while (sz--) { *hp++ = ESTACK_POP(stack); }
+ res = make_hashmap(nhp);
+ hdr = ESTACK_POP(stack);
+ dn--;
+ }
+
+ /* and finally the root .. */
+
+ slot = maskval(v, dn);
+ bp = 1 << slot;
+ hdr |= bp;
+ sz = hashmap_bitcount(hdr);
+ hp = HAlloc(p, sz + /* hdr + item */ (is_root ? 2 : 1));
+ nhp = hp;
+
+ if (is_root) {
+ *hp++ = (hdr == 0xffff) ? MAP_HEADER_HAMT_HEAD_ARRAY : MAP_HEADER_HAMT_HEAD_BITMAP(hdr);
+ *hp++ = n;
+ } else {
+ *hp++ = (hdr == 0xffff) ? MAP_HEADER_HAMT_NODE_ARRAY : MAP_HEADER_HAMT_NODE_BITMAP(hdr);
+ }
+
+ *hp++ = res; sz--;
+ while (sz--) { *hp++ = ESTACK_POP(stack); }
+
+ res = make_hashmap(nhp);
+
+ ASSERT(ESTACK_COUNT(stack) == 0);
+ DESTROY_ESTACK(stack);
+ ERTS_HOLE_CHECK(p);
+ return res;
+}
+#undef HALLOC_EXTRA
+
+static int hxnodecmpkey(hxnode_t *a, hxnode_t *b) {
+ return CMP_TERM(CAR(list_val(a->val)), CAR(list_val(b->val)));
+}
+
+static int hxnodecmp(hxnode_t *a, hxnode_t *b) {
+ if (a->hx < b->hx)
+ return 1;
+ else if (a->hx == b->hx)
+ return 0;
+ else
+ return -1;
+}
+
#define HALLOC_EXTRA 200
static Eterm hashmap_merge(Process *p, Eterm nodeA, Eterm nodeB) {
diff --git a/erts/emulator/beam/erl_hashmap.h b/erts/emulator/beam/erl_hashmap.h
index 1964787218..f05505bae0 100644
--- a/erts/emulator/beam/erl_hashmap.h
+++ b/erts/emulator/beam/erl_hashmap.h
@@ -127,6 +127,9 @@ typedef struct hashmap_head_s {
#define MAP_HEADER_HAMT_HEAD_BITMAP(Bmp) \
MAKE_MAP_HEADER(MAP_HEADER_TAG_HAMT_HEAD_BITMAP,0x1,Bmp)
+#define MAP_HEADER_HAMT_NODE_ARRAY \
+ make_arityval(16)
+
#define MAP_HEADER_HAMT_NODE_BITMAP(Bmp) \
MAKE_MAP_HEADER(MAP_HEADER_TAG_HAMT_NODE_BITMAP,0x0,Bmp)