Diffstat (limited to 'lib')
-rw-r--r--  lib/compiler/scripts/.gitignore        |   1
-rwxr-xr-x  lib/compiler/scripts/smoke             | 122
-rw-r--r--  lib/compiler/scripts/smoke-mix.exs     |  95
-rw-r--r--  lib/compiler/src/beam_except.erl       |  12
-rw-r--r--  lib/compiler/src/beam_ssa_dead.erl     |  47
-rw-r--r--  lib/compiler/src/beam_ssa_opt.erl      |  20
-rw-r--r--  lib/compiler/src/beam_ssa_type.erl     |  24
-rw-r--r--  lib/compiler/src/beam_validator.erl    | 541
-rw-r--r--  lib/compiler/test/beam_type_SUITE.erl  |  16
-rw-r--r--  lib/crypto/src/crypto.erl              |   3
-rw-r--r--  lib/ssl/src/tls_connection.erl         |   1
-rw-r--r--  lib/ssl/test/ssl_ECC_SUITE.erl         |  61
-rw-r--r--  lib/ssl/test/ssl_test_lib.erl          |  42
-rw-r--r--  lib/stdlib/doc/src/ets.xml             |  25
14 files changed, 647 insertions(+), 363 deletions(-)
diff --git a/lib/compiler/scripts/.gitignore b/lib/compiler/scripts/.gitignore
new file mode 100644
index 0000000000..4e4eba766d
--- /dev/null
+++ b/lib/compiler/scripts/.gitignore
@@ -0,0 +1 @@
+/smoke-build
diff --git a/lib/compiler/scripts/smoke b/lib/compiler/scripts/smoke
new file mode 100755
index 0000000000..2429f104c0
--- /dev/null
+++ b/lib/compiler/scripts/smoke
@@ -0,0 +1,122 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+-mode(compile).
+
+main(_Args) ->
+ setup(),
+ clone_elixir(),
+ build_elixir(),
+ test_elixir(),
+ setup_mix(),
+ smoke(main),
+ smoke(rabbitmq),
+ halt(0).
+
+setup() ->
+ ScriptsDir = scripts_dir(),
+ SmokeBuildDir = filename:join(ScriptsDir, "smoke-build"),
+ _ = file:make_dir(SmokeBuildDir),
+ ok = file:set_cwd(SmokeBuildDir),
+ ok.
+
+clone_elixir() ->
+ {ok,SmokeDir} = file:get_cwd(),
+ DotGitDir = filename:join([SmokeDir,"elixir",".git"]),
+ ElixirRepo = "[email protected]:elixir-lang/elixir.git",
+ case filelib:is_dir(DotGitDir) of
+ false ->
+ cmd("git clone " ++ ElixirRepo);
+ true ->
+ GetHeadSHA1 = "cd elixir && git rev-parse --verify HEAD",
+ Before = os:cmd(GetHeadSHA1),
+ cmd("cd elixir && git pull --ff-only origin master"),
+ case os:cmd(GetHeadSHA1) of
+ Before ->
+ ok;
+ _After ->
+ %% There were some changes. Clean to force a re-build.
+ cmd("cd elixir && make clean")
+ end
+ end.
+
+build_elixir() ->
+ cmd("cd elixir && make compile").
+
+test_elixir() ->
+ cmd("cd elixir && make test_stdlib").
+
+setup_mix() ->
+ MixExsFile = filename:join(scripts_dir(), "smoke-mix.exs"),
+ {ok,MixExs} = file:read_file(MixExsFile),
+ ok = file:write_file("mix.exs", MixExs),
+
+ {ok,SmokeDir} = file:get_cwd(),
+ ElixirBin = filename:join([SmokeDir,"elixir","bin"]),
+ PATH = ElixirBin ++ ":" ++ os:getenv("PATH"),
+ os:putenv("PATH", PATH),
+ mix("local.rebar --force"),
+ ok.
+
+smoke(Set) ->
+ os:putenv("SMOKE_DEPS_SET", atom_to_list(Set)),
+ _ = file:delete("mix.lock"),
+ cmd("touch mix.exs"),
+ mix("deps.clean --all"),
+ mix("deps.get"),
+ mix("deps.compile"),
+ ok.
+
+scripts_dir() ->
+ Root = code:lib_dir(compiler),
+ filename:join(Root, "scripts").
+
+mix(Cmd) ->
+ cmd("mix " ++ Cmd).
+
+cmd(Cmd) ->
+ run("sh", ["-c",Cmd]).
+
+run(Program0, Args) ->
+ Program = case os:find_executable(Program0) of
+ Path when is_list(Path) ->
+ Path;
+ false ->
+ abort("Unable to find program: ~s\n", [Program0])
+ end,
+ Cmd = case {Program0,Args} of
+ {"sh",["-c"|ShCmd]} ->
+ ShCmd;
+ {_,_} ->
+ lists:join(" ", [Program0|Args])
+ end,
+ io:format("\n# ~s\n", [Cmd]),
+ Options = [{args,Args},binary,exit_status,stderr_to_stdout],
+ try open_port({spawn_executable,Program}, Options) of
+ Port ->
+ case run_loop(Port, <<>>) of
+ 0 ->
+ ok;
+ ExitCode ->
+ abort("*** Failed with exit code: ~p\n",
+ [ExitCode])
+ end
+ catch
+ error:_ ->
+ abort("Failed to execute ~s\n", [Program0])
+ end.
+
+run_loop(Port, Output) ->
+ receive
+ {Port,{exit_status,Status}} ->
+ Status;
+ {Port,{data,Bin}} ->
+ io:put_chars(Bin),
+ run_loop(Port, <<Output/binary,Bin/binary>>);
+ Msg ->
+ io:format("L: ~p~n", [Msg]),
+ run_loop(Port, Output)
+ end.
+
+abort(Format, Args) ->
+ io:format(Format, Args),
+ halt(1).
diff --git a/lib/compiler/scripts/smoke-mix.exs b/lib/compiler/scripts/smoke-mix.exs
new file mode 100644
index 0000000000..82ae3370fe
--- /dev/null
+++ b/lib/compiler/scripts/smoke-mix.exs
@@ -0,0 +1,95 @@
+defmodule Smoke.MixProject do
+ use Mix.Project
+
+ def project do
+ [
+ app: :smoke,
+ version: "0.1.0",
+ elixir: "~> 1.8",
+ start_permanent: Mix.env() == :prod,
+ deps: deps()
+ ]
+ end
+
+ # Run "mix help compile.app" to learn about applications.
+ def application do
+ [
+ extra_applications: [:logger]
+ ]
+ end
+
+ # Run "mix help deps" to learn about dependencies.
+ defp deps do
+ case :os.getenv('SMOKE_DEPS_SET') do
+ 'main' ->
+ [
+ {:bear, "~> 0.8.7"},
+ {:cloudi_core, "~> 1.7"},
+ {:concuerror, "~> 0.20.0"},
+ {:cowboy, "~> 2.6.1"},
+ {:ecto, "~> 3.0.6"},
+ {:ex_doc, "~> 0.19.3"},
+ {:distillery, "~> 2.0.12"},
+ {:erlydtl, "~> 0.12.1"},
+ {:gen_smtp, "~> 0.13.0"},
+ {:getopt, "~> 1.0.1"},
+ {:gettext, "~> 0.16.1"},
+ {:gpb, "~> 4.6"},
+ {:gproc, "~> 0.8.0"},
+ {:graphql, "~> 0.15.0", hex: :graphql_erl},
+ {:hackney, "~> 1.15.0"},
+ {:ibrowse, "~> 4.4.1"},
+ {:jose, "~> 1.9.0"},
+ {:lager, "~> 3.6"},
+ {:locus, "~> 1.6"},
+ {:nimble_parsec, "~> 0.5.0"},
+ {:phoenix, "~> 1.4.0"},
+ {:riak_pb, "~> 2.3"},
+ {:scalaris, git: "https://github.com/scalaris-team/scalaris",
+ compile: build_scalaris()},
+ {:tdiff, "~> 0.1.2"},
+ {:webmachine, "~> 1.11"},
+ {:wings, git: "https://github.com/dgud/wings.git",
+ compile: build_wings()},
+ {:zotonic_stdlib, "~> 1.0"},
+ ]
+ 'rabbitmq' ->
+ [{:rabbit_common, "~> 3.7"}]
+ _ ->
+ []
+ end
+ end
+
+ defp build_scalaris do
+ # Only compile the Erlang code.
+
+ """
+ echo '-include("rt_simple.hrl").' >include/rt.hrl
+ (cd src && erlc -W0 -I ../include -I ../contrib/log4erl/include -I ../contrib/yaws/include *.erl)
+ (cd src/comm_layer && erlc -W0 -I ../../include -I *.erl)
+ (cd src/cp && erlc -W0 -I ../../include -I *.erl)
+ (cd src/crdt && erlc -W0 -I ../../include -I *.erl)
+ (cd src/json && erlc -W0 -I ../../include -I *.erl)
+ (cd src/paxos && erlc -W0 -I ../../include -I *.erl)
+ (cd src/rbr && erlc -W0 -I ../../include -I *.erl)
+ (cd src/rrepair && erlc -W0 -I ../../include -I *.erl)
+ (cd src/time && erlc -W0 -I ../../include -I *.erl)
+ (cd src/transactions && erlc -W0 -I ../../include -I *.erl)
+ (cd src/tx && erlc -W0 -I ../../include -I *.erl)
+ """
+ end
+
+ defp build_wings do
+ # If the Erlang system is not installed, the build will
+ # crash in plugins_src/accel when attempting to build
+ # the accel driver. Since there is very little Erlang code in
+ # the directory, skip the entire directory.
+
+ """
+ echo "all:\n\t" >plugins_src/accel/Makefile
+ git commit -a -m'Disable for smoke testing'
+ git tag -a -m'Smoke test' vsmoke_test
+ make
+ """
+ end
+end
diff --git a/lib/compiler/src/beam_except.erl b/lib/compiler/src/beam_except.erl
index 49bfb5606f..26b193a35b 100644
--- a/lib/compiler/src/beam_except.erl
+++ b/lib/compiler/src/beam_except.erl
@@ -79,13 +79,15 @@ translate_1(Ar, I, Is, #st{arity=Arity}=St, [{line,_}=Line|Acc1]=Acc0) ->
no ->
translate(Is, St, [I|Acc0]);
{yes,function_clause,Acc2} ->
- case {Line,St} of
- {{line,Loc},#st{lbl=Fi,loc=Loc}} ->
+ case {Is,Line,St} of
+ {[return|_],{line,Loc},#st{lbl=Fi,loc=Loc}} ->
Instr = {jump,{f,Fi}},
translate(Is, St, [Instr|Acc2]);
- {_,_} ->
- %% This must be "error(function_clause, Args)" in
- %% the Erlang source code or a fun. Don't translate.
+ {_,_,_} ->
+ %% Not a call_only instruction, or not the same
+ %% location information as in the line instruction
+ %% before the func_info instruction. Not safe
+ %% to translate to a jump.
translate(Is, St, [I|Acc0])
end;
{yes,Instr,Acc2} ->
diff --git a/lib/compiler/src/beam_ssa_dead.erl b/lib/compiler/src/beam_ssa_dead.erl
index 067d9a6741..09b29025c6 100644
--- a/lib/compiler/src/beam_ssa_dead.erl
+++ b/lib/compiler/src/beam_ssa_dead.erl
@@ -181,9 +181,9 @@ shortcut_2(L, Bs0, UnsetVars0, St) ->
%% We have a potentially suitable br.
%% Now update the set of variables that will never
%% be set if this block will be skipped.
- UnsetVars1 = [V || #b_set{dst=V} <- Is],
- UnsetVars = ordsets:union(UnsetVars0,
- ordsets:from_list(UnsetVars1)),
+ SetInThisBlock = [V || #b_set{dst=V} <- Is],
+ UnsetVars = update_unset_vars(L, Br, SetInThisBlock,
+ UnsetVars0, St),
%% Continue checking whether this br is suitable.
shortcut_3(Br, Bs#{from:=L}, UnsetVars, St)
@@ -296,6 +296,37 @@ shortcut_3(Br, Bs, UnsetVars, #st{target=Target}=St) ->
end
end.
+update_unset_vars(L, Br, SetInThisBlock, UnsetVars, #st{skippable=Skippable}) ->
+ case is_map_key(L, Skippable) of
+ true ->
+ %% None of the variables defined in this block are used in
+ %% the successors. We can speed up compilation by avoiding
+ %% adding variables to the UnsetVars set if the presence of
+ %% those variables would not change the outcome of the
+ %% tests in is_br_safe/2.
+ case Br of
+ #b_br{bool=Bool} ->
+ case member(Bool, SetInThisBlock) of
+ true ->
+ %% Bool is a variable defined in this
+ %% block. It will change the outcome of
+ %% the `not member(V, UnsetVars)` check in
+ %% is_br_safe/2. The other variables
+ %% defined in this block will not.
+ ordsets:add_element(Bool, UnsetVars);
+ false ->
+ %% Bool is either a variable not defined
+ %% in this block or a literal. Adding it
+ %% to the UnsetVars set would not change
+ %% the outcome of the tests in
+ %% is_br_safe/2.
+ UnsetVars
+ end
+ end;
+ false ->
+ ordsets:union(UnsetVars, ordsets:from_list(SetInThisBlock))
+ end.
+
shortcut_two_way(#b_br{succ=Succ,fail=Fail}, Bs0, UnsetVars0, St) ->
case shortcut_2(Succ, Bs0, UnsetVars0, St#st{target=Fail}) of
{#b_br{bool=#b_literal{},succ=Fail},_,_}=Res ->
@@ -919,11 +950,11 @@ used_vars([{L,#b_blk{is=Is}=Blk}|Bs], UsedVars0, Skip0) ->
Used = used_vars_blk(Blk, Used0),
UsedVars = used_vars_phis(Is, L, Used, UsedVars0),
- %% combine_eqs/1 needs different variable usage
- %% information than shortcut_opt/1. The Skip
- %% map will have an entry for each block that
- %% can be skipped (does not bind any variable used
- %% in successor).
+ %% combine_eqs/1 needs different variable usage information than
+ %% shortcut_opt/1. The Skip map will have an entry for each block
+ %% that can be skipped (does not bind any variable used in
+ %% successor). This information is also useful for speeding up
+ %% shortcut_opt/1.
Defined0 = [Def || #b_set{dst=Def} <- Is],
Defined = ordsets:from_list(Defined0),
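A side note on update_unset_vars/5 above: the speed-up comes from adding at most the branch variable to UnsetVars in the skippable case, instead of unioning in every variable defined in the block. A minimal sketch of the two ordsets operations it chooses between (illustration only; atoms stand in for the real #b_var{} records):

    SetInThisBlock = ordsets:from_list(['Bool','V1','V2']),
    UnsetVars0     = ordsets:from_list(['V0']),

    %% Skippable block whose branch bool is defined in the block:
    ['Bool','V0'] = ordsets:add_element('Bool', UnsetVars0),

    %% General (non-skippable) case:
    ['Bool','V0','V1','V2'] = ordsets:union(UnsetVars0, SetInThisBlock).
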
diff --git a/lib/compiler/src/beam_ssa_opt.erl b/lib/compiler/src/beam_ssa_opt.erl
index 2c898ba6f8..e528bb4dfb 100644
--- a/lib/compiler/src/beam_ssa_opt.erl
+++ b/lib/compiler/src/beam_ssa_opt.erl
@@ -79,11 +79,9 @@ module(Module, Opts) ->
{ok, finish(Module, StMap)}.
phase([FuncId | Ids], Ps, StMap, FuncDb0) ->
- try
- {St, FuncDb} =
- compile:run_sub_passes(Ps, {map_get(FuncId, StMap), FuncDb0}),
-
- phase(Ids, Ps, StMap#{ FuncId => St }, FuncDb)
+ try compile:run_sub_passes(Ps, {map_get(FuncId, StMap), FuncDb0}) of
+ {St, FuncDb} ->
+ phase(Ids, Ps, StMap#{ FuncId => St }, FuncDb)
catch
Class:Error:Stack ->
#b_local{name=Name,arity=Arity} = FuncId,
@@ -1972,13 +1970,17 @@ verify_merge_is([#b_set{op=Op}|_]) ->
verify_merge_is(_) ->
ok.
-is_merge_allowed(_, _, #b_blk{is=[#b_set{op=peek_message}|_]}) ->
+is_merge_allowed(_, #b_blk{}, #b_blk{is=[#b_set{op=peek_message}|_]}) ->
false;
-is_merge_allowed(L, Blk0, #b_blk{}) ->
- case beam_ssa:successors(Blk0) of
+is_merge_allowed(L, #b_blk{last=#b_br{}}=Blk, #b_blk{}) ->
+ %% The predecessor block must have exactly one successor (L) for
+ %% the merge to be safe.
+ case beam_ssa:successors(Blk) of
[L] -> true;
[_|_] -> false
- end.
+ end;
+is_merge_allowed(_, #b_blk{last=#b_switch{}}, #b_blk{}) ->
+ false.
%%%
%%% When a tuple is matched, the pattern matching compiler generates a
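A note on the phase/4 rewrite above: with try ... of, only the expression between try and of is protected, so the recursive call now sits in the 'of' clause and a crash while compiling a later function is no longer caught and re-reported at this level. A generic sketch of the pattern (risky/1 and handle_error/4 are hypothetical names):

    loop([X | Xs]) ->
        %% Only risky(X) is protected; the work in the 'of' clause,
        %% including the recursive call, is not caught here.
        try risky(X) of
            Result -> [Result | loop(Xs)]
        catch
            Class:Reason:Stack ->
                handle_error(X, Class, Reason, Stack)
        end;
    loop([]) ->
        [].
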
diff --git a/lib/compiler/src/beam_ssa_type.erl b/lib/compiler/src/beam_ssa_type.erl
index 38ea5e6914..8bd6772ac5 100644
--- a/lib/compiler/src/beam_ssa_type.erl
+++ b/lib/compiler/src/beam_ssa_type.erl
@@ -1072,15 +1072,23 @@ simplify_not(#b_br{bool=#b_var{}=V,succ=Succ,fail=Fail}=Br0, Ts, Ds) ->
%%%
%%% Calculate the set of variables that are only used once in the
-%%% block that they are defined in. That will allow us to discard type
-%%% information for variables that will never be referenced by the
-%%% successor blocks, potentially improving compilation times.
+%%% terminator of the block that defines them. That will allow us to
+%%% discard type information for variables that will never be
+%%% referenced by the successor blocks, potentially improving
+%%% compilation times.
%%%
used_once(Linear, Args) ->
Map0 = used_once_1(reverse(Linear), #{}),
Map = maps:without(Args, Map0),
- cerl_sets:from_list(maps:keys(Map)).
+ Used0 = cerl_sets:from_list(maps:keys(Map)),
+ Used1 = used_in_terminators(Linear, []),
+ cerl_sets:intersection(Used0, Used1).
+
+used_in_terminators([{_,#b_blk{last=Last}}|Bs], Acc) ->
+ used_in_terminators(Bs, beam_ssa:used(Last) ++ Acc);
+used_in_terminators([], Acc) ->
+ cerl_sets:from_list(Acc).
used_once_1([{L,#b_blk{is=Is,last=Last}}|Bs], Uses0) ->
Uses = used_once_2([Last|reverse(Is)], L, Uses0),
@@ -1177,7 +1185,7 @@ get_type(#b_literal{val=Val}, _Ts) ->
%% failed and that L is not 'cons'. 'cons' can be subtracted from the
%% previously known type for L and the result put in FailTypes.
-infer_types(#b_var{}=V, Ts, #d{ds=Ds,once=Once}) ->
+infer_types(#b_var{}=V, Ts, #d{ds=Ds}) ->
#{V:=#b_set{op=Op,args=Args}} = Ds,
Types0 = infer_type(Op, Args, Ds),
@@ -1195,11 +1203,7 @@ infer_types(#b_var{}=V, Ts, #d{ds=Ds,once=Once}) ->
is_singleton_type(T)
end, EqTypes0),
- %% Don't bother updating the types for variables that
- %% are never used again.
- Types2 = Types1 ++ Types0,
- Types = [P || {InfV,_}=P <- Types2, not cerl_sets:is_element(InfV, Once)],
-
+ Types = Types1 ++ Types0,
{meet_types(EqTypes++Types, Ts),subtract_types(Types, Ts)}.
infer_eq_type({bif,'=:='}, [#b_var{}=Src,#b_literal{}=Lit], Ts, Ds) ->
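The new used_once/2 above intersects the set of variables used exactly once with the set of variables used in block terminators. A hedged sketch of that intersection (cerl_sets mirrors the sets module API; the atoms are made-up stand-ins for variables):

    Used0 = cerl_sets:from_list([x, y, z]),       %% used exactly once
    Used1 = cerl_sets:from_list([y, z, w]),       %% used in some terminator
    Once  = cerl_sets:intersection(Used0, Used1),
    true  = cerl_sets:is_element(y, Once),
    false = cerl_sets:is_element(x, Once).
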
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 4081e366a5..b56d53d4ce 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -303,11 +303,11 @@ valfun_1(_I, #vst{current=none}=Vst) ->
%% the original R10B compiler thought would return.
Vst;
valfun_1({badmatch,Src}, Vst) ->
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
verify_y_init(Vst),
kill_state(Vst);
valfun_1({case_end,Src}, Vst) ->
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
verify_y_init(Vst),
kill_state(Vst);
valfun_1(if_end, Vst) ->
@@ -315,40 +315,21 @@ valfun_1(if_end, Vst) ->
kill_state(Vst);
valfun_1({try_case_end,Src}, Vst) ->
verify_y_init(Vst),
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
kill_state(Vst);
%% Instructions that cannot cause exceptions
valfun_1({bs_get_tail,Ctx,Dst,Live}, Vst0) ->
+ bsm_validate_context(Ctx, Vst0),
verify_live(Live, Vst0),
verify_y_init(Vst0),
Vst = prune_x_regs(Live, Vst0),
- #vst{current=#st{x=Xs,y=Ys}} = Vst,
- {Reg, Tree} = case Ctx of
- {x,X} -> {X, Xs};
- {y,Y} -> {Y, Ys};
- _ -> error({bad_source,Ctx})
- end,
- Type = case gb_trees:lookup(Reg, Tree) of
- {value,#ms{}} -> propagate_fragility(term, [Ctx], Vst);
- _ -> error({bad_context,Reg})
- end,
- set_type_reg(Type, Dst, Vst);
+ extract_term(binary, [Ctx], Dst, Vst, Vst0);
valfun_1(bs_init_writable=I, Vst) ->
call(I, 1, Vst);
valfun_1(build_stacktrace=I, Vst) ->
call(I, 1, Vst);
-valfun_1({move,{y,_}=Src,{y,_}=Dst}, Vst) ->
- %% The stack trimming optimization may generate a move from an initialized
- %% but unassigned Y register to another Y register.
- case get_term_type_1(Src, Vst) of
- {catchtag,_} -> error({catchtag,Src});
- {trytag,_} -> error({trytag,Src});
- Type -> set_type_reg(Type, Dst, Vst)
- end;
-valfun_1({move,Src,Dst}, Vst0) ->
- Type = get_move_term_type(Src, Vst0),
- Vst = set_type_reg(Type, Dst, Vst0),
- set_alias(Src, Dst, Vst);
+valfun_1({move,Src,Dst}, Vst) ->
+ assign(Src, Dst, Vst);
valfun_1({fmove,Src,{fr,_}=Dst}, Vst) ->
assert_type(float, Src, Vst),
set_freg(Dst, Vst);
@@ -356,7 +337,7 @@ valfun_1({fmove,{fr,_}=Src,Dst}, Vst0) ->
assert_freg_set(Src, Vst0),
assert_fls(checked, Vst0),
Vst = eat_heap_float(Vst0),
- set_type_reg({float,[]}, Dst, Vst);
+ create_term({float,[]}, Dst, Vst);
valfun_1({kill,{y,_}=Reg}, Vst) ->
set_type_y(initialized, Reg, Vst);
valfun_1({init,{y,_}=Reg}, Vst) ->
@@ -378,24 +359,24 @@ valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) ->
end;
%% Put instructions.
valfun_1({put_list,A,B,Dst}, Vst0) ->
- assert_term(A, Vst0),
- assert_term(B, Vst0),
+ assert_not_fragile(A, Vst0),
+ assert_not_fragile(B, Vst0),
Vst = eat_heap(2, Vst0),
- set_type_reg(cons, Dst, Vst);
+ create_term(cons, Dst, Vst);
valfun_1({put_tuple2,Dst,{list,Elements}}, Vst0) ->
- _ = [assert_term(El, Vst0) || El <- Elements],
+ _ = [assert_not_fragile(El, Vst0) || El <- Elements],
Size = length(Elements),
Vst = eat_heap(Size+1, Vst0),
Type = {tuple,Size},
- set_type_reg(Type, Dst, Vst);
+ create_term(Type, Dst, Vst);
valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) ->
Vst1 = eat_heap(1, Vst0),
- Vst = set_type_reg(tuple_in_progress, Dst, Vst1),
+ Vst = create_term(tuple_in_progress, Dst, Vst1),
#vst{current=St0} = Vst,
St = St0#st{puts_left={Sz,{Dst,{tuple,Sz}}}},
Vst#vst{current=St};
valfun_1({put,Src}, Vst0) ->
- assert_term(Src, Vst0),
+ assert_not_fragile(Src, Vst0),
Vst = eat_heap(1, Vst0),
#vst{current=St0} = Vst,
case St0 of
@@ -403,7 +384,7 @@ valfun_1({put,Src}, Vst0) ->
error(not_building_a_tuple);
#st{puts_left={1,{Dst,Type}}} ->
St = St0#st{puts_left=none},
- set_type_reg(Type, Dst, Vst#vst{current=St});
+ create_term(Type, Dst, Vst#vst{current=St});
#st{puts_left={PutsLeft,Info}} when is_integer(PutsLeft) ->
St = St0#st{puts_left={PutsLeft-1,Info}},
Vst#vst{current=St}
@@ -418,19 +399,13 @@ valfun_1(remove_message, Vst) ->
%% The message term is no longer fragile. It can be used
%% without restrictions.
remove_fragility(Vst);
-valfun_1({'%', {type_info, Reg, match_context}}, Vst0) ->
- set_aliased_type(#ms{}, Reg, Vst0);
-valfun_1({'%', {type_info, Reg, NewType0}}, Vst0) ->
+valfun_1({'%', {type_info, Reg, match_context}}, Vst) ->
+ update_type(fun meet/2, #ms{}, Reg, Vst);
+valfun_1({'%', {type_info, Reg, Type}}, Vst) ->
%% Explicit type information inserted by optimization passes to indicate
%% that Reg has a certain type, so that we can accept cross-function type
%% optimizations.
- OldType = get_durable_term_type(Reg, Vst0),
- NewType = case meet(NewType0, OldType) of
- none -> error({bad_type_info, Reg, NewType0, OldType});
- T -> T
- end,
- Type = propagate_fragility(NewType, [Reg], Vst0),
- set_aliased_type(Type, Reg, Vst0);
+ update_type(fun meet/2, Type, Reg, Vst);
valfun_1({'%',_}, Vst) ->
Vst;
valfun_1({line,_}, Vst) ->
@@ -507,20 +482,20 @@ valfun_1({try_case,Reg}, #vst{current=#st{ct=[Fail|Fails]}}=Vst0) ->
valfun_1({get_list,Src,D1,D2}, Vst0) ->
assert_not_literal(Src),
assert_type(cons, Src, Vst0),
- Vst = set_type_reg(term, Src, D1, Vst0),
- set_type_reg(term, Src, D2, Vst);
+ Vst = extract_term(term, [Src], D1, Vst0),
+ extract_term(term, [Src], D2, Vst);
valfun_1({get_hd,Src,Dst}, Vst) ->
assert_not_literal(Src),
assert_type(cons, Src, Vst),
- set_type_reg(term, Src, Dst, Vst);
+ extract_term(term, [Src], Dst, Vst);
valfun_1({get_tl,Src,Dst}, Vst) ->
assert_not_literal(Src),
assert_type(cons, Src, Vst),
- set_type_reg(term, Src, Dst, Vst);
+ extract_term(term, [Src], Dst, Vst);
valfun_1({get_tuple_element,Src,I,Dst}, Vst) ->
assert_not_literal(Src),
assert_type({tuple_element,I+1}, Src, Vst),
- set_type_reg(term, Src, Dst, Vst);
+ extract_term(term, [Src], Dst, Vst);
valfun_1({jump,{f,Lbl}}, Vst) ->
kill_state(branch_state(Lbl, Vst));
valfun_1(I, Vst) ->
@@ -619,73 +594,59 @@ valfun_4({make_fun2,_,_,_,Live}, Vst) ->
call(make_fun, Live, Vst);
%% Other BIFs
valfun_4({bif,tuple_size,{f,Fail},[Tuple],Dst}=I, Vst0) ->
- TupleType0 = get_term_type(Tuple, Vst0),
Vst1 = branch_state(Fail, Vst0),
- TupleType = upgrade_tuple_type({tuple,[0]}, TupleType0),
- Vst = set_aliased_type(TupleType, Tuple, Vst1),
+ Vst = update_type(fun meet/2, {tuple,[0]}, Tuple, Vst1),
set_type_reg_expr({integer,[]}, I, Dst, Vst);
valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) ->
- TupleType0 = get_term_type(Tuple, Vst0),
- PosType = get_term_type(Pos, Vst0),
+ PosType = get_durable_term_type(Pos, Vst0),
Vst1 = branch_state(Fail, Vst0),
- TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0),
- Vst = set_aliased_type(TupleType, Tuple, Vst1),
- set_type_reg(term, Tuple, Dst, Vst);
+ Type = {tuple,[get_tuple_size(PosType)]},
+ Vst = update_type(fun meet/2, Type, Tuple, Vst1),
+ extract_term(term, [Tuple], Dst, Vst);
valfun_4({bif,raise,{f,0},Src,_Dst}, Vst) ->
validate_src(Src, Vst),
kill_state(Vst);
valfun_4(raw_raise=I, Vst) ->
call(I, 3, Vst);
-valfun_4({bif,map_get,{f,Fail},[_Key,Map]=Src,Dst}, Vst0) ->
- validate_src(Src, Vst0),
+valfun_4({bif,map_get,{f,Fail},[_Key,Map]=Ss,Dst}, Vst0) ->
+ validate_src(Ss, Vst0),
Vst1 = branch_state(Fail, Vst0),
- Vst = set_aliased_type(map, Map, Vst1),
- Type = propagate_fragility(term, Src, Vst),
- set_type_reg(Type, Dst, Vst);
-valfun_4({bif,is_map_key,{f,Fail},[_Key,Map]=Src,Dst}, Vst0) ->
- validate_src(Src, Vst0),
+ Vst = update_type(fun meet/2, map, Map, Vst1),
+ extract_term(term, Ss, Dst, Vst);
+valfun_4({bif,is_map_key,{f,Fail},[_Key,Map]=Ss,Dst}, Vst0) ->
+ validate_src(Ss, Vst0),
Vst1 = branch_state(Fail, Vst0),
- Vst = set_aliased_type(map, Map, Vst1),
- Type = propagate_fragility(bool, Src, Vst),
- set_type_reg(Type, Dst, Vst);
-valfun_4({bif,Op,{f,Fail},[Cons]=Src,Dst}, Vst0)
+ Vst = update_type(fun meet/2, map, Map, Vst1),
+ extract_term(bool, Ss, Dst, Vst);
+valfun_4({bif,Op,{f,Fail},[Cons]=Ss,Dst}, Vst0)
when Op =:= hd; Op =:= tl ->
- validate_src(Src, Vst0),
+ validate_src(Ss, Vst0),
Vst1 = branch_state(Fail, Vst0),
- Vst = set_aliased_type(cons, Cons, Vst1),
- Type0 = bif_type(Op, Src, Vst),
- Type = propagate_fragility(Type0, Src, Vst),
- set_type_reg(Type, Dst, Vst);
-valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) ->
- validate_src(Src, Vst0),
+ Vst = update_type(fun meet/2, cons, Cons, Vst1),
+ Type = bif_type(Op, Ss, Vst),
+ extract_term(Type, Ss, Dst, Vst);
+valfun_4({bif,Op,{f,Fail},Ss,Dst}, Vst0) ->
+ validate_src(Ss, Vst0),
Vst = branch_state(Fail, Vst0),
- Type0 = bif_type(Op, Src, Vst),
- Type = propagate_fragility(Type0, Src, Vst),
- set_type_reg(Type, Dst, Vst);
-valfun_4({gc_bif,Op,{f,Fail},Live,Src,Dst}, #vst{current=St0}=Vst0) ->
+ Type = bif_type(Op, Ss, Vst),
+ extract_term(Type, Ss, Dst, Vst);
+valfun_4({gc_bif,Op,{f,Fail},Live,Ss,Dst}, #vst{current=St0}=Vst0) ->
+ validate_src(Ss, Vst0),
verify_live(Live, Vst0),
verify_y_init(Vst0),
St = kill_heap_allocation(St0),
Vst1 = Vst0#vst{current=St},
Vst2 = branch_state(Fail, Vst1),
- Vst3 = prune_x_regs(Live, Vst2),
- SrcType = get_term_type(hd(Src), Vst3),
- Vst = case Op of
- length when SrcType =/= cons, SrcType =/= nil ->
- %% If we already know we have a cons cell or nil, it
- %% shouldn't be demoted to list.
- set_type(list, hd(Src), Vst3);
- map_size ->
- set_type(map, hd(Src), Vst3);
- _ ->
- Vst3
+ Vst3 = case Op of
+ length -> update_type(fun meet/2, list, hd(Ss), Vst2);
+ map_size -> update_type(fun meet/2, map, hd(Ss), Vst2);
+ _ -> Vst2
end,
- validate_src(Src, Vst),
- Type0 = bif_type(Op, Src, Vst),
- Type = propagate_fragility(Type0, Src, Vst),
- set_type_reg(Type, Dst, Vst);
+ Type = bif_type(Op, Ss, Vst3),
+ Vst = prune_x_regs(Live, Vst3),
+ extract_term(Type, Ss, Dst, Vst, Vst0);
valfun_4(return, #vst{current=#st{numy=none}}=Vst) ->
- assert_term({x,0}, Vst),
+ assert_not_fragile({x,0}, Vst),
kill_state(Vst);
valfun_4(return, #vst{current=#st{numy=NumY}}) ->
error({stack_frame,NumY});
@@ -695,7 +656,7 @@ valfun_4({loop_rec,{f,Fail},Dst}, Vst0) ->
%% remove_message/0 is executed. If control transfers
%% to the loop_rec_end/1 instruction, no part of
%% this term must be stored in a Y register.
- set_type_reg({fragile,term}, Dst, Vst);
+ create_term({fragile,term}, Dst, Vst);
valfun_4({wait,_}, Vst) ->
verify_y_init(Vst),
kill_state(Vst);
@@ -711,7 +672,7 @@ valfun_4(timeout, #vst{current=St}=Vst) ->
valfun_4(send, Vst) ->
call(send, 2, Vst);
valfun_4({set_tuple_element,Src,Tuple,I}, Vst) ->
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
assert_type({tuple_element,I+1}, Tuple, Vst),
Vst;
%% Match instructions.
@@ -723,52 +684,15 @@ valfun_4({select_val,Src,{f,Fail},{list,Choices}}, Vst0) ->
valfun_4({select_tuple_arity,Tuple,{f,Fail},{list,Choices}}, Vst) ->
assert_type(tuple, Tuple, Vst),
assert_arities(Choices),
- TupleType = case get_term_type(Tuple, Vst) of
- {fragile,TupleType0} -> TupleType0;
- TupleType0 -> TupleType0
- end,
+ TupleType = get_durable_term_type(Tuple, Vst),
kill_state(branch_arities(Choices, Tuple, TupleType,
branch_state(Fail, Vst)));
%% New bit syntax matching instructions.
-valfun_4({test,bs_start_match3,{f,Fail},Live,[Src],Dst}, Vst0) ->
- %% Match states are always okay as input.
- SrcType = get_move_term_type(Src, Vst0),
- DstType = propagate_fragility(bsm_match_state(), [Src], Vst0),
- verify_live(Live, Vst0),
- verify_y_init(Vst0),
- Vst1 = prune_x_regs(Live, Vst0),
- BranchVst = case SrcType of
- #ms{} ->
- %% The failure branch will never be taken when Src is a
- %% match context. Therefore, the type for Src at the
- %% failure label must not be match_context (or we could
- %% reject legal code).
- set_type_reg(term, Src, Vst1);
- _ ->
- Vst1
- end,
- Vst = branch_state(Fail, BranchVst),
- set_type_reg(DstType, Dst, Vst);
-valfun_4({test,bs_start_match2,{f,Fail},Live,[Src,Slots],Dst}, Vst0) ->
- %% Match states are always okay as input.
- SrcType = get_move_term_type(Src, Vst0),
- DstType = propagate_fragility(bsm_match_state(Slots), [Src], Vst0),
- verify_live(Live, Vst0),
- verify_y_init(Vst0),
- Vst1 = prune_x_regs(Live, Vst0),
- BranchVst = case SrcType of
- #ms{} ->
- %% The failure branch will never be taken when Src is a
- %% match context. Therefore, the type for Src at the
- %% failure label must not be match_context (or we could
- %% reject legal code).
- set_type_reg(term, Src, Vst1);
- _ ->
- Vst1
- end,
- Vst = branch_state(Fail, BranchVst),
- set_type_reg(DstType, Dst, Vst);
+valfun_4({test,bs_start_match3,{f,Fail},Live,[Src],Dst}, Vst) ->
+ validate_bs_start_match(Fail, Live, bsm_match_state(), Src, Dst, Vst);
+valfun_4({test,bs_start_match2,{f,Fail},Live,[Src,Slots],Dst}, Vst) ->
+ validate_bs_start_match(Fail, Live, bsm_match_state(Slots), Src, Dst, Vst);
valfun_4({test,bs_match_string,{f,Fail},[Ctx,_,_]}, Vst) ->
bsm_validate_context(Ctx, Vst),
branch_state(Fail, Vst);
@@ -810,7 +734,7 @@ valfun_4({bs_get_position, Ctx, Dst, Live}, Vst0) ->
verify_live(Live, Vst0),
verify_y_init(Vst0),
Vst = prune_x_regs(Live, Vst0),
- set_type_reg(bs_position, Dst, Vst);
+ create_term(bs_position, Dst, Vst);
valfun_4({bs_set_position, Ctx, Pos}, Vst) ->
bsm_validate_context(Ctx, Vst),
assert_type(bs_position, Pos, Vst),
@@ -818,91 +742,68 @@ valfun_4({bs_set_position, Ctx, Pos}, Vst) ->
%% Other test instructions.
valfun_4({test,is_atom,{f,Lbl},[Src]}, Vst) ->
- assert_term(Src, Vst),
- set_aliased_type({atom,[]}, Src, branch_state(Lbl, Vst));
+ type_test(Lbl, {atom,[]}, Src, Vst);
valfun_4({test,is_boolean,{f,Lbl},[Src]}, Vst) ->
- assert_term(Src, Vst),
- set_aliased_type(bool, Src, branch_state(Lbl, Vst));
-valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) ->
- assert_term(Float, Vst),
- set_type({float,[]}, Float, branch_state(Lbl, Vst));
-valfun_4({test,is_tuple,{f,Lbl},[Tuple]}, Vst) ->
- Type0 = get_term_type(Tuple, Vst),
- Type = upgrade_tuple_type({tuple,[0]}, Type0),
- set_aliased_type(Type, Tuple, branch_state(Lbl, Vst));
+ type_test(Lbl, bool, Src, Vst);
+valfun_4({test,is_float,{f,Lbl},[Src]}, Vst) ->
+ type_test(Lbl, {float,[]}, Src, Vst);
+valfun_4({test,is_tuple,{f,Lbl},[Src]}, Vst) ->
+ type_test(Lbl, {tuple,[0]}, Src, Vst);
valfun_4({test,is_integer,{f,Lbl},[Src]}, Vst) ->
- assert_term(Src, Vst),
- set_aliased_type({integer,[]}, Src, branch_state(Lbl, Vst));
-valfun_4({test,is_nonempty_list,{f,Lbl},[Cons]}, Vst) ->
- assert_term(Cons, Vst),
- Type = cons,
- set_aliased_type(Type, Cons, branch_state(Lbl, Vst));
+ type_test(Lbl, {integer,[]}, Src, Vst);
+valfun_4({test,is_nonempty_list,{f,Lbl},[Src]}, Vst) ->
+ type_test(Lbl, cons, Src, Vst);
+valfun_4({test,is_list,{f,Lbl},[Src]}, Vst) ->
+ type_test(Lbl, list, Src, Vst);
+valfun_4({test,is_nil,{f,Lbl},[Src]}, Vst) ->
+ type_test(Lbl, nil, Src, Vst);
+valfun_4({test,is_map,{f,Lbl},[Src]}, Vst) ->
+ case Src of
+ {Tag,_} when Tag =:= x; Tag =:= y ->
+ type_test(Lbl, map, Src, Vst);
+ {literal,Map} when is_map(Map) ->
+ Vst;
+ _ ->
+ assert_term(Src, Vst),
+ kill_state(Vst)
+ end;
valfun_4({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) ->
assert_type(tuple, Tuple, Vst),
- Type = {tuple,Sz},
- set_aliased_type(Type, Tuple, branch_state(Lbl, Vst));
+ update_type(fun meet/2, {tuple,Sz}, Tuple, branch_state(Lbl, Vst));
valfun_4({test,is_tagged_tuple,{f,Lbl},[Src,Sz,_Atom]}, Vst) ->
- validate_src([Src], Vst),
- Type = {tuple,Sz},
- set_aliased_type(Type, Src, branch_state(Lbl, Vst));
+ assert_term(Src, Vst),
+ update_type(fun meet/2, {tuple,Sz}, Src, branch_state(Lbl, Vst));
valfun_4({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) ->
assert_type(map, Src, Vst),
assert_unique_map_keys(List),
branch_state(Lbl, Vst);
-valfun_4({test,is_list,{f,Lbl},[Src]}, Vst) ->
- validate_src([Src], Vst),
- Type = case get_term_type(Src, Vst) of
- cons -> cons;
- nil -> nil;
- _ -> list
- end,
- set_aliased_type(Type, Src, branch_state(Lbl, Vst));
-valfun_4({test,is_map,{f,Lbl},[Src]}, Vst0) ->
- Vst = branch_state(Lbl, Vst0),
- case Src of
- {Tag,_} when Tag =:= x; Tag =:= y ->
- Type = map,
- set_aliased_type(Type, Src, Vst);
- {literal,Map} when is_map(Map) ->
- Vst0;
- _ ->
- kill_state(Vst0)
- end;
-valfun_4({test,is_nil,{f,Lbl},[Src]}, Vst0) ->
- Vst = case get_term_type(Src, Vst0) of
- list ->
- branch_state(Lbl, set_aliased_type(cons, Src, Vst0));
- _ ->
- branch_state(Lbl, Vst0)
- end,
- set_aliased_type(nil, Src, Vst);
valfun_4({test,is_eq_exact,{f,Lbl},[Src,Val]=Ss}, Vst0) ->
validate_src(Ss, Vst0),
Infer = infer_types(Src, Vst0),
Vst1 = Infer(Val, Vst0),
- Vst2 = upgrade_ne_types(Src, Val, Vst1),
+ Vst2 = update_ne_types(Src, Val, Vst1),
Vst3 = branch_state(Lbl, Vst2),
Vst = Vst3#vst{current=Vst1#vst.current},
- upgrade_eq_types(Src, Val, Vst);
+ update_eq_types(Src, Val, Vst);
valfun_4({test,is_ne_exact,{f,Lbl},[Src,Val]=Ss}, Vst0) ->
validate_src(Ss, Vst0),
- Vst1 = upgrade_eq_types(Src, Val, Vst0),
+ Vst1 = update_eq_types(Src, Val, Vst0),
Vst2 = branch_state(Lbl, Vst1),
Vst = Vst2#vst{current=Vst0#vst.current},
- upgrade_ne_types(Src, Val, Vst);
+ update_ne_types(Src, Val, Vst);
valfun_4({test,_Op,{f,Lbl},Src}, Vst) ->
validate_src(Src, Vst),
branch_state(Lbl, Vst);
valfun_4({bs_add,{f,Fail},[A,B,_],Dst}, Vst) ->
- assert_term(A, Vst),
- assert_term(B, Vst),
- set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
+ assert_not_fragile(A, Vst),
+ assert_not_fragile(B, Vst),
+ create_term({integer,[]}, Dst, branch_state(Fail, Vst));
valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) ->
assert_term(A, Vst),
- set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
+ create_term({integer,[]}, Dst, branch_state(Fail, Vst));
valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) ->
assert_term(A, Vst),
- set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
+ create_term({integer,[]}, Dst, branch_state(Fail, Vst));
valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
verify_live(Live, Vst0),
verify_y_init(Vst0),
@@ -910,12 +811,12 @@ valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
is_integer(Sz) ->
ok;
true ->
- assert_term(Sz, Vst0)
+ assert_not_fragile(Sz, Vst0)
end,
Vst1 = heap_alloc(Heap, Vst0),
Vst2 = branch_state(Fail, Vst1),
Vst = prune_x_regs(Live, Vst2),
- set_type_reg(binary, Dst, Vst);
+ create_term(binary, Dst, Vst);
valfun_4({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
verify_live(Live, Vst0),
verify_y_init(Vst0),
@@ -928,43 +829,43 @@ valfun_4({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
Vst1 = heap_alloc(Heap, Vst0),
Vst2 = branch_state(Fail, Vst1),
Vst = prune_x_regs(Live, Vst2),
- set_type_reg(binary, Dst, Vst);
+ create_term(binary, Dst, Vst);
valfun_4({bs_append,{f,Fail},Bits,Heap,Live,_Unit,Bin,_Flags,Dst}, Vst0) ->
verify_live(Live, Vst0),
verify_y_init(Vst0),
- assert_term(Bits, Vst0),
- assert_term(Bin, Vst0),
+ assert_not_fragile(Bits, Vst0),
+ assert_not_fragile(Bin, Vst0),
Vst1 = heap_alloc(Heap, Vst0),
Vst2 = branch_state(Fail, Vst1),
Vst = prune_x_regs(Live, Vst2),
- set_type_reg(binary, Dst, Vst);
+ create_term(binary, Dst, Vst);
valfun_4({bs_private_append,{f,Fail},Bits,_Unit,Bin,_Flags,Dst}, Vst0) ->
- assert_term(Bits, Vst0),
- assert_term(Bin, Vst0),
+ assert_not_fragile(Bits, Vst0),
+ assert_not_fragile(Bin, Vst0),
Vst = branch_state(Fail, Vst0),
- set_type_reg(binary, Dst, Vst);
+ create_term(binary, Dst, Vst);
valfun_4({bs_put_string,Sz,_}, Vst) when is_integer(Sz) ->
Vst;
valfun_4({bs_put_binary,{f,Fail},Sz,_,_,Src}, Vst) ->
- assert_term(Sz, Vst),
- assert_term(Src, Vst),
+ assert_not_fragile(Sz, Vst),
+ assert_not_fragile(Src, Vst),
branch_state(Fail, Vst);
valfun_4({bs_put_float,{f,Fail},Sz,_,_,Src}, Vst) ->
- assert_term(Sz, Vst),
- assert_term(Src, Vst),
+ assert_not_fragile(Sz, Vst),
+ assert_not_fragile(Src, Vst),
branch_state(Fail, Vst);
valfun_4({bs_put_integer,{f,Fail},Sz,_,_,Src}, Vst) ->
- assert_term(Sz, Vst),
- assert_term(Src, Vst),
+ assert_not_fragile(Sz, Vst),
+ assert_not_fragile(Src, Vst),
branch_state(Fail, Vst);
valfun_4({bs_put_utf8,{f,Fail},_,Src}, Vst) ->
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
branch_state(Fail, Vst);
valfun_4({bs_put_utf16,{f,Fail},_,Src}, Vst) ->
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
branch_state(Fail, Vst);
valfun_4({bs_put_utf32,{f,Fail},_,Src}, Vst) ->
- assert_term(Src, Vst),
+ assert_not_fragile(Src, Vst),
branch_state(Fail, Vst);
%% Map instructions.
valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) ->
@@ -976,31 +877,12 @@ valfun_4({get_map_elements,{f,Fail},Src,{list,List}}, Vst) ->
valfun_4(_, _) ->
error(unknown_instruction).
-upgrade_ne_types(Src1, Src2, Vst0) ->
- T1 = get_durable_term_type(Src1, Vst0),
- T2 = get_durable_term_type(Src2, Vst0),
- Type = subtract(T1, T2),
- set_aliased_type(Type, Src1, Vst0).
-
-upgrade_eq_types(Src1, Src2, Vst0) ->
- T1 = get_durable_term_type(Src1, Vst0),
- T2 = get_durable_term_type(Src2, Vst0),
- Meet = meet(T1, T2),
- Vst = case T1 =/= Meet of
- true -> set_aliased_type(Meet, Src1, Vst0);
- false -> Vst0
- end,
- case T2 =/= Meet of
- true -> set_aliased_type(Meet, Src2, Vst);
- false -> Vst
- end.
-
verify_get_map(Fail, Src, List, Vst0) ->
assert_not_literal(Src), %OTP 22.
assert_type(map, Src, Vst0),
Vst1 = foldl(fun(D, Vsti) ->
case is_reg_defined(D,Vsti) of
- true -> set_type_reg(term,D,Vsti);
+ true -> create_term(term, D, Vsti);
false -> Vsti
end
end, Vst0, extract_map_vals(List)),
@@ -1019,7 +901,7 @@ extract_map_keys([]) -> [].
verify_get_map_pair([Src,Dst|Vs], Map, Vst0, Vsti0) ->
assert_term(Src, Vst0),
- Vsti = set_type_reg(term, Map, Dst, Vsti0),
+ Vsti = extract_term(term, [Map], Dst, Vsti0),
verify_get_map_pair(Vs, Map, Vst0, Vsti);
verify_get_map_pair([], _Map, _Vst0, Vst) -> Vst.
@@ -1027,13 +909,29 @@ verify_put_map(Fail, Src, Dst, Live, List, Vst0) ->
assert_type(map, Src, Vst0),
verify_live(Live, Vst0),
verify_y_init(Vst0),
- foreach(fun (Term) -> assert_term(Term, Vst0) end, List),
+ foreach(fun (Term) -> assert_not_fragile(Term, Vst0) end, List),
Vst1 = heap_alloc(0, Vst0),
Vst2 = branch_state(Fail, Vst1),
Vst = prune_x_regs(Live, Vst2),
Keys = extract_map_keys(List),
assert_unique_map_keys(Keys),
- set_type_reg(map, Dst, Vst).
+ create_term(map, Dst, Vst).
+
+%%
+%% Common code for validating bs_start_match* instructions.
+%%
+
+validate_bs_start_match(Fail, Live, Type, Src, Dst, Vst0) ->
+ verify_live(Live, Vst0),
+ verify_y_init(Vst0),
+
+ %% #ms{} can represent either a match context or a term, so we have to mark
+ %% the source as a term if it fails, and retain the incoming type if it
+ %% succeeds (match context or not).
+ Vst1 = set_aliased_type(term, Src, Vst0),
+ Vst2 = prune_x_regs(Live, Vst1),
+ Vst3 = branch_state(Fail, Vst2),
+ extract_term(Type, [Src], Dst, Vst3, Vst0).
%%
%% Common code for validating bs_get* instructions.
@@ -1044,7 +942,7 @@ validate_bs_get(Fail, Ctx, Live, Type, Dst, Vst0) ->
verify_y_init(Vst0),
Vst1 = prune_x_regs(Live, Vst0),
Vst = branch_state(Fail, Vst1),
- set_type_reg(Type, Dst, Vst).
+ create_term(Type, Dst, Vst).
%%
%% Common code for validating bs_skip_utf* instructions.
@@ -1117,7 +1015,7 @@ verify_call_args(_, Live, _) ->
verify_call_args_1(0, _) -> ok;
verify_call_args_1(N, Vst) ->
X = N - 1,
- get_term_type({x,X}, Vst),
+ assert_not_fragile({x,X}, Vst),
verify_call_args_1(X, Vst).
verify_local_call(Lbl, Live, Vst) ->
@@ -1405,14 +1303,12 @@ select_val_branches_1([], _, _, Vst) -> Vst.
infer_types(Src, Vst) ->
case get_def(Src, Vst) of
{bif,is_map,{f,_},[Map],_} ->
- fun({atom,true}, S) -> set_aliased_type(map, Map, S);
+ fun({atom,true}, S) -> update_type(fun meet/2, map, Map, S);
(_, S) -> S
end;
{bif,tuple_size,{f,_},[Tuple],_} ->
fun({integer,Arity}, S) ->
- Type0 = get_term_type(Tuple, S),
- Type = upgrade_tuple_type({tuple,Arity}, Type0),
- set_aliased_type(Type, Tuple, S);
+ update_type(fun meet/2, {tuple,Arity}, Tuple, S);
(_, S) -> S
end;
{bif,'=:=',{f,_},[ArityReg,{integer,_}=Val],_} when ArityReg =/= Src ->
@@ -1429,11 +1325,93 @@ infer_types(Src, Vst) ->
%%% Keeping track of types.
%%%
-set_alias(Reg1, Reg2, #vst{current=St0}=Vst) ->
- case Reg1 of
+%% Assigns Src to Dst and marks them as aliasing each other.
+assign({y,_}=Src, {y,_}=Dst, Vst) ->
+ %% The stack trimming optimization may generate a move from an initialized
+ %% but unassigned Y register to another Y register.
+ case get_term_type_1(Src, Vst) of
+ initialized -> set_type_reg(initialized, Dst, Vst);
+ _ -> assign_1(Src, Dst, Vst)
+ end;
+assign({Kind,_}=Reg, Dst, Vst) when Kind =:= x; Kind =:= y ->
+ assign_1(Reg, Dst, Vst);
+assign(Literal, Dst, Vst) ->
+ create_term(get_term_type(Literal, Vst), Dst, Vst).
+
+%% Creates a completely new term with the given type.
+create_term(Type, Dst, Vst) ->
+ set_type_reg(Type, Dst, Vst).
+
+%% Extracts a term from Ss, propagating fragility.
+extract_term(Type, Ss, Dst, Vst) ->
+ extract_term(Type, Ss, Dst, Vst, Vst).
+
+%% As extract_term/4, but uses the incoming Vst for fragility in case x-regs
+%% have been pruned and the sources can no longer be found.
+extract_term(Type0, Ss, Dst, Vst, OrigVst) ->
+ Type = propagate_fragility(Type0, Ss, OrigVst),
+ set_type_reg(Type, Dst, Vst).
+
+%% Helper function for simple "is_type" tests.
+type_test(Fail, Type, Reg, Vst0) ->
+ assert_term(Reg, Vst0),
+ Vst = branch_state(Fail, update_type(fun subtract/2, Type, Reg, Vst0)),
+ update_type(fun meet/2, Type, Reg, Vst).
+
+%% This is used when linear code finds out more and more information about a
+%% type, so that the type gets more specialized.
+update_type(Merge, Type0, Reg, Vst) ->
+ %% If the old type can't be merged with the new one, the type information
+ %% is inconsistent and we know that some instructions will never be
+ %% executed at run-time. For example:
+ %%
+ %% {test,is_list,Fail,[Reg]}.
+ %% {test,is_tuple,Fail,[Reg]}.
+ %% {test,test_arity,Fail,[Reg,5]}.
+ %%
+ %% Note that the test_arity instruction can never be reached, so we use the
+ %% new type instead of 'none'.
+ Type = case Merge(get_durable_term_type(Reg, Vst), Type0) of
+ none -> Type0;
+ T -> T
+ end,
+ set_aliased_type(propagate_fragility(Type, [Reg], Vst), Reg, Vst).
+
+update_ne_types(LHS, RHS, Vst) ->
+ T1 = get_durable_term_type(LHS, Vst),
+ T2 = get_durable_term_type(RHS, Vst),
+ Type = propagate_fragility(subtract(T1, T2), [LHS], Vst),
+ set_aliased_type(Type, LHS, Vst).
+
+update_eq_types(LHS, RHS, Vst0) ->
+ T1 = get_durable_term_type(LHS, Vst0),
+ T2 = get_durable_term_type(RHS, Vst0),
+ Meet = meet(T1, T2),
+ Vst = case T1 =/= Meet of
+ true ->
+ LType = propagate_fragility(Meet, [LHS], Vst0),
+ set_aliased_type(LType, LHS, Vst0);
+ false ->
+ Vst0
+ end,
+ case T2 =/= Meet of
+ true ->
+ RType = propagate_fragility(Meet, [RHS], Vst0),
+ set_aliased_type(RType, RHS, Vst);
+ false ->
+ Vst
+ end.
+
+%% Helper functions for the above.
+
+assign_1(Src, Dst, Vst0) ->
+ Type = get_move_term_type(Src, Vst0),
+ Vst = set_type_reg(Type, Dst, Vst0),
+ case Src of
{Kind,_} when Kind =:= x; Kind =:= y ->
+ #vst{current=St0} = Vst,
#st{aliases=Aliases0} = St0,
- Aliases = Aliases0#{Reg1=>Reg2,Reg2=>Reg1},
+ Aliases = Aliases0#{Src=>Dst,Dst=>Src},
St = St0#st{aliases=Aliases},
Vst#vst{current=St};
_ ->
@@ -1567,6 +1545,12 @@ assert_term(Src, Vst) ->
get_term_type(Src, Vst),
ok.
+assert_not_fragile(Src, Vst) ->
+ case get_term_type(Src, Vst) of
+ {fragile, _} -> error({fragile_message_reference, Src});
+ _ -> ok
+ end.
+
assert_not_literal({x,_}) -> ok;
assert_not_literal({y,_}) -> ok;
assert_not_literal(Literal) -> error({literal_not_allowed,Literal}).
@@ -1692,12 +1676,8 @@ subtract(bool, {atom,true}) -> {atom, false};
subtract(Type, _) -> Type.
assert_type(WantedType, Term, Vst) ->
- case get_term_type(Term, Vst) of
- {fragile,Type} ->
- assert_type(WantedType, Type);
- Type ->
- assert_type(WantedType, Type)
- end.
+ Type = get_durable_term_type(Term, Vst),
+ assert_type(WantedType, Type).
assert_type(Correct, Correct) -> ok;
assert_type(float, {float,_}) -> ok;
@@ -1716,36 +1696,6 @@ assert_type(cons, {literal,[_|_]}) ->
assert_type(Needed, Actual) ->
error({bad_type,{needed,Needed},{actual,Actual}}).
-%% upgrade_tuple_type(NewTupleType, OldType) -> TupleType.
-%% upgrade_tuple_type/2 is used when linear code finds out more and
-%% more information about a tuple type, so that the type gets more
-%% specialized. If OldType is not a tuple type, the type information
-%% is inconsistent, and we know that some instructions will never
-%% be executed at run-time.
-
-upgrade_tuple_type(NewType, {fragile,OldType}) ->
- Type = upgrade_tuple_type_1(NewType, OldType),
- make_fragile(Type);
-upgrade_tuple_type(NewType, OldType) ->
- upgrade_tuple_type_1(NewType, OldType).
-
-upgrade_tuple_type_1(NewType, OldType) ->
- case meet(NewType, OldType) of
- none ->
- %% Unoptimized code may look like this:
- %%
- %% {test,is_list,Fail,[Reg]}.
- %% {test,is_tuple,Fail,[Reg]}.
- %% {test,test_arity,Fail,[Reg,5]}.
- %%
- %% Note that the test_arity instruction can never be reached.
- %% To make sure it's not rejected, set the type of Reg to
- %% NewType instead of 'none'.
- NewType;
- Type ->
- Type
- end.
-
get_tuple_size({integer,[]}) -> 0;
get_tuple_size({integer,Sz}) -> Sz;
get_tuple_size(_) -> 0.
@@ -2110,7 +2060,7 @@ bif_type('+', Src, Vst) ->
bif_type('*', Src, Vst) ->
arith_type(Src, Vst);
bif_type(abs, [Num], Vst) ->
- case get_term_type(Num, Vst) of
+ case get_durable_term_type(Num, Vst) of
{float,_}=T -> T;
{integer,_}=T -> T;
_ -> number
@@ -2162,6 +2112,7 @@ bif_type(is_port, [_], _) -> bool;
bif_type(is_reference, [_], _) -> bool;
bif_type(is_tuple, [_], _) -> bool;
%% Misc.
+bif_type(tuple_size, [_], _) -> {integer,[]};
bif_type(node, [], _) -> {atom,[]};
bif_type(node, [_], _) -> {atom,[]};
bif_type(hd, [_], _) -> term;
@@ -2198,13 +2149,15 @@ is_bif_safe(_, _) -> false.
arith_type([A], Vst) ->
%% Unary '+' or '-'.
- case get_term_type(A, Vst) of
+ case get_durable_term_type(A, Vst) of
{integer,_} -> {integer,[]};
{float,_} -> {float,[]};
_ -> number
end;
arith_type([A,B], Vst) ->
- case {get_term_type(A, Vst),get_term_type(B, Vst)} of
+ TypeA = get_durable_term_type(A, Vst),
+ TypeB = get_durable_term_type(B, Vst),
+ case {TypeA, TypeB} of
{{integer,_},{integer,_}} -> {integer,[]};
{{float,_},_} -> {float,[]};
{_,{float,_}} -> {float,[]};
@@ -2227,9 +2180,15 @@ return_type_1(erlang, setelement, 3, Vst) ->
{tuple,[0]}
end,
case get_term_type({x,0}, Vst) of
- {integer,[]} -> TupleType;
- {integer,I} -> upgrade_tuple_type({tuple,[I]}, TupleType);
- _ -> TupleType
+ {integer,[]} ->
+ TupleType;
+ {integer,I} ->
+ case meet({tuple,[I]}, TupleType) of
+ none -> TupleType;
+ T -> T
+ end;
+ _ ->
+ TupleType
end;
return_type_1(erlang, '++', 2, Vst) ->
case get_term_type({x,0}, Vst) =:= cons orelse
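The comment in update_type/4 above describes taking the meet of the previously known type and the newly asserted type, falling back to the new type when the meet is none (the instruction is then unreachable anyway). A toy illustration of that lattice idea over a few of the validator's type atoms (a hedged sketch, not the validator's real meet/2):

    toy_meet(T, T)       -> T;
    toy_meet(term, T)    -> T;     %% 'term' is the top of the lattice
    toy_meet(T, term)    -> T;
    toy_meet(list, cons) -> cons;  %% the more specific type wins
    toy_meet(cons, list) -> cons;
    toy_meet(_, _)       -> none.  %% incompatible types: unreachable code
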
diff --git a/lib/compiler/test/beam_type_SUITE.erl b/lib/compiler/test/beam_type_SUITE.erl
index 6efa98de44..a7ffc3f60a 100644
--- a/lib/compiler/test/beam_type_SUITE.erl
+++ b/lib/compiler/test/beam_type_SUITE.erl
@@ -222,6 +222,9 @@ coverage(Config) ->
booleans(_Config) ->
{'EXIT',{{case_clause,_},_}} = (catch do_booleans_1(42)),
+ ok = do_booleans_2(42, 41),
+ error = do_booleans_2(42, 42),
+
AnyAtom = id(atom),
true = is_atom(AnyAtom),
false = is_boolean(AnyAtom),
@@ -250,6 +253,19 @@ do_booleans_1(B) ->
no -> no
end.
+do_booleans_2(A, B) ->
+ Not = not do_booleans_cmp(A, B),
+ case Not of
+ true ->
+ case Not of
+ true -> error;
+ false -> ok
+ end;
+ false -> ok
+ end.
+
+do_booleans_cmp(A, B) -> A > B.
+
setelement(_Config) ->
T0 = id({a,42}),
{a,_} = T0,
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 6836e30a1b..987bc3fe0f 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -914,7 +914,8 @@ rand_seed_nif(_Seed) -> ?nif_stub.
-type pk_sign_verify_opts() :: [ rsa_sign_verify_opt() ] .
-type rsa_sign_verify_opt() :: {rsa_padding, rsa_sign_verify_padding()}
- | {rsa_pss_saltlen, integer()} .
+ | {rsa_pss_saltlen, integer()}
+ | {rsa_mgf1_md, sha2()}.
-type rsa_sign_verify_padding() :: rsa_pkcs1_padding | rsa_pkcs1_pss_padding
| rsa_x931_padding | rsa_no_padding
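The crypto change above only extends the sign/verify option type with rsa_mgf1_md. A hedged usage sketch (key size, digest and salt length are arbitrary, and acceptance of the option ultimately depends on the linked OpenSSL version):

    Msg = <<"mgf1 digest option example">>,
    {PubKey, PrivKey} = crypto:generate_key(rsa, {2048, 65537}),
    Opts = [{rsa_padding, rsa_pkcs1_pss_padding},
            {rsa_pss_saltlen, 32},
            {rsa_mgf1_md, sha256}],
    Signature = crypto:sign(rsa, sha256, Msg, PrivKey, Opts),
    true = crypto:verify(rsa, sha256, Msg, Signature, PubKey, Opts).
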
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 26f9fc99d3..b3389def22 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -1104,6 +1104,7 @@ handle_alerts(_, {stop, _, _} = Stop) ->
Stop;
handle_alerts([#alert{level = ?WARNING, description = ?CLOSE_NOTIFY} | _Alerts],
{next_state, connection = StateName, #state{user_data_buffer = Buffer,
+ socket_options = #socket_options{active = false},
protocol_buffers = #protocol_buffers{tls_cipher_texts = CTs}} =
State}) when (Buffer =/= <<>>) orelse
(CTs =/= []) ->
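The tls_connection clause above now defers handling of a close_notify alert with data still buffered only when the socket is in passive mode. For orientation, a hedged sketch of that kind of passive-mode use (host, options and timeout are illustrative):

    {ok, Sock} = ssl:connect("example.com", 443,
                             [binary, {active, false}, {verify, verify_none}]),
    ok = ssl:send(Sock, <<"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n">>),
    %% Already-buffered data can still be fetched with ssl:recv/3 even if
    %% the peer has sent close_notify in the meantime.
    {ok, Data} = ssl:recv(Sock, 0, 5000),
    ok = ssl:close(Sock).
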
diff --git a/lib/ssl/test/ssl_ECC_SUITE.erl b/lib/ssl/test/ssl_ECC_SUITE.erl
index a5309e866b..ca8d0ec70c 100644
--- a/lib/ssl/test/ssl_ECC_SUITE.erl
+++ b/lib/ssl/test/ssl_ECC_SUITE.erl
@@ -212,53 +212,61 @@ client_ecdsa_server_ecdsa_with_raw_key(Config) when is_list(Config) ->
ecc_default_order(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
- ecdhe_ecdsa, ecdhe_ecdsa, Config),
+ ecdhe_ecdsa, ecdhe_ecdsa,
+ Config, DefaultCurve),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
ECCOpts = [],
- case ssl_test_lib:supported_eccs([{eccs, [sect571r1]}]) of
- true -> ssl_test_lib:ecc_test(sect571r1, COpts, SOpts, [], ECCOpts, Config);
+ case ssl_test_lib:supported_eccs([{eccs, [DefaultCurve]}]) of
+ true -> ssl_test_lib:ecc_test(DefaultCurve, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
end.
ecc_default_order_custom_curves(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
- ecdhe_ecdsa, ecdhe_ecdsa, Config),
+ ecdhe_ecdsa, ecdhe_ecdsa,
+ Config, DefaultCurve),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
- true -> ssl_test_lib:ecc_test(sect571r1, COpts, SOpts, [], ECCOpts, Config);
+ true -> ssl_test_lib:ecc_test(DefaultCurve, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
end.
ecc_client_order(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
- ecdhe_ecdsa, ecdhe_ecdsa, Config),
+ ecdhe_ecdsa, ecdhe_ecdsa,
+ Config, DefaultCurve),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
ECCOpts = [{honor_ecc_order, false}],
- case ssl_test_lib:supported_eccs([{eccs, [sect571r1]}]) of
- true -> ssl_test_lib:ecc_test(sect571r1, COpts, SOpts, [], ECCOpts, Config);
+ case ssl_test_lib:supported_eccs([{eccs, [DefaultCurve]}]) of
+ true -> ssl_test_lib:ecc_test(DefaultCurve, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
end.
ecc_client_order_custom_curves(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
- ecdhe_ecdsa, ecdhe_ecdsa, Config),
+ ecdhe_ecdsa, ecdhe_ecdsa,
+ Config, DefaultCurve),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, false}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, false}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
- true -> ssl_test_lib:ecc_test(sect571r1, COpts, SOpts, [], ECCOpts, Config);
+ true -> ssl_test_lib:ecc_test(DefaultCurve, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
end.
@@ -274,12 +282,13 @@ ecc_unknown_curve(Config) ->
client_ecdh_rsa_server_ecdhe_ecdsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdh_rsa, ecdhe_ecdsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
@@ -287,12 +296,13 @@ client_ecdh_rsa_server_ecdhe_ecdsa_server_custom(Config) ->
client_ecdh_rsa_server_ecdhe_rsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdh_rsa, ecdhe_rsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, [], ECCOpts, Config);
@@ -301,12 +311,13 @@ client_ecdh_rsa_server_ecdhe_rsa_server_custom(Config) ->
client_ecdhe_rsa_server_ecdhe_ecdsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdhe_rsa, ecdhe_ecdsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
@@ -314,19 +325,21 @@ client_ecdhe_rsa_server_ecdhe_ecdsa_server_custom(Config) ->
client_ecdhe_rsa_server_ecdhe_rsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdhe_rsa, ecdhe_rsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
end.
client_ecdhe_rsa_server_ecdh_rsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
Ext = x509_test:extensions([{key_usage, [keyEncipherment]}]),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
@@ -334,8 +347,8 @@ client_ecdhe_rsa_server_ecdh_rsa_server_custom(Config) ->
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
- Expected = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))), %% The certificate curve
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
+ Expected = secp256r1, %% The certificate curve
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(Expected, COpts, SOpts, [], ECCOpts, Config);
@@ -344,12 +357,13 @@ client_ecdhe_rsa_server_ecdh_rsa_server_custom(Config) ->
client_ecdhe_ecdsa_server_ecdhe_ecdsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdhe_ecdsa, ecdhe_ecdsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
@@ -357,12 +371,13 @@ client_ecdhe_ecdsa_server_ecdhe_ecdsa_server_custom(Config) ->
client_ecdhe_ecdsa_server_ecdhe_rsa_server_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdhe_ecdsa, ecdhe_rsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{honor_ecc_order, true}, {eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, [], ECCOpts, Config);
false -> {skip, "unsupported named curves"}
@@ -370,12 +385,13 @@ client_ecdhe_ecdsa_server_ecdhe_rsa_server_custom(Config) ->
client_ecdhe_ecdsa_server_ecdhe_ecdsa_client_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdhe_ecdsa, ecdhe_ecdsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, ECCOpts, [], Config);
false -> {skip, "unsupported named curves"}
@@ -383,12 +399,13 @@ client_ecdhe_ecdsa_server_ecdhe_ecdsa_client_custom(Config) ->
client_ecdhe_rsa_server_ecdhe_ecdsa_client_custom(Config) ->
Default = ssl_test_lib:default_cert_chain_conf(),
+ DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))),
{COpts0, SOpts0} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
{client_chain, Default}],
ecdhe_rsa, ecdhe_ecdsa, Config),
COpts = ssl_test_lib:ssl_options(COpts0, Config),
SOpts = ssl_test_lib:ssl_options(SOpts0, Config),
- ECCOpts = [{eccs, [secp256r1, sect571r1]}],
+ ECCOpts = [{eccs, [secp256r1, DefaultCurve]}],
case ssl_test_lib:supported_eccs(ECCOpts) of
true -> ssl_test_lib:ecc_test(secp256r1, COpts, SOpts, ECCOpts, [], Config);
false -> {skip, "unsupported named curves"}
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index 0173b98e1a..fdbd39d016 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -30,6 +30,7 @@
-record(sslsocket, { fd = nil, pid = nil}).
-define(SLEEP, 1000).
+-define(DEFAULT_CURVE, secp256r1).
%% For now always run locally
run_where(_) ->
@@ -618,9 +619,12 @@ make_rsa_cert_chains(UserConf, Config, Suffix) ->
}.
make_ec_cert_chains(UserConf, ClientChainType, ServerChainType, Config) ->
+ make_ec_cert_chains(UserConf, ClientChainType, ServerChainType, Config, ?DEFAULT_CURVE).
+%%
+make_ec_cert_chains(UserConf, ClientChainType, ServerChainType, Config, Curve) ->
ClientChain = proplists:get_value(client_chain, UserConf, default_cert_chain_conf()),
ServerChain = proplists:get_value(server_chain, UserConf, default_cert_chain_conf()),
- CertChainConf = gen_conf(ClientChainType, ServerChainType, ClientChain, ServerChain),
+ CertChainConf = gen_conf(ClientChainType, ServerChainType, ClientChain, ServerChain, Curve),
ClientFileBase = filename:join([proplists:get_value(priv_dir, Config), atom_to_list(ClientChainType)]),
ServerFileBase = filename:join([proplists:get_value(priv_dir, Config), atom_to_list(ServerChainType)]),
GenCertData = public_key:pkix_test_data(CertChainConf),
@@ -635,7 +639,11 @@ default_cert_chain_conf() ->
%% Use only default options
[[],[],[]].
-gen_conf(mix, mix, UserClient, UserServer) ->
+
+gen_conf(ClientChainType, ServerChainType, UserClient, UserServer) ->
+ gen_conf(ClientChainType, ServerChainType, UserClient, UserServer, ?DEFAULT_CURVE).
+%%
+gen_conf(mix, mix, UserClient, UserServer, _) ->
ClientTag = conf_tag("client"),
ServerTag = conf_tag("server"),
@@ -646,12 +654,12 @@ gen_conf(mix, mix, UserClient, UserServer) ->
ServerConf = merge_chain_spec(UserServer, DefaultServer, []),
new_format([{ClientTag, ClientConf}, {ServerTag, ServerConf}]);
-gen_conf(ClientChainType, ServerChainType, UserClient, UserServer) ->
+gen_conf(ClientChainType, ServerChainType, UserClient, UserServer, Curve) ->
ClientTag = conf_tag("client"),
ServerTag = conf_tag("server"),
- DefaultClient = chain_spec(client, ClientChainType),
- DefaultServer = chain_spec(server, ServerChainType),
+ DefaultClient = chain_spec(client, ClientChainType, Curve),
+ DefaultServer = chain_spec(server, ServerChainType, Curve),
ClientConf = merge_chain_spec(UserClient, DefaultClient, []),
ServerConf = merge_chain_spec(UserServer, DefaultServer, []),
@@ -673,43 +681,43 @@ proplist_to_map([Head | Rest]) ->
conf_tag(Role) ->
list_to_atom(Role ++ "_chain").
-chain_spec(_Role, ecdh_rsa) ->
+chain_spec(_Role, ecdh_rsa, Curve) ->
Digest = {digest, appropriate_sha(crypto:supports())},
- CurveOid = hd(tls_v1:ecc_curves(0)),
+ CurveOid = pubkey_cert_records:namedCurves(Curve),
[[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, hardcode_rsa_key(1)}],
[Digest, {key, {namedCurve, CurveOid}}]];
-chain_spec(_Role, ecdhe_ecdsa) ->
+chain_spec(_Role, ecdhe_ecdsa, Curve) ->
Digest = {digest, appropriate_sha(crypto:supports())},
- CurveOid = hd(tls_v1:ecc_curves(0)),
+ CurveOid = pubkey_cert_records:namedCurves(Curve),
[[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, {namedCurve, CurveOid}}]];
-chain_spec(_Role, ecdh_ecdsa) ->
+chain_spec(_Role, ecdh_ecdsa, Curve) ->
Digest = {digest, appropriate_sha(crypto:supports())},
- CurveOid = hd(tls_v1:ecc_curves(0)),
+ CurveOid = pubkey_cert_records:namedCurves(Curve),
[[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, {namedCurve, CurveOid}}]];
-chain_spec(_Role, ecdhe_rsa) ->
+chain_spec(_Role, ecdhe_rsa, _) ->
Digest = {digest, appropriate_sha(crypto:supports())},
[[Digest, {key, hardcode_rsa_key(1)}],
[Digest, {key, hardcode_rsa_key(2)}],
[Digest, {key, hardcode_rsa_key(3)}]];
-chain_spec(_Role, ecdsa) ->
+chain_spec(_Role, ecdsa, Curve) ->
Digest = {digest, appropriate_sha(crypto:supports())},
- CurveOid = hd(tls_v1:ecc_curves(0)),
+ CurveOid = pubkey_cert_records:namedCurves(Curve),
[[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, {namedCurve, CurveOid}}],
[Digest, {key, {namedCurve, CurveOid}}]];
-chain_spec(_Role, rsa) ->
+chain_spec(_Role, rsa, _) ->
Digest = {digest, appropriate_sha(crypto:supports())},
[[Digest, {key, hardcode_rsa_key(1)}],
[Digest, {key, hardcode_rsa_key(2)}],
[Digest, {key, hardcode_rsa_key(3)}]];
-chain_spec(_Role, dsa) ->
+chain_spec(_Role, dsa, _) ->
Digest = {digest, appropriate_sha(crypto:supports())},
[[Digest, {key, hardcode_dsa_key(1)}],
[Digest, {key, hardcode_dsa_key(2)}],
@@ -742,7 +750,7 @@ merge_spec(User, Default, [Conf | Rest], Acc) ->
make_mix_cert(Config) ->
Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
Digest = {digest, appropriate_sha(crypto:supports())},
- CurveOid = hd(tls_v1:ecc_curves(0)),
+ CurveOid = pubkey_cert_records:namedCurves(?DEFAULT_CURVE),
Mix = proplists:get_value(mix, Config, peer_ecc),
ClientChainType =ServerChainType = mix,
{ClientChain, ServerChain} = mix(Mix, Digest, CurveOid, Ext),
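The ssl test changes above parameterize the generated certificate chains on a named curve, converting between the curve atom and its OID with pubkey_cert_records:namedCurves/1, which maps in both directions. A small illustrative sketch (secp256r1 mirrors the new ?DEFAULT_CURVE):

    CurveOid  = pubkey_cert_records:namedCurves(secp256r1),
    secp256r1 = pubkey_cert_records:namedCurves(CurveOid),
    %% The curve the TLS code offers first, as used by the updated
    %% ssl_ECC_SUITE cases:
    DefaultCurve = pubkey_cert_records:namedCurves(hd(tls_v1:ecc_curves(0))).
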
diff --git a/lib/stdlib/doc/src/ets.xml b/lib/stdlib/doc/src/ets.xml
index e67397c6fd..7594514b29 100644
--- a/lib/stdlib/doc/src/ets.xml
+++ b/lib/stdlib/doc/src/ets.xml
@@ -188,6 +188,31 @@
is used to keep the table fixated during the entire traversal.</p>
</item>
</list>
+ <note>
+ <p>Even though the access of a single object is always guaranteed to be
+ <seealso marker="#concurrency">atomic and isolated</seealso>, each traversal
+ through a table to find the next key is not done with such guarantees. This is often
+ not a problem, but may cause rare subtle "unexpected" effects if a concurrent
+ process inserts objects during a traversal. For example, consider one
+ process doing</p>
+<pre>
+ets:new(t, [ordered_set, named_table]),
+ets:insert(t, {1}),
+ets:insert(t, {2}),
+ets:insert(t, {3}),
+</pre>
+ <p>A concurrent call to <c>ets:first(t)</c>, done by another
+ process, may then in rare cases return <c>2</c> even though
+ <c>2</c> has never been the first key in the table's order. In
+ the same way, a concurrent call to <c>ets:next(t, 1)</c> may return
+ <c>3</c> even though <c>3</c> has never been the key ordered
+ directly after <c>1</c> in the table.</p>
+ <p>Effects like this are improbable but possible. The probability is
+ further reduced (and may even vanish) if table option
+ <seealso marker="#new_2_write_concurrency"><c>write_concurrency</c></seealso>
+ is not enabled. This is also only a potential concern for
+ <c>ordered_set</c> tables, where the traversal order is defined.</p>
+ </note>
</section>
<section>
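
Finally, a minimal sketch (not taken from the manual) of the kind of first/next traversal the new ets.xml note discusses; the table and callback names are illustrative:

    traverse(Tab, Fun) ->
        do_traverse(ets:first(Tab), Tab, Fun).

    do_traverse('$end_of_table', _Tab, _Fun) ->
        ok;
    do_traverse(Key, Tab, Fun) ->
        _ = [Fun(Obj) || Obj <- ets:lookup(Tab, Key)],
        %% The step from Key to the next key is where a concurrently
        %% inserted key may or may not be observed, as described in the note.
        do_traverse(ets:next(Tab, Key), Tab, Fun).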