Diffstat (limited to 'lib/compiler/src')
-rw-r--r--  lib/compiler/src/beam_jump.erl       |  15
-rw-r--r--  lib/compiler/src/beam_type.erl       |   5
-rw-r--r--  lib/compiler/src/beam_utils.erl      |  22
-rw-r--r--  lib/compiler/src/beam_validator.erl  |  38
-rw-r--r--  lib/compiler/src/compile.erl         |  42
-rw-r--r--  lib/compiler/src/sys_core_bsm.erl    |  62
-rw-r--r--  lib/compiler/src/sys_core_fold.erl   | 114
-rw-r--r--  lib/compiler/src/v3_codegen.erl      | 141
-rw-r--r--  lib/compiler/src/v3_core.erl         |  64
9 files changed, 298 insertions, 205 deletions
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index 9eee56d604..22974da398 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -128,7 +128,7 @@
%%% on the program state.
%%%
--import(lists, [reverse/1,reverse/2,foldl/3]).
+-import(lists, [dropwhile/2,reverse/1,reverse/2,foldl/3]).
-type instruction() :: beam_utils:instruction().
@@ -411,14 +411,19 @@ opt_useless_loads([{test,_,{f,L},_}=I|Is], L, St) ->
opt_useless_loads(Is, _L, St) ->
{Is,St}.
-opt_useless_block_loads([{set,[Dst],_,_}=I|Is], L, Index) ->
- BlockJump = [{block,Is},{jump,{f,L}}],
+opt_useless_block_loads([{set,[Dst],_,_}=I|Is0], L, Index) ->
+ BlockJump = [{block,Is0},{jump,{f,L}}],
case beam_utils:is_killed(Dst, BlockJump, Index) of
true ->
- %% The register is killed and not used, we can remove the load
+ %% The register is killed and not used, we can remove the load.
+ %% Remove any `put` instructions in case we just
+ %% removed a `put_tuple` instruction.
+ Is = dropwhile(fun({set,_,_,put}) -> true;
+ (_) -> false
+ end, Is0),
opt_useless_block_loads(Is, L, Index);
false ->
- [I|opt_useless_block_loads(Is, L, Index)]
+ [I|opt_useless_block_loads(Is0, L, Index)]
end;
opt_useless_block_loads([I|Is], L, Index) ->
[I|opt_useless_block_loads(Is, L, Index)];
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index b8c3ca1325..a1e9eff8f3 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -559,7 +559,7 @@ update({bs_save2,_,_}, Ts) ->
update({bs_restore2,_,_}, Ts) ->
Ts;
update({bs_context_to_binary,Dst}, Ts) ->
- tdb_store(Dst, {binary,1}, Ts);
+ tdb_store(Dst, any, Ts);
update({test,bs_start_match2,_,_,[Src,_],Dst}, Ts0) ->
Ts = tdb_meet(Src, {binary,1}, Ts0),
tdb_copy(Src, Dst, Ts);
@@ -1114,4 +1114,5 @@ verified_type(nonempty_list=T) -> T;
verified_type({tuple,_,Sz,[]}=T) when is_integer(Sz) -> T;
verified_type({tuple,_,Sz,[_]}=T) when is_integer(Sz) -> T;
verified_type({tuple_element,_,_}=T) -> T;
-verified_type(float=T) -> T.
+verified_type(float=T) -> T;
+verified_type(none=T) -> T.
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index 5510624b2d..6b2ab5a2a4 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -355,6 +355,9 @@ split_even(Rs) -> split_even(Rs, [], []).
%% exit BIF will raise an exception
%% used - Reg is used
+check_liveness({fr,_}, _, St) ->
+ %% Conservatively always consider the floating point register used.
+ {used,St};
check_liveness(R, [{block,Blk}|Is], St0) ->
case check_liveness_block(R, Blk, St0) of
{transparent,St1} ->
@@ -742,8 +745,11 @@ check_liveness_block_2(R, {gc_bif,Op,{f,Lbl}}, Ss, St) ->
check_liveness_block_3(R, Lbl, {Op,length(Ss)}, St);
check_liveness_block_2(R, {bif,Op,{f,Lbl}}, Ss, St) ->
Arity = length(Ss),
+
+ %% Note that is_function/2 is a type test but is not safe.
case erl_internal:comp_op(Op, Arity) orelse
- erl_internal:new_type_test(Op, Arity) of
+ (erl_internal:new_type_test(Op, Arity) andalso
+ erl_bifs:is_safe(erlang, Op, Arity)) of
true ->
{killed,St};
false ->
@@ -1105,9 +1111,17 @@ defs([{bif,_,{f,Fail},_Src,Dst}=I|Is], Regs0, D) ->
defs([{block,Block0}|Is], Regs0, D0) ->
{Block,Regs,D} = defs_list(Block0, Regs0, D0),
[{block,[make_anno({def,Regs0})|Block]}|defs(Is, Regs, D)];
-defs([{bs_init,{f,L},_,_,_,Dst}=I|Is], Regs0, D) ->
- Regs = def_regs([Dst], Regs0),
+defs([{bs_init,{f,L},_,Live,_,Dst}=I|Is], Regs0, D) ->
+ Regs1 = case Live of
+ none -> Regs0;
+ _ -> init_def_regs(Live)
+ end,
+ Regs = def_regs([Dst], Regs1),
[I|defs(Is, Regs, update_regs(L, Regs, D))];
+defs([{test,bs_start_match2,{f,L},Live,_,Dst}=I|Is], _Regs, D) ->
+ Regs0 = init_def_regs(Live),
+ Regs = def_regs([Dst], Regs0),
+ [I|defs(Is, Regs, update_regs(L, Regs0, D))];
defs([{bs_put,{f,L},_,_}=I|Is], Regs, D) ->
[I|defs(Is, Regs, update_regs(L, Regs, D))];
defs([build_stacktrace=I|Is], _Regs, D) ->
@@ -1157,7 +1171,7 @@ defs([{loop_rec,{f,L},{x,0}}=I|Is], _Regs, D0) ->
D = update_regs(L, RegsAtLabel, D0),
[I|defs(Is, init_def_regs(1), D)];
defs([{loop_rec_end,_}=I|Is], _Regs, D) ->
- [I|defs(Is, 0, D)];
+ [I|defs_unreachable(Is, D)];
defs([{make_fun2,_,_,_,_}=I|Is], _Regs, D) ->
[I|defs(Is, 1, D)];
defs([{move,_,Dst}=I|Is], Regs0, D) ->
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index b03fadb197..fb2e7df65c 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -451,6 +451,19 @@ valfun_1({try_case,Reg}, #vst{current=#st{ct=[Fail|Fails]}}=Vst0) ->
Type ->
error({bad_type,Type})
end;
+valfun_1({get_list,Src,D1,D2}, Vst0) ->
+ assert_type(cons, Src, Vst0),
+ Vst = set_type_reg(term, Src, D1, Vst0),
+ set_type_reg(term, Src, D2, Vst);
+valfun_1({get_hd,Src,Dst}, Vst) ->
+ assert_type(cons, Src, Vst),
+ set_type_reg(term, Src, Dst, Vst);
+valfun_1({get_tl,Src,Dst}, Vst) ->
+ assert_type(cons, Src, Vst),
+ set_type_reg(term, Src, Dst, Vst);
+valfun_1({get_tuple_element,Src,I,Dst}, Vst) ->
+ assert_type({tuple_element,I+1}, Src, Vst),
+ set_type_reg(term, Src, Dst, Vst);
valfun_1(I, Vst) ->
valfun_2(I, Vst).
@@ -546,6 +559,18 @@ valfun_4({bif,raise,{f,0},Src,_Dst}, Vst) ->
kill_state(Vst);
valfun_4(raw_raise=I, Vst) ->
call(I, 3, Vst);
+valfun_4({bif,map_get,{f,Fail},[_Key,Map]=Src,Dst}, Vst0) ->
+ validate_src(Src, Vst0),
+ Vst1 = branch_state(Fail, Vst0),
+ Vst = set_type(map, Map, Vst1),
+ Type = propagate_fragility(term, Src, Vst),
+ set_type_reg(Type, Dst, Vst);
+valfun_4({bif,is_map_key,{f,Fail},[_Key,Map]=Src,Dst}, Vst0) ->
+ validate_src(Src, Vst0),
+ Vst1 = branch_state(Fail, Vst0),
+ Vst = set_type(map, Map, Vst1),
+ Type = propagate_fragility(bool, Src, Vst),
+ set_type_reg(Type, Dst, Vst);
valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) ->
validate_src(Src, Vst0),
Vst = branch_state(Fail, Vst0),
@@ -603,19 +628,6 @@ valfun_4({select_val,Src,{f,Fail},{list,Choices}}, Vst) ->
valfun_4({select_tuple_arity,Tuple,{f,Fail},{list,Choices}}, Vst) ->
assert_type(tuple, Tuple, Vst),
kill_state(branch_arities(Choices, Tuple, branch_state(Fail, Vst)));
-valfun_4({get_list,Src,D1,D2}, Vst0) ->
- assert_type(cons, Src, Vst0),
- Vst = set_type_reg(term, Src, D1, Vst0),
- set_type_reg(term, Src, D2, Vst);
-valfun_4({get_hd,Src,Dst}, Vst) ->
- assert_type(cons, Src, Vst),
- set_type_reg(term, Src, Dst, Vst);
-valfun_4({get_tl,Src,Dst}, Vst) ->
- assert_type(cons, Src, Vst),
- set_type_reg(term, Src, Dst, Vst);
-valfun_4({get_tuple_element,Src,I,Dst}, Vst) ->
- assert_type({tuple_element,I+1}, Src, Vst),
- set_type_reg(term, Src, Dst, Vst);
%% New bit syntax matching instructions.
valfun_4({test,bs_start_match2,{f,Fail},Live,[Ctx,NeedSlots],Ctx}, Vst0) ->
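
The two new valfun_4/2 clauses above validate the map guard BIFs introduced in OTP 21. A small source-level example of the code they accept (my own code, not part of the patch):

    %% Both is_map_key/2 and map_get/2 are allowed in guards; the validator
    %% clauses above require the map argument to be of type map and give the
    %% result type bool (is_map_key) or term (map_get).
    find(Key, Map) when is_map_key(Key, Map) ->
        {ok, map_get(Key, Map)};
    find(_Key, _Map) ->
        error.
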
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 5ef9611504..6510571441 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -931,11 +931,17 @@ parse_module(_Code, St0) ->
end.
do_parse_module(DefEncoding, #compile{ifile=File,options=Opts,dir=Dir}=St) ->
+ SourceName0 = proplists:get_value(source, Opts, File),
+ SourceName = case member(deterministic, Opts) of
+ true -> filename:basename(SourceName0);
+ false -> SourceName0
+ end,
R = epp:parse_file(File,
- [{includes,[".",Dir|inc_paths(Opts)]},
- {macros,pre_defs(Opts)},
- {default_encoding,DefEncoding},
- extra]),
+ [{includes,[".",Dir|inc_paths(Opts)]},
+ {source_name, SourceName},
+ {macros,pre_defs(Opts)},
+ {default_encoding,DefEncoding},
+ extra]),
case R of
{ok,Forms,Extra} ->
Encoding = proplists:get_value(encoding, Extra),
@@ -1432,16 +1438,30 @@ encrypt_debug_info(DebugInfo, Key, Opts) ->
end.
cleanup_compile_options(Opts) ->
- lists:filter(fun keep_compile_option/1, Opts).
-
+ IsDeterministic = lists:member(deterministic, Opts),
+ lists:filter(fun(Opt) ->
+ keep_compile_option(Opt, IsDeterministic)
+ end, Opts).
+
+%% Include paths and current directory don't affect compilation, but they might
+%% be helpful so we include them unless we're doing a deterministic build.
+keep_compile_option({i, _}, Deterministic) ->
+ not Deterministic;
+keep_compile_option({cwd, _}, Deterministic) ->
+ not Deterministic;
%% We are storing abstract, not asm or core.
-keep_compile_option(from_asm) -> false;
-keep_compile_option(from_core) -> false;
+keep_compile_option(from_asm, _Deterministic) ->
+ false;
+keep_compile_option(from_core, _Deterministic) ->
+ false;
%% Parse transform and macros have already been applied.
-keep_compile_option({parse_transform, _}) -> false;
-keep_compile_option({d, _, _}) -> false;
+keep_compile_option({parse_transform, _}, _Deterministic) ->
+ false;
+keep_compile_option({d, _, _}, _Deterministic) ->
+ false;
%% Do not affect compilation result on future calls.
-keep_compile_option(Option) -> effects_code_generation(Option).
+keep_compile_option(Option, _Deterministic) ->
+ effects_code_generation(Option).
start_crypto() ->
try crypto:start() of
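
A minimal usage sketch of the `deterministic` option handled above (my own example; the function name and output directory are placeholders): with this option the source name passed to epp is reduced to its base name and the {i,_}/{cwd,_} options are not stored, so the resulting BEAM file does not depend on the build directory.

    %% Sketch only: a deterministic build that still keeps debug information.
    deterministic_build(File) ->
        compile:file(File, [deterministic, debug_info, {outdir, "ebin"}]).
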
diff --git a/lib/compiler/src/sys_core_bsm.erl b/lib/compiler/src/sys_core_bsm.erl
index d7b26c3a56..62657933ee 100644
--- a/lib/compiler/src/sys_core_bsm.erl
+++ b/lib/compiler/src/sys_core_bsm.erl
@@ -44,6 +44,14 @@ function([{#c_var{name={F,Arity}}=Name,B0}|Fs], FsAcc, Ws0) ->
{B,Ws} ->
function(Fs, [{Name,B}|FsAcc], Ws)
catch
+ throw:unsafe_bs_context_to_binary ->
+ %% Unsafe bs_context_to_binary (in the sense that the
+ %% contents of the binary will probably be wrong).
+ %% Disable binary optimizations for the entire function.
+ %% We don't generate an INFO message, because this happens
+ %% very infrequently and it would be hard to explain in
+ %% a comprehensible way in an INFO message.
+ function(Fs, [{Name,B0}|FsAcc], Ws0);
Class:Error:Stack ->
io:fwrite("Function: ~w/~w\n", [F,Arity]),
erlang:raise(Class, Error, Stack)
@@ -116,12 +124,66 @@ move_from_col(Pos, L) ->
[Col|First] ++ Rest.
bsm_do_an([#c_var{name=Vname}=V0|Vs0], Cs0, Case) ->
+ bsm_inner_context_to_binary(Cs0),
Cs = bsm_do_an_var(Vname, Cs0),
V = bsm_annotate_for_reuse(V0),
Vs = core_lib:make_values([V|Vs0]),
Case#c_case{arg=Vs,clauses=Cs};
bsm_do_an(_Vs, _Cs, Case) -> Case.
+bsm_inner_context_to_binary([#c_clause{body=B}|Cs]) ->
+ %% Consider:
+ %%
+ %% foo(<<Length, Data/binary>>) -> %Line 1
+ %% case {Data, Length} of %Line 2
+ %% {_, 0} -> Data; %Line 3
+ %% {<<...>>, 4} -> ... %Line 4
+ %% end.
+ %%
+ %% No sub binary will be created for Data in line 1. The match
+ %% context will be passed on to the `case` in line 2. In line 3,
+ %% this pass inserts a `bs_context_to_binary` instruction to
+ %% convert the match context representing Data to a binary before
+ %% returning it. The problem is that the binary created will be
+ %% the original binary (including the matched out Length field),
+ %% not the tail of the binary as it is supposed to be.
+ %%
+ %% Here follows a heuristic to disable the binary optimizations
+ %% for the entire function if this kind of code is found.
+
+ case cerl_trees:free_variables(B) of
+ [] ->
+ %% Since there are no free variables in the body of
+ %% this clause, there can't be any troublesome
+ %% bs_context_to_binary instructions.
+ bsm_inner_context_to_binary(Cs);
+ [_|_]=Free ->
+ %% One of the free variables could refer to a match context
+ %% created by the outer binary match.
+ F = fun(#c_primop{name=#c_literal{val=bs_context_to_binary},
+ args=[#c_var{name=V}]}, _) ->
+ case member(V, Free) of
+ true ->
+ %% This bs_context_to_binary instruction will
+ %% make a binary of the match context from an
+ %% outer binary match. It is very likely that
+ %% the contents of the binary will be wrong
+ %% (the original binary as opposed to only
+ %% the tail binary).
+ throw(unsafe_bs_context_to_binary);
+ false ->
+ %% Safe. This bs_context_to_binary instruction
+ %% will make a binary from a match context
+ %% defined in the body of the clause.
+ ok
+ end;
+ (_, _) ->
+ ok
+ end,
+ cerl_trees:fold(F, ok, B)
+ end;
+bsm_inner_context_to_binary([]) -> ok.
+
bsm_do_an_var(V, [#c_clause{pats=[P|_],guard=G,body=B0}=C0|Cs]) ->
case P of
#c_var{name=VarName} ->
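
For reference, a compilable version of the pattern described in the new comment above (my own example; the concrete patterns in the inner case are made up): returning Data from a clause of the inner case is what forces a bs_context_to_binary on the match context created by the outer binary match.

    foo(<<Length, Data/binary>>) ->
        case {Data, Length} of
            {_, 0} -> Data;
            {<<X:32, _/binary>>, 4} -> X
        end.
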
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index ceb7d56221..1681d97efb 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -115,13 +115,6 @@ module(#c_module{defs=Ds0}=Mod, Opts) ->
{ok,Mod#c_module{defs=Ds1},get_warnings()}.
function_1({#c_var{name={F,Arity}}=Name,B0}) ->
- %% Find a suitable starting value for the variable counter. Note
- %% that this pass assumes that new_var_name/1 returns a variable
- %% name distinct from any variable used in the entire body of
- %% the function. We use integers as variable names to avoid
- %% filling up the atom table when compiling huge functions.
- Count = cerl_trees:next_free_variable_name(B0),
- put(new_var_num, Count),
try
%% Find a suitable starting value for the variable
%% counter. Note that this pass assumes that new_var_name/1
@@ -352,7 +345,12 @@ expr(#c_letrec{body=#c_var{}}=Letrec, effect, _Sub) ->
void();
expr(#c_letrec{defs=Fs0,body=B0}=Letrec, Ctxt, Sub) ->
Fs1 = map(fun ({Name,Fb}) ->
- {Name,expr(Fb, {letrec,Ctxt}, Sub)}
+ case Ctxt =:= effect andalso is_fun_effect_safe(Name, B0) of
+ true ->
+ {Name,expr(Fb, {letrec, effect}, Sub)};
+ false ->
+ {Name,expr(Fb, {letrec, value}, Sub)}
+ end
end, Fs0),
B1 = body(B0, Ctxt, Sub),
Letrec#c_letrec{defs=Fs1,body=B1};
@@ -483,6 +481,86 @@ expr(#c_try{anno=A,arg=E0,vars=Vs0,body=B0,evars=Evs0,handler=H0}=Try, _, Sub0)
Try#c_try{arg=E1,vars=Vs1,body=B1,evars=Evs1,handler=H1}
end.
+
+%% If a fun or its application is used as an argument, then it's unsafe to
+%% handle it in effect context as the side-effects may rely on its return
+%% value. The following is a minimal example of where it can go wrong:
+%%
+%% do letrec 'f'/0 = fun () -> ... whatever ...
+%% in call 'side':'effect'(apply 'f'/0())
+%% 'ok'
+%%
+%% This function returns 'true' if Body definitely does not rely on a
+%% value produced by FVar, or 'false' if Body depends on or might depend on
+%% a value produced by FVar.
+
+is_fun_effect_safe(#c_var{}=FVar, Body) ->
+ ifes_1(FVar, Body, true).
+
+ifes_1(FVar, #c_alias{pat=Pat}, _Safe) ->
+ ifes_1(FVar, Pat, false);
+ifes_1(FVar, #c_apply{op=Op,args=Args}, Safe) ->
+ %% FVar(...) is safe as long as its return value is ignored, but it's never
+ %% okay to pass FVar as an argument.
+ ifes_list(FVar, Args, false) andalso ifes_1(FVar, Op, Safe);
+ifes_1(FVar, #c_binary{segments=Segments}, _Safe) ->
+ ifes_list(FVar, Segments, false);
+ifes_1(FVar, #c_bitstr{val=Val,size=Size,unit=Unit}, _Safe) ->
+ ifes_list(FVar, [Val, Size, Unit], false);
+ifes_1(FVar, #c_call{args=Args}, _Safe) ->
+ ifes_list(FVar, Args, false);
+ifes_1(FVar, #c_case{arg=Arg,clauses=Clauses}, Safe) ->
+ ifes_1(FVar, Arg, false) andalso ifes_list(FVar, Clauses, Safe);
+ifes_1(FVar, #c_catch{body=Body}, _Safe) ->
+ ifes_1(FVar, Body, false);
+ifes_1(FVar, #c_clause{pats=Pats,guard=Guard,body=Body}, Safe) ->
+ ifes_list(FVar, Pats, false) andalso
+ ifes_1(FVar, Guard, false) andalso
+ ifes_1(FVar, Body, Safe);
+ifes_1(FVar, #c_cons{hd=Hd,tl=Tl}, _Safe) ->
+ ifes_1(FVar, Hd, false) andalso ifes_1(FVar, Tl, false);
+ifes_1(FVar, #c_fun{body=Body}, _Safe) ->
+ ifes_1(FVar, Body, false);
+ifes_1(FVar, #c_let{arg=Arg,body=Body}, Safe) ->
+ ifes_1(FVar, Arg, false) andalso ifes_1(FVar, Body, Safe);
+ifes_1(FVar, #c_letrec{defs=Defs,body=Body}, Safe) ->
+ Funs = [Fun || {_,Fun} <- Defs],
+ ifes_list(FVar, Funs, false) andalso ifes_1(FVar, Body, Safe);
+ifes_1(_FVar, #c_literal{}, _Safe) ->
+ true;
+ifes_1(FVar, #c_map{arg=Arg,es=Elements}, _Safe) ->
+ ifes_1(FVar, Arg, false) andalso ifes_list(FVar, Elements, false);
+ifes_1(FVar, #c_map_pair{key=Key,val=Val}, _Safe) ->
+ ifes_1(FVar, Key, false) andalso ifes_1(FVar, Val, false);
+ifes_1(FVar, #c_primop{args=Args}, _Safe) ->
+ ifes_list(FVar, Args, false);
+ifes_1(FVar, #c_receive{timeout=Timeout,action=Action,clauses=Clauses}, Safe) ->
+ ifes_1(FVar, Timeout, false) andalso
+ ifes_1(FVar, Action, Safe) andalso
+ ifes_list(FVar, Clauses, Safe);
+ifes_1(FVar, #c_seq{arg=Arg,body=Body}, Safe) ->
+ %% Arg of a #c_seq{} has no effect so it's okay to use FVar there even if
+ %% Safe=false.
+ ifes_1(FVar, Arg, true) andalso ifes_1(FVar, Body, Safe);
+ifes_1(FVar, #c_try{arg=Arg,handler=Handler,body=Body}, Safe) ->
+ ifes_1(FVar, Arg, false) andalso
+ ifes_1(FVar, Handler, Safe) andalso
+ ifes_1(FVar, Body, Safe);
+ifes_1(FVar, #c_tuple{es=Elements}, _Safe) ->
+ ifes_list(FVar, Elements, false);
+ifes_1(FVar, #c_values{es=Elements}, _Safe) ->
+ ifes_list(FVar, Elements, false);
+ifes_1(#c_var{name=Name}, #c_var{name=Name}, Safe) ->
+ %% It's safe to return FVar if it's unused.
+ Safe;
+ifes_1(_FVar, #c_var{}, _Safe) ->
+ true.
+
+ifes_list(FVar, [E|Es], Safe) ->
+ ifes_1(FVar, E, Safe) andalso ifes_list(FVar, Es, Safe);
+ifes_list(_FVar, [], _Safe) ->
+ true.
+
expr_list(Es, Ctxt, Sub) ->
[expr(E, Ctxt, Sub) || E <- Es].
@@ -2557,12 +2635,20 @@ opt_build_stacktrace(#c_let{vars=[#c_var{name=Cooked}],
#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=raise},
args=[Class,Exp,#c_var{name=Cooked}]} ->
- %% The stacktrace is only used in a call to erlang:raise/3.
- %% There is no need to build the stacktrace. Replace the
- %% call to erlang:raise/3 with the the raw_raise/3 instruction,
- %% which will use a raw stacktrace.
- #c_primop{name=#c_literal{val=raw_raise},
- args=[Class,Exp,RawStk]};
+ case core_lib:is_var_used(Cooked, #c_cons{hd=Class,tl=Exp}) of
+ true ->
+ %% Not safe. The stacktrace is used in the class or
+ %% reason.
+ Let;
+ false ->
+ %% The stacktrace is only used in the last
+ %% argument for erlang:raise/3. There is no need
+ %% to build the stacktrace. Replace the call to
+ %% erlang:raise/3 with the raw_raise/3
+ %% instruction, which will use a raw stacktrace.
+ #c_primop{name=#c_literal{val=raw_raise},
+ args=[Class,Exp,RawStk]}
+ end;
#c_let{vars=[#c_var{name=V}],arg=Arg,body=B0} when V =/= Cooked ->
case core_lib:is_var_used(Cooked, Arg) of
false ->
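
A source-level illustration of the distinction drawn by is_fun_effect_safe/2 above (my own example; side:effect/1 is a placeholder, as in the comment in the patch). Comprehensions are compiled to letrec-bound funs; only the first one below can be handled in effect context, because its result is ignored rather than passed on as an argument.

    effect_only(L) ->
        %% The comprehension's result is ignored, so effect context is safe.
        _ = [io:format("~p~n", [X]) || X <- L],
        ok.

    value_used(L) ->
        %% The comprehension's result is an argument to another call, so the
        %% letrec-bound fun must be compiled in value context.
        side:effect([X + 1 || X <- L]),
        ok.
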
diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl
index 6cd114abf7..d7a7778740 100644
--- a/lib/compiler/src/v3_codegen.erl
+++ b/lib/compiler/src/v3_codegen.erl
@@ -79,13 +79,9 @@ function(#k_fdef{anno=#k{a=Anno},func=Name,arity=Arity,
try
#k_match{} = Kb, %Assertion.
- %% Try to suppress the stack frame unless it is
- %% really needed.
- Body0 = avoid_stack_frame(Kb),
-
%% Annotate kernel records with variable usage.
Vdb0 = init_vars(As),
- {Body,_,Vdb} = body(Body0, 1, Vdb0),
+ {Body,_,Vdb} = body(Kb, 1, Vdb0),
%% Generate the BEAM assembly code.
{Asm,EntryLabel,St} = cg_fun(Body, As, Vdb, AtomMod,
@@ -98,136 +94,6 @@ function(#k_fdef{anno=#k{a=Anno},func=Name,arity=Arity,
erlang:raise(Class, Error, Stack)
end.
-
-%% avoid_stack_frame(Kernel) -> Kernel'
-%% If possible, avoid setting up a stack frame. Functions
-%% that only do matching, calls to guard BIFs, and tail-recursive
-%% calls don't need a stack frame.
-
-avoid_stack_frame(#k_match{body=Body}=M) ->
- try
- M#k_match{body=avoid_stack_frame_1(Body)}
- catch
- impossible ->
- M
- end.
-
-avoid_stack_frame_1(#k_alt{first=First0,then=Then0}=Alt) ->
- First = avoid_stack_frame_1(First0),
- Then = avoid_stack_frame_1(Then0),
- Alt#k_alt{first=First,then=Then};
-avoid_stack_frame_1(#k_bif{op=Op}=Bif) ->
- case Op of
- #k_internal{} ->
- %% Most internal BIFs clobber the X registers.
- throw(impossible);
- _ ->
- Bif
- end;
-avoid_stack_frame_1(#k_break{anno=Anno,args=Args}) ->
- #k_guard_break{anno=Anno,args=Args};
-avoid_stack_frame_1(#k_guard_break{}=Break) ->
- Break;
-avoid_stack_frame_1(#k_enter{}=Enter) ->
- %% Tail-recursive calls don't need a stack frame.
- Enter;
-avoid_stack_frame_1(#k_guard{clauses=Cs0}=Guard) ->
- Cs = avoid_stack_frame_list(Cs0),
- Guard#k_guard{clauses=Cs};
-avoid_stack_frame_1(#k_guard_clause{guard=G0,body=B0}=C) ->
- G = avoid_stack_frame_1(G0),
- B = avoid_stack_frame_1(B0),
- C#k_guard_clause{guard=G,body=B};
-avoid_stack_frame_1(#k_match{anno=A,vars=Vs,body=B0,ret=Ret}) ->
- %% Use #k_guard_match{} instead to avoid saving the X registers
- %% to the stack before matching.
- B = avoid_stack_frame_1(B0),
- #k_guard_match{anno=A,vars=Vs,body=B,ret=Ret};
-avoid_stack_frame_1(#k_guard_match{body=B0}=M) ->
- B = avoid_stack_frame_1(B0),
- M#k_guard_match{body=B};
-avoid_stack_frame_1(#k_protected{arg=Arg0}=Prot) ->
- Arg = avoid_stack_frame_1(Arg0),
- Prot#k_protected{arg=Arg};
-avoid_stack_frame_1(#k_put{}=Put) ->
- Put;
-avoid_stack_frame_1(#k_return{}=Ret) ->
- Ret;
-avoid_stack_frame_1(#k_select{var=#k_var{anno=Vanno},types=Types0}=Select) ->
- case member(reuse_for_context, Vanno) of
- false ->
- Types = avoid_stack_frame_list(Types0),
- Select#k_select{types=Types};
- true ->
- %% Including binary patterns that overwrite the register containing
- %% the binary with the match context may not be safe. For example,
- %% bs_match_SUITE:bin_tail_e/1 with inlining will be rejected by
- %% beam_validator.
- %%
- %% Essentially the following code is produced:
- %%
- %% bs_match {x,0} => {x,0}
- %% ...
- %% bs_match {x,0} => {x,1} %% ILLEGAL
- %%
- %% A bs_match instruction will only accept a match context as the
- %% source operand if the source and destination registers are the
- %% the same (as in the first bs_match instruction above).
- %% The second bs_match instruction is therefore illegal.
- %%
- %% This situation is avoided if there is a stack frame:
- %%
- %% move {x,0} => {y,0}
- %% bs_match {x,0} => {x,0}
- %% ...
- %% bs_match {y,0} => {x,1} %% LEGAL
- %%
- throw(impossible)
- end;
-avoid_stack_frame_1(#k_seq{arg=#k_call{anno=Anno,op=Op}=Call,
- body=#k_break{args=BrArgs0}}=Seq) ->
- case Op of
- #k_remote{mod=#k_atom{val=Mod},
- name=#k_atom{val=Name},
- arity=Arity} ->
- case erl_bifs:is_exit_bif(Mod, Name, Arity) of
- false ->
- %% Will clobber X registers. Must have a stack frame.
- throw(impossible);
- true ->
- %% The call to this BIF will never return. It is safe
- %% to suppress the stack frame.
- Bif = #k_bif{anno=Anno,
- op=#k_internal{name=guard_error,arity=1},
- args=[Call],ret=[]},
- BrArgs = lists:duplicate(length(BrArgs0), #k_nil{}),
- GB = #k_guard_break{anno=#k{us=[],ns=[],a=[]},args=BrArgs},
- Seq#k_seq{arg=Bif,body=GB}
- end;
- _ ->
- %% Will clobber X registers. Must have a stack frame.
- throw(impossible)
- end;
-avoid_stack_frame_1(#k_seq{arg=A0,body=B0}=Seq) ->
- A = avoid_stack_frame_1(A0),
- B = avoid_stack_frame_1(B0),
- Seq#k_seq{arg=A,body=B};
-avoid_stack_frame_1(#k_test{}=Test) ->
- Test;
-avoid_stack_frame_1(#k_type_clause{values=Values0}=TC) ->
- Values = avoid_stack_frame_list(Values0),
- TC#k_type_clause{values=Values};
-avoid_stack_frame_1(#k_val_clause{body=B0}=VC) ->
- B = avoid_stack_frame_1(B0),
- VC#k_val_clause{body=B};
-avoid_stack_frame_1(_Body) ->
- throw(impossible).
-
-avoid_stack_frame_list([H|T]) ->
- [avoid_stack_frame_1(H)|avoid_stack_frame_list(T)];
-avoid_stack_frame_list([]) -> [].
-
-
%% This pass creates beam format annotated with variable lifetime
%% information. Each thing is given an index and for each variable we
%% store the first and last index for its occurrence. The variable
@@ -1621,11 +1487,6 @@ test_cg(is_boolean, [#k_atom{val=Val}], Fail, I, Vdb, Bef, St) ->
false -> [{jump,{f,Fail}}]
end,
{Is,Aft,St};
-test_cg(is_map_key, As, Fail, I, Vdb, Bef, St) ->
- [Key,Map] = cg_reg_args(As, Bef),
- Aft = clear_dead(Bef, I, Vdb),
- F = {f,Fail},
- {[{test,is_map,F,[Map]},{test,has_map_fields,F,Map,{list,[Key]}}],Aft,St};
test_cg(Test, As, Fail, I, Vdb, Bef, St) ->
Args = cg_reg_args(As, Bef),
Aft = clear_dead(Bef, I, Vdb),
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index 0196e7fdfd..66e578b776 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -328,14 +328,16 @@ gexpr({protect,Line,Arg}, Bools0, St0) ->
Anno = lineno_anno(Line, St),
{#iprotect{anno=#a{anno=Anno},body=Eps++[E]},[],Bools0,St}
end;
-gexpr({op,L,'andalso',E1,E2}, Bools, St0) ->
+gexpr({op,_,'andalso',_,_}=E0, Bools, St0) ->
+ {op,L,'andalso',E1,E2} = right_assoc(E0, 'andalso', St0),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch_guard(L, E1, V, E2, False),
gexpr(E, Bools, St);
-gexpr({op,L,'orelse',E1,E2}, Bools, St0) ->
+gexpr({op,_,'orelse',_,_}=E0, Bools, St0) ->
+ {op,L,'orelse',E1,E2} = right_assoc(E0, 'orelse', St0),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
@@ -764,14 +766,16 @@ expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) ->
{Qs,St2} = preprocess_quals(Llc, Qs0, St1),
{Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2),
{Y,Mps++Yps,St};
-expr({op,L,'andalso',E1,E2}, St0) ->
+expr({op,_,'andalso',_,_}=E0, St0) ->
+ {op,L,'andalso',E1,E2} = right_assoc(E0, 'andalso', St0),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch(L, E1, V, E2, False, St0),
expr(E, St);
-expr({op,L,'orelse',E1,E2}, St0) ->
+expr({op,_,'orelse',_,_}=E0, St0) ->
+ {op,L,'orelse',E1,E2} = right_assoc(E0, 'orelse', St0),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
@@ -1501,7 +1505,7 @@ bc_initial_size(E0, Q, St0) ->
end.
bc_elem_size({bin,_,El}, St0) ->
- case bc_elem_size_1(El, 0, []) of
+ case bc_elem_size_1(El, ordsets:new(), 0, []) of
{Bits,[]} ->
{#c_literal{val=Bits},[],[],St0};
{Bits,Vars0} ->
@@ -1515,19 +1519,33 @@ bc_elem_size(_, _) ->
throw(impossible).
bc_elem_size_1([{bin_element,_,{string,_,String},{integer,_,N},_}=El|Es],
- Bits, Vars) ->
+ DefVars, Bits, SizeVars) ->
U = get_unit(El),
- bc_elem_size_1(Es, Bits+U*N*length(String), Vars);
-bc_elem_size_1([{bin_element,_,_,{integer,_,N},_}=El|Es], Bits, Vars) ->
+ bc_elem_size_1(Es, DefVars, Bits+U*N*length(String), SizeVars);
+bc_elem_size_1([{bin_element,_,Expr,{integer,_,N},_}=El|Es],
+ DefVars0, Bits, SizeVars) ->
U = get_unit(El),
- bc_elem_size_1(Es, Bits+U*N, Vars);
-bc_elem_size_1([{bin_element,_,_,{var,_,Var},_}=El|Es], Bits, Vars) ->
- U = get_unit(El),
- bc_elem_size_1(Es, Bits, [{U,#c_var{name=Var}}|Vars]);
-bc_elem_size_1([_|_], _, _) ->
+ DefVars = bc_elem_size_def_var(Expr, DefVars0),
+ bc_elem_size_1(Es, DefVars, Bits+U*N, SizeVars);
+bc_elem_size_1([{bin_element,_,Expr,{var,_,Src},_}=El|Es],
+ DefVars0, Bits, SizeVars) ->
+ case ordsets:is_element(Src, DefVars0) of
+ false ->
+ U = get_unit(El),
+ DefVars = bc_elem_size_def_var(Expr, DefVars0),
+ bc_elem_size_1(Es, DefVars, Bits, [{U,#c_var{name=Src}}|SizeVars]);
+ true ->
+ throw(impossible)
+ end;
+bc_elem_size_1([_|_], _, _, _) ->
throw(impossible);
-bc_elem_size_1([], Bits, Vars) ->
- {Bits,Vars}.
+bc_elem_size_1([], _DefVars, Bits, SizeVars) ->
+ {Bits,SizeVars}.
+
+bc_elem_size_def_var({var,_,Var}, DefVars) ->
+ ordsets:add_element(Var, DefVars);
+bc_elem_size_def_var(_Expr, DefVars) ->
+ DefVars.
bc_elem_size_combine([{U,V}|T], U, UVars, Acc) ->
bc_elem_size_combine(T, U, [V|UVars], Acc);
@@ -2040,6 +2058,19 @@ fail_clause(Pats, Anno, Arg) ->
body=[#iprimop{anno=#a{anno=Anno},name=#c_literal{val=match_fail},
args=[Arg]}]}.
+%% Optimization for Dialyzer.
+right_assoc(E, Op, St) ->
+ case member(dialyzer, St#core.opts) of
+ true ->
+ right_assoc2(E, Op);
+ false ->
+ E
+ end.
+
+right_assoc2({op,L1,Op,{op,L2,Op,E1,E2},E3}, Op) ->
+ right_assoc2({op,L2,Op,E1,{op,L1,Op,E2,E3}}, Op);
+right_assoc2(E, _Op) -> E.
+
annotate_tuple(A, Es, St) ->
case member(dialyzer, St#core.opts) of
true ->
@@ -2597,7 +2628,8 @@ cfun(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) ->
[],A#a.us,St2}.
c_call_erl(Fun, Args) ->
- cerl:c_call(cerl:c_atom(erlang), cerl:c_atom(Fun), Args).
+ As = [compiler_generated],
+ cerl:ann_c_call(As, cerl:c_atom(erlang), cerl:c_atom(Fun), Args).
%% lit_vars(Literal) -> [Var].
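
As a closing note on the v3_core.erl change above: when the dialyzer option is given, right_assoc2/2 rotates a left-nested `(A andalso B) andalso C` abstract-format term into the right-nested `A andalso (B andalso C)` before it is expanded into a case. A standalone sketch of that rotation on a hand-built term (module and function names are mine, not part of the patch):

    -module(right_assoc_sketch).
    -export([demo/0]).

    %% {op,_,Op,{op,_,Op,E1,E2},E3} becomes {op,_,Op,E1,{op,_,Op,E2,E3}}.
    demo() ->
        Left = {op,1,'andalso',
                   {op,1,'andalso',{atom,1,a},{atom,1,b}},
                   {atom,1,c}},
        %% Returns {op,1,'andalso',{atom,1,a},
        %%             {op,1,'andalso',{atom,1,b},{atom,1,c}}}
        rotate(Left, 'andalso').

    rotate({op,L1,Op,{op,L2,Op,E1,E2},E3}, Op) ->
        rotate({op,L2,Op,E1,{op,L1,Op,E2,E3}}, Op);
    rotate(E, _Op) ->
        E.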