Diffstat (limited to 'lib/compiler/src')
-rw-r--r--  lib/compiler/src/beam_bsm.erl       |  13
-rw-r--r--  lib/compiler/src/beam_jump.erl      |  15
-rw-r--r--  lib/compiler/src/beam_type.erl      |   3
-rw-r--r--  lib/compiler/src/beam_utils.erl     |   9
-rw-r--r--  lib/compiler/src/compile.erl        |  14
-rw-r--r--  lib/compiler/src/sys_core_bsm.erl   |  62
-rw-r--r--  lib/compiler/src/sys_core_fold.erl  |  20
-rw-r--r--  lib/compiler/src/v3_codegen.erl     | 136
-rw-r--r--  lib/compiler/src/v3_core.erl        |   9
9 files changed, 114 insertions, 167 deletions
diff --git a/lib/compiler/src/beam_bsm.erl b/lib/compiler/src/beam_bsm.erl
index abc6e96c85..1c8e0e9854 100644
--- a/lib/compiler/src/beam_bsm.erl
+++ b/lib/compiler/src/beam_bsm.erl
@@ -310,18 +310,7 @@ btb_reaches_match_2([{test,bs_start_match2,{f,F},Live,[Bin,_],Ctx}|Is],
end;
btb_reaches_match_2([{test,_,{f,F},Ss}=I|Is], Regs, D0) ->
btb_ensure_not_used(Ss, I, Regs),
- D1 = btb_follow_branch(F, Regs, D0),
- D = case Is of
- [{bs_context_to_binary,_}|_] ->
- %% bs_context_to_binary following a test instruction
- %% probably needs the current position to be saved as
- %% the new start position, but we can't be sure.
- %% Therefore, conservatively disable the optimization
- %% (instead of forcing a saving of the position).
- D1#btb{must_save=true,must_not_save=true};
- _ ->
- D1
- end,
+ D = btb_follow_branch(F, Regs, D0),
btb_reaches_match_1(Is, Regs, D);
btb_reaches_match_2([{test,_,{f,F},_,Ss,_}=I|Is], Regs, D0) ->
btb_ensure_not_used(Ss, I, Regs),
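
The special case removed above guarded against a bs_context_to_binary instruction
that directly follows a test instruction. Schematically, the BEAM sequence of
concern is roughly (hand-written illustration, not taken from the patch; operand
lists abbreviated):

    {test,TestOp,{f,Fail},Operands}.
    {bs_context_to_binary,Reg}.

Converting the match context back to a binary at that point can yield the
original binary rather than the remaining tail unless the position has been
saved first; the sys_core_bsm.erl change further down in this diff rejects such
code already in Core Erlang, which is presumably why the BEAM-level safeguard
can be dropped.
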
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index 9eee56d604..22974da398 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -128,7 +128,7 @@
%%% on the program state.
%%%
--import(lists, [reverse/1,reverse/2,foldl/3]).
+-import(lists, [dropwhile/2,reverse/1,reverse/2,foldl/3]).
-type instruction() :: beam_utils:instruction().
@@ -411,14 +411,19 @@ opt_useless_loads([{test,_,{f,L},_}=I|Is], L, St) ->
opt_useless_loads(Is, _L, St) ->
{Is,St}.
-opt_useless_block_loads([{set,[Dst],_,_}=I|Is], L, Index) ->
- BlockJump = [{block,Is},{jump,{f,L}}],
+opt_useless_block_loads([{set,[Dst],_,_}=I|Is0], L, Index) ->
+ BlockJump = [{block,Is0},{jump,{f,L}}],
case beam_utils:is_killed(Dst, BlockJump, Index) of
true ->
- %% The register is killed and not used, we can remove the load
+ %% The register is killed and not used; we can remove the load.
+ %% Remove any `put` instructions in case we just
+ %% removed a `put_tuple` instruction.
+ Is = dropwhile(fun({set,_,_,put}) -> true;
+ (_) -> false
+ end, Is0),
opt_useless_block_loads(Is, L, Index);
false ->
- [I|opt_useless_block_loads(Is, L, Index)]
+ [I|opt_useless_block_loads(Is0, L, Index)]
end;
opt_useless_block_loads([I|Is], L, Index) ->
[I|opt_useless_block_loads(Is, L, Index)];
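
The dropwhile/2 call added above matters when the dead load was a put_tuple:
the element-filling put instructions that follow it would otherwise be left
behind without the instruction that allocates the tuple. A rough sketch of the
block shape assumed here (hand-written, not taken from the patch):

    {set,[{x,1}],[],{put_tuple,2}},   %% dead: {x,1} is killed after the jump
    {set,[],[{x,0}],put},             %% dropped by dropwhile/2 ...
    {set,[],[{atom,ok}],put},         %% ... together with this one
    {set,[{x,2}],[{y,0}],move},       %% first instruction that is kept

Only the leading run of {set,_,_,put} instructions is skipped; the remaining
instructions are examined again by opt_useless_block_loads/3.
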
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index b5c979e529..a1e9eff8f3 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -1114,4 +1114,5 @@ verified_type(nonempty_list=T) -> T;
verified_type({tuple,_,Sz,[]}=T) when is_integer(Sz) -> T;
verified_type({tuple,_,Sz,[_]}=T) when is_integer(Sz) -> T;
verified_type({tuple_element,_,_}=T) -> T;
-verified_type(float=T) -> T.
+verified_type(float=T) -> T;
+verified_type(none=T) -> T.
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index 5580d2f123..6b2ab5a2a4 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -745,8 +745,11 @@ check_liveness_block_2(R, {gc_bif,Op,{f,Lbl}}, Ss, St) ->
check_liveness_block_3(R, Lbl, {Op,length(Ss)}, St);
check_liveness_block_2(R, {bif,Op,{f,Lbl}}, Ss, St) ->
Arity = length(Ss),
+
+ %% Note that is_function/2 is a type test but is not safe.
case erl_internal:comp_op(Op, Arity) orelse
- erl_internal:new_type_test(Op, Arity) of
+ (erl_internal:new_type_test(Op, Arity) andalso
+ erl_bifs:is_safe(erlang, Op, Arity)) of
true ->
{killed,St};
false ->
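
The note above refers to the fact that is_function/2, unlike most type tests,
can raise: an arity argument that is not a non-negative integer fails with
badarg instead of returning false (this description of the BIF's behaviour is
an assumption, not spelled out in the patch):

    1> is_function(fun() -> ok end, 1).
    false
    2> is_function(fun() -> ok end, bad).
    ** exception error: bad argument

Hence the extra erl_bifs:is_safe(erlang, Op, Arity) condition before the
register is considered killed.
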
@@ -1115,6 +1118,10 @@ defs([{bs_init,{f,L},_,Live,_,Dst}=I|Is], Regs0, D) ->
end,
Regs = def_regs([Dst], Regs1),
[I|defs(Is, Regs, update_regs(L, Regs, D))];
+defs([{test,bs_start_match2,{f,L},Live,_,Dst}=I|Is], _Regs, D) ->
+ Regs0 = init_def_regs(Live),
+ Regs = def_regs([Dst], Regs0),
+ [I|defs(Is, Regs, update_regs(L, Regs0, D))];
defs([{bs_put,{f,L},_,_}=I|Is], Regs, D) ->
[I|defs(Is, Regs, update_regs(L, Regs, D))];
defs([build_stacktrace=I|Is], _Regs, D) ->
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 562d57a6ef..6510571441 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -931,11 +931,17 @@ parse_module(_Code, St0) ->
end.
do_parse_module(DefEncoding, #compile{ifile=File,options=Opts,dir=Dir}=St) ->
+ SourceName0 = proplists:get_value(source, Opts, File),
+ SourceName = case member(deterministic, Opts) of
+ true -> filename:basename(SourceName0);
+ false -> SourceName0
+ end,
R = epp:parse_file(File,
- [{includes,[".",Dir|inc_paths(Opts)]},
- {macros,pre_defs(Opts)},
- {default_encoding,DefEncoding},
- extra]),
+ [{includes,[".",Dir|inc_paths(Opts)]},
+ {source_name, SourceName},
+ {macros,pre_defs(Opts)},
+ {default_encoding,DefEncoding},
+ extra]),
case R of
{ok,Forms,Extra} ->
Encoding = proplists:get_value(encoding, Extra),
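
The practical effect of handing {source_name, SourceName} to epp is that, under
the deterministic option, the file name recorded in the parsed forms (and
therefore in ?FILE and the -file attributes) is only the base name instead of
the absolute path given to the compiler. A hedged sketch of the intended
behaviour (module name and path made up for illustration):

    %% erlc +deterministic /home/build/proj/src/m.erl
    -module(m).
    -export([f/0]).
    f() -> ?FILE.    %% expected: "m.erl" rather than "/home/build/proj/src/m.erl"
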
diff --git a/lib/compiler/src/sys_core_bsm.erl b/lib/compiler/src/sys_core_bsm.erl
index d7b26c3a56..62657933ee 100644
--- a/lib/compiler/src/sys_core_bsm.erl
+++ b/lib/compiler/src/sys_core_bsm.erl
@@ -44,6 +44,14 @@ function([{#c_var{name={F,Arity}}=Name,B0}|Fs], FsAcc, Ws0) ->
{B,Ws} ->
function(Fs, [{Name,B}|FsAcc], Ws)
catch
+ throw:unsafe_bs_context_to_binary ->
+ %% Unsafe bs_context_to_binary (in the sense that the
+ %% contents of the binary will probably be wrong).
+ %% Disable binary optimizations for the entire function.
+ %% We don't generate an INFO message, because this happens
+ %% very infrequently and it would be hard to explain in
+ %% a comprehensible way in an INFO message.
+ function(Fs, [{Name,B0}|FsAcc], Ws0);
Class:Error:Stack ->
io:fwrite("Function: ~w/~w\n", [F,Arity]),
erlang:raise(Class, Error, Stack)
@@ -116,12 +124,66 @@ move_from_col(Pos, L) ->
[Col|First] ++ Rest.
bsm_do_an([#c_var{name=Vname}=V0|Vs0], Cs0, Case) ->
+ bsm_inner_context_to_binary(Cs0),
Cs = bsm_do_an_var(Vname, Cs0),
V = bsm_annotate_for_reuse(V0),
Vs = core_lib:make_values([V|Vs0]),
Case#c_case{arg=Vs,clauses=Cs};
bsm_do_an(_Vs, _Cs, Case) -> Case.
+bsm_inner_context_to_binary([#c_clause{body=B}|Cs]) ->
+ %% Consider:
+ %%
+ %% foo(<<Length, Data/binary>>) -> %Line 1
+ %% case {Data, Length} of %Line 2
+ %% {_, 0} -> Data; %Line 3
+ %% {<<...>>, 4} -> ... %Line 4
+ %% end.
+ %%
+ %% No sub binary will be created for Data in line 1. The match
+ %% context will be passed on to the `case` in line 2. In line 3,
+ %% this pass inserts a `bs_context_to_binary` instruction to
+ %% convert the match context representing Data to a binary before
+ %% returning it. The problem is that the binary created will be
+ %% the original binary (including the matched out Length field),
+ %% not the tail of the binary as it is supposed to be.
+ %%
+ %% Here follows a heuristic to disable the binary optimizations
+ %% for the entire function if this kind of code is found.
+
+ case cerl_trees:free_variables(B) of
+ [] ->
+ %% Since there are no free variables in the body of
+ %% this clause, there can't be any troublesome
+ %% bs_context_to_binary instructions.
+ bsm_inner_context_to_binary(Cs);
+ [_|_]=Free ->
+ %% One of the free variables could refer to a match context
+ %% created by the outer binary match.
+ F = fun(#c_primop{name=#c_literal{val=bs_context_to_binary},
+ args=[#c_var{name=V}]}, _) ->
+ case member(V, Free) of
+ true ->
+ %% This bs_context_to_binary instruction will
+ %% make a binary of the match context from an
+ %% outer binary match. It is very likely that
+ %% the contents of the binary will be wrong
+ %% (the original binary as opposed to only
+ %% the tail binary).
+ throw(unsafe_bs_context_to_binary);
+ false ->
+ %% Safe. This bs_context_to_binary instruction
+ %% will make a binary from a match context
+ %% defined in the body of the clause.
+ ok
+ end;
+ (_, _) ->
+ ok
+ end,
+ cerl_trees:fold(F, ok, B)
+ end;
+bsm_inner_context_to_binary([]) -> ok.
+
bsm_do_an_var(V, [#c_clause{pats=[P|_],guard=G,body=B0}=C0|Cs]) ->
case P of
#c_var{name=VarName} ->
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 3a65b40fa5..1681d97efb 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -2635,12 +2635,20 @@ opt_build_stacktrace(#c_let{vars=[#c_var{name=Cooked}],
#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=raise},
args=[Class,Exp,#c_var{name=Cooked}]} ->
- %% The stacktrace is only used in a call to erlang:raise/3.
- %% There is no need to build the stacktrace. Replace the
- %% call to erlang:raise/3 with the the raw_raise/3 instruction,
- %% which will use a raw stacktrace.
- #c_primop{name=#c_literal{val=raw_raise},
- args=[Class,Exp,RawStk]};
+ case core_lib:is_var_used(Cooked, #c_cons{hd=Class,tl=Exp}) of
+ true ->
+ %% Not safe. The stacktrace is used in the class or
+ %% reason.
+ Let;
+ false ->
+ %% The stacktrace is only used in the last
+ %% argument for erlang:raise/3. There is no need
+ %% to build the stacktrace. Replace the call to
+ %% erlang:raise/3 with the raw_raise/3
+ %% instruction, which will use a raw stacktrace.
+ #c_primop{name=#c_literal{val=raw_raise},
+ args=[Class,Exp,RawStk]}
+ end;
#c_let{vars=[#c_var{name=V}],arg=Arg,body=B0} when V =/= Cooked ->
case core_lib:is_var_used(Cooked, Arg) of
false ->
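
The new is_var_used/2 check keeps the rewrite from firing when the cooked
stacktrace also appears in the class or reason. For example (hand-written, not
from the patch), a plain re-raise such as

    reraise(F) ->
        try F() catch C:R:Stk -> erlang:raise(C, R, Stk) end.

is still turned into the raw_raise primop, while

    rewrap(F) ->
        try F() catch C:R:Stk -> erlang:raise(C, {R,Stk}, Stk) end.

is left alone, because Stk is used inside the reason term and the stacktrace
must therefore really be built.
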
diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl
index e9152ba88f..d7a7778740 100644
--- a/lib/compiler/src/v3_codegen.erl
+++ b/lib/compiler/src/v3_codegen.erl
@@ -79,13 +79,9 @@ function(#k_fdef{anno=#k{a=Anno},func=Name,arity=Arity,
try
#k_match{} = Kb, %Assertion.
- %% Try to suppress the stack frame unless it is
- %% really needed.
- Body0 = avoid_stack_frame(Kb),
-
%% Annotate kernel records with variable usage.
Vdb0 = init_vars(As),
- {Body,_,Vdb} = body(Body0, 1, Vdb0),
+ {Body,_,Vdb} = body(Kb, 1, Vdb0),
%% Generate the BEAM assembly code.
{Asm,EntryLabel,St} = cg_fun(Body, As, Vdb, AtomMod,
@@ -98,136 +94,6 @@ function(#k_fdef{anno=#k{a=Anno},func=Name,arity=Arity,
erlang:raise(Class, Error, Stack)
end.
-
-%% avoid_stack_frame(Kernel) -> Kernel'
-%% If possible, avoid setting up a stack frame. Functions
-%% that only do matching, calls to guard BIFs, and tail-recursive
-%% calls don't need a stack frame.
-
-avoid_stack_frame(#k_match{body=Body}=M) ->
- try
- M#k_match{body=avoid_stack_frame_1(Body)}
- catch
- impossible ->
- M
- end.
-
-avoid_stack_frame_1(#k_alt{first=First0,then=Then0}=Alt) ->
- First = avoid_stack_frame_1(First0),
- Then = avoid_stack_frame_1(Then0),
- Alt#k_alt{first=First,then=Then};
-avoid_stack_frame_1(#k_bif{op=Op}=Bif) ->
- case Op of
- #k_internal{} ->
- %% Most internal BIFs clobber the X registers.
- throw(impossible);
- _ ->
- Bif
- end;
-avoid_stack_frame_1(#k_break{anno=Anno,args=Args}) ->
- #k_guard_break{anno=Anno,args=Args};
-avoid_stack_frame_1(#k_guard_break{}=Break) ->
- Break;
-avoid_stack_frame_1(#k_enter{}=Enter) ->
- %% Tail-recursive calls don't need a stack frame.
- Enter;
-avoid_stack_frame_1(#k_guard{clauses=Cs0}=Guard) ->
- Cs = avoid_stack_frame_list(Cs0),
- Guard#k_guard{clauses=Cs};
-avoid_stack_frame_1(#k_guard_clause{guard=G0,body=B0}=C) ->
- G = avoid_stack_frame_1(G0),
- B = avoid_stack_frame_1(B0),
- C#k_guard_clause{guard=G,body=B};
-avoid_stack_frame_1(#k_match{anno=A,vars=Vs,body=B0,ret=Ret}) ->
- %% Use #k_guard_match{} instead to avoid saving the X registers
- %% to the stack before matching.
- B = avoid_stack_frame_1(B0),
- #k_guard_match{anno=A,vars=Vs,body=B,ret=Ret};
-avoid_stack_frame_1(#k_guard_match{body=B0}=M) ->
- B = avoid_stack_frame_1(B0),
- M#k_guard_match{body=B};
-avoid_stack_frame_1(#k_protected{arg=Arg0}=Prot) ->
- Arg = avoid_stack_frame_1(Arg0),
- Prot#k_protected{arg=Arg};
-avoid_stack_frame_1(#k_put{}=Put) ->
- Put;
-avoid_stack_frame_1(#k_return{}=Ret) ->
- Ret;
-avoid_stack_frame_1(#k_select{var=#k_var{anno=Vanno},types=Types0}=Select) ->
- case member(reuse_for_context, Vanno) of
- false ->
- Types = avoid_stack_frame_list(Types0),
- Select#k_select{types=Types};
- true ->
- %% Including binary patterns that overwrite the register containing
- %% the binary with the match context may not be safe. For example,
- %% bs_match_SUITE:bin_tail_e/1 with inlining will be rejected by
- %% beam_validator.
- %%
- %% Essentially the following code is produced:
- %%
- %% bs_match {x,0} => {x,0}
- %% ...
- %% bs_match {x,0} => {x,1} %% ILLEGAL
- %%
- %% A bs_match instruction will only accept a match context as the
- %% source operand if the source and destination registers are the
- %% the same (as in the first bs_match instruction above).
- %% The second bs_match instruction is therefore illegal.
- %%
- %% This situation is avoided if there is a stack frame:
- %%
- %% move {x,0} => {y,0}
- %% bs_match {x,0} => {x,0}
- %% ...
- %% bs_match {y,0} => {x,1} %% LEGAL
- %%
- throw(impossible)
- end;
-avoid_stack_frame_1(#k_seq{arg=#k_call{anno=Anno,op=Op}=Call,
- body=#k_break{args=BrArgs0}}=Seq) ->
- case Op of
- #k_remote{mod=#k_atom{val=Mod},
- name=#k_atom{val=Name},
- arity=Arity} ->
- case erl_bifs:is_exit_bif(Mod, Name, Arity) of
- false ->
- %% Will clobber X registers. Must have a stack frame.
- throw(impossible);
- true ->
- %% The call to this BIF will never return. It is safe
- %% to suppress the stack frame.
- Bif = #k_bif{anno=Anno,
- op=#k_internal{name=guard_error,arity=1},
- args=[Call],ret=[]},
- BrArgs = lists:duplicate(length(BrArgs0), #k_nil{}),
- GB = #k_guard_break{anno=#k{us=[],ns=[],a=[]},args=BrArgs},
- Seq#k_seq{arg=Bif,body=GB}
- end;
- _ ->
- %% Will clobber X registers. Must have a stack frame.
- throw(impossible)
- end;
-avoid_stack_frame_1(#k_seq{arg=A0,body=B0}=Seq) ->
- A = avoid_stack_frame_1(A0),
- B = avoid_stack_frame_1(B0),
- Seq#k_seq{arg=A,body=B};
-avoid_stack_frame_1(#k_test{}=Test) ->
- Test;
-avoid_stack_frame_1(#k_type_clause{values=Values0}=TC) ->
- Values = avoid_stack_frame_list(Values0),
- TC#k_type_clause{values=Values};
-avoid_stack_frame_1(#k_val_clause{body=B0}=VC) ->
- B = avoid_stack_frame_1(B0),
- VC#k_val_clause{body=B};
-avoid_stack_frame_1(_Body) ->
- throw(impossible).
-
-avoid_stack_frame_list([H|T]) ->
- [avoid_stack_frame_1(H)|avoid_stack_frame_list(T)];
-avoid_stack_frame_list([]) -> [].
-
-
%% This pass creates beam format annotated with variable lifetime
%% information. Each thing is given an index and for each variable we
%% store the first and last index for its occurrence. The variable
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index c9517c3e51..66e578b776 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -766,14 +766,16 @@ expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) ->
{Qs,St2} = preprocess_quals(Llc, Qs0, St1),
{Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2),
{Y,Mps++Yps,St};
-expr({op,L,'andalso',E1,E2}, St0) ->
+expr({op,_,'andalso',_,_}=E0, St0) ->
+ {op,L,'andalso',E1,E2} = right_assoc(E0, 'andalso', St0),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch(L, E1, V, E2, False, St0),
expr(E, St);
-expr({op,L,'orelse',E1,E2}, St0) ->
+expr({op,_,'orelse',_,_}=E0, St0) ->
+ {op,L,'orelse',E1,E2} = right_assoc(E0, 'orelse', St0),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
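
right_assoc/3 normalizes the expression so that a left-nested chain, for
instance

    ((A1 andalso A2) andalso A3) andalso A4

is rewritten to A1 andalso (A2 andalso (A3 andalso A4)) before
make_bool_switch/6 is applied. The motivation (an assumption; the hunk only
shows the reassociation) is that deeply left-nested chains otherwise lead to
pathological growth of the generated nested case expressions and very long
compile times.
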
@@ -2626,7 +2628,8 @@ cfun(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) ->
[],A#a.us,St2}.
c_call_erl(Fun, Args) ->
- cerl:c_call(cerl:c_atom(erlang), cerl:c_atom(Fun), Args).
+ As = [compiler_generated],
+ cerl:ann_c_call(As, cerl:c_atom(erlang), cerl:c_atom(Fun), Args).
%% lit_vars(Literal) -> [Var].