Diffstat (limited to 'lib')
42 files changed, 1527 insertions, 1021 deletions
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl index 00d0aab507..3605385689 100644 --- a/lib/common_test/src/ct_run.erl +++ b/lib/common_test/src/ct_run.erl @@ -293,10 +293,10 @@ script_start1(Parent, Args) -> application:set_env(common_test, auto_compile, true), InclDirs = case proplists:get_value(include, Args) of - Incl when is_list(hd(Incl)) -> - Incl; + Incls when is_list(hd(Incls)) -> + [filename:absname(IDir) || IDir <- Incls]; Incl when is_list(Incl) -> - [Incl]; + [filename:absname(Incl)]; undefined -> [] end, @@ -1023,10 +1023,10 @@ run_test2(StartOpts) -> case proplists:get_value(include, StartOpts) of undefined -> []; - Incl when is_list(hd(Incl)) -> - Incl; + Incls when is_list(hd(Incls)) -> + [filename:absname(IDir) || IDir <- Incls]; Incl when is_list(Incl) -> - [Incl] + [filename:absname(Incl)] end, case os:getenv("CT_INCLUDE_PATH") of false -> @@ -1393,6 +1393,7 @@ run_testspec2(TestSpec) -> EnvInclude++Opts#opts.include end, application:set_env(common_test, include, AllInclude), + LogDir1 = which(logdir,Opts#opts.logdir), case check_and_install_configfiles( Opts#opts.config, LogDir1, Opts) of @@ -2134,6 +2135,14 @@ do_run_test(Tests, Skip, Opts0) -> case check_and_add(Tests, [], []) of {ok,AddedToPath} -> ct_util:set_testdata({stats,{0,0,{0,0}}}), + + %% test_server needs to know the include path too + InclPath = case application:get_env(common_test, include) of + {ok,Incls} -> Incls; + _ -> [] + end, + application:set_env(test_server, include, InclPath), + test_server_ctrl:start_link(local), %% let test_server expand the test tuples and count no of cases diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl index 4e699c4fbf..ba71d4efae 100644 --- a/lib/compiler/src/beam_jump.erl +++ b/lib/compiler/src/beam_jump.erl @@ -166,6 +166,12 @@ share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) -> end; share_1([{func_info,_,_,_}=I|Is], _, [], Acc) -> reverse(Is, [I|Acc]); +share_1([{'try',_,_}=I|Is], Dict0, Seq, Acc) -> + Dict = clean_non_sharable(Dict0), + share_1(Is, Dict, [I|Seq], Acc); +share_1([{try_case,_}=I|Is], Dict0, Seq, Acc) -> + Dict = clean_non_sharable(Dict0), + share_1(Is, Dict, [I|Seq], Acc); share_1([I|Is], Dict, Seq, Acc) -> case is_unreachable_after(I) of false -> @@ -174,6 +180,24 @@ share_1([I|Is], Dict, Seq, Acc) -> share_1(Is, Dict, [I], Acc) end. +clean_non_sharable(Dict) -> + %% We are passing in or out of a 'try' block. Remove + %% sequences that should not shared over the boundaries + %% of a 'try' block. Since the end of the sequence must match, + %% the only possible match between a sequence outside and + %% a sequence inside the 'try' block is a sequence that ends + %% with an instruction that causes an exception. Any sequence + %% that causes an exception must contain a line/1 instruction. + dict:filter(fun(K, _V) -> sharable_with_try(K) end, Dict). + +sharable_with_try([{line,_}|_]) -> + %% This sequence may cause an exception and may potentially + %% match a sequence on the other side of the 'try' block + %% boundary. + false; +sharable_with_try([_|Is]) -> + sharable_with_try(Is); +sharable_with_try([]) -> true. %% Eliminate all fallthroughs. Return the result reversed. @@ -295,12 +319,6 @@ opt([{test,_,{f,_}=Lbl,_,_,_}=I|Is], Acc, St) -> opt(Is, [I|Acc], label_used(Lbl, St)); opt([{select,_,_R,Fail,Vls}=I|Is], Acc, St) -> skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St)); -opt([{label,L}=I|Is], Acc, #st{entry=L}=St) -> - %% NEVER move the entry label. 
- opt(Is, [I|Acc], St); -opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) -> - St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)}, - opt([Prev,I|Is], Acc, label_used({f,L2}, St)); opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) -> case dict:find(Lbl, Mlbl) of {ok,Lbls} -> @@ -310,9 +328,20 @@ opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) -> insert_labels([Lbl|Lbls], Is, Acc, St); error -> opt(Is, [I|Acc], St0) end; -opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) -> - opt([I|Is], Acc, St); -opt([{jump,Lbl}=I|Is], Acc, St) -> +opt([{jump,{f,_}=X}|[{label,_},{jump,X}|_]=Is], Acc, St) -> + opt(Is, Acc, St); +opt([{jump,{f,Lbl}}|[{label,Lbl}|_]=Is], Acc, St) -> + opt(Is, Acc, St); +opt([{jump,{f,L}=Lbl}=I|Is], Acc0, #st{mlbl=Mlbl0}=St0) -> + %% All labels before this jump instruction should now be + %% moved to the location of the jump's target. + {Lbls,Acc} = collect_labels(Acc0, St0), + St = case Lbls of + [] -> St0; + [_|_] -> + Mlbl = dict:append_list(L, Lbls, Mlbl0), + St0#st{mlbl=Mlbl} + end, skip_unreachable(Is, [I|Acc], label_used(Lbl, St)); %% Optimization: quickly handle some common instructions that don't %% have any failure labels and where is_unreachable_after(I) =:= false. @@ -349,6 +378,17 @@ insert_fc_labels([L|Ls], Mlbl, Acc0) -> end; insert_fc_labels([], _, Acc) -> Acc. +collect_labels(Is, #st{entry=Entry}) -> + collect_labels_1(Is, Entry, []). + +collect_labels_1([{label,Entry}|_]=Is, Entry, Acc) -> + %% Never move the entry label. + {Acc,Is}; +collect_labels_1([{label,L}|Is], Entry, Acc) -> + collect_labels_1(Is, Entry, [L|Acc]); +collect_labels_1(Is, _Entry, Acc) -> + {Acc,Is}. + %% label_defined(Is, Label) -> true | false. %% Test whether the label Label is defined at the start of the instruction %% sequence, possibly preceeded by other label definitions. diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl index 26020e1d29..3249024854 100644 --- a/lib/compiler/src/beam_utils.erl +++ b/lib/compiler/src/beam_utils.erl @@ -187,7 +187,7 @@ is_pure_test({test,is_lt,_,[_,_]}) -> true; is_pure_test({test,is_nil,_,[_]}) -> true; is_pure_test({test,is_nonempty_list,_,[_]}) -> true; is_pure_test({test,test_arity,_,[_,_]}) -> true; -is_pure_test({test,has_map_fields,_,[_,{list,_}]}) -> true; +is_pure_test({test,has_map_fields,_,[_|_]}) -> true; is_pure_test({test,Op,_,Ops}) -> erl_internal:new_type_test(Op, length(Ops)). diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl index c156cf79fe..e60184c929 100644 --- a/lib/compiler/src/beam_validator.erl +++ b/lib/compiler/src/beam_validator.erl @@ -22,7 +22,6 @@ %% Avoid warning for local function error/1 clashing with autoimported BIF. -compile({no_auto_import,[error/1]}). --export([file/1, files/1]). %% Interface for compiler. -export([module/2, format_error/1]). @@ -40,38 +39,12 @@ -define(DBG_FORMAT(F, D), ok). -endif. -%%% -%%% API functions. -%%% - --spec file(file:filename()) -> 'ok' | {'error', term()}. - -file(Name) when is_list(Name) -> - case case filename:extension(Name) of - ".S" -> s_file(Name); - ".beam" -> beam_file(Name) - end of - [] -> ok; - Es -> {error,Es} - end. - --spec files([file:filename()]) -> 'ok'. - -files([F|Fs]) -> - ?DBG_FORMAT("# Verifying: ~p~n", [F]), - case file(F) of - ok -> ok; - {error,Es} -> - io:format("~tp:~n~ts~n", [F,format_error(Es)]) - end, - files(Fs); -files([]) -> ok. - %% To be called by the compiler. 
module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts) when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) -> case validate(Mod, Fs) of - [] -> {ok,Code}; + [] -> + {ok,Code}; Es0 -> Es = [{?MODULE,E} || E <- Es0], {error,[{atom_to_list(Mod),Es}]} @@ -79,12 +52,6 @@ module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts) -spec format_error(term()) -> iolist(). -format_error([]) -> []; -format_error([{{M,F,A},{I,Off,Desc}}|Es]) -> - [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n", - [M,F,A,Off,I,Desc])|format_error(Es)]; -format_error([Error|Es]) -> - [format_error(Error)|format_error(Es)]; format_error({{_M,F,A},{I,Off,limit}}) -> io_lib:format( "function ~p/~p+~p:~n" @@ -103,8 +70,6 @@ format_error({{_M,F,A},{I,Off,Desc}}) -> " Internal consistency check failed - please report this bug.~n" " Instruction: ~p~n" " Error: ~p:~n", [F,A,Off,I,Desc]); -format_error({Module,Error}) -> - [Module:format_error(Error)]; format_error(Error) -> io_lib:format("~p~n", [Error]). @@ -112,36 +77,6 @@ format_error(Error) -> %%% Local functions follow. %%% -s_file(Name) -> - {ok,Is} = file:consult(Name), - {module,Module} = lists:keyfind(module, 1, Is), - Fs = find_functions(Is), - validate(Module, Fs). - -find_functions(Fs) -> - find_functions_1(Fs, none, [], []). - -find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) -> - Acc = add_func(Func, FuncAcc, Acc0), - find_functions_1(Is, {Name,Arity,Entry}, [], Acc); -find_functions_1([I|Is], Func, FuncAcc, Acc) -> - find_functions_1(Is, Func, [I|FuncAcc], Acc); -find_functions_1([], Func, FuncAcc, Acc) -> - reverse(add_func(Func, FuncAcc, Acc)). - -add_func(none, _, Acc) -> Acc; -add_func({Name,Arity,Entry}, Is, Acc) -> - [{function,Name,Arity,Entry,reverse(Is)}|Acc]. - -beam_file(Name) -> - try beam_disasm:file(Name) of - {error,beam_lib,Reason} -> [{beam_lib,Reason}]; - #beam_file{module=Module, code=Code0} -> - Code = normalize_disassembled_code(Code0), - validate(Module, Code) - catch _:_ -> [disassembly_failed] - end. - %%% %%% The validator follows. %%% @@ -196,23 +131,16 @@ validate_0(Module, [{function,Name,Ar,Entry,Code}|Fs], Ft) -> try validate_1(Code, Name, Ar, Entry, Ft) of _ -> validate_0(Module, Fs, Ft) catch - Error -> + throw:Error -> + %% Controlled error. [Error|validate_0(Module, Fs, Ft)]; - error:Error -> - [validate_error(Error, Module, Name, Ar)|validate_0(Module, Fs, Ft)] + Class:Error -> + %% Crash. + Stack = erlang:get_stacktrace(), + io:fwrite("Function: ~w/~w\n", [Name,Ar]), + erlang:raise(Class, Error, Stack) end. --ifdef(DEBUG). -validate_error(Error, Module, Name, Ar) -> - exit(validate_error_1(Error, Module, Name, Ar)). --else. -validate_error(Error, Module, Name, Ar) -> - validate_error_1(Error, Module, Name, Ar). --endif. -validate_error_1(Error, Module, Name, Ar) -> - {{Module,Name,Ar}, - {internal_error,'_',{Error,erlang:get_stacktrace()}}}. - -type index() :: non_neg_integer(). -type reg_tab() :: gb_trees:tree(index(), 'none' | {'value', _}). @@ -225,7 +153,6 @@ validate_error_1(Error, Module, Name, Ar) -> hf=0, %Available heap size for floats. fls=undefined, %Floating point state. ct=[], %List of hot catch/try labels - bsm=undefined, %Bit syntax matching state. bits=undefined, %Number of bits in bit syntax binary. setelem=false %Previous instruction was setelement/3. }). @@ -308,7 +235,7 @@ labels_1([{label,L}|Is], R) -> labels_1([{line,_}|Is], R) -> labels_1(Is, R); labels_1(Is, R) -> - {lists:reverse(R),Is}. + {reverse(R),Is}. 
init_state(Arity) -> Xs = init_regs(Arity, term), @@ -403,10 +330,6 @@ valfun_1({init,{y,_}=Reg}, Vst) -> set_type_y(initialized, Reg, Vst); valfun_1({test_heap,Heap,Live}, Vst) -> test_heap(Heap, Live, Vst); -valfun_1({bif,_Op,nofail,Src,Dst}, Vst) -> - %% The 'nofail' atom only occurs in disassembled code. - validate_src(Src, Vst), - set_type_reg(term, Dst, Vst); valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) -> case is_bif_safe(Op, length(Src)) of false -> @@ -432,9 +355,6 @@ valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) -> valfun_1({put,Src}, Vst) -> assert_term(Src, Vst), eat_heap(1, Vst); -valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) -> - Vst = eat_heap(2*Sz, Vst0), - set_type_reg(cons, Dst, Vst); %% Instructions for optimization of selective receives. valfun_1({recv_mark,{f,Fail}}, Vst) when is_integer(Fail) -> Vst; @@ -602,8 +522,6 @@ valfun_4({call_ext_last,Live,Func,StkSize}, tail_call(Func, Live, Vst); valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) -> error({allocated,NumY}); -valfun_4({make_fun,_,_,Live}, Vst) -> - call('fun', Live, Vst); valfun_4({make_fun2,_,_,_,Live}, Vst) -> call(make_fun, Live, Vst); %% Other BIFs @@ -620,8 +538,6 @@ valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) -> TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0), Vst = set_type(TupleType, Tuple, Vst1), set_type_reg(term, Dst, Vst); -valfun_4({raise,{f,_}=Fail,Src,Dst}, Vst) -> - valfun_4({bif,raise,Fail,Src,Dst}, Vst); valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) -> validate_src(Src, Vst0), Vst = branch_state(Fail, Vst0), @@ -738,32 +654,6 @@ valfun_4({bs_save2,Ctx,SavePoint}, Vst) -> valfun_4({bs_restore2,Ctx,SavePoint}, Vst) -> bsm_restore(Ctx, SavePoint, Vst); -%% Bit syntax instructions. -valfun_4({bs_start_match,{f,_Fail}=F,Src}, Vst) -> - valfun_4({test,bs_start_match,F,[Src]}, Vst); -valfun_4({test,bs_start_match,{f,Fail},[Src]}, Vst) -> - assert_term(Src, Vst), - bs_start_match(branch_state(Fail, Vst)); - -valfun_4({bs_save,SavePoint}, Vst) -> - bs_assert_state(Vst), - bs_save(SavePoint, Vst); -valfun_4({bs_restore,SavePoint}, Vst) -> - bs_assert_state(Vst), - bs_assert_savepoint(SavePoint, Vst), - Vst; -valfun_4({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) -> - bs_assert_state(Vst), - assert_term(Src, Vst), - branch_state(Fail, Vst); -valfun_4({test,bs_test_tail,{f,Fail},_}, Vst) -> - bs_assert_state(Vst), - branch_state(Fail, Vst); -valfun_4({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) -> - bs_assert_state(Vst0), - Vst = branch_state(Fail, Vst0), - set_type_reg({integer,[]}, Dst, Vst); - %% Other test instructions. valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) -> assert_term(Float, Vst), @@ -795,9 +685,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) -> valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) -> assert_term(A, Vst), set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst)); -valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) -> - assert_term(Src, Vst), - set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst)); valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> verify_live(Live, Vst0), if @@ -868,16 +755,6 @@ valfun_4({bs_put_utf32,{f,Fail},_,Src}=I, Vst0) -> assert_term(Src, Vst0), Vst = bs_align_check(I, Vst0), branch_state(Fail, Vst); -%% Old bit syntax construction (before R10B). 
-valfun_4({bs_init,_,_}, Vst) -> - bs_zero_bits(Vst); -valfun_4({bs_need_buf,_}, Vst) -> Vst; -valfun_4({bs_final,{f,Fail},Dst}, Vst0) -> - Vst = branch_state(Fail, Vst0), - set_type_reg(binary, Dst, Vst); -valfun_4({bs_final2,Src,Dst}, Vst0) -> - assert_term(Src, Vst0), - set_type_reg(binary, Dst, Vst0); %% Map instructions. valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> verify_put_map(Fail, Src, Dst, Live, List, Vst); @@ -891,10 +768,14 @@ valfun_4(_, _) -> verify_get_map(Fail, Src, List, Vst0) -> assert_term(Src, Vst0), Vst1 = branch_state(Fail, Vst0), - Lits = mmap(fun(L,_R) -> [L] end, List), - assert_strict_literal_termorder(Lits), + Keys = extract_map_keys(List), + assert_strict_literal_termorder(Keys), verify_get_map_pair(List,Vst0,Vst1). +extract_map_keys([Key,_Val|T]) -> + [Key|extract_map_keys(T)]; +extract_map_keys([]) -> []. + verify_get_map_pair([],_,Vst) -> Vst; verify_get_map_pair([Src,Dst|Vs],Vst0,Vsti) -> assert_term(Src, Vst0), @@ -936,9 +817,6 @@ validate_bs_skip_utf(Fail, Ctx, Live, Vst0) -> %% val_dsetel({move,_,_}, Vst) -> Vst; -val_dsetel({put_string,0,{string,""},_}, Vst) -> - %% An empty string is OK since it doesn't build anything. - Vst; val_dsetel({call_ext,3,{extfunc,erlang,setelement,3}}, #vst{current=St}=Vst) -> Vst#vst{current=St#st{setelem=true}}; val_dsetel({set_tuple_element,_,_,_}, #vst{current=#st{setelem=false}}) -> @@ -972,7 +850,7 @@ call(Name, Live, #vst{current=St}=Vst) -> Type when Type =/= exception -> %% Type is never 'exception' because it has been handled earlier. Xs = gb_trees_from_list([{0,Type}]), - Vst#vst{current=St#st{x=Xs,f=init_fregs(),bsm=undefined}} + Vst#vst{current=St#st{x=Xs,f=init_fregs()}} end. %% Tail call. @@ -1030,7 +908,7 @@ allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) -> error({existing_stack_frame,{size,Numy}}). deallocate(#vst{current=St}=Vst) -> - Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none,bsm=undefined}}. + Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}. test_heap(Heap, Live, Vst0) -> verify_live(Live, Vst0), @@ -1038,7 +916,7 @@ test_heap(Heap, Live, Vst0) -> heap_alloc(Heap, Vst). heap_alloc(Heap, #vst{current=St0}=Vst) -> - St1 = kill_heap_allocation(St0#st{bsm=undefined}), + St1 = kill_heap_allocation(St0), St = heap_alloc_1(Heap, St1), Vst#vst{current=St}. @@ -1122,74 +1000,30 @@ assert_freg_set(Fr, _) -> error({bad_source,Fr}). %%% Maps -%% ensure that a list of literals has a strict -%% ascending term order (also meaning unique literals). -%% Single item lists may have registers. -assert_strict_literal_termorder([_]) -> ok; -assert_strict_literal_termorder(Ls) -> - Vs = lists:map(fun (L) -> get_literal(L) end, Ls), +%% A single item list may be either a list or a register. +%% +%% A list with more than item must contain literals in +%% ascending term order. +%% +%% An empty list is not allowed. + +assert_strict_literal_termorder([]) -> + %% There is no reason to use the get_map_elements and + %% has_map_fields instructions with empty lists. + error(empty_field_list); +assert_strict_literal_termorder([_]) -> + ok; +assert_strict_literal_termorder([_,_|_]=Ls) -> + Vs = [get_literal(L) || L <- Ls], case check_strict_value_termorder(Vs) of true -> ok; - false -> error({not_strict_order, Ls}) - end. - -%% usage: -%% mmap(fun(A,B) -> [{A,B}] end, [1,2,3,4]), -%% [{1,2},{3,4}] - -mmap(F,List) -> - {arity,Ar} = erlang:fun_info(F,arity), - mmap(F,Ar,List). -mmap(_F,_,[]) -> []; -mmap(F,Ar,List) -> - {Hd,Tl} = lists:split(Ar,List), - apply(F,Hd) ++ mmap(F,Ar,Tl). 
- -check_strict_value_termorder([]) -> true; -check_strict_value_termorder([_]) -> true; -check_strict_value_termorder([V1,V2]) -> - erts_internal:cmp_term(V1,V2) < 0; -check_strict_value_termorder([V1,V2|Vs]) -> - case erts_internal:cmp_term(V1,V2) < 0 of - true -> check_strict_value_termorder([V2|Vs]); - false -> false - end. - -%%% -%%% Binary matching. -%%% -%%% Possible values for the bsm field (=bit syntax matching state). -%%% -%%% undefined - Undefined (initial state). No matching instructions allowed. -%%% -%%% (gb set) - The gb set contains the defined save points. -%%% -%%% The bsm field is reset to 'undefined' by instructions that may cause a -%%% a garbage collection (might move the binary) and/or context switch -%%% (may invalidate the save points). - -bs_start_match(#vst{current=#st{bsm=undefined}=St}=Vst) -> - Vst#vst{current=St#st{bsm=gb_sets:empty()}}; -bs_start_match(Vst) -> - %% Must retain save points here - it is possible to restore back - %% to a previous binary. - Vst. - -bs_save(Reg, #vst{current=#st{bsm=Saved}=St}=Vst) - when is_integer(Reg), Reg < ?MAXREG -> - Vst#vst{current=St#st{bsm=gb_sets:add(Reg, Saved)}}; -bs_save(_, _) -> error(limit). - -bs_assert_savepoint(Reg, #vst{current=#st{bsm=Saved}}) -> - case gb_sets:is_member(Reg, Saved) of - false -> error({no_save_point,Reg}); - true -> ok + false -> error(not_strict_order) end. -bs_assert_state(#vst{current=#st{bsm=undefined}}) -> - error(no_bs_match_state); -bs_assert_state(_) -> ok. - +check_strict_value_termorder([V1|[V2|_]=Vs]) -> + erts_internal:cmp_term(V1, V2) < 0 andalso + check_strict_value_termorder(Vs); +check_strict_value_termorder([_]) -> true. %%% %%% New binary matching instructions. @@ -1525,14 +1359,13 @@ merge_states(L, St, Branched) when L =/= 0 -> {value,OtherSt} -> merge_states_1(St, OtherSt) end. -merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0,bsm=Bsm0}=St, - #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1,bsm=Bsm1}) -> +merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0}=St, + #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1}) -> NumY = merge_stk(NumY0, NumY1), Xs = merge_regs(Xs0, Xs1), Ys = merge_y_regs(Ys0, Ys1), Ct = merge_ct(Ct0, Ct1), - Bsm = merge_bsm(Bsm0, Bsm1), - St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct,bsm=Bsm}. + St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct}. merge_stk(S, S) -> S; merge_stk(_, _) -> undecided. @@ -1615,10 +1448,6 @@ merge_types(T1, T2) when T1 =/= T2 -> %% Too different. All we know is that the type is a 'term'. term. -merge_bsm(undefined, _) -> undefined; -merge_bsm(_, undefined) -> undefined; -merge_bsm(Bsm0, Bsm1) -> gb_sets:intersection(Bsm0, Bsm1). - tuple_sz([Sz]) -> Sz; tuple_sz(Sz) -> Sz. @@ -1840,52 +1669,3 @@ error(Error) -> exit(Error). -else. error(Error) -> throw(Error). -endif. - - -%%% -%%% Rewrite disassembled code to the same format as we used internally -%%% to not have to worry later. -%%% - -normalize_disassembled_code(Fs) -> - Index = ndc_index(Fs, []), - ndc(Fs, Index, []). - -ndc_index([{function,Name,Arity,Entry,_Code}|Fs], Acc) -> - ndc_index(Fs, [{{Name,Arity},Entry}|Acc]); -ndc_index([], Acc) -> - gb_trees:from_orddict(lists:sort(Acc)). - -ndc([{function,Name,Arity,Entry,Code0}|Fs], D, Acc) -> - Code = ndc_1(Code0, D, []), - ndc(Fs, D, [{function,Name,Arity,Entry,Code}|Acc]); -ndc([], _, Acc) -> reverse(Acc). 
- -ndc_1([{call=Op,A,{_,F,A}}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]); -ndc_1([{call_only=Op,A,{_,F,A}}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]); -ndc_1([{call_last=Op,A,{_,F,A},Sz}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)},Sz}|Acc]); -ndc_1([{arithbif,Op,F,Src,Dst}|Is], D, Acc) -> - ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]); -ndc_1([{arithfbif,Op,F,Src,Dst}|Is], D, Acc) -> - ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]); -ndc_1([{test,bs_start_match2=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_binary2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_float2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_integer2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_utf8=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_utf16=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_utf32=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([I|Is], D, Acc) -> - ndc_1(Is, D, [I|Acc]); -ndc_1([], _, Acc) -> - reverse(Acc). diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl index 7d1819ea15..2618f7adba 100644 --- a/lib/compiler/src/sys_core_fold.erl +++ b/lib/compiler/src/sys_core_fold.erl @@ -2288,10 +2288,17 @@ opt_simple_let_2(Let0, Vs0, Arg0, Body, Ctxt, Sub) -> %% No variables left. Body; {_,Arg,#c_literal{}} -> - %% Since the variable is not used in the body, we can rewrite the - %% let to a sequence: - %% let <Var> = Arg in Literal ==> seq Arg Literal - expr(#c_seq{arg=Arg,body=Body}, Ctxt, sub_new_preserve_types(Sub)); + E = case Ctxt of + effect -> + %% Throw away the literal body. + Arg; + value -> + %% Since the variable is not used in the body, we + %% can rewrite the let to a sequence. + %% let <Var> = Arg in Literal ==> seq Arg Literal + #c_seq{arg=Arg,body=Body} + end, + expr(E, Ctxt, sub_new_preserve_types(Sub)); {Vs,Arg,Body} -> %% If none of the variables are used in the body, we can %% rewrite the let to a sequence: diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl index 27979647c6..c441f9f284 100644 --- a/lib/compiler/test/beam_validator_SUITE.erl +++ b/lib/compiler/test/beam_validator_SUITE.erl @@ -21,16 +21,17 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, init_per_testcase/2,end_per_testcase/2, - beam_files/1,compiler_bug/1,stupid_but_valid/1, + compiler_bug/1,stupid_but_valid/1, xrange/1,yrange/1,stack/1,call_last/1,merge_undefined/1, uninit/1,unsafe_catch/1, - dead_code/1,mult_labels/1, + dead_code/1, overwrite_catchtag/1,overwrite_trytag/1,accessing_tags/1,bad_catch_try/1, cons_guard/1, freg_range/1,freg_uninit/1,freg_state/1, - bin_match/1,bad_bin_match/1,bin_aligned/1,bad_dsetel/1, + bad_bin_match/1,bin_aligned/1,bad_dsetel/1, state_after_fault_in_catch/1,no_exception_in_catch/1, - undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1]). + undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1, + map_field_lists/1]). -include_lib("test_server/include/test_server.hrl"). 
@@ -47,18 +48,19 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> test_lib:recompile(?MODULE), - [beam_files,{group,p}]. + [{group,p}]. groups() -> [{p,test_lib:parallel(), [compiler_bug,stupid_but_valid,xrange, yrange,stack,call_last,merge_undefined,uninit, - unsafe_catch,dead_code,mult_labels, + unsafe_catch,dead_code, overwrite_catchtag,overwrite_trytag,accessing_tags, bad_catch_try,cons_guard,freg_range,freg_uninit, - freg_state,bin_match,bad_bin_match,bin_aligned,bad_dsetel, + freg_state,bad_bin_match,bin_aligned,bad_dsetel, state_after_fault_in_catch,no_exception_in_catch, - undef_label,illegal_instruction,failing_gc_guard_bif]}]. + undef_label,illegal_instruction,failing_gc_guard_bif, + map_field_lists]}]. init_per_suite(Config) -> Config. @@ -72,27 +74,6 @@ init_per_group(_GroupName, Config) -> end_per_group(_GroupName, Config) -> Config. - -beam_files(Config) when is_list(Config) -> - ?line DataDir = proplists:get_value(data_dir, Config), - ?line Wc = filename:join([DataDir,"..","..","*","*.beam"]), - %% Must have at least two files here, or there will be - %% a grammatical error in the output of the io:format/2 call below. ;-) - ?line [_,_|_] = Fs = filelib:wildcard(Wc), - ?line io:format("~p files\n", [length(Fs)]), - test_lib:p_run(fun do_beam_file/1, Fs). - - -do_beam_file(F) -> - case beam_validator:file(F) of - ok -> - ok; - {error,Es} -> - io:format("File: ~s", [F]), - io:format("Error: ~p\n", [Es]), - error - end. - compiler_bug(Config) when is_list(Config) -> %% Check that the compiler returns an error if we try to %% assemble one of the bad '.S' files. @@ -152,14 +133,14 @@ yrange(Config) when is_list(Config) -> stack(Config) when is_list(Config) -> Errors = do_val(stack, Config), - ?line [{{t,a,2},{return,11,{stack_frame,2}}}, - {{t,b,2},{{deallocate,2},4,{allocated,none}}}, - {{t,c,2},{{deallocate,2},12,{allocated,none}}}, - {{t,d,2}, - {{allocate,2,2},5,{existing_stack_frame,{size,2}}}}, - {{t,e,2},{{deallocate,5},6,{allocated,2}}}, - {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}}, - {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}] = Errors, + [{{t,a,2},{return,11,{stack_frame,2}}}, + {{t,b,2},{{deallocate,2},4,{allocated,none}}}, + {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}}, + {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}, + {{t,c,2},{{deallocate,2},12,{allocated,none}}}, + {{t,d,2}, + {{allocate,2,2},5,{existing_stack_frame,{size,2}}}}, + {{t,e,2},{{deallocate,5},6,{allocated,2}}}] = Errors, ok. call_last(Config) when is_list(Config) -> @@ -173,10 +154,10 @@ call_last(Config) when is_list(Config) -> merge_undefined(Config) when is_list(Config) -> Errors = do_val(merge_undefined, Config), - ?line [{{t,handle_call,2}, - {{call_ext,2,{extfunc,debug,filter,2}}, - 22, - {uninitialized_reg,{y,0}}}}] = Errors, + [{{t,handle_call,2}, + {{call_ext,2,{extfunc,debug,filter,2}}, + 23, + {uninitialized_reg,{y,0}}}}] = Errors, ok. uninit(Config) when is_list(Config) -> @@ -185,7 +166,7 @@ uninit(Config) when is_list(Config) -> [{{t,sum_1,2}, {{move,{y,0},{x,0}},5,{uninitialized_reg,{y,0}}}}, {{t,sum_2,2}, - {{call,1,{f,10}},6,{uninitialized_reg,{y,0}}}}, + {{call,1,{f,8}},6,{uninitialized_reg,{y,0}}}}, {{t,sum_3,2}, {{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}, 7, @@ -206,10 +187,6 @@ dead_code(Config) when is_list(Config) -> [] = do_val(dead_code, Config), ok. -mult_labels(Config) when is_list(Config) -> - [] = do_val(erl_prim_loader, Config, ".beam"), - ok. 
- overwrite_catchtag(Config) when is_list(Config) -> Errors = do_val(overwrite_catchtag, Config), ?line @@ -226,11 +203,10 @@ overwrite_trytag(Config) when is_list(Config) -> accessing_tags(Config) when is_list(Config) -> Errors = do_val(accessing_tags, Config), - ?line - [{{accessing_tags,foo,1}, - {{move,{y,0},{x,0}},6,{catchtag,_}}}, - {{accessing_tags,bar,1}, - {{move,{y,0},{x,0}},6,{trytag,_}}}] = Errors, + [{{accessing_tags,bar,1}, + {{move,{y,0},{x,0}},6,{trytag,_}}}, + {{accessing_tags,foo,1}, + {{move,{y,0},{x,0}},6,{catchtag,_}}}] = Errors, ok. bad_catch_try(Config) when is_list(Config) -> @@ -317,13 +293,6 @@ freg_state(Config) when is_list(Config) -> {fclearerror,5,{bad_floating_point_state,cleared}}}] = Errors, ok. -bin_match(Config) when is_list(Config) -> - Errors = do_val(bin_match, Config), - ?line - [{{t,t,1},{{bs_save,0},4,no_bs_match_state}}, - {{t,x,1},{{bs_restore,1},16,{no_save_point,1}}}] = Errors, - ok. - bad_bin_match(Config) when is_list(Config) -> [{{t,t,1},{return,5,{match_context,{x,0}}}}] = do_val(bad_bin_match, Config), @@ -347,20 +316,20 @@ bad_dsetel(Config) when is_list(Config) -> ?line [{{t,t,1}, {{set_tuple_element,{x,1},{x,0},1}, - 15, + 17, illegal_context_for_set_tuple_element}}] = Errors, ok. state_after_fault_in_catch(Config) when is_list(Config) -> Errors = do_val(state_after_fault_in_catch, Config), - [{{t,foo,1}, - {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}, - {{state_after_fault_in_catch,if_end,1}, + [{{state_after_fault_in_catch,badmatch,1}, {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, {{state_after_fault_in_catch,case_end,1}, {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, - {{state_after_fault_in_catch,badmatch,1}, - {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}] = Errors, + {{state_after_fault_in_catch,if_end,1}, + {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, + {{t,foo,1}, + {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}] = Errors, ok. no_exception_in_catch(Config) when is_list(Config) -> @@ -370,13 +339,46 @@ no_exception_in_catch(Config) when is_list(Config) -> ok. undef_label(Config) when is_list(Config) -> - Errors = do_val(undef_label, Config), + M = {undef_label, + [{t,1}], + [], + [{function,t,1,2, + [{label,1}, + {func_info,{atom,undef_label},{atom,t},1}, + {label,2}, + {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}, + {move,{atom,ok},{x,0}}, + return]}, + {function,x,1,17, + [{label,3}, + {func_info,{atom,undef_label},{atom,x},1}, + {label,4}, + return]}], + 5}, + Errors = beam_val(M), [{{undef_label,t,1},{undef_labels,[42]}}, {{undef_label,x,1},{return,4,no_entry_label}}] = Errors, ok. illegal_instruction(Config) when is_list(Config) -> - Errors = do_val(illegal_instruction, Config), + M = {illegal_instruction, + [{t,1},{x,1},{y,0}], + [], + [{function,t,1,2, + [{label,1}, + {func_info,{atom,illegal_instruction},{atom,t},1}, + {label,2}, + {my_illegal_instruction,{x,0}}, + return]}, + {function,x,1,4, + [{label,3}, + bad_func_info, + {label,4}, + {my_illegal_instruction,{x,0}}, + return]}, + {function,y,0,17,[]}], + 5}, + Errors = beam_val(M), [{{illegal_instruction,t,1}, {{my_illegal_instruction,{x,0}},4,unknown_instruction}}, {{'_',x,1},{bad_func_info,1,illegal_instruction}}, @@ -414,19 +416,40 @@ process_request_foo(_) -> process_request_bar(Pid, [Response]) when is_pid(Pid) -> Response. 
+map_field_lists(Config) -> + Errors = do_val(map_field_lists, Config), + [{{map_field_lists,x,1}, + {{test,has_map_fields,{f,1},{x,0}, + {list,[{atom,z},{atom,a}]}}, + 5, + not_strict_order}}, + {{map_field_lists,y,1}, + {{test,has_map_fields,{f,3},{x,0},{list,[]}}, + 5, + empty_field_list}} + ] = Errors. %%%------------------------------------------------------------------------- -do_val(Name, Config) -> - do_val(Name, Config, ".S"). - -do_val(Name, Config, Type) -> - ?line Data = ?config(data_dir, Config), - ?line File = filename:join(Data, atom_to_list(Name)++Type), - ?line case beam_validator:file(File) of - {error,Errors} -> - ?line io:format("~p:~n~s", - [File,beam_validator:format_error(Errors)]), - Errors; - ok -> [] - end. +do_val(Mod, Config) -> + Data = ?config(data_dir, Config), + Base = atom_to_list(Mod), + File = filename:join(Data, Base), + case compile:file(File, [from_asm,no_postopt,return_errors]) of + {error,L,[]} -> + [{Base,Errors0}] = L, + Errors = [E || {beam_validator,E} <- Errors0], + _ = [io:put_chars(beam_validator:format_error(E)) || + E <- Errors], + Errors; + {ok,Mod} -> + [] + end. + +beam_val(M) -> + Name = atom_to_list(element(1, M)), + {error,[{Name,Errors0}]} = beam_validator:module(M, []), + Errors = [E || {beam_validator,E} <- Errors0], + _ = [io:put_chars(beam_validator:format_error(E)) || + E <- Errors], + Errors. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S index 279b2fa97f..9630d73a93 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S +++ b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S @@ -1,4 +1,4 @@ -{module, t}. %% version = 0 +{module, bad_dsetel}. %% version = 0 {exports, [{module_info,0},{module_info,1},{t,1}]}. @@ -21,7 +21,9 @@ {move,{integer,3},{x,0}}. {call_ext,3,{extfunc,erlang,setelement,3}}. {test_heap,6,1}. - {put_string,3,{string,"abc"},{x,1}}. + {put_list,{integer,99},nil,{x,1}}. + {put_list,{integer,98},{x,1},{x,1}}. + {put_list,{integer,97},{x,1},{x,1}}. {set_tuple_element,{x,1},{x,0},1}. {'%live',1}. {deallocate,0}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S index 2f353fbd25..a59f7ccc03 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S +++ b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S @@ -1,4 +1,4 @@ -{module, t}. %% version = 0 +{module, bin_aligned}. %% version = 0 {exports, [{decode,1},{module_info,0},{module_info,1}]}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S b/lib/compiler/test/beam_validator_SUITE_data/bin_match.S deleted file mode 100644 index 96df0f7933..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S +++ /dev/null @@ -1,64 +0,0 @@ -{module, bin_match}. %% version = 0 - -{exports, [{t,1}]}. - -{attributes, []}. - -{labels, 8}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,t},{atom,t},1}. - {label,2}. -%% {test,bs_start_match,{f,1},[{x,0}]}. - {bs_save,0}. - {test,bs_get_integer, - {f,3}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_get_integer, - {f,3}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}. - {test,bs_test_tail,{f,3},[0]}. - {test_heap,3,3}. - {put_tuple,2,{x,0}}. - {put,{x,1}}. - {put,{x,2}}. - {'%live',1}. - return. - {label,3}. - {bs_restore,0}. - {test,bs_get_integer, - {f,1}, - [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. 
- {test,bs_test_tail,{f,1},[0]}. - {move,{x,1},{x,0}}. - return. - -{function, x, 1, 5}. - {label,4}. - {func_info,{atom,t},{atom,x},1}. - {label,5}. - {test,bs_start_match,{f,4},[{x,0}]}. - {bs_save,0}. - {test,bs_get_integer, - {f,6}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_get_integer, - {f,6}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}. - {test,bs_test_tail,{f,6},[0]}. - {test_heap,3,3}. - {put_tuple,2,{x,0}}. - {put,{x,1}}. - {put,{x,2}}. - {'%live',1}. - return. - {label,6}. - {bs_restore,1}. - {test,bs_get_integer, - {f,4}, - [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_test_tail,{f,4},[0]}. - {move,{x,1},{x,0}}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S index f964f98fba..c114664ba0 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S +++ b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S @@ -1,10 +1,10 @@ {module, dead_code}. %% version = 0 -{exports, [{execute,0},{module_info,0},{module_info,1}]}. +{exports, [{execute,0}]}. {attributes, []}. -{labels, 10}. +{labels, 6}. {function, execute, 0, 2}. @@ -12,7 +12,6 @@ {func_info,{atom,dead_code},{atom,execute},0}. {label,2}. {allocate,0,0}. - {'%live',0}. {call_ext,0,{extfunc,foo,fie,0}}. {test,is_ne,{f,4},[{x,0},{integer,0}]}. {test,is_ne,{f,4},[{x,0},{integer,1}]}. @@ -22,27 +21,7 @@ {case_end,{x,0}}. {label,4}. {move,{atom,ok},{x,0}}. - {'%live',1}. {deallocate,0}. return. - {'%','Moved code'}. {label,5}. {case_end,{x,0}}. - - -{function, module_info, 0, 7}. - {label,6}. - {func_info,{atom,dead_code},{atom,module_info},0}. - {label,7}. - {move,nil,{x,0}}. - {'%live',1}. - return. - - -{function, module_info, 1, 9}. - {label,8}. - {func_info,{atom,dead_code},{atom,module_info},1}. - {label,9}. - {move,nil,{x,0}}. - {'%live',1}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam b/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam Binary files differdeleted file mode 100644 index dd58a88e42..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam +++ /dev/null diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S index ee583a923e..b3ebff3ade 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S @@ -1,10 +1,10 @@ {module, freg_range}. %% version = 0 -{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}. {attributes, []}. -{labels, 8}. +{labels, 9}. {function, sum_1, 2, 2}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S index ff4d7548ae..7466763482 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S @@ -1,6 +1,6 @@ {module, freg_state}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2},{sum_5,2}]}. {attributes, []}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S index f8d805d9ec..71e833446a 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S @@ -1,10 +1,10 @@ {module, freg_uninit}. 
%% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2}]}. {attributes, []}. -{labels, 8}. +{labels, 7}. {function, sum_1, 2, 2}. @@ -14,7 +14,6 @@ {fconv,{x,0},{fr,0}}. fclearerror. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. - {'%live',1}. return. @@ -26,7 +25,12 @@ {fconv,{x,1},{fr,1}}. fclearerror. {fcheckerror,{f,0}}. - {call,2,{f,8}}. + {call,2,{f,6}}. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. - {'%live',1}. + return. + +{function, foo, 2, 6}. + {label,5}. + {func_info,{atom,t},{atom,foo},2}. + {label,6}. return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S b/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S deleted file mode 100644 index d6e92abc71..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S +++ /dev/null @@ -1,26 +0,0 @@ -{module, illegal_instruction}. %% version = 0 - -{exports, []}. - -{attributes, []}. - -{labels, 7}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,illegal_instruction},{atom,t},1}. - {label,2}. - {my_illegal_instruction,{x,0}}. - return. - - -{function, x, 1, 4}. - {label,3}. - bad_func_info. - {label,4}. - {my_illegal_instruction,{x,0}}. - return. - -{function, y, 0, 17}. -
\ No newline at end of file diff --git a/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S new file mode 100644 index 0000000000..9af68c82d4 --- /dev/null +++ b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S @@ -0,0 +1,29 @@ +{module, map_field_lists}. %% version = 0 + +{exports, [{x,1},{y,1}]}. + +{attributes, []}. + +{labels, 5}. + + +{function, x, 1, 2}. + {label,1}. + {line,[{location,"map_field_lists.erl",4}]}. + {func_info,{atom,map_field_lists},{atom,x},1}. + {label,2}. + {test,is_map,{f,1},[{x,0}]}. + {test,has_map_fields,{f,1},{x,0},{list,[{atom,z},{atom,a}]}}. + {move,{atom,ok},{x,0}}. + return. + + +{function, y, 1, 4}. + {label,3}. + {line,[{location,"map_field_lists.erl",7}]}. + {func_info,{atom,map_field_lists},{atom,y},1}. + {label,4}. + {test,is_map,{f,3},[{x,0}]}. + {test,has_map_fields,{f,3},{x,0},{list,[]}}. + {move,{atom,ok},{x,0}}. + return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S index 3d76127824..481d55045d 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S +++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S @@ -22,7 +22,8 @@ {label,4}. {allocate_heap,1,6,2}. {move,{x,1},{y,0}}. - {put_string,2,{string,"~p"},{x,0}}. + {put_list,{integer,112},nil,{x,0}}. + {put_list,{integer,126},{x,0},{x,0}}. {put_list,{y,0},nil,{x,1}}. {'%live',2}. {call_ext,2,{extfunc,io,format,2}}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S index e08a718a39..1a5b417a5f 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S +++ b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S @@ -26,7 +26,7 @@ {call_ext,1,{extfunc,erlang,erase,1}}. {move,{atom,nested},{x,0}}. {call_ext,1,{extfunc,erlang,erase,1}}. - {bif,self,nofail,[],{x,0}}. + {bif,self,{f,0},[],{x,0}}. {'try',{y,8},{f,13}}. {'try',{y,7},{f,11}}. {'try',{y,6},{f,9}}. @@ -34,7 +34,7 @@ %% Because the following instructions can't possible throw an exception, %% label 7 used to get no state. Now the try_end itself will save the state. {move,{x,0},{y,4}}. - {bif,self,nofail,[],{x,0}}. + {bif,self,{f,0},[],{x,0}}. {'%live',1}. {try_end,{y,5}}. {test,is_eq_exact,{f,15},[{x,0},{y,4}]}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/stack.S b/lib/compiler/test/beam_validator_SUITE_data/stack.S index 244c22a2f9..e4356a9d00 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/stack.S +++ b/lib/compiler/test/beam_validator_SUITE_data/stack.S @@ -1,10 +1,10 @@ {module, stack}. %% version = 0 -{exports, [{a,2},{b,2},{c,2},{d,2},{e,2}]}. +{exports, [{a,2},{b,2},{c,2},{d,2},{e,2},{bad_1,0},{bad_2,0},{foo,0}]}. {attributes, []}. -{labels, 21}. +{labels, 17}. {function, a, 2, 2}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S b/lib/compiler/test/beam_validator_SUITE_data/undef_label.S deleted file mode 100644 index dd29066bf4..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S +++ /dev/null @@ -1,22 +0,0 @@ -{module, undef_label}. %% version = 0 - -{exports, []}. - -{attributes, []}. - -{labels, 7}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,undef_label},{atom,t},1}. - {label,2}. - {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}. - {move,{atom,ok},{x,0}}. - return. - -{function, x, 1, 17}. - {label,3}. 
- {func_info,{atom,undef_label},{atom,x},1}. - {label,4}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/uninit.S b/lib/compiler/test/beam_validator_SUITE_data/uninit.S index 1a45c31411..9a66f4f7d6 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/uninit.S @@ -1,9 +1,11 @@ {module, uninit}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2}]}. {attributes, []}. +{labels, 9}. + {function, sum_1, 2, 2}. {label,1}. {func_info,{atom,t},{atom,sum_1},2}. @@ -11,7 +13,7 @@ {allocate,1,2}. {move,{y,0},{x,0}}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. @@ -23,7 +25,7 @@ {label,4}. {allocate,1,2}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. @@ -35,14 +37,14 @@ {label,6}. {allocate_zero,1,2}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. return. -{function, id, 1, 10}. - {label,9}. +{function, id, 1, 8}. + {label,7}. {func_info,{atom,t},{atom,id},1}. - {label,10}. + {label,8}. return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/xrange.S b/lib/compiler/test/beam_validator_SUITE_data/xrange.S index 3abbdffbc2..c6f20288f7 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/xrange.S +++ b/lib/compiler/test/beam_validator_SUITE_data/xrange.S @@ -1,10 +1,10 @@ {module, xrange}. %% version = 0 -{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}. {attributes, []}. -{labels, 8}. +{labels, 9}. {function, sum_1, 2, 2}. diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl index 6c5b34498b..62bada1407 100644 --- a/lib/compiler/test/lc_SUITE.erl +++ b/lib/compiler/test/lc_SUITE.erl @@ -208,6 +208,17 @@ effect(Config) when is_list(Config) -> #{<<1:500>>:=V1,<<2:301>>:=V2} <- L], ok end, id([#{},x,#{<<1:500>>=>42,<<2:301>>=>{a,b,c}}])), + + %% Will trigger the time-trap timeout if not tail-recursive. + case ?MODULE of + lc_SUITE -> + _ = [{'EXIT',{badarg,_}} = + (catch binary_to_atom(<<C/utf8>>, utf8)) || + C <- lists:seq(16#10000, 16#FFFFF)]; + _ -> + ok + end, + ok. do_effect(Lc, L) -> diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl index bc5ae803c6..cfa8262701 100644 --- a/lib/compiler/test/map_SUITE.erl +++ b/lib/compiler/test/map_SUITE.erl @@ -641,6 +641,7 @@ t_build_and_match_nil(Config) when is_list(Config) -> "treat" => V2, [] => V1 }), #{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }), + #{ <<1>> := V3, [] := V1 } = id(#{ [] => V1, <<1>> => V3 }), ok. t_build_and_match_structure(Config) when is_list(Config) -> diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl index d721a86f5a..68a31f14d5 100644 --- a/lib/compiler/test/misc_SUITE.erl +++ b/lib/compiler/test/misc_SUITE.erl @@ -280,6 +280,14 @@ silly_coverage(Config) when is_list(Config) -> {label,2}|non_proper_list]}],99}, expect_error(fun() -> beam_z:module(BeamZInput, []) end), + %% beam_validator. + BeamValInput = {?MODULE,[{foo,0}],[], + [{function,foo,0,2, + [{label,1}, + {func_info,{atom,?MODULE},{atom,foo},0}, + {label,2}|non_proper_list]}],99}, + expect_error(fun() -> beam_validator:module(BeamValInput, []) end), + ok. 
expect_error(Fun) -> diff --git a/lib/compiler/test/trycatch_SUITE.erl b/lib/compiler/test/trycatch_SUITE.erl index 8ab618bb01..80d93fbfa4 100644 --- a/lib/compiler/test/trycatch_SUITE.erl +++ b/lib/compiler/test/trycatch_SUITE.erl @@ -24,7 +24,8 @@ catch_oops/1,after_oops/1,eclectic/1,rethrow/1, nested_of/1,nested_catch/1,nested_after/1, nested_horrid/1,last_call_optimization/1,bool/1, - plain_catch_coverage/1,andalso_orelse/1,get_in_try/1]). + plain_catch_coverage/1,andalso_orelse/1,get_in_try/1, + hockey/1]). -include_lib("test_server/include/test_server.hrl"). @@ -39,7 +40,8 @@ groups() -> [basic,lean_throw,try_of,try_after,catch_oops, after_oops,eclectic,rethrow,nested_of,nested_catch, nested_after,nested_horrid,last_call_optimization, - bool,plain_catch_coverage,andalso_orelse,get_in_try]}]. + bool,plain_catch_coverage,andalso_orelse,get_in_try, + hockey]}]. init_per_suite(Config) -> @@ -943,3 +945,14 @@ get_valid_line([_|T]=Path, Annotations) -> _:not_found -> get_valid_line(T, Annotations) end. + +hockey(_) -> + {'EXIT',{{badmatch,_},[_|_]}} = (catch hockey()), + ok. + +hockey() -> + %% beam_jump used to generate a call into the try block. + %% beam_validator disapproved. + receive _ -> (b = fun() -> ok end) + + hockey, +x after 0 -> ok end, try (a = fun() -> ok end) + hockey, + + y catch _ -> ok end. diff --git a/lib/kernel/src/gen_udp.erl b/lib/kernel/src/gen_udp.erl index 70dceb3679..860eec10a0 100644 --- a/lib/kernel/src/gen_udp.erl +++ b/lib/kernel/src/gen_udp.erl @@ -78,7 +78,7 @@ ipv6_v6only. -type socket() :: port(). --export_type([option/0, option_name/0]). +-export_type([option/0, option_name/0, socket/0]). -spec open(Port) -> {ok, Socket} | {error, Reason} when Port :: inet:port_number(), diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl index 43bab8bcf0..ec2c350931 100644 --- a/lib/kernel/src/inet.erl +++ b/lib/kernel/src/inet.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2014. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -1070,7 +1070,7 @@ gethostbyname_tm(Name, Type, Timer, [wins|_]=Opts) -> gethostbyname_tm_native(Name, Type, Timer, Opts); gethostbyname_tm(Name, Type, Timer, [native|_]=Opts) -> gethostbyname_tm_native(Name, Type, Timer, Opts); -gethostbyname_tm(Name, Type, Timer, [_|_]=Opts) -> +gethostbyname_tm(Name, Type, Timer, [_|Opts]) -> gethostbyname_tm(Name, Type, Timer, Opts); %% Make sure we always can look up our own hostname. gethostbyname_tm(Name, Type, Timer, []) -> diff --git a/lib/kernel/test/inet_SUITE.erl b/lib/kernel/test/inet_SUITE.erl index d45dfc2173..849013ac79 100644 --- a/lib/kernel/test/inet_SUITE.erl +++ b/lib/kernel/test/inet_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2014. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -36,6 +36,7 @@ gethostnative_parallell/1, cname_loop/1, gethostnative_soft_restart/0, gethostnative_soft_restart/1, gethostnative_debug_level/0, gethostnative_debug_level/1, + lookup_bad_search_option/1, getif/1, getif_ifr_name_overflow/1,getservbyname_overflow/1, getifaddrs/1, parse_strict_address/1, simple_netns/1, simple_netns_open/1]). 
@@ -52,6 +53,7 @@ all() -> ipv4_to_ipv6, host_and_addr, {group, parse}, t_gethostnative, gethostnative_parallell, cname_loop, gethostnative_debug_level, gethostnative_soft_restart, + lookup_bad_search_option, getif, getif_ifr_name_overflow, getservbyname_overflow, getifaddrs, parse_strict_address, simple_netns, simple_netns_open]. @@ -908,6 +910,21 @@ lookup_loop([H|Hs], Delay, Tag, Parent, Cnt, Hosts) -> +lookup_bad_search_option(suite) -> + []; +lookup_bad_search_option(doc) -> + ["Test lookup with erroneously configured lookup option (OTP-12133)"]; +lookup_bad_search_option(Config) when is_list(Config) -> + Db = inet_db, + %% The bad option can not enter through inet_db:set_lookup/1, + %% but through e.g .inetrc. + ets:insert(Db, {res_lookup,[lookup_bad_search_option]}), + {ok,Hostname} = inet:gethostname(), + {ok,_Hent} = inet:gethostbyname(Hostname), % Will hang loop for this bug + ok. + + + getif(suite) -> []; getif(doc) -> diff --git a/lib/observer/doc/src/observer_ug.xml b/lib/observer/doc/src/observer_ug.xml index 62f99c5210..fcb42f6c31 100644 --- a/lib/observer/doc/src/observer_ug.xml +++ b/lib/observer/doc/src/observer_ug.xml @@ -104,6 +104,29 @@ <note> <p><em>Reds</em> can be presented as accumulated values or as values since last update.</p> </note> + <p><c>Process info</c> open a detailed information window on the selected process. + <taglist> + <tag>Process Information</tag> + <item>Shows the process information.</item> + <tag>Messages</tag> + <item>Shows the process messages.</item> + <tag>Dictionary</tag> + <item>Shows the process dictionary.</item> + <tag>Stack Trace</tag> + <item>Shows the process current stack trace.</item> + <tag>State</tag> + <item>Show the process state.</item> + <tag>Log</tag> + <item>If enabled and available, show the process SASL log entries.</item> + </taglist> + <note> + <p><c>Log</c> needs SASL application to be started on the observed node, with log_mf_h as log handler. + The Observed node must be R16B02 or higher. + <c>rb</c> server must not be started on the observed node when clicking on menu 'Log/Toggle log view'. + <c>rb</c> server will be stopped on the observed node when exiting or changing observed node. + </p> + </note> + </p> <p><c>Trace Processes</c> will add the selected process identifiers to the <c>Trace Overview</c> view and the node the processes reside on will be added as well. <c>Trace Named Processes</c> will add the registered name of processes. This can be useful diff --git a/lib/observer/src/observer_html_lib.erl b/lib/observer/src/observer_html_lib.erl index c279218707..53197078cf 100644 --- a/lib/observer/src/observer_html_lib.erl +++ b/lib/observer/src/observer_html_lib.erl @@ -60,7 +60,8 @@ expandable_term_body(Heading,[],_Tab) -> "StackDump" -> "No stack dump was found"; "Dictionary" -> "No dictionary was found"; "ProcState" -> "Information could not be retrieved," - " system messages may not be handled by this process." 
+ " system messages may not be handled by this process."; + "SaslLog" -> "No log entry was found" end]; expandable_term_body(Heading,Expanded,Tab) -> Attr = "BORDER=0 CELLPADDING=0 CELLSPACING=1 WIDTH=100%", @@ -102,7 +103,10 @@ expandable_term_body(Heading,Expanded,Tab) -> element(1, lists:mapfoldl(fun(Entry, Even) -> {proc_state(Tab, Entry,Even), not Even} - end, true, Expanded))]); + end, true, Expanded))]); + "SaslLog" -> + table(Attr, + [tr("BGCOLOR=white",[td("ALIGN=left", pre(href_proc_port(Expanded)))])]) ; _ -> table(Attr, [tr( diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl index 8e8a37fc93..a8512894f9 100644 --- a/lib/observer/src/observer_procinfo.erl +++ b/lib/observer/src/observer_procinfo.erl @@ -43,6 +43,8 @@ -record(worker, {panel, callback}). +-record(io, {rdata=""}). + start(Process, ParentFrame, Parent) -> wx_object:start_link(?MODULE, [Process, ParentFrame, Parent], []). @@ -69,6 +71,10 @@ init([Pid, ParentFrame, Parent]) -> DictPage = init_panel(Notebook, "Dictionary", [Pid,Table], fun init_dict_page/3), StackPage = init_panel(Notebook, "Stack Trace", [Pid], fun init_stack_page/2), StatePage = init_panel(Notebook, "State", [Pid,Table], fun init_state_page/3), + Ps = case gen_server:call(observer, log_status) of + true -> [init_panel(Notebook, "Log", [Pid,Table], fun init_log_page/3)]; + false -> [] + end, wxFrame:connect(Frame, close_window), wxMenu:connect(Frame, command_menu_selected), @@ -78,7 +84,7 @@ init([Pid, ParentFrame, Parent]) -> pid=Pid, frame=Frame, notebook=Notebook, - pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage], + pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage|Ps], expand_table=Table }} catch error:{badrpc, _} -> @@ -327,6 +333,26 @@ fetch_state_info2(Pid, M) -> {badrpc,{'EXIT',{timeout, _}}} -> [] end. +init_log_page(Parent, Pid, Table) -> + Win = observer_lib:html_window(Parent), + Update = fun() -> + Fd = spawn_link(fun() -> io_server() end), + rpc:call(node(Pid), rb, rescan, [[{start_log, Fd}]]), + rpc:call(node(Pid), rb , grep, [local_pid_str(Pid)]), + Logs = io_get_data(Fd), + %% Replace remote local pid notation to global notation + Pref = global_pid_node_pref(Pid), + ExpPid = re:replace(Logs,"<0\.","<" ++ Pref ++ ".",[global, {return, list}]), + %% Try to keep same look by removing blanks at right of rewritten PID + NbBlanks = length(Pref) - 1, + Re = "(<" ++ Pref ++ "\.[^>]{1,}>)[ ]{"++ integer_to_list(NbBlanks) ++ "}", + Look = re:replace(ExpPid, Re, "\\1", [global, {return, list}]), + Html = observer_html_lib:expandable_term("SaslLog", Look, Table), + wxHtmlWindow:setPage(Win, Html) + end, + Update(), + {Win, Update}. + create_menus(MenuBar) -> Menus = [{"File", [#create_menu{id=?wxID_CLOSE, text="Close"}]}, {"View", [#create_menu{id=?REFRESH, text="Refresh\tCtrl-R"}]}], @@ -409,3 +435,55 @@ filter_monitor_info() -> Ms = proplists:get_value(monitors, Data), [Pid || {process, Pid} <- Ms] end. + +local_pid_str(Pid) -> + %% observer can observe remote nodes + %% There is no function to get the local + %% pid from the remote pid ... + %% So grep will fail to find remote pid in remote local log. + %% i.e. <4589.42.1> will not be found, but <0.42.1> will + %% Let's replace first integer by zero + "<0" ++ re:replace(pid_to_list(Pid),"\<([0-9]{1,})","",[{return, list}]). + +global_pid_node_pref(Pid) -> + %% Global PID node prefix : X of <X.Y.Z> + string:strip(string:sub_word(pid_to_list(Pid),1,$.),left,$<). + + +io_get_data(Pid) -> + Pid ! 
{self(), get_data_and_close}, + receive + {Pid, data, Data} -> lists:flatten(Data) + end. + +io_server() -> + io_server(#io{}). + +io_server(State) -> + receive + {io_request, From, ReplyAs, Request} -> + case io_request(Request,State) of + {Tag, Reply, NewState} when Tag =:= ok; Tag =:= error -> + From ! {io_reply, ReplyAs, Reply}, + io_server(NewState); + {stop, Reply, _NewState} -> + From ! {io_reply, ReplyAs, Reply}, + exit(Reply) + end; + {Pid, get_data_and_close} -> + Pid ! {self(), data, lists:reverse(State#io.rdata)}, + normal; + _Unknown -> + io:format("~p: Unknown msg: ~p ~n",[?LINE, _Unknown]), + io_server(State) + end. + +io_request({put_chars, _Encoding, Chars}, State = #io{rdata=Data}) -> + {ok, ok, State#io{rdata=[Chars|Data]}}; +io_request({put_chars, Encoding, Module, Function, Args}, State) -> + try io_request({put_chars, Encoding, apply(Module, Function, Args)}, State) + catch _:_ -> {error, {error,Function}, State} + end; +io_request(Req, State) -> + io:format("~p: Unknown req: ~p ~n",[?LINE, Req]), + State. diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl index c86f5ea916..54c4092a78 100644 --- a/lib/observer/src/observer_wx.erl +++ b/lib/observer/src/observer_wx.erl @@ -37,6 +37,7 @@ -define(ID_CONNECT, 2). -define(ID_NOTEBOOK, 3). -define(ID_CDV, 4). +-define(ID_LOGVIEW, 5). -define(FIRST_NODES_MENU_ID, 1000). -define(LAST_NODES_MENU_ID, 2000). @@ -60,7 +61,8 @@ active_tab, node, nodes, - prev_node="" + prev_node="", + log = false }). start() -> @@ -215,14 +217,17 @@ handle_event(#wx{event=#wxNotebook{type=command_notebook_page_changing}}, {noreply, State#state{active_tab=Pid}} end; -handle_event(#wx{event = #wxClose{}}, State) -> - {stop, normal, State}; - handle_event(#wx{id = ?ID_CDV, event = #wxCommand{type = command_menu_selected}}, State) -> spawn(crashdump_viewer, start, []), {noreply, State}; -handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, State) -> +handle_event(#wx{event = #wxClose{}}, #state{log=LogOn} = State) -> + LogOn andalso rpc:block_call(State#state.node, rb, stop, []), + {stop, normal, State}; + +handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, + #state{log=LogOn} = State) -> + LogOn andalso rpc:block_call(State#state.node, rb, stop, []), {stop, normal, State}; handle_event(#wx{id = ?wxID_HELP, event = #wxCommand{type = command_menu_selected}}, State) -> @@ -300,12 +305,42 @@ handle_event(#wx{id = ?ID_PING, event = #wxCommand{type = command_menu_selected} end, {noreply, UpdState}; -handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, State) - when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID -> +handle_event(#wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}}, + #state{frame = Frame, log = PrevLog, node = Node} = State) -> + try + ok = ensure_sasl_started(Node), + ok = ensure_mf_h_handler_used(Node), + ok = ensure_rb_mode(Node, PrevLog), + case PrevLog of + false -> + rpc:block_call(Node, rb, start, []), + set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server started)"), + {noreply, State#state{log=true}}; + true -> + rpc:block_call(Node, rb, stop, []), + set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server stopped)"), + {noreply, State#state{log=false}} + end + catch + throw:Reason -> + create_txt_dialog(Frame, Reason, "Log view status", ?wxICON_ERROR), + {noreply, State} + end; - Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, State#state.nodes), - UpdState = 
change_node_view(Node, State), - {noreply, UpdState}; +handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, + #state{nodes= Ns , node = PrevNode, log = PrevLog} = State) + when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID -> + Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, Ns), + %% Close rb_server only if another node than current one selected + LState = case PrevLog of + true -> case Node == PrevNode of + false -> rpc:block_call(PrevNode, rb, stop, []), + State#state{log=false} ; + true -> State + end; + false -> State + end, + {noreply, change_node_view(Node, LState)}; handle_event(Event, State) -> Pid = get_active_pid(State), @@ -340,6 +375,9 @@ handle_call(stop, _, State = #state{frame = Frame}) -> wxFrame:destroy(Frame), {stop, normal, ok, State}; +handle_call(log_status, _From, State) -> + {reply, State#state.log, State}; + handle_call(_Msg, _From, State) -> {reply, ok, State}. @@ -422,8 +460,7 @@ return_to_localnode(Frame, Node) -> end. create_txt_dialog(Frame, Msg, Title, Style) -> - MD = wxMessageDialog:new(Frame, Msg, [{style, Style}]), - wxMessageDialog:setTitle(MD, Title), + MD = wxMessageDialog:new(Frame, Msg, [{style, Style}, {caption,Title}]), wxDialog:showModal(MD), wxDialog:destroy(MD). @@ -569,17 +606,19 @@ default_menus(NodesMenuItems) -> false -> {"Nodes", NodesMenuItems ++ [#create_menu{id = ?ID_CONNECT, text = "Enable distribution"}]} end, + LogMenu = {"Log", [#create_menu{id = ?ID_LOGVIEW, text = "Toggle log view"}]}, case os:type() =:= {unix, darwin} of false -> FileMenu = {"File", [CDV, Quit]}, HelpMenu = {"Help", [About,Help]}, - [FileMenu, NodeMenu, HelpMenu]; + [FileMenu, NodeMenu, LogMenu, HelpMenu]; true -> %% On Mac quit and about will be moved to the "default' place %% automagicly, so just add them to a menu that always exist. %% But not to the help menu for some reason - {Tag, Menus} = FileMenu, - [{Tag, Menus ++ [About]}, NodeMenu, {"&Help", [Help]}] + + {Tag, Menus} = NodeMenu, + [{Tag, Menus ++ [Quit,About]}, LogMenu, {"&Help", [Help]}] end. clean_menus(Menus, MenuBar) -> @@ -658,3 +697,59 @@ update_node_list(State = #state{menubar=MenuBar}) -> end, observer_lib:create_menu_item(Dist, NodeMenu, Index), State#state{nodes = Nodes}. + +ensure_sasl_started(Node) -> + %% is sasl started ? + Apps = rpc:block_call(Node, application, which_applications, []), + case lists:keyfind(sasl, 1, Apps) of + false -> throw("Error: sasl application not started."), + error; + {sasl, _, _} -> ok + end. + +ensure_mf_h_handler_used(Node) -> + %% is log_mf_h used ? + Handlers = rpc:block_call(Node, gen_event, which_handlers, [error_logger]), + case lists:any(fun(L)-> L == log_mf_h end, Handlers) of + false -> throw("Error: log_mf_h handler not used in sasl."), + error; + true -> ok + end. + +ensure_rb_mode(Node, PrevLog) -> + ok = ensure_rb_module_loaded(Node), + ok = is_rb_compatible(Node), + ok = is_rb_server_running(Node, PrevLog), + ok. + + +ensure_rb_module_loaded(Node) -> + %% Need to ensure that module is loaded in order to detect exported + %% functions on interactive nodes + case rpc:block_call(Node, code, ensure_loaded, [rb]) of + {badrpc, Reason} -> + throw("Error: badrpc - " ++ io_lib:format("~tp",[Reason])); + {error, Reason} -> + throw("Error: rb module load error - " ++ io_lib:format("~tp",[Reason])); + {module,rb} -> + ok + end. 
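Note: a minimal sketch of what the observed node needs before the new "Toggle log view" menu item can succeed; otherwise ensure_sasl_started/1 and ensure_mf_h_handler_used/1 above throw and the menu action only shows an error dialog. The directory and size limits are illustrative values, mirroring the setup added to observer_SUITE further down:

    %% Illustrative values, not part of the patch: sasl must run with the
    %% log_mf_h report handler so that rb can read the multi-file error log.
    application:set_env(sasl, sasl_error_logger, tty),
    application:set_env(sasl, error_logger_mf_dir, "/tmp/mf_log"),
    application:set_env(sasl, error_logger_mf_maxbytes, 1000),
    application:set_env(sasl, error_logger_mf_maxfiles, 5),
    ok = application:start(sasl).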
+ +is_rb_compatible(Node) -> + %% Simply test that rb:log_list/0 is exported + case rpc:block_call(Node, erlang, function_exported, [rb, log_list, 0]) of + false -> throw("Error: Node's Erlang release must be at least R16B02."); + true -> ok + end. + +is_rb_server_running(Node, LogState) -> + %% If already started, somebody else may use it. + %% We can not use it too, as far log file would be overriden. Not fair. + case rpc:block_call(Node, erlang, whereis, [rb_server]) of + Pid when is_pid(Pid), (LogState == false) -> + throw("Error: rb_server is already started and maybe used by someone."); + Pid when is_pid(Pid) -> + ok; + undefined -> + ok + end. diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl index 5cf719acb1..c69fdf4bdf 100644 --- a/lib/observer/test/observer_SUITE.erl +++ b/lib/observer/test/observer_SUITE.erl @@ -22,6 +22,8 @@ -include_lib("wx/include/wx.hrl"). -include_lib("observer/src/observer_tv.hrl"). +-define(ID_LOGVIEW, 5). + %% Test server specific exports -export([all/0, suite/0,groups/0]). -export([init_per_testcase/2, end_per_testcase/2, @@ -44,8 +46,9 @@ all() -> groups() -> [{gui, [], - [basic - , process_win, table_win + [basic, + process_win, + table_win ] }]. @@ -107,7 +110,7 @@ appup_file(Config) when is_list(Config) -> basic(suite) -> []; basic(doc) -> [""]; basic(Config) when is_list(Config) -> - timer:send_after(100, "foobar"), %% Otherwise the timer sever gets added to procs + timer:send_after(100, "foobar"), %% Otherwise the timer server gets added to procs ProcsBefore = processes(), NumProcsBefore = length(ProcsBefore), @@ -126,7 +129,7 @@ basic(Config) when is_list(Config) -> timer:sleep(200), ok = wxNotebook:advanceSelection(Notebook) end, - %% Just verify that we can toogle trough all pages + %% Just verify that we can toggle through all pages [_|_] = [Check(N, false) || N <- lists:seq(1, Count)], %% Cause it to resize Frame = get_top_level_parent(Notebook), @@ -214,10 +217,27 @@ test_page(Title, Window) -> process_win(suite) -> []; process_win(doc) -> [""]; process_win(Config) when is_list(Config) -> + % Stop SASL if already started + SaslStart = case whereis(sasl_sup) of + undefined -> false; + _ -> application:stop(sasl), + true + end, + % Define custom sasl and log_mf_h app vars + Privdir=?config(priv_dir,Config), + application:set_env(sasl, sasl_error_logger, tty), + application:set_env(sasl, error_logger_mf_dir, Privdir), + application:set_env(sasl, error_logger_mf_maxbytes, 1000), + application:set_env(sasl, error_logger_mf_maxfiles, 5), + application:start(sasl), ok = observer:start(), ObserverNB = setup_whitebox_testing(), Parent = get_top_level_parent(ObserverNB), - Frame = observer_procinfo:start(self(), Parent, self()), + % Activate log view + whereis(observer) ! #wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}}, + timer:sleep(1000), + % Process window tests (use sasl_sup for a non empty Log tab) + Frame = observer_procinfo:start(whereis(sasl_sup), Parent, self()), PIPid = wx_object:get_pid(Frame), PIPid ! {get_debug_info, self()}, Notebook = receive {procinfo_debug, NB} -> NB end, @@ -229,6 +249,11 @@ process_win(Config) when is_list(Config) -> [_|_] = [Check(N) || N <- lists:seq(1, Count)], PIPid ! #wx{event=#wxClose{type=close_window}}, observer:stop(), + application:stop(sasl), + case SaslStart of + true -> application:start(sasl); + false -> ok + end, ok. 
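Note: the Log tab added to observer_procinfo above collects its contents through rb on the observed node; a condensed sketch of that interaction, with the node and pid string as placeholders:

    %% Fd is the minimal io server defined above; it accumulates everything
    %% rb prints as put_chars requests.
    Fd = spawn_link(fun() -> io_server() end),
    rpc:call(Node, rb, rescan, [[{start_log, Fd}]]),   %% redirect rb output to Fd
    rpc:call(Node, rb, grep, ["<0.42.1>"]),            %% placeholder local pid string
    Fd ! {self(), get_data_and_close},
    Log = receive {Fd, data, Data} -> lists:flatten(Data) end.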
table_win(suite) -> []; diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl index cf8fb3114a..5a9f63c5e2 100644 --- a/lib/stdlib/src/dict.erl +++ b/lib/stdlib/src/dict.erl @@ -417,6 +417,8 @@ on_bucket(F, T, Slot) -> %% could have implemented map and filter using fold but these are %% faster. We hope! +fold_dict(F, Acc, #dict{size=0}) when is_function(F, 3) -> + Acc; fold_dict(F, Acc, D) -> Segs = D#dict.segs, fold_segs(F, Acc, Segs, tuple_size(Segs)). @@ -434,6 +436,8 @@ fold_bucket(F, Acc, [?kv(Key,Val)|Bkt]) -> fold_bucket(F, F(Key, Val, Acc), Bkt); fold_bucket(F, Acc, []) when is_function(F, 3) -> Acc. +map_dict(F, #dict{size=0} = Dict) when is_function(F, 2) -> + Dict; map_dict(F, D) -> Segs0 = tuple_to_list(D#dict.segs), Segs1 = map_seg_list(F, Segs0), @@ -453,6 +457,8 @@ map_bucket(F, [?kv(Key,Val)|Bkt]) -> [?kv(Key,F(Key, Val))|map_bucket(F, Bkt)]; map_bucket(F, []) when is_function(F, 2) -> []. +filter_dict(F, #dict{size=0} = Dict) when is_function(F, 2) -> + Dict; filter_dict(F, D) -> Segs0 = tuple_to_list(D#dict.segs), {Segs1,Fc} = filter_seg_list(F, Segs0, [], 0), diff --git a/lib/test_server/src/erl2html2.erl b/lib/test_server/src/erl2html2.erl index b9b45cda25..7cfaa2c325 100644 --- a/lib/test_server/src/erl2html2.erl +++ b/lib/test_server/src/erl2html2.erl @@ -22,11 +22,11 @@ %%%------------------------------------------------------------------ -module(erl2html2). --export([convert/2, convert/3]). +-export([convert/3, convert/4]). -convert([], _Dest) -> % Fake clause. +convert([], _Dest, _InclPath) -> % Fake clause. ok; -convert(File, Dest) -> +convert(File, Dest, InclPath) -> %% The generated code uses the BGCOLOR attribute in the %% BODY tag, which wasn't valid until HTML 3.2. Also, %% good HTML should either override all colour attributes @@ -48,12 +48,12 @@ convert(File, Dest) -> "</head>\n\n" "<body bgcolor=\"white\" text=\"black\"" " link=\"blue\" vlink=\"purple\" alink=\"red\">\n"], - convert(File, Dest, Header). + convert(File, Dest, InclPath, Header). -convert(File, Dest, Header) -> +convert(File, Dest, InclPath, Header) -> %% statistics(runtime), - case parse_file(File) of + case parse_file(File, InclPath) of {ok,Functions} -> %% {_, Time1} = statistics(runtime), %% io:format("Parsed file in ~.2f Seconds.~n",[Time1/1000]), @@ -92,8 +92,8 @@ convert(File, Dest, Header) -> %%% Use expanded preprocessor directives if possible (epp). Only if %%% this fails, fall back on using non-expanded code (epp_dodger). 
-parse_file(File) -> - case epp:open(File, [], []) of +parse_file(File, InclPath) -> + case epp:open(File, InclPath, []) of {ok,Epp} -> try parse_preprocessed_file(Epp,File,false) of Forms -> @@ -145,13 +145,15 @@ parse_non_preprocessed_file(File) -> parse_non_preprocessed_file(Epp, File, Location) -> case epp_dodger:parse_form(Epp, Location) of {ok,Tree,Location1} -> - case erl_syntax:revert(Tree) of + try erl_syntax:revert(Tree) of {function,L,F,A,[_|C]} -> Clauses = [{clause,CL} || {clause,CL,_,_,_} <- C], [{atom_to_list(F),A,L} | Clauses] ++ parse_non_preprocessed_file(Epp, File, Location1); _ -> parse_non_preprocessed_file(Epp, File, Location1) + catch + _:_ -> parse_non_preprocessed_file(Epp, File, Location1) end; {error,_E,Location1} -> parse_non_preprocessed_file(Epp, File, Location1); diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl index 9192a76a17..1d989ce9c8 100644 --- a/lib/test_server/src/test_server.erl +++ b/lib/test_server/src/test_server.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2014. All Rights Reserved. +%% Copyright Ericsson AB 1996-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -177,68 +177,35 @@ module_names(Beams) -> do_cover_compile(Modules) -> cover:start(), - pmap1(fun(M) -> do_cover_compile1(M) end,lists:usort(Modules)), + Sticky = prepare_cover_compile(Modules,[]), + R = cover:compile_beam(Modules), + [warn_compile(Error) || Error <- R,element(1,Error)=/=ok], + [code:stick_mod(M) || M <- Sticky], ok. -do_cover_compile1(M) -> +warn_compile({error,{Reason,Module}}) -> + io:fwrite("\nWARNING: Could not cover compile ~ts: ~p\n", + [Module,{error,Reason}]). + +%% Make sure all modules are loaded and unstick if sticky +prepare_cover_compile([M|Ms],Sticky) -> case {code:is_sticky(M),code:is_loaded(M)} of {true,_} -> code:unstick_mod(M), - case cover:compile_beam(M) of - {ok,_} -> - ok; - Error -> - io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n", - [M,Error]) - end, - code:stick_mod(M); + prepare_cover_compile(Ms,[M|Sticky]); {false,false} -> case code:load_file(M) of {module,_} -> - do_cover_compile1(M); + prepare_cover_compile([M|Ms],Sticky); Error -> - io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]) + io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]), + prepare_cover_compile(Ms,Sticky) end; {false,_} -> - case cover:compile_beam(M) of - {ok,_} -> - ok; - Error -> - io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n", - [M,Error]) - end - end. - -pmap1(Fun,List) -> - NTot = length(List), - NProcs = erlang:system_info(schedulers) * 2, - NPerProc = (NTot div NProcs) + 1, - - {[],Pids} = - lists:foldr( - fun(_,{L,Ps}) -> - {L1,L2} = if length(L)>=NPerProc -> lists:split(NPerProc,L); - true -> {L,[]} % last chunk - end, - {P,_Ref} = - spawn_monitor(fun() -> - exit(lists:map(Fun,L1)) - end), - {L2,[P|Ps]} - end, - {List,[]}, - lists:seq(1,NProcs)), - collect(Pids,[]). - -collect([],Acc) -> - lists:append(Acc); -collect([Pid|Pids],Acc) -> - receive - {'DOWN', _Ref, process, Pid, Result} -> - %% collect(lists:delete(Pid,Pids),[Result|Acc]) - collect(Pids,[Result|Acc]) - end. - + prepare_cover_compile(Ms,Sticky) + end; +prepare_cover_compile([],Sticky) -> + Sticky. 
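Note: do_cover_compile/1 above now hands the whole module list to cover in one call; a usage sketch of the list form of cover:compile_beam/1 (module and file names are placeholders). One result is returned per module and the order is undefined:

    Results  = cover:compile_beam([mod_a, mod_b, "ebin/mod_c.beam"]),
    Failed   = [Err || {error, _} = Err <- Results],   %% e.g. {error,{no_abstract_code,File}}
    Compiled = [M || {ok, M} <- Results].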
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop) -> @@ -268,45 +235,40 @@ collect([Pid|Pids],Acc) -> %% after the test is completed. cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) -> io:fwrite(user, "Cover analysing... ", []), - DetailsFun = + {ATFOk,ATFFail} = case Analyse of details -> case cover:export(filename:join(Dir,"all.coverdata")) of ok -> - fun(M) -> - OutFile = filename:join(Dir, - atom_to_list(M) ++ - ".COVER.html"), - case cover:analyse_to_file(M,OutFile,[html]) of - {ok,_} -> - {file,OutFile}; - Error -> - Error - end - end; + {result,Ok1,Fail1} = + cover:analyse_to_file(Modules,[{outdir,Dir},html]), + {lists:map(fun(OutFile) -> + M = list_to_atom( + filename:basename( + filename:rootname(OutFile, + ".COVER.html") + ) + ), + {M,{file,OutFile}} + end, Ok1), + lists:map(fun({Reason,M}) -> + {M,{error,Reason}} + end, Fail1)}; Error -> - fun(_) -> Error end + {[],lists:map(fun(M) -> {M,Error} end, Modules)} end; overview -> case cover:export(filename:join(Dir,"all.coverdata")) of ok -> - fun(_) -> undefined end; + {[],lists:map(fun(M) -> {M,undefined} end, Modules)}; Error -> - fun(_) -> Error end + {[],lists:map(fun(M) -> {M,Error} end, Modules)} end end, - R = pmap2( - fun(M) -> - case cover:analyse(M,module) of - {ok,{M,{Cov,NotCov}}} -> - {M,{Cov,NotCov,DetailsFun(M)}}; - Err -> - io:fwrite(user, - "\nWARNING: Analysis failed for ~w. Reason: ~p\n", - [M,Err]), - {M,Err} - end - end, Modules), + {result,AOk,AFail} = cover:analyse(Modules,module), + R = merge_analysis_results(AOk,ATFOk++ATFFail,[]) ++ + [{M,{error,Reason}} || {Reason,M} <- AFail], + io:fwrite(user, "done\n\n", []), case Stop of @@ -319,19 +281,15 @@ cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) -> end, R. -pmap2(Fun,List) -> - Collector = self(), - Pids = lists:map(fun(E) -> - spawn(fun() -> - Collector ! {res,self(),Fun(E)} - end) - end, List), - lists:map(fun(Pid) -> - receive - {res,Pid,Res} -> - Res - end - end, Pids). +merge_analysis_results([{M,{Cov,NotCov}}|T],ATF,Acc) -> + case lists:keytake(M,1,ATF) of + {value,{_,R},ATF1} -> + merge_analysis_results(T,ATF1,[{M,{Cov,NotCov,R}}|Acc]); + false -> + merge_analysis_results(T,ATF,Acc) + end; +merge_analysis_results([],_,Acc) -> + Acc. do_cover_for_node(Node,CoverFunc) -> do_cover_for_node(Node,CoverFunc,true). diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl index af8921fe75..488f38d05d 100644 --- a/lib/test_server/src/test_server_ctrl.erl +++ b/lib/test_server/src/test_server_ctrl.erl @@ -1927,15 +1927,20 @@ html_possibly_convert(Src, SrcInfo, Dest) -> {ok,DestInfo} when DestInfo#file_info.mtime >= SrcInfo#file_info.mtime -> ok; % dest file up to date _ -> + InclPath = case application:get_env(test_server, include) of + {ok,Incls} -> Incls; + _ -> [] + end, + OutDir = get(test_server_log_dir_base), case test_server_sup:framework_call(get_html_wrapper, ["Module "++Src,false, OutDir,undefined, encoding(Src)], "") of Empty when (Empty == "") ; (element(2,Empty) == "") -> - erl2html2:convert(Src, Dest); + erl2html2:convert(Src, Dest, InclPath); {_,Header,_} -> - erl2html2:convert(Src, Dest, Header) + erl2html2:convert(Src, Dest, InclPath, Header) end end. 
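Note: cover_analyse/2 above replaces the per-module analyse_to_file loop with one batched call; a sketch of that call and its result shape (directory and module names are placeholders):

    {result, OutFiles, Failed} =
        cover:analyse_to_file([mod_a, mod_b], [{outdir, "cover_logs"}, html]),
    %% OutFiles are the generated "<Module>.COVER.html" paths; Failed holds
    %% reasons such as {not_cover_compiled, Module}.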
diff --git a/lib/test_server/test/erl2html2_SUITE.erl b/lib/test_server/test/erl2html2_SUITE.erl index 37c2b74d8e..908985c879 100644 --- a/lib/test_server/test/erl2html2_SUITE.erl +++ b/lib/test_server/test/erl2html2_SUITE.erl @@ -161,7 +161,7 @@ convert_module(Mod,Config) -> Src = filename:join(DataDir,Mod++".erl"), Dst = filename:join(PrivDir,Mod++".erl.html"), io:format("<a href=\"~s\">~s</a>\n",[Src,filename:basename(Src)]), - ok = erl2html2:convert(Src, Dst, "<html><body>"), + ok = erl2html2:convert(Src, Dst, [], "<html><body>"), io:format("<a href=\"~s\">~s</a>\n",[Dst,filename:basename(Dst)]), {Src,Dst}. diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml index 07ffa65e3d..914baa7977 100644 --- a/lib/tools/doc/src/cover.xml +++ b/lib/tools/doc/src/cover.xml @@ -5,7 +5,7 @@ <header> <copyright> <year>2001</year> - <year>2013</year> + <year>2015</year> <holder>Ericsson AB, All Rights Reserved</holder> </copyright> <legalnotice> @@ -138,17 +138,18 @@ </desc> </func> <func> - <name>compile(ModFile) -> Result</name> - <name>compile(ModFile, Options) -> Result</name> - <name>compile_module(ModFile) -> Result</name> - <name>compile_module(ModFile, Options) -> Result</name> - <fsummary>Compile a module for Cover analysis.</fsummary> + <name>compile(ModFiles) -> Result | [Result]</name> + <name>compile(ModFiles, Options) -> Result | [Result]</name> + <name>compile_module(ModFiles) -> Result | [Result]</name> + <name>compile_module(ModFiles, Options) -> Result | [Result]</name> + <fsummary>Compile one or more modules for Cover analysis.</fsummary> <type> + <v>ModFiles = ModFile | [ModFile]</v> <v>ModFile = Module | File</v> <v> Module = atom()</v> <v> File = string()</v> <v>Options = [Option]</v> - <v> Option = {i,Dir} | {d,Macro} | {d,Macro,Value}</v> + <v> Option = {i,Dir} | {d,Macro} | {d,Macro,Value} | export_all</v> <d>See <c>compile:file/2.</c></d> <v>Result = {ok,Module} | {error,File} | {error,not_main_node}</v> </type> @@ -165,6 +166,9 @@ returns <c>{ok,Module}</c>. Otherwise the function returns <c>{error,File}</c>. Errors and warnings are printed as they occur.</p> + <p>If a list of <c>ModFiles</c> is given as input, a list + of <c>Result</c> will be returned. The order of the returned + list is undefined.</p> <p>Note that the internal database is (re-)initiated during the compilation, meaning any previously collected coverage data for the module will be lost.</p> @@ -194,9 +198,10 @@ </desc> </func> <func> - <name>compile_beam(ModFile) -> Result</name> - <fsummary>Compile a module for Cover analysis, using an existing beam.</fsummary> + <name>compile_beam(ModFiles) -> Result | [Result]</name> + <fsummary>Compile one or more modules for Cover analysis, using existing beam(s).</fsummary> <type> + <v>ModFiles = ModFile | [ModFile]</v> <v>ModFile = Module | BeamFile</v> <v> Module = atom()</v> <v> BeamFile = string()</v> @@ -229,6 +234,9 @@ returned.</p> <p><c>{error,BeamFile}</c> is returned if the compiled code can not be loaded on the node.</p> + <p>If a list of <c>ModFiles</c> is given as input, a list + of <c>Result</c> will be returned. 
The order of the returned + list is undefined.</p> </desc> </func> <func> @@ -251,16 +259,21 @@ </desc> </func> <func> - <name>analyse(Module) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Analysis) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Level) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}</name> - <fsummary>Analyse a Cover compiled module.</fsummary> + <name>analyse() -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Analysis) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Level) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Analysis) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Analysis, Level) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Analysis, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <fsummary>Analyse one or more Cover compiled modules.</fsummary> <type> - <v>Module = atom()</v> + <v>Modules = Module | [Module]</v> + <v>Module = atom() </v> <v>Analysis = coverage | calls</v> <v>Level = line | clause | function | module</v> - <v>Answer = {Module,Value} | [{Item,Value}]</v> + <v>OneResult = {ok,{Module,Value}} | {ok,[{Item,Value}]} | {error, Error}</v> <v> Item = Line | Clause | Function</v> <v> Line = {M,N}</v> <v> Clause = {M,F,A,C}</v> @@ -269,49 +282,67 @@ <v> N = A = C = integer()</v> <v> Value = {Cov,NotCov} | Calls</v> <v> Cov = NotCov = Calls = integer()</v> - <v>Error = {not_cover_compiled,Module} | not_main_node</v> + <v> Error = {not_cover_compiled,Module}</v> + <v>Ok = [{Module,Value}] | [{Item,Value}]</v> + <v>Fail = [Error]</v> </type> <desc> - <p>Performs analysis of a Cover compiled module <c>Module</c>, as + <p>Performs analysis of one or more Cover compiled modules, as specified by <c>Analysis</c> and <c>Level</c> (see above), by examining the contents of the internal database.</p> <p><c>Analysis</c> defaults to <c>coverage</c> and <c>Level</c> defaults to <c>function</c>.</p> - <p>If <c>Module</c> is not Cover compiled, the function returns - <c>{error,{not_cover_compiled,Module}}</c>.</p> - <p>HINT: It is possible to issue multiple analyse_to_file commands at - the same time. </p> + <p>If <c>Modules</c> is an atom (one module), the return will + be <c>OneResult</c>, else the return will be + <c>{result,Ok,Fail}</c>.</p> + <p>If <c>Modules</c> is not given, all modules that have data + in the cover data table, are analysed. 
Note that this + includes both cover compiled modules and imported + modules.</p> + <p>If a given module is not Cover compiled, this is indicated + by the error reason <c>{not_cover_compiled,Module}</c>.</p> </desc> </func> <func> - <name>analyse_to_file(Module) -> </name> - <name>analyse_to_file(Module,Options) -> </name> - <name>analyse_to_file(Module, OutFile) -> </name> - <name>analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}</name> - <fsummary>Detailed coverage analysis of a Cover compiled module.</fsummary> + <name>analyse_to_file() -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Modules) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Options) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Modules,Options) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name> + <fsummary>Detailed coverage analysis of one or more Cover compiled modules.</fsummary> <type> + <v>Modules = Module | [Module]</v> <v>Module = atom()</v> - <v>OutFile = string()</v> + <v>OutFile = OutDir = string()</v> <v>Options = [Option]</v> - <v>Option = html</v> - <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v>Option = html | {outfile,OutFile} | {outdir,OutDir}</v> + <v>Answer = {ok,OutFile} | {error,Error}</v> + <v>Ok = [OutFile]</v> + <v>Fail = [Error]</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module}</v> <v> File = string()</v> <v> Reason = term()</v> </type> <desc> - <p>Makes a copy <c>OutFile</c> of the source file for a module - <c>Module</c>, where it for each executable line is specified + <p>Makes copies of the source file for the given modules, + where it for each executable line is specified how many times it has been executed.</p> <p>The output file <c>OutFile</c> defaults to <c>Module.COVER.out</c>, or <c>Module.COVER.html</c> if the option <c>html</c> was used.</p> - <p>If <c>Module</c> is not Cover compiled, the function returns - <c>{error,{not_cover_compiled,Module}}</c>.</p> + <p>If <c>Modules</c> is an atom (one module), the return will + be <c>Answer</c>, else the return will be a + list, <c>{result,Ok,Fail}</c>.</p> + <p>If <c>Modules</c> is not given, all modules that have data + in the cover data table, are analysed. Note that this + includes both cover compiled modules and imported + modules.</p> + <p>If a module is not Cover compiled, this is indicated by the + error reason <c>{not_cover_compiled,Module}</c>.</p> <p>If the source file and/or the output file cannot be opened using <c>file:open/2</c>, the function returns <c>{error,{file,File,Reason}}</c> where <c>File</c> is the file name and <c>Reason</c> is the error reason.</p> - <p>If the module was cover compiled from the <c>.beam</c> + <p>If a module was cover compiled from the <c>.beam</c> file, i.e. using <c>compile_beam/1</c> or <c>compile_beam_directory/0,1</c>, it is assumed that the source code can be found in the same directory as the @@ -322,10 +353,8 @@ joining <c>../src</c> and the tail of the compiled path below a trailing <c>src</c> component, then the compiled path itself. - If no source code is found, - <c>{error,no_source_code_found}</c> is returned.</p> - <p>HINT: It is possible to issue multiple analyse_to_file commands at - the same time. 
</p> + If no source code is found, this is indicated by the error reason + <c>{no_source_code_found,Module}</c>.</p> </desc> </func> <func> @@ -339,7 +368,7 @@ <v>OutFile = string()</v> <v>Options = [Option]</v> <v>Option = html</v> - <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module} | not_main_node</v> <v> File = string()</v> <v> Reason = term()</v> </type> diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 31754015f7..9a76084bc0 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2013. All Rights Reserved. +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -77,8 +77,11 @@ compile/1, compile/2, compile_module/1, compile_module/2, compile_directory/0, compile_directory/1, compile_directory/2, compile_beam/1, compile_beam_directory/0, compile_beam_directory/1, - analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3, + analyse/0, analyse/1, analyse/2, analyse/3, + analyze/0, analyze/1, analyze/2, analyze/3, + analyse_to_file/0, analyse_to_file/1, analyse_to_file/2, analyse_to_file/3, + analyze_to_file/0, analyze_to_file/1, analyze_to_file/2, analyze_to_file/3, async_analyse_to_file/1,async_analyse_to_file/2, async_analyse_to_file/3, async_analyze_to_file/1, @@ -109,6 +112,7 @@ line = '_' % integer() }). -define(BUMP_REC_NAME,bump). +-define(CHUNK_SIZE, 20000). -record(vars, {module, % atom() Module name @@ -181,10 +185,11 @@ start(Node) when is_atom(Node) -> start(Nodes) -> call({start_nodes,remove_myself(Nodes,[])}). -%% compile(ModFile) -> -%% compile(ModFile, Options) -> -%% compile_module(ModFile) -> Result -%% compile_module(ModFile, Options) -> Result +%% compile(ModFiles) -> +%% compile(ModFiles, Options) -> +%% compile_module(ModFiles) -> Result +%% compile_module(ModFiles, Options) -> Result +%% ModFiles = ModFile | [ModFile] %% ModFile = Module | File %% Module = atom() %% File = string() @@ -198,18 +203,27 @@ compile(ModFile, Options) -> compile_module(ModFile) when is_atom(ModFile); is_list(ModFile) -> compile_module(ModFile, []). -compile_module(Module, Options) when is_atom(Module), is_list(Options) -> - compile_module(atom_to_list(Module), Options); -compile_module(File, Options) when is_list(File), is_list(Options) -> - WithExt = case filename:extension(File) of - ".erl" -> - File; - _ -> - File++".erl" - end, - AbsFile = filename:absname(WithExt), - [R] = compile_modules([AbsFile], Options), - R. +compile_module(ModFile, Options) when is_atom(ModFile); + is_list(ModFile), is_integer(hd(ModFile)) -> + [R] = compile_module([ModFile], Options), + R; +compile_module(ModFiles, Options) when is_list(Options) -> + AbsFiles = + [begin + File = + case ModFile of + _ when is_atom(ModFile) -> atom_to_list(ModFile); + _ when is_list(ModFile) -> ModFile + end, + WithExt = case filename:extension(File) of + ".erl" -> + File; + _ -> + File++".erl" + end, + filename:absname(WithExt) + end || ModFile <- ModFiles], + compile_modules(AbsFiles, Options). 
%% compile_directory() -> %% compile_directory(Dir) -> @@ -240,13 +254,14 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) -> compile_modules(Files,Options) -> Options2 = filter_options(Options), - compile_modules(Files,Options2,[]). + %% compile_modules(Files,Options2,[]). + call({compile, Files, Options2}). -compile_modules([File|Files], Options, Result) -> - R = call({compile, File, Options}), - compile_modules(Files,Options,[R|Result]); -compile_modules([],_Opts,Result) -> - lists:reverse(Result). +%% compile_modules([File|Files], Options, Result) -> +%% R = call({compile, File, Options}), +%% compile_modules(Files,Options,[R|Result]); +%% compile_modules([],_Opts,Result) -> +%% lists:reverse(Result). filter_options(Options) -> lists:filter(fun(Option) -> @@ -264,30 +279,17 @@ filter_options(Options) -> %% ModFile - see compile/1 %% Result - see compile/1 %% Reason = non_existing | already_cover_compiled -compile_beam(Module) when is_atom(Module) -> - case code:which(Module) of - non_existing -> +compile_beam(ModFile0) when is_atom(ModFile0); + is_list(ModFile0), is_integer(hd(ModFile0)) -> + case compile_beams([ModFile0]) of + [{error,{non_existing,_}}] -> + %% Backwards compatibility {error,non_existing}; - ?TAG -> - compile_beam(Module,?TAG); - File -> - compile_beam(Module,File) + [Result] -> + Result end; -compile_beam(File) when is_list(File) -> - {WithExt,WithoutExt} - = case filename:rootname(File,".beam") of - File -> - {File++".beam",File}; - Rootname -> - {File,Rootname} - end, - AbsFile = filename:absname(WithExt), - Module = list_to_atom(filename:basename(WithoutExt)), - compile_beam(Module,AbsFile). - -compile_beam(Module,File) -> - call({compile_beam,Module,File}). - +compile_beam(ModFiles) when is_list(ModFiles) -> + compile_beams(ModFiles). %% compile_beam_directory(Dir) -> [Result] | {error,Reason} @@ -312,19 +314,52 @@ compile_beam_directory(Dir) when is_list(Dir) -> Error end. -compile_beams(Files) -> - compile_beams(Files,[]). -compile_beams([File|Files],Result) -> - R = compile_beam(File), - compile_beams(Files,[R|Result]); -compile_beams([],Result) -> - lists:reverse(Result). - +compile_beams(ModFiles0) -> + ModFiles = get_mods_and_beams(ModFiles0,[]), + call({compile_beams,ModFiles}). -%% analyse(Module) -> -%% analyse(Module, Analysis) -> -%% analyse(Module, Level) -> -%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error} +get_mods_and_beams([Module|ModFiles],Acc) when is_atom(Module) -> + case code:which(Module) of + non_existing -> + get_mods_and_beams(ModFiles,[{error,{non_existing,Module}}|Acc]); + File -> + get_mods_and_beams([{Module,File}|ModFiles],Acc) + end; +get_mods_and_beams([File|ModFiles],Acc) when is_list(File) -> + {WithExt,WithoutExt} + = case filename:rootname(File,".beam") of + File -> + {File++".beam",File}; + Rootname -> + {File,Rootname} + end, + AbsFile = filename:absname(WithExt), + Module = list_to_atom(filename:basename(WithoutExt)), + get_mods_and_beams([{Module,AbsFile}|ModFiles],Acc); +get_mods_and_beams([{Module,File}|ModFiles],Acc) -> + %% Check for duplicates + case lists:keyfind(Module,2,Acc) of + {ok,Module,File} -> + %% Duplicate, but same file so ignore + get_mods_and_beams(ModFiles,Acc); + {ok,Module,_OtherFile} -> + %% Duplicate and differnet file - error + get_mods_and_beams(ModFiles,[{error,{duplicate,Module}}|Acc]); + _ -> + get_mods_and_beams(ModFiles,[{ok,Module,File}|Acc]) + end; +get_mods_and_beams([],Acc) -> + lists:reverse(Acc). 
+ + +%% analyse(Modules) -> +%% analyse(Analysis) -> +%% analyse(Level) -> +%% analyse(Modules, Analysis) -> +%% analyse(Modules, Level) -> +%% analyse(Analysis, Level) +%% analyse(Modules, Analysis, Level) -> {ok,Answer} | {error,Error} +%% Modules = Module | [Module] %% Module = atom() %% Analysis = coverage | calls %% Level = line | clause | function | module @@ -337,48 +372,74 @@ compile_beams([],Result) -> %% N = A = C = integer() %% Value = {Cov,NotCov} | Calls %% Cov = NotCov = Calls = integer() -%% Error = {not_cover_compiled,Module} +%% Error = {not_cover_compiled,Module} | not_main_node +-define(is_analysis(__A__), + (__A__=:=coverage orelse __A__=:=calls)). +-define(is_level(__L__), + (__L__=:=line orelse __L__=:=clause orelse + __L__=:=function orelse __L__=:=module)). +analyse() -> + analyse('_'). + +analyse(Analysis) when ?is_analysis(Analysis) -> + analyse('_', Analysis); +analyse(Level) when ?is_level(Level) -> + analyse('_', Level); analyse(Module) -> analyse(Module, coverage). -analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls -> + +analyse(Analysis, Level) when ?is_analysis(Analysis) andalso + ?is_level(Level) -> + analyse('_', Analysis, Level); +analyse(Module, Analysis) when ?is_analysis(Analysis) -> analyse(Module, Analysis, function); -analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function; - Level=:=module -> +analyse(Module, Level) when ?is_level(Level) -> analyse(Module, coverage, Level). -analyse(Module, Analysis, Level) when is_atom(Module), - Analysis=:=coverage; Analysis=:=calls, - Level=:=line; Level=:=clause; - Level=:=function; Level=:=module -> + +analyse(Module, Analysis, Level) when ?is_analysis(Analysis), + ?is_level(Level) -> call({{analyse, Analysis, Level}, Module}). +analyze() -> analyse( ). analyze(Module) -> analyse(Module). analyze(Module, Analysis) -> analyse(Module, Analysis). analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level). -%% analyse_to_file(Module) -> -%% analyse_to_file(Module, Options) -> -%% analyse_to_file(Module, OutFile) -> -%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error} +%% analyse_to_file() -> +%% analyse_to_file(Modules) -> +%% analyse_to_file(Modules, Options) -> +%% Modules = Module | [Module] %% Module = atom() %% OutFile = string() %% Options = [Option] -%% Option = html +%% Option = html | {outfile,filename()} | {outdir,dirname()} %% Error = {not_cover_compiled,Module} | no_source_code_found | %% {file,File,Reason} %% File = string() %% Reason = term() -analyse_to_file(Module) when is_atom(Module) -> - analyse_to_file(Module, outfilename(Module,[]), []). -analyse_to_file(Module, []) when is_atom(Module) -> - analyse_to_file(Module, outfilename(Module,[]), []); -analyse_to_file(Module, Options) when is_atom(Module), - is_list(Options), is_atom(hd(Options)) -> - analyse_to_file(Module, outfilename(Module,Options), Options); -analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) -> - analyse_to_file(Module, OutFile, []). -analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) -> - call({{analyse_to_file, OutFile, Options}, Module}). - +%% +%% Kept for backwards compatibility: +%% analyse_to_file(Modules, OutFile) -> +%% analyse_to_file(Modules, OutFile, Options) -> {ok,OutFile} | {error,Error} +analyse_to_file() -> + analyse_to_file('_'). +analyse_to_file(Arg) -> + case is_options(Arg) of + true -> + analyse_to_file('_',Arg); + false -> + analyse_to_file(Arg,[]) + end. 
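Note: with the dispatch clauses above, analyse/0..3 accepts modules, an analysis, a level, or combinations of them; a sketch of the possible returns, assuming mod_a and mod_b (placeholder names) are cover compiled:

    {result, _All, _Fail}  = cover:analyse(),                    %% every module, coverage per function
    {result, _PerLine, _}  = cover:analyse(calls, line),         %% every module, calls per line
    {ok, {mod_a, {_Cov, _NotCov}}} = cover:analyse(mod_a, module),   %% single module, old-style answer
    {result, _Ok, _Errors} = cover:analyse([mod_a, mod_b], coverage, function).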
+analyse_to_file(Module, OutFile) when is_list(OutFile), is_integer(hd(OutFile)) -> + %% Kept for backwards compatibility + analyse_to_file(Module, [{outfile,OutFile}]); +analyse_to_file(Module, Options) when is_list(Options) -> + call({{analyse_to_file, Options}, Module}). +analyse_to_file(Module, OutFile, Options) when is_list(OutFile) -> + %% Kept for backwards compatibility + analyse_to_file(Module,[{outfile,OutFile}|Options]). + +analyze_to_file() -> analyse_to_file(). analyze_to_file(Module) -> analyse_to_file(Module). analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut). analyze_to_file(Module, OutFile, Options) -> @@ -391,6 +452,15 @@ async_analyse_to_file(Module, OutFileOrOpts) -> async_analyse_to_file(Module, OutFile, Options) -> do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). +is_options([html]) -> + true; % this is not 100% safe - could be a module named html... +is_options([html|Opts]) -> + is_options(Opts); +is_options([{Opt,_}|_]) when Opt==outfile; Opt==outdir -> + true; +is_options(_) -> + false. + do_spawn(M,F,A) -> spawn_link(fun() -> case apply(M,F,A) of @@ -408,13 +478,16 @@ async_analyze_to_file(Module, OutFileOrOpts) -> async_analyze_to_file(Module, OutFile, Options) -> async_analyse_to_file(Module, OutFile, Options). -outfilename(Module,Opts) -> - case lists:member(html,Opts) of - true -> - atom_to_list(Module)++".COVER.html"; - false -> - atom_to_list(Module)++".COVER.out" - end. +outfilename(undefined, Module, HTML) -> + outfilename(Module, HTML); +outfilename(OutDir, Module, HTML) -> + filename:join(OutDir, outfilename(Module, HTML)). + +outfilename(Module, true) -> + atom_to_list(Module)++".COVER.html"; +outfilename(Module, false) -> + atom_to_list(Module)++".COVER.out". + %% export(File) %% export(File,Module) -> ok | {error,Reason} @@ -559,7 +632,7 @@ init_main(Starter) -> ,{write_concurrency, true} ]), ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), - ets:new(?BINARY_TABLE, [set, named_table]), + ets:new(?BINARY_TABLE, [set, public, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]), net_kernel:monitor_nodes(true), @@ -573,55 +646,19 @@ main_process_loop(State) -> reply(From, {ok,StartedNodes}), main_process_loop(State1); - {From, {compile, File, Options}} -> - case do_compile(File, Options) of - {ok, Module} -> - remote_load_compiled(State#main_state.nodes,[{Module,File}]), - reply(From, {ok, Module}), - Compiled = add_compiled(Module, File, - State#main_state.compiled), - Imported = remove_imported(Module,State#main_state.imported), - NewState = State#main_state{compiled = Compiled, - imported = Imported}, - %% This module (cover) could have been reloaded. Make - %% sure we run the new code. - ?MODULE:main_process_loop(NewState); - error -> - reply(From, {error, File}), - main_process_loop(State) - end; + {From, {compile, Files, Options}} -> + {R,S} = do_compile(Files, Options, State), + reply(From,R), + %% This module (cover) could have been reloaded. Make + %% sure we run the new code. 
+ ?MODULE:main_process_loop(S); - {From, {compile_beam, Module, BeamFile0}} -> - Compiled0 = State#main_state.compiled, - case get_beam_file(Module,BeamFile0,Compiled0) of - {ok,BeamFile} -> - UserOptions = get_compile_options(Module,BeamFile), - {Reply,Compiled} = - case do_compile_beam(Module,BeamFile,UserOptions) of - {ok, Module} -> - remote_load_compiled(State#main_state.nodes, - [{Module,BeamFile}]), - C = add_compiled(Module,BeamFile,Compiled0), - {{ok,Module},C}; - error -> - {{error, BeamFile}, Compiled0}; - {error,Reason} -> % no abstract code - {{error, {Reason, BeamFile}}, Compiled0} - end, - reply(From,Reply), - Imported = remove_imported(Module,State#main_state.imported), - NewState = State#main_state{compiled = Compiled, - imported = Imported}, - %% This module (cover) could have been reloaded. Make - %% sure we run the new code. - ?MODULE:main_process_loop(NewState); - {error,no_beam} -> - %% The module has first been compiled from .erl, and now - %% someone tries to compile it from .beam - reply(From, - {error,{already_cover_compiled,no_beam_found,Module}}), - main_process_loop(State) - end; + {From, {compile_beams, ModsAndFiles}} -> + {R,S} = do_compile_beams(ModsAndFiles,State), + reply(From,R), + %% This module (cover) could have been reloaded. Make + %% sure we run the new code. + ?MODULE:main_process_loop(S); {From, {export,OutFile,Module}} -> spawn(fun() -> @@ -706,6 +743,16 @@ main_process_loop(State) -> unregister(?SERVER), reply(From, ok); + {From, {{analyse, Analysis, Level}, '_'}} -> + R = analyse_all(Analysis, Level, State), + reply(From, R), + main_process_loop(State); + + {From, {{analyse, Analysis, Level}, Modules}} when is_list(Modules) -> + R = analyse_list(Modules, Analysis, Level, State), + reply(From, R), + main_process_loop(State); + {From, {{analyse, Analysis, Level}, Module}} -> S = try Loaded = is_loaded(Module, State), @@ -722,15 +769,23 @@ main_process_loop(State) -> end, main_process_loop(S); - {From, {{analyse_to_file, OutFile, Opts},Module}} -> + {From, {{analyse_to_file, Opts},'_'}} -> + R = analyse_all_to_file(Opts, State), + reply(From,R), + main_process_loop(State); + + {From, {{analyse_to_file, Opts},Modules}} when is_list(Modules) -> + R = analyse_list_to_file(Modules, Opts, State), + reply(From,R), + main_process_loop(State); + + {From, {{analyse_to_file, Opts},Module}} -> S = try Loaded = is_loaded(Module, State), spawn(fun() -> - ?SPAWN_DBG(analyse_to_file, - {Module,OutFile, Opts}), + ?SPAWN_DBG(analyse_to_file,{Module,Opts}), do_parallel_analysis_to_file( - Module, OutFile, Opts, - Loaded, From, State) + Module, Opts, Loaded, From, State) end), State catch throw:Reason -> @@ -848,11 +903,15 @@ remote_process_loop(State) -> {remote,collect,Module,CollectorPid} -> self() ! {remote,collect,Module,CollectorPid, ?SERVER}; - {remote,collect,Module,CollectorPid,From} -> + {remote,collect,Modules0,CollectorPid,From} -> + Modules = case Modules0 of + '_' -> [M || {M,_} <- State#remote_state.compiled]; + _ -> Modules0 + end, spawn(fun() -> ?SPAWN_DBG(remote_collect, - {Module, CollectorPid, From}), - do_collect(Module, CollectorPid, From) + {Modules, CollectorPid, From}), + do_collect(Modules, CollectorPid, From) end), remote_process_loop(State); @@ -893,39 +952,51 @@ remote_process_loop(State) -> end. 
-do_collect(Module, CollectorPid, From) -> - AllMods = - case Module of - '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); - _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) - end, - - %% Sending clause by clause in order to avoid large lists +do_collect(Modules, CollectorPid, From) -> pmap( - fun({_Mod,Clauses}) -> - lists:map(fun(Clause) -> - send_collected_data(Clause, CollectorPid) - end,Clauses) - end,AllMods), + fun(Module) -> + Pattern = {#bump{module=Module, _='_'}, '$1'}, + MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + send_chunks(Match, CollectorPid, []) + end,Modules), CollectorPid ! done, remote_reply(From, ok). -send_collected_data({M,F,A,C,_L}, CollectorPid) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps}. +send_chunks('$end_of_table', _CollectorPid, Mons) -> + get_downs(Mons); +send_chunks({Chunk,Continuation}, CollectorPid, Mons) -> + Mon = spawn_monitor( + fun() -> + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Chunk) end), + send_chunk(CollectorPid,Chunk), + send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]). + +send_chunk(CollectorPid,Chunk) -> + CollectorPid ! {chunk,Chunk,self()}, + receive continue -> ok end. + +get_downs([]) -> + ok; +get_downs(Mons) -> + receive + {'DOWN', Ref, _Type, Pid, _Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + get_downs(lists:delete({Pid,Ref},Mons)); + false -> + %% This should be handled somewhere else + self() ! Down, + get_downs(Mons) + end + end. -reload_originals([{Module,_File}|Compiled]) -> - do_reload_original(Module), - reload_originals(Compiled); -reload_originals([]) -> - ok. +reload_originals(Compiled) -> + Modules = [M || {M,_} <- Compiled], + pmap(fun do_reload_original/1, Modules). do_reload_original(Module) -> case code:which(Module) of @@ -1068,15 +1139,40 @@ remote_load_compiled(_Nodes, [], [], _ModNum) -> ok; remote_load_compiled(Nodes, Compiled, Acc, ModNum) when Compiled == []; ModNum == ?MAX_MODS -> + RemoteLoadData = get_downs_r(Acc), lists:foreach( fun(Node) -> - remote_call(Node,{remote,load_compiled,Acc}) + remote_call(Node,{remote,load_compiled,RemoteLoadData}) end, Nodes), remote_load_compiled(Nodes, Compiled, [], 0); remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> remote_load_compiled( - Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1). + Nodes, Rest, + [spawn_job_r(fun() -> get_data_for_remote_loading(MF) end) | Acc], + ModNum + 1). + +spawn_job_r(Fun) -> + spawn_monitor(fun() -> exit(Fun()) end). + +get_downs_r([]) -> + []; +get_downs_r(Mons) -> + receive + {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} -> + [R|get_downs_r(lists:delete({Pid,Ref},Mons))]; + {'DOWN', Ref, _Type, Pid, Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + %% Something went really wrong - don't hang! + exit(Reason); + false -> + %% This should be handled somewhere else + self() ! Down, + get_downs_r(Mons) + end + end. + %% Read all data needed for loading a cover compiled module on a remote node %% Binary is the beam code for the module and InitialTable is the initial @@ -1113,11 +1209,11 @@ remote_reset(Module,Nodes) -> Nodes). 
%% Collect data from remote nodes - used for analyse or stop(Node) -remote_collect(Module,Nodes,Stop) -> +remote_collect(Modules,Nodes,Stop) -> pmap(fun(Node) -> ?SPAWN_DBG(remote_collect, - {Module, Nodes, Stop}), - do_collection(Node, Module, Stop) + {Modules, Nodes, Stop}), + do_collection(Node, Modules, Stop) end, Nodes). @@ -1138,8 +1234,9 @@ do_collection(Node, Module, Stop) -> collector_proc() -> ?SPAWN_DBG(collector_proc, []), receive - {chunk,Chunk} -> + {chunk,Chunk,From} -> insert_in_collection_table(Chunk), + From ! continue, collector_proc(); done -> ok @@ -1259,6 +1356,19 @@ add_compiled(Module, File, [H|Compiled]) -> add_compiled(Module, File, []) -> [{Module,File}]. +are_loaded([Module|Modules], State, Loaded, Imported, Error) -> + try is_loaded(Module,State) of + {loaded,File} -> + are_loaded(Modules, State, [{Module,File}|Loaded], Imported, Error); + {imported,File,_} -> + are_loaded(Modules, State, Loaded, [{Module,File}|Imported], Error) + catch throw:_ -> + are_loaded(Modules, State, Loaded, Imported, + [{not_cover_compiled,Module}|Error]) + end; +are_loaded([], _State, Loaded, Imported, Error) -> + {Loaded, Imported, Error}. + is_loaded(Module, State) -> case get_file(Module, State#main_state.compiled) of {ok, File} -> @@ -1333,18 +1443,75 @@ get_compiled_still_loaded(Nodes,Compiled0) -> %%%--Compilation--------------------------------------------------------- -%% do_compile(File, Options) -> {ok,Module} | {error,Error} -do_compile(File, UserOptions) -> +do_compile_beams(ModsAndFiles, State) -> + Result0 = pmap(fun({ok,Module,File}) -> + do_compile_beam(Module,File,State); + (Error) -> + Error + end, + ModsAndFiles), + Compiled = [{M,F} || {ok,M,F} <- Result0], + remote_load_compiled(State#main_state.nodes,Compiled), + fix_state_and_result(Result0,State,[]). + +do_compile_beam(Module,BeamFile0,State) -> + case get_beam_file(Module,BeamFile0,State#main_state.compiled) of + {ok,BeamFile} -> + UserOptions = get_compile_options(Module,BeamFile), + case do_compile_beam1(Module,BeamFile,UserOptions) of + {ok, Module} -> + {ok,Module,BeamFile}; + error -> + {error, BeamFile}; + {error,Reason} -> % no abstract code + {error, {Reason, BeamFile}} + end; + {error,no_beam} -> + %% The module has first been compiled from .erl, and now + %% someone tries to compile it from .beam + {error,{already_cover_compiled,no_beam_found,Module}} + end. + +fix_state_and_result([{ok,Module,BeamFile}|Rest],State,Acc) -> + Compiled = add_compiled(Module,BeamFile,State#main_state.compiled), + Imported = remove_imported(Module,State#main_state.imported), + NewState = State#main_state{compiled=Compiled,imported=Imported}, + fix_state_and_result(Rest,NewState,[{ok,Module}|Acc]); +fix_state_and_result([Error|Rest],State,Acc) -> + fix_state_and_result(Rest,State,[Error|Acc]); +fix_state_and_result([],State,Acc) -> + {lists:reverse(Acc),State}. + + +do_compile(Files, Options, State) -> + Result0 = pmap(fun(File) -> + do_compile(File, Options) + end, + Files), + Compiled = [{M,F} || {ok,M,F} <- Result0], + remote_load_compiled(State#main_state.nodes,Compiled), + fix_state_and_result(Result0,State,[]). + +do_compile(File, Options) -> + case do_compile1(File, Options) of + {ok, Module} -> + {ok,Module,File}; + error -> + {error,File} + end. 
+ +%% do_compile1(File, Options) -> {ok,Module} | error +do_compile1(File, UserOptions) -> Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions, case compile:file(File, Options) of {ok, Module, Binary} -> - do_compile_beam(Module,Binary,UserOptions); + do_compile_beam1(Module,Binary,UserOptions); error -> error end. %% Beam is a binary or a .beam file name -do_compile_beam(Module,Beam,UserOptions) -> +do_compile_beam1(Module,Beam,UserOptions) -> %% Clear database do_clear(Module), @@ -1915,10 +2082,21 @@ common_elems(L1, L2) -> collect(Nodes) -> %% local node AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE), - pmap(fun move_modules/1,AllClauses), - + Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,AllClauses) end), + + %% remote nodes + Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end), + get_downs([Mon1,Mon2]). + +%% Collect data for a list of modules +collect(Modules,Nodes) -> + MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Modules], + Clauses = ets:select(?COVER_CLAUSE_TABLE,MS), + Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,Clauses) end), + %% remote nodes - remote_collect('_',Nodes,false). + Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end), + get_downs([Mon1,Mon2]). %% Collect data for one module collect(Module,Clauses,Nodes) -> @@ -1926,25 +2104,26 @@ collect(Module,Clauses,Nodes) -> move_modules({Module,Clauses}), %% remote nodes - remote_collect(Module,Nodes,false). + remote_collect([Module],Nodes,false). %% When analysing, the data from the local ?COVER_TABLE is moved to the %% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE move_modules({Module,Clauses}) -> ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}), - move_clauses(Clauses). + Pattern = {#bump{module=Module, _='_'}, '_'}, + MatchSpec = [{Pattern,[],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + do_move_module(Match). -move_clauses([{M,F,A,C,_L}|Clauses]) -> - Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE,Pattern), +do_move_module({Bumps,Continuation}) -> lists:foreach(fun({Key,Val}) -> ets:insert(?COVER_TABLE, {Key,0}), insert_in_collection_table(Key,Val) end, Bumps), - move_clauses(Clauses); -move_clauses([]) -> + do_move_module(ets:select(Continuation)); +do_move_module('$end_of_table') -> ok. %% Given a .beam file, find the .erl file. Look first in same directory as @@ -2002,6 +2181,26 @@ splice(BeamDir, SrcFile) -> revsplit(Path) -> lists:reverse(filename:split(Path)). +analyse_list(Modules, Analysis, Level, State) -> + {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []), + Loaded = [M || {M,_} <- LoadedMF], + Imported = [M || {M,_} <- ImportedMF], + collect(Loaded, State#main_state.nodes), + MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Loaded ++ Imported], + AllClauses = ets:select(?COLLECTION_CLAUSE_TABLE,MS), + Fun = fun({Module,Clauses}) -> + do_analyse(Module, Analysis, Level, Clauses) + end, + {result, lists:flatten(pmap(Fun, AllClauses)), Error}. + +analyse_all(Analysis, Level, State) -> + collect(State#main_state.nodes), + AllClauses = ets:tab2list(?COLLECTION_CLAUSE_TABLE), + Fun = fun({Module,Clauses}) -> + do_analyse(Module, Analysis, Level, Clauses) + end, + {result, lists:flatten(pmap(Fun, AllClauses)), []}. 
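Note: collection, reset and analysis above all switch from clause-by-clause ets:match_object calls to chunked ets:select over the bump counters; the match specification in isolation (module name is a placeholder):

    Pattern   = {#bump{module = mod_a, _ = '_'}, '$1'},
    MatchSpec = [{Pattern, [{'=/=', '$1', 0}], ['$_']}],   %% skip counters still at zero
    case ets:select(?COVER_TABLE, MatchSpec, ?CHUNK_SIZE) of
        '$end_of_table'       -> done;                     %% nothing recorded for mod_a
        {Bumps, Continuation} -> {Bumps, Continuation}     %% at most ?CHUNK_SIZE bumps; next chunk via ets:select(Continuation)
    end.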
+ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> analyse_info(Module,State#main_state.imported), C = case Loaded of @@ -2016,7 +2215,7 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> Clauses end, R = do_analyse(Module, Analysis, Level, C), - reply(From, R). + reply(From, {ok,R}). %% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error} %% Clauses = [{Module,Function,Arity,Clause,Lines}] @@ -2035,37 +2234,44 @@ do_analyse(Module, Analysis, line, _Clauses) -> {{Module,L}, N} end end, - Answer = lists:keysort(1, lists:map(Fun, Bumps)), - {ok, Answer}; -do_analyse(_Module, Analysis, clause, Clauses) -> - Fun = case Analysis of - coverage -> - fun({M,F,A,C,Ls}) -> - Pattern = {#bump{module=M,function=F,arity=A, - clause=C},0}, - Bumps = ets:match_object(?COLLECTION_TABLE, Pattern), - NotCov = length(Bumps), - {{M,F,A,C}, {Ls-NotCov, NotCov}} - end; - calls -> - fun({M,F,A,C,_Ls}) -> - Pattern = {#bump{module=M,function=F,arity=A, - clause=C},'_'}, - Bumps = ets:match_object(?COLLECTION_TABLE, Pattern), - {_Bump, Calls} = hd(lists:keysort(1, Bumps)), - {{M,F,A,C}, Calls} - end - end, - Answer = lists:map(Fun, Clauses), - {ok, Answer}; + lists:keysort(1, lists:map(Fun, Bumps)); +do_analyse(Module, Analysis, clause, _Clauses) -> + Pattern = {#bump{module=Module},'_'}, + Bumps = lists:keysort(1,ets:match_object(?COLLECTION_TABLE, Pattern)), + analyse_clause(Analysis,Bumps); do_analyse(Module, Analysis, function, Clauses) -> - {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses), - Result = merge_clauses(ClauseResult, merge_fun(Analysis)), - {ok, Result}; + ClauseResult = do_analyse(Module, Analysis, clause, Clauses), + merge_clauses(ClauseResult, merge_fun(Analysis)); do_analyse(Module, Analysis, module, Clauses) -> - {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses), + FunctionResult = do_analyse(Module, Analysis, function, Clauses), Result = merge_functions(FunctionResult, merge_fun(Analysis)), - {ok, {Module,Result}}. + {Module,Result}. + +analyse_clause(_,[]) -> + []; +analyse_clause(coverage, + [{#bump{module=M,function=F,arity=A,clause=C},_}|_]=Bumps) -> + analyse_clause_cov(Bumps,{M,F,A,C},0,0,[]); +analyse_clause(calls,Bumps) -> + analyse_clause_calls(Bumps,{x,x,x,x},[]). + +analyse_clause_cov([{#bump{module=M,function=F,arity=A,clause=C},N}|Bumps], + {M,F,A,C}=Clause,Ls,NotCov,Acc) -> + analyse_clause_cov(Bumps,Clause,Ls+1,if N==0->NotCov+1; true->NotCov end,Acc); +analyse_clause_cov([{#bump{module=M1,function=F1,arity=A1,clause=C1},_}|_]=Bumps, + Clause,Ls,NotCov,Acc) -> + analyse_clause_cov(Bumps,{M1,F1,A1,C1},0,0,[{Clause,{Ls-NotCov,NotCov}}|Acc]); +analyse_clause_cov([],Clause,Ls,NotCov,Acc) -> + lists:reverse(Acc,[{Clause,{Ls-NotCov,NotCov}}]). + +analyse_clause_calls([{#bump{module=M,function=F,arity=A,clause=C},_}|Bumps], + {M,F,A,C}=Clause,Acc) -> + analyse_clause_calls(Bumps,Clause,Acc); +analyse_clause_calls([{#bump{module=M1,function=F1,arity=A1,clause=C1},N}|Bumps], + _Clause,Acc) -> + analyse_clause_calls(Bumps,{M1,F1,A1,C1},[{{M1,F1,A1,C1},N}|Acc]); +analyse_clause_calls([],_Clause,Acc) -> + lists:reverse(Acc). merge_fun(coverage) -> fun({Cov1,NotCov1}, {Cov2,NotCov2}) -> @@ -2094,7 +2300,50 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) -> merge_functions([], _MFun, Result) -> Result. 
-do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> +analyse_list_to_file(Modules, Opts, State) -> + {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []), + collect([M || {M,_} <- LoadedMF], State#main_state.nodes), + OutDir = proplists:get_value(outdir,Opts), + HTML = lists:member(html,Opts), + Fun = fun({Module,File}) -> + OutFile = outfilename(OutDir,Module,HTML), + do_analyse_to_file(Module,File,OutFile,HTML,State) + end, + {Ok,Error1} = split_ok_error(pmap(Fun, LoadedMF++ImportedMF),[],[]), + {result,Ok,Error ++ Error1}. + +analyse_all_to_file(Opts, State) -> + collect(State#main_state.nodes), + AllModules = get_all_modules(State), + OutDir = proplists:get_value(outdir,Opts), + HTML = lists:member(html,Opts), + Fun = fun({Module,File}) -> + OutFile = outfilename(OutDir,Module,HTML), + do_analyse_to_file(Module,File,OutFile,HTML,State) + end, + {Ok,Error} = split_ok_error(pmap(Fun, AllModules),[],[]), + {result,Ok,Error}. + +get_all_modules(State) -> + get_all_modules(State#main_state.compiled ++ State#main_state.imported,[]). +get_all_modules([{Module,File}|Rest],Acc) -> + get_all_modules(Rest,[{Module,File}|Acc]); +get_all_modules([{Module,File,_}|Rest],Acc) -> + case lists:keymember(Module,1,Acc) of + true -> get_all_modules(Rest,Acc); + false -> get_all_modules(Rest,[{Module,File}|Acc]) + end; +get_all_modules([],Acc) -> + Acc. + +split_ok_error([{ok,R}|Result],Ok,Error) -> + split_ok_error(Result,[R|Ok],Error); +split_ok_error([{error,R}|Result],Ok,Error) -> + split_ok_error(Result,Ok,[R|Error]); +split_ok_error([],Ok,Error) -> + {Ok,Error}. + +do_parallel_analysis_to_file(Module, Opts, Loaded, From, State) -> File = case Loaded of {loaded, File0} -> [{Module,Clauses}] = @@ -2105,21 +2354,29 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> {imported, File0, _} -> File0 end, + HTML = lists:member(html,Opts), + OutFile = + case proplists:get_value(outfile,Opts) of + undefined -> + outfilename(proplists:get_value(outdir,Opts),Module,HTML); + F -> + F + end, + reply(From, do_analyse_to_file(Module,File,OutFile,HTML,State)). + +do_analyse_to_file(Module,File,OutFile,HTML,State) -> case find_source(Module, File) of {beam,_BeamFile} -> - reply(From, {error,no_source_code_found}); + {error,{no_source_code_found,Module}}; ErlFile -> analyse_info(Module,State#main_state.imported), - HTML = lists:member(html,Opts), - R = do_analyse_to_file(Module,OutFile, - ErlFile,HTML), - reply(From, R) + do_analyse_to_file1(Module,OutFile,ErlFile,HTML) end. -%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} +%% do_analyse_to_file1(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} %% Module = atom() %% OutFile = ErlFile = string() -do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> +do_analyse_to_file1(Module, OutFile, ErlFile, HTML) -> case file:open(ErlFile, [read]) of {ok, InFd} -> case file:open(OutFile, [write]) of @@ -2160,7 +2417,10 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> "**************************************" "\n\n"]), - print_lines(Module, InFd, OutFd, 1, HTML), + Pattern = {#bump{module=Module,line='$1',_='_'},'$2'}, + MS = [{Pattern,[],[{{'$1','$2'}}]}], + CovLines = lists:keysort(1,ets:select(?COLLECTION_TABLE, MS)), + print_lines(Module, CovLines, InFd, OutFd, 1, HTML), if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]); true -> ok @@ -2179,21 +2439,19 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> {error, {file, ErlFile, Reason}} end. 
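Note: for a single module the option handling above resolves the output file from {outfile,_} or {outdir,_} and otherwise falls back to the old default names; a sketch, assuming mod_a (placeholder) is cover compiled, its source can be found, and the output directory exists:

    {ok, "mod_a.COVER.html"} = cover:analyse_to_file(mod_a, [html]),
    {ok, _InOutDir}          = cover:analyse_to_file(mod_a, [{outdir, "cover_logs"}, html]),
    %% The old two-argument form is still accepted and rewritten to {outfile, ...}:
    {ok, "mod_a.txt"}        = cover:analyse_to_file(mod_a, "mod_a.txt").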
-print_lines(Module, InFd, OutFd, L, HTML) -> + +print_lines(Module, CovLines, InFd, OutFd, L, HTML) -> case io:get_line(InFd, '') of eof -> ignore; "%"++_=Line -> %Comment line - not executed. io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]), - print_lines(Module, InFd, OutFd, L+1, HTML); + print_lines(Module, CovLines, InFd, OutFd, L+1, HTML); RawLine -> Line = escape_lt_and_gt(RawLine,HTML), - Pattern = {#bump{module=Module,line=L},'$1'}, - case ets:match(?COLLECTION_TABLE, Pattern) of - [] -> - io:put_chars(OutFd, [tab(),Line]); - Ns -> - N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns), + case CovLines of + [{L,N}|CovLines1] -> + %% N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns), if N=:=0, HTML=:=true -> LineNoNL = Line -- "\n", @@ -2211,9 +2469,12 @@ print_lines(Module, InFd, OutFd, L, HTML) -> true -> Str = integer_to_list(N), io:put_chars(OutFd, [Str,fill3(),Line]) - end - end, - print_lines(Module, InFd, OutFd, L+1, HTML) + end, + print_lines(Module, CovLines1, InFd, OutFd, L+1, HTML); + _ -> + io:put_chars(OutFd, [tab(),Line]), + print_lines(Module, CovLines, InFd, OutFd, L+1, HTML) + end end. tab() -> " | ". @@ -2362,21 +2623,21 @@ do_reset_collection_table(Module) -> ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). %% do_reset(Module) -> ok -%% The reset is done on a per-clause basis to avoid building +%% The reset is done on ?CHUNK_SIZE number of bumps to avoid building %% long lists in the case of very large modules do_reset(Module) -> - [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module), - do_reset2(Clauses). + Pattern = {#bump{module=Module, _='_'}, '$1'}, + MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + do_reset2(Match). -do_reset2([{M,F,A,C,_L}|Clauses]) -> - Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), +do_reset2({Bumps,Continuation}) -> lists:foreach(fun({Bump,_N}) -> ets:insert(?COVER_TABLE, {Bump,0}) end, Bumps), - do_reset2(Clauses); -do_reset2([]) -> + do_reset2(ets:select(Continuation)); +do_reset2('$end_of_table') -> ok. do_clear(Module) -> @@ -2419,31 +2680,43 @@ escape_lt_and_gt1([],Acc) -> escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). -pmap(Fun, List) -> - pmap(Fun, List, 20). -pmap(Fun, List, Limit) -> - pmap(Fun, List, [], Limit, 0, []). -pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit -> - Collector = self(), - Pid = spawn_link(fun() -> - ?SPAWN_DBG(pmap,E), - Collector ! {res,self(),Fun(E)} - end), - erlang:monitor(process, Pid), - pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc); -pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) -> - receive - {'DOWN', _Ref, process, X, _} when is_pid(X) -> - pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc); - {res, Pid, Res} -> - pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc]) - end; -pmap(_Fun, [], [], _Limit, 0, Acc) -> - lists:reverse(Acc); -pmap(Fun, [], [], Limit, Cnt, Acc) -> +%%%--Internal functions for parallelization------------------------------ +pmap(Fun,List) -> + NTot = length(List), + NProcs = erlang:system_info(schedulers) * 2, + NPerProc = (NTot div NProcs) + 1, + Mons = pmap_spawn(Fun,NPerProc,List,[]), + pmap_collect(Mons,[]). 
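The reworked do_reset/1 above walks the cover table in chunks of ?CHUNK_SIZE bumps using ets:select/3 and its continuation, so very large modules no longer force one huge match list into memory at reset time. A minimal self-contained sketch of that traversal pattern, with a hypothetical table layout and chunk size (not code from the patch):

    -module(chunked_reset_sketch).
    -export([reset_counters/1]).

    -define(CHUNK, 10000).  % hypothetical chunk size

    %% Reset every non-zero counter in Tab ({Key, Count} entries) to 0,
    %% visiting at most ?CHUNK objects per ets:select round trip.
    reset_counters(Tab) ->
        MatchSpec = [{{'$1','$2'}, [{'=/=','$2',0}], ['$_']}],
        reset_chunk(ets:select(Tab, MatchSpec, ?CHUNK), Tab).

    reset_chunk({Objects, Continuation}, Tab) ->
        [true = ets:insert(Tab, {Key, 0}) || {Key, _Count} <- Objects],
        reset_chunk(ets:select(Continuation), Tab);
    reset_chunk('$end_of_table', _Tab) ->
        ok.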
+ +pmap_spawn(_,_,[],Mons) -> + Mons; +pmap_spawn(Fun,NPerProc,List,Mons) -> + {L1,L2} = if length(List)>=NPerProc -> lists:split(NPerProc,List); + true -> {List,[]} % last chunk + end, + Mon = + spawn_monitor( + fun() -> + exit({pmap_done,lists:map(Fun,L1)}) + end), + pmap_spawn(Fun,NPerProc,L2,[Mon|Mons]). + +pmap_collect([],Acc) -> + lists:append(Acc); +pmap_collect(Mons,Acc) -> receive - {'DOWN', _Ref, process, X, _} when is_pid(X) -> - pmap(Fun, [], [], Limit, Cnt - 1, Acc) + {'DOWN', Ref, process, Pid, {pmap_done,Result}} -> + pmap_collect(lists:delete({Pid,Ref},Mons),[Result|Acc]); + {'DOWN', Ref, process, Pid, Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + %% Something went really wrong - don't hang! + exit(Reason); + false -> + %% This should be handled somewhere else + self() ! Down, + pmap_collect(Mons,Acc) + end end. %%%----------------------------------------------------------------- diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index 80807b1d38..368fa6c3d1 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2013. All Rights Reserved. +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -33,6 +33,8 @@ -export([do_coverage/1]). +-export([distribution_performance/1]). + -include_lib("test_server/include/test_server.hrl"). %%---------------------------------------------------------------------- @@ -170,10 +172,15 @@ compile(Config) when is_list(Config) -> ?line {ok, CWD} = file:get_cwd(), ?line Result2 = cover:compile_directory(CWD), ?line SortedResult = lists:sort(Result2), - ?line [{error,_DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult, + ?line [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult, ?line [{ok,e}] = cover:compile_directory("d1"), ?line {error,enoent} = cover:compile_directory("d2"), + [] = cover:compile([]), + Result21 = cover:compile([a,b,"cc.erl",d,"f"]), + SortedResult21 = lists:sort(Result21), + [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult21, + ?line {ok,a} = cover:compile(a), ?line {ok,b} = compile:file(b), ?line code:purge(b), @@ -213,8 +220,14 @@ compile(Config) when is_list(Config) -> ?line {error,non_existing} = cover:compile_beam(z), ?line [{ok,y}] = cover:compile_beam_directory("d"), ?line Result3 = lists:sort(cover:compile_beam_directory()), - ?line [{error,{no_abstract_code,_XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3, + ?line [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3, ?line {error,enoent} = cover:compile_beam_directory("d2"), + + [] = cover:compile_beam([]), + Result31 = cover:compile_beam([crypt,"v.beam",w,"x"]), + SortedResult31 = lists:sort(Result31), + [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = SortedResult31, + ?line decompile([v,w,y]), ?line Files = lsfiles(), ?line remove(files(Files, ".beam")). 
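The reworked pmap/2 above spawns roughly two workers per scheduler, hands each a slice of the input list, and collects every slice through the worker's exit reason and the matching 'DOWN' message, so no separate result protocol is needed. A stripped-down, standalone sketch of that spawn_monitor/collect-by-exit-reason pattern (names invented; unlike the patch it spawns one worker per element, preserves input order, and omits error handling):

    -module(pmap_sketch).
    -export([pmap/2]).

    pmap(Fun, List) ->
        %% one monitored worker per element; the result travels in the exit reason
        Mons = [spawn_monitor(fun() -> exit({done, Fun(E)}) end) || E <- List],
        [receive
             {'DOWN', Ref, process, Pid, {done, Res}} -> Res
         end || {Pid, Ref} <- Mons].

    %% pmap_sketch:pmap(fun(X) -> X*X end, [1,2,3]) returns [1,4,9].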
@@ -239,20 +252,22 @@ analyse(Config) when is_list(Config) -> ?line done = a:start(5), - ?line {ok, {a,{17,2}}} = cover:analyse(a, coverage, module), - ?line {ok, [{{a,start,1},{6,0}}, - {{a,stop,1},{0,1}}, - {{a,pong,1},{1,0}}, - {{a,loop,3},{5,1}}, - {{a,trycatch,1},{4,0}}, - {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a, coverage, function), - ?line {ok, [{{a,start,1,1},{6,0}}, - {{a,stop,1,1},{0,1}}, - {{a,pong,1,1},{1,0}}, + {ok, {a,{17,2}}=ACovMod} = cover:analyse(a, coverage, module), + {ok, [{{a,exit_kalle,0},{1,0}}, + {{a,loop,3},{5,1}}, + {{a,pong,1},{1,0}}, + {{a,start,1},{6,0}}, + {{a,stop,1},{0,1}}, + {{a,trycatch,1},{4,0}}]=ACovFunc} = + cover:analyse(a, coverage, function), + {ok, [{{a,exit_kalle,0,1},{1,0}}, {{a,loop,3,1},{3,1}}, {{a,loop,3,2},{2,0}}, - {{a,trycatch,1,1},{4,0}}, - {{a,exit_kalle,0,1},{1,0}}]} = cover:analyse(a, coverage, clause), + {{a,pong,1,1},{1,0}}, + {{a,start,1,1},{6,0}}, + {{a,stop,1,1},{0,1}}, + {{a,trycatch,1,1},{4,0}}]=ACovClause} = + cover:analyse(a, coverage, clause), ?line {ok, [{{a,9},{1,0}}, {{a,10},{1,0}}, {{a,11},{1,0}}, @@ -271,22 +286,22 @@ analyse(Config) when is_list(Config) -> {{a,47},{1,0}}, {{a,49},{1,0}}, {{a,51},{1,0}}, - {{a,55},{1,0}}]} = cover:analyse(a, coverage, line), - - ?line {ok, {a,15}} = cover:analyse(a, calls, module), - ?line {ok, [{{a,start,1},1}, - {{a,stop,1},0}, - {{a,pong,1},5}, - {{a,loop,3},6}, - {{a,trycatch,1},2}, - {{a,exit_kalle,0},1}]} = cover:analyse(a, calls, function), - ?line {ok, [{{a,start,1,1},1}, - {{a,stop,1,1},0}, - {{a,pong,1,1},5}, - {{a,loop,3,1},5}, - {{a,loop,3,2},1}, - {{a,trycatch,1,1},2}, - {{a,exit_kalle,0,1},1}]} = cover:analyse(a, calls, clause), + {{a,55},{1,0}}]=ACovLine} = cover:analyse(a, coverage, line), + + {ok, {a,15}=ACallsMod} = cover:analyse(a, calls, module), + {ok, [{{a,exit_kalle,0},1}, + {{a,loop,3},6}, + {{a,pong,1},5}, + {{a,start,1},1}, + {{a,stop,1},0}, + {{a,trycatch,1},2}]=ACallsFunc} = cover:analyse(a, calls, function), + {ok, [{{a,exit_kalle,0,1},1}, + {{a,loop,3,1},5}, + {{a,loop,3,2},1}, + {{a,pong,1,1},5}, + {{a,start,1,1},1}, + {{a,stop,1,1},0}, + {{a,trycatch,1,1},2}]=ACallsClause} = cover:analyse(a, calls, clause), ?line {ok, [{{a,9},1}, {{a,10},1}, {{a,11},1}, @@ -305,27 +320,85 @@ analyse(Config) when is_list(Config) -> {{a,47},1}, {{a,49},1}, {{a,51},2}, - {{a,55},1}]} = cover:analyse(a, calls, line), - - ?line {ok, [{{a,start,1},{6,0}}, - {{a,stop,1},{0,1}}, - {{a,pong,1},{1,0}}, - {{a,loop,3},{5,1}}, - {{a,trycatch,1},{4,0}}, - {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a), - ?line {ok, {a,{17,2}}} = cover:analyse(a, module), - ?line {ok, [{{a,start,1},1}, - {{a,stop,1},0}, - {{a,pong,1},5}, - {{a,loop,3},6}, - {{a,trycatch,1},2}, - {{a,exit_kalle,0},1}]} = cover:analyse(a, calls), + {{a,55},1}]=ACallsLine} = cover:analyse(a, calls, line), + + {ok,ACovFunc} = cover:analyse(a), + {ok,ACovMod} = cover:analyse(a, module), + {ok,ACallsFunc} = cover:analyse(a, calls), ?line {ok, "a.COVER.out"} = cover:analyse_to_file(a), ?line {ok, "e.COVER.out"} = cover:analyse_to_file(e), ?line {ok, "a.COVER.html"} = cover:analyse_to_file(a,[html]), ?line {ok, "e.COVER.html"} = cover:analyse_to_file(e,[html]), + %% Analyse all modules + Modules = cover:modules(), + N = length(Modules), + + {result,CovFunc,[]} = cover:analyse(), % default = coverage, function + ACovFunc = [A || {{a,_,_},_}=A<-CovFunc], + + {result,CovMod,[]} = cover:analyse(coverage,module), + ACovMod = lists:keyfind(a,1,CovMod), + + {result,CovClause,[]} = cover:analyse(coverage,clause), + ACovClause = [A || 
{{a,_,_,_},_}=A<-CovClause],
+
+    {result,CovLine,[]} = cover:analyse(coverage,line),
+    ACovLine = [A || {{a,_},_}=A<-CovLine],
+
+    {result,CallsFunc,[]} = cover:analyse(calls,function),
+    ACallsFunc = [A || {{a,_,_},_}=A<-CallsFunc],
+
+    {result,CallsMod,[]} = cover:analyse(calls,module),
+    ACallsMod = lists:keyfind(a,1,CallsMod),
+
+    {result,CallsClause,[]} = cover:analyse(calls,clause),
+    ACallsClause = [A || {{a,_,_,_},_}=A<-CallsClause],
+
+    {result,CallsLine,[]} = cover:analyse(calls,line),
+    ACallsLine = [A || {{a,_},_}=A<-CallsLine],
+
+    {result,AllToFile,[]} = cover:analyse_to_file(),
+    N = length(AllToFile),
+    true = lists:member("a.COVER.out",AllToFile),
+    {result,AllToFileHtml,[]} = cover:analyse_to_file([html]),
+    N = length(AllToFileHtml),
+    true = lists:member("a.COVER.html",AllToFileHtml),
+
+    %% Analyse list of modules
+    %% Listing all modules so we can compare result with above result
+    %% from analysing all.
+
+    {result,CovFunc1,[]} = cover:analyse(Modules), % default = coverage, function
+    true = lists:sort(CovFunc) == lists:sort(CovFunc1),
+
+    {result,CovMod1,[]} = cover:analyse(Modules,coverage,module),
+    true = lists:sort(CovMod) == lists:sort(CovMod1),
+
+    {result,CovClause1,[]} = cover:analyse(Modules,coverage,clause),
+    true = lists:sort(CovClause) == lists:sort(CovClause1),
+
+    {result,CovLine1,[]} = cover:analyse(Modules,coverage,line),
+    true = lists:sort(CovLine) == lists:sort(CovLine1),
+
+    {result,CallsFunc1,[]} = cover:analyse(Modules,calls,function),
+    true = lists:sort(CallsFunc) == lists:sort(CallsFunc1),
+
+    {result,CallsMod1,[]} = cover:analyse(Modules,calls,module),
+    true = lists:sort(CallsMod) == lists:sort(CallsMod1),
+
+    {result,CallsClause1,[]} = cover:analyse(Modules,calls,clause),
+    true = lists:sort(CallsClause) == lists:sort(CallsClause1),
+
+    {result,CallsLine1,[]} = cover:analyse(Modules,calls,line),
+    true = lists:sort(CallsLine) == lists:sort(CallsLine1),
+
+    {result,AllToFile1,[]} = cover:analyse_to_file(Modules),
+    true = lists:sort(AllToFile) == lists:sort(AllToFile1),
+    {result,AllToFileHtml1,[]} = cover:analyse_to_file(Modules,[html]),
+    true = lists:sort(AllToFileHtml) == lists:sort(AllToFileHtml1),
+
     %% analyse_to_file of file which is compiled from beam
     ?line {ok,f} = compile:file(f,[debug_info]),
     ?line code:purge(f),
@@ -348,14 +421,17 @@ analyse(Config) when is_list(Config) ->
     {module,z} = code:load_file(z),
     {ok,z} = cover:compile_beam(z),
     ok = file:delete("z.erl"),
-    {error,no_source_code_found} = cover:analyse_to_file(z),
+    {error,{no_source_code_found,z}} = cover:analyse_to_file(z),
+    {result,[],[{no_source_code_found,z}]} = cover:analyse_to_file([z]),
     code:purge(z),
     code:delete(z),

     ?line {error,{not_cover_compiled,b}} = cover:analyse(b),
     ?line {error,{not_cover_compiled,g}} = cover:analyse(g),
+    {result,[],[{not_cover_compiled,b}]} = cover:analyse([b]),
     ?line {error,{not_cover_compiled,b}} = cover:analyse_to_file(b),
-    ?line {error,{not_cover_compiled,g}} = cover:analyse_to_file(g).
+    {error,{not_cover_compiled,g}} = cover:analyse_to_file(g),
+    {result,[],[{not_cover_compiled,g}]} = cover:analyse_to_file([g]).

 misc(suite) -> [];
 misc(Config) when is_list(Config) ->
@@ -680,6 +756,119 @@ stop_node_after_disconnect(Config) ->
     ?t:stop_node(N1),
     ok.
+distribution_performance(Config) -> + PrivDir = ?config(priv_dir,Config), + Dir = filename:join(PrivDir,"distribution_performance"), + AllFiles = filename:join(Dir,"*"), + ok = filelib:ensure_dir(AllFiles), + code:add_patha(Dir), + M = 9, % Generate M modules + F = 210, % with F functions + C = 10, % and each function of C clauses + Mods = generate_modules(M,F,C,Dir), + +% ?t:break(""), + + NodeName = cover_SUITE_distribution_performance, + {ok,N1} = ?t:start_node(NodeName,peer,[{start_cover,false}]), + %% CFun = fun() -> + %% [{ok,_} = cover:compile_beam(Mod) || Mod <- Mods] + %% end, + CFun = fun() -> cover:compile_beam(Mods) end, + {CT,CA} = timer:tc(CFun), +% erlang:display(CA), + erlang:display({compile,CT}), + + {SNT,_} = timer:tc(fun() -> {ok,[N1]} = cover:start(nodes()) end), + erlang:display({start_node,SNT}), + + [1 = rpc:call(N1,Mod,f1,[1]) || Mod <- Mods], + +% Fun = fun() -> [cover:analyse(Mod,calls,function) || Mod<-Mods] end, +% Fun = fun() -> analyse_all(Mods,calls,function) end, +% Fun = fun() -> cover:analyse('_',calls,function) end, + Fun = fun() -> cover:analyse(Mods,calls,function) end, + +% Fun = fun() -> [begin cover:analyse_to_file(Mod,[html]) end || Mod<-Mods] end, +% Fun = fun() -> analyse_all_to_file(Mods,[html]) end, +% Fun = fun() -> cover:analyse_to_file(Mods,[html]) end, +% Fun = fun() -> cover:analyse_to_file([html]) end, + +% Fun = fun() -> cover:reset() end, + + {AT,A} = timer:tc(Fun), + erlang:display({analyse,AT}), +% erlang:display(lists:sort([X || X={_MFA,N} <- lists:append([L || {ok,L}<-A]), N=/=0])), + + %% fprof:apply(Fun, [],[{procs,[whereis(cover_server)]}]), + %% fprof:profile(), + %% fprof:analyse(dest,[]), + + {SNT2,_} = timer:tc(fun() -> ?t:stop_node(N1) end), + erlang:display({stop_node,SNT2}), + + code:del_path(Dir), + Files = filelib:wildcard(AllFiles), + [ok = file:delete(File) || File <- Files], + ok = file:del_dir(Dir), + ok. + +%% Run analysis in parallel +analyse_all(Mods,Analysis,Level) -> + Pids = [begin + Pid = spawn(fun() -> + {ok,A} = cover:analyse(Mod,Analysis,Level), + exit(A) + end), + erlang:monitor(process,Pid), + Pid + end || Mod <- Mods], + get_downs(Pids,[]). + +analyse_all_to_file(Mods,Opts) -> + Pids = [begin + Pid = cover:async_analyse_to_file(Mod,Opts), + erlang:monitor(process,Pid), + Pid + end || Mod <- Mods], + get_downs(Pids,[]). + +get_downs([],Acc) -> + Acc; +get_downs(Pids,Acc) -> + receive + {'DOWN', _Ref, _Type, Pid, A} -> + get_downs(lists:delete(Pid,Pids),[A|Acc]) + end. + +generate_modules(0,_,_,_) -> + []; +generate_modules(M,F,C,Dir) -> + ModStr = "m" ++ integer_to_list(M), + Mod = list_to_atom(ModStr), + Src = ["-module(",ModStr,").\n" + "-compile(export_all).\n" | + generate_functions(F,C)], + Erl = filename:join(Dir,ModStr++".erl"), + ok = file:write_file(Erl,Src), + {ok,Mod} = compile:file(Erl,[{outdir,Dir},debug_info,report]), + [Mod | generate_modules(M-1,F,C,Dir)]. + +generate_functions(0,_) -> + []; +generate_functions(F,C) -> + Func = "f" ++ integer_to_list(F), + [generate_clauses(C,Func) | generate_functions(F-1,C)]. + +generate_clauses(0,_) -> + []; +generate_clauses(C,Func) -> + CStr = integer_to_list(C), + Sep = if C==1 -> "."; true -> ";" end, + [Func,"(",CStr,") -> ",CStr,Sep,"\n" | + generate_clauses(C-1,Func)]. 
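generate_modules/4 above builds the synthetic code for the performance test from three size parameters: M modules, each with F functions of C clauses, every clause simply returning its own clause number. For small parameters the generated source looks like this (for example, generate_modules(1, 2, 2, Dir)):

    -module(m1).
    -compile(export_all).
    f2(2) -> 2;
    f2(1) -> 1.
    f1(2) -> 2;
    f1(1) -> 1.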
+ + export_import(suite) -> []; export_import(Config) when is_list(Config) -> ?line DataDir = ?config(data_dir, Config), @@ -788,10 +977,11 @@ otp_5031(Config) when is_list(Config) -> Dog = ?t:timetrap(?t:seconds(10)), - ?line {ok,N1} = ?t:start_node(cover_SUITE_distribution1,slave,[]), + {ok,N1} = ?t:start_node(cover_SUITE_otp_5031,slave,[]), ?line {ok,[N1]} = cover:start(N1), ?line {error,not_main_node} = rpc:call(N1,cover,modules,[]), ?line cover:stop(), + ?t:stop_node(N1), ?t:timetrap_cancel(Dog), ok. @@ -1005,6 +1195,7 @@ otp_7095(Config) when is_list(Config) -> ok. + otp_8270(doc) -> ["OTP-8270. Bug."]; otp_8270(suite) -> []; @@ -1020,7 +1211,7 @@ otp_8270(Config) when is_list(Config) -> ?line {ok,N3} = ?t:start_node(cover_n3,slave,As), timer:sleep(500), - cover:start(nodes()), + {ok,[_,_,_]} = cover:start(nodes()), Test = << "-module(m).\n" @@ -1058,6 +1249,7 @@ otp_8270(Config) when is_list(Config) -> ?line {N2,true} = {N2,is_list(N2_info)}, ?line {N3,true} = {N3,is_list(N3_info)}, + exit(Pid1,kill), ?line ?t:stop_node(N1), ?line ?t:stop_node(N2), ?line ?t:stop_node(N3), @@ -1572,7 +1764,9 @@ is_unloaded(What) -> end. check_f_calls(F1,F2) -> - {ok,[{{f,f1,0},F1},{{f,f2,0},F2}|_]} = cover:analyse(f,calls,function). + {ok,A} = cover:analyse(f,calls,function), + {_,F1} = lists:keyfind({f,f1,0},1,A), + {_,F2} = lists:keyfind({f,f2,0},1,A). cover_which_nodes(Expected) -> case cover:which_nodes() of |