Diffstat (limited to 'lib/tools')
-rw-r--r-- | lib/tools/doc/src/cover.xml | 29
-rw-r--r-- | lib/tools/doc/src/cover_chapter.xml | 7
-rw-r--r-- | lib/tools/emacs/erlang.el | 3
-rw-r--r-- | lib/tools/src/cover.erl | 583
-rw-r--r-- | lib/tools/test/Makefile | 6
-rw-r--r-- | lib/tools/test/cover_SUITE.erl | 63
-rw-r--r-- | lib/tools/test/cprof_SUITE.erl | 35
-rw-r--r-- | lib/tools/test/emem_SUITE.erl | 38
-rw-r--r-- | lib/tools/test/eprof_SUITE.erl | 26
-rw-r--r-- | lib/tools/test/fprof_SUITE.erl | 32
-rw-r--r--[l---------] | lib/tools/test/ignore_cores.erl | 159
-rw-r--r-- | lib/tools/test/instrument_SUITE.erl | 29
-rw-r--r-- | lib/tools/test/lcnt_SUITE.erl | 19
-rw-r--r-- | lib/tools/test/make_SUITE.erl | 31
-rw-r--r-- | lib/tools/test/tools.cover | 2
-rw-r--r-- | lib/tools/test/tools.spec | 2
-rw-r--r-- | lib/tools/test/tools_SUITE.erl | 31
-rw-r--r-- | lib/tools/test/xref_SUITE.erl | 58 |
18 files changed, 841 insertions, 312 deletions
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml index 323bd0dda8..0a3302bda5 100644 --- a/lib/tools/doc/src/cover.xml +++ b/lib/tools/doc/src/cover.xml @@ -270,6 +270,8 @@ defaults to <c>function</c>.</p> <p>If <c>Module</c> is not Cover compiled, the function returns <c>{error,{not_cover_compiled,Module}}</c>.</p> + <p>HINT: It is possible to issue multiple analyse_to_file commands at + the same time. </p> </desc> </func> <func> @@ -307,6 +309,33 @@ <c>.beam</c> file, or in <c>../src</c> relative to that directory. If no source code is found, <c>,{error,no_source_code_found}</c> is returned.</p> + <p>HINT: It is possible to issue multiple analyse_to_file commands at + the same time. </p> + </desc> + </func> + <func> + <name>async_analyse_to_file(Module) -> </name> + <name>async_analyse_to_file(Module,Options) -> </name> + <name>async_analyse_to_file(Module, OutFile) -> </name> + <name>async_analyse_to_file(Module, OutFile, Options) -> pid()</name> + <fsummary>Asynchronous call to analyse_to_file.</fsummary> + <type> + <v>Module = atom()</v> + <v>OutFile = string()</v> + <v>Options = [Option]</v> + <v>Option = html</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v> File = string()</v> + <v> Reason = term()</v> + </type> + <desc> + <p>This function works exactly the same way as + <seealso marker="#analyse_to_file-1">analyse_to_file</seealso> except + that it is asynchronous instead of synchronous. The spawned process + will link with the caller when created. If an <c>Error</c> occurs + while doing the cover analysis the process will crash with the same + error reason as <seealso marker="#analyse_to_file-1">analyse_to_file</seealso> + would return.</p> </desc> </func> <func> diff --git a/lib/tools/doc/src/cover_chapter.xml b/lib/tools/doc/src/cover_chapter.xml index b4f7919183..92a790c34e 100644 --- a/lib/tools/doc/src/cover_chapter.xml +++ b/lib/tools/doc/src/cover_chapter.xml @@ -403,6 +403,13 @@ ok database contains information about each executable line in each Cover compiled module, performance decreases proportionally to the size and number of the Cover compiled modules.</p> + <p>To improve performance when analysing cover results it is possible + to do multiple calls to <seealso marker="cover#analyse-1">analyse</seealso> + and <seealso marker="cover#analyse_to_file-1">analyse_to_file</seealso> + at once. You can also use the + <seealso marker="cover#async_analyse_to_file-1">async_analyse_to_file</seealso> + convenience function. + </p> </section> <section> diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el index ed825a298f..17b093ee34 100644 --- a/lib/tools/emacs/erlang.el +++ b/lib/tools/emacs/erlang.el @@ -466,14 +466,17 @@ To activate the workaround, place the following in your `~/.emacs' file: (defvar erlang-indent-level 4 "*Indentation of Erlang calls/clauses within blocks.") +(put 'erlang-indent-level 'safe-local-variable 'integerp) (defvar erlang-indent-guard 2 "*Indentation of Erlang guards.") +(put 'erlang-indent-guard 'safe-local-variable 'integerp) (defvar erlang-argument-indent 2 "*Indentation of the first argument in a function call. 
When nil, indent to the column after the `(' of the function.") +(put 'erlang-argument-indent 'safe-local-variable '(lambda (val) (or (null val) (integerp val)))) (defvar erlang-tab-always-indent t "*Non-nil means TAB in Erlang mode should always re-indent the current line, diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index c4d1bd1d2f..ada2db45be 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -35,23 +35,37 @@ %% remote_process_loop/1. %% %% TABLES -%% Each nodes has an ets table named 'cover_internal_data_table' -%% (?COVER_TABLE). This table contains the coverage data and is -%% continously updated when cover compiled code is executed. +%% Each node has two tables: cover_internal_data_table (?COVER_TABLE) and +%% cover_internal_clause_table (?COVER_CLAUSE_TABLE). +%% ?COVER_TABLE contains the bump data i.e. the data about which lines +%% have been executed how many times. +%% ?COVER_CLAUSE_TABLE contains information about which clauses in which modules +%% cover is currently collecting statistics for. %% -%% The main node owns a table named -%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE). This table -%% contains data which is collected from remote nodes (either when a -%% remote node is stopped with cover:stop/1 or when analysing. When -%% analysing, data is even moved from the ?COVER_TABLE on the main -%% node to the ?COLLECTION_TABLE. +%% The main node owns tables named +%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE) and +%% 'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE). +%% These tables contain data which is collected from remote nodes (either when a +%% remote node is stopped with cover:stop/1 or when analysing). When +%% analysing, data is even moved from the COVER tables on the main +%% node to the COLLECTION tables. %% %% The main node also has a table named 'cover_binary_code_table' %% (?BINARY_TABLE). This table contains the binary code for each cover %% compiled module. This is necessary so that the code can be loaded %% on remote nodes that are started after the compilation. %% - +%% PARALLELISM +%% To take advantage of SMP when doing the cover analysis both the data +%% collection and analysis have been parallelized. One process is spawned for +%% each node when collecting data, and on the remote node one process is +%% spawned per module when collecting data. +%% +%% When analyzing data it is possible to issue multiple analyse(_to_file)/X +%% calls at once. They are however all blocking calls (for backwards compatibility +%% reasons), so the user of cover will have to spawn several processes to do the +%% calls (or use async_analyse_to_file). +%% %% External exports -export([start/0, start/1, @@ -61,6 +75,9 @@ analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3, analyse_to_file/1, analyse_to_file/2, analyse_to_file/3, analyze_to_file/1, analyze_to_file/2, analyze_to_file/3, + async_analyse_to_file/1,async_analyse_to_file/2, + async_analyse_to_file/3, async_analyze_to_file/1, + async_analyze_to_file/2, async_analyze_to_file/3, export/1, export/2, import/1, modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1, reset/1, reset/0, @@ -100,8 +117,10 @@ }). -define(COVER_TABLE, 'cover_internal_data_table'). +-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table'). -define(BINARY_TABLE, 'cover_binary_code_table'). -define(COLLECTION_TABLE, 'cover_collected_remote_data_table'). +-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table'). -define(TAG, cover_compiled). 
-define(SERVER, cover_server). @@ -114,6 +133,8 @@ true -> ?BLOCK(Expr) end). +-define(SPAWN_DBG(Tag,Value),put(Tag,Value)). + -include_lib("stdlib/include/ms_transform.hrl"). %%%---------------------------------------------------------------------- @@ -127,7 +148,10 @@ start() -> case whereis(?SERVER) of undefined -> Starter = self(), - Pid = spawn(fun() -> init_main(Starter) end), + Pid = spawn(fun() -> + ?SPAWN_DBG(start,[]), + init_main(Starter) + end), Ref = erlang:monitor(process,Pid), Return = receive @@ -382,6 +406,30 @@ analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut). analyze_to_file(Module, OutFile, Options) -> analyse_to_file(Module, OutFile, Options). +async_analyse_to_file(Module) -> + do_spawn(?MODULE, analyse_to_file, [Module]). +async_analyse_to_file(Module, OutFileOrOpts) -> + do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]). +async_analyse_to_file(Module, OutFile, Options) -> + do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). + +do_spawn(M,F,A) -> + spawn_link(fun() -> + case apply(M,F,A) of + {ok, _} -> + ok; + {error, Reason} -> + exit(Reason) + end + end). + +async_analyze_to_file(Module) -> + async_analyse_to_file(Module). +async_analyze_to_file(Module, OutFileOrOpts) -> + async_analyse_to_file(Module, OutFileOrOpts). +async_analyze_to_file(Module, OutFile, Options) -> + async_analyse_to_file(Module, OutFile, Options). + outfilename(Module,Opts) -> case lists:member(html,Opts) of true -> @@ -500,6 +548,8 @@ remote_call(Node,Request) -> Return end. +remote_reply(Proc,Reply) when is_pid(Proc) -> + Proc ! {?SERVER,Reply}; remote_reply(MainNode,Reply) -> {?SERVER,MainNode} ! {?SERVER,Reply}. @@ -509,9 +559,15 @@ remote_reply(MainNode,Reply) -> init_main(Starter) -> register(?SERVER,self()), - ets:new(?COVER_TABLE, [set, public, named_table]), + %% Having write concurrency here gives a 40% performance boost + %% when collect/1 is called. + ets:new(?COVER_TABLE, [set, public, named_table + ,{write_concurrency, true} + ]), + ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), ets:new(?BINARY_TABLE, [set, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), + ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]), process_flag(trap_exit,true), Starter ! {?SERVER,started}, main_process_loop(#main_state{}). 
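A minimal usage sketch for the new asynchronous API added above (illustrative only, not part of the diff): because do_spawn/3 links the analysis process to the caller and exits with the error reason on failure, a caller that wants to observe both successes and failures can trap exits and wait for the linked processes.

%% Illustrative sketch; assumes the listed modules are cover-compiled.
await_async_analysis(Modules) ->
    process_flag(trap_exit, true),
    Pids = [cover:async_analyse_to_file(M, [html]) || M <- Modules],
    %% Each linked process exits with reason 'normal' on success, or with
    %% the same error reason analyse_to_file/1,2,3 would have returned.
    [receive {'EXIT', Pid, Reason} -> {Pid, Reason} end || Pid <- Pids].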
@@ -593,40 +649,10 @@ main_process_loop(State) -> end; {From, {export,OutFile,Module}} -> - case file:open(OutFile,[write,binary,raw]) of - {ok,Fd} -> - Reply = - case Module of - '_' -> - export_info(State#main_state.imported), - collect(State#main_state.nodes), - do_export_table(State#main_state.compiled, - State#main_state.imported, - Fd); - _ -> - export_info(Module,State#main_state.imported), - case is_loaded(Module, State) of - {loaded, File} -> - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - do_export_table([{Module,File}],[],Fd); - {imported, File, ImportFiles} -> - %% don't know if I should allow this - - %% export a module which is only imported - Imported = [{Module,File,ImportFiles}], - do_export_table([],Imported,Fd); - _NotLoaded -> - {error,{not_cover_compiled,Module}} - end - end, - file:close(Fd), - reply(From, Reply); - {error,Reason} -> - reply(From, {error, {cant_open_file,OutFile,Reason}}) - - end, + spawn(fun() -> + ?SPAWN_DBG(export,{OutFile, Module}), + do_export(Module, OutFile, From, State) + end), main_process_loop(State); {From, {import,File}} -> @@ -692,107 +718,73 @@ main_process_loop(State) -> unregister(?SERVER), reply(From, ok); - {From, {Request, Module}} -> - case is_loaded(Module, State) of - {loaded, File} -> - {Reply,State1} = - case Request of - {analyse, Analysis, Level} -> - analyse_info(Module,State#main_state.imported), - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module,Clauses,State#main_state.nodes), - R = do_analyse(Module, Analysis, Level, Clauses), - {R,State}; - - {analyse_to_file, OutFile, Opts} -> - R = case find_source(File) of - {beam,_BeamFile} -> - {error,no_source_code_found}; - ErlFile -> - Imported = State#main_state.imported, - analyse_info(Module,Imported), - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - HTML = lists:member(html,Opts), - do_analyse_to_file(Module,OutFile, - ErlFile,HTML) - end, - {R,State}; - - is_compiled -> - {{file, File},State}; - - reset -> - R = do_reset_main_node(Module, - State#main_state.nodes), - Imported = - remove_imported(Module, - State#main_state.imported), - {R,State#main_state{imported=Imported}} - end, - reply(From, Reply), - main_process_loop(State1); - - {imported,File,_ImportFiles} -> - {Reply,State1} = - case Request of - {analyse, Analysis, Level} -> - analyse_info(Module,State#main_state.imported), - [{Module,Clauses}] = - ets:lookup(?COLLECTION_TABLE,Module), - R = do_analyse(Module, Analysis, Level, Clauses), - {R,State}; - - {analyse_to_file, OutFile, Opts} -> - R = case find_source(File) of - {beam,_BeamFile} -> - {error,no_source_code_found}; - ErlFile -> - Imported = State#main_state.imported, - analyse_info(Module,Imported), - HTML = lists:member(html,Opts), - do_analyse_to_file(Module,OutFile, - ErlFile,HTML) - end, - {R,State}; - - is_compiled -> - {false,State}; - - reset -> - R = do_reset_collection_table(Module), - Imported = - remove_imported(Module, - State#main_state.imported), - {R,State#main_state{imported=Imported}} - end, - reply(From, Reply), - main_process_loop(State1); - - NotLoaded -> - Reply = - case Request of - is_compiled -> - false; - _ -> - {error, {not_cover_compiled,Module}} - end, - Compiled = - case NotLoaded of - unloaded -> - do_clear(Module), - remote_unload(State#main_state.nodes,[Module]), - update_compiled([Module], - State#main_state.compiled); - false -> - State#main_state.compiled + {From, 
{{analyse, Analysis, Level}, Module}} -> + S = try + Loaded = is_loaded(Module, State), + spawn(fun() -> + ?SPAWN_DBG(analyse,{Module,Analysis, Level}), + do_parallel_analysis( + Module, Analysis, Level, + Loaded, From, State) + end), + State + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {{analyse_to_file, OutFile, Opts},Module}} -> + S = try + Loaded = is_loaded(Module, State), + spawn(fun() -> + ?SPAWN_DBG(analyse_to_file, + {Module,OutFile, Opts}), + do_parallel_analysis_to_file( + Module, OutFile, Opts, + Loaded, From, State) + end), + State + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {is_compiled, Module}} -> + S = try is_loaded(Module, State) of + {loaded, File} -> + reply(From,{file, File}), + State; + {imported,_File,_ImportFiles} -> + reply(From,false), + State + catch throw:Reason -> + reply(From,false), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {reset, Module}} -> + S = try + Loaded = is_loaded(Module,State), + R = case Loaded of + {loaded, _File} -> + do_reset_main_node( + Module, State#main_state.nodes); + {imported, _File, _} -> + do_reset_collection_table(Module) end, - reply(From, Reply), - main_process_loop(State#main_state{compiled=Compiled}) - end; + Imported = + remove_imported(Module, + State#main_state.imported), + reply(From, R), + State#main_state{imported=Imported} + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); {'EXIT',Pid,_Reason} -> %% Exit is trapped on the main node only, so this will only happen @@ -807,17 +799,17 @@ main_process_loop(State) -> main_process_loop(State) end. - - - - %%%---------------------------------------------------------------------- %%% cover_server on remote node %%%---------------------------------------------------------------------- init_remote(Starter,MainNode) -> register(?SERVER,self()), - ets:new(?COVER_TABLE, [set, public, named_table]), + ets:new(?COVER_TABLE, [set, public, named_table + %% write_concurrency here makes otp_8270 break :( + %,{write_concurrency, true} + ]), + ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), Starter ! {self(),started}, remote_process_loop(#remote_state{main_node=MainNode}). @@ -843,29 +835,14 @@ remote_process_loop(State) -> remote_process_loop(State); {remote,collect,Module,CollectorPid} -> - MS = - case Module of - '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end); - _ -> ets:fun2ms(fun({M,C}) when M=:=Module -> C end) - end, - AllClauses = lists:flatten(ets:select(?COVER_TABLE,MS)), - - %% Sending clause by clause in order to avoid large lists - lists:foreach( - fun({M,F,A,C,_L}) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps} - end, - AllClauses), - CollectorPid ! done, - remote_reply(State#remote_state.main_node, ok), + self() ! 
{remote,collect,Module,CollectorPid, ?SERVER}; + + {remote,collect,Module,CollectorPid,From} -> + spawn(fun() -> + ?SPAWN_DBG(remote_collect, + {Module, CollectorPid, From}), + do_collect(Module, CollectorPid, From) + end), remote_process_loop(State); {remote,stop} -> @@ -894,6 +871,33 @@ remote_process_loop(State) -> end. +do_collect(Module, CollectorPid, From) -> + AllMods = + case Module of + '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); + _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) + end, + + %% Sending clause by clause in order to avoid large lists + pmap( + fun({_Mod,Clauses}) -> + lists:map(fun(Clause) -> + send_collected_data(Clause, CollectorPid) + end,Clauses) + end,AllMods), + CollectorPid ! done, + remote_reply(From, ok). + +send_collected_data({M,F,A,C,_L}, CollectorPid) -> + Pattern = + {#bump{module=M, function=F, arity=A, clause=C}, '_'}, + Bumps = ets:match_object(?COVER_TABLE, Pattern), + %% Reset + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Bumps), + CollectorPid ! {chunk,Bumps}. reload_originals([{Module,_File}|Compiled]) -> do_reload_original(Module), @@ -932,6 +936,9 @@ load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) -> load_compiled([],Acc) -> Acc. +insert_initial_data([Item|Items]) when is_atom(element(1,Item)) -> + ets:insert(?COVER_CLAUSE_TABLE, Item), + insert_initial_data(Items); insert_initial_data([Item|Items]) -> ets:insert(?COVER_TABLE, Item), insert_initial_data(Items); @@ -957,7 +964,10 @@ remote_start(MainNode) -> case whereis(?SERVER) of undefined -> Starter = self(), - Pid = spawn(fun() -> init_remote(Starter,MainNode) end), + Pid = spawn(fun() -> + ?SPAWN_DBG(remote_start,{MainNode}), + init_remote(Starter,MainNode) + end), Ref = erlang:monitor(process,Pid), Return = receive @@ -972,14 +982,25 @@ remote_start(MainNode) -> {error,{already_started,Pid}} end. -%% Load a set of cover compiled modules on remote nodes -remote_load_compiled(Nodes,Compiled0) -> - Compiled = lists:map(fun get_data_for_remote_loading/1,Compiled0), +%% Load a set of cover compiled modules on remote nodes, +%% We do it ?MAX_MODS modules at a time so that we don't +%% run out of memory on the cover_server node. +-define(MAX_MODS, 10). +remote_load_compiled(Nodes,Compiled) -> + remote_load_compiled(Nodes, Compiled, [], 0). +remote_load_compiled(_Nodes, [], [], _ModNum) -> + ok; +remote_load_compiled(Nodes, Compiled, Acc, ModNum) + when Compiled == []; ModNum == ?MAX_MODS -> lists:foreach( fun(Node) -> - remote_call(Node,{remote,load_compiled,Compiled}) + remote_call(Node,{remote,load_compiled,Acc}) end, - Nodes). + Nodes), + remote_load_compiled(Nodes, Compiled, [], 0); +remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> + remote_load_compiled( + Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1). %% Read all data needed for loading a cover compiled module on a remote node %% Binary is the beam code for the module and InitialTable is the initial @@ -987,15 +1008,15 @@ remote_load_compiled(Nodes,Compiled0) -> get_data_for_remote_loading({Module,File}) -> [{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module), %%! The InitialTable list will be long if the module is big - what to do?? - InitialTable = ets:select(?COVER_TABLE,ms(Module)), - {Module,File,Binary,InitialTable}. + InitialBumps = ets:select(?COVER_TABLE,ms(Module)), + InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module), + + {Module,File,Binary,InitialBumps ++ InitialClauses}. 
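The reworked remote_load_compiled above ships at most ?MAX_MODS (10) modules per remote_call, so neither side has to hold the binaries and initial table data for every cover-compiled module at once. The batching idea in isolation, as a sketch with illustrative names (the diff inlines this logic in remote_load_compiled/4):

%% Apply Fun to List in batches of at most N elements.
in_batches(Fun, List, N) when length(List) =< N ->
    Fun(List);
in_batches(Fun, List, N) ->
    {Batch, Rest} = lists:split(N, List),
    Fun(Batch),
    in_batches(Fun, Rest, N).

A call such as in_batches(fun(Mods) -> remote_call(Node, {remote,load_compiled,Mods}) end, Compiled, 10) corresponds to what the new code does for each node.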
%% Create a match spec which returns the clause info {Module,InitInfo} and %% all #bump keys for the given module with 0 number of calls. ms(Module) -> - ets:fun2ms(fun({Module,InitInfo}) -> - {Module,InitInfo}; - ({Key,_}) when is_record(Key,bump),Key#bump.module=:=Module -> + ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module -> {Key,0} end). @@ -1017,27 +1038,30 @@ remote_reset(Module,Nodes) -> %% Collect data from remote nodes - used for analyse or stop(Node) remote_collect(Module,Nodes,Stop) -> - CollectorPid = spawn(fun() -> collector_proc(length(Nodes)) end), - lists:foreach( - fun(Node) -> - remote_call(Node,{remote,collect,Module,CollectorPid}), - if Stop -> remote_call(Node,{remote,stop}); - true -> ok - end - end, - Nodes). + pmap(fun(Node) -> + ?SPAWN_DBG(remote_collect, + {Module, Nodes, Stop}), + do_collection(Node, Module, Stop) + end, + Nodes). + +do_collection(Node, Module, Stop) -> + CollectorPid = spawn(fun collector_proc/0), + remote_call(Node,{remote,collect,Module,CollectorPid, self()}), + if Stop -> remote_call(Node,{remote,stop}); + true -> ok + end. %% Process which receives chunks of data from remote nodes - either when %% analysing or when stopping cover on the remote nodes. -collector_proc(0) -> - ok; -collector_proc(N) -> +collector_proc() -> + ?SPAWN_DBG(collector_proc, []), receive {chunk,Chunk} -> insert_in_collection_table(Chunk), - collector_proc(N); + collector_proc(); done -> - collector_proc(N-1) + ok end. insert_in_collection_table([{Key,Val}|Chunk]) -> @@ -1052,7 +1076,13 @@ insert_in_collection_table(Key,Val) -> ets:update_counter(?COLLECTION_TABLE, Key,Val); false -> - ets:insert(?COLLECTION_TABLE,{Key,Val}) + %% Make sure that there are no race conditions from ets:member + case ets:insert_new(?COLLECTION_TABLE,{Key,Val}) of + false -> + insert_in_collection_table(Key,Val); + _ -> + ok + end end. @@ -1073,14 +1103,15 @@ analyse_info(Module,Imported) -> export_info(_Module,[]) -> ok; -export_info(Module,Imported) -> - imported_info("Export",Module,Imported). +export_info(_Module,_Imported) -> + %% Do not print that the export includes imported modules + ok. export_info([]) -> ok; -export_info(Imported) -> - AllImportFiles = get_all_importfiles(Imported,[]), - io:format("Export includes data from imported files\n~p\n",[AllImportFiles]). +export_info(_Imported) -> + %% Do not print that the export includes imported modules + ok. get_all_importfiles([{_M,_F,ImportFiles}|Imported],Acc) -> NewAcc = do_get_all_importfiles(ImportFiles,Acc), @@ -1153,14 +1184,14 @@ is_loaded(Module, State) -> {ok, File} -> case code:which(Module) of ?TAG -> {loaded, File}; - _ -> unloaded + _ -> throw(unloaded) end; false -> case get_file(Module,State#main_state.imported) of {ok,File,ImportFiles} -> {imported, File, ImportFiles}; false -> - false + throw(not_loaded) end end. 
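The change to insert_in_collection_table/2 above closes a race between ets:member/2 and ets:insert/2 that can occur when several collector processes write to the shared ?COLLECTION_TABLE: ets:insert_new/2 either claims the key atomically or reports that another process got there first, in which case the update is retried as a counter bump. The same pattern on its own, as a sketch with an illustrative table name:

%% Race-free "increment or create" on a public ets table with several writers.
bump(Tab, Key, Incr) ->
    case ets:member(Tab, Key) of
        true ->
            ets:update_counter(Tab, Key, Incr);
        false ->
            case ets:insert_new(Tab, {Key, Incr}) of
                true  -> Incr;
                false -> bump(Tab, Key, Incr)  % lost the race, retry as update
            end
    end.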
@@ -1259,7 +1290,7 @@ do_compile_beam(Module,Beam) -> %% Store info about all function clauses in database InitInfo = reverse(Vars#vars.init_info), - ets:insert(?COVER_TABLE, {Module, InitInfo}), + ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}), %% Store binary code so it can be loaded on remote nodes ets:insert(?BINARY_TABLE, {Module, Binary}), @@ -1793,9 +1824,8 @@ common_elems(L1, L2) -> %% Collect data for all modules collect(Nodes) -> %% local node - MS = ets:fun2ms(fun({M,C}) when is_atom(M) -> {M,C} end), - AllClauses = ets:select(?COVER_TABLE,MS), - move_modules(AllClauses), + AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE), + pmap(fun move_modules/1,AllClauses), %% remote nodes remote_collect('_',Nodes,false). @@ -1803,7 +1833,7 @@ collect(Nodes) -> %% Collect data for one module collect(Module,Clauses,Nodes) -> %% local node - move_modules([{Module,Clauses}]), + move_modules({Module,Clauses}), %% remote nodes remote_collect(Module,Nodes,false). @@ -1811,12 +1841,9 @@ collect(Module,Clauses,Nodes) -> %% When analysing, the data from the local ?COVER_TABLE is moved to the %% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE -move_modules([{Module,Clauses}|AllClauses]) -> - ets:insert(?COLLECTION_TABLE,{Module,Clauses}), - move_clauses(Clauses), - move_modules(AllClauses); -move_modules([]) -> - ok. +move_modules({Module,Clauses}) -> + ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}), + move_clauses(Clauses). move_clauses([{M,F,A,C,_L}|Clauses]) -> Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, @@ -1855,6 +1882,22 @@ find_source(File0) -> end end. +do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> + analyse_info(Module,State#main_state.imported), + C = case Loaded of + {loaded, _File} -> + [{Module,Clauses}] = + ets:lookup(?COVER_CLAUSE_TABLE,Module), + collect(Module,Clauses,State#main_state.nodes), + Clauses; + _ -> + [{Module,Clauses}] = + ets:lookup(?COLLECTION_CLAUSE_TABLE,Module), + Clauses + end, + R = do_analyse(Module, Analysis, Level, C), + reply(From, R). + %% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error} %% Clauses = [{Module,Function,Arity,Clause,Lines}] do_analyse(Module, Analysis, line, _Clauses) -> @@ -1931,6 +1974,28 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) -> merge_functions([], _MFun, Result) -> Result. +do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> + File = case Loaded of + {loaded, File0} -> + [{Module,Clauses}] = + ets:lookup(?COVER_CLAUSE_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + File0; + {imported, File0, _} -> + File0 + end, + case find_source(File) of + {beam,_BeamFile} -> + reply(From, {error,no_source_code_found}); + ErlFile -> + analyse_info(Module,State#main_state.imported), + HTML = lists:member(html,Opts), + R = do_analyse_to_file(Module,OutFile, + ErlFile,HTML), + reply(From, R) + end. + %% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} %% Module = atom() %% OutFile = ErlFile = string() @@ -2027,6 +2092,42 @@ fill2() -> ".| ". fill3() -> "| ". 
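Since the main loop now spawns do_parallel_analysis/6 and do_parallel_analysis_to_file/6 per request instead of handling each analysis inline, several synchronous analyse calls are serviced concurrently when they are issued from different processes. A caller can exploit this roughly as follows (illustrative sketch, not part of the diff):

%% Analyse several cover-compiled modules concurrently and collect the results.
parallel_analyse(Modules) ->
    Parent = self(),
    Pids = [spawn(fun() ->
                          Parent ! {self(), cover:analyse(M, coverage, module)}
                  end) || M <- Modules],
    [receive {Pid, Result} -> Result end || Pid <- Pids].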
%%%--Export-------------------------------------------------------------- +do_export(Module, OutFile, From, State) -> + case file:open(OutFile,[write,binary,raw]) of + {ok,Fd} -> + Reply = + case Module of + '_' -> + export_info(State#main_state.imported), + collect(State#main_state.nodes), + do_export_table(State#main_state.compiled, + State#main_state.imported, + Fd); + _ -> + export_info(Module,State#main_state.imported), + try is_loaded(Module, State) of + {loaded, File} -> + [{Module,Clauses}] = + ets:lookup(?COVER_CLAUSE_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + do_export_table([{Module,File}],[],Fd); + {imported, File, ImportFiles} -> + %% don't know if I should allow this - + %% export a module which is only imported + Imported = [{Module,File,ImportFiles}], + do_export_table([],Imported,Fd) + catch throw:_ -> + {error,{not_cover_compiled,Module}} + end + end, + file:close(Fd), + reply(From, Reply); + {error,Reason} -> + reply(From, {error, {cant_open_file,OutFile,Reason}}) + + end. + do_export_table(Compiled, Imported, Fd) -> ModList = merge(Imported,Compiled), write_module_data(ModList,Fd). @@ -2043,7 +2144,7 @@ merge([],ModuleList) -> write_module_data([{Module,File}|ModList],Fd) -> write({file,Module,File},Fd), - [Clauses] = ets:lookup(?COLLECTION_TABLE,Module), + [Clauses] = ets:lookup(?COLLECTION_CLAUSE_TABLE,Module), write(Clauses,Fd), ModuleData = ets:match_object(?COLLECTION_TABLE,{#bump{module=Module},'_'}), do_write_module_data(ModuleData,Fd), @@ -2093,7 +2194,7 @@ do_import_to_table(Fd,ImportFile,Imported,DontImport) -> {Module,Clauses} -> case lists:member(Module,DontImport) of false -> - ets:insert(?COLLECTION_TABLE,{Module,Clauses}); + ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}); true -> ok end, @@ -2127,14 +2228,14 @@ do_reset_main_node(Module,Nodes) -> remote_reset(Module,Nodes). do_reset_collection_table(Module) -> - ets:delete(?COLLECTION_TABLE,Module), + ets:delete(?COLLECTION_CLAUSE_TABLE,Module), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). %% do_reset(Module) -> ok %% The reset is done on a per-clause basis to avoid building %% long lists in the case of very large modules do_reset(Module) -> - [{Module,Clauses}] = ets:lookup(?COVER_TABLE, Module), + [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module), do_reset2(Clauses). do_reset2([{M,F,A,C,_L}|Clauses]) -> @@ -2149,10 +2250,19 @@ do_reset2([]) -> ok. do_clear(Module) -> - ets:match_delete(?COVER_TABLE, {Module,'_'}), + ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}), ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). +not_loaded(Module, unloaded, State) -> + do_clear(Module), + remote_unload(State#main_state.nodes,[Module]), + Compiled = update_compiled([Module], + State#main_state.compiled), + State#main_state{ compiled = Compiled }; +not_loaded(_Module,_Else, State) -> + State. + %%%--Div----------------------------------------------------------------- @@ -2180,3 +2290,30 @@ escape_lt_and_gt1([],Acc) -> lists:reverse(Acc); escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). + +pmap(Fun, List) -> + pmap(Fun, List, 20). +pmap(Fun, List, Limit) -> + pmap(Fun, List, [], Limit, 0, []). +pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit -> + Collector = self(), + Pid = spawn_link(fun() -> + ?SPAWN_DBG(pmap,E), + Collector ! 
{res,self(),Fun(E)} + end), + erlang:monitor(process, Pid), + pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc); +pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) -> + receive + {'DOWN', _Ref, process, _, _} -> + pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc); + {res, Pid, Res} -> + pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc]) + end; +pmap(_Fun, [], [], _Limit, 0, Acc) -> + lists:reverse(Acc); +pmap(Fun, [], [], Limit, Cnt, Acc) -> + receive + {'DOWN', _Ref, process, _, _} -> + pmap(Fun, [], [], Limit, Cnt - 1, Acc) + end. diff --git a/lib/tools/test/Makefile b/lib/tools/test/Makefile index 3a59be758a..826a8f3ee8 100644 --- a/lib/tools/test/Makefile +++ b/lib/tools/test/Makefile @@ -39,7 +39,8 @@ INSTALL_PROGS= $(TARGET_FILES) EMAKEFILE=Emakefile -SPEC_FILES= tools.spec tools.spec.win +SPEC_FILES= tools.spec +COVER_FILE = tools.cover # ---------------------------------------------------- # Release directory specification @@ -84,7 +85,8 @@ release_spec: opt release_tests_spec: make_emakefile $(INSTALL_DIR) $(RELSYSDIR) - $(INSTALL_DATA) $(SPEC_FILES) $(EMAKEFILE) $(ERL_FILES) $(RELSYSDIR) + $(INSTALL_DATA) $(SPEC_FILES) $(COVER_FILE) $(EMAKEFILE) \ + $(ERL_FILES) $(RELSYSDIR) chmod -f -R u+w $(RELSYSDIR) @tar cf - *_SUITE_data | (cd $(RELSYSDIR); tar xf -) diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index b9ccd62d0b..67197c80cb 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2009. All Rights Reserved. +%% Copyright Ericsson AB 2001-2010. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,13 +18,16 @@ %% -module(cover_SUITE). --export([all/1]). +-export([all/0, init_per_testcase/2, end_per_testcase/2, + suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2]). + -export([start/1, compile/1, analyse/1, misc/1, stop/1, distribution/1, export_import/1, otp_5031/1, eif/1, otp_5305/1, otp_5418/1, otp_6115/1, otp_7095/1, otp_8188/1, otp_8270/1, otp_8273/1, otp_8340/1]). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %%---------------------------------------------------------------------- %% The following directory structure is assumed: @@ -37,18 +40,55 @@ %% y %%---------------------------------------------------------------------- -all(suite) -> +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> case whereis(cover_server) of undefined -> [start, compile, analyse, misc, stop, distribution, - export_import, - otp_5031, eif, otp_5305, otp_5418, otp_6115, otp_7095, - otp_8188, otp_8270, otp_8273, otp_8340]; + export_import, otp_5031, eif, otp_5305, otp_5418, + otp_6115, otp_7095, otp_8188, otp_8270, otp_8273, + otp_8340]; _pid -> - {skip,"It looks like the test server is running cover. " - "Can't run cover test."} + {skip, + "It looks like the test server is running " + "cover. Can't run cover test."} end. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. 
+ +init_per_testcase(TC, Config) when TC =:= misc; + TC =:= compile; + TC =:= analyse; + TC =:= distribution; + TC =:= otp_5031; + TC =:= stop -> + case code:which(crypto) of + Path when is_list(Path) -> + init_per_testcase(dummy_tc, Config); + _Else -> + {skip, "No crypto file to test with"} + end; +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + %cover:stop(), + ok. + start(suite) -> []; start(Config) when is_list(Config) -> ?line ok = file:set_cwd(?config(data_dir, Config)), @@ -331,6 +371,7 @@ distribution(Config) when is_list(Config) -> %% Check that stop() unloads on all nodes ?line ok = cover:stop(), + ?line timer:sleep(100), %% Give nodes time to unload on slow machines. ?line LocalBeam = code:which(f), ?line N2Beam = rpc:call(N2,code,which,[f]), ?line true = is_unloaded(LocalBeam), @@ -381,8 +422,8 @@ export_import(Config) when is_list(Config) -> ?line {ok,a} = cover:compile(a), ?line ?t:capture_start(), ?line ok = cover:export("all_exported"), - ?line [Text2] = ?t:capture_get(), - ?line "Export includes data from imported files"++_ = lists:flatten(Text2), + ?line [] = ?t:capture_get(), +% ?line "Export includes data from imported files"++_ = lists:flatten(Text2), ?line ?t:capture_stop(), ?line ok = cover:stop(), ?line ok = cover:import("all_exported"), diff --git a/lib/tools/test/cprof_SUITE.erl b/lib/tools/test/cprof_SUITE.erl index e697cc1571..b6f786d33f 100644 --- a/lib/tools/test/cprof_SUITE.erl +++ b/lib/tools/test/cprof_SUITE.erl @@ -41,7 +41,7 @@ -define(config(A,B),config(A,B)). -export([config/2]). -else. --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -endif. -ifdef(debug). @@ -63,14 +63,17 @@ config(data_dir, _) -> "cprof_SUITE_data". -else. %% When run in test server. --export([all/1, init_per_testcase/2, fin_per_testcase/2, not_run/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, + init_per_testcase/2, end_per_testcase/2, + not_run/1]). -export([basic/1, on_load/1, modules/1]). init_per_testcase(_Case, Config) -> ?line Dog=test_server:timetrap(test_server:seconds(30)), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, Config) -> +end_per_testcase(_Case, Config) -> erlang:trace_pattern({'_','_','_'}, false, [local,meta,call_count]), erlang:trace_pattern(on_load, false, [local,meta,call_count]), erlang:trace(all, false, [all]), @@ -78,16 +81,30 @@ fin_per_testcase(_Case, Config) -> test_server:timetrap_cancel(Dog), ok. -all(doc) -> - ["Test the cprof profiling tool."]; -all(suite) -> - case test_server:is_native(?MODULE) of +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + case test_server:is_native(cprof_SUITE) of true -> [not_run]; false -> [basic, on_load, modules] -%, on_and_off, info, -% pause_and_restart, combo] end. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + not_run(Config) when is_list(Config) -> {skipped,"Native code"}. diff --git a/lib/tools/test/emem_SUITE.erl b/lib/tools/test/emem_SUITE.erl index 430fa86c6c..8b38e7d3a8 100644 --- a/lib/tools/test/emem_SUITE.erl +++ b/lib/tools/test/emem_SUITE.erl @@ -24,7 +24,8 @@ receive_and_save_trace/2, send_trace/2]). --export([all/1, init_per_testcase/2, fin_per_testcase/2]). +-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2, + init_per_testcase/2, end_per_testcase/2]). 
-export([live_node/1, 'sparc_sunos5.8_32b_emt2.0'/1, @@ -41,7 +42,7 @@ -include_lib("kernel/include/file.hrl"). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -define(DEFAULT_TIMEOUT, ?t:minutes(5)). @@ -65,23 +66,32 @@ %% %% -all(doc) -> []; -all(suite) -> +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> case is_debug_compiled() of - true -> {skipped, "Not run when debug compiled"}; + true -> {skip, "Not run when debug compiled"}; false -> test_cases() end. - -test_cases() -> - [live_node, - 'sparc_sunos5.8_32b_emt2.0', + +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + +test_cases() -> + [live_node, 'sparc_sunos5.8_32b_emt2.0', 'pc_win2000_32b_emt2.0', 'pc.smp_linux2.2.19pre17_32b_emt2.0', 'powerpc_darwin7.7.0_32b_emt2.0', 'alpha_osf1v5.1_64b_emt2.0', 'sparc_sunos5.8_64b_emt2.0', - 'sparc_sunos5.8_32b_emt1.0', - 'pc_win2000_32b_emt1.0', + 'sparc_sunos5.8_32b_emt1.0', 'pc_win2000_32b_emt1.0', 'powerpc_darwin7.7.0_32b_emt1.0', 'alpha_osf1v5.1_64b_emt1.0', 'sparc_sunos5.8_64b_emt1.0']. @@ -100,7 +110,7 @@ init_per_testcase(Case, Config) when is_list(Config) -> [{watchdog, Dog}, {testcase, Case} | Config]) end. -fin_per_testcase(_Case, Config) when is_list(Config) -> +end_per_testcase(_Case, Config) when is_list(Config) -> ignore_cores:restore(Config), Dog = ?config(watchdog, Config), ?t:timetrap_cancel(Dog), @@ -700,8 +710,8 @@ start_node(Name, Args) -> % stop_node(Node) -> % ?t:stop_node(Node). -is_debug_compiled() -> - is_debug_compiled(erlang:system_info(system_version)). +is_debug_compiled() -> +is_debug_compiled(erlang:system_info(system_version)). is_debug_compiled([$d,$e,$b,$u,$g | _]) -> true; diff --git a/lib/tools/test/eprof_SUITE.erl b/lib/tools/test/eprof_SUITE.erl index 67607c6cf2..16246d5e0b 100644 --- a/lib/tools/test/eprof_SUITE.erl +++ b/lib/tools/test/eprof_SUITE.erl @@ -18,11 +18,31 @@ %% -module(eprof_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). --export([all/1,tiny/1,eed/1,basic/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2,tiny/1,eed/1,basic/1]). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [basic, tiny, eed]. + +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. -all(suite) -> [basic,tiny,eed]. basic(suite) -> []; basic(Config) when is_list(Config) -> diff --git a/lib/tools/test/fprof_SUITE.erl b/lib/tools/test/fprof_SUITE.erl index 1cd9ac7824..78de77526c 100644 --- a/lib/tools/test/fprof_SUITE.erl +++ b/lib/tools/test/fprof_SUITE.erl @@ -18,10 +18,11 @@ %% -module(fprof_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %% Test server framework exports --export([all/1, not_run/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, not_run/1]). %% Test suites -export([stack_seq/1, tail_seq/1, create_file_slow/1, spawn_simple/1, @@ -54,18 +55,33 @@ -all(doc) -> - ["Test the 'fprof' profiling tool."]; -all(suite) -> - case test_server:is_native(?MODULE) of - true -> - [not_run]; +suite() -> [{ct_hooks,[ts_install_cth]}]. 
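The test suites touched by this commit (cover_SUITE, cprof_SUITE, emem_SUITE, eprof_SUITE, fprof_SUITE and the rest below) are all converted from the old test_server callback style (all/1, fin_per_testcase/2) to the common_test callbacks seen above. The minimal skeleton they share looks like this (sketch; module and case names are illustrative):

-module(example_SUITE).
-include_lib("test_server/include/test_server.hrl").
-export([suite/0, all/0, groups/0,
         init_per_suite/1, end_per_suite/1,
         init_per_group/2, end_per_group/2,
         init_per_testcase/2, end_per_testcase/2]).
-export([my_case/1]).

suite() -> [{ct_hooks,[ts_install_cth]}].
all() -> [my_case].
groups() -> [].
init_per_suite(Config) -> Config.
end_per_suite(_Config) -> ok.
init_per_group(_GroupName, Config) -> Config.
end_per_group(_GroupName, Config) -> Config.
init_per_testcase(_Case, Config) -> Config.
end_per_testcase(_Case, _Config) -> ok.

my_case(Config) when is_list(Config) -> ok.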
+ +all() -> + case test_server:is_native(fprof_SUITE) of + true -> [not_run]; false -> [stack_seq, tail_seq, create_file_slow, spawn_simple, imm_tail_seq, imm_create_file_slow, imm_compile, cpu_create_file_slow] end. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + not_run(Config) when is_list(Config) -> {skipped, "Native code"}. diff --git a/lib/tools/test/ignore_cores.erl b/lib/tools/test/ignore_cores.erl index 8902a469ef..8b1ac0fe6c 120000..100644 --- a/lib/tools/test/ignore_cores.erl +++ b/lib/tools/test/ignore_cores.erl @@ -1 +1,158 @@ -../../../erts/test/ignore_cores.erl
\ No newline at end of file +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File : ignore_cores.erl +%%% Author : Rickard Green <[email protected]> +%%% Description : +%%% +%%% Created : 11 Feb 2008 by Rickard Green <[email protected]> +%%%------------------------------------------------------------------- + +-module(ignore_cores). + +-include_lib("test_server/include/test_server.hrl"). + +-export([init/1, fini/1, setup/3, setup/4, restore/1, dir/1]). + +-record(ignore_cores, {org_cwd, + org_path, + org_pwd_env, + ign_dir = false, + cores_dir = false}). + +%% +%% Takes a testcase config +%% + +init(Config) -> + {ok, OrgCWD} = file:get_cwd(), + [{ignore_cores, + #ignore_cores{org_cwd = OrgCWD, + org_path = code:get_path(), + org_pwd_env = os:getenv("PWD")}} + | lists:keydelete(ignore_cores, 1, Config)]. + +fini(Config) -> + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD} = ?config(ignore_cores, Config), + ok = file:set_cwd(OrgCWD), + true = code:set_path(OrgPath), + case OrgPWD of + false -> ok; + _ -> true = os:putenv("PWD", OrgPWD) + end, + lists:keydelete(ignore_cores, 1, Config). + +setup(Suite, Testcase, Config) -> + setup(Suite, Testcase, Config, false). + +setup(Suite, Testcase, Config, SetCwd) when is_atom(Suite), + is_atom(Testcase), + is_list(Config) -> + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD} = ?config(ignore_cores, Config), + Path = lists:map(fun (".") -> OrgCWD; (Dir) -> Dir end, OrgPath), + true = code:set_path(Path), + PrivDir = ?config(priv_dir, Config), + IgnDir = filename:join([PrivDir, + atom_to_list(Suite) + ++ "_" + ++ atom_to_list(Testcase) + ++ "_wd"]), + ok = file:make_dir(IgnDir), + case SetCwd of + false -> + ok; + _ -> + ok = file:set_cwd(IgnDir), + OrgPWD = case os:getenv("PWD") of + false -> false; + PWD -> + os:putenv("PWD", IgnDir), + PWD + end + end, + ok = file:write_file(filename:join([IgnDir, "ignore_core_files"]), <<>>), + %% cores are dumped in /cores on MacOS X + CoresDir = case {?t:os_type(), filelib:is_dir("/cores")} of + {{unix,darwin}, true} -> + filelib:fold_files("/cores", + "^core.*$", + false, + fun (C,Cs) -> [C|Cs] end, + []); + _ -> + false + end, + lists:keyreplace(ignore_cores, + 1, + Config, + {ignore_cores, + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD, + ign_dir = IgnDir, + cores_dir = CoresDir}}). + +restore(Config) -> + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD, + ign_dir = IgnDir, + cores_dir = CoresDir} = ?config(ignore_cores, Config), + try + case CoresDir of + false -> + ok; + _ -> + %% Move cores dumped by these testcases in /cores + %% to cwd. 
+ lists:foreach(fun (C) -> + case lists:member(C, CoresDir) of + true -> ok; + _ -> + Dst = filename:join( + [IgnDir, + filename:basename(C)]), + {ok, _} = file:copy(C, Dst), + file:delete(C) + end + end, + filelib:fold_files("/cores", + "^core.*$", + false, + fun (C,Cs) -> [C|Cs] end, + [])) + end + after + catch file:set_cwd(OrgCWD), + catch code:set_path(OrgPath), + case OrgPWD of + false -> ok; + _ -> catch os:putenv("PWD", OrgPWD) + end + end. + + +dir(Config) -> + #ignore_cores{ign_dir = Dir} = ?config(ignore_cores, Config), + Dir. diff --git a/lib/tools/test/instrument_SUITE.erl b/lib/tools/test/instrument_SUITE.erl index da5930e015..6800a94f94 100644 --- a/lib/tools/test/instrument_SUITE.erl +++ b/lib/tools/test/instrument_SUITE.erl @@ -18,22 +18,43 @@ %% -module(instrument_SUITE). --export([all/1,init_per_testcase/2,fin_per_testcase/2]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, + init_per_testcase/2,end_per_testcase/2]). -export(['+Mim true'/1, '+Mis true'/1]). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). init_per_testcase(_Case, Config) -> ?line Dog=?t:timetrap(10000), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, Config) -> +end_per_testcase(_Case, Config) -> Dog=?config(watchdog, Config), ?t:timetrap_cancel(Dog), ok. -all(suite) -> ['+Mim true', '+Mis true']. +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + ['+Mim true', '+Mis true']. + +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + '+Mim true'(doc) -> ["Check that memory data can be read and processed"]; '+Mim true'(suite) -> []; diff --git a/lib/tools/test/lcnt_SUITE.erl b/lib/tools/test/lcnt_SUITE.erl index e6866f721d..21383fa544 100644 --- a/lib/tools/test/lcnt_SUITE.erl +++ b/lib/tools/test/lcnt_SUITE.erl @@ -18,10 +18,10 @@ %% -module(lcnt_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %% Test server specific exports --export([all/1]). +-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2]). -export([init_per_suite/1, end_per_suite/1]). -export([init_per_testcase/2, end_per_testcase/2]). @@ -51,10 +51,21 @@ end_per_testcase(_Case, Config) -> ?t:timetrap_cancel(Dog), ok. -all(suite) -> - % Test cases +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> [load_v1, conflicts, locations, swap_keys]. +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + %%---------------------------------------------------------------------- %% Tests %%---------------------------------------------------------------------- diff --git a/lib/tools/test/make_SUITE.erl b/lib/tools/test/make_SUITE.erl index 72dccdb465..524ed04af4 100644 --- a/lib/tools/test/make_SUITE.erl +++ b/lib/tools/test/make_SUITE.erl @@ -18,12 +18,13 @@ %% -module(make_SUITE). --export([all/1, make_all/1, make_files/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, make_all/1, make_files/1]). -export([otp_6057_init/1, otp_6057_a/1, otp_6057_b/1, otp_6057_c/1, otp_6057_end/1]). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -include_lib("kernel/include/file.hrl"). @@ -35,9 +36,27 @@ %% that the file :"test5.erl" shall be compiled with the 'S' option, %% i.e. 
produce "test5.S" instead of "test5.<objext>" -all(suite) -> [make_all, make_files, - {conf, otp_6057_init, - [otp_6057_a,otp_6057_b,otp_6057_c], otp_6057_end}]. +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [make_all, make_files, {group, otp_6057}]. + +groups() -> + [{otp_6057,[],[otp_6057_a, otp_6057_b, + otp_6057_c]}]. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + otp_6057_init(Config). + +end_per_group(_GroupName, Config) -> + otp_6057_end(Config). + test_files() -> ["test1", "test2", "test3", "test4"]. @@ -146,7 +165,7 @@ otp_6057_init(Config) when is_list(Config) -> otp_6057_a(suite) -> []; otp_6057_a(doc) -> - ["Test that make:all/0 looks for object file in correct place"]; + ["Test that make:all/0, suite/0 looks for object file in correct place"]; otp_6057_a(Config) when is_list(Config) -> ?line PrivDir = ?config(priv_dir, Config), diff --git a/lib/tools/test/tools.cover b/lib/tools/test/tools.cover new file mode 100644 index 0000000000..1053be4f0f --- /dev/null +++ b/lib/tools/test/tools.cover @@ -0,0 +1,2 @@ +{incl_app,tools,details}. + diff --git a/lib/tools/test/tools.spec b/lib/tools/test/tools.spec index 93d5930472..1b07cf1cb6 100644 --- a/lib/tools/test/tools.spec +++ b/lib/tools/test/tools.spec @@ -1 +1 @@ -{topcase, {dir, "../tools_test"}}. +{suites,"../tools_test",all}. diff --git a/lib/tools/test/tools_SUITE.erl b/lib/tools/test/tools_SUITE.erl index 6b952f10ab..69dfab8fe7 100644 --- a/lib/tools/test/tools_SUITE.erl +++ b/lib/tools/test/tools_SUITE.erl @@ -18,28 +18,45 @@ %% -module(tools_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %% Default timetrap timeout (set in init_per_testcase). -define(default_timeout, ?t:minutes(1)). -define(application, tools). %% Test server specific exports --export([all/1]). --export([init_per_testcase/2, fin_per_testcase/2]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2]). +-export([init_per_testcase/2, end_per_testcase/2]). %% Test cases must be exported. -export([app_test/1]). -all(doc) -> - []; -all(suite) -> +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> [app_test]. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + init_per_testcase(_Case, Config) -> ?line Dog=test_server:timetrap(?default_timeout), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, Config) -> +end_per_testcase(_Case, Config) -> Dog=?config(watchdog, Config), test_server:timetrap_cancel(Dog), ok. diff --git a/lib/tools/test/xref_SUITE.erl b/lib/tools/test/xref_SUITE.erl index f9d062ef85..1fad070b67 100644 --- a/lib/tools/test/xref_SUITE.erl +++ b/lib/tools/test/xref_SUITE.erl @@ -29,28 +29,29 @@ -define(privdir, "xref_SUITE_priv"). -define(copydir, "xref_SUITE_priv/datacopy"). -else. --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -define(format(S, A), ok). -define(datadir, ?config(data_dir, Conf)). -define(privdir, ?config(priv_dir, Conf)). -define(copydir, ?config(copy_dir, Conf)). -endif. --export([all/1, init/1, fini/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, init/1, fini/1]). --export([xref/1, +-export([ addrem/1, convert/1, intergraph/1, lines/1, loops/1, no_data/1, modules/1]). 
--export([files/1, +-export([ add/1, default/1, info/1, lib/1, read/1, read2/1, remove/1, replace/1, update/1, deprecated/1, trycatch/1, abstract_modules/1, fun_mfa/1, qlc/1]). --export([analyses/1, +-export([ analyze/1, basic/1, md/1, q/1, variables/1, unused_locals/1]). --export([misc/1, +-export([ format_error/1, otp_7423/1, otp_7831/1]). -import(lists, [append/2, flatten/1, keysearch/3, member/2, sort/1, usort/1]). @@ -59,7 +60,7 @@ range/1, relation_to_family/1, set/1, to_external/1, union/2]). --export([init_per_testcase/2, fin_per_testcase/2]). +-export([init_per_testcase/2, end_per_testcase/2]). %% Checks some info counters of a server and some relations that should hold. -export([check_count/1, check_state/1]). @@ -68,8 +69,36 @@ -include_lib("tools/src/xref.hrl"). -all(suite) -> - {conf, init, [xref, files, analyses, misc], fini}. +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [{group, xref}, {group, files}, {group, analyses}, + {group, misc}]. + +groups() -> + [{xref, [], + [addrem, convert, intergraph, lines, loops, no_data, + modules]}, + {files, [], + [add, default, info, lib, read, read2, remove, replace, + update, deprecated, trycatch, abstract_modules, fun_mfa, + qlc]}, + {analyses, [], + [analyze, basic, md, q, variables, unused_locals]}, + {misc, [], [format_error, otp_7423, otp_7831]}]. + +init_per_suite(Config) -> + init(Config). + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + init(Conf) when is_list(Conf) -> DataDir = ?datadir, @@ -91,13 +120,11 @@ init_per_testcase(_Case, Config) -> Dog=?t:timetrap(?t:minutes(2)), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, _Config) -> +end_per_testcase(_Case, _Config) -> Dog=?config(watchdog, _Config), test_server:timetrap_cancel(Dog), ok. -xref(suite) -> - [addrem, convert, intergraph, lines, loops, no_data, modules]. %% Seems a bit short... addrem(suite) -> []; @@ -680,9 +707,6 @@ modules(Conf) when is_list(Conf) -> ?line ok = xref_base:delete(S), ok. -files(suite) -> - [add, default, info, lib, read, read2, remove, replace, update, - deprecated, trycatch, abstract_modules, fun_mfa, qlc]. add(suite) -> []; add(doc) -> ["Add modules, applications, releases, directories"]; @@ -1788,8 +1812,6 @@ qlc(Conf) when is_list(Conf) -> ok. -analyses(suite) -> - [analyze, basic, md, q, variables, unused_locals]. analyze(suite) -> []; analyze(doc) -> ["Simple analyses"]; @@ -2312,8 +2334,6 @@ unused_locals(Conf) when is_list(Conf) -> ?line ok = file:delete(Beam2), ok. -misc(suite) -> - [format_error, otp_7423, otp_7831]. format_error(suite) -> []; format_error(doc) -> ["Format error messages"]; |