From 701918817ce0e9ea3d49b54d250066da76095010 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Tue, 25 Jan 2011 13:24:55 +0100 Subject: Update remote loading to only load a certain number of modules at a time to prevent memory usage explosion --- lib/tools/src/cover.erl | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index c4d1bd1d2f..4cc78403e1 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -972,14 +972,25 @@ remote_start(MainNode) -> {error,{already_started,Pid}} end. -%% Load a set of cover compiled modules on remote nodes -remote_load_compiled(Nodes,Compiled0) -> - Compiled = lists:map(fun get_data_for_remote_loading/1,Compiled0), +%% Load a set of cover compiled modules on remote nodes, +%% We do it ?MAX_MODS modules at a time so that we don't +%% run out of memory on the cover_server node. +-define(MAX_MODS, 10). +remote_load_compiled(Nodes,Compiled) -> + remote_load_compiled(Nodes, Compiled, [], 0). +remote_load_compiled(_Nodes, [], [], _ModNum) -> + ok; +remote_load_compiled(Nodes, Compiled, Acc, ModNum) + when Compiled == []; ModNum == ?MAX_MODS -> lists:foreach( fun(Node) -> - remote_call(Node,{remote,load_compiled,Compiled}) + remote_call(Node,{remote,load_compiled,Acc}) end, - Nodes). + Nodes), + remote_load_compiled(Nodes, Compiled, [], 0); +remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> + remote_load_compiled( + Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1). %% Read all data needed for loading a cover compiled module on a remote node %% Binary is the beam code for the module and InitialTable is the initial @@ -993,8 +1004,8 @@ get_data_for_remote_loading({Module,File}) -> %% Create a match spec which returns the clause info {Module,InitInfo} and %% all #bump keys for the given module with 0 number of calls. ms(Module) -> - ets:fun2ms(fun({Module,InitInfo}) -> - {Module,InitInfo}; + ets:fun2ms(fun({Mod,InitInfo}) -> + {Mod,InitInfo}; ({Key,_}) when is_record(Key,bump),Key#bump.module=:=Module -> {Key,0} end). -- cgit v1.2.3 From 8f3148df7809a67a8ebf7a9762f4c85c2611a9f3 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Wed, 26 Jan 2011 20:09:09 +0100 Subject: Refactor cover to prepare it for making analysis parallel --- lib/tools/src/cover.erl | 321 ++++++++++++++++++++++++++---------------------- 1 file changed, 171 insertions(+), 150 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 4cc78403e1..128aa84831 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -500,6 +500,8 @@ remote_call(Node,Request) -> Return end. +remote_reply(Proc,Reply) when is_pid(Proc) -> + Proc ! {?SERVER,Reply}; remote_reply(MainNode,Reply) -> {?SERVER,MainNode} ! {?SERVER,Reply}. 
@@ -593,40 +595,10 @@ main_process_loop(State) -> end; {From, {export,OutFile,Module}} -> - case file:open(OutFile,[write,binary,raw]) of - {ok,Fd} -> - Reply = - case Module of - '_' -> - export_info(State#main_state.imported), - collect(State#main_state.nodes), - do_export_table(State#main_state.compiled, - State#main_state.imported, - Fd); - _ -> - export_info(Module,State#main_state.imported), - case is_loaded(Module, State) of - {loaded, File} -> - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - do_export_table([{Module,File}],[],Fd); - {imported, File, ImportFiles} -> - %% don't know if I should allow this - - %% export a module which is only imported - Imported = [{Module,File,ImportFiles}], - do_export_table([],Imported,Fd); - _NotLoaded -> - {error,{not_cover_compiled,Module}} - end - end, - file:close(Fd), - reply(From, Reply); - {error,Reason} -> - reply(From, {error, {cant_open_file,OutFile,Reason}}) - - end, + spawn(fun() -> + io:format(user, "EXPORTING: ~p to ~p~n",[Module, OutFile]), + do_export(Module, OutFile, From, State) + end), main_process_loop(State); {From, {import,File}} -> @@ -692,107 +664,95 @@ main_process_loop(State) -> unregister(?SERVER), reply(From, ok); - {From, {Request, Module}} -> - case is_loaded(Module, State) of - {loaded, File} -> - {Reply,State1} = - case Request of - {analyse, Analysis, Level} -> - analyse_info(Module,State#main_state.imported), + {From, {{analyse, Analysis, Level}, Module}} -> + S = try + Loaded = is_loaded(Module, State), + analyse_info(Module,State#main_state.imported), + C = case Loaded of + {loaded, _File} -> [{Module,Clauses}] = ets:lookup(?COVER_TABLE,Module), collect(Module,Clauses,State#main_state.nodes), - R = do_analyse(Module, Analysis, Level, Clauses), - {R,State}; - - {analyse_to_file, OutFile, Opts} -> - R = case find_source(File) of - {beam,_BeamFile} -> - {error,no_source_code_found}; - ErlFile -> - Imported = State#main_state.imported, - analyse_info(Module,Imported), - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - HTML = lists:member(html,Opts), - do_analyse_to_file(Module,OutFile, - ErlFile,HTML) - end, - {R,State}; - - is_compiled -> - {{file, File},State}; - - reset -> - R = do_reset_main_node(Module, - State#main_state.nodes), - Imported = - remove_imported(Module, - State#main_state.imported), - {R,State#main_state{imported=Imported}} - end, - reply(From, Reply), - main_process_loop(State1); - - {imported,File,_ImportFiles} -> - {Reply,State1} = - case Request of - {analyse, Analysis, Level} -> - analyse_info(Module,State#main_state.imported), + Clauses; + _ -> [{Module,Clauses}] = ets:lookup(?COLLECTION_TABLE,Module), - R = do_analyse(Module, Analysis, Level, Clauses), - {R,State}; - - {analyse_to_file, OutFile, Opts} -> - R = case find_source(File) of - {beam,_BeamFile} -> - {error,no_source_code_found}; - ErlFile -> - Imported = State#main_state.imported, - analyse_info(Module,Imported), - HTML = lists:member(html,Opts), - do_analyse_to_file(Module,OutFile, - ErlFile,HTML) - end, - {R,State}; - - is_compiled -> - {false,State}; - - reset -> - R = do_reset_collection_table(Module), - Imported = - remove_imported(Module, - State#main_state.imported), - {R,State#main_state{imported=Imported}} + Clauses end, - reply(From, Reply), - main_process_loop(State1); - - NotLoaded -> - Reply = - case Request of - is_compiled -> - false; - _ -> - {error, {not_cover_compiled,Module}} - end, - 
Compiled = - case NotLoaded of - unloaded -> - do_clear(Module), - remote_unload(State#main_state.nodes,[Module]), - update_compiled([Module], - State#main_state.compiled); - false -> - State#main_state.compiled + R = do_analyse(Module, Analysis, Level, C), + reply(From, R), + State + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {{analyse_to_file, OutFile, Opts},Module}} -> + S = try + Loaded = is_loaded(Module, State), + File = case Loaded of + {loaded, File0} -> + [{Module,Clauses}] = + ets:lookup(?COVER_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + File0; + {imported, File0, _} -> + File0 + end, + case find_source(File) of + {beam,_BeamFile} -> + reply(From, {error,no_source_code_found}), + State; + ErlFile -> + analyse_info(Module,State#main_state.imported), + HTML = lists:member(html,Opts), + R = do_analyse_to_file(Module,OutFile, + ErlFile,HTML), + reply(From, R), + State + end + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {is_compiled, Module}} -> + S = try is_loaded(Module, State) of + {loaded, File} -> + reply(From,{file, File}), + State; + {imported,_File,_ImportFiles} -> + reply(From,false), + State + catch throw:Reason -> + reply(From,false), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {reset, Module}} -> + S = try + Loaded = is_loaded(Module,State), + R = case Loaded of + {loaded, _File} -> + do_reset_main_node( + Module, State#main_state.nodes); + {imported, _File, _} -> + do_reset_collection_table(Module) end, - reply(From, Reply), - main_process_loop(State#main_state{compiled=Compiled}) - end; + Imported = + remove_imported(Module, + State#main_state.imported), + reply(From, R), + State#main_state{imported=Imported} + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); {'EXIT',Pid,_Reason} -> %% Exit is trapped on the main node only, so this will only happen @@ -807,10 +767,6 @@ main_process_loop(State) -> main_process_loop(State) end. - - - - %%%---------------------------------------------------------------------- %%% cover_server on remote node %%%---------------------------------------------------------------------- @@ -843,6 +799,10 @@ remote_process_loop(State) -> remote_process_loop(State); {remote,collect,Module,CollectorPid} -> + self() ! {remote,collect,Module,CollectorPid, ?SERVER}; + + {remote,collect,Module,CollectorPid,From} -> +% spawn(?MODULE, do_remote_collect, [Module, CollectorPid]), MS = case Module of '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end); @@ -865,7 +825,7 @@ remote_process_loop(State) -> end, AllClauses), CollectorPid ! done, - remote_reply(State#remote_state.main_node, ok), + remote_reply(From, ok), remote_process_loop(State); {remote,stop} -> @@ -1028,27 +988,37 @@ remote_reset(Module,Nodes) -> %% Collect data from remote nodes - used for analyse or stop(Node) remote_collect(Module,Nodes,Stop) -> - CollectorPid = spawn(fun() -> collector_proc(length(Nodes)) end), - lists:foreach( - fun(Node) -> - remote_call(Node,{remote,collect,Module,CollectorPid}), - if Stop -> remote_call(Node,{remote,stop}); - true -> ok - end - end, - Nodes). 
+ Pids = lists:map( + fun(Node) -> + spawn(fun() -> + do_collection(Node, Module, Stop) + end) + end, + Nodes), + RefsNPids = [{erlang:monitor(process, Pid),Pid} || Pid <- Pids], + lists:foreach(fun({Ref,Pid}) -> + receive + {'DOWN', Ref, process, Pid, _} -> + ok + end + end,RefsNPids). + +do_collection(Node, Module, Stop) -> + CollectorPid = spawn(fun collector_proc/0), + remote_call(Node,{remote,collect,Module,CollectorPid, self()}), + if Stop -> remote_call(Node,{remote,stop}); + true -> ok + end. %% Process which receives chunks of data from remote nodes - either when %% analysing or when stopping cover on the remote nodes. -collector_proc(0) -> - ok; -collector_proc(N) -> +collector_proc() -> receive {chunk,Chunk} -> insert_in_collection_table(Chunk), - collector_proc(N); + collector_proc(); done -> - collector_proc(N-1) + ok end. insert_in_collection_table([{Key,Val}|Chunk]) -> @@ -1063,7 +1033,13 @@ insert_in_collection_table(Key,Val) -> ets:update_counter(?COLLECTION_TABLE, Key,Val); false -> - ets:insert(?COLLECTION_TABLE,{Key,Val}) + %% Make sure that there are no race conditions from ets:member + case ets:insert_new(?COLLECTION_TABLE,{Key,Val}) of + false -> + insert_in_collection_table(Key,Val); + _ -> + ok + end end. @@ -1164,14 +1140,14 @@ is_loaded(Module, State) -> {ok, File} -> case code:which(Module) of ?TAG -> {loaded, File}; - _ -> unloaded + _ -> throw(unloaded) end; false -> case get_file(Module,State#main_state.imported) of {ok,File,ImportFiles} -> {imported, File, ImportFiles}; false -> - false + throw(not_loaded) end end. @@ -2038,6 +2014,42 @@ fill2() -> ".| ". fill3() -> "| ". %%%--Export-------------------------------------------------------------- +do_export(Module, OutFile, From, State) -> + case file:open(OutFile,[write,binary,raw]) of + {ok,Fd} -> + Reply = + case Module of + '_' -> + export_info(State#main_state.imported), + collect(State#main_state.nodes), + do_export_table(State#main_state.compiled, + State#main_state.imported, + Fd); + _ -> + export_info(Module,State#main_state.imported), + try is_loaded(Module, State) of + {loaded, File} -> + [{Module,Clauses}] = + ets:lookup(?COVER_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + do_export_table([{Module,File}],[],Fd); + {imported, File, ImportFiles} -> + %% don't know if I should allow this - + %% export a module which is only imported + Imported = [{Module,File,ImportFiles}], + do_export_table([],Imported,Fd) + catch throw:_ -> + {error,{not_cover_compiled,Module}} + end + end, + file:close(Fd), + reply(From, Reply); + {error,Reason} -> + reply(From, {error, {cant_open_file,OutFile,Reason}}) + + end. + do_export_table(Compiled, Imported, Fd) -> ModList = merge(Imported,Compiled), write_module_data(ModList,Fd). @@ -2164,6 +2176,15 @@ do_clear(Module) -> ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). +not_loaded(Module, unloaded, State) -> + do_clear(Module), + remote_unload(State#main_state.nodes,[Module]), + Compiled = update_compiled([Module], + State#main_state.compiled), + State#main_state{ compiled = Compiled }; +not_loaded(_Module,_Else, State) -> + State. + %%%--Div----------------------------------------------------------------- -- cgit v1.2.3 From 3a4ed77a4b8d29ec6889e60a56e440c7db440628 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Wed, 26 Jan 2011 20:21:34 +0100 Subject: Update cover to allow multiple analyse and analyze_to_file calls at the same time. 
For each call a seperate process will be spawned to handle the request. --- lib/tools/src/cover.erl | 86 ++++++++++++++++++++++++++++--------------------- 1 file changed, 49 insertions(+), 37 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 128aa84831..b8cb8e58cf 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -596,7 +596,6 @@ main_process_loop(State) -> {From, {export,OutFile,Module}} -> spawn(fun() -> - io:format(user, "EXPORTING: ~p to ~p~n",[Module, OutFile]), do_export(Module, OutFile, From, State) end), main_process_loop(State); @@ -667,20 +666,11 @@ main_process_loop(State) -> {From, {{analyse, Analysis, Level}, Module}} -> S = try Loaded = is_loaded(Module, State), - analyse_info(Module,State#main_state.imported), - C = case Loaded of - {loaded, _File} -> - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module,Clauses,State#main_state.nodes), - Clauses; - _ -> - [{Module,Clauses}] = - ets:lookup(?COLLECTION_TABLE,Module), - Clauses - end, - R = do_analyse(Module, Analysis, Level, C), - reply(From, R), + spawn(fun() -> + do_parallel_analysis( + Module, Analysis, Level, + Loaded, From, State) + end), State catch throw:Reason -> reply(From,{error, {not_cover_compiled,Module}}), @@ -691,28 +681,12 @@ main_process_loop(State) -> {From, {{analyse_to_file, OutFile, Opts},Module}} -> S = try Loaded = is_loaded(Module, State), - File = case Loaded of - {loaded, File0} -> - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - File0; - {imported, File0, _} -> - File0 - end, - case find_source(File) of - {beam,_BeamFile} -> - reply(From, {error,no_source_code_found}), - State; - ErlFile -> - analyse_info(Module,State#main_state.imported), - HTML = lists:member(html,Opts), - R = do_analyse_to_file(Module,OutFile, - ErlFile,HTML), - reply(From, R), - State - end + spawn(fun() -> + do_parallel_analysis_to_file( + Module, OutFile, Opts, + Loaded, From, State) + end), + State catch throw:Reason -> reply(From,{error, {not_cover_compiled,Module}}), not_loaded(Module, Reason, State) @@ -1842,6 +1816,22 @@ find_source(File0) -> end end. +do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> + analyse_info(Module,State#main_state.imported), + C = case Loaded of + {loaded, _File} -> + [{Module,Clauses}] = + ets:lookup(?COVER_TABLE,Module), + collect(Module,Clauses,State#main_state.nodes), + Clauses; + _ -> + [{Module,Clauses}] = + ets:lookup(?COLLECTION_TABLE,Module), + Clauses + end, + R = do_analyse(Module, Analysis, Level, C), + reply(From, R). + %% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error} %% Clauses = [{Module,Function,Arity,Clause,Lines}] do_analyse(Module, Analysis, line, _Clauses) -> @@ -1918,6 +1908,28 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) -> merge_functions([], _MFun, Result) -> Result. +do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> + File = case Loaded of + {loaded, File0} -> + [{Module,Clauses}] = + ets:lookup(?COVER_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + File0; + {imported, File0, _} -> + File0 + end, + case find_source(File) of + {beam,_BeamFile} -> + reply(From, {error,no_source_code_found}); + ErlFile -> + analyse_info(Module,State#main_state.imported), + HTML = lists:member(html,Opts), + R = do_analyse_to_file(Module,OutFile, + ErlFile,HTML), + reply(From, R) + end. 
+ %% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} %% Module = atom() %% OutFile = ErlFile = string() -- cgit v1.2.3 From a5bb09b0a8a6aac4e47c19b5abdbbb8f54e14133 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Wed, 26 Jan 2011 20:26:57 +0100 Subject: Make the call to cover parallel so that the test_server takes advantage of the new cool parallel cover features. --- lib/test_server/src/test_server.erl | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl index ee121e5bb6..1045fa9a9b 100644 --- a/lib/test_server/src/test_server.erl +++ b/lib/test_server/src/test_server.erl @@ -470,7 +470,7 @@ cover_analyse(Analyse,Modules) -> overview -> fun(_) -> undefined end end, - R = lists:map( + R = pmap( fun(M) -> case cover:analyse(M,module) of {ok,{M,{Cov,NotCov}}} -> @@ -486,6 +486,23 @@ cover_analyse(Analyse,Modules) -> stick_all_sticky(node(),Sticky), R. +pmap(Fun,List) -> + Collector = self(), + Pids = lists:map(fun(E) -> + spawn(fun() -> + Collector ! {res,self(),Fun(E)} + end) + end, List), + lists:map(fun(Pid) -> + receive + {res,Pid,Res} -> + Res + end + end, Pids). + + + + unstick_all_sticky(Node) -> lists:filter( -- cgit v1.2.3 From 976ce69a8ada40e6b2e4664fc48d5f430e4ea1f2 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Thu, 27 Jan 2011 11:14:07 +0100 Subject: Remove io printout warnings when exporting an imported module --- lib/tools/src/cover.erl | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index b8cb8e58cf..689a702d52 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -1034,14 +1034,15 @@ analyse_info(Module,Imported) -> export_info(_Module,[]) -> ok; -export_info(Module,Imported) -> - imported_info("Export",Module,Imported). +export_info(_Module,_Imported) -> + %% Do not print that the export includes imported modules + ok. export_info([]) -> ok; -export_info(Imported) -> - AllImportFiles = get_all_importfiles(Imported,[]), - io:format("Export includes data from imported files\n~p\n",[AllImportFiles]). +export_info(_Imported) -> + %% Do not print that the export includes imported modules + ok. get_all_importfiles([{_M,_F,ImportFiles}|Imported],Acc) -> NewAcc = do_get_all_importfiles(ImportFiles,Acc), -- cgit v1.2.3 From a74c4f6e655543c70179b8dc507d51e5603c7e30 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Thu, 27 Jan 2011 12:08:46 +0100 Subject: Update remote collect to handle multiple requests at once --- lib/tools/src/cover.erl | 53 ++++++++++++++++++++++++++----------------------- 1 file changed, 28 insertions(+), 25 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 689a702d52..48ab91db9c 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -776,30 +776,9 @@ remote_process_loop(State) -> self() ! 
{remote,collect,Module,CollectorPid, ?SERVER}; {remote,collect,Module,CollectorPid,From} -> -% spawn(?MODULE, do_remote_collect, [Module, CollectorPid]), - MS = - case Module of - '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end); - _ -> ets:fun2ms(fun({M,C}) when M=:=Module -> C end) - end, - AllClauses = lists:flatten(ets:select(?COVER_TABLE,MS)), - - %% Sending clause by clause in order to avoid large lists - lists:foreach( - fun({M,F,A,C,_L}) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps} - end, - AllClauses), - CollectorPid ! done, - remote_reply(From, ok), + spawn(fun() -> + do_collect(Module, CollectorPid, From) + end), remote_process_loop(State); {remote,stop} -> @@ -828,6 +807,30 @@ remote_process_loop(State) -> end. +do_collect(Module, CollectorPid, From) -> + MS = + case Module of + '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end); + _ -> ets:fun2ms(fun({M,C}) when M=:=Module -> C end) + end, + AllClauses = lists:flatten(ets:select(?COVER_TABLE,MS)), + + %% Sending clause by clause in order to avoid large lists + lists:foreach( + fun({M,F,A,C,_L}) -> + Pattern = + {#bump{module=M, function=F, arity=A, clause=C}, '_'}, + Bumps = ets:match_object(?COVER_TABLE, Pattern), + %% Reset + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Bumps), + CollectorPid ! {chunk,Bumps} + end, + AllClauses), + CollectorPid ! done, + remote_reply(From, ok). reload_originals([{Module,_File}|Compiled]) -> do_reload_original(Module), @@ -938,7 +941,7 @@ get_data_for_remote_loading({Module,File}) -> %% Create a match spec which returns the clause info {Module,InitInfo} and %% all #bump keys for the given module with 0 number of calls. ms(Module) -> - ets:fun2ms(fun({Mod,InitInfo}) -> + ets:fun2ms(fun({Mod,InitInfo}) when Mod =:= Module -> {Mod,InitInfo}; ({Key,_}) when is_record(Key,bump),Key#bump.module=:=Module -> {Key,0} -- cgit v1.2.3 From a3971dd8d2379fafd76bdaebc3c8b1e71b4e411a Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Thu, 27 Jan 2011 12:09:17 +0100 Subject: Add process debug tags --- lib/tools/src/cover.erl | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 48ab91db9c..f6f976e0af 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -114,6 +114,8 @@ true -> ?BLOCK(Expr) end). +-define(SPAWN_DBG(Tag,Value),put(Tag,Value)). + -include_lib("stdlib/include/ms_transform.hrl"). 
%%%---------------------------------------------------------------------- @@ -127,7 +129,10 @@ start() -> case whereis(?SERVER) of undefined -> Starter = self(), - Pid = spawn(fun() -> init_main(Starter) end), + Pid = spawn(fun() -> + ?SPAWN_DBG(start,[]), + init_main(Starter) + end), Ref = erlang:monitor(process,Pid), Return = receive @@ -596,6 +601,7 @@ main_process_loop(State) -> {From, {export,OutFile,Module}} -> spawn(fun() -> + ?SPAWN_DBG(export,{OutFile, Module}), do_export(Module, OutFile, From, State) end), main_process_loop(State); @@ -667,6 +673,7 @@ main_process_loop(State) -> S = try Loaded = is_loaded(Module, State), spawn(fun() -> + ?SPAWN_DBG(analyse,{Module,Analysis, Level}), do_parallel_analysis( Module, Analysis, Level, Loaded, From, State) @@ -682,6 +689,8 @@ main_process_loop(State) -> S = try Loaded = is_loaded(Module, State), spawn(fun() -> + ?SPAWN_DBG(analyse_to_file, + {Module,OutFile, Opts}), do_parallel_analysis_to_file( Module, OutFile, Opts, Loaded, From, State) @@ -777,6 +786,8 @@ remote_process_loop(State) -> {remote,collect,Module,CollectorPid,From} -> spawn(fun() -> + ?SPAWN_DBG(remote_collect, + {Module, CollectorPid, From}), do_collect(Module, CollectorPid, From) end), remote_process_loop(State); @@ -894,7 +905,10 @@ remote_start(MainNode) -> case whereis(?SERVER) of undefined -> Starter = self(), - Pid = spawn(fun() -> init_remote(Starter,MainNode) end), + Pid = spawn(fun() -> + ?SPAWN_DBG(remote_start,{MainNode}), + init_remote(Starter,MainNode) + end), Ref = erlang:monitor(process,Pid), Return = receive @@ -968,6 +982,8 @@ remote_collect(Module,Nodes,Stop) -> Pids = lists:map( fun(Node) -> spawn(fun() -> + ?SPAWN_DBG(remote_collect, + {Module, Nodes, Stop}), do_collection(Node, Module, Stop) end) end, @@ -990,6 +1006,7 @@ do_collection(Node, Module, Stop) -> %% Process which receives chunks of data from remote nodes - either when %% analysing or when stopping cover on the remote nodes. collector_proc() -> + ?SPAWN_DBG(collector_proc, []), receive {chunk,Chunk} -> insert_in_collection_table(Chunk), -- cgit v1.2.3 From 877935818cc41f74626f9a304acf3ec493ae8542 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Tue, 1 Feb 2011 19:21:35 +0100 Subject: Split the cover ets tables into two tables, one with the clause info and one with the bump info. This will make it faster to search the tables when analyzing and exporting data. Also made cover export more parallel in how data is collected from the different nodes and also how data is read from ets. This should make the performance of cover much better on machines with multiple CPUs. --- lib/test_server/src/test_server.erl | 4 -- lib/tools/src/cover.erl | 125 +++++++++++++++++++----------------- lib/tools/test/cover_SUITE.erl | 13 +++- 3 files changed, 77 insertions(+), 65 deletions(-) (limited to 'lib') diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl index 1045fa9a9b..da2e48488e 100644 --- a/lib/test_server/src/test_server.erl +++ b/lib/test_server/src/test_server.erl @@ -499,10 +499,6 @@ pmap(Fun,List) -> Res end end, Pids). - - - - unstick_all_sticky(Node) -> lists:filter( diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index f6f976e0af..a2e8288227 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -100,8 +100,10 @@ }). -define(COVER_TABLE, 'cover_internal_data_table'). +-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table'). -define(BINARY_TABLE, 'cover_binary_code_table'). 
-define(COLLECTION_TABLE, 'cover_collected_remote_data_table'). +-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table'). -define(TAG, cover_compiled). -define(SERVER, cover_server). @@ -517,8 +519,10 @@ remote_reply(MainNode,Reply) -> init_main(Starter) -> register(?SERVER,self()), ets:new(?COVER_TABLE, [set, public, named_table]), + ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), ets:new(?BINARY_TABLE, [set, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), + ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]), process_flag(trap_exit,true), Starter ! {?SERVER,started}, main_process_loop(#main_state{}). @@ -757,6 +761,7 @@ main_process_loop(State) -> init_remote(Starter,MainNode) -> register(?SERVER,self()), ets:new(?COVER_TABLE, [set, public, named_table]), + ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), Starter ! {self(),started}, remote_process_loop(#remote_state{main_node=MainNode}). @@ -819,27 +824,27 @@ remote_process_loop(State) -> end. do_collect(Module, CollectorPid, From) -> - MS = + AllClauses = case Module of - '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end); - _ -> ets:fun2ms(fun({M,C}) when M=:=Module -> C end) + '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); + _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) end, - AllClauses = lists:flatten(ets:select(?COVER_TABLE,MS)), %% Sending clause by clause in order to avoid large lists - lists:foreach( - fun({M,F,A,C,_L}) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps} - end, - AllClauses), + pmap( + fun({_Mod,Clauses}) -> + pmap(fun({M,F,A,C,_L}) -> + Pattern = + {#bump{module=M, function=F, arity=A, clause=C}, '_'}, + Bumps = ets:match_object(?COVER_TABLE, Pattern), + %% Reset + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Bumps), + CollectorPid ! {chunk,Bumps} + end,Clauses) + end,AllClauses), CollectorPid ! done, remote_reply(From, ok). @@ -880,6 +885,9 @@ load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) -> load_compiled([],Acc) -> Acc. +insert_initial_data([Item|Items]) when is_atom(element(1,Item)) -> + ets:insert(?COVER_CLAUSE_TABLE, Item), + insert_initial_data(Items); insert_initial_data([Item|Items]) -> ets:insert(?COVER_TABLE, Item), insert_initial_data(Items); @@ -949,15 +957,15 @@ remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> get_data_for_remote_loading({Module,File}) -> [{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module), %%! The InitialTable list will be long if the module is big - what to do?? - InitialTable = ets:select(?COVER_TABLE,ms(Module)), - {Module,File,Binary,InitialTable}. + InitialBumps = ets:select(?COVER_TABLE,ms(Module)), + InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module), + + {Module,File,Binary,InitialBumps ++ InitialClauses}. %% Create a match spec which returns the clause info {Module,InitInfo} and %% all #bump keys for the given module with 0 number of calls. ms(Module) -> - ets:fun2ms(fun({Mod,InitInfo}) when Mod =:= Module -> - {Mod,InitInfo}; - ({Key,_}) when is_record(Key,bump),Key#bump.module=:=Module -> + ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module -> {Key,0} end). 
@@ -979,22 +987,12 @@ remote_reset(Module,Nodes) -> %% Collect data from remote nodes - used for analyse or stop(Node) remote_collect(Module,Nodes,Stop) -> - Pids = lists:map( - fun(Node) -> - spawn(fun() -> - ?SPAWN_DBG(remote_collect, - {Module, Nodes, Stop}), - do_collection(Node, Module, Stop) - end) - end, - Nodes), - RefsNPids = [{erlang:monitor(process, Pid),Pid} || Pid <- Pids], - lists:foreach(fun({Ref,Pid}) -> - receive - {'DOWN', Ref, process, Pid, _} -> - ok - end - end,RefsNPids). + pmap(fun(Node) -> + ?SPAWN_DBG(remote_collect, + {Module, Nodes, Stop}), + do_collection(Node, Module, Stop) + end, + Nodes). do_collection(Node, Module, Stop) -> CollectorPid = spawn(fun collector_proc/0), @@ -1241,7 +1239,7 @@ do_compile_beam(Module,Beam) -> %% Store info about all function clauses in database InitInfo = reverse(Vars#vars.init_info), - ets:insert(?COVER_TABLE, {Module, InitInfo}), + ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}), %% Store binary code so it can be loaded on remote nodes ets:insert(?BINARY_TABLE, {Module, Binary}), @@ -1775,9 +1773,8 @@ common_elems(L1, L2) -> %% Collect data for all modules collect(Nodes) -> %% local node - MS = ets:fun2ms(fun({M,C}) when is_atom(M) -> {M,C} end), - AllClauses = ets:select(?COVER_TABLE,MS), - move_modules(AllClauses), + AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE), + pmap(fun move_modules/1,AllClauses), %% remote nodes remote_collect('_',Nodes,false). @@ -1785,7 +1782,7 @@ collect(Nodes) -> %% Collect data for one module collect(Module,Clauses,Nodes) -> %% local node - move_modules([{Module,Clauses}]), + move_modules({Module,Clauses}), %% remote nodes remote_collect(Module,Nodes,false). @@ -1793,12 +1790,9 @@ collect(Module,Clauses,Nodes) -> %% When analysing, the data from the local ?COVER_TABLE is moved to the %% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE -move_modules([{Module,Clauses}|AllClauses]) -> - ets:insert(?COLLECTION_TABLE,{Module,Clauses}), - move_clauses(Clauses), - move_modules(AllClauses); -move_modules([]) -> - ok. +move_modules({Module,Clauses}) -> + ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}), + move_clauses(Clauses). 
move_clauses([{M,F,A,C,_L}|Clauses]) -> Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, @@ -1842,12 +1836,12 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> C = case Loaded of {loaded, _File} -> [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), + ets:lookup(?COVER_CLAUSE_TABLE,Module), collect(Module,Clauses,State#main_state.nodes), Clauses; _ -> [{Module,Clauses}] = - ets:lookup(?COLLECTION_TABLE,Module), + ets:lookup(?COLLECTION_CLAUSE_TABLE,Module), Clauses end, R = do_analyse(Module, Analysis, Level, C), @@ -1933,7 +1927,7 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> File = case Loaded of {loaded, File0} -> [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), + ets:lookup(?COVER_CLAUSE_TABLE,Module), collect(Module, Clauses, State#main_state.nodes), File0; @@ -2063,7 +2057,7 @@ do_export(Module, OutFile, From, State) -> try is_loaded(Module, State) of {loaded, File} -> [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), + ets:lookup(?COVER_CLAUSE_TABLE,Module), collect(Module, Clauses, State#main_state.nodes), do_export_table([{Module,File}],[],Fd); @@ -2099,7 +2093,7 @@ merge([],ModuleList) -> write_module_data([{Module,File}|ModList],Fd) -> write({file,Module,File},Fd), - [Clauses] = ets:lookup(?COLLECTION_TABLE,Module), + [Clauses] = ets:lookup(?COLLECTION_CLAUSE_TABLE,Module), write(Clauses,Fd), ModuleData = ets:match_object(?COLLECTION_TABLE,{#bump{module=Module},'_'}), do_write_module_data(ModuleData,Fd), @@ -2149,7 +2143,7 @@ do_import_to_table(Fd,ImportFile,Imported,DontImport) -> {Module,Clauses} -> case lists:member(Module,DontImport) of false -> - ets:insert(?COLLECTION_TABLE,{Module,Clauses}); + ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}); true -> ok end, @@ -2183,14 +2177,14 @@ do_reset_main_node(Module,Nodes) -> remote_reset(Module,Nodes). do_reset_collection_table(Module) -> - ets:delete(?COLLECTION_TABLE,Module), + ets:delete(?COLLECTION_CLAUSE_TABLE,Module), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). %% do_reset(Module) -> ok %% The reset is done on a per-clause basis to avoid building %% long lists in the case of very large modules do_reset(Module) -> - [{Module,Clauses}] = ets:lookup(?COVER_TABLE, Module), + [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module), do_reset2(Clauses). do_reset2([{M,F,A,C,_L}|Clauses]) -> @@ -2205,7 +2199,7 @@ do_reset2([]) -> ok. do_clear(Module) -> - ets:match_delete(?COVER_TABLE, {Module,'_'}), + ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}), ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). @@ -2245,3 +2239,18 @@ escape_lt_and_gt1([],Acc) -> lists:reverse(Acc); escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). + +pmap(Fun,List) -> + Collector = self(), + Pids = lists:map(fun(E) -> + spawn_link(fun() -> + ?SPAWN_DBG(pmap,E), + Collector ! {res,self(),Fun(E)} + end) + end, List), + lists:map(fun(Pid) -> + receive + {res,Pid,Res} -> + Res + end + end, Pids). diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index b9ccd62d0b..4beb433839 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -18,7 +18,7 @@ %% -module(cover_SUITE). --export([all/1]). +-export([all/1, init_per_testcase/2, end_per_testcase/2]). 
-export([start/1, compile/1, analyse/1, misc/1, stop/1, distribution/1, export_import/1, otp_5031/1, eif/1, otp_5305/1, otp_5418/1, otp_6115/1, otp_7095/1, @@ -49,6 +49,13 @@ all(suite) -> "Can't run cover test."} end. +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + %cover:stop(), + ok. + start(suite) -> []; start(Config) when is_list(Config) -> ?line ok = file:set_cwd(?config(data_dir, Config)), @@ -381,8 +388,8 @@ export_import(Config) when is_list(Config) -> ?line {ok,a} = cover:compile(a), ?line ?t:capture_start(), ?line ok = cover:export("all_exported"), - ?line [Text2] = ?t:capture_get(), - ?line "Export includes data from imported files"++_ = lists:flatten(Text2), + ?line [] = ?t:capture_get(), +% ?line "Export includes data from imported files"++_ = lists:flatten(Text2), ?line ?t:capture_stop(), ?line ok = cover:stop(), ?line ok = cover:import("all_exported"), -- cgit v1.2.3 From e24ae469ca5c2814dfd133da1e6882b84a7db95b Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Wed, 2 Feb 2011 10:52:54 +0100 Subject: Add aync_analyse_to_file function to cover --- lib/tools/src/cover.erl | 56 +++++++++++++++++++++++++++++++++++++------------ 1 file changed, 43 insertions(+), 13 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index a2e8288227..50a812aa09 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -61,6 +61,9 @@ analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3, analyse_to_file/1, analyse_to_file/2, analyse_to_file/3, analyze_to_file/1, analyze_to_file/2, analyze_to_file/3, + async_analyse_to_file/1,async_analyse_to_file/2, + async_analyse_to_file/3, async_analyze_to_file/1, + async_analyze_to_file/2, async_analyze_to_file/3, export/1, export/2, import/1, modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1, reset/1, reset/0, @@ -389,6 +392,30 @@ analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut). analyze_to_file(Module, OutFile, Options) -> analyse_to_file(Module, OutFile, Options). +async_analyse_to_file(Module) -> + do_spawn(?MODULE,analyse_to_file, [Module]). +async_analyse_to_file(Module, OutFileOrOpts) -> + do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]). +async_analyse_to_file(Module, OutFile, Options) -> + do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). + +do_spawn(M,F,A) -> + spawn(fun() -> + case apply(M,F,A) of + {ok, _} -> + ok; + {error, Reason} -> + exit(Reason) + end + end). + +async_analyze_to_file(Module) -> + async_analyse_to_file(Module). +async_analyze_to_file(Module, OutFileOrOpts) -> + async_analyse_to_file(Module, OutFileOrOpts). +async_analyze_to_file(Module, OutFile, Options) -> + async_analyse_to_file(Module, OutFile, Options). + outfilename(Module,Opts) -> case lists:member(html,Opts) of true -> @@ -824,7 +851,7 @@ remote_process_loop(State) -> end. do_collect(Module, CollectorPid, From) -> - AllClauses = + AllMods = case Module of '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) @@ -833,21 +860,24 @@ do_collect(Module, CollectorPid, From) -> %% Sending clause by clause in order to avoid large lists pmap( fun({_Mod,Clauses}) -> - pmap(fun({M,F,A,C,_L}) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! 
{chunk,Bumps} - end,Clauses) - end,AllClauses), + lists:map(fun(Clause) -> + send_collected_data(Clause, CollectorPid) + end,Clauses) + end,AllMods), CollectorPid ! done, remote_reply(From, ok). +send_collected_data({M,F,A,C,_L}, CollectorPid) -> + Pattern = + {#bump{module=M, function=F, arity=A, clause=C}, '_'}, + Bumps = ets:match_object(?COVER_TABLE, Pattern), + %% Reset + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Bumps), + CollectorPid ! {chunk,Bumps}. + reload_originals([{Module,_File}|Compiled]) -> do_reload_original(Module), reload_originals(Compiled); -- cgit v1.2.3 From cb119ebc0e923b6b85a8352ef71012eb431b54d5 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Wed, 2 Feb 2011 10:52:42 +0100 Subject: Update documentation to reflect performance enhancement changes of cover --- lib/tools/doc/src/cover.xml | 29 +++++++++++++++++++++++++++++ lib/tools/doc/src/cover_chapter.xml | 7 +++++++ lib/tools/src/cover.erl | 34 ++++++++++++++++++++++++---------- 3 files changed, 60 insertions(+), 10 deletions(-) (limited to 'lib') diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml index 323bd0dda8..0a3302bda5 100644 --- a/lib/tools/doc/src/cover.xml +++ b/lib/tools/doc/src/cover.xml @@ -270,6 +270,8 @@ defaults to function.

If Module is not Cover compiled, the function returns {error,{not_cover_compiled,Module}}.

+      HINT: It is possible to issue multiple analyse_to_file commands at
+        the same time.

@@ -307,6 +309,33 @@ .beam file, or in ../src relative to that directory. If no source code is found, ,{error,no_source_code_found} is returned.

+      HINT: It is possible to issue multiple analyse_to_file commands at
+        the same time.

+  async_analyse_to_file(Module) ->
+  async_analyse_to_file(Module,Options) ->
+  async_analyse_to_file(Module, OutFile) ->
+  async_analyse_to_file(Module, OutFile, Options) -> pid()
+  Asynchronous call to analyse_to_file.
+
+    Module = atom()
+    OutFile = string()
+    Options = [Option]
+    Option = html
+    Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node
+      File = string()
+      Reason = term()

+      This function works exactly the same way as analyse_to_file except
+      that it is asynchronous instead of synchronous. The spawned process
+      will link with the caller when created. If an Error occurs
+      while doing the cover analysis the process will crash with the same
+      error reason as analyse_to_file would return.
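A rough usage sketch (not part of the patch) of the asynchronous call described above; the wrapper name analyse_async, the output file argument and the html option are invented for illustration. Because the spawned worker is linked to the caller and exits with the error reason on failure, the caller traps exits here to turn a failed analysis into a return value:

    %% Hypothetical wrapper around cover:async_analyse_to_file/3.
    %% The worker process is linked to the caller, so trap exits and wait
    %% for its exit signal instead of crashing with it.
    analyse_async(Module, OutFile) ->
        process_flag(trap_exit, true),
        Pid = cover:async_analyse_to_file(Module, OutFile, [html]),
        receive
            {'EXIT', Pid, normal} -> ok;
            {'EXIT', Pid, Reason} -> {error, Reason}
        end.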

diff --git a/lib/tools/doc/src/cover_chapter.xml b/lib/tools/doc/src/cover_chapter.xml index b4f7919183..92a790c34e 100644 --- a/lib/tools/doc/src/cover_chapter.xml +++ b/lib/tools/doc/src/cover_chapter.xml @@ -403,6 +403,13 @@ ok database contains information about each executable line in each Cover compiled module, performance decreases proportionally to the size and number of the Cover compiled modules.

+      To improve performance when analysing cover results it is possible
+      to do multiple calls to analyse and analyse_to_file at once. You can
+      also use the async_analyse_to_file convenience function.
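As a rough illustration of the advice above (not part of the patch), several analyse calls can be issued from separate processes, one per cover-compiled module; the helper name parallel_analyse is invented for this sketch:

    %% Hypothetical sketch: analyse a list of cover-compiled modules in
    %% parallel and collect the per-module results in input order.
    parallel_analyse(Modules) ->
        Parent = self(),
        Pids = [spawn(fun() -> Parent ! {self(), cover:analyse(M, module)} end)
                || M <- Modules],
        [receive {Pid, Result} -> Result end || Pid <- Pids].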

diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 50a812aa09..cc4f75f2e8 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -35,23 +35,37 @@ %% remote_process_loop/1. %% %% TABLES -%% Each nodes has an ets table named 'cover_internal_data_table' -%% (?COVER_TABLE). This table contains the coverage data and is -%% continously updated when cover compiled code is executed. +%% Each nodes has two tables: cover_internal_data_table (?COVER_TABLE) and. +%% cover_internal_clause_table (?COVER_CLAUSE_TABLE). +%% ?COVER_TABLE contains the bump data i.e. the data about which lines +%% have been executed how many times. +%% ?COVER_CLAUSE_TABLE contains information about which clauses in which modules +%% cover is currently collecting statistics. %% -%% The main node owns a table named -%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE). This table -%% contains data which is collected from remote nodes (either when a -%% remote node is stopped with cover:stop/1 or when analysing. When -%% analysing, data is even moved from the ?COVER_TABLE on the main -%% node to the ?COLLECTION_TABLE. +%% The main node owns tables named +%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE) and +%% 'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE). +%% These tables contain data which is collected from remote nodes (either when a +%% remote node is stopped with cover:stop/1 or when analysing). When +%% analysing, data is even moved from the COVER tables on the main +%% node to the COLLECTION tables. %% %% The main node also has a table named 'cover_binary_code_table' %% (?BINARY_TABLE). This table contains the binary code for each cover %% compiled module. This is necessary so that the code can be loaded %% on remote nodes that are started after the compilation. %% - +%% PARELLALISM +%% To take advantage of SMP when doing the cover analysis both the data +%% collection and analysis has been parallelized. One process is spawned for +%% each node when collecting data, and on the remote node when collecting data +%% one process is spawned per module. +%% +%% When analyzing data it is possible to issue multiple analyse(_to_file)/X +%% calls at once. They are however all calls (for backwardscompatability +%% reasons) so the user of cover will have to spawn several processes to to the +%% calls ( or use async_analyse_to_file ). +%% %% External exports -export([start/0, start/1, -- cgit v1.2.3 From d45434a0b0388e4598186e60fa3d94db5678de4f Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Wed, 2 Feb 2011 15:42:07 +0100 Subject: Update internal pmap to have a process limit Add write concurrancy to cover masters ?COVER_TABLE --- lib/tools/src/cover.erl | 53 +++++++++++++++++++++++++++++++++---------------- 1 file changed, 36 insertions(+), 17 deletions(-) (limited to 'lib') diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index cc4f75f2e8..ada2db45be 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -407,14 +407,14 @@ analyze_to_file(Module, OutFile, Options) -> analyse_to_file(Module, OutFile, Options). async_analyse_to_file(Module) -> - do_spawn(?MODULE,analyse_to_file, [Module]). + do_spawn(?MODULE, analyse_to_file, [Module]). async_analyse_to_file(Module, OutFileOrOpts) -> do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]). async_analyse_to_file(Module, OutFile, Options) -> do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). 
do_spawn(M,F,A) -> - spawn(fun() -> + spawn_link(fun() -> case apply(M,F,A) of {ok, _} -> ok; @@ -559,7 +559,11 @@ remote_reply(MainNode,Reply) -> init_main(Starter) -> register(?SERVER,self()), - ets:new(?COVER_TABLE, [set, public, named_table]), + %% Having write concurrancy here gives a 40% performance boost + %% when collect/1 is called. + ets:new(?COVER_TABLE, [set, public, named_table + ,{write_concurrency, true} + ]), ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), ets:new(?BINARY_TABLE, [set, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), @@ -801,7 +805,10 @@ main_process_loop(State) -> init_remote(Starter,MainNode) -> register(?SERVER,self()), - ets:new(?COVER_TABLE, [set, public, named_table]), + ets:new(?COVER_TABLE, [set, public, named_table + %% write_concurrency here makes otp_8270 break :( + %,{write_concurrency, true} + ]), ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), Starter ! {self(),started}, remote_process_loop(#remote_state{main_node=MainNode}). @@ -2284,17 +2291,29 @@ escape_lt_and_gt1([],Acc) -> escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). -pmap(Fun,List) -> +pmap(Fun, List) -> + pmap(Fun, List, 20). +pmap(Fun, List, Limit) -> + pmap(Fun, List, [], Limit, 0, []). +pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit -> Collector = self(), - Pids = lists:map(fun(E) -> - spawn_link(fun() -> - ?SPAWN_DBG(pmap,E), - Collector ! {res,self(),Fun(E)} - end) - end, List), - lists:map(fun(Pid) -> - receive - {res,Pid,Res} -> - Res - end - end, Pids). + Pid = spawn_link(fun() -> + ?SPAWN_DBG(pmap,E), + Collector ! {res,self(),Fun(E)} + end), + erlang:monitor(process, Pid), + pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc); +pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) -> + receive + {'DOWN', _Ref, process, _, _} -> + pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc); + {res, Pid, Res} -> + pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc]) + end; +pmap(_Fun, [], [], _Limit, 0, Acc) -> + lists:reverse(Acc); +pmap(Fun, [], [], Limit, Cnt, Acc) -> + receive + {'DOWN', _Ref, process, _, _} -> + pmap(Fun, [], [], Limit, Cnt - 1, Acc) + end. -- cgit v1.2.3 From 1b4303f812d5968fc6de961146b39a1b20f75e42 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Thu, 17 Feb 2011 11:09:55 +0100 Subject: Update testcases which need crypto to be skipped on platforms which does not have crypto --- lib/tools/test/cover_SUITE.erl | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'lib') diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index 4beb433839..f632409208 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -49,6 +49,13 @@ all(suite) -> "Can't run cover test."} end. +init_per_testcase(TC, Config) when TC =:= misc; TC =:= compile -> + case code:which(crypto) of + Path when is_list(Path) -> + init_per_testcase(dummy_tc, Config); + _Else -> + {skip, "No crypto file to test with"} + end; init_per_testcase(_TestCase, Config) -> Config. -- cgit v1.2.3
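For reference, a self-contained sketch of the bounded parallel-map idea introduced in the "Update internal pmap to have a process limit" patch above. This batched variant (names invented) is simpler than the sliding-window pmap in cover.erl: it runs at most Limit workers at a time and waits for each batch to finish before starting the next, which is enough to show the concurrency cap:

    %% Hypothetical batched pmap: apply Fun to List with at most Limit
    %% concurrent worker processes, preserving the input order of results.
    pmap(_Fun, [], _Limit) ->
        [];
    pmap(Fun, List, Limit) when Limit > 0 ->
        {Batch, Rest} =
            case length(List) > Limit of
                true  -> lists:split(Limit, List);
                false -> {List, []}
            end,
        Parent = self(),
        Pids = [spawn_link(fun() -> Parent ! {self(), Fun(E)} end) || E <- Batch],
        Results = [receive {Pid, Res} -> Res end || Pid <- Pids],
        Results ++ pmap(Fun, Rest, Limit).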