author     Björn Gustavsson <[email protected]>  2019-02-06 14:24:51 +0100
committer  Björn Gustavsson <[email protected]>  2019-02-11 14:54:44 +0100
commit     e6bdde9e927ad0933aaf36120cc740e2380567dc (patch)
tree       1edd6f258afc6b6cfb6e9a477612f98cee908dbc /lib
parent     ab5debc0869dbfa64e4afd4cd59404a56ce51d71 (diff)
Use the counters module to speed up cover
Use `counters:add/3` instead of `ets:update_counter/3` for counting the number of times a line is executed. By default, the reference to the counter array for each module will be stored in a persistent term and retrieved every time a counter is updated. This makes the compiler test suite with coverage enabled run *almost* twice as fast (on my computer, in about 6 minutes, down from more than 11 minutes).

To get even more speed, the new `cover:local_only/0` function can be called to put cover into a mode where the cover-compiled code can only be run on the local node. In this mode, modules are cover-compiled in a more efficient way by compiling the counter reference directly into the code. This shaves off about one more minute, making the compiler test suite with coverage enabled run *more than* twice as fast (in about 5 minutes on my computer).
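Schematically, the per-line instrumentation changes as follows. This is an illustrative sketch only, not the literal generated forms (those are produced by patch_code1/2 in the diff below); the module name `m`, the counter index `1`, and the variables `BumpKey` and `Cref` are placeholders:

    %% Before: every executable line bumped an ETS counter keyed on a #bump{} record.
    ets:update_counter(cover_internal_data_table, BumpKey, 1),

    %% Default mode after this change: the counter array reference is fetched
    %% from a persistent term keyed on {cover,Module} and then bumped.
    counters:add(persistent_term:get({cover,m}), 1, 1),

    %% After cover:local_only/0: the counter reference is embedded as a literal
    %% in the generated code, so the persistent_term lookup is skipped.
    counters:add(Cref, 1, 1)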
Diffstat (limited to 'lib')
-rw-r--r--  lib/tools/doc/src/cover.xml      16
-rw-r--r--  lib/tools/src/cover.erl         490
-rw-r--r--  lib/tools/test/cover_SUITE.erl   40
3 files changed, 355 insertions(+), 191 deletions(-)
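For reference, the intended call sequence for the new local-only mode, as described in the documentation and exercised by the local_only/1 test case added below (a minimal sketch; the module `m` and the node `SomeRemoteNode` are placeholders):

    {ok,_} = cover:start(),
    ok = cover:local_only(),      %% must be called before compiling modules or adding nodes
    {ok,m} = cover:compile(m),    %% the counter reference is now compiled into the module
    %% ... run the code under test ...
    {ok,{m,{Covered,NotCovered}}} = cover:analyse(m, coverage, module),
    %% Cover cannot be started on other nodes in this mode:
    {error,local_only} = cover:start([SomeRemoteNode]).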
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml
index 31bf7266c7..e9f782977d 100644
--- a/lib/tools/doc/src/cover.xml
+++ b/lib/tools/doc/src/cover.xml
@@ -128,14 +128,26 @@
</desc>
</func>
<func>
- <name since="">start(Nodes) -> {ok,StartedNodes} | {error,not_main_node}</name>
+ <name since="OTP 22.0">local_only() -> ok | {error,too_late}</name>
+ <fsummary>Only support running Cover on the local node.</fsummary>
+ <desc>
+ <p>Only support running Cover on the local node. This function
+ must be called before any modules have been compiled or any
+ nodes added. When running in this mode, modules will be Cover
+ compiled in a more efficient way, but the resulting code will
+        only work on the same node it was compiled on.</p>
+ </desc>
+ </func>
+ <func>
+ <name since="">start(Nodes) -> {ok,StartedNodes} | {error,not_main_node} | {error,local_only}</name>
<fsummary>Start Cover on remote nodes.</fsummary>
<type>
<v>Nodes = StartedNodes = [atom()]</v>
</type>
<desc>
<p>Starts a Cover server on the each of given nodes, and loads
- all cover compiled modules.</p>
+ all cover compiled modules. This call will fail if
+ <c>cover:local_only/0</c> has been called.</p>
</desc>
</func>
<func>
diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl
index 8d4561ca9e..8fe866cb69 100644
--- a/lib/tools/src/cover.erl
+++ b/lib/tools/src/cover.erl
@@ -23,6 +23,7 @@
%% This module implements the Erlang coverage tool.
%%
%% ARCHITECTURE
+%%
%% The coverage tool consists of one process on each node involved in
%% coverage analysis. The process is registered as 'cover_server'
%% (?SERVER). The cover_server on the 'main' node is in charge, and
@@ -30,45 +31,62 @@
%% 'DOWN' message for another cover_server, it marks the node as
%% 'lost'. If a nodeup is received for a lost node the main node
%% ensures that the cover compiled modules are loaded again. If the
-%% remote node was alive during the disconnected periode, cover data
-%% for this periode will also be included in the analysis.
+%% remote node was alive during the disconnected period, cover data
+%% for this period will also be included in the analysis.
%%
%% The cover_server process on the main node is implemented by the
%% functions init_main/1 and main_process_loop/1. The cover_server on
%% the remote nodes are implemented by the functions init_remote/2 and
%% remote_process_loop/1.
%%
+%% COUNTERS
+%%
+%% The 'counters' module is used for counting how many times each line
+%% is executed. Each cover-compiled module will have its own array of
+%% counters.
+%%
+%% The counter reference for module Module is stored in a persistent
+%% term with the key {cover,Module}.
+%%
+%% When the cover:local_only/0 function has been called, the reference
+%% for the counter array will be compiled into each cover-compiled
+%% module directly (instead of retrieving it from a persistent term).
+%% That will be faster, but the resulting code can only be used on
+%% the main node.
+%%
%% TABLES
-%% Each nodes has two tables: cover_internal_data_table (?COVER_TABLE) and.
-%% cover_internal_clause_table (?COVER_CLAUSE_TABLE).
-%% ?COVER_TABLE contains the bump data i.e. the data about which lines
-%% have been executed how many times.
+%%
+%% Each node has two tables: ?COVER_MAPPING_TABLE and ?COVER_CLAUSE_TABLE.
+%% ?COVER_MAPPING_TABLE maps from a #bump{} record to an index in the
+%% counter array for the module. It is used both during instrumentation
+%% of cover-compiled modules and when collecting the counter values.
+%%
%% ?COVER_CLAUSE_TABLE contains information about which clauses in which modules
%% cover is currently collecting statistics.
-%%
-%% The main node owns tables named
-%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE) and
-%% 'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE).
-%% These tables contain data which is collected from remote nodes (either when a
-%% remote node is stopped with cover:stop/1 or when analysing). When
-%% analysing, data is even moved from the COVER tables on the main
-%% node to the COLLECTION tables.
%%
-%% The main node also has a table named 'cover_binary_code_table'
-%% (?BINARY_TABLE). This table contains the binary code for each cover
-%% compiled module. This is necessary so that the code can be loaded
-%% on remote nodes that are started after the compilation.
+%% The main node owns the tables ?COLLECTION_TABLE and
+%% ?COLLECTION_CLAUSE_TABLE. The counter data is consolidated into those
+%% tables from the counters on both the main node and the remote nodes.
+%% This consolidation is done when a remote node is stopped with
+%% cover:stop/1 or just before starting an analysis.
+%%
+%% The main node also has a table named ?BINARY_TABLE. This table
+%% contains the binary code for each cover-compiled
+%% module. This is necessary so that the code can be loaded on remote
+%% nodes that are started after the compilation.
%%
%% PARALLELISM
+%%
%% To take advantage of SMP when doing the cover analysis both the data
%% collection and analysis has been parallelized. One process is spawned for
%% each node when collecting data, and on the remote node when collecting data
%% one process is spawned per module.
%%
-%% When analyzing data it is possible to issue multiple analyse(_to_file)/X
-%% calls at once. They are however all calls (for backwards compatibility
-%% reasons) so the user of cover will have to spawn several processes to to the
-%% calls ( or use async_analyse_to_file ).
+%% When analyzing data it is possible to issue multiple
+%% analyse(_to_file)/X calls at once. They are, however, all
+%% synchronous calls (for backwards compatibility reasons), so the user
+%% of cover will have to spawn several processes to do the calls (or use
+%% async_analyse_to_file/X).
%%
%% External exports
@@ -89,7 +107,8 @@
modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1,
reset/1, reset/0,
flush/1,
- stop/0, stop/1]).
+ stop/0, stop/1,
+ local_only/0]).
-export([remote_start/1,get_main_node/0]).
%% Used internally to ensure we upgrade the code to the latest version.
@@ -98,9 +117,16 @@
-record(main_state, {compiled=[], % [{Module,File}]
imported=[], % [{Module,File,ImportFile}]
stopper, % undefined | pid()
+ local_only=false, % true | false
nodes=[], % [Node]
lost_nodes=[]}). % [Node]
+-record(remote_data, {module,
+ file,
+ code,
+ mapping,
+ clauses}).
+
-record(remote_state, {compiled=[], % [{Module,File}]
main_node}). % atom()
@@ -126,11 +152,12 @@
is_guard=false % boolean
}).
--define(COVER_TABLE, 'cover_internal_data_table').
+-define(COVER_MAPPING_TABLE, 'cover_internal_mapping_table').
-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table').
-define(BINARY_TABLE, 'cover_binary_code_table').
-define(COLLECTION_TABLE, 'cover_collected_remote_data_table').
-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table').
+
-define(TAG, cover_compiled).
-define(SERVER, cover_server).
@@ -186,6 +213,11 @@ start(Node) when is_atom(Node) ->
start(Nodes) ->
call({start_nodes,remove_myself(Nodes,[])}).
+%% local_only() -> ok | {error,too_late}
+
+local_only() ->
+ call(local_only).
+
%% compile(ModFiles) ->
%% compile(ModFiles, Options) ->
%% compile_module(ModFiles) -> Result
@@ -255,15 +287,8 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) ->
compile_modules(Files,Options) ->
Options2 = filter_options(Options),
- %% compile_modules(Files,Options2,[]).
call({compile, Files, Options2}).
-%% compile_modules([File|Files], Options, Result) ->
-%% R = call({compile, File, Options}),
-%% compile_modules(Files,Options,[R|Result]);
-%% compile_modules([],_Opts,Result) ->
-%% lists:reverse(Result).
-
filter_options(Options) ->
lists:filter(fun(Option) ->
case Option of
@@ -561,16 +586,6 @@ flush(Nodes) ->
get_main_node() ->
call(get_main_node).
-%% bump(Module, Function, Arity, Clause, Line)
-%% Module = Function = atom()
-%% Arity = Clause = Line = integer()
-%% This function is inserted into Cover compiled modules, once for each
-%% executable line.
-%bump(Module, Function, Arity, Clause, Line) ->
-% Key = #bump{module=Module, function=Function, arity=Arity, clause=Clause,
-% line=Line},
-% ets:update_counter(?COVER_TABLE, Key, 1).
-
call(Request) ->
Ref = erlang:monitor(process,?SERVER),
receive {'DOWN', Ref, _Type, _Object, noproc} ->
@@ -631,10 +646,8 @@ remote_reply(MainNode,Reply) ->
init_main(Starter) ->
register(?SERVER,self()),
- %% Having write concurrancy here gives a 40% performance boost
- %% when collect/1 is called.
- ?COVER_TABLE = ets:new(?COVER_TABLE, [set, public, named_table,
- {write_concurrency, true}]),
+ ?COVER_MAPPING_TABLE = ets:new(?COVER_MAPPING_TABLE,
+ [ordered_set, public, named_table]),
?COVER_CLAUSE_TABLE = ets:new(?COVER_CLAUSE_TABLE, [set, public,
named_table]),
?BINARY_TABLE = ets:new(?BINARY_TABLE, [set, public, named_table]),
@@ -648,10 +661,26 @@ init_main(Starter) ->
main_process_loop(State) ->
receive
+ {From, local_only} ->
+ case State of
+ #main_state{compiled=[],nodes=[]} ->
+ reply(From, ok),
+ main_process_loop(State#main_state{local_only=true});
+ #main_state{} ->
+ reply(From, {error,too_late}),
+ main_process_loop(State)
+ end;
+
{From, {start_nodes,Nodes}} ->
- {StartedNodes,State1} = do_start_nodes(Nodes, State),
- reply(From, {ok,StartedNodes}),
- main_process_loop(State1);
+ case State#main_state.local_only of
+ false ->
+ {StartedNodes,State1} = do_start_nodes(Nodes, State),
+ reply(From, {ok,StartedNodes}),
+ main_process_loop(State1);
+ true ->
+ reply(From, {error,local_only}),
+ main_process_loop(State)
+ end;
{From, {compile, Files, Options}} ->
{R,S} = do_compile(Files, Options, State),
@@ -742,11 +771,12 @@ main_process_loop(State) ->
end,
State#main_state.nodes),
reload_originals(State#main_state.compiled),
- ets:delete(?COVER_TABLE),
+ ets:delete(?COVER_MAPPING_TABLE),
ets:delete(?COVER_CLAUSE_TABLE),
ets:delete(?BINARY_TABLE),
ets:delete(?COLLECTION_TABLE),
ets:delete(?COLLECTION_CLAUSE_TABLE),
+ delete_all_counters(),
unregister(?SERVER),
reply(From, ok);
@@ -878,10 +908,8 @@ main_process_loop(State) ->
init_remote(Starter,MainNode) ->
register(?SERVER,self()),
- %% write_concurrency here makes otp_8270 break :(
- ?COVER_TABLE = ets:new(?COVER_TABLE, [set, public, named_table
- %,{write_concurrency, true}
- ]),
+ ?COVER_MAPPING_TABLE = ets:new(?COVER_MAPPING_TABLE,
+ [ordered_set, public, named_table]),
?COVER_CLAUSE_TABLE = ets:new(?COVER_CLAUSE_TABLE, [set, public,
named_table]),
Starter ! {self(),started},
@@ -904,7 +932,7 @@ remote_process_loop(State) ->
remote_process_loop(State#remote_state{compiled=Compiled});
{remote,reset,Module} ->
- do_reset(Module),
+ reset_counters(Module),
remote_reply(State#remote_state.main_node, ok),
remote_process_loop(State);
@@ -925,8 +953,9 @@ remote_process_loop(State) ->
{remote,stop} ->
reload_originals(State#remote_state.compiled),
- ets:delete(?COVER_TABLE),
+ ets:delete(?COVER_MAPPING_TABLE),
ets:delete(?COVER_CLAUSE_TABLE),
+ delete_all_counters(),
unregister(?SERVER),
ok; % not replying since 'DOWN' message will be received anyway
@@ -961,28 +990,12 @@ remote_process_loop(State) ->
end.
do_collect(Modules, CollectorPid, From) ->
- _ = pmap(
- fun(Module) ->
- Pattern = {#bump{module=Module, _='_'}, '$1'},
- MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
- Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
- send_chunks(Match, CollectorPid, [])
- end,Modules),
+ _ = pmap(fun(Module) ->
+ send_counters(Module, CollectorPid)
+ end, Modules),
CollectorPid ! done,
remote_reply(From, ok).
-send_chunks('$end_of_table', _CollectorPid, Mons) ->
- get_downs(Mons);
-send_chunks({Chunk,Continuation}, CollectorPid, Mons) ->
- Mon = spawn_monitor(
- fun() ->
- lists:foreach(fun({Bump,_N}) ->
- ets:insert(?COVER_TABLE, {Bump,0})
- end,
- Chunk) end),
- send_chunk(CollectorPid,Chunk),
- send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]).
-
send_chunk(CollectorPid,Chunk) ->
CollectorPid ! {chunk,Chunk,self()},
receive continue -> ok end.
@@ -1021,10 +1034,15 @@ do_reload_original(Module) ->
ignore
end.
-load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) ->
- %% Make sure the #bump{} records are available *before* the
- %% module is loaded.
- insert_initial_data(InitialTable),
+load_compiled([Data|Compiled],Acc) ->
+ %% Make sure the #bump{} records and counters are available *before*
+ %% compiling and loading the code.
+ #remote_data{module=Module,file=File,code=Beam,
+ mapping=InitialMapping,clauses=InitialClauses} = Data,
+ ets:insert(?COVER_MAPPING_TABLE, InitialMapping),
+ ets:insert(?COVER_CLAUSE_TABLE, InitialClauses),
+ maybe_create_counters(Module, true),
+
Sticky = case code:is_sticky(Module) of
true ->
code:unstick_mod(Module),
@@ -1032,7 +1050,7 @@ load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) ->
false ->
false
end,
- NewAcc = case code:load_binary(Module, ?TAG, Binary) of
+ NewAcc = case code:load_binary(Module, ?TAG, Beam) of
{module,Module} ->
add_compiled(Module, File, Acc);
_ ->
@@ -1047,16 +1065,6 @@ load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) ->
load_compiled([],Acc) ->
Acc.
-insert_initial_data([Item|Items]) when is_atom(element(1,Item)) ->
- ets:insert(?COVER_CLAUSE_TABLE, Item),
- insert_initial_data(Items);
-insert_initial_data([Item|Items]) ->
- ets:insert(?COVER_TABLE, Item),
- insert_initial_data(Items);
-insert_initial_data([]) ->
- ok.
-
-
unload([Module|Modules]) ->
do_clear(Module),
do_reload_original(Module),
@@ -1177,7 +1185,7 @@ get_downs_r([]) ->
[];
get_downs_r(Mons) ->
receive
- {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} ->
+ {'DOWN', Ref, _Type, Pid, #remote_data{}=R} ->
[R|get_downs_r(lists:delete({Pid,Ref},Mons))];
{'DOWN', Ref, _Type, Pid, Reason} = Down ->
case lists:member({Pid,Ref},Mons) of
@@ -1196,19 +1204,13 @@ get_downs_r(Mons) ->
%% Binary is the beam code for the module and InitialTable is the initial
%% data to insert in ?COVER_TABLE.
get_data_for_remote_loading({Module,File}) ->
- [{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module),
+ [{Module,Code}] = ets:lookup(?BINARY_TABLE, Module),
%%! The InitialTable list will be long if the module is big - what to do??
- InitialBumps = ets:select(?COVER_TABLE,ms(Module)),
+ Mapping = counters_mapping_table(Module),
InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module),
- {Module,File,Binary,InitialBumps ++ InitialClauses}.
-
-%% Create a match spec which returns the clause info {Module,InitInfo} and
-%% all #bump keys for the given module with 0 number of calls.
-ms(Module) ->
- ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module ->
- {Key,0}
- end).
+ #remote_data{module=Module,file=File,code=Code,
+ mapping=Mapping,clauses=InitialClauses}.
%% Unload modules on remote nodes
remote_unload(Nodes,UnloadedModules) ->
@@ -1464,7 +1466,7 @@ get_compiled_still_loaded(Nodes,Compiled0) ->
do_compile_beams(ModsAndFiles, State) ->
Result0 = pmap(fun({ok,Module,File}) ->
- do_compile_beam(Module,File,State);
+ do_compile_beam(Module, File, State);
(Error) ->
Error
end,
@@ -1476,8 +1478,10 @@ do_compile_beams(ModsAndFiles, State) ->
do_compile_beam(Module,BeamFile0,State) ->
case get_beam_file(Module,BeamFile0,State#main_state.compiled) of
{ok,BeamFile} ->
+ LocalOnly = State#main_state.local_only,
UserOptions = get_compile_options(Module,BeamFile),
- case do_compile_beam1(Module,BeamFile,UserOptions) of
+ case do_compile_beam1(Module,BeamFile,
+ UserOptions,LocalOnly) of
{ok, Module} ->
{ok,Module,BeamFile};
error ->
@@ -1503,41 +1507,39 @@ fix_state_and_result([],State,Acc) ->
do_compile(Files, Options, State) ->
+ LocalOnly = State#main_state.local_only,
Result0 = pmap(fun(File) ->
- do_compile(File, Options)
+ do_compile1(File, Options, LocalOnly)
end,
Files),
Compiled = [{M,F} || {ok,M,F} <- Result0],
remote_load_compiled(State#main_state.nodes,Compiled),
fix_state_and_result(Result0,State,[]).
-do_compile(File, Options) ->
- case do_compile1(File, Options) of
+do_compile1(File, Options, LocalOnly) ->
+ case do_compile2(File, Options, LocalOnly) of
{ok, Module} ->
{ok,Module,File};
error ->
{error,File}
end.
-%% do_compile1(File, Options) -> {ok,Module} | error
-do_compile1(File, UserOptions) ->
+%% do_compile2(File, Options) -> {ok,Module} | error
+do_compile2(File, UserOptions, LocalOnly) ->
Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions,
case compile:file(File, Options) of
{ok, Module, Binary} ->
- do_compile_beam1(Module,Binary,UserOptions);
+ do_compile_beam1(Module,Binary,UserOptions,LocalOnly);
error ->
error
end.
%% Beam is a binary or a .beam file name
-do_compile_beam1(Module,Beam,UserOptions) ->
+do_compile_beam1(Module,Beam,UserOptions,LocalOnly) ->
%% Clear database
do_clear(Module),
- %% Extract the abstract format and insert calls to bump/6 at
- %% every executable line and, as a side effect, initiate
- %% the database
-
+ %% Extract the abstract format.
case get_abstract_code(Module, Beam) of
no_abstract_code=E ->
{error,E};
@@ -1547,7 +1549,8 @@ do_compile_beam1(Module,Beam,UserOptions) ->
Forms0 = epp:interpret_file_attribute(Code),
case find_main_filename(Forms0) of
{ok,MainFile} ->
- do_compile_beam2(Module,Beam,UserOptions,Forms0,MainFile);
+ do_compile_beam2(Module,Beam,UserOptions,
+ Forms0,MainFile,LocalOnly);
Error ->
Error
end;
@@ -1566,26 +1569,35 @@ get_abstract_code(Module, Beam) ->
Error -> Error
end.
-do_compile_beam2(Module,Beam,UserOptions,Forms0,MainFile) ->
- {Forms,Vars} = transform(Forms0, Module, MainFile),
+do_compile_beam2(Module,Beam,UserOptions,Forms0,MainFile,LocalOnly) ->
+ init_counter_mapping(Module),
+
+ %% Instrument the abstract code by inserting
+ %% calls to update the counters.
+ {Forms,Vars} = transform(Forms0, Module, MainFile, LocalOnly),
+
+ %% Create counters.
+ maybe_create_counters(Module, not LocalOnly),
%% We need to recover the source from the compilation
%% info otherwise the newly compiled module will have
%% source pointing to the current directory
SourceInfo = get_source_info(Module, Beam),
- %% Compile and load the result
+ %% Compile and load the result.
%% It's necessary to check the result of loading since it may
- %% fail, for example if Module resides in a sticky directory
- {ok, Module, Binary} = compile:forms(Forms, SourceInfo ++ UserOptions),
+ %% fail, for example if Module resides in a sticky directory.
+ Options = SourceInfo ++ UserOptions,
+ {ok, Module, Binary} = compile:forms(Forms, Options),
+
case code:load_binary(Module, ?TAG, Binary) of
{module, Module} ->
- %% Store info about all function clauses in database
+ %% Store info about all function clauses in database.
InitInfo = lists:reverse(Vars#vars.init_info),
ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}),
- %% Store binary code so it can be loaded on remote nodes
+ %% Store binary code so it can be loaded on remote nodes.
ets:insert(?BINARY_TABLE, {Module, Binary}),
{ok, Module};
@@ -1617,11 +1629,12 @@ get_compile_info(Module, Beam) ->
[]
end.
-transform(Code, Module, MainFile) ->
+transform(Code, Module, MainFile, LocalOnly) ->
Vars0 = #vars{module=Module},
- {ok,MungedForms,Vars} = transform_2(Code,[],Vars0,MainFile,on),
+ {ok,MungedForms0,Vars} = transform_2(Code, [], Vars0, MainFile, on),
+ MungedForms = patch_code(Module, MungedForms0, LocalOnly),
{MungedForms,Vars}.
-
+
%% Helpfunction which returns the first found file-attribute, which can
%% be interpreted as the name of the main erlang source file.
find_main_filename([{attribute,_,file,{MainFile,_}}|_]) ->
@@ -1788,19 +1801,7 @@ munge_body([Expr|Body], Vars, MungedBody, LastExprBumpLines) ->
MungedExprs1 = [MungedExpr|MungedBody1],
munge_body(Body, Vars3, MungedExprs1, NewBumps);
false ->
- ets:insert(?COVER_TABLE, {#bump{module = Vars#vars.module,
- function = Vars#vars.function,
- arity = Vars#vars.arity,
- clause = Vars#vars.clause,
- line = Line},
- 0}),
Bump = bump_call(Vars, Line),
-% Bump = {call, 0, {remote, 0, {atom,0,cover}, {atom,0,bump}},
-% [{atom, 0, Vars#vars.module},
-% {atom, 0, Vars#vars.function},
-% {integer, 0, Vars#vars.arity},
-% {integer, 0, Vars#vars.clause},
-% {integer, 0, Line}]},
Lines2 = [Line|Lines],
{MungedExpr, Vars2} = munge_expr(Expr, Vars#vars{lines=Lines2}),
NewBumps = new_bumps(Vars2, Vars),
@@ -1855,8 +1856,10 @@ maybe_fix_last_expr(MungedExprs, Vars, LastExprBumpLines) ->
last_expr_needs_fixing(Vars, LastExprBumpLines) ->
case common_elems(Vars#vars.no_bump_lines, LastExprBumpLines) of
- [Line] -> {yes, Line};
- _ -> no
+ [Line] ->
+ {yes, Line};
+ _ ->
+ no
end.
fix_last_expr([MungedExpr|MungedExprs], Line, Vars) ->
@@ -1921,9 +1924,7 @@ fix_cls([Cl | Cls], Line, Bump) ->
bumps_line(E, L) ->
try bumps_line1(E, L) catch true -> true end.
-bumps_line1({call,_,{remote,_,{atom,_,ets},{atom,_,update_counter}},
- [{atom,_,?COVER_TABLE},{tuple,_,[_,_,_,_,_,{integer,_,Line}]},_]},
- Line) ->
+bumps_line1({'BUMP',Line,_}, Line) ->
throw(true);
bumps_line1([E | Es], Line) ->
bumps_line1(E, Line),
@@ -1933,19 +1934,12 @@ bumps_line1(T, Line) when is_tuple(T) ->
bumps_line1(_, _) ->
false.
-%%% End of fix of last expression.
-
+%% Insert a place holder for the call to counters:add/3 in the
+%% abstract code.
bump_call(Vars, Line) ->
- A = erl_anno:new(0),
- {call,A,{remote,A,{atom,A,ets},{atom,A,update_counter}},
- [{atom,A,?COVER_TABLE},
- {tuple,A,[{atom,A,?BUMP_REC_NAME},
- {atom,A,Vars#vars.module},
- {atom,A,Vars#vars.function},
- {integer,A,Vars#vars.arity},
- {integer,A,Vars#vars.clause},
- {integer,A,Line}]},
- {integer,A,1}]}.
+ {'BUMP',Line,counter_index(Vars, Line)}.
+
+%%% End of fix of last expression.
munge_expr({match,Line,ExprL,ExprR}, Vars) ->
{MungedExprL, Vars2} = munge_expr(ExprL, Vars),
@@ -2105,6 +2099,159 @@ subtract(L1, L2) ->
common_elems(L1, L2) ->
[E || E <- L1, lists:member(E, L2)].
+%%%--Counters------------------------------------------------------------
+
+init_counter_mapping(Mod) ->
+ true = ets:insert_new(?COVER_MAPPING_TABLE, {Mod,0}),
+ ok.
+
+counter_index(Vars, Line) ->
+ #vars{module=Mod,function=F,arity=A,clause=C} = Vars,
+ Key = #bump{module=Mod,function=F,arity=A,
+ clause=C,line=Line},
+ case ets:lookup(?COVER_MAPPING_TABLE, Key) of
+ [] ->
+ Index = ets:update_counter(?COVER_MAPPING_TABLE,
+ Mod, {2,1}),
+ true = ets:insert(?COVER_MAPPING_TABLE, {Key,Index}),
+ Index;
+ [{Key,Index}] ->
+ Index
+ end.
+
+%% Create the counter array and store as a persistent term.
+maybe_create_counters(Mod, true) ->
+ Cref = create_counters(Mod),
+ Key = {?MODULE,Mod},
+ persistent_term:put(Key, Cref),
+ ok;
+maybe_create_counters(_Mod, false) ->
+ ok.
+
+create_counters(Mod) ->
+ Size0 = ets:lookup_element(?COVER_MAPPING_TABLE, Mod, 2),
+ Size = max(1, Size0), %Size must not be 0.
+ Cref = counters:new(Size, [write_concurrency]),
+ ets:insert(?COVER_MAPPING_TABLE, {{counters,Mod},Cref}),
+ Cref.
+
+patch_code(Mod, Forms, false) ->
+ A = erl_anno:new(0),
+ AbstrKey = {tuple,A,[{atom,A,?MODULE},{atom,A,Mod}]},
+ patch_code1(Forms, {distributed,AbstrKey});
+patch_code(Mod, Forms, true) ->
+ Cref = create_counters(Mod),
+ AbstrCref = cid_to_abstract(Cref),
+ patch_code1(Forms, {local_only,AbstrCref}).
+
+%% Go through the abstract code and replace 'BUMP' forms
+%% with the actual code to increment the counters.
+patch_code1({'BUMP',_Line,Index}, {distributed,AbstrKey}) ->
+ %% Replace with counters:add(persistent_term:get(Key), Index, 1).
+ %% This code will work on any node.
+ A = element(2, AbstrKey),
+ GetCref = {call,A,{remote,A,{atom,A,persistent_term},{atom,A,get}},
+ [AbstrKey]},
+ {call,A,{remote,A,{atom,A,counters},{atom,A,add}},
+ [GetCref,{integer,A,Index},{integer,A,1}]};
+patch_code1({'BUMP',_Line,Index}, {local_only,AbstrCref}) ->
+ %% Replace with counters:add(Cref, Index, 1). This code
+ %% will only work on the local node.
+ A = element(2, AbstrCref),
+ {call,A,{remote,A,{atom,A,counters},{atom,A,add}},
+ [AbstrCref,{integer,A,Index},{integer,A,1}]};
+patch_code1({clauses,Cs}, Key) ->
+ {clauses,[patch_code1(El, Key) || El <- Cs]};
+patch_code1([_|_]=List, Key) ->
+ [patch_code1(El, Key) || El <- List];
+patch_code1(Tuple, Key) when tuple_size(Tuple) >= 3 ->
+ Acc = [element(2, Tuple),element(1, Tuple)],
+ patch_code_tuple(3, tuple_size(Tuple), Tuple, Key, Acc);
+patch_code1(Other, _Key) ->
+ Other.
+
+patch_code_tuple(I, Size, Tuple, Key, Acc) when I =< Size ->
+ El = patch_code1(element(I, Tuple), Key),
+ patch_code_tuple(I + 1, Size, Tuple, Key, [El|Acc]);
+patch_code_tuple(_I, _Size, _Tuple, _Key, Acc) ->
+ list_to_tuple(lists:reverse(Acc)).
+
+%% Don't try this at home! Assumes knowledge of the internal
+%% representation of a counter ref.
+cid_to_abstract(Cref0) ->
+ A = erl_anno:new(0),
+ %% Disable dialyzer warning for breaking opacity.
+ Cref = binary_to_term(term_to_binary(Cref0)),
+ {write_concurrency,Ref} = Cref,
+ {tuple,A,[{atom,A,write_concurrency},{integer,A,Ref}]}.
+
+%% Called on the remote node. Collect and send counters to
+%% the main node. Also zero the counters.
+send_counters(Mod, CollectorPid) ->
+ Process = fun(Chunk) -> send_chunk(CollectorPid, Chunk) end,
+ move_counters(Mod, Process).
+
+%% Called on the main node. Collect the counters and consolidate
+%% them into the collection table. Also zero the counters.
+move_counters(Mod) ->
+ move_counters(Mod, fun insert_in_collection_table/1).
+
+move_counters(Mod, Process) ->
+ Pattern = {#bump{module=Mod,_='_'},'_'},
+ Matches = ets:match_object(?COVER_MAPPING_TABLE, Pattern, ?CHUNK_SIZE),
+ Cref = get_counters_ref(Mod),
+ move_counters1(Matches, Cref, Process).
+
+move_counters1({Mappings,Continuation}, Cref, Process) ->
+ Move = fun({Key,Index}) ->
+ Count = counters:get(Cref, Index),
+ ok = counters:sub(Cref, Index, Count),
+ {Key,Count}
+ end,
+ Process(lists:map(Move, Mappings)),
+ move_counters1(ets:match_object(Continuation), Cref, Process);
+move_counters1('$end_of_table', _Cref, _Process) ->
+ ok.
+
+counters_mapping_table(Mod) ->
+ Mapping = counters_mapping(Mod),
+ Cref = get_counters_ref(Mod),
+ #{size:=Size} = counters:info(Cref),
+ [{Mod,Size}|Mapping].
+
+get_counters_ref(Mod) ->
+ ets:lookup_element(?COVER_MAPPING_TABLE, {counters,Mod}, 2).
+
+counters_mapping(Mod) ->
+ Pattern = {#bump{module=Mod,_='_'},'_'},
+ ets:match_object(?COVER_MAPPING_TABLE, Pattern).
+
+clear_counters(Mod) ->
+ _ = persistent_term:erase({?MODULE,Mod}),
+ ets:delete(?COVER_MAPPING_TABLE, Mod),
+ Pattern = {#bump{module=Mod,_='_'},'_'},
+ _ = ets:match_delete(?COVER_MAPPING_TABLE, Pattern),
+ ok.
+
+%% Reset counters (set counters to 0).
+reset_counters(Mod) ->
+ Pattern = {#bump{module=Mod,_='_'},'$1'},
+ MatchSpec = [{Pattern,[],['$1']}],
+ Matches = ets:select(?COVER_MAPPING_TABLE,
+ MatchSpec, ?CHUNK_SIZE),
+ Cref = get_counters_ref(Mod),
+ reset_counters1(Matches, Cref).
+
+reset_counters1({Indices,Continuation}, Cref) ->
+ _ = [counters:put(Cref, N, 0) || N <- Indices],
+ reset_counters1(ets:select(Continuation), Cref);
+reset_counters1('$end_of_table', _Cref) ->
+ ok.
+
+delete_all_counters() ->
+    _ = [persistent_term:erase(Key) || {{?MODULE,_}=Key,_} <- persistent_term:get()],
+ ok.
+
%%%--Analysis------------------------------------------------------------
%% Collect data for all modules
@@ -2140,20 +2287,7 @@ collect(Module,Clauses,Nodes) ->
%% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE
move_modules({Module,Clauses}) ->
ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
- Pattern = {#bump{module=Module, _='_'}, '_'},
- MatchSpec = [{Pattern,[],['$_']}],
- Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
- do_move_module(Match).
-
-do_move_module({Bumps,Continuation}) ->
- lists:foreach(fun({Key,Val}) ->
- ets:insert(?COVER_TABLE, {Key,0}),
- insert_in_collection_table(Key,Val)
- end,
- Bumps),
- do_move_module(ets:select(Continuation));
-do_move_module('$end_of_table') ->
- ok.
+ move_counters(Module).
%% Given a .beam file, find the .erl file. Look first in same directory as
%% the .beam file, then in ../src, then in compile info.
@@ -2709,7 +2843,7 @@ get_term(Fd) ->
%% Reset main node and all remote nodes
do_reset_main_node(Module,Nodes) ->
- do_reset(Module),
+ reset_counters(Module),
do_reset_collection_table(Module),
remote_reset(Module,Nodes).
@@ -2717,27 +2851,9 @@ do_reset_collection_table(Module) ->
ets:delete(?COLLECTION_CLAUSE_TABLE,Module),
ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
-%% do_reset(Module) -> ok
-%% The reset is done on ?CHUNK_SIZE number of bumps to avoid building
-%% long lists in the case of very large modules
-do_reset(Module) ->
- Pattern = {#bump{module=Module, _='_'}, '$1'},
- MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
- Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
- do_reset2(Match).
-
-do_reset2({Bumps,Continuation}) ->
- lists:foreach(fun({Bump,_N}) ->
- ets:insert(?COVER_TABLE, {Bump,0})
- end,
- Bumps),
- do_reset2(ets:select(Continuation));
-do_reset2('$end_of_table') ->
- ok.
-
do_clear(Module) ->
ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}),
- ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}),
+ clear_counters(Module),
case lists:member(?COLLECTION_TABLE, ets:all()) of
true ->
%% We're on the main node
diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl
index 161b0105b9..ee58fd7a10 100644
--- a/lib/tools/test/cover_SUITE.erl
+++ b/lib/tools/test/cover_SUITE.erl
@@ -24,7 +24,8 @@
-include_lib("common_test/include/ct.hrl").
suite() ->
- [{ct_hooks,[ts_install_cth]}].
+ [{ct_hooks,[ts_install_cth]},
+ {timetrap,{minutes,5}}].
all() ->
NoStartStop = [eif,otp_5305,otp_5418,otp_7095,otp_8273,
@@ -35,7 +36,8 @@ all() ->
distribution, reconnect, die_and_reconnect,
dont_reconnect_after_stop, stop_node_after_disconnect,
export_import, otp_5031, otp_6115,
- otp_8270, otp_10979_hanging_node, otp_14817],
+ otp_8270, otp_10979_hanging_node, otp_14817,
+ local_only],
case whereis(cover_server) of
undefined ->
[coverage|StartStop ++ NoStartStop];
@@ -1742,6 +1744,40 @@ otp_13289(Config) ->
ok = file:delete(File),
ok.
+local_only(Config) ->
+ ok = file:set_cwd(proplists:get_value(data_dir, Config)),
+
+    %% Try restricting to local nodes too late.
+ cover:start(),
+ {ok,a} = cover:compile(a),
+ [a] = cover:modules(),
+ {error,too_late} = cover:local_only(),
+ cover:stop(),
+
+ %% Now test local only mode.
+ cover:start(),
+ ok = cover:local_only(),
+ [] = cover:modules(),
+ {ok,a} = cover:compile(a),
+ [a] = cover:modules(),
+ done = a:start(5),
+ {ok, {a,{17,2}}} = cover:analyse(a, coverage, module),
+ {ok, [{{a,exit_kalle,0},{1,0}},
+ {{a,loop,3},{5,1}},
+ {{a,pong,1},{1,0}},
+ {{a,start,1},{6,0}},
+ {{a,stop,1},{0,1}},
+ {{a,trycatch,1},{4,0}}]} =
+ cover:analyse(a, coverage, function),
+
+ %% Make sure that it is not possible to run cover on
+ %% slave nodes.
+ {ok,Name} = test_server:start_node(?FUNCTION_NAME, slave, []),
+ {error,local_only} = cover:start([Name]),
+ test_server:stop_node(Name),
+
+ ok.
+
%%--Auxiliary------------------------------------------------------------
analyse_expr(Expr, Config) ->