author     Peter Andersson <[email protected]>   2010-06-03 14:14:38 +0200
committer  Raimo Niskanen <[email protected]>    2010-06-09 16:19:20 +0200
commit     c3a1e56608ebe08f1ddc07273d85ff9c2779de9b (patch)
tree       3477d760c98c305a70076e45e74239c7d140c355 /lib
parent     24f7909acb5d395756ce1912ab426090e369eb84 (diff)
Implement support for user-controllable timetrap parameters (multiply and scale)
Documentation still missing.
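As a hedged illustration only (not part of this patch), the new parameters could be enabled from the Erlang shell; the option names come from the ct:run_test/1 @spec and run_test usage text changed below, while the concrete values and test directory are made up for the example:

    %% Multiply all timetraps by 10 and enable automatic scaling.
    ct:run_test([{dir,"my_tests"},
                 {multiply_timetraps,10},
                 {scale_timetraps,true}]).

    %% Assumed equivalent run_test flags (the usage text below lists the
    %% flags; their argument form is an assumption here):
    %% ./run_test -dir my_tests -multiply_timetraps 10 -scale_timetraps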
Diffstat (limited to 'lib')
-rw-r--r--  lib/common_test/src/ct.erl                                                 |   39
-rw-r--r--  lib/common_test/src/ct_run.erl                                             | 1226
-rw-r--r--  lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl  |    3
-rw-r--r--  lib/test_server/doc/src/test_server.xml                                    |   16
-rw-r--r--  lib/test_server/doc/src/test_server_ctrl.xml                               |   25
-rw-r--r--  lib/test_server/src/test_server.erl                                        |  314
-rw-r--r--  lib/test_server/src/test_server_ctrl.erl                                   |  999
-rw-r--r--  lib/test_server/src/test_server_sup.erl                                    |   13
8 files changed, 1443 insertions(+), 1192 deletions(-)
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index 2d71a3812d..57035719e2 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -64,7 +64,8 @@
print/1, print/2, print/3,
pal/1, pal/2, pal/3,
fail/1, comment/1,
- testcases/2, userdata/2, userdata/3]).
+ testcases/2, userdata/2, userdata/3,
+ sleep/1]).
%% New API for manipulating with config handlers
-export([add_config/2, remove_config/2]).
@@ -143,8 +144,9 @@ run(TestDirs) ->
%%% {allow_user_terms,Bool} | {logdir,LogDir} |
%%% {silent_connections,Conns} | {cover,CoverSpecFile} |
%%% {step,StepOpts} | {event_handler,EventHandlers} | {include,InclDirs} |
-%%% {auto_compile,Bool} | {repeat,N} | {duration,DurTime} |
-%%% {until,StopTime} | {force_stop,Bool} | {decrypt,DecryptKeyOrFile} |
+%%% {auto_compile,Bool} | {multiply_timetraps,M} | {scale_timetraps,Bool} |
+%%% {repeat,N} | {duration,DurTime} | {until,StopTime} |
+%%% {force_stop,Bool} | {decrypt,DecryptKeyOrFile} |
%%% {refresh_logs,LogDir} | {basic_html,Bool}
%%% CfgFiles = [string()] | string()
%%% TestDirs = [string()] | string()
@@ -161,6 +163,7 @@ run(TestDirs) ->
%%% EH = atom() | {atom(),InitArgs} | {[atom()],InitArgs}
%%% InitArgs = [term()]
%%% InclDirs = [string()] | string()
+%%% M = integer()
%%% N = integer()
%%% DurTime = string(HHMMSS)
%%% StopTime = string(YYMoMoDDHHMMSS) | string(HHMMSS)
@@ -812,8 +815,7 @@ decrypt_config_file(EncryptFileName, TargetFileName, KeyOrFile) ->
%%%-----------------------------------------------------------------
-%%% @spec add_config(Callback, Config) ->
-%%% ok | {error, Reason}
+%%% @spec add_config(Callback, Config) -> ok | {error, Reason}
%%% Callback = atom()
%%% Config = string()
%%% Reason = term()
@@ -827,8 +829,7 @@ add_config(Callback, Config)->
ct_config:add_config(Callback, Config).
%%%-----------------------------------------------------------------
-%%% @spec remove_config(Callback, Config) ->
-%%% ok
+%%% @spec remove_config(Callback, Config) -> ok
%%% Callback = atom()
%%% Config = string()
%%% Reason = term()
@@ -836,5 +837,27 @@ add_config(Callback, Config)->
%%% @doc <p>This function removes configuration variables (together with
%%% their aliases) which were loaded with specified callback module and
%%% configuration string.</p>
-remove_config(Callback, Config)->
+remove_config(Callback, Config) ->
ct_config:remove_config(Callback, Config).
+
+%%%-----------------------------------------------------------------
+%%% @spec sleep(Time) -> ok
+%%% Time = {hours,Hours} | {minutes,Mins} | {seconds,Secs} | Millisecs | infinity
+%%% Hours = integer()
+%%% Mins = integer()
+%%% Secs = integer()
+%%% Millisecs = integer() | float()
+%%%
+%%% @doc <p>This function, similar to <c>timer:sleep/1</c>, suspends the test
+%%%      case for the specified time. However, this function also multiplies
+%%%      <c>Time</c> by the 'multiply_timetraps' value (if set) and under
+%%%      certain circumstances also scales up the time automatically
+%%%      if 'scale_timetraps' is set to true (default is false).</p>
+sleep({hours,Hs}) ->
+ sleep(trunc(Hs * 1000 * 60 * 60));
+sleep({minutes,Ms}) ->
+ sleep(trunc(Ms * 1000 * 60));
+sleep({seconds,Ss}) ->
+ sleep(trunc(Ss * 1000));
+sleep(Time) ->
+ test_server:adjusted_sleep(Time).
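A minimal usage sketch for the new ct:sleep/1 (illustrative, not part of the patch); the accepted time formats follow the @spec above and the return value follows test_server:adjusted_sleep/1:

    %% Inside a test case: each call suspends the case, after the value has
    %% been adjusted according to multiply_timetraps / scale_timetraps.
    ok = ct:sleep({seconds,5}),
    ok = ct:sleep({minutes,1}),
    ok = ct:sleep(500),            %% plain milliseconds
    ok = ct:sleep(infinity).       %% blocks, e.g. until the timetrap fires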
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 021bda2951..96703031d2 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -24,7 +24,6 @@
-module(ct_run).
-
%% Script interface
-export([script_start/0,script_usage/0]).
@@ -46,11 +45,26 @@
-define(abs(Name), filename:absname(Name)).
-define(testdir(Name, Suite), ct_util:get_testdir(Name, Suite)).
+-record(opts, {vts,
+ shell,
+ cover,
+ coverspec,
+ step,
+ logdir,
+ config = [],
+ event_handlers = [],
+ include = [],
+ silent_connections,
+ multiply_timetraps = 1,
+ scale_timetraps = false,
+ testspecs = [],
+ tests}).
+
%%%-----------------------------------------------------------------
%%% @spec script_start() -> void()
%%%
%%% @doc Start tests via the run_test program or script.
-%%%
+%%%
%%% <p>Example:<br/><code>./run_test -config config.ctc -dir
%%% $TEST_DIR</code></p>
%%%
@@ -60,12 +74,18 @@
script_start() ->
process_flag(trap_exit, true),
Args = merge_arguments(init:get_arguments()),
+ case proplists:get_value(help, Args) of
+ undefined -> script_start(Args);
+ _ -> script_usage()
+ end.
+
+script_start(Args) ->
Tracing = start_trace(Args),
- Res =
+ Res =
case ct_repeat:loop_test(script, Args) of
- false ->
+ false ->
{ok,Cwd} = file:get_cwd(),
- CTVsn =
+ CTVsn =
case filename:basename(code:lib_dir(common_test)) of
CTBase when is_list(CTBase) ->
case string:tokens(CTBase, "-") of
@@ -76,7 +96,7 @@ script_start() ->
io:format("~nCommon Test~s starting (cwd is ~s)~n~n", [CTVsn,Cwd]),
Self = self(),
Pid = spawn_link(fun() -> script_start1(Self, Args) end),
- receive
+ receive
{'EXIT',Pid,Reason} ->
case Reason of
{user_error,What} ->
@@ -101,287 +121,285 @@ script_start() ->
Res.
script_start1(Parent, Args) ->
- VtsOrShell =
- case lists:keymember(vts, 1, Args) of
- true ->
- vts;
- false ->
- case lists:keymember(shell, 1, Args) of
- true -> shell;
- false -> false
- end
- end,
- LogDir =
- case lists:keysearch(logdir, 1, Args) of
- {value,{logdir,[LogD]}} -> LogD;
- false -> "."
- end,
- EvHandlers =
- case lists:keysearch(event_handler, 1, Args) of
- {value,{event_handler,Handlers}} ->
- lists:map(fun(H) -> {list_to_atom(H),[]} end, Handlers);
- false ->
- []
- end,
- Cover =
- case lists:keysearch(cover, 1, Args) of
- {value,{cover,CoverFile}} ->
- {cover,?abs(CoverFile)};
- false ->
- false
- end,
-
- case lists:keysearch(ct_decrypt_key, 1, Args) of
- {value,{_,[DecryptKey]}} ->
+ %% read general start flags
+ Vts = get_start_opt(vts, true, Args),
+ Shell = get_start_opt(shell, true, Args),
+ Cover = get_start_opt(cover, fun(CoverFile) -> ?abs(CoverFile) end, Args),
+ LogDir = get_start_opt(logdir, fun([LogD]) -> LogD end, Args),
+ MultTT = get_start_opt(multiply_timetraps, fun(MT) -> MT end, Args),
+ ScaleTT = get_start_opt(scale_timetraps, fun(CT) -> CT end, Args),
+ EvHandlers = get_start_opt(event_handler,
+ fun(Handlers) ->
+ lists:map(fun(H) ->
+ {list_to_atom(H),[]}
+ end, Handlers) end,
+ [], Args),
+
+ %% check flags and set corresponding application env variables
+
+ %% ct_decrypt_key | ct_decrypt_file
+ case proplists:get_value(ct_decrypt_key, Args) of
+ [DecryptKey] ->
application:set_env(common_test, decrypt, {key,DecryptKey});
- false ->
- case lists:keysearch(ct_decrypt_file, 1, Args) of
- {value,{_,[DecryptFile]}} ->
- application:set_env(common_test, decrypt,
+ undefined ->
+ case proplists:get_value(ct_decrypt_file, Args) of
+ [DecryptFile] ->
+ application:set_env(common_test, decrypt,
{file,filename:absname(DecryptFile)});
- false ->
+ undefined ->
application:unset_env(common_test, decrypt)
end
end,
-
- case lists:keysearch(no_auto_compile, 1, Args) of
- {value,_} ->
- application:set_env(common_test, auto_compile, false);
- false ->
- application:set_env(common_test, auto_compile, true),
-
- InclDirs =
- case lists:keysearch(include,1,Args) of
- {value,{include,Incl}} when is_list(hd(Incl)) ->
- Incl;
- {value,{include,Incl}} when is_list(Incl) ->
- [Incl];
+ %% no_auto_compile + include
+ IncludeDirs =
+ case proplists:get_value(no_auto_compile, Args) of
+ undefined ->
+ application:set_env(common_test, auto_compile, true),
+ InclDirs =
+ case proplists:get_value(include, Args) of
+ {include,Incl} when is_list(hd(Incl)) ->
+ Incl;
+ {include,Incl} when is_list(Incl) ->
+ [Incl];
+ undefined ->
+ []
+ end,
+ case os:getenv("CT_INCLUDE_PATH") of
false ->
- []
- end,
- case os:getenv("CT_INCLUDE_PATH") of
- false ->
- application:set_env(common_test, include, InclDirs);
- CtInclPath ->
- InclDirs1 = string:tokens(CtInclPath,[$:,$ ,$,]),
- application:set_env(common_test, include, InclDirs1++InclDirs)
- end
+ application:set_env(common_test, include, InclDirs),
+ InclDirs;
+ CtInclPath ->
+ AllInclDirs =
+ string:tokens(CtInclPath,[$:,$ ,$,]) ++ InclDirs,
+ application:set_env(common_test, include, AllInclDirs),
+ AllInclDirs
+ end;
+ _ ->
+ application:set_env(common_test, auto_compile, false),
+ []
+ end,
+ %% basic_html - used by ct_logs
+ case proplists:get_value(basic_html, Args) of
+ undefined ->
+ application:set_env(common_test, basic_html, false);
+ _ ->
+ application:set_env(common_test, basic_html, true)
end,
- case lists:keysearch(basic_html, 1, Args) of
- {value,_} ->
- application:set_env(common_test, basic_html, true);
- false ->
- application:set_env(common_test, basic_html, false)
- end,
+    StartOpts = #opts{vts = Vts, shell = Shell, cover = Cover,
+                      logdir = LogDir, event_handlers = EvHandlers,
+                      include = IncludeDirs,
+                      multiply_timetraps = MultTT,
+                      scale_timetraps = ScaleTT},
- Result =
- case lists:keysearch(refresh_logs, 1, Args) of
- {value,{refresh_logs,Refresh}} ->
- LogDir1 = case Refresh of
- [] -> LogDir;
- [RefreshDir] -> ?abs(RefreshDir)
- end,
- {ok,Cwd} = file:get_cwd(),
- file:set_cwd(LogDir1),
- timer:sleep(500), % give the shell time to print version etc
- io:nl(),
- case catch ct_logs:make_all_suites_index(refresh) of
- {'EXIT',ASReason} ->
- file:set_cwd(Cwd),
- {error,{all_suites_index,ASReason}};
- _ ->
- case catch ct_logs:make_all_runs_index(refresh) of
- {'EXIT',ARReason} ->
- file:set_cwd(Cwd),
- {error,{all_runs_index,ARReason}};
- _ ->
- file:set_cwd(Cwd),
- io:format("Logs in ~s refreshed!~n~n", [LogDir1]),
- timer:sleep(500), % time to flush io before quitting
- ok
- end
- end;
- false ->
- Config = ct_config:prepare_config_list(Args),
- case Config of
- [] ->
- case install([{config,[]},
- {event_handler,EvHandlers}],
- LogDir) of
- ok ->
- script_start2(VtsOrShell, [], EvHandlers,
- Args, LogDir, Cover);
- Error ->
- Error
- end;
- Config ->
- case lists:keysearch(spec, 1, Args) of
- false ->
- case check_and_install_configfiles(Config,
- LogDir, EvHandlers) of
- ok ->
- script_start2(VtsOrShell, Config,
- EvHandlers, Args, LogDir,
- Cover);
- Error ->
- Error
- end;
- _ ->
- script_start2(VtsOrShell, Config,
- EvHandlers, Args, LogDir, Cover)
- end
- end
- end,
+ %% check if log files should be refreshed or go on to run tests...
+ Result = run_or_refresh(StartOpts, Args),
+ %% send final results to starting process waiting in script_start/0
Parent ! {self(), Result}.
-check_and_install_configfiles(Configs, LogDir, EvHandlers) ->
- case ct_config:check_config_files(Configs) of
- false->
- install([{config,Configs},
- {event_handler,EvHandlers}], LogDir);
- {value, {error, {nofile, File}}} ->
- {error,{cant_read_config_file,File}};
- {value, {error, {wrong_config, Message}}}->
- {error,{wrong_config, Message}};
- {value, {error, {callback, File}}} ->
- {error,{cant_load_callback_module,File}}
+run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) ->
+ case proplists:get_value(refresh_logs, Args) of
+ undefined ->
+ script_start2(StartOpts, Args);
+ Refresh ->
+ LogDir1 = case Refresh of
+ [] -> which_logdir(LogDir);
+ [RefreshDir] -> ?abs(RefreshDir)
+ end,
+ {ok,Cwd} = file:get_cwd(),
+ file:set_cwd(LogDir1),
+ %% give the shell time to print version etc
+ timer:sleep(500),
+ io:nl(),
+ case catch ct_logs:make_all_suites_index(refresh) of
+ {'EXIT',ASReason} ->
+ file:set_cwd(Cwd),
+ {error,{all_suites_index,ASReason}};
+ _ ->
+ case catch ct_logs:make_all_runs_index(refresh) of
+ {'EXIT',ARReason} ->
+ file:set_cwd(Cwd),
+ {error,{all_runs_index,ARReason}};
+ _ ->
+ file:set_cwd(Cwd),
+ io:format("Logs in ~s refreshed!~n~n", [LogDir1]),
+ timer:sleep(500), % time to flush io before quitting
+ ok
+ end
+ end
end.
-script_start2(false, ConfigFiles, EvHandlers, Args, LogDir, Cover) ->
- case lists:keysearch(spec, 1, Args) of
- {value,{spec,[]}} ->
+script_start2(StartOpts = #opts{vts = undefined,
+ shell = undefined}, Args) ->
+ TestSpec = proplists:get_value(spec, Args),
+ Opts =
+ case TestSpec of
+ Specs when Specs =/= [], Specs =/= undefined ->
+ %% using testspec as input for test
+ Relaxed = get_start_opt(allow_user_terms, true, false, Args),
+ case catch ct_testspec:collect_tests_from_file(Specs, Relaxed) of
+ {error,Reason} ->
+ {error,Reason};
+ TS ->
+ SpecStartOpts = get_data_for_node(TS, node()),
+
+ LogDir = choose_val(StartOpts#opts.logdir,
+ SpecStartOpts#opts.logdir),
+
+ Cover = choose_val(StartOpts#opts.cover,
+ SpecStartOpts#opts.cover),
+ MultTT = choose_val(StartOpts#opts.multiply_timetraps,
+ SpecStartOpts#opts.multiply_timetraps),
+ ScaleTT = choose_val(StartOpts#opts.scale_timetraps,
+ SpecStartOpts#opts.scale_timetraps),
+ AllEvHs = merge_vals([StartOpts#opts.event_handlers,
+ SpecStartOpts#opts.event_handlers]),
+ AllInclude = merge_vals([StartOpts#opts.include,
+ SpecStartOpts#opts.include]),
+ application:set_env(common_test, include, AllInclude),
+
+ StartOpts#opts{testspecs = Specs,
+ cover = Cover,
+ logdir = LogDir,
+ config = SpecStartOpts#opts.config,
+ event_handlers = AllEvHs,
+ include = AllInclude,
+ multiply_timetraps = MultTT,
+ scale_timetraps = ScaleTT}
+ end;
+ _ ->
+ StartOpts
+ end,
+ %% read config/userconfig from start flags
+ InitConfig = ct_config:prepare_config_list(Args),
+ case TestSpec of
+ [] ->
{error,no_testspec_specified};
- {value,{spec,Specs}} ->
- Relaxed = lists:keymember(allow_user_terms, 1, Args),
- %% using testspec as input for test
- case catch ct_testspec:collect_tests_from_file(Specs, Relaxed) of
- {error,Reason} ->
- {error,Reason};
- TS ->
- {LogDir1,TSCoverFile,ConfigFiles1,EvHandlers1,Include1} =
- get_data_for_node(TS,node()),
- UserInclude =
- case application:get_env(common_test, include) of
- {ok,Include} -> Include++Include1;
- _ -> Include1
- end,
- application:set_env(common_test, include, UserInclude),
- LogDir2 = which_logdir(LogDir,LogDir1),
- CoverOpt = case {Cover,TSCoverFile} of
- {false,undef} -> [];
- {_,undef} -> [Cover];
- {false,_} -> [{cover,TSCoverFile}]
- end,
- case check_and_install_configfiles(
- ConfigFiles++ConfigFiles1, LogDir2,
- EvHandlers++EvHandlers1) of
- ok ->
- {Run,Skip} = ct_testspec:prepare_tests(TS, node()),
- do_run(Run, Skip, CoverOpt, Args, LogDir2);
- Error ->
- Error
- end
+ undefined -> % no testspec used
+ case check_and_install_configfiles(InitConfig,
+ which(logdir,Opts#opts.logdir),
+ Opts#opts.event_handlers) of
+ ok -> % go on read tests from start flags
+ script_start3(Opts#opts{config=InitConfig}, Args);
+ Error ->
+ Error
end;
- false ->
- script_start3(false, ConfigFiles, EvHandlers, Args, LogDir, Cover)
+ _ -> % testspec used
+ %% merge config from start flags with config from testspec
+ AllConfig = merge_vals([InitConfig, Opts#opts.config]),
+ case check_and_install_configfiles(AllConfig,
+ which(logdir,Opts#opts.logdir),
+ Opts#opts.event_handlers) of
+ ok -> % read tests from spec
+ {Run,Skip} = ct_testspec:prepare_tests(TS, node()),
+ do_run(Run, Skip, Opts#opts{config=AllConfig}, Args);
+ Error ->
+ Error
+ end
end;
-script_start2(VtsOrShell, ConfigFiles, EvHandlers, Args, LogDir, Cover) ->
- script_start3(VtsOrShell, ConfigFiles, EvHandlers, Args, LogDir, Cover).
-script_start3(VtsOrShell, ConfigFiles, EvHandlers, Args, LogDir, Cover) ->
- case lists:keysearch(dir, 1, Args) of
- {value,{dir,[]}} ->
- {error,no_dir_specified};
- {value,{dir,Dirs}} ->
- script_start4(VtsOrShell, ConfigFiles, EvHandlers, tests(Dirs),
- Cover, Args, LogDir);
+script_start2(StartOpts, Args) ->
+ script_start3(StartOpts, Args).
+
+check_and_install_configfiles(Configs, LogDir, EvHandlers) ->
+ case ct_config:check_config_files(Configs) of
false ->
- case lists:keysearch(suite, 1, Args) of
- {value,{suite,[]}} ->
+ install([{config,Configs},
+ {event_handler,EvHandlers}], LogDir);
+ {value,{error,{nofile,File}}} ->
+ {error,{cant_read_config_file,File}};
+ {value,{error,{wrong_config,Message}}}->
+ {error,{wrong_config,Message}};
+ {value,{error,{callback,File}}} ->
+ {error,{cant_load_callback_module,File}}
+ end.
+
+script_start3(StartOpts = #opts{cover = Cover}, Args) ->
+ case proplists:get_value(dir, Args) of
+ [] ->
+ {error,no_dir_specified};
+ Dirs when is_list(Dirs) ->
+ script_start4(StartOpts#opts{tests = tests(Dirs)}, Args);
+ undefined ->
+ case proplists:get_value(suite, Args) of
+ [] ->
{error,no_suite_specified};
- {value,{suite,Suites}} ->
- StepOrCover =
- case lists:keysearch(step, 1, Args) of
- {value,Step} -> Step;
- false -> Cover
- end,
- S2M = fun(S) ->
- {filename:dirname(S),
- list_to_atom(
- filename:rootname(filename:basename(S)))}
- end,
- DirMods = lists:map(S2M, Suites),
- {Specified,GroupsAndCases} =
- case {lists:keysearch(group, 1, Args),
- lists:keysearch('case', 1, Args)} of
- {{value,{_,Gs}},{value,{_,Cs}}} -> {true,Gs++Cs};
- {{value,{_,Gs}},_} -> {true,Gs};
- {_,{value,{_,Cs}}} -> {true,Cs};
- _ -> {false,[]}
- end,
- if Specified, length(GroupsAndCases) == 0 ->
- {error,no_case_or_group_specified};
- Specified, length(DirMods) > 1 ->
+ Suites when is_list(Suites) ->
+ StartOpts1 =
+ get_start_opt(step,
+ fun(Step) ->
+ StartOpts#opts{step = Step,
+ cover = undefined}
+ end, StartOpts, Args),
+ DirMods = [suite_to_test(S) || S <- Suites],
+                      case groups_and_cases(proplists:get_value(group, Args),
+                                            proplists:get_value(testcase, Args)) of
+ Error = {error,_} ->
+ Error;
+ [] when DirMods =/= [] ->
+ Ts = tests(DirMods),
+ script_start4(StartOpts1#opts{tests = Ts}, Args);
+ GroupsAndCases when length(DirMods) == 1 ->
+ Ts = tests(DirMods, GroupsAndCases),
+ script_start4(StartOpts1#opts{tests = Ts}, Args);
+ [_,_|_] when length(DirMods) > 1 ->
{error,multiple_suites_and_cases};
- length(GroupsAndCases) > 0, length(DirMods) == 1 ->
- GsAndCs = lists:map(fun(C) -> list_to_atom(C) end,
- GroupsAndCases),
- script_start4(VtsOrShell, ConfigFiles, EvHandlers,
- tests(DirMods, GsAndCs),
- StepOrCover, Args, LogDir);
- not Specified, length(DirMods) > 0 ->
- script_start4(VtsOrShell, ConfigFiles, EvHandlers,
- tests(DirMods),
- StepOrCover, Args, LogDir);
- true ->
- {error,incorrect_suite_and_case_options}
+ _ ->
+ {error,incorrect_suite_option}
end;
- false when VtsOrShell=/=false ->
- script_start4(VtsOrShell, ConfigFiles, EvHandlers,
- [], Cover, Args, LogDir);
- false ->
- script_usage(),
- {error,incorrect_usage}
+ undefined ->
+ if StartOpts#opts.vts ; StartOpts#opts.shell ->
+ script_start4(StartOpts#opts{tests = []}, Args);
+ true ->
+ script_usage(),
+ {error,incorrect_usage}
+ end
end
end.
-script_start4(vts, ConfigFiles, EvHandlers, Tests, false, _Args, LogDir) ->
- vts:init_data(ConfigFiles, EvHandlers, ?abs(LogDir), Tests);
-script_start4(shell, ConfigFiles, EvHandlers, _Tests, false, Args, LogDir) ->
- Opts = [{config,ConfigFiles},{event_handler,EvHandlers}],
+script_start4(#opts{vts = true, config = Config, event_handlers = EvHandlers,
+ tests = Tests, logdir = LogDir}, Args) ->
+ vts:init_data(Config, EvHandlers, ?abs(LogDir), Tests);
+
+script_start4(#opts{shell = true, config = Config, event_handlers = EvHandlers,
+ logdir = LogDir, testspecs = Specs}, Args) ->
+ InstallOpts = [{config,Config},{event_handler,EvHandlers}],
-    if ConfigFiles == [] ->
+    if Config == [] ->
ok;
true ->
- io:format("\nInstalling: ~p\n\n", [ConfigFiles])
+ io:format("\nInstalling: ~p\n\n", [Config])
end,
- case install(Opts) of
+ case install(InstallOpts) of
ok ->
ct_util:start(interactive, LogDir),
- log_ts_names(Args),
+ log_ts_names(Specs),
io:nl(),
ok;
Error ->
Error
end;
-script_start4(vts, _CfgFs, _EvHs, _Tests, _Cover={cover,_}, _Args, _LogDir) ->
- %% Add support later (maybe).
- script_usage(),
- erlang:halt();
-script_start4(shell, _CfgFs, _EvHs, _Tests, _Cover={cover,_}, _Args, _LogDir) ->
- %% Add support later (maybe).
- script_usage();
-script_start4(false, _CfgFs, _EvHs, Tests, Cover={cover,_}, Args, LogDir) ->
- do_run(Tests, [], [Cover], Args, LogDir);
-script_start4(false, _ConfigFiles, _EvHandlers, Tests, false, Args, LogDir) ->
- do_run(Tests, [], [], Args, LogDir);
-script_start4(false, _ConfigFiles, _EvHandlers, Test, Step, Args, LogDir) ->
- do_run(Test, [], [Step], Args, LogDir);
-script_start4(vts, _ConfigFiles, _EvHandlers, _Test, _Step, _Args, _LogDir) ->
- script_usage(),
+
+script_start4(#opts{vts = true, cover = Cover}, _) ->
+ case Cover of
+ undefined ->
+ script_usage();
+ _ ->
+ %% Add support later (maybe).
+ io:format("\nCan't run cover in vts mode.\n\n", [])
+ end,
erlang:halt();
-script_start4(shell, _ConfigFiles, _EvHandlers, _Test, _Step, _Args, _LogDir) ->
- script_usage().
+
+script_start4(#opts{shell = true, cover = Cover}, _) ->
+ case Cover of
+ undefined ->
+ script_usage();
+ _ ->
+ %% Add support later (maybe).
+ io:format("\nCan't run cover in interactive mode.\n\n", [])
+ end;
+
+script_start4(Opts = #opts{tests = Tests}, Args) ->
+ do_run(Tests, [], Opts, Args).
%%%-----------------------------------------------------------------
%%% @spec script_usage() -> ok
@@ -394,8 +412,10 @@ script_usage() ->
"\n\t[-decrypt_key Key] | [-decrypt_file KeyFile]"
"\n\t[-dir TestDir1 TestDir2 .. TestDirN] |"
"\n\t[-suite Suite [-case Case]]"
- "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
+ "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
"\n\t[-no_auto_compile]"
+ "\n\t[-multiply_timetraps]"
+ "\n\t[-scale_timetraps]"
"\n\t[-basic_html]\n\n"),
io:format("Run tests from command line:\n\n"
"\trun_test [-dir TestDir1 TestDir2 .. TestDirN] |"
@@ -409,10 +429,12 @@ script_usage() ->
"\n\t[-stylesheet CSSFile]"
"\n\t[-cover CoverCfgFile]"
"\n\t[-event_handler EvHandler1 EvHandler2 .. EvHandlerN]"
- "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
+ "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
"\n\t[-no_auto_compile]"
- "\n\t[-basic_html]"
- "\n\t[-repeat N [-force_stop]] |"
+ "\n\t[-multiply_timetraps]"
+ "\n\t[-scale_timetraps]"
+ "\n\t[-basic_html]"
+ "\n\t[-repeat N [-force_stop]] |"
"\n\t[-duration HHMMSS [-force_stop]] |"
"\n\t[-until [YYMoMoDD]HHMMSS [-force_stop]]\n\n"),
io:format("Run tests using test specification:\n\n"
@@ -425,10 +447,12 @@ script_usage() ->
"\n\t[-stylesheet CSSFile]"
"\n\t[-cover CoverCfgFile]"
"\n\t[-event_handler EvHandler1 EvHandler2 .. EvHandlerN]"
- "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
+ "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
"\n\t[-no_auto_compile]"
- "\n\t[-basic_html]"
- "\n\t[-repeat N [-force_stop]] |"
+ "\n\t[-multiply_timetraps]"
+ "\n\t[-scale_timetraps]"
+ "\n\t[-basic_html]"
+ "\n\t[-repeat N [-force_stop]] |"
"\n\t[-duration HHMMSS [-force_stop]] |"
"\n\t[-until [YYMoMoDD]HHMMSS [-force_stop]]\n\n"),
io:format("Refresh the HTML index files:\n\n"
@@ -439,7 +463,6 @@ script_usage() ->
"\trun_test -shell"
"\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
"\n\t[-decrypt_key Key] | [-decrypt_file KeyFile]\n\n").
-
%%%-----------------------------------------------------------------
%%% @hidden
@@ -462,13 +485,13 @@ install(Opts, LogDir) ->
case whereis(ct_util_server) of
undefined ->
VarFile = variables_file_name(LogDir),
- %% io:format("Varfile=~p~n", [VarFile]),
+ io:format("Varfile=~p~n", [VarFile]),
case file:open(VarFile, [write]) of
{ok,Fd} ->
[io:format(Fd, "~p.\n", [Opt]) || Opt <- Opts],
file:close(Fd),
ok;
- {error,Reason} ->
+ {error,Reason} ->
io:format("CT failed to install configuration data. Please "
"verify that the log directory exists and that "
"write permission is set.\n\n", []),
@@ -487,59 +510,58 @@ variables_file_name(Dir) ->
filename:join(Dir, "variables-"++atom_to_list(node())).
%%%-----------------------------------------------------------------
-%%% @hidden
+%%% @spec run_test(Opts) -> Result
+%%% Opts = [tuple()]
+%%% Result = [TestResult] | {error,Reason}
+%%%
+%%% @doc Start tests from the erlang shell or from an erlang program.
%%% @equiv ct:run_test/1
+%%%-----------------------------------------------------------------
-%% Opts = [OptTuples]
-%% OptTuples = {config,CfgFiles} | {dir,TestDirs} | {suite,Suites} |
-%% {testcase,Cases} | {spec,TestSpecs} | {allow_user_terms,Bool} |
-%% {logdir,LogDir} | {cover,CoverSpecFile} | {step,StepOpts} |
-%% {silent_connections,Conns} | {event_handler,EventHandlers} |
-%% {include,InclDirs} | {auto_compile,Bool} |
-%% {repeat,N} | {duration,DurTime} | {until,StopTime} | {force_stop,Bool} |
-%% {decrypt,KeyOrFile}
-
-run_test(Opt) when is_tuple(Opt) ->
- run_test([Opt]);
-
-run_test(Opts) when is_list(Opts) ->
- case lists:keysearch(refresh_logs, 1, Opts) of
- {value,{_,RefreshDir}} ->
- refresh_logs(?abs(RefreshDir)),
- ok;
- false ->
- Tracing = start_trace(Opts),
+run_test(StartOpt) when is_tuple(StartOpt) ->
+ run_test([StartOpt]);
+
+run_test(StartOpts) when is_list(StartOpts) ->
+    case proplists:get_value(refresh_logs, StartOpts) of
+ undefined ->
+ Tracing = start_trace(StartOpts),
{ok,Cwd} = file:get_cwd(),
- io:format("~nCommon Test starting (cwd is ~s)~n~n", [Cwd]),
+ io:format("~nCommon Test starting (cwd is ~s)~n~n", [Cwd]),
Res =
- case ct_repeat:loop_test(func, Opts) of
+ case ct_repeat:loop_test(func, StartOpts) of
false ->
- case catch run_test1(Opts) of
- {'EXIT',Reason} ->
+ case catch run_test1(StartOpts) of
+ {'EXIT',Reason} ->
file:set_cwd(Cwd),
{error,Reason};
- Result ->
+ Result ->
Result
end;
Result ->
Result
end,
stop_trace(Tracing),
- Res
+ Res;
+ RefreshDir ->
+ refresh_logs(?abs(RefreshDir)),
+ ok
end.
-run_test1(Opts) ->
- LogDir =
- case lists:keysearch(logdir, 1, Opts) of
- {value,{_,LD}} when is_list(LD) -> LD;
- false -> "."
- end,
- CfgFiles = ct_config:get_config_file_list(Opts),
+run_test1(StartOpts) ->
+ %% logdir
+ LogDir = get_start_opt(logdir, fun(LD) when is_list(LD) -> LD end,
+ ".", StartOpts),
+ %% config & userconfig
+ CfgFiles = ct_config:get_config_file_list(StartOpts),
+
+ %% event handlers
EvHandlers =
- case lists:keysearch(event_handler, 1, Opts) of
- {value,{_,H}} when is_atom(H) ->
+ case proplists:get_value(event_handler, StartOpts) of
+ undefined ->
+ [];
+ H when is_atom(H) ->
[{H,[]}];
- {value,{_,H}} ->
+ H ->
Hs =
if is_tuple(H) -> [H];
is_list(H) -> H;
@@ -554,41 +576,34 @@ run_test1(Opts) ->
{EH,Args};
(_) ->
[]
- end, Hs));
- _ ->
- []
- end,
- SilentConns =
- case lists:keysearch(silent_connections, 1, Opts) of
- {value,{_,all}} ->
- [];
- {value,{_,Conns}} ->
- Conns;
- _ ->
- undefined
- end,
- Cover =
- case lists:keysearch(cover, 1, Opts) of
- {value,{_,CoverFile}} ->
- [{cover,?abs(CoverFile)}];
- _ ->
- []
+ end, Hs))
end,
+
+ %% silent connections
+ SilentConns = get_start_opt(silent_connections,
+ fun(all) -> [];
+ (Conns) -> Conns
+ end, StartOpts),
+ %% code coverage
+ Cover = get_start_opt(cover, fun(CoverFile) -> ?abs(CoverFile) end, StartOpts),
+
+ %% timetrap manipulation
+ MultiplyTT = get_start_opt(multiply_timetraps, value, 1, StartOpts),
+ ScaleTT = get_start_opt(scale_timetraps, value, false, StartOpts),
+
+ %% auto compile & include files
Include =
- case lists:keysearch(auto_compile, 1, Opts) of
- {value,{auto_compile,ACBool}} ->
- application:set_env(common_test, auto_compile, ACBool),
- [];
- _ ->
+ case proplists:get_value(auto_compile, StartOpts) of
+ undefined ->
application:set_env(common_test, auto_compile, true),
InclDirs =
- case lists:keysearch(include, 1, Opts) of
- {value,{include,Incl}} when is_list(hd(Incl)) ->
- Incl;
- {value,{include,Incl}} when is_list(Incl) ->
- [Incl];
- false ->
- []
+ case proplists:get_value(include, StartOpts) of
+ undefined ->
+ [];
+ Incl when is_list(hd(Incl)) ->
+ Incl;
+ Incl when is_list(Incl) ->
+ [Incl]
end,
case os:getenv("CT_INCLUDE_PATH") of
false ->
@@ -599,10 +614,14 @@ run_test1(Opts) ->
AllInclDirs = InclDirs1++InclDirs,
application:set_env(common_test, include, AllInclDirs),
AllInclDirs
- end
+ end;
+ ACBool ->
+ application:set_env(common_test, auto_compile, ACBool),
+ []
end,
- case lists:keysearch(decrypt, 1, Opts) of
+ %% decrypt config file
+ case lists:keysearch(decrypt, 1, StartOpts) of
{value,{_,Key={key,_}}} ->
application:set_env(common_test, decrypt, Key);
{value,{_,{file,KeyFile}}} ->
@@ -611,39 +630,35 @@ run_test1(Opts) ->
application:unset_env(common_test, decrypt)
end,
- case lists:keysearch(basic_html, 1, Opts) of
- {value,{basic_html,BasicHtmlBool}} ->
- application:set_env(common_test, basic_html, BasicHtmlBool);
- _ ->
- application:set_env(common_test, basic_html, false)
+ %% basic html - used by ct_logs
+ case proplists:get_value(basic_html, StartOpts) of
+ undefined ->
+ application:set_env(common_test, basic_html, false);
+ BasicHtmlBool ->
+ application:set_env(common_test, basic_html, BasicHtmlBool)
end,
- case lists:keysearch(spec, 1, Opts) of
- {value,{_,Specs}} ->
- Relaxed =
- case lists:keysearch(allow_user_terms, 1, Opts) of
- {value,{_,true}} -> true;
- _ -> false
- end,
+ %% stepped execution
+    Step = get_start_opt(step, value, StartOpts),
+
+ Opts = #opts{cover = Cover, step = Step, logdir = LogDir, config = CfgFiles,
+ event_handlers = EvHandlers, include = Include,
+ silent_connections = SilentConns, multiply_timetraps = MultiplyTT,
+ scale_timetraps = ScaleTT},
+
+ %% test specification
+ case proplists:get_value(spec, StartOpts) of
+ undefined ->
+ case proplists:get_value(prepared_tests, StartOpts) of
+ undefined -> % use dir|suite|case
+ run_dir(Opts, StartOpts);
+ {{Run,Skip},Specs} -> % use prepared tests
+ run_prepared(Run, Skip, Opts#opts{testspecs = Specs}, StartOpts)
+ end;
+ Specs ->
+            Relaxed = get_start_opt(allow_user_terms, value, false, StartOpts),
%% using testspec(s) as input for test
- run_spec_file(LogDir, CfgFiles, EvHandlers, Include, Specs, Relaxed, Cover,
- replace_opt([{silent_connections,SilentConns}], Opts));
- false ->
- case lists:keysearch(prepared_tests, 1, Opts) of
- {value,{_,{Run,Skip},Specs}} -> % use prepared tests
- run_prepared(LogDir, CfgFiles, EvHandlers,
- Run, Skip, Cover,
- replace_opt([{silent_connections,SilentConns},
- {spec,Specs}],Opts));
- false -> % use dir|suite|case
- StepOrCover =
- case lists:keysearch(step, 1, Opts) of
- {value,Step} -> [Step];
- false -> Cover
- end,
- run_dir(LogDir, CfgFiles, EvHandlers, StepOrCover,
- replace_opt([{silent_connections,SilentConns}], Opts))
- end
+ run_spec_file(Relaxed, Opts#opts{testspecs = Specs}, StartOpts)
end.
replace_opt([O={Key,_Val}|Os], Opts) ->
@@ -651,83 +666,105 @@ replace_opt([O={Key,_Val}|Os], Opts) ->
replace_opt([], Opts) ->
Opts.
-run_spec_file(LogDir, CfgFiles, EvHandlers, Include, Specs, Relaxed, Cover, Opts) ->
+run_spec_file(Relaxed,
+ Opts = #opts{testspecs = Specs, config = CfgFiles},
+ StartOpts) ->
Specs1 = case Specs of
[X|_] when is_integer(X) -> [Specs];
_ -> Specs
end,
- AbsSpecs = lists:map(fun(SF) -> ?abs(SF) end, Specs1),
+ AbsSpecs = lists:map(fun(SF) -> ?abs(SF) end, Specs1),
log_ts_names(AbsSpecs),
case catch ct_testspec:collect_tests_from_file(AbsSpecs, Relaxed) of
{error,CTReason} ->
exit(CTReason);
TS ->
- {LogDir1,TSCoverFile,CfgFiles1,EvHandlers1,Include1} =
- get_data_for_node(TS, node()),
- application:set_env(common_test, include, Include++Include1),
- LogDir2 = which_logdir(LogDir, LogDir1),
- CoverOpt = case {Cover,TSCoverFile} of
- {[],undef} -> [];
- {_,undef} -> Cover;
- {[],_} -> [{cover,TSCoverFile}]
- end,
- case check_and_install_configfiles(CfgFiles++CfgFiles1, LogDir2,
- EvHandlers++EvHandlers1) of
+ SpecOpts = get_data_for_node(TS, node()),
+ LogDir = choose_val(Opts#opts.logdir,
+ SpecOpts#opts.logdir),
+ AllConfig = merge_vals([CfgFiles, SpecOpts#opts.config]),
+ Cover = choose_val(Opts#opts.cover,
+ SpecOpts#opts.cover),
+ MultTT = choose_val(Opts#opts.multiply_timetraps,
+ SpecOpts#opts.multiply_timetraps),
+ ScaleTT = choose_val(Opts#opts.scale_timetraps,
+ SpecOpts#opts.scale_timetraps),
+ AllEvHs = merge_vals([Opts#opts.event_handlers,
+ SpecOpts#opts.event_handlers]),
+ AllInclude = merge_vals([Opts#opts.include,
+ SpecOpts#opts.include]),
+ application:set_env(common_test, include, AllInclude),
+
+ case check_and_install_configfiles(AllConfig,
+ which(logdir,LogDir),
+ AllEvHs) of
ok ->
+            Opts1 = Opts#opts{testspecs = AbsSpecs,
+                              cover = Cover,
+                              logdir = LogDir,
+                              config = AllConfig,
+                              event_handlers = AllEvHs,
+                              include = AllInclude,
+                              multiply_timetraps = MultTT,
+                              scale_timetraps = ScaleTT},
+
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
- do_run(Run, Skip, CoverOpt,
- replace_opt([{spec,AbsSpecs}], Opts),
- LogDir2);
+ do_run(Run, Skip, Opts1, StartOpts);
{error,GCFReason} ->
exit(GCFReason)
end
end.
-run_prepared(LogDir, CfgFiles, EvHandlers, Run, Skip, Cover, Opts) ->
+run_prepared(Run, Skip, Opts = #opts{logdir = LogDir,
+ config = CfgFiles,
+ event_handlers = EvHandlers},
+ StartOpts) ->
case check_and_install_configfiles(CfgFiles, LogDir, EvHandlers) of
ok ->
- do_run(Run, Skip, Cover, Opts, LogDir);
+ do_run(Run, Skip, Opts, StartOpts);
{error,Reason} ->
exit(Reason)
- end.
+ end.
check_config_file(Callback, File)->
case Callback:check_parameter(File) of
- {ok, {file, File}}->
+ {ok,{file,File}}->
?abs(File);
- {ok, {config, _}}->
+ {ok,{config,_}}->
File;
- {error, {wrong_config, Message}}->
- exit({wrong_config, {Callback, Message}});
- {error, {nofile, File}}->
- exit({no_such_file, ?abs(File)})
+ {nok,{wrong_config,Message}}->
+ exit({wrong_config,{Callback,Message}});
+ {nok,{nofile,File}}->
+ exit({no_such_file,?abs(File)})
end.
-run_dir(LogDir, CfgFiles, EvHandlers, StepOrCover, Opts) ->
- AbsCfgFiles =
- lists:map(fun({Callback, FileList})->
- case code:is_loaded(Callback) of
- {file, _Path}->
- ok;
- false->
- case code:load_file(Callback) of
- {module, Callback}->
- ok;
- {error, _}->
- exit({no_such_module, Callback})
- end
- end,
- {Callback,
- lists:map(fun(File)->
- check_config_file(Callback, File)
- end, FileList)}
- end,
- CfgFiles),
+run_dir(Opts = #opts{logdir = LogDir,
+ config = CfgFiles,
+ event_handlers = EvHandlers}, StartOpts) ->
+ AbsCfgFiles =
+ lists:map(fun({Callback,FileList})->
+ case code:is_loaded(Callback) of
+ {file,_Path}->
+ ok;
+ false ->
+ case code:load_file(Callback) of
+ {module,Callback}->
+ ok;
+ {error,_}->
+ exit({no_such_module,Callback})
+ end
+ end,
+ {Callback,
+ lists:map(fun(File)->
+ check_config_file(Callback, File)
+ end, FileList)}
+ end, CfgFiles),
case install([{config,AbsCfgFiles},{event_handler,EvHandlers}], LogDir) of
ok -> ok;
{error,IReason} -> exit(IReason)
end,
- case lists:keysearch(dir,1,Opts) of
+ case lists:keysearch(dir, 1, Opts) of
{value,{_,Dirs=[Dir|_]}} when not is_integer(Dir),
length(Dirs)>1 ->
%% multiple dirs (no suite)
@@ -754,12 +791,12 @@ run_dir(LogDir, CfgFiles, EvHandlers, StepOrCover, Opts) ->
do_run(tests(lists:map(S2M, Suites)), [], StepOrCover, Opts, LogDir);
_ ->
exit(no_tests_specified)
- end;
+ end;
{value,{_,Dir}} ->
case lists:keysearch(suite, 1, Opts) of
{value,{_,Suite}} when is_integer(hd(Suite)) ; is_atom(Suite) ->
- Mod = if is_atom(Suite) -> Suite;
- true -> list_to_atom(Suite)
+ Mod = if is_atom(Suite) -> Suite;
+ true -> list_to_atom(Suite)
end,
case listify(proplists:get_value(group, Opts, [])) ++
listify(proplists:get_value(testcase, Opts, [])) of
@@ -770,77 +807,90 @@ run_dir(LogDir, CfgFiles, EvHandlers, StepOrCover, Opts) ->
end;
{value,{_,Suites=[Suite|_]}} when is_list(Suite) ->
Mods = lists:map(fun(Str) -> list_to_atom(Str) end, Suites),
- do_run(tests(delistify(Dir), Mods), [], StepOrCover, Opts, LogDir);
+ do_run(tests(delistify(Dir), Mods), [], StepOrCover, Opts, LogDir);
{value,{_,Suites}} ->
- do_run(tests(delistify(Dir), Suites), [], StepOrCover, Opts, LogDir);
+ do_run(tests(delistify(Dir), Suites), [], StepOrCover, Opts, LogDir);
false -> % no suite, only dir
do_run(tests(listify(Dir)), [], StepOrCover, Opts, LogDir)
- end
+ end
end.
%%%-----------------------------------------------------------------
-%%% @hidden
+%%% @spec run_testspec(TestSpec) -> Result
+%%% TestSpec = [term()]
%%%
+%%% @doc Run test specified by <code>TestSpec</code>. The terms are
+%%% the same as those used in test specification files.
+%%% @equiv ct:run_testspec/1
+%%%-----------------------------------------------------------------
-%% using testspec(s) as input for test
run_testspec(TestSpec) ->
{ok,Cwd} = file:get_cwd(),
io:format("~nCommon Test starting (cwd is ~s)~n~n", [Cwd]),
case catch run_testspec1(TestSpec) of
- {'EXIT',Reason} ->
+ {'EXIT',Reason} ->
file:set_cwd(Cwd),
{error,Reason};
- Result ->
+ Result ->
Result
end.
run_testspec1(TestSpec) ->
- case ct_testspec:collect_tests_from_list(TestSpec,false) of
+ case ct_testspec:collect_tests_from_list(TestSpec, false) of
{error,CTReason} ->
exit(CTReason);
TS ->
- {LogDir,TSCoverFile,CfgFiles,EvHandlers,Include} =
- get_data_for_node(TS,node()),
- case os:getenv("CT_INCLUDE_PATH") of
- false ->
- application:set_env(common_test, include, Include);
- CtInclPath ->
- EnvInclude = string:tokens(CtInclPath, [$:,$ ,$,]),
- application:set_env(common_test, include, EnvInclude++Include)
- end,
- CoverOpt = if TSCoverFile == undef -> [];
- true -> [{cover,TSCoverFile}]
- end,
- case check_and_install_configfiles(CfgFiles,LogDir,EvHandlers) of
+ Opts = get_data_for_node(TS, node()),
+
+ AllInclude =
+ case os:getenv("CT_INCLUDE_PATH") of
+ false ->
+ Opts#opts.include;
+ CtInclPath ->
+ EnvInclude = string:tokens(CtInclPath, [$:,$ ,$,]),
+                        EnvInclude++Opts#opts.include
+ end,
+ application:set_env(common_test, include, AllInclude),
+
+ case check_and_install_configfiles(Opts#opts.config,
+ which(logdir,Opts#opts.logdir),
+ Opts#opts.event_handlers) of
ok ->
- {Run,Skip} = ct_testspec:prepare_tests(TS,node()),
- do_run(Run,Skip,CoverOpt,[],LogDir);
+ Opts1 = Opts#opts{testspecs = [TestSpec],
+ include = AllInclude},
+ {Run,Skip} = ct_testspec:prepare_tests(TS, node()),
+ do_run(Run, Skip, Opts1, []);
{error,GCFReason} ->
exit(GCFReason)
end
end.
-
get_data_for_node(#testspec{logdir=LogDirs,
cover=CoverFs,
config=Cfgs,
userconfig=UsrCfgs,
event_handler=EvHs,
- include=Incl}, Node) ->
- LogDir = case lists:keysearch(Node,1,LogDirs) of
- {value,{Node,Dir}} -> Dir;
- false -> "."
+ include=Incl,
+ multiply_timetraps=MTs,
+ scale_timetraps=STs}, Node) ->
+ LogDir = case proplists:get_value(Node, LogDirs) of
+ undefined -> ".";
+ Dir -> Dir
end,
- Cover = case lists:keysearch(Node,1,CoverFs) of
- {value,{Node,CovFile}} -> CovFile;
- false -> undef
- end,
- ConfigFiles = [{?ct_config_txt, F} || {N,F} <- Cfgs, N==Node] ++
- [CBF || {N, CBF} <- UsrCfgs, N==Node],
+ Cover = proplists:get_value(Node, CoverFs),
+ MT = proplists:get_value(Node, MTs),
+ ST = proplists:get_value(Node, STs),
+ ConfigFiles = [{?ct_config_txt,F} || {N,F} <- Cfgs, N==Node] ++
+ [CBF || {N,CBF} <- UsrCfgs, N==Node],
EvHandlers = [{H,A} || {N,H,A} <- EvHs, N==Node],
Include = [I || {N,I} <- Incl, N==Node],
- {LogDir,Cover,ConfigFiles,EvHandlers,Include}.
-
+ #opts{logdir = LogDir,
+ cover = Cover,
+ config = ConfigFiles,
+ event_handlers = EvHandlers,
+ include = Include,
+ multiply_timetraps = MT,
+ scale_timetraps = ST}.
refresh_logs(LogDir) ->
{ok,Cwd} = file:get_cwd(),
@@ -865,11 +915,27 @@ refresh_logs(LogDir) ->
end
end.
-which_logdir(".",Dir) ->
+which(logdir, undefined) ->
+ ".";
+which(logdir, Dir) ->
Dir;
-which_logdir(Dir,_) ->
- Dir.
-
+which(multiply_timetraps, undefined) ->
+ 1;
+which(multiply_timetraps, MT) ->
+ MT;
+which(scale_timetraps, undefined) ->
+ false;
+which(scale_timetraps, ST) ->
+ ST.
+
+choose_val(undefined, V1) ->
+ V1;
+choose_val(V0, _V1) ->
+ V0.
+
+merge_vals(Vs) ->
+ lists:append(Vs).
+
listify([C|_]=Str) when is_integer(C) -> [Str];
listify(L) when is_list(L) -> L;
listify(E) -> [E].
@@ -899,6 +965,22 @@ run(TestDirs) ->
install([]),
do_run(tests(TestDirs), []).
+suite_to_test(Suite) ->
+ {filename:dirname(Suite),list_to_atom(filename:rootname(filename:basename(Suite)))}.
+
+groups_and_cases(Gs, Cs) when (Gs == undefined ; Gs == []),
+ (Cs == undefined ; Cs == []) ->
+ [];
+groups_and_cases(Gs, Cs) when Gs == undefined ; Gs == [] ->
+ [list_to_atom(C) || C <- Cs];
+groups_and_cases(Gs, Cs) when Cs == undefined ; Cs == [] ->
+ [{list_to_atom(G),all} || G <- Gs];
+groups_and_cases([G], Cs) ->
+ [{list_to_atom(G),[list_to_atom(C) || C <- Cs]}];
+groups_and_cases([_,_|_] , Cs) when Cs =/= [] ->
+ {error,multiple_groups_and_cases};
+groups_and_cases(Gs, Cs) ->
+ {error,incorrect_group_or_case_option}.
tests(TestDir, Suites, []) when is_list(TestDir), is_integer(hd(TestDir)) ->
[{?testdir(TestDir,Suites),ensure_atom(Suites),all}];
@@ -915,30 +997,25 @@ tests(TestDir) when is_list(TestDir), is_integer(hd(TestDir)) ->
tests(TestDirs) when is_list(TestDirs), is_list(hd(TestDirs)) ->
[{?testdir(TestDir,all),all,all} || TestDir <- TestDirs].
-do_run(Tests, Opt) ->
- do_run(Tests, [], Opt, [], ".").
-
-do_run(Tests, Opt, LogDir) ->
- do_run(Tests, [], Opt, [], LogDir).
+do_run(Tests, Opt, LogDir) when is_list(Opt) ->
+ do_run(Tests, [], #opts{tests = Tests, logdir = LogDir}, []).
-do_run(Tests, Skip, Opt, Args, LogDir) ->
+do_run(Tests, Skip, Opts, Args) ->
+    #opts{cover = Cover} = Opts,
case code:which(test_server) of
non_existing ->
exit({error,no_path_to_test_server});
_ ->
- Opt1 =
- case lists:keysearch(cover, 1, Opt) of
- {value,{_,CoverFile}} ->
- case ct_cover:get_spec(CoverFile) of
- {error,Reason} ->
- exit({error,Reason});
- Spec ->
- [{cover_spec,Spec} |
- lists:keydelete(cover, 1, Opt)]
- end;
- _ ->
- Opt
- end,
+ Opts1 = if Cover == undefined ->
+ Opts;
+ true ->
+ case ct_cover:get_spec(Cover) of
+ {error,Reason} ->
+ exit({error,Reason});
+ CoverSpec ->
+ Opts#opts{coverspec = CoverSpec}
+ end
+ end,
%% This env variable is used by test_server to determine
%% which framework it runs under.
case os:getenv("TEST_SERVER_FRAMEWORK") of
@@ -956,7 +1033,6 @@ do_run(Tests, Skip, Opt, Args, LogDir) ->
"To enter the interactive mode again, "
"run ct:start_interactive()\n\n",[]),
{error,interactive_mode};
-
_Pid ->
%% save style sheet info
case lists:keysearch(stylesheet, 1, Args) of
@@ -982,7 +1058,7 @@ do_run(Tests, Skip, Opt, Args, LogDir) ->
_ ->
ok
end,
- log_ts_names(Args),
+ log_ts_names(Opts1#opts.testspecs),
TestSuites = suite_tuples(Tests),
{SuiteMakeErrors,AllMakeErrors} =
@@ -1000,7 +1076,7 @@ do_run(Tests, Skip, Opt, Args, LogDir) ->
SavedErrors = save_make_errors(SuiteMakeErrors),
ct_repeat:log_loop_info(Args),
{Tests1,Skip1} = final_tests(Tests,[],Skip,SavedErrors),
- R = do_run_test(Tests1, Skip1, Opt1),
+ R = do_run_test(Tests1, Skip1, Opts1),
ct_util:stop(normal),
R;
false ->
@@ -1026,7 +1102,7 @@ auto_compile(TestSuites) ->
case application:get_env(common_test, include) of
{ok,UserInclDirs} when length(UserInclDirs) > 0 ->
io:format("Including the following directories:~n"),
- [begin io:format("~p~n",[UserInclDir]), {i,UserInclDir} end ||
+ [begin io:format("~p~n",[UserInclDir]), {i,UserInclDir} end ||
UserInclDir <- UserInclDirs];
_ ->
[]
@@ -1034,11 +1110,11 @@ auto_compile(TestSuites) ->
SuiteMakeErrors =
lists:flatmap(fun({TestDir,Suite} = TS) ->
case run_make(suites, TestDir, Suite, UserInclude) of
- {error,{make_failed,Bad}} ->
+ {error,{make_failed,Bad}} ->
[{TS,Bad}];
- {error,_} ->
+ {error,_} ->
[{TS,[filename:join(TestDir,"*_SUITE")]}];
- _ ->
+ _ ->
[]
end
end, TestSuites),
@@ -1062,13 +1138,13 @@ auto_compile(TestSuites) ->
true -> % already visited
{Done,Failed}
end
- end, {[],[]}, TestSuites),
+ end, {[],[]}, TestSuites),
{SuiteMakeErrors,lists:reverse(HelpMakeErrors)}.
%% verify that specified test suites exist (if auto compile is disabled)
verify_suites(TestSuites) ->
io:nl(),
- Verify =
+ Verify =
fun({Dir,Suite},NotFound) ->
case locate_test_dir(Dir, Suite) of
{ok,TestDir} ->
@@ -1077,11 +1153,11 @@ verify_suites(TestSuites) ->
true ->
Beam = filename:join(TestDir, atom_to_list(Suite)++".beam"),
case filelib:is_regular(Beam) of
- true ->
+ true ->
NotFound;
- false ->
+ false ->
Name = filename:join(TestDir, atom_to_list(Suite)),
- io:format("Suite ~w not found in directory ~s~n",
+ io:format("Suite ~w not found in directory ~s~n",
[Suite,TestDir]),
[{{Dir,Suite},[Name]} | NotFound]
end
@@ -1093,8 +1169,7 @@ verify_suites(TestSuites) ->
end
end,
lists:reverse(lists:foldl(Verify, [], TestSuites)).
-
-
+
save_make_errors([]) ->
[];
save_make_errors(Errors) ->
@@ -1110,7 +1185,7 @@ get_bad_suites([{{_TestDir,_Suite},Failed}|Errors], BadSuites) ->
get_bad_suites([], BadSuites) ->
BadSuites.
-
+
%%%-----------------------------------------------------------------
%%% @hidden
@@ -1121,7 +1196,7 @@ step(TestDir, Suite, Case) ->
%%%-----------------------------------------------------------------
%%% @hidden
%%% @equiv ct:step/4
-step(TestDir, Suite, Case, Opts) when is_list(TestDir), is_atom(Suite), is_atom(Case),
+step(TestDir, Suite, Case, Opts) when is_list(TestDir), is_atom(Suite), is_atom(Case),
Suite =/= all, Case =/= all ->
do_run([{TestDir,Suite,Case}], [{step,Opts}]).
@@ -1154,7 +1229,7 @@ final_tests([{TestDir,Suites,_}|Tests],
Skip1 = [{TD,S,"Make failed"} || {{TD,S},_} <- Bad, S1 <- Suites,
S == S1, TD == TestDir],
- Final1 = [{TestDir,S,all} || S <- Suites],
+ Final1 = [{TestDir,S,all} || S <- Suites],
final_tests(Tests, lists:reverse(Final1)++Final, Skip++Skip1, Bad);
final_tests([{TestDir,all,all}|Tests], Final, Skip, Bad) ->
@@ -1187,7 +1262,7 @@ final_tests([{TestDir,Suite,Cases}|Tests], Final, Skip, Bad) ->
final_tests([], Final, Skip, _Bad) ->
{lists:reverse(Final),Skip}.
-continue([]) ->
+continue([]) ->
true;
continue(_MakeErrors) ->
io:nl(),
@@ -1228,7 +1303,7 @@ set_group_leader_same_as_shell() ->
false
end
end,
- case [P || P <- processes(), GS2or3(P),
+ case [P || P <- processes(), GS2or3(P),
true == lists:keymember(shell,1,element(2,process_info(P,dictionary)))] of
[GL|_] ->
group_leader(GL, self());
@@ -1252,29 +1327,29 @@ check_and_add([{TestDir0,M,_} | Tests], Added) ->
check_and_add([], _) ->
ok.
-do_run_test(Tests, Skip, Opt) ->
+do_run_test(Tests, Skip, Opts) ->
case check_and_add(Tests, []) of
ok ->
ct_util:set_testdata({stats,{0,0,{0,0}}}),
ct_util:set_testdata({cover,undefined}),
test_server_ctrl:start_link(local),
- case lists:keysearch(cover_spec, 1, Opt) of
- {value,{_,CovData={CovFile,
- CovNodes,
- _CovImport,
- CovExport,
- #cover{app = CovApp,
- level = CovLevel,
- excl_mods = CovExcl,
- incl_mods = CovIncl,
- cross = CovCross,
- src = _CovSrc}}}} ->
+ case Opts#opts.coverspec of
+ CovData={CovFile,
+ CovNodes,
+ _CovImport,
+ CovExport,
+ #cover{app = CovApp,
+ level = CovLevel,
+ excl_mods = CovExcl,
+ incl_mods = CovIncl,
+ cross = CovCross,
+ src = _CovSrc}} ->
ct_logs:log("COVER INFO","Using cover specification file: ~s~n"
"App: ~w~n"
"Cross cover: ~w~n"
"Including ~w modules~n"
"Excluding ~w modules",
- [CovFile,CovApp,CovCross,length(CovIncl),length(CovExcl)]),
+ [CovFile,CovApp,CovCross,length(CovIncl),length(CovExcl)]),
%% cover export file will be used for export and import
%% between tests so make sure it doesn't exist initially
@@ -1307,33 +1382,37 @@ do_run_test(Tests, Skip, Opt) ->
true;
_ ->
false
- end,
+ end,
%% let test_server expand the test tuples and count no of cases
{Suites,NoOfCases} = count_test_cases(Tests, Skip),
Suites1 = delete_dups(Suites),
NoOfTests = length(Tests),
NoOfSuites = length(Suites1),
- ct_util:warn_duplicates(Suites1),
+ ct_util:warn_duplicates(Suites1),
{ok,Cwd} = file:get_cwd(),
io:format("~nCWD set to: ~p~n", [Cwd]),
if NoOfCases == unknown ->
- io:format("~nTEST INFO: ~w test(s), ~w suite(s)~n~n",
+ io:format("~nTEST INFO: ~w test(s), ~w suite(s)~n~n",
[NoOfTests,NoOfSuites]),
- ct_logs:log("TEST INFO","~w test(s), ~w suite(s)",
+ ct_logs:log("TEST INFO","~w test(s), ~w suite(s)",
[NoOfTests,NoOfSuites]);
true ->
- io:format("~nTEST INFO: ~w test(s), ~w case(s) in ~w suite(s)~n~n",
+ io:format("~nTEST INFO: ~w test(s), ~w case(s) in ~w suite(s)~n~n",
[NoOfTests,NoOfCases,NoOfSuites]),
- ct_logs:log("TEST INFO","~w test(s), ~w case(s) in ~w suite(s)",
+ ct_logs:log("TEST INFO","~w test(s), ~w case(s) in ~w suite(s)",
[NoOfTests,NoOfCases,NoOfSuites])
end,
+
+ test_server_ctrl:multiply_timetraps(Opts#opts.multiply_timetraps),
+ test_server_ctrl:scale_timetraps(Opts#opts.scale_timetraps),
+
ct_event:notify(#event{name=start_info,
node=node(),
data={NoOfTests,NoOfSuites,NoOfCases}}),
- CleanUp = add_jobs(Tests, Skip, Opt, []),
+ CleanUp = add_jobs(Tests, Skip, Opts, []),
unlink(whereis(test_server_ctrl)),
- catch test_server_ctrl:wait_finish(),
- %% check if last testcase has left a "dead" trace window
+ catch test_server_ctrl:wait_finish(),
+ %% check if last testcase has left a "dead" trace window
%% behind, and if so, kill it
case ct_util:get_testdata(interpret) of
{_What,kill,{TCPid,AttPid}} ->
@@ -1341,8 +1420,8 @@ do_run_test(Tests, Skip, Opt) ->
_ ->
ok
end,
- lists:foreach(fun(Suite) ->
- maybe_cleanup_interpret(Suite, Opt)
+ lists:foreach(fun(Suite) ->
+ maybe_cleanup_interpret(Suite, Opt)
end, CleanUp);
Error ->
Error
@@ -1358,7 +1437,7 @@ count_test_cases(Tests, Skip) ->
SendResult = fun(Me, Result) -> Me ! {no_of_cases,Result} end,
TSPid = test_server_ctrl:start_get_totals(SendResult),
Ref = erlang:monitor(process, TSPid),
- add_jobs(Tests, Skip, [], []),
+ add_jobs(Tests, Skip, #opts{}, []),
{Suites,NoOfCases} = count_test_cases1(length(Tests), 0, [], Ref),
erlang:demonitor(Ref),
test_server_ctrl:stop_get_totals(),
@@ -1368,11 +1447,11 @@ count_test_cases1(0, N, Suites, _) ->
{lists:flatten(Suites), N};
count_test_cases1(Jobs, N, Suites, Ref) ->
receive
- {no_of_cases,{Ss,N1}} ->
+ {no_of_cases,{Ss,N1}} ->
count_test_cases1(Jobs-1, add_known(N,N1), [Ss|Suites], Ref);
- {'DOWN', Ref, _, _, _} ->
+ {'DOWN', Ref, _, _, _} ->
{[],0}
- end.
+ end.
add_known(unknown, _) ->
unknown;
@@ -1381,72 +1460,72 @@ add_known(_, unknown) ->
add_known(N, N1) ->
N+N1.
-add_jobs([{TestDir,all,_}|Tests], Skip, Opt, CleanUp) ->
+add_jobs([{TestDir,all,_}|Tests], Skip, Opts, CleanUp) ->
Name = get_name(TestDir),
case catch test_server_ctrl:add_dir_with_skip(Name, TestDir,
skiplist(TestDir,Skip)) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
CleanUp;
_ ->
wait_for_idle(),
- add_jobs(Tests, Skip, Opt, CleanUp)
+ add_jobs(Tests, Skip, Opts, CleanUp)
end;
-add_jobs([{TestDir,[Suite],all}|Tests], Skip, Opt, CleanUp) when is_atom(Suite) ->
- add_jobs([{TestDir,Suite,all}|Tests], Skip, Opt, CleanUp);
-add_jobs([{TestDir,Suites,all}|Tests], Skip, Opt, CleanUp) when is_list(Suites) ->
+add_jobs([{TestDir,[Suite],all}|Tests], Skip, Opts, CleanUp) when is_atom(Suite) ->
+ add_jobs([{TestDir,Suite,all}|Tests], Skip, Opts, CleanUp);
+add_jobs([{TestDir,Suites,all}|Tests], Skip, Opts, CleanUp) when is_list(Suites) ->
Name = get_name(TestDir) ++ ".suites",
case catch test_server_ctrl:add_module_with_skip(Name, Suites,
skiplist(TestDir,Skip)) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
CleanUp;
_ ->
wait_for_idle(),
- add_jobs(Tests, Skip, Opt, CleanUp)
+ add_jobs(Tests, Skip, Opts, CleanUp)
end;
-add_jobs([{TestDir,Suite,all}|Tests], Skip, Opt, CleanUp) ->
- case maybe_interpret(Suite, all, Opt) of
+add_jobs([{TestDir,Suite,all}|Tests], Skip, Opts, CleanUp) ->
+ case maybe_interpret(Suite, all, Opts) of
ok ->
Name = get_name(TestDir) ++ "." ++ atom_to_list(Suite),
case catch test_server_ctrl:add_module_with_skip(Name, [Suite],
skiplist(TestDir,Skip)) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
CleanUp;
_ ->
wait_for_idle(),
- add_jobs(Tests, Skip, Opt, [Suite|CleanUp])
+ add_jobs(Tests, Skip, Opts, [Suite|CleanUp])
end;
Error ->
Error
end;
-add_jobs([{TestDir,Suite,[Case]}|Tests], Skip, Opt, CleanUp) when is_atom(Case) ->
- add_jobs([{TestDir,Suite,Case}|Tests], Skip, Opt, CleanUp);
-add_jobs([{TestDir,Suite,Cases}|Tests], Skip, Opt, CleanUp) when is_list(Cases) ->
- case maybe_interpret(Suite, Cases, Opt) of
+add_jobs([{TestDir,Suite,[Case]}|Tests], Skip, Opts, CleanUp) when is_atom(Case) ->
+ add_jobs([{TestDir,Suite,Case}|Tests], Skip, Opts, CleanUp);
+add_jobs([{TestDir,Suite,Cases}|Tests], Skip, Opts, CleanUp) when is_list(Cases) ->
+ case maybe_interpret(Suite, Cases, Opts) of
ok ->
Name = get_name(TestDir) ++ "." ++ atom_to_list(Suite) ++ ".cases",
case catch test_server_ctrl:add_cases_with_skip(Name, Suite, Cases,
skiplist(TestDir,Skip)) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
CleanUp;
_ ->
wait_for_idle(),
- add_jobs(Tests, Skip, Opt, [Suite|CleanUp])
+ add_jobs(Tests, Skip, Opts, [Suite|CleanUp])
end;
Error ->
Error
end;
-add_jobs([{TestDir,Suite,Case}|Tests], Skip, Opt, CleanUp) when is_atom(Case) ->
- case maybe_interpret(Suite, Case, Opt) of
+add_jobs([{TestDir,Suite,Case}|Tests], Skip, Opts, CleanUp) when is_atom(Case) ->
+ case maybe_interpret(Suite, Case, Opts) of
ok ->
- Name = get_name(TestDir) ++ "." ++ atom_to_list(Suite) ++ "." ++
+ Name = get_name(TestDir) ++ "." ++ atom_to_list(Suite) ++ "." ++
atom_to_list(Case),
case catch test_server_ctrl:add_case_with_skip(Name, Suite, Case,
skiplist(TestDir,Skip)) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
CleanUp;
_ ->
wait_for_idle(),
- add_jobs(Tests, Skip, Opt, [Suite|CleanUp])
+ add_jobs(Tests, Skip, Opts, [Suite|CleanUp])
end;
Error ->
Error
@@ -1496,7 +1575,7 @@ get_name(Dir) ->
end,
Base = filename:basename(TestDir),
case filename:basename(filename:dirname(TestDir)) of
- "" ->
+ "" ->
Base;
TopDir ->
TopDir ++ "." ++ Base
@@ -1527,10 +1606,10 @@ run_make(Targets, TestDir0, Mod, UserInclude) ->
{i,CtInclude},
{i,XmerlInclude},
debug_info],
- Result =
+ Result =
if Mod == all ; Targets == helpmods ->
case (catch ct_make:all([noexec|ErlFlags])) of
- {'EXIT',_} = Failure ->
+ {'EXIT',_} = Failure ->
Failure;
MakeInfo ->
FileTest = fun(F, suites) -> is_suite(F);
@@ -1549,7 +1628,7 @@ run_make(Targets, TestDir0, Mod, UserInclude) ->
true ->
(catch ct_make:files([Mod], [load|ErlFlags]))
end,
-
+
ok = file:set_cwd(Cwd),
%% send finished_make notification
ct_event:notify(#event{name=finished_make,
@@ -1563,7 +1642,7 @@ run_make(Targets, TestDir0, Mod, UserInclude) ->
{error,{make_crashed,TestDir,Reason}};
{error,ModInfo} ->
io:format("{error,make_failed}\n", []),
- Bad = [filename:join(TestDir, M) || {M,R} <- ModInfo,
+ Bad = [filename:join(TestDir, M) || {M,R} <- ModInfo,
R == error],
{error,{make_failed,Bad}}
end;
@@ -1575,8 +1654,8 @@ run_make(Targets, TestDir0, Mod, UserInclude) ->
get_dir(App, Dir) ->
filename:join(code:lib_dir(App), Dir).
-maybe_interpret(Suite, Cases, [{step,StepOpts}]) ->
- %% if other suite has run before this one, check if last testcase
+maybe_interpret(Suite, Cases, #opts{step = StepOpts}) when StepOpts =/= undefined ->
+ %% if other suite has run before this one, check if last testcase
%% has left a "dead" trace window behind, and if so, kill it
case ct_util:get_testdata(interpret) of
{_What,kill,{TCPid,AttPid}} ->
@@ -1619,7 +1698,7 @@ maybe_interpret2(Suite, Cases, StepOpts) ->
WinOp = case lists:member(keep_inactive, ensure_atom(StepOpts)) of
true -> no_kill;
false -> kill
- end,
+ end,
ct_util:set_testdata({interpret,{{Suite,Cases},WinOp,
{undefined,undefined}}}),
ok.
@@ -1640,18 +1719,15 @@ maybe_cleanup_interpret(Suite, [{step,_}]) ->
maybe_cleanup_interpret(_, _) ->
ok.
-log_ts_names(Args) ->
- case lists:keysearch(spec, 1, Args) of
- {value,{_,Specs}} ->
- List = lists:map(fun(Name) ->
- Name ++ " "
- end, Specs),
- ct_logs:log("Test Specification file(s)", "~s",
- [lists:flatten(List)]);
- _ ->
- ok
- end.
-
+log_ts_names([]) ->
+ ok;
+log_ts_names(Specs) ->
+ List = lists:map(fun(Name) ->
+ Name ++ " "
+ end, Specs),
+ ct_logs:log("Test Specification file(s)", "~s",
+ [lists:flatten(List)]).
+
merge_arguments(Args) ->
merge_arguments(Args, []).
@@ -1659,6 +1735,8 @@ merge_arguments([LogDir={logdir,_}|Args], Merged) ->
merge_arguments(Args, handle_arg(replace, LogDir, Merged));
merge_arguments([CoverFile={cover,_}|Args], Merged) ->
merge_arguments(Args, handle_arg(replace, CoverFile, Merged));
+merge_arguments([{'case',TC}|Args], Merged) ->
+ merge_arguments(Args, handle_arg(merge, {testcase,TC}, Merged));
merge_arguments([Arg={_,_}|Args], Merged) ->
merge_arguments(Args, handle_arg(merge, Arg, Merged));
merge_arguments([], Merged) ->
@@ -1673,6 +1751,23 @@ handle_arg(Op, Arg, [Other|Merged]) ->
handle_arg(_,Arg,[]) ->
[Arg].
+get_start_opt(Key, IfExists, Args) ->
+ get_start_opt(Key, IfExists, undefined, Args);
+
+get_start_opt(Key, IfExists, IfNotExists, Args) ->
+ case lists:keysearch(Key, 1, Args) of
+ {value,{Key,Val}} when is_function(IfExists) ->
+ IfExists(Val);
+ {value,{Key,Val}} when IfExists == value ->
+ Val;
+ {value,{Key,_Val}} ->
+ IfExists;
+ _ when is_function(IfNotExists) ->
+ IfNotExists();
+ _ ->
+ IfNotExists
+ end.
+
locate_test_dir(Dir, Suite) ->
TestDir = case ct_util:is_test_dir(Dir) of
true -> Dir;
@@ -1737,18 +1832,18 @@ start_trace(Args) ->
case file:consult(TraceSpec) of
{ok,Terms} ->
case catch do_trace(Terms) of
- ok ->
+ ok ->
true;
{_,Error} ->
io:format("Warning! Tracing not started. Reason: ~p~n~n",
[Error]),
false
- end;
+ end;
{_,Error} ->
io:format("Warning! Tracing not started. Reason: ~p~n~n",
[Error]),
false
- end;
+ end;
false ->
false
end.
@@ -1760,22 +1855,22 @@ do_trace(Terms) ->
case dbg:tpl(M,[{'_',[],[{return_trace}]}]) of
{error,What} -> exit({error,{tracing_failed,What}});
_ -> ok
- end;
+ end;
({f,M,F}) ->
case dbg:tpl(M,F,[{'_',[],[{return_trace}]}]) of
{error,What} -> exit({error,{tracing_failed,What}});
_ -> ok
- end;
+ end;
(Huh) ->
exit({error,{unrecognized_trace_term,Huh}})
end, Terms),
ok.
-
+
stop_trace(true) ->
dbg:stop_clear();
stop_trace(false) ->
ok.
-
+
ensure_atom(Atom) when is_atom(Atom) ->
Atom;
ensure_atom(String) when is_list(String), is_integer(hd(String)) ->
@@ -1784,4 +1879,3 @@ ensure_atom(List) when is_list(List) ->
[ensure_atom(Item) || Item <- List];
ensure_atom(Other) ->
Other.
-
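A rough sketch of how the new get_start_opt/3,4 helper above can be used from ct_run: it looks up a key in the start argument list, applies a transformation fun when the key is present, and falls back to a default otherwise. The argument list and key below are made-up examples, not taken from the patch.

    %% Hypothetical call site; Args mimics a parsed ct_run argument list.
    Args = [{multiply_timetraps,["2"]}, {logdir,["my_logs"]}],
    Mult = get_start_opt(multiply_timetraps,
                         fun([M]) -> list_to_integer(M) end,
                         1, Args),
    %% Mult =:= 2 here; if the key is missing, the default 1 is returned,
    %% and the get_start_opt/3 variant would return 'undefined' instead.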
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl
index bf01bb52d9..08c57887ef 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl
@@ -37,7 +37,8 @@ suite() ->
%%--------------------------------------------------------------------
init_per_suite(Config) ->
timer:sleep(5000),
- Config.
+ exit(shouldnt_happen).
+% Config.
%%--------------------------------------------------------------------
%% Function: end_per_suite(Config0) -> void() | {save_config,Config1}
diff --git a/lib/test_server/doc/src/test_server.xml b/lib/test_server/doc/src/test_server.xml
index 6e75425862..0cae75d692 100644
--- a/lib/test_server/doc/src/test_server.xml
+++ b/lib/test_server/doc/src/test_server.xml
@@ -167,6 +167,22 @@
</desc>
</func>
<func>
+ <name>adjusted_sleep(MSecs) -> ok</name>
+ <fsummary>Suspends the calling task for a specified time.</fsummary>
+ <type>
+ <v>MSecs = integer() | float() | infinity</v>
+ <d>The base number of milliseconds to sleep</d>
+ </type>
+ <desc>
+ <p>This function suspends the calling process for at least the
+ supplied number of milliseconds. The function behaves the same
+ way as <c>test_server:sleep/1</c>, except that <c>MSecs</c>
+ is first multiplied by the 'multiply_timetraps' value (if set)
+ and is also scaled up automatically if 'scale_timetraps' is set
+ to true (which it is by default).</p>
+ </desc>
+ </func>
+ <func>
<name>hours(N) -> MSecs</name>
<name>minutes(N) -> MSecs</name>
<name>seconds(N) -> MSecs</name>
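An illustrative use of adjusted_sleep/1 from a test case (the case and the check_event/0 helper are hypothetical, not from the patch): both the timetrap and the sleep grow with 'multiply_timetraps' and 'scale_timetraps', so the case does not wake up too early on slow, e.g. cover-compiled, runs.

    my_slow_case(Config) when is_list(Config) ->
        Dog = test_server:timetrap(test_server:minutes(1)),
        %% waits at least 2000 ms, adjusted the same way as the timetrap
        test_server:adjusted_sleep(2000),
        ok = check_event(),                    %% hypothetical helper
        test_server:timetrap_cancel(Dog),
        ok.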
diff --git a/lib/test_server/doc/src/test_server_ctrl.xml b/lib/test_server/doc/src/test_server_ctrl.xml
index 8b60849b61..2368c4bacc 100644
--- a/lib/test_server/doc/src/test_server_ctrl.xml
+++ b/lib/test_server/doc/src/test_server_ctrl.xml
@@ -376,6 +376,31 @@ Optional, if not given the test server controller node
</desc>
</func>
<func>
+ <name>scale_timetraps(Bool) -> ok</name>
+ <fsummary>Specify if timetrap timeouts should be scaled automatically.</fsummary>
+ <type>
+ <v>Bool = true | false</v>
+ </type>
+ <desc>
+ <p>This function should be called before a test is started.
+ The parameter specifies if test_server should attempt
+ to automatically scale the timetrap value in order to compensate
+ for delays caused by e.g. the cover tool.</p>
+ </desc>
+ </func>
+ <func>
+ <name>get_timetrap_parameters() -> {N,Bool}</name>
+ <fsummary>Read the parameter values that affect timetraps.</fsummary>
+ <type>
+ <v>N = integer() | infinity</v>
+ <v>Bool = true | false</v>
+ </type>
+ <desc>
+ <p>This function may be called to read the values set by
+ <c>multiply_timetraps/1</c> and <c>scale_timetraps/1</c>.</p>
+ </desc>
+ </func>
+ <func>
<name>cover(Application,Analyse) -> ok</name>
<name>cover(CoverFile,Analyse) -> ok</name>
<name>cover(App,CoverFile,Analyse) -> ok</name>
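A minimal sketch of how the two new controller calls documented above combine with the existing multiply_timetraps/1, assuming the controller is already started; the directory name and the parameter values are arbitrary examples.

    ok = test_server_ctrl:multiply_timetraps(2),   % double every timetrap
    ok = test_server_ctrl:scale_timetraps(false),  % turn off automatic scaling
    {2,false} = test_server_ctrl:get_timetrap_parameters(),
    test_server_ctrl:add_dir("my_tests", "../my_test_dir").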
diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl
index 7db103a4c6..7fb708778c 100644
--- a/lib/test_server/src/test_server.erl
+++ b/lib/test_server/src/test_server.erl
@@ -35,7 +35,7 @@
-export([fail/0,fail/1,format/1,format/2,format/3]).
-export([capture_start/0,capture_stop/0,capture_get/0]).
-export([messages_get/0]).
--export([hours/1,minutes/1,seconds/1,sleep/1,timecall/3]).
+-export([hours/1,minutes/1,seconds/1,sleep/1,adjusted_sleep/1,timecall/3]).
-export([timetrap_scale_factor/0,timetrap/1,timetrap_cancel/1]).
-export([m_out_of_n/3,do_times/4,do_times/2]).
-export([call_crash/3,call_crash/4,call_crash/5]).
@@ -89,14 +89,14 @@ init(Host,Port,Starter) ->
global:register_name(?MODULE,self()),
process_flag(trap_exit,true),
test_server_sup:cleanup_crash_dumps(),
- case gen_tcp:connect(Host,Port, [binary,
- {reuseaddr,true},
+ case gen_tcp:connect(Host,Port, [binary,
+ {reuseaddr,true},
{packet,2}]) of
- {ok,MainSock} ->
+ {ok,MainSock} ->
Starter ! {self(),started},
request(MainSock,{target_info,init_target_info()}),
loop(#state{controller={Host,MainSock}});
- Error ->
+ Error ->
Starter ! {self(),{error,
{could_not_contact_controller,Error}}}
end.
@@ -127,7 +127,7 @@ loop(#state{controller={_,MainSock}} = State) ->
halt();
{'EXIT',Pid,Reason} ->
case lists:keysearch(Pid,1,State#state.jobs) of
- {value,{Pid,Name}} ->
+ {value,{Pid,Name}} ->
case Reason of
normal -> ignore;
_other -> request(MainSock,{job_proc_killed,Name,Reason})
@@ -157,14 +157,14 @@ init_purify() ->
job(Host,Port,Starter) ->
process_flag(trap_exit,true),
init_purify(),
- case gen_tcp:connect(Host,Port, [binary,
- {reuseaddr,true},
+ case gen_tcp:connect(Host,Port, [binary,
+ {reuseaddr,true},
{packet,4},
{active,false}]) of
{ok,JobSock} ->
Starter ! {self(),started},
job(JobSock);
- Error ->
+ Error ->
Starter ! {self(),{error,
{could_not_contact_controller,Error}}}
end.
@@ -192,7 +192,7 @@ get_jobdir() ->
true ->
{ok,Cwd} = file:get_cwd(),
Cwd ++ "/" ++ Basename;
- false ->
+ false ->
filename:absname(Basename)
end.
@@ -216,7 +216,7 @@ send_privdir(JobDir,JobSock) ->
del_dir(Dir) ->
case file:read_file_info(Dir) of
- {ok,#file_info{type=directory}} ->
+ {ok,#file_info{type=directory}} ->
{ok,Cont} = file:list_dir(Dir),
lists:foreach(fun(F) -> del_dir(filename:join(Dir,F)) end, Cont),
ok = file:del_dir(Dir);
@@ -227,7 +227,7 @@ del_dir(Dir) ->
catch file:delete(Dir),
ok
end.
-
+
%%
%% Receive and decode request on job socket
%%
@@ -237,7 +237,7 @@ job_loop(JobSock) ->
ok -> job_loop(JobSock);
{stop,R} -> R
end.
-
+
decode_job({{beam,Mod,Which},Beam}) ->
% FIXME, shared directory structure on host and target required,
% "Library beams" are not loaded from HOST... /Patrik
@@ -254,7 +254,7 @@ decode_job({{datadir,Tarfile0},Archive}) ->
ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir}]),
ok = file:delete(Tarfile),
ok;
-decode_job({test_case,Case}) ->
+decode_job({test_case,Case}) ->
Result = run_test_case_apply(Case),
JobSock = get(test_server_job_sock),
request(JobSock,{test_case_result,Result}),
@@ -266,11 +266,11 @@ decode_job({test_case,Case}) ->
request(JobSock,{{crash_dumps,filename:basename(TarFile)},TarBin})
end,
ok;
-decode_job({sync_apply,{M,F,A}}) ->
+decode_job({sync_apply,{M,F,A}}) ->
R = apply(M,F,A),
request(get(test_server_job_sock),{sync_result,R}),
ok;
-decode_job(job_done) ->
+decode_job(job_done) ->
{stop,stopped}.
%%
@@ -282,9 +282,9 @@ decode_job(job_done) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% cover_compile({App,Include,Exclude,Cross}) ->
+%% cover_compile({App,Include,Exclude,Cross}) ->
%% {ok,AnalyseModules} | {error,Reason}
-%%
+%%
%% App = atom() , name of application to be compiled
%% Exclude = [atom()], list of modules to exclude
%% Include = [atom()], list of modules outside of App that should be included
@@ -293,7 +293,7 @@ decode_job(job_done) ->
%% in the cover compilation, but that shall not be part of
%% the cover analysis for this application.
%%
-%% Cover compile the given application. Return {ok,AnalyseMods} if application
+%% Cover compile the given application. Return {ok,AnalyseMods} if application
%% is found, else {error,application_not_found}.
cover_compile({none,_Exclude,Include,Cross}) ->
@@ -330,7 +330,7 @@ cover_compile({App,all,Include,Cross}) ->
end;
cover_compile({App,Exclude,Include,Cross}) ->
case code:lib_dir(App) of
- {error,bad_name} ->
+ {error,bad_name} ->
case Include++Cross of
[] ->
io:format("\nWARNING: Can't find lib_dir for \'~w\'\n"
@@ -366,7 +366,7 @@ cover_compile({App,Exclude,Include,Cross}) ->
{ok,AnalyseMods}
end
end.
-
+
module_names(Beams) ->
[list_to_atom(filename:basename(filename:rootname(Beam))) || Beam <- Beams].
@@ -380,11 +380,11 @@ do_cover_compile1([Dont|Rest]) when Dont=:=cover;
Dont=:=test_server_ctrl ->
do_cover_compile1(Rest);
do_cover_compile1([M|Rest]) ->
- case {code:is_sticky(M),code:is_loaded(M)} of
+ case {code:is_sticky(M),code:is_loaded(M)} of
{true,_} ->
code:unstick_mod(M),
case cover:compile_beam(M) of
- {ok,_} ->
+ {ok,_} ->
ok;
Error ->
io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
@@ -402,7 +402,7 @@ do_cover_compile1([M|Rest]) ->
end;
{false,_} ->
case cover:compile_beam(M) of
- {ok,_} ->
+ {ok,_} ->
ok;
Error ->
io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
@@ -415,14 +415,14 @@ do_cover_compile1([]) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_analyse(Analyse,Modules) -> [{M,{Cov,NotCov,Details}}]
-%%
+%%
%% Analyse = {details,Dir} | details | {overview,void()} | overview
%% Modules = [atom()], the modules to analyse
%%
%% Cover analysis. If this is a remote target, analyse_to_file can not be used.
%% In that case the analyse level 'line' is used instead if Analyse==details.
%%
-%% If this is a local target, the test directory is given
+%% If this is a local target, the test directory is given
%% (Analyse=={details,Dir}) and analyse_to_file can be used directly.
%%
%% If Analyse==overview | {overview,Dir} analyse_to_file is not used, only
@@ -432,12 +432,12 @@ do_cover_compile1([]) ->
%% all.coverdata in that directory.
cover_analyse(Analyse,Modules) ->
io:fwrite("Cover analysing...\n",[]),
- DetailsFun =
+ DetailsFun =
case Analyse of
{details,Dir} ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(M) ->
+ fun(M) ->
OutFile = filename:join(Dir,
atom_to_list(M) ++
".COVER.html"),
@@ -451,7 +451,7 @@ cover_analyse(Analyse,Modules) ->
Error ->
fun(_) -> Error end
end;
- details ->
+ details ->
fun(M) ->
case cover:analyse(M,line) of
{ok,Lines} ->
@@ -489,7 +489,7 @@ cover_analyse(Analyse,Modules) ->
unstick_all_sticky(Node) ->
lists:filter(
- fun(M) ->
+ fun(M) ->
case code:is_sticky(M) of
true ->
rpc:call(Node,code,unstick_mod,[M]),
@@ -502,24 +502,24 @@ unstick_all_sticky(Node) ->
stick_all_sticky(Node,Sticky) ->
lists:foreach(
- fun(M) ->
+ fun(M) ->
rpc:call(Node,code,stick_mod,[M])
end,
Sticky).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% run_test_case_apply(Mod,Func,Args,Name,RunInit,MultiplyTimetrap) ->
+%% run_test_case_apply(Mod,Func,Args,Name,RunInit,TimetrapData) ->
%% {Time,Value,Loc,Opts,Comment} | {died,Reason,unknown,Comment}
-%%
+%%
%% Time = float() (seconds)
%% Value = term()
%% Loc = term()
%% Comment = string()
%% Reason = term()
%%
-%% Spawns off a process (case process) that actually runs the test suite.
-%% The case process will have the job process as group leader, which makes
+%% Spawns off a process (case process) that actually runs the test suite.
+%% The case process will have the job process as group leader, which makes
%% it possible to capture all it's output from io:format/2, etc.
%%
%% The job process then sits down and waits for news from the case process.
@@ -535,40 +535,43 @@ stick_all_sticky(Node,Sticky) ->
%% called or the comment given by the return value {comment,Comment} from
%% a test case.
%%
-%% {died,Reason,unknown,Comment} is returned if the test case was killed
+%% {died,Reason,unknown,Comment} is returned if the test case was killed
%% by some other process. Reason is the kill reason provided.
%%
-%% MultiplyTimetrap indicates a possible extension of all timetraps
-%% Timetraps will be multiplied by this integer. If it is infinity, no
-%% timetraps will be started at all.
+%% TimetrapData = {MultiplyTimetrap,ScaleTimetrap}, which indicates a
+%% possible extension of all timetraps. Timetraps will be multiplied by
+%% MultiplyTimetrap. If it is infinity, no timetraps will be started at all.
+%% ScaleTimetrap indicates if test_server should attempt to automatically
+%% compensate timetraps for runtime delays introduced by e.g. tools like
+%% cover.
-run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,MultiplyTimetrap}) ->
+run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,TimetrapData}) ->
purify_format("Test case #~w ~w:~w/1", [CaseNum, Mod, Func]),
case os:getenv("TS_RUN_VALGRIND") of
- false ->
+ false ->
ok;
_ ->
os:putenv("VALGRIND_LOGFILE_INFIX",atom_to_list(Mod)++"."++
atom_to_list(Func)++"-")
end,
test_server_h:testcase({Mod,Func,1}),
- ProcBef = erlang:system_info(process_count),
- Result = run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap),
+ ProcBef = erlang:system_info(process_count),
+ Result = run_test_case_apply(Mod, Func, Args, Name, RunInit, TimetrapData),
ProcAft = erlang:system_info(process_count),
purify_new_leaks(),
DetFail = get(test_server_detected_fail),
{Result,DetFail,ProcBef,ProcAft}.
-
-run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
+
+run_test_case_apply(Mod, Func, Args, Name, RunInit, TimetrapData) ->
case get(test_server_job_dir) of
undefined ->
%% i'm a local target
- do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap);
+ do_run_test_case_apply(Mod, Func, Args, Name, RunInit, TimetrapData);
JobDir ->
%% i'm a remote target
case Args of
[Config] when is_list(Config) ->
- {value,{data_dir,HostDataDir}} =
+ {value,{data_dir,HostDataDir}} =
lists:keysearch(data_dir, 1, Config),
DataBase = filename:basename(HostDataDir),
TargetDataDir = filename:join(JobDir, DataBase),
@@ -578,18 +581,18 @@ run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
Config2 = lists:keyreplace(priv_dir, 1, Config1,
{priv_dir,TargetPrivDir}),
do_run_test_case_apply(Mod, Func, [Config2], Name, RunInit,
- MultiplyTimetrap);
+ TimetrapData);
_other ->
do_run_test_case_apply(Mod, Func, Args, Name, RunInit,
- MultiplyTimetrap)
+ TimetrapData)
end
end.
-do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
+do_run_test_case_apply(Mod, Func, Args, Name, RunInit, TimetrapData) ->
{ok,Cwd} = file:get_cwd(),
Args2Print = case Args of
- [Args1] when is_list(Args1) ->
+ [Args1] when is_list(Args1) ->
lists:keydelete(tc_group_result, 1, Args1);
- _ ->
+ _ ->
Args
end,
print(minor, "Test case started with:\n~s:~s(~p)\n", [Mod,Func,Args2Print]),
@@ -600,11 +603,11 @@ do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
OldGLeader = group_leader(),
%% Set ourself to group leader for the spawned process
group_leader(self(),self()),
- Pid =
+ Pid =
spawn_link(
- fun() ->
- run_test_case_eval(Mod, Func, Args, Name, Ref,
- RunInit, MultiplyTimetrap,
+ fun() ->
+ run_test_case_eval(Mod, Func, Args, Name, Ref,
+ RunInit, TimetrapData,
TCCallback)
end),
group_leader(OldGLeader, self()),
@@ -641,13 +644,13 @@ run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
receive
{'DOWN', Mon, process, Pid, _} ->
Comment
- after 10000 ->
+ after 10000 ->
%% Pid is probably trapping exits, hit it harder...
exit(Pid, kill),
%% here's the only place we know Reason, so we save
%% it as a comment, potentially replacing user data
Error = lists:flatten(io_lib:format("Aborted: ~p",[Reason])),
- Error1 = lists:flatten([string:strip(S,left) ||
+ Error1 = lists:flatten([string:strip(S,left) ||
S <- string:tokens(Error,[$\n])]),
if length(Error1) > 63 ->
string:substr(Error1,1,60) ++ "...";
@@ -719,7 +722,7 @@ run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
{comment,NewComment} ->
Terminate1 =
case Terminate of
- {true,{Time,Value,Loc,Opts,_OldComment}} ->
+ {true,{Time,Value,Loc,Opts,_OldComment}} ->
{true,{Time,Value,mod_loc(Loc),Opts,NewComment}};
Other ->
Other
@@ -746,16 +749,16 @@ run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
{Mod,_Func} = get_mf(Loc1),
spawn_fw_call(Mod,InitOrEnd,Pid,{timetrap_timeout,TVal},
Loc1,self(),Comment),
- run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+ run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
{testcase_aborted,Reason,Loc} ->
Loc1 = mod_loc(Loc),
{Mod,Func} = get_mf(Loc1),
spawn_fw_call(Mod,Func,Pid,{testcase_aborted,Reason},
Loc1,self(),Comment),
run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
- killed ->
+ killed ->
%% result of an exit(TestCase,kill) call, which is the
- %% only way to abort a testcase process that traps exits
+ %% only way to abort a testcase process that traps exits
%% (see abort_current_testcase)
spawn_fw_call(undefined,undefined,Pid,testcase_aborted_or_killed,
unknown,self(),Comment),
@@ -763,7 +766,7 @@ run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
{fw_error,{FwMod,FwFunc,FwError}} ->
spawn_fw_call(FwMod,FwFunc,Pid,{framework_error,FwError},
unknown,self(),Comment),
- run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+ run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
_ ->
%% the testcase has terminated because of Reason (e.g. an exit
%% because a linked process failed)
@@ -773,25 +776,25 @@ run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
end;
{_FwCallPid,fw_notify_done,RetVal} ->
%% the framework has been notified, we're finished
- run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
+ run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
{'EXIT',_FwCallPid,{fw_notify_done,Func,Error}} ->
%% a framework function failed
CB = os:getenv("TEST_SERVER_FRAMEWORK"),
Loc = case CB of
- false ->
+ false ->
{test_server,Func};
- _ ->
+ _ ->
{list_to_atom(CB),Func}
end,
RetVal = {died,{framework_error,Loc,Error},Loc,"Framework error"},
run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
{failed,File,Line} ->
- put(test_server_detected_fail,
+ put(test_server_detected_fail,
[{File, Line}| get(test_server_detected_fail)]),
run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
_Other when not is_tuple(_Other) ->
%% ignore anything not generated by test server
- run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+ run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
_Other when element(1, _Other) /= 'EXIT',
element(1, _Other) /= started,
element(1, _Other) /= finished,
@@ -824,7 +827,7 @@ spawn_fw_call(Mod,{init_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why,
FwCall =
fun() ->
Skip = {skip,{failed,{Mod,init_per_testcase,Why}}},
- %% if init_per_testcase fails, the test case
+ %% if init_per_testcase fails, the test case
%% should be skipped
case catch test_server_sup:framework_call(
end_tc,[?pl2a(Mod),Func,{Pid,Skip,[[]]}]) of
@@ -869,7 +872,7 @@ spawn_fw_call(FwMod,FwFunc,_Pid,{framework_error,FwError},_,SendTo,_Comment) ->
fun() ->
test_server_sup:framework_call(report, [framework_error,
{{FwMod,FwFunc},FwError}]),
- Comment =
+ Comment =
lists:flatten(
io_lib:format("<font color=\"red\">"
"WARNING! ~w:~w failed!</font>", [FwMod,FwFunc])),
@@ -953,9 +956,11 @@ job_proxy_msgloop() ->
%% A test case is known to have failed if it returns {'EXIT', _} tuple,
%% or sends a message {failed, File, Line} to its group_leader
-run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
- MultiplyTimetrap, TCCallback) ->
- put(test_server_multiply_timetraps,MultiplyTimetrap),
+run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
+ TimetrapData, TCCallback) ->
+
+ put(test_server_multiply_timetraps,TimetrapData),
+
{{Time,Value},Loc,Opts} =
case test_server_sup:framework_call(init_tc,[?pl2a(Mod),Func,Args0],
{ok,Args0}) of
@@ -1036,7 +1041,7 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
{{error,ReasonToFail},{failed,ReasonToFail},EndConf1};
{failed,{_,end_per_testcase,_}} = Failure -> % unexpected termination
{Failure,TSReturn,EndConf1};
- _ ->
+ _ ->
{FWReturn,TSReturn,EndConf1}
end,
case test_server_sup:framework_call(end_tc, [?pl2a(Mod), Func,
@@ -1067,7 +1072,7 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
{{T,Return2},Loc,Opts}
end.
-%% the return value is a list and we have to check if it contains
+%% the return value is a list and we have to check if it contains
%% the result of an end conf case or if it's a Config list
process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) ->
ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result],
@@ -1090,16 +1095,16 @@ process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) ->
process_return_val(Return, M,F,A, Loc, Final) ->
process_return_val1(Return, M,F,A, Loc, Final, []).
-process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts) when E=='EXIT';
+process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts) when E=='EXIT';
E==failed ->
fw_error_notify(M,F,A, TCError, mod_loc(Loc)),
test_server_sup:framework_call(end_tc,
[?pl2a(M),F,{{error,TCError},
[[{tc_status,{failed,TCError}}|Args]]}]),
{Failed,SaveOpts};
-process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) ->
+process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts);
-process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) ->
+process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) ->
process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc, {skip,Why}, SaveOpts);
process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]);
@@ -1138,7 +1143,7 @@ init_per_testcase(Mod, Func, Args) ->
case erlang:function_exported(Mod,init_per_testcase,2) of
true ->
case catch my_apply(Mod, init_per_testcase, [Func|Args]) of
- {'$test_server_ok',{Skip,Reason}} when Skip==skip;
+ {'$test_server_ok',{Skip,Reason}} when Skip==skip;
Skip==skipped ->
{skip,Reason};
{'$test_server_ok',Res={skip_and_save,_,_}} ->
@@ -1149,31 +1154,31 @@ init_per_testcase(Mod, Func, Args) ->
[] ->
{ok,NewConf};
Bad ->
- group_leader() ! {printout,12,
+ group_leader() ! {printout,12,
"ERROR! init_per_testcase has returned "
- "bad elements in Config: ~p\n",[Bad]},
+ "bad elements in Config: ~p\n",[Bad]},
{skip,{failed,{Mod,init_per_testcase,bad_return}}}
end;
{'$test_server_ok',_Other} ->
- group_leader() ! {printout,12,
+ group_leader() ! {printout,12,
"ERROR! init_per_testcase did not return "
- "a Config list.\n",[]},
+ "a Config list.\n",[]},
{skip,{failed,{Mod,init_per_testcase,bad_return}}};
{'EXIT',Reason} ->
Line = get_loc(),
FormattedLoc = test_server_sup:format_loc(mod_loc(Line)),
- group_leader() ! {printout,12,
+ group_leader() ! {printout,12,
"ERROR! init_per_testcase crashed!\n"
"\tLocation: ~s\n\tReason: ~p\n",
- [FormattedLoc,Reason]},
+ [FormattedLoc,Reason]},
{skip,{failed,{Mod,init_per_testcase,Reason}}};
Other ->
Line = get_loc(),
FormattedLoc = test_server_sup:format_loc(mod_loc(Line)),
- group_leader() ! {printout,12,
+ group_leader() ! {printout,12,
"ERROR! init_per_testcase thrown!\n"
"\tLocation: ~s\n\tReason: ~p\n",
- [FormattedLoc, Other]},
+ [FormattedLoc, Other]},
{skip,{failed,{Mod,init_per_testcase,Other}}}
end;
false ->
@@ -1182,7 +1187,7 @@ init_per_testcase(Mod, Func, Args) ->
[Config] = Args,
{ok, Config}
end.
-
+
end_per_testcase(Mod, Func, Conf) ->
case erlang:function_exported(Mod,end_per_testcase,2) of
true ->
@@ -1211,11 +1216,11 @@ do_end_per_testcase(Mod,EndFunc,Func,Conf) ->
comment(io_lib:format("<font color=\"red\">"
"WARNING: ~w crashed!"
"</font>\n",[EndFunc])),
- group_leader() ! {printout,12,
+ group_leader() ! {printout,12,
"WARNING: ~w crashed!\n"
"Reason: ~p\n"
"Line: ~s\n",
- [EndFunc, Reason,
+ [EndFunc, Reason,
test_server_sup:format_loc(
mod_loc(get_loc()))]},
{failed,{Mod,end_per_testcase,Why}};
@@ -1223,13 +1228,13 @@ do_end_per_testcase(Mod,EndFunc,Func,Conf) ->
comment(io_lib:format("<font color=\"red\">"
"WARNING: ~w thrown!"
"</font>\n",[EndFunc])),
- group_leader() ! {printout,12,
+ group_leader() ! {printout,12,
"WARNING: ~w thrown!\n"
"Reason: ~p\n"
"Line: ~s\n",
- [EndFunc, Other,
+ [EndFunc, Other,
test_server_sup:format_loc(
- mod_loc(get_loc()))]},
+ mod_loc(get_loc()))]},
{failed,{Mod,end_per_testcase,Other}}
end.
@@ -1254,7 +1259,7 @@ get_mf(_) -> {undefined,undefined}.
mod_loc(Loc) ->
%% handle diff line num versions
- case Loc of
+ case Loc of
[{{_M,_F},_L}|_] ->
[{?pl2a(M),F,L} || {{M,F},L} <- Loc];
[{_M,_F}|_] ->
@@ -1286,7 +1291,7 @@ fw_error_notify(Mod, Func, Args, Error, Loc) ->
%% Args = [term()]
%%
%% Just like io:format, except that depending on the Detail value, the output
-%% is directed to console, major and/or minor log files.
+%% is directed to console, major and/or minor log files.
print(Detail,Format,Args) ->
local_or_remote_apply({test_server_ctrl,print,[Detail,Format,Args]}).
@@ -1296,11 +1301,11 @@ print(Detail,Format,Args) ->
%%
%% Prints Leader followed by a time stamp (date and time). Depending on
%% the Detail value, the output is directed to console, major and/or minor
-%% log files.
+%% log files.
print_timestamp(Detail,Leader) ->
local_or_remote_apply({test_server_ctrl,print_timestamp,[Detail,Leader]}).
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% lookup_config(Key,Config) -> {value,{Key,Value}} | undefined
@@ -1326,11 +1331,11 @@ ts_tc(M, F, A) ->
Val = (catch my_apply(M, F, A)),
After = erlang:now(),
Result = case Val of
- {'$test_server_ok', R} ->
+ {'$test_server_ok', R} ->
R; % test case ok
- {'EXIT',_Reason} = R ->
+ {'EXIT',_Reason} = R ->
R; % test case crashed
- Other ->
+ Other ->
{failed, {thrown,Other}} % test case was thrown
end,
Elapsed =
@@ -1352,7 +1357,7 @@ my_apply(M, F, A) ->
%% in an attempt to keep this modules small (yeah, right!) %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
unicode_to_latin1(Chars) when is_list(Chars); is_binary(Chars) ->
- lists:flatten(
+ lists:flatten(
[ case X of
High when High > 255 ->
io_lib:format("\\{~.8B}",[X]);
@@ -1460,6 +1465,44 @@ sleep(MSecs) ->
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% adjusted_sleep(Time) -> ok
+%% Time = integer() | float() | infinity
+%%
+%% Sleeps the specified number of milliseconds, multiplied by the
+%% 'multiply_timetraps' value (if set) and possibly also automatically scaled
+%% up if 'scale_timetraps' is set to true (which is the default).
+%% This function also accepts floating point numbers (which are truncated) and
+%% the atom 'infinity'.
+adjusted_sleep(infinity) ->
+ receive
+ after infinity ->
+ ok
+ end;
+adjusted_sleep(MSecs) ->
+ {Multiplier,ScaleFactor} =
+ case test_server_ctrl:get_timetrap_parameters() of
+ {undefined,undefined} ->
+ {1,1};
+ {undefined,false} ->
+ {1,1};
+ {undefined,true} ->
+ {1,timetrap_scale_factor()};
+ {infinity,_} ->
+ {infinity,1};
+ {Mult,undefined} ->
+ {Mult,1};
+ {Mult,false} ->
+ {Mult,1};
+ {Mult,true} ->
+ {Mult,timetrap_scale_factor()}
+ end,
+ receive
+ after trunc(MSecs*Multiplier*ScaleFactor) ->
+ ok
+ end,
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% fail(Reason) -> exit({suite_failed,Reason})
%%
%% Immediately calls exit. Included because test suites are easier
@@ -1509,9 +1552,9 @@ break(Comment) ->
receive continue -> ok end.
spawn_break_process(Pid) ->
- spawn(fun() ->
+ spawn(fun() ->
register(test_server_break_process,self()),
- receive
+ receive
continue -> continue(Pid);
cancel -> ok
end
@@ -1561,20 +1604,20 @@ timetrap_scale_factor() ->
%% timetrap(Timeout) -> Handle
%% Handle = term()
%%
-%% Creates a time trap, that will kill the calling process if the
+%% Creates a time trap, that will kill the calling process if the
%% trap is not cancelled with timetrap_cancel/1, within Timeout milliseconds.
timetrap(Timeout0) ->
Timeout = time_ms(Timeout0),
cancel_default_timetrap(),
case get(test_server_multiply_timetraps) of
- undefined -> timetrap1(Timeout);
- infinity -> infinity;
- Int -> timetrap1(Timeout*Int)
+ undefined -> timetrap1(Timeout, true);
+ {infinity,_} -> infinity;
+ {Int,Scale} -> timetrap1(Timeout*Int, Scale)
end.
-timetrap1(Timeout) ->
- Ref = spawn_link(test_server_sup,timetrap,[Timeout,self()]),
+timetrap1(Timeout, Scale) ->
+ Ref = spawn_link(test_server_sup,timetrap,[Timeout,Scale,self()]),
case get(test_server_timetraps) of
undefined -> put(test_server_timetraps,[Ref]);
List -> put(test_server_timetraps,[Ref|List])
@@ -1582,7 +1625,6 @@ timetrap1(Timeout) ->
Ref.
ensure_timetrap(Config) ->
- %format("ensure_timetrap:~p~n",[Config]),
case get(test_server_timetraps) of
[_|_] ->
ok;
@@ -1623,7 +1665,7 @@ cancel_default_timetrap() ->
time_ms({hours,N}) -> hours(N);
time_ms({minutes,N}) -> minutes(N);
time_ms({seconds,N}) -> seconds(N);
-time_ms({Other,_N}) ->
+time_ms({Other,_N}) ->
format("=== ERROR: Invalid time specification: ~p. "
"Should be seconds, minutes, or hours.~n", [Other]),
exit({invalid_time_spec,Other});
@@ -1770,14 +1812,14 @@ call_crash(Time,Crash,M,F,A) ->
%% by the test server after completion of the test case
%% Therefore it is IMPORTANT that the USER terminates
%% the node!!
-%% {erl, ReleaseList} - Use an Erlang emulator determined by ReleaseList
-%% when starting nodes, instead of the same emulator
+%% {erl, ReleaseList} - Use an Erlang emulator determined by ReleaseList
+%% when starting nodes, instead of the same emulator
%% as the test server is running. ReleaseList is a list
-%% of specifiers, where a specifier is either
-%% {release, Rel}, {prog, Prog}, or 'this'. Rel is
-%% either the name of a release, e.g., "r7a" or
-%% 'latest'. 'this' means using the same emulator as
-%% the test server. Prog is the name of an emulator
+%% of specifiers, where a specifier is either
+%% {release, Rel}, {prog, Prog}, or 'this'. Rel is
+%% either the name of a release, e.g., "r7a" or
+%% 'latest'. 'this' means using the same emulator as
+%% the test server. Prog is the name of an emulator
%% executable. If the list has more than one element,
%% one of them is picked randomly. (Only
%% works on Solaris and Linux, and the test
@@ -1792,13 +1834,13 @@ call_crash(Time,Crash,M,F,A) ->
%% peer nodes.
%% Note that slave nodes always act as if they had
%% fail_on_error==false.
-%%
+%%
start_node(Name, Type, Options) ->
lists:foreach(
- fun(N) ->
+ fun(N) ->
case firstname(N) of
- Name ->
+ Name ->
format("=== WARNING: Trying to start node \'~w\' when node"
" with same first name exists: ~w", [Name, N]);
_other -> ok
@@ -1817,19 +1859,19 @@ start_node(Name, Type, Options) ->
%% Cannot run cover on shielded node or on a node started
%% by a shielded node.
Cover = case is_cover() of
- true ->
+ true ->
not is_shielded(Name) andalso same_version(Node);
- false ->
+ false ->
false
end,
net_adm:ping(Node),
case Cover of
- true ->
+ true ->
Sticky = unstick_all_sticky(Node),
cover:start(Node),
stick_all_sticky(Node,Sticky);
- _ ->
+ _ ->
ok
end,
{ok,Node};
@@ -1857,7 +1899,7 @@ wait_for_node(Slave) ->
self(),
{test_server_ctrl,wait_for_node,[Slave]}},
receive {sync_result,R} -> R end.
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% stop_node(Name) -> true|false
@@ -1867,7 +1909,7 @@ wait_for_node(Slave) ->
stop_node(Slave) ->
Nocover = is_shielded(Slave) orelse not same_version(Slave),
case is_cover() of
- true when not Nocover ->
+ true when not Nocover ->
Sticky = unstick_all_sticky(Slave),
cover:stop(Slave),
stick_all_sticky(Slave,Sticky);
@@ -1895,10 +1937,10 @@ stop_node(Slave) ->
%% with the {cleanup,false} option, or it was started
%% in some other way than test_server:start_node/3
format("=== WARNING: Attempt to stop a nonexisting slavenode (~p)~n"
- "=== Trying to kill it anyway!!!",
+ "=== Trying to kill it anyway!!!",
[Slave]),
case net_adm:ping(Slave)of
- pong ->
+ pong ->
slave:stop(Slave),
true;
pang ->
@@ -1918,7 +1960,7 @@ is_release_available(Release) ->
self(),
{test_server_ctrl,is_release_available,[Release]}},
receive {sync_result,R} -> R end.
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% run_on_shielded_node(Fun, CArgs) -> term()
@@ -1937,7 +1979,7 @@ is_release_available(Release) ->
%%
%% Fun - Function to execute
%% CArg - Extra command line arguments to use when starting
-%% the shielded node.
+%% the shielded node.
%%
%% If Fun is successfully executed, the result is returned.
%%
@@ -2037,8 +2079,8 @@ is_native(Mod) ->
%% The given String will occur in the comment field
%% of the table on the test suite result page. If
%% called several times, only the last comment is
-%% printed.
-%% comment/1 is also overwritten by the return value
+%% printed.
+%% comment/1 is also overwritten by the return value
%% {comment,Comment} or fail/1 (which prints Reason
%% as a comment).
comment(String) ->
@@ -2154,7 +2196,7 @@ purify_new_fds_inuse() ->
{'EXIT', _} -> false;
Inuse when is_integer(Inuse) -> Inuse
end.
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% purify_format(Format, Args) -> ok
%% Format = string()
@@ -2202,9 +2244,9 @@ local_or_remote_apply({M,F,A} = MFA) ->
request(Sock,Request) ->
gen_tcp:send(Sock,<<1,(term_to_binary(Request))/binary>>).
-%%
+%%
%% Generic receive function for communication with host
-%%
+%%
recv(Sock) ->
case gen_tcp:recv(Sock,0) of
{error,closed} ->
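The TimetrapData tuple threaded through run_test_case_apply/6 above is what timetrap/1 and adjusted_sleep/1 end up interpreting. A simplified, back-of-the-envelope sketch of the resulting arithmetic (the real code also copes with 'undefined' values and starts no timetrap at all for 'infinity'):

    %% Not from the patch: conceptual helper showing the effective time.
    effective_ms(_MSecs, {infinity,_Scale}) ->
        infinity;                                  % no timetrap is started
    effective_ms(MSecs, {Mult,true}) ->
        trunc(MSecs * Mult * test_server:timetrap_scale_factor());
    effective_ms(MSecs, {Mult,false}) ->
        trunc(MSecs * Mult).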
diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl
index 5c8ef0b703..49c8bfa2c9 100644
--- a/lib/test_server/src/test_server_ctrl.erl
+++ b/lib/test_server/src/test_server_ctrl.erl
@@ -27,7 +27,7 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% MODULE DEPENDENCIES:
-%% HARD TO REMOVE: erlang, lists, io_lib, gen_server, file, io, string,
+%% HARD TO REMOVE: erlang, lists, io_lib, gen_server, file, io, string,
%% code, ets, rpc, gen_tcp, inet, erl_tar, sets,
%% test_server, test_server_sup, test_server_node
%% EASIER TO REMOVE: filename, filelib, lib, re
@@ -36,7 +36,7 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% ARCHITECTURE
-%%
+%%
%% The Erlang Test Server can be run on the target machine (local target)
%% or towards a remote target. The execution flow is mainly the same in
%% both cases, but with a remote target the test cases are (obviously)
@@ -44,11 +44,11 @@
%% socket connections because the host should not be introduced as an
%% additional node in the distributed erlang system in which the test
%% cases are run.
-%%
-%%
+%%
+%%
%% Local Target:
%% =============
-%%
+%%
%% -----
%% | | test_server_ctrl ({global,test_server})
%% ----- (test_server_ctrl.erl)
@@ -62,33 +62,33 @@
%% -----
%% | | CaseProc
%% ----- (test_server.erl)
-%%
-%%
-%%
+%%
+%%
+%%
%% test_server_ctrl is the main process in the system. It is a registered
%% process, and it will always be alive when testing is ongoing.
%% test_server_ctrl initiates testing and monitors JobProc(s).
-%%
-%% When target is local, and Test Server is *not* being used by a framework
-%% application (where it might cause duplicate name problems in a distributed
-%% test environment), the process is globally registered as 'test_server'
+%%
+%% When target is local, and Test Server is *not* being used by a framework
+%% application (where it might cause duplicate name problems in a distributed
+%% test environment), the process is globally registered as 'test_server'
%% to be able to simulate the {global,test_server} process on a remote target.
-%%
-%% JobProc is spawned for each 'job' added to the test_server_ctrl.
+%%
+%% JobProc is spawned for each 'job' added to the test_server_ctrl.
%% A job can mean one test case, one test suite or one spec.
%% JobProc creates and writes logs and presents results from testing.
%% JobProc is the group leader for CaseProc.
-%%
+%%
%% CaseProc is spawned for each test case. It runs the test case and
%% sends results and any other information to its group leader - JobProc.
-%%
-%%
-%%
+%%
+%%
+%%
%% Remote Target:
%% ==============
-%%
+%%
%% HOST TARGET
-%%
+%%
%% ----- MainSock -----
%% test_server_ctrl | |- - - - - - -| | {global,test_server}
%% (test_server_ctrl.erl) ----- ----- (test_server.erl)
@@ -102,36 +102,36 @@
%% -----
%% | | CaseProc
%% ----- (test_server.erl)
-%%
-%%
-%%
-%%
+%%
+%%
+%%
+%%
%% A separate test_server process only exists when target is remote. It
%% is then the main process on target. It is started when test_server_ctrl
%% is started, and a socket connection is established between
%% test_server_ctrl and test_server. The following information can be sent
%% over MainSock:
-%%
+%%
%% HOST TARGET
%% -> {target_info, TargetInfo} (during initiation)
%% <- {job_proc_killed,Name,Reason} (if a JobProcT dies unexpectedly)
%% -> {job,Port,Name} (to start a new JobProcT)
-%%
-%%
+%%
+%%
%% When target is remote, JobProc is split into two processes: JobProcH
%% executing on Host and JobProcT executing on Target. (The two processes
%% execute the same code as JobProc does when target is local.) JobProcH
%% and JobProcT communicates over a socket connection. The following
%% information can be sent over JobSock:
-%%
+%%
%% HOST TARGET
%% -> {test_case, Case} To start a new test case
%% -> {beam,Mod} .beam file as binary to be loaded
%% on target, e.g. a test suite
%% -> {datadir,Tarfile} Content of the datadir for a test suite
%% <- {apply,MFA} MFA to be applied on host, ignore return;
-%% (apply is used for printing information in
-%% log or console)
+%% (apply is used for printing information in
+%% log or console)
%% <- {sync_apply,MFA} MFA to be applied on host, wait for return
%% (used for starting and stopping slave nodes)
%% -> {sync_apply,MFA} MFA to be applied on target, wait for return
@@ -141,7 +141,7 @@
%% <- {crash_dumps,Tarfile} When a test case is finished
%% -> job_done When a job is finished
%% <- {privdir,Privdir} When a job is finished
-%%
+%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -161,7 +161,7 @@
abort_current_testcase/1, abort/0]).
-export([start_get_totals/1, stop_get_totals/0]).
-export([get_levels/0, set_levels/3]).
--export([multiply_timetraps/1]).
+-export([multiply_timetraps/1, scale_timetraps/1, get_timetrap_parameters/0]).
-export([cover/2, cover/3, cover/7,
cross_cover_analyse/1, cross_cover_analyse/2, trc/1, stop_trace/0]).
-export([testcase_callback/1]).
@@ -207,13 +207,15 @@
-define(pl2a(M), test_server_sup:package_atom(M)).
-define(void_fun, fun() -> ok end).
--define(mod_result(X), if X == skip -> skipped;
- X == auto_skip -> skipped;
+-define(mod_result(X), if X == skip -> skipped;
+ X == auto_skip -> skipped;
true -> X end).
--record(state,{jobs=[],levels={1,19,10},multiply_timetraps=1,finish=false,
+-record(state,{jobs=[],levels={1,19,10},
+ multiply_timetraps=1,scale_timetraps=true,
+ finish=false,
target_info, trc=false, cover=false, wait_for_node=[],
- testcase_callback=undefined, idle_notify=[],
+ testcase_callback=undefined, idle_notify=[],
get_totals=false, random_seed=undefined}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -222,14 +224,14 @@
add_dir(Name, Job=[Dir|_Dirs]) when is_list(Dir) ->
add_job(cast_to_list(Name),
lists:map(fun(D)-> {dir,cast_to_list(D)} end, Job));
-add_dir(Name, Dir) ->
+add_dir(Name, Dir) ->
add_job(cast_to_list(Name), {dir,cast_to_list(Dir)}).
add_dir(Name, Job=[Dir|_Dirs], Pattern) when is_list(Dir) ->
add_job(cast_to_list(Name),
lists:map(fun(D)-> {dir,cast_to_list(D),
cast_to_list(Pattern)} end, Job));
-add_dir(Name, Dir, Pattern) ->
+add_dir(Name, Dir, Pattern) ->
add_job(cast_to_list(Name), {dir,cast_to_list(Dir),cast_to_list(Pattern)}).
add_module(Mod) when is_atom(Mod) ->
@@ -256,14 +258,14 @@ add_spec(Spec) ->
false -> {error,nofile}
end.
-%% This version of the interface is to be used if there are
+%% This version of the interface is to be used if there are
%% suites or cases that should be skipped.
add_dir_with_skip(Name, Job=[Dir|_Dirs], Skip) when is_list(Dir) ->
add_job(cast_to_list(Name),
lists:map(fun(D)-> {dir,cast_to_list(D)} end, Job),
Skip);
-add_dir_with_skip(Name, Dir, Skip) ->
+add_dir_with_skip(Name, Dir, Skip) ->
add_job(cast_to_list(Name), {dir,cast_to_list(Dir)}, Skip).
add_dir_with_skip(Name, Job=[Dir|_Dirs], Pattern, Skip) when is_list(Dir) ->
@@ -271,7 +273,7 @@ add_dir_with_skip(Name, Job=[Dir|_Dirs], Pattern, Skip) when is_list(Dir) ->
lists:map(fun(D)-> {dir,cast_to_list(D),
cast_to_list(Pattern)} end, Job),
Skip);
-add_dir_with_skip(Name, Dir, Pattern, Skip) ->
+add_dir_with_skip(Name, Dir, Pattern, Skip) ->
add_job(cast_to_list(Name),
{dir,cast_to_list(Dir),cast_to_list(Pattern)}, Skip).
@@ -295,15 +297,14 @@ add_cases_with_skip(Name, Mod, Cases, Skip) when is_atom(Mod), is_list(Cases) ->
add_tests_with_skip(LogDir, Tests, Skip) ->
add_job(LogDir,
- lists:map(fun({Dir,all,all}) ->
+ lists:map(fun({Dir,all,all}) ->
{Dir,{dir,Dir}};
- ({Dir,Mods,all}) ->
+ ({Dir,Mods,all}) ->
{Dir,lists:map(fun(M) -> {M,all} end, Mods)};
({Dir,Mod,Cases}) ->
{Dir,{Mod,Cases}}
end, Tests),
Skip).
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% COMMAND LINE INTERFACE
@@ -315,7 +316,7 @@ parse_cmd_line(['SPEC',Spec|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
case file:consult(Spec) of
{ok, TermList} ->
Name = filename:rootname(Spec),
- parse_cmd_line(Cmds, TermList++SpecList, [Name|Names], Param,
+ parse_cmd_line(Cmds, TermList++SpecList, [Name|Names], Param,
Trc, Cov, TCCB);
{error,Reason} ->
io:format("Can't open ~s: ~p\n",
@@ -406,7 +407,7 @@ run_test(CommandLine) ->
end,
testcase_callback(TCCB),
add_job(Name, {command_line,SpecList}),
-
+
%% adding of jobs involves file i/o which may take long time
%% when running a nfs mounted file system (VxWorks).
case controller_call(get_target_info) of
@@ -479,6 +480,12 @@ set_levels(Show, Major, Minor) ->
multiply_timetraps(N) ->
controller_call({multiply_timetraps,N}).
+scale_timetraps(Bool) ->
+ controller_call({scale_timetraps,Bool}).
+
+get_timetrap_parameters() ->
+ controller_call(get_timetrap_parameters).
+
trc(TraceFile) ->
controller_call({trace,TraceFile}, 2*?ACCEPT_TIMEOUT).
@@ -551,7 +558,7 @@ controller_call(Arg, Timeout) ->
Other ->
Other
end.
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -617,7 +624,7 @@ contact_main_target(local) ->
case os:getenv("TEST_SERVER_FRAMEWORK") of
false ->
%% Local target! The global test_server process implemented by
- %% test_server.erl will not be started, so we simulate it by
+ %% test_server.erl will not be started, so we simulate it by
%% globally registering this process instead.
global:sync(),
case global:whereis_name(test_server) of
@@ -681,9 +688,9 @@ read_parameters([], Par) when Par#par.type==undefined ->
read_parameters([], Par) when Par#par.target==undefined ->
{error, {missing_mandatory_parameter,target}};
read_parameters([], Par0) ->
- Par =
+ Par =
case {Par0#par.type, Par0#par.master} of
- {ose, undefined} ->
+ {ose, undefined} ->
%% Use this node as master and bootserver for target
%% and slave nodes
Par0#par{master = atom_to_list(node()),
@@ -691,10 +698,10 @@ read_parameters([], Par0) ->
{ose, _Master} ->
%% Master for target and slave nodes was defined in parameterfile
Par0;
- _ ->
+ _ ->
%% Use target as master for slave nodes,
%% (No master is used for target)
- Par0#par{master="test_server@" ++ Par0#par.target}
+ Par0#par{master="test_server@" ++ Par0#par.target}
end,
{ok,Par}.
@@ -708,7 +715,7 @@ naming() ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% handle_call(kill_slavenodes, From, State) -> ok
%%
-%% Kill all slave nodes that remain after a test case
+%% Kill all slave nodes that remain after a test case
%% is completed.
%%
handle_call(kill_slavenodes, _From, State) ->
@@ -736,7 +743,7 @@ handle_call(get_hosts, _From, State) ->
{reply, Hosts, State};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% handle_call({add_job,Dir,Name,TopCase,Skip}, _, State) ->
+%% handle_call({add_job,Dir,Name,TopCase,Skip}, _, State) ->
%% ok | {error,Reason}
%%
%% Dir = string()
@@ -760,7 +767,7 @@ handle_call(get_hosts, _From, State) ->
handle_call({add_job,Dir,Name,TopCase,Skip}, _From, State) ->
LogDir = Dir ++ ?logdir_ext,
- ExtraTools =
+ ExtraTools =
case State#state.cover of
false -> [];
{App,Analyse} -> [{cover,App,Analyse}]
@@ -776,19 +783,21 @@ handle_call({add_job,Dir,Name,TopCase,Skip}, _From, State) ->
{spec,SpecName} ->
Pid = spawn_tester(
?MODULE, do_spec,
- [SpecName,State#state.multiply_timetraps],
- LogDir, Name, State#state.levels,
+ [SpecName,{State#state.multiply_timetraps,
+ State#state.scale_timetraps}],
+ LogDir, Name, State#state.levels,
State#state.testcase_callback, ExtraTools1),
NewJobs = [{Name,Pid}|State#state.jobs],
- {reply, ok, State#state{jobs=NewJobs}};
+ {reply, ok, State#state{jobs=NewJobs}};
{command_line,SpecList} ->
Pid = spawn_tester(
?MODULE, do_spec_list,
- [SpecList,State#state.multiply_timetraps],
- LogDir, Name, State#state.levels,
+ [SpecList,{State#state.multiply_timetraps,
+ State#state.scale_timetraps}],
+ LogDir, Name, State#state.levels,
State#state.testcase_callback, ExtraTools1),
NewJobs = [{Name,Pid}|State#state.jobs],
- {reply, ok, State#state{jobs=NewJobs}};
+ {reply, ok, State#state{jobs=NewJobs}};
TopCase ->
case State#state.get_totals of
{CliPid,Fun} ->
@@ -798,10 +807,11 @@ handle_call({add_job,Dir,Name,TopCase,Skip}, _From, State) ->
_ ->
Cfg = make_config([]),
Pid = spawn_tester(
- ?MODULE, do_test_cases,
+ ?MODULE, do_test_cases,
[TopCase,Skip,Cfg,
- State#state.multiply_timetraps],
- LogDir, Name, State#state.levels,
+ {State#state.multiply_timetraps,
+ State#state.scale_timetraps}],
+ LogDir, Name, State#state.levels,
State#state.testcase_callback, ExtraTools1),
NewJobs = [{Name,Pid}|State#state.jobs],
{reply, ok, State#state{jobs=NewJobs}}
@@ -827,7 +837,7 @@ handle_call(jobs, _From, State) ->
%% handle_call({abort_current_testcase,Reason}, _, State) -> Result
%% Reason = term()
%% Result = ok | {error,no_testcase_running}
-%%
+%%
%% Attempts to abort the test case that's currently running.
handle_call({abort_current_testcase,Reason}, _From, State) ->
@@ -855,7 +865,7 @@ handle_call({abort_current_testcase,Reason}, _From, State) ->
handle_call({finish,Fini}, _From, State) ->
case State#state.jobs of
[] ->
- lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
+ lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
State#state.idle_notify),
State2 = State#state{finish=false},
{stop,shutdown,{ok,self()}, State2};
@@ -878,7 +888,7 @@ handle_call({idle_notify,Fun}, {Cli,_Ref}, State) ->
{reply, {ok,self()}, State};
_ ->
Subscribed = State#state.idle_notify,
- {reply, {ok,self()},
+ {reply, {ok,self()},
State#state{idle_notify=[{Cli,Fun}|Subscribed]}}
end;
@@ -891,7 +901,7 @@ handle_call({idle_notify,Fun}, {Cli,_Ref}, State) ->
handle_call({start_get_totals,Fun}, {Cli,_Ref}, State) ->
{reply, {ok,self()}, State#state{get_totals={Cli,Fun}}};
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% handle_call(stop_get_totals, From, State) -> ok
%%
@@ -942,11 +952,31 @@ handle_call({multiply_timetraps,N}, _From, State) ->
{reply,ok,State#state{multiply_timetraps=N}};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({scale_timetraps,Bool}, _, State) -> ok
+%% Bool = true | false
+%%
+%% Specifies if test_server should scale the timetrap value
+%% automatically if e.g. cover is running.
+
+handle_call({scale_timetraps,Bool}, _From, State) ->
+ {reply,ok,State#state{scale_timetraps=Bool}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(get_timetrap_parameters, _, State) -> {Multiplier,Scale}
+%% Multiplier = integer() | infinity
+%% Scale = true | false
+%%
+%% Returns the parameter values that affect timetraps.
+
+handle_call(get_timetrap_parameters, _From, State) ->
+ {reply,{State#state.multiply_timetraps,State#state.scale_timetraps},State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% handle_call({trace,TraceFile}, _, State) -> ok | {error,Reason}
%%
-%% Starts a separate node (trace control node) which
+%% Starts a separate node (trace control node) which
%% starts tracing on target and all slave nodes
-%%
+%%
%% TraceFile is a text file with elements of type
%% {Trace,Mod,TracePattern}.
%% {Trace,Mod,Func,TracePattern}.
@@ -955,10 +985,10 @@ handle_call({multiply_timetraps,N}, _From, State) ->
%% Trace = tp | tpl; local or global call trace
%% Mod,Func = atom(), Arity=integer(); defines what to trace
%% TracePattern = [] | match_spec()
-%%
+%%
%% The 'call' trace flag is set on all processes, and then
%% the given trace patterns are set.
-
+
handle_call({trace,TraceFile}, _From, State=#state{trc=false}) ->
TI = State#state.target_info,
case test_server_node:start_tracer_node(TraceFile, TI) of
@@ -993,7 +1023,7 @@ handle_call({cover,App,Analyse}, _From, State) ->
%% handle_call({testcase_callback,{Mod,Func}}, _, State) -> ok | {error,Reason}
%%
%% Add a callback function that will be called before and after every
-%% test case (on the test case process):
+%% test case (on the test case process):
%%
%% Mod:Func(Suite,TestCase,InitOrEnd,Config)
%%
@@ -1001,9 +1031,9 @@ handle_call({cover,App,Analyse}, _From, State) ->
handle_call({testcase_callback,ModFunc}, _From, State) ->
case ModFunc of
- {Mod,Func} ->
+ {Mod,Func} ->
case code:is_loaded(Mod) of
- {file,_} ->
+ {file,_} ->
ok;
false ->
code:load_file(Mod)
@@ -1065,15 +1095,15 @@ handle_call({start_node, Name, Type, Options}, From, State) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% handle_call({wait_for_node,Node}, _, State) -> ok
%%
-%% Waits for a new node to take contact. Used if
+%% Waits for a new node to establish contact. Used if
%% node is started with option {wait,false}
handle_call({wait_for_node, Node}, From, State) ->
- NewWaitList =
+ NewWaitList =
case ets:lookup(slave_tab,Node) of
- [] ->
+ [] ->
[{Node,From}|State#state.wait_for_node];
- _ ->
+ _ ->
gen_server:reply(From,ok),
State#state.wait_for_node
end,
@@ -1086,7 +1116,7 @@ handle_call({wait_for_node, Node}, From, State) ->
%% - the node is really stopped by test_server when this returns.
handle_call({stop_node, Name}, _From, State) ->
- R = test_server_node:stop_node(Name, State#state.target_info),
+ R = test_server_node:stop_node(Name, State#state.target_info),
{reply, R, State};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -1112,7 +1142,7 @@ handle_cast({node_started,Node}, State) ->
false -> ok;
Trc -> test_server_node:trace_nodes(Trc, [Node])
end,
- NewWaitList =
+ NewWaitList =
case lists:keysearch(Node,1,State#state.wait_for_node) of
{value,{Node,From}} ->
gen_server:reply(From, ok),
@@ -1128,10 +1158,10 @@ handle_cast({node_started,Node}, State) ->
%% Reason = term()
%%
%% Handles exit messages from linked processes. Only test suites and
-%% possibly a target client are expected to be linked.
+%% possibly a target client are expected to be linked.
%% When a test suite terminates, it is removed from the job queue.
%% If a target client terminates it means that we lost contact with
-%% target. The test_server_ctrl process is terminated, and teminate/2
+%% target. The test_server_ctrl process is terminated, and terminate/2
%% will do the cleanup
handle_info({'EXIT',Pid,Reason}, State) ->
@@ -1139,7 +1169,7 @@ handle_info({'EXIT',Pid,Reason}, State) ->
false ->
TI = State#state.target_info,
case TI#target_info.target_client of
- Pid ->
+ Pid ->
%% The target client died - lost contact with target
{stop,{lost_contact_with_target,Reason},State};
_other ->
@@ -1160,13 +1190,13 @@ handle_info({'EXIT',Pid,Reason}, State) ->
State2 = State#state{jobs=NewJobs},
case NewJobs of
[] ->
- lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
+ lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
State2#state.idle_notify),
case State2#state.finish of
false ->
{noreply,State2#state{idle_notify=[]}};
_ -> % true | abort
- %% test_server:finish() has been called and
+ %% test_server:finish() has been called and
%% there are no jobs in the job queue =>
%% stop the test_server_ctrl
{stop,shutdown,State2#state{finish=false}}
@@ -1174,7 +1204,7 @@ handle_info({'EXIT',Pid,Reason}, State) ->
_ -> % pending jobs
case State2#state.finish of
abort -> % abort test now!
- lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
+ lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
State2#state.idle_notify),
{stop,shutdown,State2#state{finish=false}};
_ -> % true | false
@@ -1194,9 +1224,9 @@ handle_info({tcp,_MainSock,<<1,Request/binary>>}, State) ->
case binary_to_term(Request) of
{job_proc_killed,Name,Reason} ->
%% The only purpose of this is to inform the user about what
- %% happened on target.
+ %% happened on target.
%% The local job proc will soon be killed by the closed socket or
- %% because the job is finished. Then the above clause ('EXIT') will
+ %% because the job is finished. Then the above clause ('EXIT') will
%% handle the problem.
io:format("Suite ~s was killed on remote target with reason"
" ~p\n", [Name,Reason]);
@@ -1204,13 +1234,13 @@ handle_info({tcp,_MainSock,<<1,Request/binary>>}, State) ->
ignore
end,
{noreply,State};
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% handle_info({tcp_closed,Sock}, State)
%%
%% A Socket was closed. This indicates that a node died.
-%% This can be
+%% This can be
%% *Target node (if remote)
%% *Slave or peer node started by a test suite
%% *Trace control node
@@ -1221,10 +1251,10 @@ handle_info({tcp_closed,Sock}, State=#state{trc=Sock}) ->
{noreply,State#state{trc=false}};
handle_info({tcp_closed,Sock}, State) ->
case test_server_node:nodedown(Sock,State#state.target_info) of
- target_died ->
+ target_died ->
%% terminate/2 will do the cleanup
{stop,target_died,State};
- _ ->
+ _ ->
{noreply,State}
end;
@@ -1260,7 +1290,7 @@ kill_all_jobs([]) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% spawn_tester(Mod, Func, Args, Dir, Name, Levels,
+%% spawn_tester(Mod, Func, Args, Dir, Name, Levels,
%% TestCaseCallback, ExtraTools) -> Pid
%% Mod = atom()
%% Func = atom()
@@ -1268,23 +1298,23 @@ kill_all_jobs([]) ->
%% Dir = string()
%% Name = string()
%% Levels = {integer(),integer(),integer()}
-%% TestCaseCallback = {CBMod,CBFunc} | undefined
+%% TestCaseCallback = {CBMod,CBFunc} | undefined
%% ExtraTools = [ExtraTool,...]
%% ExtraTool = CoverInfo | TraceInfo | RandomSeed
%%
%% Spawns a test suite execute-process, just an ordinary spawn, except
%% that it will set a lot of dictionary information before starting the
%% named function. Also, the execution is timed and protected by a catch.
-%% When the named function is done executing, a summary of the results
+%% When the named function is done executing, a summary of the results
%% is printed to the log files.
spawn_tester(Mod, Func, Args, Dir, Name, Levels, TCCallback, ExtraTools) ->
spawn_link(
- fun() -> init_tester(Mod, Func, Args, Dir, Name, Levels,
- TCCallback, ExtraTools)
+ fun() -> init_tester(Mod, Func, Args, Dir, Name, Levels,
+ TCCallback, ExtraTools)
end).
-init_tester(Mod, Func, Args, Dir, Name, {SumLev,MajLev,MinLev},
+init_tester(Mod, Func, Args, Dir, Name, {SumLev,MajLev,MinLev},
TCCallback, ExtraTools) ->
process_flag(trap_exit, true),
put(test_server_name, Name),
@@ -1324,7 +1354,7 @@ init_tester(Mod, Func, Args, Dir, Name, {SumLev,MajLev,MinLev},
{Skipped,_} -> {Skipped,io_lib:format(", ~p Skipped", [Skipped])}
end,
OkN = get(test_server_ok),
- FailedN = get(test_server_failed),
+ FailedN = get(test_server_failed),
print(html,"<tr><td></td><td><b>TOTAL</b></td><td></td><td></td>"
"<td>~.3fs</td><td><b>~s</b></td><td>~p Ok, ~p Failed~s of ~p</td></tr>\n",
[Time,SuccessStr,OkN,FailedN,SkipStr,OkN+FailedN+SkippedN]).
@@ -1338,9 +1368,9 @@ ts_tc(M, F, A) ->
{Elapsed,Val}.
elapsed_time(Before, After) ->
- (element(1,After)*1000000000000 +
+ (element(1,After)*1000000000000 +
element(2,After)*1000000 + element(3,After)) -
- (element(1,Before)*1000000000000 +
+ (element(1,Before)*1000000000000 +
element(2,Before)*1000000 + element(3,Before)).
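
The subtraction above works on erlang:now()-style {MegaSecs,Secs,MicroSecs} tuples, converting each to microseconds before subtracting. The same arithmetic written with pattern matching, as a sketch (module name is made up):

-module(elapsed_sketch).
-export([elapsed_us/2]).

%% elapsed_us(Before, After) -> integer() microseconds, same formula as
%% elapsed_time/2 above.
elapsed_us({MegaB,SecB,MicroB}, {MegaA,SecA,MicroA}) ->
    (MegaA*1000000000000 + SecA*1000000 + MicroA) -
        (MegaB*1000000000000 + SecB*1000000 + MicroB).

%% Example: elapsed_us({1280,0,0}, {1280,2,500000}) -> 2500000 (2.5 s).
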
start_extra_tools(ExtraTools) ->
@@ -1378,28 +1408,32 @@ stop_extra_tools([], _) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% do_spec(SpecName, MultiplyTimetrap) -> {error,Reason} | exit(Result)
+%% do_spec(SpecName, TimetrapSpec) -> {error,Reason} | exit(Result)
%% SpecName = string()
+%% TimetrapSpec = MultiplyTimetrap | {MultiplyTimetrap,ScaleTimetrap}
%% MultiplyTimetrap = integer() | infinity
+%% ScaleTimetrap = bool()
%%
%% Reads the named test suite specification file, and executes it.
%%
-%% This function is meant to be called by a process created by
+%% This function is meant to be called by a process created by
%% spawn_tester/7, which sets up some necessary dictionary values.
-do_spec(SpecName, MultiplyTimetrap) when is_list(SpecName) ->
+do_spec(SpecName, TimetrapSpec) when is_list(SpecName) ->
case file:consult(SpecName) of
{ok,TermList} ->
- do_spec_list(TermList,MultiplyTimetrap);
+ do_spec_list(TermList,TimetrapSpec);
{error,Reason} ->
io:format("Can't open ~s: ~p\n", [SpecName,Reason]),
{error,{cant_open_spec,Reason}}
end.
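
For reference, both TimetrapSpec shapes documented above are accepted. The shell session below only illustrates the two forms; the values themselves are made up and carry no particular meaning:

1> TimetrapSpec1 = 2.                  %% old form: multiplier only
2
2> TimetrapSpec2 = {infinity, false}.  %% new form: {MultiplyTimetrap,ScaleTimetrap}
{infinity,false}
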
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% do_spec_list(TermList) -> exit(Result)
+%% do_spec_list(TermList, TimetrapSpec) -> exit(Result)
%% TermList = [term()|...]
+%% TimetrapSpec = MultiplyTimetrap | {MultiplyTimetrap,ScaleTimetrap}
%% MultiplyTimetrap = integer() | infinity
+%% ScaleTimetrap = bool()
%%
%% Executes a list of test suite specification commands. The following
%% commands are available, and may occur zero or more times (if several,
@@ -1422,21 +1456,21 @@ do_spec(SpecName, MultiplyTimetrap) when is_list(SpecName) ->
%% nodenames will be generated from the local host.
%%
%% {hosts, Hosts} Specifies a list of available hosts on which to start
-%% slave nodes. It is used when the {remote, true} option is given to the
+%% slave nodes. It is used when the {remote, true} option is given to the
%% test_server:start_node/3 function. Also, if {require_nodenames, Num} is
-%% contained in the TermList, the generated nodenames will be spread over
+%% contained in the TermList, the generated nodenames will be spread over
%% all hosts given in this Hosts list. The hostnames are given as atoms or
%% strings.
-%%
+%%
%% {diskless, true} is kept for backwards compatibility and
%% should not be used. Use a configuration test case instead.
-%%
-%% This function is meant to be called by a process created by
+%%
+%% This function is meant to be called by a process created by
%% spawn_tester/7, which sets up some necessary dictionary values.
-do_spec_list(TermList0, MultiplyTimetrap) ->
+do_spec_list(TermList0, TimetrapSpec) ->
Nodes = [],
- TermList =
+ TermList =
case lists:keysearch(hosts, 1, TermList0) of
{value, {hosts, Hosts0}} ->
Hosts = lists:map(fun(H) -> cast_to_list(H) end, Hosts0),
@@ -1447,7 +1481,7 @@ do_spec_list(TermList0, MultiplyTimetrap) ->
end,
DefaultConfig = make_config([{nodes,Nodes}]),
{TopCases,SkipList,Config} = do_spec_terms(TermList, [], [], DefaultConfig),
- do_test_cases(TopCases, SkipList, Config, MultiplyTimetrap).
+ do_test_cases(TopCases, SkipList, Config, TimetrapSpec).
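
The hosts handling above boils down to rewriting the {hosts,...} term so every entry is a string. A self-contained sketch of just that step, assuming cast_to_list/1 behaves like an atom-or-string to string conversion (what the controller later does with the converted list is outside this hunk):

-module(hosts_sketch).
-export([stringify_hosts/1]).

stringify_hosts(TermList) ->
    case lists:keysearch(hosts, 1, TermList) of
        {value, {hosts, Hosts0}} ->
            Hosts = [if is_atom(H) -> atom_to_list(H); true -> H end
                     || H <- Hosts0],
            lists:keyreplace(hosts, 1, TermList, {hosts, Hosts});
        false ->
            TermList
    end.
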
do_spec_terms([], TopCases, SkipList, Config) ->
{TopCases,SkipList,Config};
@@ -1470,21 +1504,21 @@ do_spec_terms([{default_timeout,Tmo}|Terms], TopCases, SkipList, Config) ->
do_spec_terms([{require_nodenames,NumNames}|Terms], TopCases, SkipList, Config) ->
NodeNames0=generate_nodenames(NumNames),
NodeNames=lists:delete([], NodeNames0),
- do_spec_terms(Terms, TopCases, SkipList,
+ do_spec_terms(Terms, TopCases, SkipList,
update_config(Config, {nodenames,NodeNames}));
do_spec_terms([Other|Terms], TopCases, SkipList, Config) ->
io:format("** WARNING: Spec file contains unknown directive ~p\n",
[Other]),
do_spec_terms(Terms, TopCases, SkipList, Config).
-
+
generate_nodenames(Num) ->
Hosts = case controller_call(get_hosts) of
- [] ->
+ [] ->
TI = controller_call(get_target_info),
[TI#target_info.host];
- List ->
+ List ->
List
end,
generate_nodenames2(Num, Hosts, []).
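
generate_nodenames2/3 itself falls outside this hunk. A purely hypothetical sketch of spreading Num generated names round-robin over the host list; the "nodeN@Host" naming is made up, and the real names also get a unique part via temp_nodename below:

-module(nodename_sketch).
-export([spread_names/2]).

spread_names(Num, Hosts) ->
    spread_names(Num, Hosts, []).

spread_names(0, _Hosts, Acc) ->
    lists:reverse(Acc);
spread_names(N, [Host|Rest], Acc) ->
    Name = lists:concat(["node", N, "@", Host]),
    spread_names(N-1, Rest ++ [Host], [Name|Acc]).
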
@@ -1511,7 +1545,7 @@ temp_nodename([Chr|Base], Acc) ->
%% NoOfCases = integer() | unknown
%%
%% Counts the test cases that are about to run and returns that number.
-%% If there's a conf group in TestSpec with a repeat property, the total number
+%% If there's a conf group in TestSpec with a repeat property, the total number
%% of cases can not be calculated and NoOfCases = unknown.
count_test_cases(TopCases, SkipCases) when is_list(TopCases) ->
case collect_all_cases(TopCases, SkipCases) of
@@ -1522,14 +1556,14 @@ count_test_cases(TopCases, SkipCases) when is_list(TopCases) ->
case remove_conf(TestSpec) of
{repeats,_} ->
unknown;
- TestSpec1 ->
+ TestSpec1 ->
length(TestSpec1)
end}
end;
count_test_cases(TopCase, SkipCases) ->
count_test_cases([TopCase], SkipCases).
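
A much simplified sketch of the rule described above: conf/make wrappers are not counted as cases, and any {repeat,N} group property makes the total unknown. The real remove_conf/3 below also handles bare 'repeat' atoms, the repeat_until_* variants and skip_case wrappers, which are ignored here for brevity:

-module(count_sketch).
-export([count_or_unknown/1]).

count_or_unknown(TestSpec) ->
    HasRepeat = lists:any(fun({conf,_Ref,Props,_MF}) ->
                                  lists:keymember(repeat, 1, Props);
                             (_) ->
                                  false
                          end, TestSpec),
    case HasRepeat of
        true  -> unknown;
        false -> length([C || C <- TestSpec,
                              element(1, C) =/= conf,
                              element(1, C) =/= make])
    end.
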
-
+
remove_conf(Cases) ->
remove_conf(Cases, [], false).
@@ -1543,8 +1577,8 @@ remove_conf([{conf, _Ref, Props, _MF}|Cases], NoConf, Repeats) ->
end;
remove_conf([{make,_Ref,_MF}|Cases], NoConf, Repeats) ->
remove_conf(Cases, NoConf, Repeats);
-remove_conf([{skip_case,{Type,_Ref,_MF,_Cmt}}|Cases],
- NoConf, Repeats) when Type==conf;
+remove_conf([{skip_case,{Type,_Ref,_MF,_Cmt}}|Cases],
+ NoConf, Repeats) when Type==conf;
Type==make ->
remove_conf(Cases, NoConf, Repeats);
remove_conf([C|Cases], NoConf, Repeats) ->
@@ -1582,22 +1616,30 @@ add_mod(Mod, Mods) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) ->
+%% do_test_cases(TopCases, SkipCases, Config, TimetrapSpec) ->
%% exit(Result)
%%
%% TopCases = term() (See collect_cases/3)
%% SkipCases = term() (See collect_cases/3)
%% Config = term() (See collect_cases/3)
+%% TimetrapSpec = MultiplyTimetrap | {MultiplyTimetrap,ScaleTimetrap}
%% MultiplyTimetrap = integer() | infinity
+%% ScaleTimetrap = bool()
%%
%% Initializes and starts the test run, for "ordinary" test suites.
%% Creates log directories and log files, inserts initial timestamps and
%% configuration information into the log files.
%%
-%% This function is meant to be called by a process created by
+%% This function is meant to be called by a process created by
%% spawn_tester/7, which sets up some necessary dictionary values.
-
-do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) when is_list(TopCases) ->
+do_test_cases(TopCases, SkipCases,
+ Config, MultiplyTimetrap) when is_integer(MultiplyTimetrap);
+ MultiplyTimetrap == infinity ->
+ do_test_cases(TopCases, SkipCases, Config, {MultiplyTimetrap,true});
+
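
The clause above is the backwards-compatibility shim of this change: callers that still pass a bare multiplier get timetrap scaling enabled by default. The same rule as a standalone sketch (the helper name is illustrative, not part of test_server_ctrl):

-module(timetrap_sketch).
-export([to_timetrap_data/1]).

to_timetrap_data(M) when is_integer(M); M == infinity ->
    {M, true};                     %% old style: multiplier only
to_timetrap_data({_M, _Scale} = Pair) ->
    Pair.                          %% new style: {MultiplyTimetrap,ScaleTimetrap}
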
+do_test_cases(TopCases, SkipCases,
+ Config, TimetrapData) when is_list(TopCases),
+ is_tuple(TimetrapData) ->
start_log_file(),
case collect_all_cases(TopCases, SkipCases) of
{error,Why} ->
@@ -1607,10 +1649,10 @@ do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) when is_list(TopCas
N = case remove_conf(TestSpec0) of
{repeats,_} -> unknown;
TS -> length(TS)
- end,
+ end,
put(test_server_cases, N),
put(test_server_case_num, 0),
- TestSpec =
+ TestSpec =
add_init_and_end_per_suite(TestSpec0, undefined, undefined),
TI = get_target_info(),
print(1, "Starting test~s", [print_if_known(N, {", ~w test cases",[N]},
@@ -1643,7 +1685,7 @@ do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) when is_list(TopCas
[TI#target_info.version, TI#target_info.root_dir]);
_ ->
case test_server_sup:framework_call(target_info, []) of
- TargetInfo when is_list(TargetInfo),
+ TargetInfo when is_list(TargetInfo),
length(TargetInfo) > 0 ->
print(html, "<p>Target:<br>\n"),
print(html, "~s\n", [TargetInfo]);
@@ -1681,12 +1723,12 @@ do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) when is_list(TopCas
print(major, "=otp_release ~s", [TI#target_info.otp_release]),
print(major, "=started ~s",
[lists:flatten(timestamp_get(""))]),
- run_test_cases(TestSpec, Config, MultiplyTimetrap)
+ run_test_cases(TestSpec, Config, TimetrapData)
end;
-do_test_cases(TopCase, SkipCases, Config, MultiplyTimetrap) ->
+do_test_cases(TopCase, SkipCases, Config, TimetrapSpec) ->
%% when not list(TopCase)
- do_test_cases([TopCase], SkipCases, Config, MultiplyTimetrap).
+ do_test_cases([TopCase], SkipCases, Config, TimetrapSpec).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -1742,7 +1784,7 @@ start_log_file() ->
make_html_link(LinkName, Target, Explanation) ->
%% if possible use a relative reference to Target.
- TargetL = filename:split(Target),
+ TargetL = filename:split(Target),
PwdL = filename:split(filename:dirname(LinkName)),
Href = case lists:prefix(PwdL, TargetL) of
true ->
@@ -1782,7 +1824,7 @@ start_minor_log_file(Mod, Func) ->
start_minor_log_file1(Mod, Func, LogDir, AbsName);
{ok,_} -> %% special case, duplicate names
{_,S,Us} = now(),
- Name1_0 =
+ Name1_0 =
lists:flatten(io_lib:format("~s.~s.~w.~w~s", [Mod,Func,S,
trunc(Us/1000),
?html_ext])),
@@ -1853,7 +1895,7 @@ html_convert_modules(TestSpec, _Config) ->
%% Retrieve a list of modules out of the test spec.
html_isolate_modules(List) -> html_isolate_modules(List, sets:new()).
-
+
html_isolate_modules([], Set) -> sets:to_list(Set);
html_isolate_modules([{skip_case,_}|Cases], Set) ->
html_isolate_modules(Cases, Set);
@@ -1919,36 +1961,36 @@ add_init_and_end_per_suite([{make,_,_}=Case|Cases], LastMod, LastRef) ->
[Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
add_init_and_end_per_suite([{skip_case,{{Mod,all},_}}=Case|Cases], LastMod, LastRef)
when Mod =/= LastMod ->
- {PreCases, NextMod, NextRef} =
+ {PreCases, NextMod, NextRef} =
do_add_end_per_suite_and_skip(LastMod, LastRef, Mod),
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{skip_case,{{Mod,_},_}}=Case|Cases], LastMod, LastRef)
when Mod =/= LastMod ->
- {PreCases, NextMod, NextRef} =
+ {PreCases, NextMod, NextRef} =
do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{skip_case,{conf,_,{Mod,_},_}}=Case|Cases], LastMod, LastRef)
when Mod =/= LastMod ->
- {PreCases, NextMod, NextRef} =
+ {PreCases, NextMod, NextRef} =
do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{skip_case,_}=Case|Cases], LastMod, LastRef) ->
[Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
-add_init_and_end_per_suite([{conf,_,_,{Mod,_}}=Case|Cases], LastMod, LastRef)
+add_init_and_end_per_suite([{conf,_,_,{Mod,_}}=Case|Cases], LastMod, LastRef)
when Mod =/= LastMod ->
- {PreCases, NextMod, NextRef} =
+ {PreCases, NextMod, NextRef} =
do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{conf,_,_,_}=Case|Cases], LastMod, LastRef) ->
[Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
-add_init_and_end_per_suite([{Mod,_}=Case|Cases], LastMod, LastRef)
+add_init_and_end_per_suite([{Mod,_}=Case|Cases], LastMod, LastRef)
when Mod =/= LastMod ->
- {PreCases, NextMod, NextRef} =
+ {PreCases, NextMod, NextRef} =
do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{Mod,_,_}=Case|Cases], LastMod, LastRef)
when Mod =/= LastMod ->
- {PreCases, NextMod, NextRef} =
+ {PreCases, NextMod, NextRef} =
do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([Case|Cases], LastMod, LastRef)->
@@ -1965,7 +2007,7 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod) ->
false -> code:load_file(Mod);
_ -> ok
end,
- {Init,NextMod,NextRef} =
+ {Init,NextMod,NextRef} =
case erlang:function_exported(Mod, init_per_suite, 1) of
true ->
Ref = make_ref(),
@@ -1973,15 +2015,15 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod) ->
false ->
{[],Mod,undefined}
end,
- Cases =
+ Cases =
if LastRef==undefined ->
Init;
LastRef==skipped_suite ->
Init;
true ->
- %% Adding end_per_suite here without checking if the
+ %% Adding end_per_suite here without checking if the
%% function is actually exported. This is because a
- %% conf case must have an end case - so if it doesn't
+ %% conf case must have an end case - so if it doesn't
%% exist, it will only fail...
[{conf,LastRef,[],{LastMod,end_per_suite}}|Init]
end,
@@ -1997,12 +2039,12 @@ do_add_end_per_suite_and_skip(LastMod, LastRef, Mod) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% run_test_cases(TestSpec, Config, MultiplyTimetrap) -> exit(Result)
+%% run_test_cases(TestSpec, Config, TimetrapData) -> exit(Result)
%%
%% If remote target, a socket connection is established.
%% Runs the specified tests, then displays/logs the summary.
-run_test_cases(TestSpec, Config, MultiplyTimetrap) ->
+run_test_cases(TestSpec, Config, TimetrapData) ->
maybe_open_job_sock(),
@@ -2010,10 +2052,10 @@ run_test_cases(TestSpec, Config, MultiplyTimetrap) ->
%%! For readable tracing...
%%! Config1 = [{data_dir,""},{priv_dir,""},{nodes,[]}],
- %%! run_test_cases_loop(TestSpec, [[]], MultiplyTimetrap, [], []),
+ %%! run_test_cases_loop(TestSpec, [[]], TimetrapData, [], []),
+
+ run_test_cases_loop(TestSpec, [Config], TimetrapData, [], []),
- run_test_cases_loop(TestSpec, [Config], MultiplyTimetrap, [], []),
-
maybe_get_privdir(),
{AllSkippedN,UserSkipN,AutoSkipN,SkipStr} =
@@ -2060,10 +2102,10 @@ maybe_open_job_sock() ->
%% tar packet containing the privdir created by the test case.
maybe_get_privdir() ->
case get(test_server_ctrl_job_sock) of
- undefined ->
+ undefined ->
%% local target
ok;
- Sock ->
+ Sock ->
%% remote target
request(Sock, job_done),
gen_tcp:close(Sock)
@@ -2071,37 +2113,39 @@ maybe_get_privdir() ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% run_test_cases_loop(TestCases, Config, MultiplyTimetrap, Mode, Status) -> ok
+%% run_test_cases_loop(TestCases, Config, TimetrapData, Mode, Status) -> ok
%% TestCases = [Test,...]
%% Config = [[{Key,Val},...],...]
+%% TimetrapData = {MultiplyTimetrap,ScaleTimetrap}
%% MultiplyTimetrap = integer() | infinity
+%% ScaleTimetrap = bool()
%% Mode = [{Ref,[Prop,..],StartTime}]
%% Ref = reference()
-%% Prop = {name,Name} | sequence | parallel |
+%% Prop = {name,Name} | sequence | parallel |
%% shuffle | {shuffle,Seed} |
-%% repeat | {repeat,N} |
+%% repeat | {repeat,N} |
%% repeat_until_all_ok | {repeat_until_all_ok,N} |
-%% repeat_until_any_ok | {repeat_until_any_ok,N} |
-%% repeat_until_any_fail | {repeat_until_any_fail,N} |
-%% repeat_until_all_fail | {repeat_until_all_fail,N}
+%% repeat_until_any_ok | {repeat_until_any_ok,N} |
+%% repeat_until_any_fail | {repeat_until_any_fail,N} |
+%% repeat_until_all_fail | {repeat_until_all_fail,N}
%% Status = [{Ref,{{Ok,Skipped,Failed},CopiedCases}}]
%% Ok = Skipped = Failed = [Case,...]
%%
%% Execute the TestCases under configuration Config. Config is a list
%% of lists, where hd(Config) holds the config tuples for the current
-%% conf case and tl(Config) is the data for the higher level conf cases.
-%% Config data is "inherited" from top to nested conf cases, but
+%% conf case and tl(Config) is the data for the higher level conf cases.
+%% Config data is "inherited" from top to nested conf cases, but
%% never the other way around. if length(Config) == 1, Config contains
%% only the initial config data for the suite.
%%
%% Test may be one of the following:
%%
-%% {conf,Ref,Props,{Mod,Func}} Mod:Func is a configuration modification
+%% {conf,Ref,Props,{Mod,Func}} Mod:Func is a configuration modification
%% function, call it with the current configuration as argument. It will
%% return a new configuration.
%%
-%% {make,Ref,{Mod,Func,Args}} Mod:Func is a make function, and it is called
-%% with the given arguments. This function will *always* be called on the host
+%% {make,Ref,{Mod,Func,Args}} Mod:Func is a make function, and it is called
+%% with the given arguments. This function will *always* be called on the host
%% - not on target.
%%
%% {Mod,Case} This is a normal test case. Determine the correct
@@ -2114,16 +2158,16 @@ maybe_get_privdir() ->
%% {skip_case,{conf,Ref,Case,Comment}} An init conf case gets skipped
%% by the user. This will also cause the end conf case to be skipped.
%% Note that it is not possible to skip an end conf case directly (it
-%% can only be skipped indirectly by a skipped init conf case). The
-%% comment (which gets printed in the log files) describes why the case
+%% can only be skipped indirectly by a skipped init conf case). The
+%% comment (which gets printed in the log files) describes why the case
%% was skipped.
%%
-%% {skip_case,{Case,Comment}} A normal test case skipped by the user.
-%% The comment (which gets printed in the log files) describes why the
+%% {skip_case,{Case,Comment}} A normal test case skipped by the user.
+%% The comment (which gets printed in the log files) describes why the
%% case was skipped.
%%
%% {auto_skip_case,{conf,Ref,Case,Comment},Mode} This is the result of
-%% an end conf case being automatically skipped due to a failing init
+%% an end conf case being automatically skipped due to a failing init
%% conf case. It could also be a nested conf case that gets skipped
%% because of a failed or skipped top level conf.
%%
@@ -2151,25 +2195,25 @@ maybe_get_privdir() ->
%% messages to the main process instead of writing the data to file
%% (only true for printouts to common log files).
%%
-%% If a conf group nested under a parallel group in the test
+%% If a conf group nested under a parallel group in the test
%% specification should be started, the 'test_server_common_io_handler'
%% value gets set also on the main process. This causes all printouts
-%% to common files - both from parallel test cases and from cases
+%% to common files - both from parallel test cases and from cases
%% executed by the main process - to all end up as messages in the
-%% inbox of the main process.
+%% inbox of the main process.
%%
%% During execution of a parallel group (or of a group nested under a
-%% parallel group), *any* new test case being started gets registered
+%% parallel group), *any* new test case being started gets registered
%% in a list saved in the dictionary with 'test_server_queued_io' as key.
%% When the top level parallel group is finished (only then can we be
%% sure all parallel test cases have finished and "reported in"), the
-%% list of test cases is traversed in order and printout messages from
-%% each process - including the main process - are handled in turn. See
+%% list of test cases is traversed in order and printout messages from
+%% each process - including the main process - are handled in turn. See
%% handle_test_case_io_and_status/0 for details.
%%
%% To be able to handle nested conf groups with different properties,
%% the Mode argument specifies a list of {Ref,Properties} tuples.
-%% The head of the Mode list at any given time identifies the group
+%% The head of the Mode list at any given time identifies the group
%% currently being processed. The tail of the list identifies groups
%% on higher level.
%%
@@ -2179,13 +2223,13 @@ maybe_get_privdir() ->
%%
%% A group nested under a parallel group will start executing in
%% parallel with previous (parallel) test cases (no matter what
-%% properties the nested group has). Test cases are however never
+%% properties the nested group has). Test cases are however never
%% executed in parallel with the start or end conf case of the same
%% group! Because of this, the test_server_ctrl loop waits at
%% the end conf of a group for all parallel cases to finish
%% before the end conf case actually executes. This has the effect
%% that it's only after a nested group has finished that any
-%% remaining parallel cases in the previous group get spawned (*).
+%% remaining parallel cases in the previous group get spawned (*).
%% Example (all parallel cases):
%%
%% group1_init |---->
@@ -2201,7 +2245,7 @@ maybe_get_privdir() ->
%%
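
As a concrete illustration of the types documented above, the sketch below builds example values in exactly those shapes, for a shuffled group nested under a parallel group. All names and numbers are made up:

-module(mode_sketch).
-export([example/0]).

example() ->
    TimetrapData = {1, true},                 %% {MultiplyTimetrap,ScaleTimetrap}
    TopRef   = make_ref(),
    GroupRef = make_ref(),
    Mode   = [{GroupRef, [{name,inner_group}, shuffle], now()},
              {TopRef,   [parallel],                    now()}],
    Status = [{GroupRef, {{[],[],[]}, []}}],  %% {{Ok,Skipped,Failed},CopiedCases}
    {TimetrapData, Mode, Status}.
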
run_test_cases_loop([{auto_skip_case,{Type,Ref,Case,Comment},SkipMode}|Cases],
- Config, MultiplyTimetrap, Mode, Status) when Type==conf;
+ Config, TimetrapData, Mode, Status) when Type==conf;
Type==make ->
file:set_cwd(filename:dirname(get(test_server_dir))),
@@ -2217,24 +2261,24 @@ run_test_cases_loop([{auto_skip_case,{Type,Ref,Case,Comment},SkipMode}|Cases],
set_io_buffering(undefined),
{Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, false, SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ run_test_cases_loop(Cases, Config, TimetrapData, tl(Mode),
delete_status(Ref, Status));
_ ->
- %% this is a skipped end conf for a parallel group nested under a
+ %% this is a skipped end conf for a parallel group nested under a
%% parallel group (io buffering is active)
wait_for_cases(Ref),
{Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, true, SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
case CurrIOHandler of
- {Ref,_} ->
+ {Ref,_} ->
%% current_io_handler was set by start conf of this
- %% group, so we can unset it now (no more io from main
+ %% group, so we can unset it now (no more io from main
%% process needs to be buffered)
set_io_buffering(undefined);
- _ ->
+ _ ->
ok
end,
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ run_test_cases_loop(Cases, Config, TimetrapData, tl(Mode),
delete_status(Ref, Status))
end;
{Ref,false} ->
@@ -2242,7 +2286,7 @@ run_test_cases_loop([{auto_skip_case,{Type,Ref,Case,Comment},SkipMode}|Cases],
%% nested under a parallel group
{Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, false, SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ run_test_cases_loop(Cases, Config, TimetrapData, tl(Mode),
delete_status(Ref, Status));
{Ref,_} ->
%% this is a skipped end conf for a non-parallel group nested under
@@ -2250,22 +2294,22 @@ run_test_cases_loop([{auto_skip_case,{Type,Ref,Case,Comment},SkipMode}|Cases],
{Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, true, SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
case CurrIOHandler of
- {Ref,_} ->
+ {Ref,_} ->
%% current_io_handler was set by start conf of this
- %% group, so we can unset it now (no more io from main
+ %% group, so we can unset it now (no more io from main
%% process needs to be buffered)
set_io_buffering(undefined);
- _ ->
+ _ ->
ok
end,
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ run_test_cases_loop(Cases, Config, TimetrapData, tl(Mode),
delete_status(Ref, Status));
{_,false} ->
%% this is a skipped start conf for a group which is not nested
%% under a parallel group
{Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, false, SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, [conf(Ref,[])|Mode], Status);
+ run_test_cases_loop(Cases, Config, TimetrapData, [conf(Ref,[])|Mode], Status);
{_,Ref0} when is_reference(Ref0) ->
%% this is a skipped start conf for a group nested under a parallel group
%% and if this is the first nested group, io buffering must be activated
@@ -2276,19 +2320,19 @@ run_test_cases_loop([{auto_skip_case,{Type,Ref,Case,Comment},SkipMode}|Cases],
end,
{Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, true, SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, [conf(Ref,[])|Mode], Status)
- end;
+ run_test_cases_loop(Cases, Config, TimetrapData, [conf(Ref,[])|Mode], Status)
+ end;
run_test_cases_loop([{auto_skip_case,{Case,Comment},SkipMode}|Cases],
- Config, MultiplyTimetrap, Mode, Status) ->
+ Config, TimetrapData, Mode, Status) ->
{Mod,Func} = skip_case(auto, undefined, get(test_server_case_num)+1, Case, Comment,
(undefined /= get(test_server_common_io_handler)), SkipMode),
test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode,
+ run_test_cases_loop(Cases, Config, TimetrapData, Mode,
update_status(skipped, Mod, Func, Status));
run_test_cases_loop([{skip_case,{conf,Ref,Case,Comment}}|Cases0],
- Config, MultiplyTimetrap, Mode, Status) ->
+ Config, TimetrapData, Mode, Status) ->
{Mod,Func} = skip_case(user, Ref, 0, Case, Comment,
(undefined /= get(test_server_common_io_handler))),
{Cases,Config1} =
@@ -2301,24 +2345,24 @@ run_test_cases_loop([{skip_case,{conf,Ref,Case,Comment}}|Cases0],
{skip_cases_upto(Ref, Cases0, Comment, conf, Mode),Config}
end,
test_server_sup:framework_call(report, [tc_user_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config1, MultiplyTimetrap, Mode,
+ run_test_cases_loop(Cases, Config1, TimetrapData, Mode,
update_status(skipped, Mod, Func, Status));
run_test_cases_loop([{skip_case,{Case,Comment}}|Cases],
- Config, MultiplyTimetrap, Mode, Status) ->
+ Config, TimetrapData, Mode, Status) ->
{Mod,Func} = skip_case(user, undefined, get(test_server_case_num)+1, Case, Comment,
(undefined /= get(test_server_common_io_handler))),
test_server_sup:framework_call(report, [tc_user_skip,{?pl2a(Mod),Func,Comment}]),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode,
+ run_test_cases_loop(Cases, Config, TimetrapData, Mode,
update_status(skipped, Mod, Func, Status));
%% a start *or* end conf case, wrapping test cases or other conf cases
-run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
- Config, MultiplyTimetrap, Mode0, Status) ->
-
+run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
+ Config, TimetrapData, Mode0, Status) ->
+
CurrIOHandler = get(test_server_common_io_handler),
%% check and update the mode for test case execution and io msg handling
- {StartConf,Mode,IOHandler,ConfTime,Status1} =
+ {StartConf,Mode,IOHandler,ConfTime,Status1} =
case {curr_ref(Mode0),check_props(parallel, Mode0)} of
{Ref,Ref} ->
case check_props(parallel, tl(Mode0)) of
@@ -2334,19 +2378,19 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
{false,tl(Mode0),undefined,Elapsed,
update_status(Ref, OkSkipFail, Status)};
_ ->
- %% this is an end conf for a parallel group nested under a
+ %% this is an end conf for a parallel group nested under a
%% parallel group (io buffering is active)
OkSkipFail = wait_for_cases(Ref),
queue_test_case_io(Ref, self(), 0, Mod, Func),
Elapsed = elapsed_time(conf_start(Ref, Mode0),?now)/1000000,
case CurrIOHandler of
- {Ref,_} ->
+ {Ref,_} ->
%% current_io_handler was set by start conf of this
%% group, so we can unset it after this case (no
%% more io from main process needs to be buffered)
{false,tl(Mode0),undefined,Elapsed,
update_status(Ref, OkSkipFail, Status)};
- _ ->
+ _ ->
{false,tl(Mode0),CurrIOHandler,Elapsed,
update_status(Ref, OkSkipFail, Status)}
end
@@ -2362,16 +2406,16 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
queue_test_case_io(Ref, self(), 0, Mod, Func),
Elapsed = elapsed_time(conf_start(Ref, Mode0),?now)/1000000,
case CurrIOHandler of
- {Ref,_} ->
+ {Ref,_} ->
%% current_io_handler was set by start conf of this
%% group, so we can unset it after this case (no
%% more io from main process needs to be buffered)
{false,tl(Mode0),undefined,Elapsed,Status};
- _ ->
+ _ ->
{false,tl(Mode0),CurrIOHandler,Elapsed,Status}
end;
{_,false} ->
- %% this is a start conf for a group which is not nested under a
+ %% this is a start conf for a group which is not nested under a
%% parallel group, check if this case starts a new parallel group
case lists:member(parallel, Props) of
true ->
@@ -2424,9 +2468,9 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
end;
NumStr ->
%% Ex: "123 456 789" or "123,456,789" -> {123,456,789}
- list_to_tuple([list_to_integer(NS) ||
+ list_to_tuple([list_to_integer(NS) ||
NS <- string:tokens(NumStr, [$ ,$:,$,])])
- end,
+ end,
{shuffle_cases(Ref, Cs0, UseSeed),{shuffle,UseSeed}}
end;
not StartConf ->
@@ -2440,17 +2484,17 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
if StartConf ->
case get_repeat(Props) of
undefined ->
- %% we *must* have a status entry for every conf since we
+ %% we *must* have a status entry for every conf since we
			%% will continuously update status with test case results
%% without knowing the Ref (but update hd(Status))
{false,new_status(Ref, Status1),Cases1,?void_fun};
- _ ->
+ _ ->
{Copied,_} = copy_cases(Ref, make_ref(), Cs1),
{true,new_status(Ref, Copied, Status1),Cases1,?void_fun}
end;
not StartConf ->
RepVal = get_repeat(get_props(Mode0)),
- ReportStop =
+ ReportStop =
fun() ->
print(minor, "~n*** Stopping repeat operation ~w", [RepVal]),
print(1, "Stopping repeat operation ~w", [RepVal])
@@ -2465,17 +2509,17 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
{true,EndStatus,CopiedCases++Cases1,?void_fun};
{repeat_until_all_ok,_} ->
{RestCs,Fun} = case get_tc_results(Status1) of
- {_,_,[]} ->
+ {_,_,[]} ->
{Cases1,ReportStop};
- _ ->
+ _ ->
{CopiedCases++Cases1,?void_fun}
end,
{true,EndStatus,RestCs,Fun};
{repeat_until_any_ok,_} ->
{RestCs,Fun} = case get_tc_results(Status1) of
- {Ok,_,_} when length(Ok) > 0 ->
+ {Ok,_,_} when length(Ok) > 0 ->
{Cases1,ReportStop};
- _ ->
+ _ ->
{CopiedCases++Cases1,?void_fun}
end,
{true,EndStatus,RestCs,Fun};
@@ -2483,15 +2527,15 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
{RestCs,Fun} = case get_tc_results(Status1) of
{_,_,Fails} when length(Fails) > 0 ->
{Cases1,ReportStop};
- _ ->
+ _ ->
{CopiedCases++Cases1,?void_fun}
end,
{true,EndStatus,RestCs,Fun};
{repeat_until_all_fail,_} ->
{RestCs,Fun} = case get_tc_results(Status1) of
- {[],_,_} ->
+ {[],_,_} ->
{Cases1,ReportStop};
- _ ->
+ _ ->
{CopiedCases++Cases1,?void_fun}
end,
{true,EndStatus,RestCs,Fun}
@@ -2517,13 +2561,13 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
[{tc_group_properties,get_props(Mode0)},
{tc_group_result,[{ok,TcOk},{skipped,TcSkip},{failed,TcFail}]}]
end,
- ActualCfg =
+ ActualCfg =
update_config(hd(Config), [{priv_dir,get(test_server_priv_dir)},
{data_dir,get_data_dir(Mod)}] ++ CfgProps),
CurrMode = curr_mode(Ref, Mode0, Mode),
- ConfCaseResult = run_test_case(Ref, 0, Mod, Func, [ActualCfg], skip_init, target,
- MultiplyTimetrap, CurrMode),
+ ConfCaseResult = run_test_case(Ref, 0, Mod, Func, [ActualCfg], skip_init, target,
+ TimetrapData, CurrMode),
case ConfCaseResult of
{_,NewCfg,_} when Func == init_per_suite, is_list(NewCfg) ->
@@ -2533,8 +2577,8 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
[] ->
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases, [NewCfg|Config],
- MultiplyTimetrap, Mode, Status2);
+ run_test_cases_loop(Cases, [NewCfg|Config],
+ TimetrapData, Mode, Status2);
Bad ->
print(minor, "~n*** ~p returned bad elements in Config: ~p.~n",
[Func,Bad]),
@@ -2542,22 +2586,22 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
Cases2 = skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode,
+ run_test_cases_loop(Cases2, Config, TimetrapData, Mode,
delete_status(Ref, Status2))
- end;
+ end;
{_,NewCfg,_} when StartConf, is_list(NewCfg) ->
print_conf_time(ConfTime),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases, [NewCfg|Config], MultiplyTimetrap, Mode, Status2);
+ run_test_cases_loop(Cases, [NewCfg|Config], TimetrapData, Mode, Status2);
{_,{framework_error,{FwMod,FwFunc},Reason},_} ->
print(minor, "~n*** ~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
print(1, "~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
exit(framework_error);
- {_,Fail,_} when element(1,Fail) == 'EXIT';
+ {_,Fail,_} when element(1,Fail) == 'EXIT';
element(1,Fail) == timetrap_timeout;
element(1,Fail) == failed ->
- {Cases2,Config1} =
+ {Cases2,Config1} =
if StartConf ->
ReportAbortRepeat(failed),
print(minor, "~n*** ~p failed.~n"
@@ -2571,7 +2615,7 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
end,
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases2, Config1, MultiplyTimetrap, Mode,
+ run_test_cases_loop(Cases2, Config1, TimetrapData, Mode,
delete_status(Ref, Status2));
{died,Why,_} when Func == init_per_suite ->
print(minor, "~n*** Unexpected exit during init_per_suite.~n", []),
@@ -2579,16 +2623,16 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
Cases2 = skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode,
- delete_status(Ref, Status2));
+ run_test_cases_loop(Cases2, Config, TimetrapData, Mode,
+ delete_status(Ref, Status2));
{_,{Skip,Reason},_} when StartConf and ((Skip==skip) or (Skip==skipped)) ->
ReportAbortRepeat(skipped),
print(minor, "~n*** ~p skipped.~n"
" Skipping all cases.", [Func]),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
- Config, MultiplyTimetrap, Mode,
+ run_test_cases_loop(skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ Config, TimetrapData, Mode,
delete_status(Ref, Status2));
{_,{skip_and_save,Reason,_SavedConfig},_} when StartConf ->
ReportAbortRepeat(skipped),
@@ -2596,8 +2640,8 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
" Skipping all cases.", [Func]),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
- Config, MultiplyTimetrap, Mode,
+ run_test_cases_loop(skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ Config, TimetrapData, Mode,
delete_status(Ref, Status2));
{_,_Other,_} when Func == init_per_suite ->
print(minor, "~n*** init_per_suite failed to return a Config list.~n", []),
@@ -2605,16 +2649,16 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
Cases2 = skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode,
+ run_test_cases_loop(Cases2, Config, TimetrapData, Mode,
delete_status(Ref, Status2));
{_,_Other,_} when StartConf ->
print_conf_time(ConfTime),
set_io_buffering(IOHandler),
ReportRepeatStop(),
stop_minor_log_file(),
- run_test_cases_loop(Cases, [hd(Config)|Config], MultiplyTimetrap,
+ run_test_cases_loop(Cases, [hd(Config)|Config], TimetrapData,
Mode, Status2);
-
+
{_,_EndConfRetVal,Opts} ->
%% check if return_group_result is set (ok, skipped or failed) and
%% if so return the value to the group "above" so that result may be
@@ -2631,35 +2675,35 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
ReportRepeatStop(),
set_io_buffering(IOHandler),
stop_minor_log_file(),
- run_test_cases_loop(Cases, tl(Config), MultiplyTimetrap, Mode, Status3)
+ run_test_cases_loop(Cases, tl(Config), TimetrapData, Mode, Status3)
end;
-run_test_cases_loop([{make,Ref,{Mod,Func,Args}}|Cases0], Config, MultiplyTimetrap, Mode, Status) ->
- case run_test_case(Ref, 0, Mod, Func, Args, skip_init, host, MultiplyTimetrap) of
+run_test_cases_loop([{make,Ref,{Mod,Func,Args}}|Cases0], Config, TimetrapData, Mode, Status) ->
+ case run_test_case(Ref, 0, Mod, Func, Args, skip_init, host, TimetrapData) of
{_,Why={'EXIT',_},_} ->
print(minor, "~n*** ~p failed.~n"
" Skipping all cases.", [Func]),
Reason = {failed,{Mod,Func,Why}},
Cases = skip_cases_upto(Ref, Cases0, Reason, conf, Mode),
stop_minor_log_file(),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status);
+ run_test_cases_loop(Cases, Config, TimetrapData, Mode, Status);
{_,_Whatever,_} ->
stop_minor_log_file(),
- run_test_cases_loop(Cases0, Config, MultiplyTimetrap, Mode, Status)
+ run_test_cases_loop(Cases0, Config, TimetrapData, Mode, Status)
end;
-run_test_cases_loop([{conf,_Ref,_Props,_X}=Conf|_Cases0],
- Config, _MultiplyTimetrap, _Mode, _Status) ->
+run_test_cases_loop([{conf,_Ref,_Props,_X}=Conf|_Cases0],
+ Config, _TimetrapData, _Mode, _Status) ->
erlang:error(badarg, [Conf,Config]);
-run_test_cases_loop([{Mod,Case}|Cases], Config, MultiplyTimetrap, Mode, Status) ->
- ActualCfg =
+run_test_cases_loop([{Mod,Case}|Cases], Config, TimetrapData, Mode, Status) ->
+ ActualCfg =
update_config(hd(Config), [{priv_dir,get(test_server_priv_dir)},
{data_dir,get_data_dir(Mod)}]),
run_test_cases_loop([{Mod,Case,[ActualCfg]}|Cases], Config,
- MultiplyTimetrap, Mode, Status);
+ TimetrapData, Mode, Status);
-run_test_cases_loop([{Mod,Func,Args}|Cases], Config, MultiplyTimetrap, Mode, Status) ->
+run_test_cases_loop([{Mod,Func,Args}|Cases], Config, TimetrapData, Mode, Status) ->
Num = put(test_server_case_num, get(test_server_case_num)+1),
%% check the current execution mode and save info about the case if
%% detected that printouts to common log files is handled later
@@ -2669,15 +2713,15 @@ run_test_cases_loop([{Mod,Func,Args}|Cases], Config, MultiplyTimetrap, Mode, Sta
undefined ->
%% io printouts are written to straight to file
ok;
- _ ->
+ _ ->
%% io messages are buffered, put test case in queue
queue_test_case_io(undefined, self(), Num+1, Mod, Func)
end;
_ ->
ok
end,
- case run_test_case(undefined, Num+1, Mod, Func, Args,
- run_init, target, MultiplyTimetrap, Mode) of
+ case run_test_case(undefined, Num+1, Mod, Func, Args,
+ run_init, target, TimetrapData, Mode) of
%% callback to framework module failed, exit immediately
{_,{framework_error,{FwMod,FwFunc},Reason},_} ->
print(minor, "~n*** ~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
@@ -2688,50 +2732,50 @@ run_test_cases_loop([{Mod,Func,Args}|Cases], Config, MultiplyTimetrap, Mode, Sta
{Time,RetVal,_} ->
{Failed,Status1} =
case Time of
- died ->
+ died ->
{true,update_status(failed, Mod, Func, Status)};
_ when is_tuple(RetVal) ->
case element(1, RetVal) of
- R when R=='EXIT'; R==failed ->
+ R when R=='EXIT'; R==failed ->
{true,update_status(failed, Mod, Func, Status)};
- R when R==skip; R==skipped ->
+ R when R==skip; R==skipped ->
{false,update_status(skipped, Mod, Func, Status)};
- _ ->
+ _ ->
{false,update_status(ok, Mod, Func, Status)}
end;
- _ ->
+ _ ->
{false,update_status(ok, Mod, Func, Status)}
- end,
+ end,
case check_prop(sequence, Mode) of
false ->
stop_minor_log_file(),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status1);
- Ref ->
- %% the case is in a sequence; we must check the result and
+ run_test_cases_loop(Cases, Config, TimetrapData, Mode, Status1);
+ Ref ->
+ %% the case is in a sequence; we must check the result and
%% determine if the following cases should run or be skipped
if not Failed -> % proceed with next case
stop_minor_log_file(),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status1);
+ run_test_cases_loop(Cases, Config, TimetrapData, Mode, Status1);
true -> % skip rest of cases in sequence
print(minor, "~n*** ~p failed.~n"
" Skipping all other cases in sequence.", [Func]),
Reason = {failed,{Mod,Func}},
Cases2 = skip_cases_upto(Ref, Cases, Reason, tc, Mode),
stop_minor_log_file(),
- run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode, Status1)
+ run_test_cases_loop(Cases2, Config, TimetrapData, Mode, Status1)
end
end;
%% the test case is being executed in parallel with the main process (and
%% other test cases) and Pid is the dedicated process executing the case
Pid ->
- %% io from Pid will be buffered in the main process inbox and handled
+ %% io from Pid will be buffered in the main process inbox and handled
%% later, so we have to save info about the case
queue_test_case_io(undefined, Pid, Num+1, Mod, Func),
- run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status)
+ run_test_cases_loop(Cases, Config, TimetrapData, Mode, Status)
end;
%% TestSpec processing finished
-run_test_cases_loop([], _Config, _MultiplyTimetrap, _, _) ->
+run_test_cases_loop([], _Config, _TimetrapData, _, _) ->
ok.
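
A simplified sketch of the sequence rule handled in the clause above: once one case in a sequence fails, the remaining cases are rewritten as auto-skips. The real skip_cases_upto/5 also respects group boundaries and the current Mode; the helper below is illustrative only:

-module(sequence_sketch).
-export([skip_rest_of_sequence/2]).

skip_rest_of_sequence(FailedMF, RemainingCases) ->
    Reason = {failed, FailedMF},
    [{auto_skip_case, {Case, Reason}, []} || Case <- RemainingCases].
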
%%--------------------------------------------------------------------
@@ -2798,12 +2842,12 @@ check_props(Attrib, Mode) ->
case [R || {R,Ps,_} <- Mode, lists:member(Attrib, Ps)] of
[] -> false;
[Ref|_] -> Ref
- end.
+ end.
get_name([{_Ref,Props,_}|_]) ->
proplists:get_value(name, Props);
get_name([]) ->
- undefined.
+ undefined.
conf_start(Ref, Mode) ->
case lists:keysearch(Ref, 1, Mode) of
@@ -2826,10 +2870,10 @@ print_conf_time(0) ->
print_conf_time(ConfTime) ->
print(major, "=group_time ~.3fs", [ConfTime]),
print(minor, "~n=== Total execution time of group: ~.3fs~n", [ConfTime]).
-
-print_props(_, []) ->
+
+print_props(_, []) ->
ok;
-print_props(true, Props) ->
+print_props(true, Props) ->
print(major, "=group_props ~p", [Props]),
print(minor, "Group properties: ~p~n", [Props]);
print_props(_, _) ->
@@ -2858,7 +2902,7 @@ update_repeat(Props) ->
N >= 2 ->
[{RepType,N-1}|lists:keydelete(RepType, 1, Props)]
end,
- %% if shuffle is used in combination with repeat, a new
+ %% if shuffle is used in combination with repeat, a new
%% seed shouldn't be set every new turn
case get_shuffle(Props1) of
undefined ->
@@ -2874,13 +2918,13 @@ get_shuffle(Props) ->
delete_shuffle(Props) ->
delete_prop([shuffle], Props).
-%% Return {Item,Value} if found, else if Item alone
+%% Return {Item,Value} if found, else if Item alone
%% is found, return {Item,Default}
get_prop([Item|Items], Default, Props) ->
case lists:keysearch(Item, 1, Props) of
- {value,R} ->
+ {value,R} ->
R;
- false ->
+ false ->
case lists:member(Item, Props) of
true ->
{Item,Default};
@@ -2940,8 +2984,8 @@ random_order(1, {_Pos,Seed}, [{_Ix,CaseOrGroup}], Shuffled) ->
put(test_server_curr_random_seed, Seed),
Shuffled++CaseOrGroup;
random_order(N, {Pos,NewSeed}, IxCases, Shuffled) ->
- {First,[{_Ix,CaseOrGroup}|Rest]} = lists:split(Pos-1, IxCases),
- random_order(N-1, random:uniform_s(N-1, NewSeed),
+ {First,[{_Ix,CaseOrGroup}|Rest]} = lists:split(Pos-1, IxCases),
+ random_order(N-1, random:uniform_s(N-1, NewSeed),
First++Rest, Shuffled++CaseOrGroup).
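
random_order/4 above repeatedly draws a position with random:uniform_s/2 and moves that element to the output, so the same seed always gives the same order. The same idea on a plain list, as a self-contained sketch:

-module(shuffle_sketch).
-export([shuffle/2]).

shuffle(List, Seed0) ->
    shuffle(length(List), List, Seed0, []).

shuffle(0, [], _Seed, Acc) ->
    lists:reverse(Acc);
shuffle(N, List, Seed0, Acc) ->
    {Pos, Seed} = random:uniform_s(N, Seed0),
    {First, [Pick|Rest]} = lists:split(Pos-1, List),
    shuffle(N-1, First ++ Rest, Seed, [Pick|Acc]).

%% shuffle("abcde", {3,7,11}) returns the same permutation on every call,
%% which is what makes a logged shuffle seed reproducible.
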
@@ -2949,7 +2993,7 @@ random_order(N, {Pos,NewSeed}, IxCases, Shuffled) ->
%% skip_case(Type, Ref, CaseNum, Case, Comment, SendSync) -> {Mod,Func}
%%
%% Prints info about a skipped case in the major and html log files.
-%% SendSync determines if start and finished messages must be sent so
+%% SendSync determines if start and finished messages must be sent so
%% that the printouts can be buffered and handled in order with io from
%% parallel processes.
@@ -2969,13 +3013,13 @@ skip_case(Type, Ref, CaseNum, Case, Comment, SendSync, Mode) ->
not SendSync ->
skip_case1(Type, CaseNum, Mod, Func, Comment, Mode)
end,
- MF.
+ MF.
skip_case1(Type, CaseNum, Mod, Func, Comment, Mode) ->
{{Col0,Col1},_} = get_font_style((CaseNum > 0), Mode),
ResultCol = if Type == auto -> "#ffcc99";
Type == user -> "#ff9933"
- end,
+ end,
Comment1 = reason_to_string(Comment),
@@ -3084,7 +3128,7 @@ modify_cases_upto1(Ref, ModOp, [{skip_case,{_F,_Cmt}}=MF|T], Orig, Alt) ->
%% next is a normal case (possibly in a sequence), mark as skipped, or copy, and proceed
modify_cases_upto1(Ref, {skip,Reason,_,Mode}=Op, [{_M,_F}=MF|T], Orig, Alt) ->
- modify_cases_upto1(Ref, Op, T, Orig, [{auto_skip_case,{MF,Reason},Mode}|Alt]);
+ modify_cases_upto1(Ref, Op, T, Orig, [{auto_skip_case,{MF,Reason},Mode}|Alt]);
modify_cases_upto1(Ref, CopyOp, [{_M,_F}=MF|T], Orig, Alt) ->
modify_cases_upto1(Ref, CopyOp, T, [MF|Orig], [MF|Alt]);
@@ -3110,7 +3154,7 @@ set_io_buffering(IOHandler) ->
%% queue_test_case_io(Pid, Num, Mod, Func) -> ok
%%
%% Save info about test case that gets its io buffered. This can
-%% be a parallel test case or it can be a test case (conf or normal)
+%% be a parallel test case or it can be a test case (conf or normal)
%% that belongs to a group nested under a parallel group. The queue
%% is processed after io buffering is disabled. See run_test_cases_loop/4
%% and handle_test_case_io_and_status/0 for more info.
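
A sketch of the bookkeeping described above, assuming (as the run_test_cases_loop header states) that the queue lives in the process dictionary under 'test_server_queued_io' and that new cases are appended at the end; the module and function names are made up:

-module(io_queue_sketch).
-export([queue_entry/5]).

queue_entry(Ref, Pid, Num, Mod, Func) ->
    Queued = case get(test_server_queued_io) of
                 undefined -> [];
                 Q -> Q
             end,
    put(test_server_queued_io, Queued ++ [{Ref,Pid,Num,Mod,Func}]).
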
@@ -3124,10 +3168,10 @@ queue_test_case_io(Ref, Pid, Num, Mod, Func) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% wait_for_cases(Ref) -> {Ok,Skipped,Failed}
%%
-%% At the end of a nested parallel group, we have to wait for the test
+%% At the end of a nested parallel group, we have to wait for the test
%% cases to terminate before we can go on (since test cases never execute
-%% in parallel with the end conf case of the group). When a top level
-%% parallel group is finished, buffered io messages must be handled and
+%% in parallel with the end conf case of the group). When a top level
+%% parallel group is finished, buffered io messages must be handled and
%% this is taken care of by handle_test_case_io_and_status/0.
wait_for_cases(Ref) ->
@@ -3135,15 +3179,15 @@ wait_for_cases(Ref) ->
[] ->
{[],[],[]};
Cases ->
- [_Start|TCs] =
+ [_Start|TCs] =
lists:dropwhile(fun({R,_,_,_,_}) when R == Ref -> false;
(_) -> true
end, Cases),
wait_and_resend(Ref, TCs, [],[],[])
end.
-wait_and_resend(Ref, [{OtherRef,_,0,_,_}|Ps],
- Ok,Skip,Fail) when is_reference(OtherRef),
+wait_and_resend(Ref, [{OtherRef,_,0,_,_}|Ps],
+ Ok,Skip,Fail) when is_reference(OtherRef),
OtherRef /= Ref ->
%% ignore cases that belong to nested group
Ps1 = rm_cases_upto(OtherRef, Ps),
@@ -3152,7 +3196,7 @@ wait_and_resend(Ref, [{OtherRef,_,0,_,_}|Ps],
wait_and_resend(Ref, [{_,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
receive
{finished,_Ref,CurrPid,CaseNum,Mod,Func,Result,_RetVal} = Msg ->
- %% resend message to main process so that it can be used
+ %% resend message to main process so that it can be used
%% to handle buffered io messages later
self() ! Msg,
MF = {Mod,Func},
@@ -3163,7 +3207,7 @@ wait_and_resend(Ref, [{_,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
failed -> {Ok,Skip,[MF|Fail]}
end,
wait_and_resend(Ref, Ps, Ok1,Skip1,Fail1);
- {'EXIT',CurrPid,Reason} when Reason /= normal ->
+ {'EXIT',CurrPid,Reason} when Reason /= normal ->
%% unexpected termination of test case process
{value,{_,_,CaseNum,Mod,Func}} = lists:keysearch(CurrPid, 2, Cases),
print(1, "Error! Process for test case #~p (~p:~p) died! Reason: ~p",
@@ -3186,17 +3230,17 @@ rm_cases_upto(Ref, [_|Ps]) ->
%% execution. The common log files (major, html etc) must however be
%% written to sequentially. The test case processes send print requests
%% to the main (starting) process (the same process executing
-%% run_test_cases_loop/4), which handles these requests in the same
+%% run_test_cases_loop/4), which handles these requests in the same
%% order that the test case processes were started.
%%
%% An io session is always started with a {started,Ref,Pid,Num,Mod,Func}
%% message and terminated with {finished,Ref,Pid,Num,Mod,Func,Result,RetVal}.
%% The result shipped with the finished message from a parallel process
-%% is used to update status data of the current test run. An 'EXIT'
-%% message from each parallel test case process (after finishing and
+%% is used to update status data of the current test run. An 'EXIT'
+%% message from each parallel test case process (after finishing and
%% terminating) is also received and handled here.
%%
-%% During execution of a parallel group, any cases (conf or normal)
+%% During execution of a parallel group, any cases (conf or normal)
%% belonging to a nested group will also get its io printouts buffered.
%% This is necessary to get the major and html log files written in
%% correct sequence. This function also handles the print messages
@@ -3207,7 +3251,7 @@ rm_cases_upto(Ref, [_|Ps]) ->
%% See the header comment for run_test_cases_loop/4 for more
%% info about IO handling.
%%
-%% Note: It is important that the type of messages handled here
+%% Note: It is important that the type of messages handled here
%% do not get consumed by test_server:run_test_case_msgloop/5
%% during the test case execution (e.g. in the catch clause of
%% the receive)!
@@ -3231,7 +3275,7 @@ handle_test_case_io_and_status() ->
ok
end, Cases),
Result
- end.
+ end.
%% Handle cases (without Ref) that belong to the top parallel group (i.e. when Refs = [])
handle_io_and_exit_loop([], [{undefined,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
@@ -3249,7 +3293,7 @@ handle_io_and_exit_loop([], [{undefined,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, O
1000 ->
exit({testcase_failed_to_start,Mod,Func})
end;
-
+
%% Handle cases that belong to groups nested under top parallel group
handle_io_and_exit_loop(Refs, [{Ref,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
receive
@@ -3269,7 +3313,7 @@ handle_io_and_exit_loop(Refs, [{Ref,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Sk
1000 ->
exit({testcase_failed_to_start,Mod,Func})
end;
-
+
handle_io_and_exit_loop(_, [], Ok,Skip,Fail) ->
{lists:reverse(Ok),lists:reverse(Skip),lists:reverse(Fail)}.
@@ -3286,7 +3330,7 @@ handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases) ->
failed ->
put(test_server_failed, get(test_server_failed)+1);
skipped ->
- SkipCounters =
+ SkipCounters =
update_skip_counters(RetVal, get(test_server_skipped)),
put(test_server_skipped, SkipCounters)
end,
@@ -3298,7 +3342,7 @@ handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases) ->
handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases);
%% unexpected termination of test case process
- {'EXIT',TCPid,Reason} when Reason /= normal ->
+ {'EXIT',TCPid,Reason} when Reason /= normal ->
{value,{_,_,Num,M,F}} = lists:keysearch(TCPid, 2, Cases),
print(1, "Error! Process for test case #~p (~p:~p) died! Reason: ~p",
[Num, M, F, Reason]),
@@ -3307,65 +3351,65 @@ handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% run_test_case(Ref, Num, Mod, Func, Args, RunInit,
-%% Where, MultiplyTimetrap, Mode) -> RetVal
+%% run_test_case(Ref, Num, Mod, Func, Args, RunInit,
+%% Where, TimetrapData, Mode) -> RetVal
%%
%% Creates the minor log file and inserts some test case specific headers
-%% and footers into the log files. If a remote target is used, the test
+%% and footers into the log files. If a remote target is used, the test
%% suite (binary) and the content of data_dir is sent. Then the test case
-%% is executed and the result is printed to the log files (also info
+%% is executed and the result is printed to the log files (also info
%% about lingering processes & slave nodes in the system is presented).
-%%
+%%
%% RunInit decides if the per test case init is to be run (true for all
%% but conf cases).
%%
-%% Where specifies if the test case should run on target or on the host.
+%% Where specifies if the test case should run on target or on the host.
%% (Note that 'make' test cases always run on host).
-%%
+%%
%% Mode specifies if the test case should be executed by a dedicated,
%% parallel, process rather than sequentially by the main process. If
%% the former, the new process is spawned and the dictionary of the main
%% process is copied to the test case process.
-%%
-%% RetVal is the result of executing the test case. It contains info
+%%
+%% RetVal is the result of executing the test case. It contains info
%% about the execution time and the return value of the test case function.
-run_test_case(Ref, Num, Mod, Func, Args, RunInit, Where, MultiplyTimetrap) ->
+run_test_case(Ref, Num, Mod, Func, Args, RunInit, Where, TimetrapData) ->
file:set_cwd(filename:dirname(get(test_server_dir))),
- run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
- MultiplyTimetrap, [], [], self()).
+ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+ TimetrapData, [], [], self()).
-run_test_case(Ref, Num, Mod, Func, Args, skip_init, Where, MultiplyTimetrap, Mode) ->
+run_test_case(Ref, Num, Mod, Func, Args, skip_init, Where, TimetrapData, Mode) ->
%% a conf case is always executed by the main process
- run_test_case1(Ref, Num, Mod, Func, Args, skip_init, Where,
- MultiplyTimetrap, [], Mode, self());
+ run_test_case1(Ref, Num, Mod, Func, Args, skip_init, Where,
+ TimetrapData, [], Mode, self());
-run_test_case(Ref, Num, Mod, Func, Args, RunInit, Where, MultiplyTimetrap, Mode) ->
+run_test_case(Ref, Num, Mod, Func, Args, RunInit, Where, TimetrapData, Mode) ->
file:set_cwd(filename:dirname(get(test_server_dir))),
case check_prop(parallel, Mode) of
false ->
%% this is a sequential test case
- run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
- MultiplyTimetrap, [], Mode, self());
+ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+ TimetrapData, [], Mode, self());
_Ref ->
%% this a parallel test case, spawn the new process
Main = self(),
- {dictionary,State} = process_info(self(), dictionary),
+ {dictionary,State} = process_info(self(), dictionary),
spawn_link(fun() ->
- run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
- MultiplyTimetrap, State, Mode, Main)
- end)
+ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+ TimetrapData, State, Mode, Main)
+ end)
end.
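
The parallel branch above hinges on one trick: the child process first copies the parent's process dictionary so that all the test_server_* bookkeeping keys are in place before the case starts. That trick in isolation, as a sketch (helper name is made up):

-module(dict_copy_sketch).
-export([spawn_with_parent_dict/1]).

spawn_with_parent_dict(Fun) when is_function(Fun, 0) ->
    {dictionary, Dict} = process_info(self(), dictionary),
    spawn_link(fun() ->
                       lists:foreach(fun({K,V}) -> put(K, V) end, Dict),
                       Fun()
               end).
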
-run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
- MultiplyTimetrap, State, Mode, Main) ->
- %% if this runs on a parallel test case process,
+run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+ TimetrapData, State, Mode, Main) ->
+ %% if this runs on a parallel test case process,
%% copy the dictionary from the main process
do_if_parallel(Main, fun() -> process_flag(trap_exit, true) end, ok),
CopyDict = fun() -> lists:foreach(fun({Key,Val}) -> put(Key, Val) end, State) end,
do_if_parallel(Main, CopyDict, ok),
do_if_parallel(Main, fun() -> put(test_server_common_io_handler, {tc,Main}) end, ok),
- %% if io is being buffered, send start io session message
+ %% if io is being buffered, send start io session message
%% (no matter if case runs on parallel or main process)
case get(test_server_common_io_handler) of
undefined -> ok;
@@ -3373,7 +3417,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
end,
TSDir = get(test_server_dir),
case Where of
- target ->
+ target ->
maybe_send_beam_and_datadir(Mod);
host ->
ok
@@ -3396,8 +3440,8 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
do_if_parallel(Main, ok, fun erlang:yield/0),
%% run the test case
{Result,DetectedFail,ProcsBefore,ProcsAfter} =
- run_test_case_apply(Num, Mod, Func, Args, get_name(Mode),
- RunInit, Where, MultiplyTimetrap),
+ run_test_case_apply(Num, Mod, Func, Args, get_name(Mode),
+ RunInit, Where, TimetrapData),
{Time,RetVal,Loc,Opts,Comment} =
case Result of
Normal={_Time,_RetVal,_Loc,_Opts,_Comment} -> Normal;
@@ -3409,7 +3453,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
print(major, "=ended ~s", [lists:flatten(timestamp_get(""))]),
do_if_parallel(Main, ok, fun() -> file:set_cwd(filename:dirname(TSDir)) end),
-
+
%% call the appropriate progress function clause to print the results to log
Status =
case {Time,RetVal} of
@@ -3423,16 +3467,16 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
progress(skip, Num, Mod, Func, Loc, Reason,
Time, Comment, Style);
{_,{'EXIT',_Pid,{Skip,Reason}}} when Skip==skip; Skip==skipped ->
- progress(skip, Num, Mod, Func, Loc, Reason,
+ progress(skip, Num, Mod, Func, Loc, Reason,
Time, Comment, Style);
{_,{'EXIT',_Pid,Reason}} ->
- progress(failed, Num, Mod, Func, Loc, Reason,
+ progress(failed, Num, Mod, Func, Loc, Reason,
Time, Comment, Style);
{_,{'EXIT',Reason}} ->
- progress(failed, Num, Mod, Func, Loc, Reason,
+ progress(failed, Num, Mod, Func, Loc, Reason,
Time, Comment, Style);
{_, {failed, Reason}} ->
- progress(failed, Num, Mod, Func, Loc, Reason,
+ progress(failed, Num, Mod, Func, Loc, Reason,
Time, Comment, Style);
{_, {Skip, Reason}} when Skip==skip; Skip==skipped ->
progress(skip, Num, Mod, Func, Loc, Reason,
@@ -3442,7 +3486,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
[] ->
progress(ok, Num, Mod, Func, Loc, RetVal,
Time, Comment, Style);
-
+
Reason ->
progress(failed, Num, Mod, Func, Loc, Reason,
Time, Comment, Style)
@@ -3465,18 +3509,18 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
{US,AS} = get(test_server_skipped),
put(test_server_skipped, {US,AS+1})
end,
- %% only if test case execution is sequential do we care about the
+ %% only if test case execution is sequential do we care about the
%% remaining processes and slave nodes count
case self() of
Main ->
case test_server_sup:framework_call(warn, [processes], true) of
true ->
if ProcsBefore < ProcsAfter ->
- print(minor,
+ print(minor,
"WARNING: ~w more processes in system after test case",
[ProcsAfter-ProcsBefore]);
ProcsBefore > ProcsAfter ->
- print(minor,
+ print(minor,
"WARNING: ~w less processes in system after test case",
[ProcsBefore-ProcsAfter]);
true -> ok
@@ -3493,7 +3537,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
" system. I tried to kill them, but I failed: ~p\n",
[Exit]);
[] -> ok;
- List ->
+ List ->
print(minor, "WARNING: ~w slave nodes in system after test"++
"case. Tried to killed them.~n"++
" Names:~p",
@@ -3505,8 +3549,8 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
_ ->
ok
end,
- %% if the test case was executed sequentially, this updates the execution
- %% time count on the main process (adding execution time of parallel test
+ %% if the test case was executed sequentially, this updates the execution
+ %% time count on the main process (adding execution time of parallel test
%% case groups is done in run_test_cases_loop/4)
if is_number(Time) ->
put(test_server_total_time, get(test_server_total_time)+Time);
@@ -3515,7 +3559,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
end,
check_new_crash_dumps(Where),
- %% if io is being buffered, send finished message
+ %% if io is being buffered, send finished message
%% (no matter if case runs on parallel or main process)
case get(test_server_common_io_handler) of
undefined -> ok;
@@ -3528,7 +3572,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
%%--------------------------------------------------------------------
%% various help functions
-%% Call If() if we're on parallel process, or
+%% Call If() if we're on parallel process, or
%% call Else() if we're on main process
do_if_parallel(Pid, If, Else) ->
case self() of
@@ -3536,7 +3580,7 @@ do_if_parallel(Pid, If, Else) ->
if is_function(Else) -> Else();
true -> Else
end;
- _ ->
+ _ ->
if is_function(If) -> If();
true -> If
end
@@ -3549,13 +3593,13 @@ num2str(N) -> integer_to_list(N).
%% and the content of datadir til target.
maybe_send_beam_and_datadir(Mod) ->
case get(test_server_ctrl_job_sock) of
- undefined ->
+ undefined ->
%% local target
ok;
JobSock ->
%% remote target
case get(test_server_downloaded_suites) of
- undefined ->
+ undefined ->
send_beam_and_datadir(Mod, JobSock),
put(test_server_downloaded_suites, [Mod]);
Suites ->
@@ -3571,10 +3615,10 @@ maybe_send_beam_and_datadir(Mod) ->
send_beam_and_datadir(Mod, JobSock) ->
case code:which(Mod) of
- non_existing ->
+ non_existing ->
io:format("** WARNING: Suite ~w could not be found on host\n",
[Mod]);
- BeamFile ->
+ BeamFile ->
send_beam(JobSock, Mod, BeamFile)
end,
DataDir = get_data_dir(Mod),
@@ -3589,7 +3633,7 @@ send_beam_and_datadir(Mod, JobSock) ->
ModsInDatadir = filelib:wildcard(Wc),
SendBeamFun = fun(X) -> send_beam(JobSock, X) end,
lists:foreach(SendBeamFun, ModsInDatadir),
- %% No need to send C code or makefiles since
+ %% No need to send C code or makefiles since
%% no compilation can be done on target anyway.
%% Compiled C code must exist on target.
%% Beam files are already sent as binaries.
@@ -3597,7 +3641,7 @@ send_beam_and_datadir(Mod, JobSock) ->
%% is to compile it.
Filter = fun("Makefile") -> false;
("Makefile.src") -> false;
- (Y) ->
+ (Y) ->
case filename:extension(Y) of
".c" -> false;
ObjExt -> false;
@@ -3611,7 +3655,7 @@ send_beam_and_datadir(Mod, JobSock) ->
Tarfile = "data_dir.tar.gz",
{ok,Tar} = erl_tar:open(Tarfile, [write,compressed]),
ShortDataDir = filename:basename(DataDir),
- AddTarFun =
+ AddTarFun =
fun(File) ->
Long = filename:join(DataDir, File),
Short = filename:join(ShortDataDir, File),
@@ -3628,11 +3672,11 @@ send_beam_and_datadir(Mod, JobSock) ->
send_beam(JobSock, BeamFile) ->
Mod=filename:rootname(filename:basename(BeamFile), code:objfile_extension()),
- send_beam(JobSock, list_to_atom(Mod), BeamFile).
+ send_beam(JobSock, list_to_atom(Mod), BeamFile).
send_beam(JobSock, Mod, BeamFile) ->
{ok,BeamBin} = file:read_file(BeamFile),
request(JobSock, {{beam,Mod,BeamFile}, BeamBin}).
-
+
check_new_crash_dumps(Where) ->
case Where of
target ->
@@ -3649,25 +3693,25 @@ check_new_crash_dumps(Where) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% progress(Result, CaseNum, Mod, Func, Location, Reason, Time,
+%% progress(Result, CaseNum, Mod, Func, Location, Reason, Time,
%% Comment, TimeFormat) -> Result
%%
%% Prints the result of the test case to log file.
%% Note: Strings that are to be written to the minor log must
%% be prefixed with "=== " here, or the indentation will be wrong.
-progress(skip, CaseNum, Mod, Func, Loc, Reason, Time,
+progress(skip, CaseNum, Mod, Func, Loc, Reason, Time,
Comment, {St0,St1}) ->
- {Reason1,{Color,Ret}} = if_auto_skip(Reason,
+ {Reason1,{Color,Ret}} = if_auto_skip(Reason,
fun() -> {"#ffcc99",auto_skip} end,
fun() -> {"#ff9933",skip} end),
print(major, "=result skipped", []),
- print(1, "*** SKIPPED *** ~s",
+ print(1, "*** SKIPPED *** ~s",
[get_info_str(Func, CaseNum, get(test_server_cases))]),
test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,
{skipped,Reason1}}]),
ReasonStr = reason_to_string(Reason1),
- ReasonStr1 = lists:flatten([string:strip(S,left) ||
+ ReasonStr1 = lists:flatten([string:strip(S,left) ||
S <- string:tokens(ReasonStr,[$\n])]),
ReasonStr2 =
if length(ReasonStr1) > 80 ->
@@ -3686,10 +3730,10 @@ progress(skip, CaseNum, Mod, Func, Loc, Reason, Time,
[Time,Color,ReasonStr2,Comment1]),
FormatLoc = test_server_sup:format_loc(Loc),
print(minor, "=== location ~s", [FormatLoc]),
- print(minor, "=== reason = ~s", [ReasonStr1]),
+ print(minor, "=== reason = ~s", [ReasonStr1]),
Ret;
-
-progress(failed, CaseNum, Mod, Func, Loc, timetrap_timeout, T,
+
+progress(failed, CaseNum, Mod, Func, Loc, timetrap_timeout, T,
Comment0, {St0,St1}) ->
print(major, "=result failed: timeout, ~p", [Loc]),
print(1, "*** FAILED *** ~s",
@@ -3699,23 +3743,23 @@ progress(failed, CaseNum, Mod, Func, Loc, timetrap_timeout, T,
{failed,timetrap_timeout}}]),
FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
ErrorReason = io_lib:format("{timetrap_timeout,~s}", [FormatLastLoc]),
- Comment =
+ Comment =
case Comment0 of
"" -> "<font color=\"red\">" ++ ErrorReason ++ "</font>";
- _ -> "<font color=\"red\">" ++ ErrorReason ++ "</font><br>" ++
+ _ -> "<font color=\"red\">" ++ ErrorReason ++ "</font><br>" ++
to_string(Comment0)
end,
- print(html,
+ print(html,
"<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>"
"<td><font color=\"red\">FAILED</font></td>"
- "<td>~s</td></tr>\n",
+ "<td>~s</td></tr>\n",
[T/1000,Comment]),
FormatLoc = test_server_sup:format_loc(Loc),
print(minor, "=== location ~s", [FormatLoc]),
print(minor, "=== reason = timetrap timeout", []),
failed;
-progress(failed, CaseNum, Mod, Func, Loc, {testcase_aborted,Reason}, _T,
+progress(failed, CaseNum, Mod, Func, Loc, {testcase_aborted,Reason}, _T,
Comment0, {St0,St1}) ->
print(major, "=result failed: testcase_aborted, ~p", [Loc]),
print(1, "*** FAILED *** ~s",
@@ -3725,23 +3769,23 @@ progress(failed, CaseNum, Mod, Func, Loc, {testcase_aborted,Reason}, _T,
{failed,testcase_aborted}}]),
FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
ErrorReason = io_lib:format("{testcase_aborted,~s}", [FormatLastLoc]),
- Comment =
+ Comment =
case Comment0 of
"" -> "<font color=\"red\">" ++ ErrorReason ++ "</font>";
- _ -> "<font color=\"red\">" ++ ErrorReason ++ "</font><br>" ++
+ _ -> "<font color=\"red\">" ++ ErrorReason ++ "</font><br>" ++
to_string(Comment0)
end,
- print(html,
+ print(html,
"<td>" ++ St0 ++ "died" ++ St1 ++ "</td>"
"<td><font color=\"red\">FAILED</font></td>"
- "<td>~s</td></tr>\n",
+ "<td>~s</td></tr>\n",
[Comment]),
FormatLoc = test_server_sup:format_loc(Loc),
print(minor, "=== location ~s", [FormatLoc]),
print(minor, "=== reason = {testcase_aborted,~p}", [Reason]),
failed;
-progress(failed, CaseNum, Mod, Func, unknown, Reason, Time,
+progress(failed, CaseNum, Mod, Func, unknown, Reason, Time,
Comment0, {St0,St1}) ->
print(major, "=result failed: ~p, ~p", [Reason,unknown]),
print(1, "*** FAILED *** ~s",
@@ -3749,10 +3793,10 @@ progress(failed, CaseNum, Mod, Func, unknown, Reason, Time,
test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,
{failed,Reason}}]),
TimeStr = io_lib:format(if is_float(Time) -> "~.3fs";
- true -> "~w"
+ true -> "~w"
end, [Time]),
ErrorReason = lists:flatten(io_lib:format("~p", [Reason])),
- ErrorReason1 = lists:flatten([string:strip(S,left) ||
+ ErrorReason1 = lists:flatten([string:strip(S,left) ||
S <- string:tokens(ErrorReason,[$\n])]),
ErrorReason2 =
if length(ErrorReason1) > 63 ->
@@ -3760,13 +3804,13 @@ progress(failed, CaseNum, Mod, Func, unknown, Reason, Time,
true ->
ErrorReason1
end,
- Comment =
+ Comment =
case Comment0 of
"" -> "<font color=\"red\">" ++ ErrorReason2 ++ "</font>";
- _ -> "<font color=\"red\">" ++ ErrorReason2 ++ "</font><br>" ++
+ _ -> "<font color=\"red\">" ++ ErrorReason2 ++ "</font><br>" ++
to_string(Comment0)
end,
- print(html,
+ print(html,
"<td>" ++ St0 ++ "~s" ++ St1 ++ "</td>"
"<td><font color=\"red\">FAILED</font></td>"
"<td>~s</td></tr>\n",
@@ -3776,7 +3820,7 @@ progress(failed, CaseNum, Mod, Func, unknown, Reason, Time,
print(minor, "=== reason = "++FStr, [FormattedReason]),
failed;
-progress(failed, CaseNum, Mod, Func, Loc, Reason, Time,
+progress(failed, CaseNum, Mod, Func, Loc, Reason, Time,
Comment0, {St0,St1}) ->
print(major, "=result failed: ~p, ~p", [Reason,Loc]),
print(1, "*** FAILED *** ~s",
@@ -3784,18 +3828,18 @@ progress(failed, CaseNum, Mod, Func, Loc, Reason, Time,
test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,
{failed,Reason}}]),
TimeStr = io_lib:format(if is_float(Time) -> "~.3fs";
- true -> "~w"
+ true -> "~w"
end, [Time]),
- Comment =
+ Comment =
case Comment0 of
"" -> "";
_ -> "<br>" ++ to_string(Comment0)
end,
FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
- print(html,
+ print(html,
"<td>" ++ St0 ++ "~s" ++ St1 ++ "</td>"
"<td><font color=\"red\">FAILED</font></td>"
- "<td><font color=\"red\">~s</font>~s</td></tr>\n",
+ "<td><font color=\"red\">~s</font>~s</td></tr>\n",
[TimeStr,FormatLastLoc,Comment]),
FormatLoc = test_server_sup:format_loc(Loc),
print(minor, "=== location ~s", [FormatLoc]),
@@ -3803,7 +3847,7 @@ progress(failed, CaseNum, Mod, Func, Loc, Reason, Time,
print(minor, "=== reason = "++FStr, [FormattedReason]),
failed;
-progress(ok, _CaseNum, Mod, Func, _Loc, RetVal, Time,
+progress(ok, _CaseNum, Mod, Func, _Loc, RetVal, Time,
Comment0, {St0,St1}) ->
print(minor, "successfully completed test case", []),
test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,ok}]),
@@ -3852,9 +3896,9 @@ get_info_str(Func, 0, _Cases) ->
get_info_str(_Func, CaseNum, unknown) ->
"test case " ++ integer_to_list(CaseNum);
get_info_str(_Func, CaseNum, Cases) ->
- "test case " ++ integer_to_list(CaseNum) ++
+ "test case " ++ integer_to_list(CaseNum) ++
" of " ++ integer_to_list(Cases).
-
+
print_if_known(Known, {SK,AK}, {SU,AU}) ->
{S,A} = if Known == unknown -> {SU,AU};
true -> {SK,AK}
@@ -3880,7 +3924,7 @@ reason_to_string({failed,{_,FailFunc,bad_return}}) ->
atom_to_list(FailFunc) ++ " bad return value";
reason_to_string({failed,{_,FailFunc,{timetrap_timeout,_}}}) ->
atom_to_list(FailFunc) ++ " timed out";
-reason_to_string(FWInitFail = {failed,{_CB,init_tc,_Reason}}) ->
+reason_to_string(FWInitFail = {failed,{_CB,init_tc,_Reason}}) ->
to_string(FWInitFail);
reason_to_string({failed,{_,FailFunc,_}}) ->
atom_to_list(FailFunc) ++ " failed";
@@ -3889,29 +3933,29 @@ reason_to_string(Other) ->
%get_font_style(Prop) ->
% {Col,St0,St1} = get_font_style1(Prop),
-% {{"<font color="++Col++">","</font>"},
+% {{"<font color="++Col++">","</font>"},
% {"<font color="++Col++">"++St0,St1++"</font>"}}.
-
+
get_font_style(NormalCase, Mode) ->
- Prop = if not NormalCase ->
+ Prop = if not NormalCase ->
default;
true ->
case check_prop(parallel, Mode) of
- false ->
+ false ->
case check_prop(sequence, Mode) of
- false ->
+ false ->
default;
- _ ->
+ _ ->
sequence
end;
- _ ->
+ _ ->
parallel
end
end,
{Col,St0,St1} = get_font_style1(Prop),
- {{"<font color="++Col++">","</font>"},
+ {{"<font color="++Col++">","</font>"},
{"<font color="++Col++">"++St0,St1++"</font>"}}.
-
+
get_font_style1(parallel) ->
{"\"darkslategray\"","<i>","</i>"};
get_font_style1(sequence) ->
@@ -3931,7 +3975,7 @@ get_font_style1(default) ->
%% The framework application can switch this feature off by setting
%% *its* application environment variable 'format_exception' to false.
%% It is also possible to switch formatting off by starting the
-%% test_server node with init argument 'test_server_format_exception'
+%% test_server node with init argument 'test_server_format_exception'
%% set to false.
format_exception(Reason={_Error,Stack}) when is_list(Stack) ->
@@ -3950,17 +3994,17 @@ format_exception(Reason={_Error,Stack}) when is_list(Stack) ->
_ ->
do_format_exception(Reason)
end
- end;
+ end;
format_exception(Error) ->
format_exception({Error,[]}).
do_format_exception(Reason={Error,Stack}) ->
StackFun = fun(_, _, _) -> false end,
- PF = fun(Term, I) ->
- io_lib:format("~." ++ integer_to_list(I) ++ "p", [Term])
+ PF = fun(Term, I) ->
+ io_lib:format("~." ++ integer_to_list(I) ++ "p", [Term])
end,
case catch lib:format_exception(1, error, Error, Stack, StackFun, PF) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
{"~p",Reason};
Formatted ->
Formatted1 = re:replace(Formatted, "exception error: ", "", [{return,list}]),
@@ -3969,8 +4013,8 @@ do_format_exception(Reason={Error,Stack}) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit,
-%% Where, MultiplyTimetrap) ->
+%% run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit,
+%% Where, TimetrapData) ->
%% {{Time,RetVal,Loc,Opts,Comment},DetectedFail,ProcessesBefore,ProcessesAfter} |
%% {{died,Reason,unknown,Comment},DetectedFail,ProcessesBefore,ProcessesAfter}
%% Name = atom()
@@ -3984,24 +4028,24 @@ do_format_exception(Reason={Error,Stack}) ->
%% ProcessesBefore = ProcessesAfter = integer()
%%
%% Where indicates if the test should run on target or always on the host.
-%%
-%% If test is to be run on target, and target is remote the request is
+%%
+%% If test is to be run on target, and target is remote the request is
%% sent over socket to target, and test_server runs the case and sends the
%% result back over the socket. Else test_server runs the case directly on host.
-run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, host, MultiplyTimetrap) ->
+run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, host, TimetrapData) ->
test_server:run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,
- MultiplyTimetrap});
-run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, target, MultiplyTimetrap) ->
+ TimetrapData});
+run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, target, TimetrapData) ->
case get(test_server_ctrl_job_sock) of
undefined ->
%% local target
test_server:run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,
- MultiplyTimetrap});
+ TimetrapData});
JobSock ->
%% remote target
request(JobSock, {test_case,{CaseNum,Mod,Func,Args,Name,RunInit,
- MultiplyTimetrap}}),
+ TimetrapData}}),
read_job_sock_loop(JobSock)
end.
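
The hunks above only rename MultiplyTimetrap to TimetrapData and thread the new value through to test_server:run_test_case_apply/1. As a minimal sketch of what the richer argument is for, assuming TimetrapData is a {Multiplier,ScaleFlag} tuple built from the new multiply_timetraps and scale_timetraps parameters (the exact shape is defined elsewhere in this patch), an effective timeout could be computed roughly like this:

    %% timetrap_calc.erl -- illustration only, not part of the patch
    -module(timetrap_calc).
    -export([effective_timeout/3]).

    %% The multiplier is applied unconditionally; the scale factor
    %% (e.g. test_server:timetrap_scale_factor()) only when ScaleFlag
    %% is true, i.e. when the run is expected to be slowed down.
    effective_timeout(Timeout0, {Multiplier, ScaleFlag}, ScaleFactor)
      when is_number(Multiplier), is_boolean(ScaleFlag), is_number(ScaleFactor) ->
        Timeout1 = Multiplier * Timeout0,
        case ScaleFlag of
            true  -> round(ScaleFactor * Timeout1);
            false -> Timeout1
        end.
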
@@ -4012,15 +4056,15 @@ run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, target, MultiplyTim
%% Args = [term()]
%%
%% Just like io:format, except that depending on the Detail value, the output
-%% is directed to console, major and/or minor log files.
+%% is directed to console, major and/or minor log files.
%%
%% To handle printouts to common (not minor) log files from parallel test
%% case processes, the test_server_common_io_handler value is checked. If
%% set, the data is sent to the main controlling process. Note that test
%% cases that belong to a conf group nested under a parallel group will also
%% get its io data sent to main rather than immediately printed out, even
-%% if the test cases are executed by the same, main, process (ie the main
-%% process sends messages to itself then).
+%% if the test cases are executed by the same, main, process (ie the main
+%% process sends messages to itself then).
%%
%% Buffered io is handled by the handle_test_case_io_and_status/0 function.
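
To make the buffering described above concrete, here is a stripped-down sketch (not the real implementation) of both ends of that protocol: emit/3 mirrors the {print,self(),Detail,Msg} message used by print_or_buffer/3 below, while drain/0 is a hypothetical stand-in for handle_test_case_io_and_status/0.

    -module(buffered_io_sketch).
    -export([emit/3, drain/0]).

    %% A parallel test case process does not write common log output
    %% itself; it forwards the printout to the main process.
    emit(MainPid, Detail, Msg) ->
        MainPid ! {print, self(), Detail, Msg},
        ok.

    %% Greatly simplified receiver: flush whatever has been buffered.
    %% The real main process keeps output grouped per test case.
    drain() ->
        receive
            {print, _From, Detail, Msg} ->
                io:format("[~w] ~s~n", [Detail, Msg]),
                drain()
        after 0 ->
                ok
        end.
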
@@ -4040,21 +4084,21 @@ print_or_buffer(Detail, Msg, Printer) ->
output({Detail,Msg}, Printer);
MinLevel when is_number(Detail), Detail >= MinLevel ->
output({Detail,Msg}, Printer);
- _ -> % Detail < Minor | major | html
+ _ -> % Detail < Minor | major | html
case get(test_server_common_io_handler) of
- undefined ->
+ undefined ->
output({Detail,Msg}, Printer);
{_,MainPid} ->
MainPid ! {print,self(),Detail,Msg}
end
- end.
+ end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% print_timestamp(Detail, Leader) -> ok
%%
%% Prints Leader followed by a time stamp (date and time). Depending on
%% the Detail value, the output is directed to console, major and/or minor
-%% log files.
+%% log files.
print_timestamp(Detail, Leader) ->
print(Detail, timestamp_get(Leader), []).
@@ -4288,7 +4332,7 @@ update_config(Config, []) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% collect_cases(CurMod, TopCase, SkipList) ->
+%% collect_cases(CurMod, TopCase, SkipList) ->
%% BasicCaseList | {error,Reason}
%%
%% CurMod = atom()
@@ -4319,18 +4363,18 @@ update_config(Config, []) ->
%% are listed, and each Module:all(suite) is called
%% {dir,Dir,Pattern} All modules <Pattern>_SUITE in the named dir
%% are listed, and each Module:all(suite) is called
-%% {conf,InitMF,Cases,FinMF}
-%% {conf,Props,InitMF,Cases,FinMF}
+%% {conf,InitMF,Cases,FinMF}
+%% {conf,Props,InitMF,Cases,FinMF}
%% InitMF is placed in the BasicCaseList, then
%% Cases is treated according to this table, then
%% FinMF is placed in the BasicCaseList. InitMF
%% and FinMF are configuration manipulation
%% functions. See below.
-%% {make,InitMFA,Cases,FinMFA}
+%% {make,InitMFA,Cases,FinMFA}
%% InitMFA is placed in the BasicCaseList, then
%% Cases is treated according to this table, then
%% FinMFA is placed in the BasicCaseList. InitMFA
-%% and FinMFA are make/unmake functions. If InitMFA
+%% and FinMFA are make/unmake functions. If InitMFA
%% fails, Cases are not run. InitMFA and FinMFA are
%% always run on the host - not on target.
%%
@@ -4339,7 +4383,7 @@ update_config(Config, []) ->
%%
%% [] Leaf case
%% {req,ReqList} Kept for backwards compatibility - same as []
-%% {req,ReqList,Cases} Kept for backwards compatibility -
+%% {req,ReqList,Cases} Kept for backwards compatibility -
%% Cases parsed recursively with collect_cases/3
%% Cases (list) Recursively parsed with collect_cases/3
%%
@@ -4351,7 +4395,7 @@ update_config(Config, []) ->
%% Configuration manipulation functions are called with the current
%% configuration list as only argument, and are expected to return a new
%% configuration list. Such a pair of function may, for example, start a
-%% server and stop it after a serie of test cases.
+%% server and stop it after a series of test cases.
%%
%% SkipCases is expected to be in the format:
%%
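
As a concrete illustration of the specification above (my_SUITE, other_SUITE and their functions are made-up names; the skip-list shapes follow the clauses of in_skip_list/2 further down, since the full SkipCases format is cut off here), a conf-wrapped spec and matching skip list could be built in the shell like this:

    1> TopCase = {conf, [{name,quick_tests}],
                  {my_SUITE,init_per_group},
                  [{my_SUITE,tc1}, {my_SUITE,tc2}],
                  {my_SUITE,end_per_group}}.
    2> Skip = [{my_SUITE, tc2, "not supported on this platform"},
               {other_SUITE, "whole suite skipped"}].
    %% collect_all_cases(TopCase, Skip) in this module would then expand
    %% the spec into the flat BasicCaseList described above.
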
@@ -4364,7 +4408,7 @@ update_config(Config, []) ->
skip}). % skip list
collect_all_cases(Top, Skip) when is_list(Skip) ->
- Result =
+ Result =
case collect_cases(Top, #cc{mod=[],skip=Skip}) of
{ok,Cases,_St} -> Cases;
Other -> Other
@@ -4384,7 +4428,7 @@ collect_cases([Case|Cs0], St0) ->
{error,_Reason}=Error -> Error
end;
-
+
collect_cases({module,Case}, St) when is_atom(Case), is_atom(St#cc.mod) ->
collect_case({St#cc.mod,Case}, St);
collect_cases({module,Mod,Case}, St) ->
@@ -4409,19 +4453,19 @@ collect_cases({conf,Props,InitMF,CaseList,FinF}, St) when is_atom(FinF) ->
collect_cases({conf,Props,InitMF,CaseList,{St#cc.mod,FinF}}, St);
collect_cases({conf,Props,InitMF,CaseList,FinMF}, St0) ->
case collect_cases(CaseList, St0) of
- {ok,[],_St}=Empty ->
+ {ok,[],_St}=Empty ->
Empty;
{ok,FlatCases,St} ->
Ref = make_ref(),
case in_skip_list(InitMF, St#cc.skip) of
- {true,Comment} ->
- {ok,[{skip_case,{conf,Ref,InitMF,Comment}} |
+ {true,Comment} ->
+ {ok,[{skip_case,{conf,Ref,InitMF,Comment}} |
FlatCases ++ [{conf,Ref,[],FinMF}]],St};
false ->
- {ok,[{conf,Ref,Props,InitMF} |
+ {ok,[{conf,Ref,Props,InitMF} |
FlatCases ++ [{conf,Ref,keep_name(Props),FinMF}]],St}
end;
- {error,_Reason}=Error ->
+ {error,_Reason}=Error ->
Error
end;
@@ -4430,12 +4474,12 @@ collect_cases({make,InitMFA,CaseList,FinMFA}, St0) ->
{ok,[],_St}=Empty -> Empty;
{ok,FlatCases,St} ->
Ref = make_ref(),
- {ok,[{make,Ref,InitMFA}|FlatCases ++
+ {ok,[{make,Ref,InitMFA}|FlatCases ++
[{make,Ref,FinMFA}]],St};
{error,_Reason}=Error -> Error
end;
-collect_cases({Module, Cases}, St) when is_list(Cases) ->
+collect_cases({Module, Cases}, St) when is_list(Cases) ->
case (catch collect_case(Cases, St#cc{mod=Module}, [])) of
{ok, NewCases, NewSt} ->
{ok, NewCases, NewSt};
@@ -4475,9 +4519,9 @@ collect_case_invoke(Mod, Case, MFA, St) ->
case os:getenv("TEST_SERVER_FRAMEWORK") of
false ->
case catch apply(Mod, Case, [suite]) of
- {'EXIT',_} ->
+ {'EXIT',_} ->
{ok,[MFA],St};
- Suite ->
+ Suite ->
collect_subcases(Mod, Case, MFA, St, Suite)
end;
_ ->
@@ -4485,7 +4529,7 @@ collect_case_invoke(Mod, Case, MFA, St) ->
collect_subcases(Mod, Case, MFA, St, Suite)
end.
-collect_subcases(Mod, Case, MFA, St, Suite) ->
+collect_subcases(Mod, Case, MFA, St, Suite) ->
case Suite of
[] when Case == all -> {ok,[],St};
[] -> {ok,[MFA],St};
@@ -4536,7 +4580,7 @@ collect_case_deny(Mod, Case, MFA, ReqList, SubCases, St) ->
{granted,SubCases} ->
collect_case_subcases(Mod, Case, SubCases, St)
end.
-
+
check_deny([Req|Reqs], DenyList) ->
case check_deny_req(Req, DenyList) of
{denied,_Comment}=Denied -> Denied;
@@ -4560,7 +4604,7 @@ check_deny_req(Req, DenyList) ->
end.
in_skip_list({Mod,Func,_Args}, SkipList) ->
- in_skip_list({Mod,Func}, SkipList);
+ in_skip_list({Mod,Func}, SkipList);
in_skip_list({Mod,Func}, [{Mod,Funcs,Comment}|SkipList]) when is_list(Funcs) ->
case lists:member(Func, Funcs) of
true ->
@@ -4579,7 +4623,6 @@ in_skip_list(_, []) ->
keep_name(Props) ->
lists:filter(fun({name,_}) -> true; (_) -> false end, Props).
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Target node handling functions %%
@@ -4615,13 +4658,13 @@ start_node(Name, Type, Options) ->
end,
case Warning of
[] -> ok;
- _ ->
+ _ ->
format(1, Warning),
format(minor, Warning)
end,
{ok, Nodename};
{fail,{Ret, Host, Cmd}} ->
- format(minor,
+ format(minor,
"Failed to start node ~p on ~p with command: ~p~n"
"Reason: ~p",
[Name, Host, Cmd, Ret]),
@@ -4630,7 +4673,7 @@ start_node(Name, Type, Options) ->
format(minor, "Failed to start node ~p: ~p", [Name,Ret]),
Ret;
{Ret, Host, Cmd} ->
- format(minor,
+ format(minor,
"Failed to start node ~p on ~p with command: ~p~n"
"Reason: ~p",
[Name, Host, Cmd, Ret]),
@@ -4685,7 +4728,7 @@ read_job_sock_loop(Sock) ->
exit({controller,connection_lost,Reason});
{ok,<<1,Request/binary>>} ->
case decode(binary_to_term(Request)) of
- ok ->
+ ok ->
read_job_sock_loop(Sock);
{stop,Result} ->
Result
@@ -4695,14 +4738,14 @@ read_job_sock_loop(Sock) ->
decode({apply,{M,F,A}}) ->
apply(M,F,A),
ok;
-decode({sync_apply,{M,F,A}}) ->
+decode({sync_apply,{M,F,A}}) ->
R = apply(M,F,A),
request(get(test_server_ctrl_job_sock),{sync_result,R}),
ok;
decode({sync_result,Result}) ->
{stop,Result};
decode({test_case_result,Result}) ->
- {stop,Result};
+ {stop,Result};
decode({privdir,empty_priv_dir}) ->
{stop,ok};
decode({{privdir,PrivDirTar},TarBin}) ->
@@ -4742,7 +4785,7 @@ p({A,B,C}) ->
p(X) ->
pinfo(X).
-t() ->
+t() ->
t(wall_clock).
t(X) ->
element(1, statistics(X)).
@@ -4781,7 +4824,7 @@ display_info([Pid|T], R, M) ->
Other ->
Other
end,
- Reds = fetch(reductions, Info),
+ Reds = fetch(reductions, Info),
LM = length(fetch(messages, Info)),
pformat(io_lib:format("~w", [Pid]),
io_lib:format("~w", [Call]),
@@ -4822,12 +4865,12 @@ pinfo(P) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% A module is included in the cover analysis if
-%% - it belongs to the tested application and is not listed in the
+%% - it belongs to the tested application and is not listed in the
%% {exclude,List} part of the App.cover file
%% - it does not belong to the application, but is listed in the
%% {include,List} part of the App.cover file
-%% - it does not belong to the application, but is listed in the
-%% cross.cover file (in the test_server application) under 'all'
+%% - it does not belong to the application, but is listed in the
+%% cross.cover file (in the test_server application) under 'all'
%% or under the tested application.
%%
%% The modules listed in the cross.cover file are modules that are
@@ -4893,7 +4936,7 @@ read_cover_file(CoverFile) ->
io:fwrite("Faulty format of CoverFile ~p\n", [CoverFile]),
{[],[]}
end;
- {error,Reason} ->
+ {error,Reason} ->
io:fwrite("Can't read CoverFile ~p\nReason: ~p\n",
[CoverFile,Reason]),
{[],[]}
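
read_cover_file/1 above consults the App.cover file mentioned in the inclusion rules earlier. A hypothetical kernel.cover illustrating the two term kinds those rules refer to (treat the exact set of accepted terms as an assumption; only exclude and include are described in this section):

    %% kernel.cover (illustration; my_extra_lib is a made-up module)
    {exclude, [erl_prim_loader, init]}.   %% app modules to leave out
    {include, [my_extra_lib]}.            %% non-app modules to add
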
@@ -4958,7 +5001,7 @@ cover_analyse({App,CoverInfo}, Analyse, AnalyseMods, TestDir) ->
end,
io:fwrite(CoverLog, "<p>Excluded module(s): <code>~p</code>\n", [Excluded]),
-
+
Coverage = cover_analyse(Analyse, AnalyseMods),
case lists:filter(fun({_M,{_,_,_}}) -> false;
@@ -4968,7 +5011,7 @@ cover_analyse({App,CoverInfo}, Analyse, AnalyseMods, TestDir) ->
ok;
Bad ->
io:fwrite(CoverLog, "<p>Analysis failed for ~w module(s): "
- "<code>~w</code>\n",
+ "<code>~w</code>\n",
[length(Bad),[BadM || {BadM,{_,_Why}} <- Bad]])
end,
@@ -5002,10 +5045,10 @@ cross_cover_analyse(Analyse, CrossModules) ->
CoverdataFiles = get_coverdata_files(),
lists:foreach(fun(CDF) -> cover:import(CDF) end, CoverdataFiles),
io:fwrite("Cover analysing... ", []),
- DetailsFun =
+ DetailsFun =
case Analyse of
details ->
- fun(Dir,M) ->
+ fun(Dir,M) ->
OutFile = filename:join(Dir,
atom_to_list(M) ++
".CROSS_COVER.html"),
@@ -5018,7 +5061,7 @@ cross_cover_analyse(Analyse, CrossModules) ->
SortedModules =
case CrossModules of
undefined ->
- sort_modules([Mod || Mod <- get_all_cross_modules(),
+ sort_modules([Mod || Mod <- get_all_cross_modules(),
lists:member(Mod, cover:imported_modules())], []);
_ ->
sort_modules(CrossModules, [])
@@ -5031,7 +5074,7 @@ cross_cover_analyse(Analyse, CrossModules) ->
%% cross.cover, write a cross cover log (cross_cover.html).
write_cross_cover_logs([{App,Coverage}|T]) ->
case last_test_for_app(App) of
- false ->
+ false ->
ok;
Dir ->
CoverLogName = filename:join(Dir,?cross_coverlog_name),
@@ -5045,13 +5088,13 @@ write_cross_cover_logs([{App,Coverage}|T]) ->
end,
write_cross_cover_logs(T);
write_cross_cover_logs([]) ->
- io:fwrite("done\n", []).
+ io:fwrite("done\n", []).
%% Find all exported coverdata files. First find all the latest
%% run.<timestamp> directories, and the check if there is a file named
%% all.coverdata.
get_coverdata_files() ->
- PossibleFiles = [last_coverdata_file(Dir) ||
+ PossibleFiles = [last_coverdata_file(Dir) ||
Dir <- filelib:wildcard([$*|?logdir_ext]),
filelib:is_dir(Dir)],
[File || File <- PossibleFiles, filelib:is_file(File)].
@@ -5074,12 +5117,12 @@ last_test([_|Rest], Latest) ->
last_test(Rest, Latest);
last_test([], Latest) ->
Latest.
-
+
%% Sort modules according to the application they belong to.
%% Return [{App,LastTestDir,ModuleList}]
sort_modules([M|Modules], Acc) ->
App = get_app(M),
- Acc1 =
+ Acc1 =
case lists:keysearch(App, 1, Acc) of
{value,{App,LastTest,List}} ->
lists:keyreplace(App, 1, Acc, {App,LastTest,[M|List]});
@@ -5120,9 +5163,9 @@ get_all_cross_modules() ->
get_cross_modules(all).
get_cross_modules(App) ->
case file:consult(?cross_cover_file) of
- {ok,List} ->
+ {ok,List} ->
get_cross_modules(App, List, []);
- _X ->
+ _X ->
[]
end.
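
get_cross_modules/1 above consults the cross.cover file. Assuming it holds {App,Modules} entries keyed on 'all' or an application name, as the cover-analysis comment earlier describes, an illustrative file could look like:

    %% cross.cover (illustration; module names are made up)
    {all,    [shared_helper]}.
    {kernel, [kernel_only_helper]}.
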
@@ -5134,11 +5177,11 @@ get_cross_modules(App, [_H|T], Acc) ->
get_cross_modules(App, T, Acc);
get_cross_modules(_App, [], Acc) ->
Acc.
-
+
%% Support functions for writing the cover logs (both cross and normal)
write_coverlog_header(CoverLog) ->
- case catch
+ case catch
io:fwrite(CoverLog,
"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
"<!-- autogenerated by '~w'. -->\n"
@@ -5162,13 +5205,13 @@ format_analyse(M,Cov,NotCov,undefined) ->
io_lib:fwrite("<tr><td>~w</td>"
"<td align=right>~w %</td>"
"<td align=right>~w</td>"
- "<td align=right>~w</td></tr>\n",
+ "<td align=right>~w</td></tr>\n",
[M,pc(Cov,NotCov),Cov,NotCov]);
format_analyse(M,Cov,NotCov,{file,File}) ->
io_lib:fwrite("<tr><td><a href=\"~s\">~w</a></td>"
"<td align=right>~w %</td>"
"<td align=right>~w</td>"
- "<td align=right>~w</td></tr>\n",
+ "<td align=right>~w</td></tr>\n",
[filename:basename(File),M,pc(Cov,NotCov),Cov,NotCov]);
format_analyse(M,Cov,NotCov,{lines,Lines}) ->
CoverOutName = atom_to_list(M)++".COVER.html",
@@ -5177,15 +5220,15 @@ format_analyse(M,Cov,NotCov,{lines,Lines}) ->
io_lib:fwrite("<tr><td><a href=\"~s\">~w</a></td>"
"<td align=right>~w %</td>"
"<td align=right>~w</td>"
- "<td align=right>~w</td></tr>\n",
+ "<td align=right>~w</td></tr>\n",
[CoverOutName,M,pc(Cov,NotCov),Cov,NotCov]);
format_analyse(M,Cov,NotCov,{error,_}) ->
io_lib:fwrite("<tr><td>~w</td>"
"<td align=right>~w %</td>"
"<td align=right>~w</td>"
- "<td align=right>~w</td></tr>\n",
+ "<td align=right>~w</td></tr>\n",
[M,pc(Cov,NotCov),Cov,NotCov]).
-
+
pc(0,0) ->
0;
@@ -5200,9 +5243,9 @@ write_not_covered(CoverOut,M,Lines) ->
"<table border=3 cellpadding=5>\n"
"<th>Line Number</th>\n",
[M]),
- lists:foreach(fun({{_M,Line},{0,1}}) ->
+ lists:foreach(fun({{_M,Line},{0,1}}) ->
io:fwrite(CoverOut,"<tr><td>~w</td></tr>\n", [Line]);
- (_) ->
+ (_) ->
ok
end,
Lines),
@@ -5216,7 +5259,7 @@ write_default_coverlog(TestDir) ->
file:close(CoverLog).
write_default_cross_coverlog(TestDir) ->
- {ok,CrossCoverLog} =
+ {ok,CrossCoverLog} =
file:open(filename:join(TestDir,?cross_coverlog_name), [write]),
write_coverlog_header(CrossCoverLog),
io:fwrite(CrossCoverLog,
@@ -5232,7 +5275,7 @@ write_cover_result_table(CoverLog,Coverage) ->
"<th>Not covered (Lines)</th>\n",
[]),
{TotCov,TotNotCov} =
- lists:foldl(fun({M,{Cov,NotCov,Details}},{AccCov,AccNotCov}) ->
+ lists:foldl(fun({M,{Cov,NotCov,Details}},{AccCov,AccNotCov}) ->
Str = format_analyse(M,Cov,NotCov,Details),
io:fwrite(CoverLog,"~s", [Str]),
{AccCov+Cov,AccNotCov+NotCov};
diff --git a/lib/test_server/src/test_server_sup.erl b/lib/test_server/src/test_server_sup.erl
index 89edb0f881..2b4e1efb71 100644
--- a/lib/test_server/src/test_server_sup.erl
+++ b/lib/test_server/src/test_server_sup.erl
@@ -21,7 +21,7 @@
%%% Purpose: Test server support functions.
%%%-------------------------------------------------------------------
-module(test_server_sup).
--export([timetrap/2, timetrap_cancel/1, capture_get/1, messages_get/1,
+-export([timetrap/2, timetrap/3, timetrap_cancel/1, capture_get/1, messages_get/1,
timecall/3, call_crash/5, app_test/2, check_new_crash_dumps/0,
cleanup_crash_dumps/0, crash_dump_dir/0, tar_crash_dumps/0,
get_username/0, get_os_family/0,
@@ -34,16 +34,23 @@
-define(src_listing_ext, ".src.html").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% timetrap(Timeout,Pid) -> Handle
+%% timetrap(Timeout,Scale,Pid) -> Handle
%% Handle = term()
%%
%% Creates a time trap, that will kill the given process if the
%% trap is not cancelled with timetrap_cancel/1, within Timeout
%% milliseconds.
+%% Scale says if the time should be scaled up to compensate for
+%% delays during the test (e.g. if cover is running).
timetrap(Timeout0, Pid) ->
+ timetrap(Timeout0, true, Pid).
+
+timetrap(Timeout0, Scale, Pid) ->
process_flag(priority, max),
- Timeout = test_server:timetrap_scale_factor() * Timeout0,
+ Timeout = if not Scale -> Timeout0;
+ true -> test_server:timetrap_scale_factor() * Timeout0
+ end,
receive
after trunc(Timeout) ->
Line = test_server:get_loc(Pid),
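
The new timetrap/3 lets the caller switch automatic scaling off per call. A minimal usage sketch, relying only on the exported timetrap/3 and timetrap_cancel/1 shown above (the wrapper module and function names are invented):

    -module(timetrap_example).
    -export([with_timetrap/3]).

    %% Run Fun under a timetrap of Timeout ms. ScaleTimetraps = false
    %% keeps the timeout exactly as given; true lets test_server_sup
    %% scale it up (e.g. when cover is running).
    with_timetrap(Timeout, ScaleTimetraps, Fun) when is_boolean(ScaleTimetraps) ->
        Handle = test_server_sup:timetrap(Timeout, ScaleTimetraps, self()),
        try
            Fun()
        after
            test_server_sup:timetrap_cancel(Handle)
        end.
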