author    Peter Andersson <[email protected]>    2010-06-29 17:13:05 +0200
committer Peter Andersson <[email protected]>    2010-07-06 02:44:26 +0200
commit    31b52954dc667861a2e7cd4edba100132499cd5f (patch)
tree      aa03a9f884458626ff2ccbebeed402b7208c9a95 /lib/common_test
parent    4b399cdf4472975b0f3d4d21054f30064b32ed32 (diff)
Add new option to label test runs
With the 'label' option, a test run can be given a user-defined name that Common Test prints in the overview log files.
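
A minimal sketch of how the label can be specified, based on the option handling added below in ct_run.erl and ct_testspec.erl (the label strings, node name and test directory are placeholder values):

    %% In a test specification file; a plain term applies to all nodes,
    %% a node-specific term overrides it for that node:
    {label, "nightly_regression"}.
    {label, ct1@host, "ct1_nightly"}.

    %% From Erlang, via ct:run_test/1; a string or an atom is accepted
    %% (atoms are converted to strings):
    ct:run_test([{label, "nightly_regression"}, {dir, "my_test_dir"}]).

    %% From the command line (flag read in script_start1/2):
    %% ct_run -label nightly_regression ...

The label is stored in the common_test application environment (test_label) and read by ct_logs when the overview log headers are written.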
Diffstat (limited to 'lib/common_test')
-rw-r--r--  lib/common_test/src/ct_logs.erl               66
-rw-r--r--  lib/common_test/src/ct_run.erl                63
-rw-r--r--  lib/common_test/src/ct_testspec.erl           41
-rw-r--r--  lib/common_test/src/ct_util.hrl                1
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE.erl  312
5 files changed, 390 insertions, 93 deletions
diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl
index 5683d06aa7..3ed0ea5c40 100644
--- a/lib/common_test/src/ct_logs.erl
+++ b/lib/common_test/src/ct_logs.erl
@@ -716,7 +716,7 @@ make_last_run_index1(StartTime,IndexName) ->
[Log];
Logs ->
case read_totals_file(?totals_name) of
- {_Node,Logs0,_Totals} ->
+ {_Node,_Lbl,Logs0,_Totals} ->
insert_dirs(Logs,Logs0);
_ ->
%% someone deleted the totals file!?
@@ -728,10 +728,15 @@ make_last_run_index1(StartTime,IndexName) ->
{ok,Bin} -> binary_to_term(Bin);
_ -> []
end,
- {ok,Index0,Totals} = make_last_run_index(Logs1, index_header(StartTime),
+ Label = case application:get_env(common_test, test_label) of
+ {ok,Lbl} -> Lbl;
+ _ -> undefined
+ end,
+ {ok,Index0,Totals} = make_last_run_index(Logs1,
+ index_header(Label,StartTime),
0, 0, 0, 0, 0, Missing),
%% write current Totals to file, later to be used in all_runs log
- write_totals_file(?totals_name,Logs1,Totals),
+ write_totals_file(?totals_name,Label,Logs1,Totals),
Index = [Index0|index_footer()],
case force_write_file(IndexName, Index) of
ok ->
@@ -937,8 +942,16 @@ term_to_text(Term) ->
%%% Headers and footers.
-index_header(StartTime) ->
- [header("Test Results " ++ format_time(StartTime)) |
+index_header(Label, StartTime) ->
+ Head =
+ case Label of
+ undefined ->
+ header("Test Results", format_time(StartTime));
+ _ ->
+ header("Test Results for \"" ++ Label ++ "\"",
+ format_time(StartTime))
+ end,
+ [Head |
["<CENTER>\n",
"<P><A HREF=\"",?ct_log_name,"\">Common Test Framework Log</A></P>",
"<TABLE border=\"3\" cellpadding=\"5\" "
@@ -976,6 +989,7 @@ all_runs_header() ->
"BGCOLOR=\"",?table_color1,"\">\n"
"<th><B>History</B></th>\n"
"<th><B>Node</B></th>\n"
+ "<th><B>Label</B></th>\n"
"<th>Tests</th>\n"
"<th><B>Names</B></th>\n"
"<th>Total</th>\n"
@@ -987,12 +1001,23 @@ all_runs_header() ->
"\n"]].
header(Title) ->
+ header1(Title, "").
+header(Title, SubTitle) ->
+ header1(Title, SubTitle).
+
+header1(Title, SubTitle) ->
+ SubTitleHTML = if SubTitle =/= "" ->
+ ["<CENTER>\n",
+ "<H2>" ++ SubTitle ++ "</H2>\n",
+ "</CENTER>\n<BR>\n"];
+ true -> "<BR>\n"
+ end,
["<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
"<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
"<HTML>\n",
"<HEAD>\n",
- "<TITLE>" ++ Title ++ "</TITLE>\n",
+ "<TITLE>" ++ Title ++ " " ++ SubTitle ++ "</TITLE>\n",
"<META HTTP-EQUIV=\"CACHE-CONTROL\" CONTENT=\"NO-CACHE\">\n",
"</HEAD>\n",
@@ -1004,6 +1029,7 @@ header(Title) ->
"<CENTER>\n",
"<H1>" ++ Title ++ "</H1>\n",
"</CENTER>\n",
+ SubTitleHTML,
"<!-- ---- CONTENT ---- -->\n"].
@@ -1217,7 +1243,7 @@ runentry(Dir, BasicHtml) ->
TotalsFile = filename:join(Dir,?totals_name),
TotalsStr =
case read_totals_file(TotalsFile) of
- {Node,Logs,{TotSucc,TotFail,UserSkip,AutoSkip,NotBuilt}} ->
+ {Node,Label,Logs,{TotSucc,TotFail,UserSkip,AutoSkip,NotBuilt}} ->
TotFailStr =
if TotFail > 0 ->
["<FONT color=\"red\">",
@@ -1263,6 +1289,7 @@ runentry(Dir, BasicHtml) ->
end,
Total = TotSucc+TotFail+AllSkip,
A = ["<TD ALIGN=center><FONT SIZE=-1>",Node,"</FONT></TD>\n",
+ "<TD ALIGN=center><FONT SIZE=-1>",Label,"</FONT></TD>\n",
"<TD ALIGN=right>",NoOfTests,"</TD>\n"],
B = if BasicHtml ->
["<TD ALIGN=center><FONT SIZE=-1>",TestNamesTrunc,"</FONT></TD>\n"];
@@ -1286,14 +1313,16 @@ runentry(Dir, BasicHtml) ->
"<TD><A HREF=\"",Index,"\">",timestamp(Dir),"</A>",TotalsStr,"</TD>\n"
"</TR>\n"].
-write_totals_file(Name,Logs,Totals) ->
+write_totals_file(Name,Label,Logs,Totals) ->
AbsName = ?abs(Name),
notify_and_lock_file(AbsName),
force_write_file(AbsName,
term_to_binary({atom_to_list(node()),
- Logs,Totals})),
+ Label,Logs,Totals})),
notify_and_unlock_file(AbsName).
+%% this function needs to convert from old formats to new so that old
+%% test results (prev ct versions) can be listed together with new
read_totals_file(Name) ->
AbsName = ?abs(Name),
notify_and_lock_file(AbsName),
@@ -1303,12 +1332,23 @@ read_totals_file(Name) ->
case catch binary_to_term(Bin) of
{'EXIT',_Reason} -> % corrupt file
{"-",[],undefined};
- R = {Node,Ls,Tot} ->
+ {Node,Label,Ls,Tot} -> % all info available
+ Label1 = case Label of
+ undefined -> "-";
+ _ -> Label
+ end,
+ case Tot of
+ {_Ok,_Fail,_USkip,_ASkip,_NoBuild} -> % latest format
+ {Node,Label1,Ls,Tot};
+ {TotSucc,TotFail,AllSkip,NotBuilt} ->
+ {Node,Label1,Ls,{TotSucc,TotFail,AllSkip,undefined,NotBuilt}}
+ end;
+ {Node,Ls,Tot} -> % no label found
case Tot of
- {_,_,_,_,_} -> % latest format
- R;
+ {_Ok,_Fail,_USkip,_ASkip,_NoBuild} -> % latest format
+ {Node,"-",Ls,Tot};
{TotSucc,TotFail,AllSkip,NotBuilt} ->
- {Node,Ls,{TotSucc,TotFail,AllSkip,undefined,NotBuilt}}
+ {Node,"-",Ls,{TotSucc,TotFail,AllSkip,undefined,NotBuilt}}
end;
%% for backwards compatibility
{Ls,Tot} -> {"-",Ls,Tot};
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index adb49aa44d..0b4a508321 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -45,7 +45,8 @@
-define(abs(Name), filename:absname(Name)).
-define(testdir(Name, Suite), ct_util:get_testdir(Name, Suite)).
--record(opts, {vts,
+-record(opts, {label,
+ vts,
shell,
cover,
coverspec,
@@ -158,6 +159,7 @@ script_start(Args) ->
script_start1(Parent, Args) ->
%% read general start flags
+ Label = get_start_opt(label, fun([Lbl]) -> Lbl end, Args),
Vts = get_start_opt(vts, true, Args),
Shell = get_start_opt(shell, true, Args),
Cover = get_start_opt(cover, fun([CoverFile]) -> ?abs(CoverFile) end, Args),
@@ -230,7 +232,7 @@ script_start1(Parent, Args) ->
application:set_env(common_test, basic_html, true)
end,
- StartOpts = #opts{vts = Vts, shell = Shell, cover = Cover,
+ StartOpts = #opts{label = Label, vts = Vts, shell = Shell, cover = Cover,
logdir = LogDir, event_handlers = EvHandlers,
include = IncludeDirs,
silent_connections = SilentConns,
@@ -289,6 +291,9 @@ script_start2(StartOpts = #opts{vts = undefined,
TS ->
SpecStartOpts = get_data_for_node(TS, node()),
+ Label = choose_val(StartOpts#opts.label,
+ SpecStartOpts#opts.label),
+
LogDir = choose_val(StartOpts#opts.logdir,
SpecStartOpts#opts.logdir),
@@ -304,7 +309,8 @@ script_start2(StartOpts = #opts{vts = undefined,
SpecStartOpts#opts.include]),
application:set_env(common_test, include, AllInclude),
- {TS,StartOpts#opts{testspecs = Specs,
+ {TS,StartOpts#opts{label = Label,
+ testspecs = Specs,
cover = Cover,
logdir = LogDir,
config = SpecStartOpts#opts.config,
@@ -430,8 +436,12 @@ script_start4(#opts{vts = true, config = Config, event_handlers = EvHandlers,
end, [], Config),
vts:init_data(ConfigFiles, EvHandlers, ?abs(LogDir), Tests);
-script_start4(#opts{shell = true, config = Config, event_handlers = EvHandlers,
+script_start4(#opts{label = Label, shell = true, config = Config,
+ event_handlers = EvHandlers,
logdir = LogDir, testspecs = Specs}, _Args) ->
+ %% label - used by ct_logs
+ application:set_env(common_test, test_label, Label),
+
InstallOpts = [{config,Config},{event_handler,EvHandlers}],
if Config == [] ->
ok;
@@ -616,6 +626,10 @@ run_test(StartOpts) when is_list(StartOpts) ->
end.
run_test1(StartOpts) ->
+ %% label
+ Label = get_start_opt(label, fun(Lbl) when is_list(Lbl) -> Lbl;
+ (Lbl) when is_atom(Lbl) -> atom_to_list(Lbl)
+ end, StartOpts),
%% logdir
LogDir = get_start_opt(logdir, fun(LD) when is_list(LD) -> LD end,
StartOpts),
@@ -714,7 +728,8 @@ run_test1(StartOpts) ->
%% stepped execution
Step = get_start_opt(step, value, StartOpts),
- Opts = #opts{cover = Cover, step = Step, logdir = LogDir, config = CfgFiles,
+ Opts = #opts{label = Label,
+ cover = Cover, step = Step, logdir = LogDir, config = CfgFiles,
event_handlers = EvHandlers, include = Include,
silent_connections = SilentConns,
stylesheet = Stylesheet,
@@ -750,6 +765,8 @@ run_spec_file(Relaxed,
exit(CTReason);
TS ->
SpecOpts = get_data_for_node(TS, node()),
+ Label = choose_val(Opts#opts.label,
+ SpecOpts#opts.label),
LogDir = choose_val(Opts#opts.logdir,
SpecOpts#opts.logdir),
AllConfig = merge_vals([CfgFiles, SpecOpts#opts.config]),
@@ -769,7 +786,8 @@ run_spec_file(Relaxed,
which(logdir,LogDir),
AllEvHs) of
ok ->
- Opts1 = Opts#opts{cover = Cover,
+ Opts1 = Opts#opts{label = Label,
+ cover = Cover,
logdir = which(logdir, LogDir),
config = AllConfig,
event_handlers = AllEvHs,
@@ -954,14 +972,16 @@ run_testspec1(TestSpec) ->
end
end.
-get_data_for_node(#testspec{logdir=LogDirs,
- cover=CoverFs,
- config=Cfgs,
- userconfig=UsrCfgs,
- event_handler=EvHs,
- include=Incl,
- multiply_timetraps=MTs,
- scale_timetraps=STs}, Node) ->
+get_data_for_node(#testspec{label = Labels,
+ logdir = LogDirs,
+ cover = CoverFs,
+ config = Cfgs,
+ userconfig = UsrCfgs,
+ event_handler = EvHs,
+ include = Incl,
+ multiply_timetraps = MTs,
+ scale_timetraps = STs}, Node) ->
+ Label = proplists:get_value(Node, Labels),
LogDir = case proplists:get_value(Node, LogDirs) of
undefined -> ".";
Dir -> Dir
@@ -973,7 +993,8 @@ get_data_for_node(#testspec{logdir=LogDirs,
[CBF || {N,CBF} <- UsrCfgs, N==Node],
EvHandlers = [{H,A} || {N,H,A} <- EvHs, N==Node],
Include = [I || {N,I} <- Incl, N==Node],
- #opts{logdir = LogDir,
+ #opts{label = Label,
+ logdir = LogDir,
cover = Cover,
config = ConfigFiles,
event_handlers = EvHandlers,
@@ -1112,7 +1133,17 @@ do_run(Tests, Misc, LogDir) when is_list(Misc) ->
do_run(Tests, [], Opts1#opts{logdir = LogDir}, []).
do_run(Tests, Skip, Opts, Args) ->
- #opts{cover = Cover} = Opts,
+ #opts{label = Label, cover = Cover} = Opts,
+
+ %% label - used by ct_logs
+ TestLabel =
+ if Label == undefined -> undefined;
+ is_atom(Label) -> atom_to_list(Label);
+ is_list(Label) -> Label;
+ true -> undefined
+ end,
+ application:set_env(common_test, test_label, TestLabel),
+
case code:which(test_server) of
non_existing ->
exit({error,no_path_to_test_server});
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 9d2a791406..1aa3a859ab 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -440,6 +440,15 @@ save_nodes(Nodes,Spec=#testspec{nodes=NodeRefs}) ->
list_nodes(#testspec{nodes=NodeRefs}) ->
lists:map(fun({_Ref,Node}) -> Node end, NodeRefs).
+
+
+%% ---------------------------------------------------------
+%% / \
+%% | When adding tests, remember to update valid_terms/0 also! |
+%% \ /
+%% ---------------------------------------------------------
+
+
%% Associate a "global" logdir with all nodes
%% except those with specific logdir, e.g:
%% ["/tmp/logdir",{ct1@finwe,"/tmp/logdir2"}]
@@ -465,6 +474,24 @@ add_tests([{logdir,Node,Dir}|Ts],Spec) ->
add_tests([{logdir,Dir}|Ts],Spec) ->
add_tests([{logdir,all_nodes,Dir}|Ts],Spec);
+%% --- label ---
+add_tests([{label,all_nodes,Lbl}|Ts],Spec) ->
+ Labels = Spec#testspec.label,
+ Tests = [{label,N,Lbl} || N <- list_nodes(Spec),
+ lists:keymember(ref2node(N,Spec#testspec.nodes),
+ 1,Labels) == false],
+ add_tests(Tests++Ts,Spec);
+add_tests([{label,Nodes,Lbl}|Ts],Spec) when is_list(Nodes) ->
+ Ts1 = separate(Nodes,label,[Lbl],Ts,Spec#testspec.nodes),
+ add_tests(Ts1,Spec);
+add_tests([{label,Node,Lbl}|Ts],Spec) ->
+ Labels = Spec#testspec.label,
+ Labels1 = [{ref2node(Node,Spec#testspec.nodes),Lbl} |
+ lists:keydelete(ref2node(Node,Spec#testspec.nodes),1,Labels)],
+ add_tests(Ts,Spec#testspec{label=Labels1});
+add_tests([{label,Lbl}|Ts],Spec) ->
+ add_tests([{label,all_nodes,Lbl}|Ts],Spec);
+
%% --- cover ---
add_tests([{cover,all_nodes,File}|Ts],Spec) ->
Tests = lists:map(fun(N) -> {cover,N,File} end, list_nodes(Spec)),
@@ -1013,22 +1040,32 @@ valid_terms() ->
{cover,3},
{config,2},
{config,3},
- {userconfig, 2},
- {userconfig, 3},
+ {userconfig,2},
+ {userconfig,3},
{alias,3},
{logdir,2},
{logdir,3},
{event_handler,2},
{event_handler,3},
{event_handler,4},
+ {multiply_timetraps,2},
+ {multiply_timetraps,3},
+ {scale_timetraps,2},
+ {scale_timetraps,3},
{include,2},
{include,3},
{suites,3},
{suites,4},
+ {groups,4},
+ {groups,5},
+ {groups,6},
{cases,4},
{cases,5},
{skip_suites,4},
{skip_suites,5},
+ {skip_groups,5},
+ {skip_groups,6},
+ {skip_groups,7},
{skip_cases,5},
{skip_cases,6}
].
diff --git a/lib/common_test/src/ct_util.hrl b/lib/common_test/src/ct_util.hrl
index 54eed29415..ee973f6220 100644
--- a/lib/common_test/src/ct_util.hrl
+++ b/lib/common_test/src/ct_util.hrl
@@ -30,6 +30,7 @@
-record(testspec, {spec_dir,
nodes=[],
init=[],
+ label=[],
logdir=["."],
cover=[],
config=[],
diff --git a/lib/common_test/test/ct_testspec_1_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE.erl
index fb7f3fc9a9..c3840d0425 100644
--- a/lib/common_test/test/ct_testspec_1_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_1_SUITE.erl
@@ -61,9 +61,18 @@ all(doc) ->
all(suite) ->
[all_suites, skip_all_suites,
- suite, skip_suite].
-% cases_1, skip_cases_1,
-% groups_1, skip_groups_1].
+ suite, skip_suite,
+ all_testcases, skip_all_testcases,
+ testcase, skip_testcase,
+ all_groups, skip_all_groups,
+ group, skip_group,
+ group_all_testcases, skip_group_all_testcases,
+ group_testcase, skip_group_testcase,
+ topgroup,
+ subgroup, skip_subgroup,
+ subgroup_all_testcases, skip_subgroup_all_testcases,
+ subgroup_testcase, skip_subgroup_testcase,
+ only_skip].
%%--------------------------------------------------------------------
%% TEST CASES
@@ -73,102 +82,279 @@ all(suite) ->
%%%
all_suites(Config) when is_list(Config) ->
- Self = all_suites,
DataDir = ?config(data_dir, Config),
TestDir = filename:join(DataDir, "suites_1"),
- TestSpec = [{suites,TestDir,all}],
- SpecFile = create_spec_file(?config(priv_dir, Config),
- Self, TestSpec),
-
- {Opts,ERPid} = setup({spec,SpecFile}, Config),
- ok = ct_test_support:run(Opts, Config),
- ok = ct_test_support:run(ct, run_testspec, [TestSpec], Config),
- Events = ct_test_support:get_events(ERPid, Config),
+ TestSpec = [{label,"all_suites"},
+ {suites,TestDir,all}],
- ct_test_support:log_events(Self,
- reformat(Events, ?eh),
- ?config(priv_dir, Config)),
-
- TestEvents = events_to_check(Self),
- ok = ct_test_support:verify_events(TestEvents, Events, Config).
+ setup_and_execute(all_suites, TestSpec, Config).
skip_all_suites(Config) when is_list(Config) ->
- Self = skip_all_suites,
DataDir = ?config(data_dir, Config),
TestDir = filename:join(DataDir, "suites_1"),
- TestSpec = [{suites,TestDir,all},
+ TestSpec = [{label,skip_all_suites},
+ {suites,TestDir,all},
{skip_suites,TestDir,all,"SKIPPED!"}],
- SpecFile = create_spec_file(?config(priv_dir, Config),
- Self, TestSpec),
- {Opts,ERPid} = setup({spec,SpecFile}, Config),
- ok = ct_test_support:run(Opts, Config),
- ok = ct_test_support:run(ct, run_testspec, [TestSpec], Config),
- Events = ct_test_support:get_events(ERPid, Config),
-
- ct_test_support:log_events(Self,
- reformat(Events, ?eh),
- ?config(priv_dir, Config)),
-
- TestEvents = events_to_check(Self),
- ok = ct_test_support:verify_events(TestEvents, Events, Config).
+ setup_and_execute(skip_all_suites, TestSpec, Config).
%%%-----------------------------------------------------------------
%%%
suite(Config) when is_list(Config) ->
- Self = suite,
DataDir = ?config(data_dir, Config),
TestDir = filename:join(DataDir, "suites_1"),
- TestSpec = [{suites,TestDir,simple_1_SUITE}],
- SpecFile = create_spec_file(?config(priv_dir, Config),
- Self, TestSpec),
+ TestSpec = [{label,undefined},
+ {suites,TestDir,simple_1_SUITE}],
- {Opts,ERPid} = setup({spec,SpecFile}, Config),
- ok = ct_test_support:run(Opts, Config),
- ok = ct_test_support:run(ct, run_testspec, [TestSpec], Config),
- Events = ct_test_support:get_events(ERPid, Config),
-
- ct_test_support:log_events(Self,
- reformat(Events, ?eh),
- ?config(priv_dir, Config)),
-
- TestEvents = events_to_check(Self),
- ok = ct_test_support:verify_events(TestEvents, Events, Config).
+ setup_and_execute(suite, TestSpec, Config).
skip_suite(Config) when is_list(Config) ->
- Self = skip_suite,
DataDir = ?config(data_dir, Config),
TestDir = filename:join(DataDir, "suites_1"),
TestSpec = [{suites,TestDir,[simple_1_SUITE,simple_2_SUITE]},
{skip_suites,TestDir,simple_1_SUITE,"SKIPPED!"}],
- SpecFile = create_spec_file(?config(priv_dir, Config),
- Self, TestSpec),
- {Opts,ERPid} = setup({spec,SpecFile}, Config),
- ok = ct_test_support:run(Opts, Config),
- ok = ct_test_support:run(ct, run_testspec, [TestSpec], Config),
- Events = ct_test_support:get_events(ERPid, Config),
+ setup_and_execute(skip_suite, TestSpec, Config).
- ct_test_support:log_events(Self,
- reformat(Events, ?eh),
- ?config(priv_dir, Config)),
+%%%-----------------------------------------------------------------
+%%%
- TestEvents = events_to_check(Self),
- ok = ct_test_support:verify_events(TestEvents, Events, Config).
+all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{cases,TestDir,simple_1_SUITE,all}],
+
+ setup_and_execute(all_testcases, TestSpec, Config).
+
+skip_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{suites,TestDir,[simple_1_SUITE]},
+ {skip_cases,TestDir,simple_1_SUITE,all,"SKIPPED!"}],
+
+ setup_and_execute(skip_all_testcases, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{cases,TestDir,simple_1_SUITE,tc1}],
+
+ setup_and_execute(testcase, TestSpec, Config).
+
+skip_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{cases,TestDir,simple_1_SUITE,[tc1,tc2]},
+ {cases,TestDir,simple_2_SUITE,[tc2,tc1]},
+ {skip_cases,TestDir,simple_1_SUITE,[tc1],"SKIPPED!"},
+ {skip_cases,TestDir,simple_2_SUITE,tc2,"SKIPPED!"}],
+
+ setup_and_execute(skip_testcase, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+all_groups(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,all}],
+
+ setup_and_execute(all_groups, TestSpec, Config).
+
+skip_all_groups(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,all},
+ {skip_groups,TestDir,groups_11_SUITE,all,"SKIPPED!"}],
+
+ setup_and_execute(skip_all_groups, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+group(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a}],
+
+ setup_and_execute(group, TestSpec, Config).
+
+skip_group(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,[test_group_1a,
+ test_group_1b]},
+ {skip_groups,TestDir,groups_11_SUITE,
+ [test_group_1b,test_group_2],"SKIPPED!"}],
+
+ setup_and_execute(skip_group, TestSpec, Config).
+
+
+%%%-----------------------------------------------------------------
+%%%
+
+group_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a,{cases,all}}],
+
+ setup_and_execute(group_all_testcases, TestSpec, Config).
+
+skip_group_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,[test_group_1a,
+ test_group_1b]},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1b,{cases,all},"SKIPPED!"},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1a,{cases,all},"SKIPPED!"}],
+
+ setup_and_execute(skip_group_all_testcases, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+group_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a,{cases,testcase_1a}}],
+
+ setup_and_execute(group_testcase, TestSpec, Config).
+
+skip_group_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a,
+ {cases,[testcase_1a,testcase_1b]}},
+ {groups,TestDir,groups_11_SUITE,test_group_1b,
+ {cases,[testcase_1a,testcase_1b]}},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1a,{cases,testcase_1b},"SKIPPED!"},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1b,{cases,[testcase_1a]},"SKIPPED!"}],
+
+ setup_and_execute(skip_group_testcase, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+topgroup(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_2}],
+
+ setup_and_execute(topgroup, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+subgroup(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_3}],
+
+ setup_and_execute(subgroup, TestSpec, Config).
+
+skip_subgroup(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,[test_group_2]},
+ {skip_groups,TestDir,groups_12_SUITE,
+ test_group_3,"SKIPPED!"}],
+
+ setup_and_execute(skip_subgroup, TestSpec, Config).
%%%-----------------------------------------------------------------
%%%
+subgroup_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,
+ test_group_5,{cases,all}}],
+
+ setup_and_execute(subgroup_all_testcases, TestSpec, Config).
+
+skip_subgroup_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_4},
+ {skip_groups,TestDir,groups_12_SUITE,
+ test_group_5,{cases,all},"SKIPPED!"}],
+
+ setup_and_execute(skip_subgroup_all_testcases, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+subgroup_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,
+ test_group_7,{cases,testcase_7a}}],
+
+ setup_and_execute(subgroup_testcase, TestSpec, Config).
+
+skip_subgroup_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_5},
+ {skip_groups,TestDir,groups_12_SUITE,
+ test_group_7,{cases,[testcase_7a,testcase_7b]},"SKIPPED!"}],
+
+ setup_and_execute(skip_subgroup_testcase, TestSpec, Config).
+
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
+setup_and_execute(TCName, TestSpec, Config) ->
+ SpecFile = create_spec_file(?config(priv_dir, Config),
+ TCName, TestSpec),
+ TestTerms =
+ case lists:keymember(label, 1, TestSpec) of
+ true -> [{spec,SpecFile}];
+ false -> [{spec,SpecFile},{label,TCName}]
+ end,
+ {Opts,ERPid} = setup(TestTerms, Config),
+ ok = ct_test_support:run(Opts, Config),
+ ok = ct_test_support:run(ct, run_testspec, [TestSpec], Config),
+ Events = ct_test_support:get_events(ERPid, Config),
+
+ ct_test_support:log_events(TCName,
+ reformat(Events, ?eh),
+ ?config(priv_dir, Config)),
+
+ TestEvents = events_to_check(TCName),
+ ok = ct_test_support:verify_events(TestEvents, Events, Config).
+
create_spec_file(SpecDir, TCName, TestSpec) ->
FileName = filename:join(SpecDir,
atom_to_list(TCName)++".spec"),
@@ -179,11 +365,13 @@ create_spec_file(SpecDir, TCName, TestSpec) ->
[FileName,FileName]),
FileName.
-setup(Test, Config) ->
+setup(Test, Config) when is_tuple(Test) ->
+ setup([Test], Config);
+setup(Tests, Config) ->
Opts0 = ct_test_support:get_opts(Config),
Level = ?config(trace_level, Config),
EvHArgs = [{cbm,ct_test_support},{trace_level,Level}],
- Opts = Opts0 ++ [Test,{event_handler,{?eh,EvHArgs}}],
+ Opts = Opts0 ++ Tests ++ [{event_handler,{?eh,EvHArgs}}],
ERPid = ct_test_support:start_event_receiver(Config),
{Opts,ERPid}.