Diffstat (limited to 'lib/common_test')
-rw-r--r--  lib/common_test/Makefile | 6
-rw-r--r--  lib/common_test/doc/src/run_test.xml | 13
-rw-r--r--  lib/common_test/doc/src/run_test_chapter.xml | 118
-rw-r--r--  lib/common_test/priv/Makefile.in | 20
-rw-r--r--  lib/common_test/priv/tile1.jpg | bin 0 -> 34734 bytes
-rw-r--r--  lib/common_test/src/ct.erl | 20
-rw-r--r--  lib/common_test/src/ct_framework.erl | 299
-rw-r--r--  lib/common_test/src/ct_logs.erl | 179
-rw-r--r--  lib/common_test/src/ct_run.erl | 303
-rw-r--r--  lib/common_test/src/ct_testspec.erl | 66
-rw-r--r--  lib/common_test/src/ct_util.erl | 1
-rw-r--r--  lib/common_test/src/ct_util.hrl | 1
-rw-r--r--  lib/common_test/test/Makefile | 1
-rw-r--r--  lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_11_SUITE.erl | 2
-rw-r--r--  lib/common_test/test/ct_groups_test_2_SUITE.erl | 24
-rw-r--r--  lib/common_test/test/ct_groups_test_2_SUITE_data/specs/groups_2.1.spec | 26
-rw-r--r--  lib/common_test/test/ct_test_support.erl | 7
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE.erl | 433
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_11_SUITE.erl | 281
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl | 344
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_21_SUITE.erl | 281
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl | 314
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_1_SUITE.erl | 146
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_2_SUITE.erl | 146
-rw-r--r--  lib/common_test/vsn.mk | 2
25 files changed, 2711 insertions, 322 deletions
diff --git a/lib/common_test/Makefile b/lib/common_test/Makefile
index e16efd0c9d..aecec1a50d 100644
--- a/lib/common_test/Makefile
+++ b/lib/common_test/Makefile
@@ -25,12 +25,12 @@ include $(ERL_TOP)/make/$(TARGET)/otp.mk
#
ifeq ($(findstring linux,$(TARGET)),linux)
-SUB_DIRECTORIES = doc/src src
+SUB_DIRECTORIES = doc/src src priv
else
ifeq ($(findstring solaris,$(TARGET)),solaris)
-SUB_DIRECTORIES = doc/src src
+SUB_DIRECTORIES = doc/src src priv
else
-SUB_DIRECTORIES = doc/src src
+SUB_DIRECTORIES = doc/src src priv
endif
endif
diff --git a/lib/common_test/doc/src/run_test.xml b/lib/common_test/doc/src/run_test.xml
index d609c4287f..2f0a94afba 100644
--- a/lib/common_test/doc/src/run_test.xml
+++ b/lib/common_test/doc/src/run_test.xml
@@ -45,6 +45,11 @@
flags start an Erlang node prepared for running Common Test in a
particular mode.</p>
+ <p>There is an interface function that corresponds to this program,
+ called <c>ct:run_test/1</c>, for starting Common Test from the Erlang
+ shell (or an Erlang program). Please see the <c>ct</c> man page for
+ details.</p>
+
<p><c>run_test</c> also accepts Erlang emulator flags. These are used
when <c>run_test</c> calls <c>erl</c> to start the Erlang node
(making it possible to e.g. add directories to the code server path,
@@ -72,6 +77,8 @@
<p>it prints all valid start flags to stdout.</p>
</description>
+ <marker id="run_test"></marker>
+
<section>
<title>Run tests from command line</title>
<pre>
@@ -83,6 +90,7 @@
[-userconfig CallbackModule1 ConfigString1 and CallbackModule2
ConfigString2 and .. and CallbackModuleN ConfigStringN]
[-decrypt_key Key] | [-decrypt_file KeyFile]
+ [-label Label]
[-logdir LogDir]
[-silent_connections [ConnType1 ConnType2 .. ConnTypeN]]
[-stylesheet CSSFile]
@@ -107,6 +115,7 @@
[-userconfig CallbackModule1 ConfigString1 and CallbackModule2
ConfigString2 and .. and CallbackModuleN ConfigStringN]
[-decrypt_key Key] | [-decrypt_file KeyFile]
+ [-label Label]
[-logdir LogDir]
[-allow_user_terms]
[-silent_connections [ConnType1 ConnType2 .. ConnTypeN]]
@@ -128,12 +137,12 @@
<title>Run tests in web based GUI</title>
<pre>
run_test -vts [-browser Browser]
+ [-dir TestDir1 TestDir2 .. TestDirN] |
+ [-suite Suite [[-group Group] [-case Case]]]
[-config ConfigFile1 ConfigFile2 .. ConfigFileN]
[-userconfig CallbackModule1 ConfigString1 and CallbackModule2
ConfigString2 and .. and CallbackModuleN ConfigStringN]
[-decrypt_key Key] | [-decrypt_file KeyFile]
- [-dir TestDir1 TestDir2 .. TestDirN] |
- [-suite Suite [[-group Group] [-case Case]]]
[-include InclDir1 InclDir2 .. InclDirN]
[-no_auto_compile]
[-muliply_timetraps Multiplier]
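
The ct:run_test/1 interface referenced in the documentation above takes option tuples corresponding to these command line flags. A minimal sketch of a labelled run started from the Erlang shell; the suite path, label and log directory are hypothetical examples, not part of this patch:

    %% roughly equivalent to: run_test -suite test/my_SUITE -label nightly -logdir logs
    ct:run_test([{suite, "test/my_SUITE"},
                 {label, nightly},
                 {logdir, "logs"}]).
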
diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml
index 207df7f5b5..1efff25f5b 100644
--- a/lib/common_test/doc/src/run_test_chapter.xml
+++ b/lib/common_test/doc/src/run_test_chapter.xml
@@ -21,7 +21,7 @@
</legalnotice>
- <title>Running Test Suites</title>
+ <title>Running Tests</title>
<prepared>Peter Andersson, Kenneth Lundin</prepared>
<docno></docno>
<date></date>
@@ -130,6 +130,8 @@
<p>Other flags that may be used with <c>run_test</c>:</p>
<list>
<item><c><![CDATA[-logdir <dir>]]></c>, specifies where the HTML log files are to be written.</item>
+ <item><c><![CDATA[-label <name_of_test_run>]]></c>, associates the test run with a name that gets printed
+ in the overview HTML log files.</item>
<item><c>-refresh_logs</c>, refreshes the top level HTML index files.</item>
<item><c>-vts</c>, start web based GUI (see below).</item>
<item><c>-shell</c>, start interactive shell mode (see below).</item>
@@ -335,43 +337,72 @@
<marker id="test_specifications"></marker>
<title>Using test specifications</title>
- <p>The most expressive way to specify what to test is to use a so
- called test specification. A test specification is a sequence of
- Erlang terms. The terms may be declared in a text file or passed
- to the test server at runtime as a list (see <c>run_testspec/1</c>
- in the manual page for <c>ct</c>). There are two general types
- of terms: configuration terms and test specification terms.</p>
- <p>With configuration terms it is possible to import configuration
- data (similar to <c>run_test -config</c>), specify HTML log
- directories (similar to <c>run_test -logdir</c>), give aliases
- to test nodes and test directories (to make a specification
- easier to read and maintain), enable code coverage analysis
- (see the <seealso marker="cover_chapter#cover">Code Coverage
- Analysis</seealso> chapter) and specify event_handler plugins
+ <p>The most flexible way to specify what to test is to use a so
+ called test specification. A test specification is a sequence of
+ Erlang terms. The terms may be declared in a text file or passed
+ to the test server at runtime as a list
+ (see <c>run_testspec/1</c> in the manual page
+ for <c>ct</c>). There are two general types of terms:
+ configuration terms and test specification terms.</p>
+ <p>With configuration terms it is possible to e.g. label the test
+ run (similar to <c>run_test -label</c>), evaluate arbitrary expressions
+ before starting a test, import configuration
+ data (similar to
+ <c>run_test -config/-userconfig</c>), specify HTML log directories (similar
+ to
+ <c>run_test -logdir</c>), give aliases to test nodes and test
+ directories (to make a specification easier to read and
+ maintain), enable code coverage analysis (see
+ the <seealso marker="cover_chapter#cover">Code Coverage
+ Analysis</seealso> chapter) and specify event_handler plugins
(see the <seealso marker="event_handler_chapter#event_handling">
- Event Handling</seealso> chapter). There is also a term
- for specifying include directories that should be passed on
- to the compiler when automatic compilation is performed
- (similar to <c>run_test -include</c>, see above).</p>
- <p>With test specification terms it is possible to state exactly which
- tests should run and in which order. A test term specifies either
- one or more suites or one or more test cases. An arbitrary number of test
- terms may be declared in sequence. A test term can also specify one or
- more test suites or test cases to be skipped. Skipped suites and cases
- are not executed and show up in the HTML test log as SKIPPED.</p>
-
- <note><p>It is not yet possible to specify test case groups in
- test specifications. This will be supported in a soon upcoming
- release.</p></note>
+ Event Handling</seealso> chapter). There is also a term for
+ specifying include directories that should be passed on to the
+ compiler when automatic compilation is performed (similar
+ to <c>run_test -include</c>, see above).</p>
+ <p>With test specification terms it is possible to state exactly
+ which tests should run and in which order. A test term specifies
+ either one or more suites, one or more test case groups, or one
+ or more test cases in a group or suite.</p>
+ <p>An arbitrary number of test terms may be declared in sequence.
+ Common Test will compile the terms into one or more tests to be
+ performed in one resulting test run. Note that a term that
+ specifies a set of test cases will "swallow" one that only
+ specifies a subset of these cases. E.g. the result of merging
+ one term that specifies that all cases in suite S should be
+ executed, with another term specifying only test case X and Y in
+ S, is a test of all cases in S. However, if a term specifying
+ test case X and Y in S is merged with a term specifying case Z
+ in S, the result is a test of X, Y and Z in S.</p>
+ <p>A test term can also specify one or more test suites, groups,
+ or test cases to be skipped. Skipped suites, groups and cases
+ are not executed and show up in the HTML test log files as
+ SKIPPED.</p>
+ <p>When a test case group is specified, the resulting test
+ executes the
+ <c>init_per_group</c> function, followed by all test cases and
+ sub groups (including their configuration functions), and
+ finally the <c>end_per_group</c> function. Also if particular
+ test cases in a group are specified, <c>init_per_group</c>
+ and <c>end_per_group</c> for the group in question are
+ called. If a group which is defined (in <c>Suite:groups/0</c>) to
+ be a sub group of another group, is specified (or particular test
+ cases of a sub group are), Common Test will call the configuration
+ functions for the top level groups as well as for the sub group
+ in question (making it possible to pass configuration data all
+ the way from <c>init_per_suite</c> down to the test cases in the
+ sub group).</p>
<p>Below is the test specification syntax. Test specifications can
- be used to run tests both in a single test host environment and in
- a distributed Common Test environment (Large Scale Testing). The init term,
- as well as node parameters, are only relevant in the latter (see the
- <seealso marker="ct_master_chapter#test_specifications">Large Scale Testing</seealso>
- chapter for information). For details on the event_handler term, see the
- <seealso marker="event_handler_chapter#event_handling">Event Handling</seealso>
- chapter.</p>
+ be used to run tests both in a single test host environment and
+ in a distributed Common Test environment (Large Scale
+ Testing). The node parameters in the init term are only
+ relevant in the latter (see the
+ <seealso marker="ct_master_chapter#test_specifications">Large
+ Scale Testing</seealso> chapter for information). For details on
+ the event_handler term, see the
+ <seealso marker="event_handler_chapter#event_handling">Event
+ Handling</seealso> chapter.</p>
<p>Config terms:</p>
<pre>
{node, NodeAlias, Node}.
@@ -379,11 +410,17 @@
{init, InitOptions}.
{init, [NodeAlias], InitOptions}.
+ {label, Label}.
+ {label, NodeRefs, Label}.
+
{multiply_timetraps, N}.
+ {multiply_timetraps, NodeRefs, N}.
+
{scale_timetraps, Bool}.
+ {scale_timetraps, NodeRefs, Bool}.
{cover, CoverSpecFile}.
- {cover, NodeRef, CoverSpecFile}.
+ {cover, NodeRefs, CoverSpecFile}.
{include, IncludeDirs}.
{include, NodeRefs, IncludeDirs}.
@@ -409,6 +446,12 @@
{suites, DirRef, Suites}.
{suites, NodeRefs, DirRef, Suites}.
+ {groups, DirRef, Suite, Groups}.
+ {groups, NodeRefs, DirRef, Suite, Groups}.
+
+ {groups, DirRef, Suite, Group, {cases,Cases}}.
+ {groups, NodeRefs, DirRef, Suite, Group, {cases,Cases}}.
+
{cases, DirRef, Suite, Cases}.
{cases, NodeRefs, DirRef, Suite, Cases}.
@@ -437,6 +480,9 @@
InitArgs = [term()]
DirRef = DirAlias | Dir
Suites = atom() | [atom()] | all
+ Suite = atom()
+ Groups = atom() | [atom()] | all
+ Group = atom()
Cases = atom() | [atom()] | all
Comment = string() | ""
</pre>
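
To illustrate the new terms documented above, a minimal sketch of a test specification file; the directory, group and case names are hypothetical, while the suite names are taken from the test data added by this patch:

    {label, "group_spec_demo"}.
    {logdir, "../logs"}.

    %% all groups and test cases in groups_11_SUITE
    {groups, "../groups_1", groups_11_SUITE, all}.

    %% only cases tc1 and tc2 in group test_group_2 of groups_12_SUITE
    {groups, "../groups_1", groups_12_SUITE, test_group_2, {cases, [tc1, tc2]}}.
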
diff --git a/lib/common_test/priv/Makefile.in b/lib/common_test/priv/Makefile.in
index f144847a29..6372bbc8d5 100644
--- a/lib/common_test/priv/Makefile.in
+++ b/lib/common_test/priv/Makefile.in
@@ -56,8 +56,9 @@ ifneq ($(findstring win32,$(TARGET)),win32)
#
# Files
#
-FILES = vts.tool run_test.in
-SCRIPTS = install.sh
+FILES =
+SCRIPTS =
+IMAGES = tile1.jpg
#
# Rules
@@ -83,14 +84,12 @@ include $(ERL_TOP)/make/otp_release_targets.mk
ifeq ($(XNIX),true)
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/priv/bin
- $(INSTALL_SCRIPT) $(SCRIPTS) $(RELSYSDIR)
- $(INSTALL_DATA) $(FILES) $(RELSYSDIR)/priv
+ $(INSTALL_DIR) $(RELSYSDIR)/priv
+ $(INSTALL_DATA) $(FILES) $(IMAGES) $(RELSYSDIR)/priv
else
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/priv/bin
- $(INSTALL_SCRIPT) $(SCRIPTS) $(RELSYSDIR)
- $(INSTALL_DATA) $(FILES) $(RELSYSDIR)/priv
+ $(INSTALL_DIR) $(RELSYSDIR)/priv
+ $(INSTALL_DATA) $(FILES) $(IMAGES) $(RELSYSDIR)/priv
endif
release_docs_spec:
@@ -105,6 +104,7 @@ else
# Files
#
FILES = vts.tool
+IMAGES = tile1.jpg
#
# Rules
@@ -123,8 +123,8 @@ clean:
include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/priv/bin
- $(INSTALL_DATA) $(FILES) $(RELSYSDIR)/priv
+ $(INSTALL_DIR) $(RELSYSDIR)/priv
+ $(INSTALL_DATA) $(FILES) $(IMAGES) $(RELSYSDIR)/priv
release_docs_spec:
diff --git a/lib/common_test/priv/tile1.jpg b/lib/common_test/priv/tile1.jpg
new file mode 100644
index 0000000000..8749383716
--- /dev/null
+++ b/lib/common_test/priv/tile1.jpg
Binary files differ
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index 0d82a86e7d..8ae175f10d 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -138,10 +138,10 @@ run(TestDirs) ->
%%%-----------------------------------------------------------------
%%% @spec run_test(Opts) -> Result
%%% Opts = [OptTuples]
-%%% OptTuples = {config,CfgFiles} | {dir,TestDirs} | {suite,Suites} |
-%%% {userconfig, UserConfig} |
-%%% {testcase,Cases} | {group,Groups} | {spec,TestSpecs} |
-%%% {allow_user_terms,Bool} | {logdir,LogDir} |
+%%% OptTuples = {dir,TestDirs} | {suite,Suites} | {group,Groups} |
+%%% {testcase,Cases} | {spec,TestSpecs} | {label,Label} |
+%%% {config,CfgFiles} | {userconfig, UserConfig} |
+%%% {allow_user_terms,Bool} | {logdir,LogDir} |
%%% {silent_connections,Conns} | {stylesheet,CSSFile} |
%%% {cover,CoverSpecFile} | {step,StepOpts} |
%%% {event_handler,EventHandlers} | {include,InclDirs} |
@@ -149,15 +149,16 @@ run(TestDirs) ->
%%% {repeat,N} | {duration,DurTime} | {until,StopTime} |
%%% {force_stop,Bool} | {decrypt,DecryptKeyOrFile} |
%%% {refresh_logs,LogDir} | {basic_html,Bool}
-%%% CfgFiles = [string()] | string()
-%%% UserConfig = [{CallbackMod,CfgStrings}] | {CallbackMod,CfgStrings}
-%%% CallbackMod = atom()
-%%% CfgStrings = [string()] | string()
%%% TestDirs = [string()] | string()
%%% Suites = [string()] | string()
%%% Cases = [atom()] | atom()
%%% Groups = [atom()] | atom()
%%% TestSpecs = [string()] | string()
+%%% Label = string() | atom()
+%%% CfgFiles = [string()] | string()
+%%% UserConfig = [{CallbackMod,CfgStrings}] | {CallbackMod,CfgStrings}
+%%% CallbackMod = atom()
+%%% CfgStrings = [string()] | string()
%%% LogDir = string()
%%% Conns = all | [atom()]
%%% CSSFile = string()
@@ -177,7 +178,8 @@ run(TestDirs) ->
%%% DecryptFile = string()
%%% Result = [TestResult] | {error,Reason}
%%% @doc Run tests as specified by the combination of options in <code>Opts</code>.
-%%% The options are the same as those used with the <code>run_test</code> program.
+%%% The options are the same as those used with the
+%%% <seealso marker="run_test#run_test"><code>run_test</code></seealso> program.
%%% Note that here a <code>TestDir</code> can be used to point out the path to
%%% a <code>Suite</code>. Note also that the option <code>testcase</code>
%%% corresponds to the <code>-case</code> option in the <code>run_test</code>
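
As a sketch of the reordered options above, selecting a test case group (and a single case in it) from the Erlang shell; the directory, suite, group and case names are hypothetical:

    ct:run_test([{dir, "test"},
                 {suite, "my_SUITE"},
                 {group, my_group},
                 {testcase, my_tc}]).
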
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index 3dd1026f13..f2ca023cff 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -27,8 +27,12 @@
-export([init_tc/3, end_tc/3, get_suite/2, report/2, warn/1]).
-export([error_notification/4]).
+-export([overview_html_header/1]).
+
-export([error_in_suite/1, ct_init_per_group/2, ct_end_per_group/2]).
+-export([make_all_conf/3, make_conf/5]).
+
-include("ct_event.hrl").
-include("ct_util.hrl").
@@ -101,7 +105,8 @@ init_tc1(Mod,Func,[Config0],DoInit) when is_list(Config0) ->
[{saved_config,{LastFunc,SavedConfig}} |
lists:keydelete(saved_config,1,Config0)];
{{LastSuite,InitOrEnd},SavedConfig} when InitOrEnd == init_per_suite ;
- InitOrEnd == end_per_suite -> % last suite
+ InitOrEnd == end_per_suite ->
+ %% last suite
[{saved_config,{LastSuite,SavedConfig}} |
lists:keydelete(saved_config,1,Config0)];
undefined ->
@@ -649,7 +654,7 @@ get_suite(Mod, all) ->
{'EXIT',_} ->
get_all(Mod, []);
GroupDefs when is_list(GroupDefs) ->
- case catch check_groups(Mod, GroupDefs) of
+ case catch find_groups(Mod, all, all, GroupDefs) of
{error,_} = Error ->
%% this makes test_server call error_in_suite as first
%% (and only) test case so we can report Error properly
@@ -664,102 +669,193 @@ get_suite(Mod, all) ->
%%!============================================================
%%! Note: The handling of sequences in get_suite/2 and get_all/2
-%%! is deprecated and should be removed after OTP R13!
+%%! is deprecated and should be removed at some point...
%%!============================================================
-get_suite(Mod, Name) ->
- %% Name may be name of a group or a test case. If it's a group,
- %% it should be expanded to list of cases (in a conf term)
+%% group
+get_suite(Mod, Group={conf,Props,_Init,TCs,_End}) ->
+ Name = proplists:get_value(name, Props),
case catch apply(Mod, groups, []) of
{'EXIT',_} ->
- get_seq(Mod, Name);
+ [Group];
GroupDefs when is_list(GroupDefs) ->
- case catch check_groups(Mod, GroupDefs) of
+ case catch find_groups(Mod, Name, TCs, GroupDefs) of
{error,_} = Error ->
%% this makes test_server call error_in_suite as first
%% (and only) test case so we can report Error properly
[{?MODULE,error_in_suite,[[Error]]}];
+ [] ->
+ {error,{invalid_group_spec,Name}};
ConfTests ->
-
- %%! --- Thu Jun 3 19:13:22 2010 --- peppe was here!
- %%! HEERE!
- %%! Must be able to search recursively for group Name,
- %%! this only handles top level groups!
-
- FindConf = fun({conf,Props,_,_,_}) ->
- case proplists:get_value(name, Props) of
- Name -> true;
- _ -> false
- end
- end,
- case lists:filter(FindConf, ConfTests) of
- [] -> % must be a test case
- get_seq(Mod, Name);
- [ConfTest|_] ->
- ConfTest
+ case lists:member(skipped, Props) of
+ true ->
+ %% a *subgroup* specified *only* as skipped (and not
+ %% as an explicit test) should not be returned, or
+ %% init/end functions for top groups will be executed
+ case catch proplists:get_value(name, element(2, hd(ConfTests))) of
+ Name -> % top group
+ ConfTests;
+ _ ->
+ []
+ end;
+ false ->
+ ConfTests
end
end;
_ ->
E = "Bad return value from "++atom_to_list(Mod)++":groups/0",
[{?MODULE,error_in_suite,[[{error,list_to_atom(E)}]]}]
- end.
+ end;
-check_groups(_Mod, []) ->
- [];
-check_groups(Mod, Defs) ->
- check_groups(Mod, Defs, Defs, []).
+%% testcase
+get_suite(Mod, Name) ->
+ get_seq(Mod, Name).
-check_groups(Mod, [TC | Gs], Defs, Levels) when is_atom(TC), length(Levels)>0 ->
- [TC | check_groups(Mod, Gs, Defs, Levels)];
+%%%-----------------------------------------------------------------
-check_groups(Mod, [{group,SubName} | Gs], Defs, Levels) when is_atom(SubName) ->
- case lists:member(SubName, Levels) of
- true ->
- E = "Cyclic reference to group "++atom_to_list(SubName)++
- " in "++atom_to_list(Mod)++":groups/0",
- throw({error,list_to_atom(E)});
- false ->
- case find_group(Mod, SubName, Defs) of
- {error,_} = Error ->
- throw(Error);
- G ->
- [check_groups(Mod, [G], Defs, Levels) |
- check_groups(Mod, Gs, Defs, Levels)]
- end
+find_groups(Mod, Name, TCs, GroupDefs) ->
+ Found = find(Mod, Name, TCs, GroupDefs, [], GroupDefs, false),
+ Trimmed = trim(Found),
+ delete_subs(Trimmed, Trimmed).
+
+find(Mod, all, _TCs, [{Name,Props,Tests} | Gs], Known, Defs, _) ->
+ cyclic_test(Mod, Name, Known),
+ [make_conf(Mod, Name, Props,
+ find(Mod, all, all, Tests, [Name | Known], Defs, true)) |
+ find(Mod, all, all, Gs, [], Defs, true)];
+
+find(Mod, Name, TCs, [{Name,Props,Tests} | _Gs], Known, Defs, false)
+ when is_atom(Name), is_list(Props), is_list(Tests) ->
+ cyclic_test(Mod, Name, Known),
+ case TCs of
+ all ->
+ [make_conf(Mod, Name, Props,
+ find(Mod, Name, TCs, Tests, [Name | Known], Defs, true))];
+ _ ->
+ Tests1 = [TC || TC <- TCs,
+ lists:member(TC, Tests) == true],
+ [make_conf(Mod, Name, Props, Tests1)]
end;
-check_groups(Mod, [{Name,Tests} | Gs], Defs, Levels) when is_atom(Name),
- is_list(Tests) ->
- check_groups(Mod, [{Name,[],Tests} | Gs], Defs, Levels);
-
-check_groups(Mod, [{Name,Props,Tests} | Gs], Defs, Levels) when is_atom(Name),
- is_list(Props),
- is_list(Tests) ->
- {TestSpec,Levels1} =
- case Levels of
- [] ->
- {check_groups(Mod, Tests, Defs, [Name]),[]};
- _ ->
- {check_groups(Mod, Tests, Defs, [Name|Levels]),Levels}
- end,
- [make_conf(Mod, Name, Props, TestSpec) |
- check_groups(Mod, Gs, Defs, Levels1)];
+find(Mod, Name, TCs, [{Name1,Props,Tests} | Gs], Known, Defs, false)
+ when is_atom(Name1), is_list(Props), is_list(Tests) ->
+ cyclic_test(Mod, Name1, Known),
+ [make_conf(Mod, Name1, Props,
+ find(Mod, Name, TCs, Tests, [Name1 | Known], Defs, false)) |
+ find(Mod, Name, TCs, Gs, [], Defs, false)];
+
+find(Mod, Name, _TCs, [{Name,_Props,_Tests} | _Gs], _Known, _Defs, true)
+ when is_atom(Name) ->
+ E = "Duplicate groups named "++atom_to_list(Name)++" in "++
+ atom_to_list(Mod)++":groups/0",
+ throw({error,list_to_atom(E)});
+
+find(Mod, Name, all, [{Name1,Props,Tests} | Gs], Known, Defs, true)
+ when is_atom(Name1), is_list(Props), is_list(Tests) ->
+ cyclic_test(Mod, Name1, Known),
+ [make_conf(Mod, Name1, Props,
+ find(Mod, Name, all, Tests, [Name1 | Known], Defs, true)) |
+ find(Mod, Name, all, Gs, [], Defs, true)];
+
+find(Mod, Name, TCs, [{group,Name1} | Gs], Known, Defs, Found) when is_atom(Name1) ->
+ find(Mod, Name, TCs, [expand(Mod, Name1, Defs) | Gs], Known, Defs, Found);
+
+find(Mod, Name, TCs, [{Name1,Tests} | Gs], Known, Defs, Found)
+ when is_atom(Name1), is_list(Tests) ->
+ find(Mod, Name, TCs, [{Name1,[],Tests} | Gs], Known, Defs, Found);
+
+find(Mod, Name, TCs, [TC | Gs], Known, Defs, false) when is_atom(TC) ->
+ find(Mod, Name, TCs, Gs, Known, Defs, false);
-check_groups(Mod, [BadTerm | _Gs], _Defs, Levels) ->
- Where = if length(Levels) == 0 ->
+find(Mod, Name, TCs, [TC | Gs], Known, Defs, true) when is_atom(TC) ->
+ [TC | find(Mod, Name, TCs, Gs, Known, Defs, true)];
+
+find(Mod, _Name, _TCs, [BadTerm | _Gs], Known, _Defs, _Found) ->
+ Where = if length(Known) == 0 ->
atom_to_list(Mod)++":groups/0";
true ->
- "group "++atom_to_list(lists:last(Levels))++
+ "group "++atom_to_list(lists:last(Known))++
" in "++atom_to_list(Mod)++":groups/0"
end,
Term = io_lib:format("~p", [BadTerm]),
E = "Bad term "++lists:flatten(Term)++" in "++Where,
throw({error,list_to_atom(E)});
-check_groups(_Mod, [], _Defs, _) ->
+find(_Mod, _Name, _TCs, [], _Known, _Defs, false) ->
+ ['$NOMATCH'];
+
+find(_Mod, _Name, _TCs, [], _Known, _Defs, _Found) ->
+ [].
+
+delete_subs([Conf | Confs], All) ->
+ All1 = delete_conf(Conf, All),
+ case is_sub(Conf, All1) of
+ true ->
+ delete_subs(Confs, All1);
+ false ->
+ delete_subs(Confs, All)
+ end;
+
+delete_subs([], All) ->
+ All.
+
+delete_conf({conf,Props,_,_,_}, Confs) ->
+ Name = proplists:get_value(name, Props),
+ [Conf || Conf = {conf,Props0,_,_,_} <- Confs,
+ Name =/= proplists:get_value(name, Props0)].
+
+is_sub({conf,Props,_,_,_}=Conf, [{conf,_,_,Tests,_} | Confs]) ->
+ Name = proplists:get_value(name, Props),
+ case lists:any(fun({conf,Props0,_,_,_}) ->
+ case proplists:get_value(name, Props0) of
+ N when N == Name ->
+ true;
+ _ ->
+ false
+ end;
+ (_) ->
+ false
+ end, Tests) of
+ true ->
+ true;
+ false ->
+ is_sub(Conf, Tests) or is_sub(Conf, Confs)
+ end;
+
+is_sub(Conf, [_TC | Tests]) ->
+ is_sub(Conf, Tests);
+
+is_sub(_Conf, []) ->
+ false.
+
+trim(['$NOMATCH' | Tests]) ->
+ trim(Tests);
+
+trim([{conf,Props,Init,Tests,End} | Confs]) ->
+ case trim(Tests) of
+ [] ->
+ trim(Confs);
+ Trimmed ->
+ [{conf,Props,Init,Trimmed,End} | trim(Confs)]
+ end;
+
+trim([TC | Tests]) ->
+ [TC | trim(Tests)];
+
+trim([]) ->
[].
-find_group(Mod, Name, Defs) ->
+cyclic_test(Mod, Name, Names) ->
+ case lists:member(Name, Names) of
+ true ->
+ E = "Cyclic reference to group "++atom_to_list(Name)++
+ " in "++atom_to_list(Mod)++":groups/0",
+ throw({error,list_to_atom(E)});
+ false ->
+ ok
+ end.
+
+expand(Mod, Name, Defs) ->
case lists:keysearch(Name, 1, Defs) of
{value,Def} ->
Def;
@@ -769,7 +865,48 @@ find_group(Mod, Name, Defs) ->
throw({error,list_to_atom(E)})
end.
+make_all_conf(Dir, Mod, _Props) ->
+ case code:is_loaded(Mod) of
+ false ->
+ code:load_abs(filename:join(Dir,atom_to_list(Mod)));
+ _ ->
+ ok
+ end,
+ make_all_conf(Mod).
+
+make_all_conf(Mod) ->
+ case catch apply(Mod, groups, []) of
+ {'EXIT',_} ->
+ {error,{invalid_group_definition,Mod}};
+ GroupDefs when is_list(GroupDefs) ->
+ case catch find_groups(Mod, all, all, GroupDefs) of
+ {error,_} = Error ->
+ %% this makes test_server call error_in_suite as first
+ %% (and only) test case so we can report Error properly
+ [{?MODULE,error_in_suite,[[Error]]}];
+ [] ->
+ {error,{invalid_group_spec,Mod}};
+ ConfTests ->
+ [{conf,Props,Init,all,End} || {conf,Props,Init,_,End} <- ConfTests]
+ end
+ end.
+
+make_conf(Dir, Mod, Name, Props, TestSpec) ->
+ case code:is_loaded(Mod) of
+ false ->
+ code:load_abs(filename:join(Dir,atom_to_list(Mod)));
+ _ ->
+ ok
+ end,
+ make_conf(Mod, Name, Props, TestSpec).
+
make_conf(Mod, Name, Props, TestSpec) ->
+ case code:is_loaded(Mod) of
+ false ->
+ code:load_file(Mod);
+ _ ->
+ ok
+ end,
{InitConf,EndConf} =
case erlang:function_exported(Mod,init_per_group,2) of
true ->
@@ -780,6 +917,7 @@ make_conf(Mod, Name, Props, TestSpec) ->
end,
{conf,[{name,Name}|Props],InitConf,TestSpec,EndConf}.
+%%%-----------------------------------------------------------------
get_all(Mod, ConfTests) ->
case catch apply(Mod, all, []) of
@@ -1095,4 +1233,31 @@ add_data_dir(File,Config) when is_list(File) ->
File
end.
+%%%-----------------------------------------------------------------
+%%% @spec overview_html_header(TestName) -> Header
+overview_html_header(TestName) ->
+ TestName1 = lists:flatten(io_lib:format("~p", [TestName])),
+ Label = case application:get_env(common_test, test_label) of
+ {ok,Lbl} when Lbl =/= undefined ->
+ "<H1><FONT color=\"green\">" ++ Lbl ++ "</FONT></H1>\n";
+ _ ->
+ ""
+ end,
+ Bgr = case ct_logs:basic_html() of
+ true ->
+ "";
+ false ->
+ CTPath = code:lib_dir(common_test),
+ TileFile = filename:join(filename:join(CTPath,"priv"),"tile1.jpg"),
+ " background=\"" ++ TileFile ++ "\""
+ end,
+
+ ["<html>\n",
+ "<head><title>Test ", TestName1, " results</title>\n",
+ "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n",
+ "</head>\n",
+ "<body", Bgr, " bgcolor=\"white\" text=\"black\" ",
+ "link=\"blue\" vlink=\"purple\" alink=\"red\">\n",
+ Label,
+ "<H2>Results from test ", TestName1, "</H2>\n"].
diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl
index 5683d06aa7..f8ae7202e6 100644
--- a/lib/common_test/src/ct_logs.erl
+++ b/lib/common_test/src/ct_logs.erl
@@ -36,7 +36,8 @@
-export([make_all_suites_index/1,make_all_runs_index/1]).
%% Logging stuff directly from testcase
--export([tc_log/3,tc_print/3,tc_pal/3]).
+-export([tc_log/3,tc_print/3,tc_pal/3,
+ basic_html/0]).
%% Simulate logger process for use without ct environment running
-export([simulate/0]).
@@ -57,7 +58,7 @@
-define(table_color2,"#E4F0FE").
-define(table_color3,"#F0F8FF").
--define(testname_width, 70).
+-define(testname_width, 60).
-define(abs(Name), filename:absname(Name)).
@@ -716,7 +717,7 @@ make_last_run_index1(StartTime,IndexName) ->
[Log];
Logs ->
case read_totals_file(?totals_name) of
- {_Node,Logs0,_Totals} ->
+ {_Node,_Lbl,Logs0,_Totals} ->
insert_dirs(Logs,Logs0);
_ ->
%% someone deleted the totals file!?
@@ -728,10 +729,15 @@ make_last_run_index1(StartTime,IndexName) ->
{ok,Bin} -> binary_to_term(Bin);
_ -> []
end,
- {ok,Index0,Totals} = make_last_run_index(Logs1, index_header(StartTime),
+ Label = case application:get_env(common_test, test_label) of
+ {ok,Lbl} -> Lbl;
+ _ -> undefined
+ end,
+ {ok,Index0,Totals} = make_last_run_index(Logs1,
+ index_header(Label,StartTime),
0, 0, 0, 0, 0, Missing),
%% write current Totals to file, later to be used in all_runs log
- write_totals_file(?totals_name,Logs1,Totals),
+ write_totals_file(?totals_name,Label,Logs1,Totals),
Index = [Index0|index_footer()],
case force_write_file(IndexName, Index) of
ok ->
@@ -761,7 +767,7 @@ make_last_run_index([Name|Rest], Result, TotSucc, TotFail, UserSkip, AutoSkip,
TotNotBuilt, Missing);
LastLogDir ->
SuiteName = filename:rootname(filename:basename(Name)),
- case make_one_index_entry(SuiteName, LastLogDir, false, Missing) of
+ case make_one_index_entry(SuiteName, LastLogDir, "-", false, Missing) of
{Result1,Succ,Fail,USkip,ASkip,NotBuilt} ->
%% for backwards compatibility
AutoSkip1 = case catch AutoSkip+ASkip of
@@ -780,18 +786,18 @@ make_last_run_index([], Result, TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuil
{ok, [Result|total_row(TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt, false)],
{TotSucc,TotFail,UserSkip,AutoSkip,TotNotBuilt}}.
-make_one_index_entry(SuiteName, LogDir, All, Missing) ->
+make_one_index_entry(SuiteName, LogDir, Label, All, Missing) ->
case count_cases(LogDir) of
{Succ,Fail,UserSkip,AutoSkip} ->
NotBuilt = not_built(SuiteName, LogDir, All, Missing),
- NewResult = make_one_index_entry1(SuiteName, LogDir, Succ, Fail,
+ NewResult = make_one_index_entry1(SuiteName, LogDir, Label, Succ, Fail,
UserSkip, AutoSkip, NotBuilt, All),
{NewResult,Succ,Fail,UserSkip,AutoSkip,NotBuilt};
error ->
error
end.
-make_one_index_entry1(SuiteName, Link, Success, Fail, UserSkip, AutoSkip,
+make_one_index_entry1(SuiteName, Link, Label, Success, Fail, UserSkip, AutoSkip,
NotBuilt, All) ->
LogFile = filename:join(Link, ?suitelog_name ++ ".html"),
CrashDumpName = SuiteName ++ "_erl_crash.dump",
@@ -803,7 +809,7 @@ make_one_index_entry1(SuiteName, Link, Success, Fail, UserSkip, AutoSkip,
false ->
""
end,
- {Timestamp,Node,AllInfo} =
+ {Lbl,Timestamp,Node,AllInfo} =
case All of
{true,OldRuns} ->
[_Prefix,NodeOrDate|_] = string:tokens(Link,"."),
@@ -811,20 +817,21 @@ make_one_index_entry1(SuiteName, Link, Success, Fail, UserSkip, AutoSkip,
0 -> "-";
_ -> NodeOrDate
end,
- N = ["<TD ALIGN=right>",Node1,"</TD>\n"],
+ N = ["<TD ALIGN=right><FONT SIZE=-1>",Node1,"</FONT></TD>\n"],
CtRunDir = filename:dirname(filename:dirname(Link)),
- T = ["<TD>",timestamp(CtRunDir),"</TD>\n"],
+ L = ["<TD ALIGN=center><FONT SIZE=-1><B>",Label,"</FONT></B></TD>\n"],
+ T = ["<TD><FONT SIZE=-1>",timestamp(CtRunDir),"</FONT></TD>\n"],
CtLogFile = filename:join(CtRunDir,?ct_log_name),
OldRunsLink =
case OldRuns of
[] -> "none";
_ -> "<A HREF=\""++?all_runs_name++"\">Old Runs</A>"
end,
- A=["<TD><A HREF=\"",CtLogFile,"\">CT Log</A></TD>\n",
- "<TD>",OldRunsLink,"</TD>\n"],
- {T,N,A};
+ A=["<TD><FONT SIZE=-1><A HREF=\"",CtLogFile,"\">CT Log</A></FONT></TD>\n",
+ "<TD><FONT SIZE=-1>",OldRunsLink,"</FONT></TD>\n"],
+ {L,T,N,A};
false ->
- {"","",""}
+ {"","","",""}
end,
NotBuiltStr =
if NotBuilt == 0 ->
@@ -851,7 +858,8 @@ make_one_index_entry1(SuiteName, Link, Success, Fail, UserSkip, AutoSkip,
{UserSkip+AutoSkip,integer_to_list(UserSkip),ASStr}
end,
["<TR valign=top>\n",
- "<TD><A HREF=\"",LogFile,"\">",SuiteName,"</A>",CrashDumpLink,"</TD>\n",
+ "<TD><FONT SIZE=-1><A HREF=\"",LogFile,"\">",SuiteName,"</A>",CrashDumpLink,"</FONT></TD>\n",
+ Lbl,
Timestamp,
"<TD ALIGN=right>",integer_to_list(Success),"</TD>\n",
"<TD ALIGN=right>",FailStr,"</TD>\n",
@@ -862,12 +870,14 @@ make_one_index_entry1(SuiteName, Link, Success, Fail, UserSkip, AutoSkip,
AllInfo,
"</TR>\n"].
total_row(Success, Fail, UserSkip, AutoSkip, NotBuilt, All) ->
- {TimestampCell,AllInfo} =
+ {Label,TimestampCell,AllInfo} =
case All of
- true ->
- {"<TD>&nbsp;</TD>\n","<TD>&nbsp;</TD>\n<TD>&nbsp;</TD>\n"};
+ true ->
+ {"<TD>&nbsp;</TD>\n",
+ "<TD>&nbsp;</TD>\n",
+ "<TD>&nbsp;</TD>\n<TD>&nbsp;</TD>\n"};
false ->
- {"",""}
+ {"","",""}
end,
{AllSkip,UserSkipStr,AutoSkipStr} =
@@ -877,6 +887,7 @@ total_row(Success, Fail, UserSkip, AutoSkip, NotBuilt, All) ->
end,
["<TR valign=top>\n",
"<TD><B>Total</B></TD>",
+ Label,
TimestampCell,
"<TD ALIGN=right><B>",integer_to_list(Success),"<B></TD>\n",
"<TD ALIGN=right><B>",integer_to_list(Fail),"<B></TD>\n",
@@ -937,13 +948,21 @@ term_to_text(Term) ->
%%% Headers and footers.
-index_header(StartTime) ->
- [header("Test Results " ++ format_time(StartTime)) |
+index_header(Label, StartTime) ->
+ Head =
+ case Label of
+ undefined ->
+ header("Test Results", format_time(StartTime));
+ _ ->
+ header("Test Results for \"" ++ Label ++ "\"",
+ format_time(StartTime))
+ end,
+ [Head |
["<CENTER>\n",
"<P><A HREF=\"",?ct_log_name,"\">Common Test Framework Log</A></P>",
"<TABLE border=\"3\" cellpadding=\"5\" "
"BGCOLOR=\"",?table_color3,"\">\n"
- "<th><B>Name</B></th>\n",
+ "<th><B>Test Name</B></th>\n",
"<th><font color=\"",?table_color3,"\">_</font>Ok"
"<font color=\"",?table_color3,"\">_</font></th>\n"
"<th>Failed</th>\n",
@@ -952,13 +971,17 @@ index_header(StartTime) ->
"\n"]].
all_suites_index_header() ->
+ {ok,Cwd} = file:get_cwd(),
+ LogDir = filename:basename(Cwd),
+ AllRuns = "All test runs in \"" ++ LogDir ++ "\"",
[header("Test Results") |
["<CENTER>\n",
- "<A HREF=\"",?all_runs_name,"\">All Test Runs in this directory</A>\n",
+ "<A HREF=\"",?all_runs_name,"\">",AllRuns,"</A>\n",
"<br><br>\n",
"<TABLE border=\"3\" cellpadding=\"5\" "
"BGCOLOR=\"",?table_color2,"\">\n"
- "<th>Name</th>\n",
+ "<th>Test Name</th>\n",
+ "<th>Label</th>\n",
"<th>Test Run Started</th>\n",
"<th><font color=\"",?table_color2,"\">_</font>Ok"
"<font color=\"",?table_color2,"\">_</font></th>\n"
@@ -971,13 +994,17 @@ all_suites_index_header() ->
"\n"]].
all_runs_header() ->
- [header("All test runs in current directory") |
+ {ok,Cwd} = file:get_cwd(),
+ LogDir = filename:basename(Cwd),
+ Title = "All test runs in \"" ++ LogDir ++ "\"",
+ [header(Title) |
["<CENTER><TABLE border=\"3\" cellpadding=\"5\" "
"BGCOLOR=\"",?table_color1,"\">\n"
"<th><B>History</B></th>\n"
"<th><B>Node</B></th>\n"
+ "<th><B>Label</B></th>\n"
"<th>Tests</th>\n"
- "<th><B>Names</B></th>\n"
+ "<th><B>Test Names</B></th>\n"
"<th>Total</th>\n"
"<th><font color=\"",?table_color1,"\">_</font>Ok"
"<font color=\"",?table_color1,"\">_</font></th>\n"
@@ -987,12 +1014,23 @@ all_runs_header() ->
"\n"]].
header(Title) ->
+ header1(Title, "").
+header(Title, SubTitle) ->
+ header1(Title, SubTitle).
+
+header1(Title, SubTitle) ->
+ SubTitleHTML = if SubTitle =/= "" ->
+ ["<CENTER>\n",
+ "<H2>" ++ SubTitle ++ "</H2>\n",
+ "</CENTER>\n<BR>\n"];
+ true -> "<BR>\n"
+ end,
["<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
"<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
"<HTML>\n",
"<HEAD>\n",
- "<TITLE>" ++ Title ++ "</TITLE>\n",
+ "<TITLE>" ++ Title ++ " " ++ SubTitle ++ "</TITLE>\n",
"<META HTTP-EQUIV=\"CACHE-CONTROL\" CONTENT=\"NO-CACHE\">\n",
"</HEAD>\n",
@@ -1004,6 +1042,7 @@ header(Title) ->
"<CENTER>\n",
"<H1>" ++ Title ++ "</H1>\n",
"</CENTER>\n",
+ SubTitleHTML,
"<!-- ---- CONTENT ---- -->\n"].
@@ -1013,19 +1052,28 @@ index_footer() ->
footer() ->
["<P><CENTER>\n"
- "<HR>\n"
- "<P><FONT SIZE=-1>\n"
- "Copyright &copy; ", year(),
- " <A HREF=\"http://erlang.ericsson.se\">Open Telecom Platform</A><BR>\n"
- "Updated: <!date>", current_time(), "<!/date><BR>\n"
- "</FONT>\n"
- "</CENTER>\n"
- "</body>\n"].
+ "<BR><BR>\n"
+ "<HR>\n"
+ "<P><FONT SIZE=-1>\n"
+ "Copyright &copy; ", year(),
+ " <A HREF=\"http://erlang.ericsson.se\">Open Telecom Platform</A><BR>\n"
+ "Updated: <!date>", current_time(), "<!/date><BR>\n"
+ "</FONT>\n"
+ "</CENTER>\n"
+ "</body>\n"].
body_tag() ->
- "<body bgcolor=\"#FFFFFF\" text=\"#000000\" link=\"#0000FF\""
- "vlink=\"#800080\" alink=\"#FF0000\">\n".
+ case basic_html() of
+ true ->
+ "<body bgcolor=\"#FFFFFF\" text=\"#000000\" link=\"#0000FF\" "
+ "vlink=\"#800080\" alink=\"#FF0000\">\n";
+ false ->
+ CTPath = code:lib_dir(common_test),
+ TileFile = filename:join(filename:join(CTPath,"priv"),"tile1.jpg"),
+ "<body background=\"" ++ TileFile ++ "\" bgcolor=\"#FFFFFF\" text=\"#000000\" link=\"#0000FF\" "
+ "vlink=\"#800080\" alink=\"#FF0000\">\n"
+ end.
current_time() ->
format_time(calendar:local_time()).
@@ -1217,7 +1265,7 @@ runentry(Dir, BasicHtml) ->
TotalsFile = filename:join(Dir,?totals_name),
TotalsStr =
case read_totals_file(TotalsFile) of
- {Node,Logs,{TotSucc,TotFail,UserSkip,AutoSkip,NotBuilt}} ->
+ {Node,Label,Logs,{TotSucc,TotFail,UserSkip,AutoSkip,NotBuilt}} ->
TotFailStr =
if TotFail > 0 ->
["<FONT color=\"red\">",
@@ -1263,6 +1311,7 @@ runentry(Dir, BasicHtml) ->
end,
Total = TotSucc+TotFail+AllSkip,
A = ["<TD ALIGN=center><FONT SIZE=-1>",Node,"</FONT></TD>\n",
+ "<TD ALIGN=center><FONT SIZE=-1><B>",Label,"</B></FONT></TD>\n",
"<TD ALIGN=right>",NoOfTests,"</TD>\n"],
B = if BasicHtml ->
["<TD ALIGN=center><FONT SIZE=-1>",TestNamesTrunc,"</FONT></TD>\n"];
@@ -1283,17 +1332,19 @@ runentry(Dir, BasicHtml) ->
end,
Index = filename:join(Dir,?index_name),
["<TR>\n"
- "<TD><A HREF=\"",Index,"\">",timestamp(Dir),"</A>",TotalsStr,"</TD>\n"
+ "<TD><FONT SIZE=-1><A HREF=\"",Index,"\">",timestamp(Dir),"</A>",TotalsStr,"</FONT></TD>\n"
"</TR>\n"].
-write_totals_file(Name,Logs,Totals) ->
+write_totals_file(Name,Label,Logs,Totals) ->
AbsName = ?abs(Name),
notify_and_lock_file(AbsName),
force_write_file(AbsName,
term_to_binary({atom_to_list(node()),
- Logs,Totals})),
+ Label,Logs,Totals})),
notify_and_unlock_file(AbsName).
+%% this function needs to convert from old formats to new so that old
+%% test results (prev ct versions) can be listed together with new
read_totals_file(Name) ->
AbsName = ?abs(Name),
notify_and_lock_file(AbsName),
@@ -1303,12 +1354,23 @@ read_totals_file(Name) ->
case catch binary_to_term(Bin) of
{'EXIT',_Reason} -> % corrupt file
{"-",[],undefined};
- R = {Node,Ls,Tot} ->
+ {Node,Label,Ls,Tot} -> % all info available
+ Label1 = case Label of
+ undefined -> "-";
+ _ -> Label
+ end,
+ case Tot of
+ {_Ok,_Fail,_USkip,_ASkip,_NoBuild} -> % latest format
+ {Node,Label1,Ls,Tot};
+ {TotSucc,TotFail,AllSkip,NotBuilt} ->
+ {Node,Label1,Ls,{TotSucc,TotFail,AllSkip,undefined,NotBuilt}}
+ end;
+ {Node,Ls,Tot} -> % no label found
case Tot of
- {_,_,_,_,_} -> % latest format
- R;
+ {_Ok,_Fail,_USkip,_ASkip,_NoBuild} -> % latest format
+ {Node,"-",Ls,Tot};
{TotSucc,TotFail,AllSkip,NotBuilt} ->
- {Node,Ls,{TotSucc,TotFail,AllSkip,undefined,NotBuilt}}
+ {Node,"-",Ls,{TotSucc,TotFail,AllSkip,undefined,NotBuilt}}
end;
%% for backwards compatibility
{Ls,Tot} -> {"-",Ls,Tot};
@@ -1411,7 +1473,7 @@ make_all_suites_index1(When,AllSuitesLogDirs) ->
make_all_suites_index2(IndexName,AllSuitesLogDirs) ->
{ok,Index0,_Totals} = make_all_suites_index3(AllSuitesLogDirs,
all_suites_index_header(),
- 0, 0, 0, 0, 0),
+ 0, 0, 0, 0, 0, []),
Index = [Index0|index_footer()],
case force_write_file(IndexName, Index) of
ok ->
@@ -1421,14 +1483,25 @@ make_all_suites_index2(IndexName,AllSuitesLogDirs) ->
end.
make_all_suites_index3([{SuiteName,[LastLogDir|OldDirs]}|Rest],
- Result, TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt) ->
+ Result, TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt,
+ Labels) ->
[EntryDir|_] = filename:split(LastLogDir),
Missing =
case file:read_file(filename:join(EntryDir,?missing_suites_info)) of
{ok,Bin} -> binary_to_term(Bin);
_ -> []
end,
- case make_one_index_entry(SuiteName, LastLogDir, {true,OldDirs}, Missing) of
+ {Label,Labels1} =
+ case proplists:get_value(EntryDir, Labels) of
+ undefined ->
+ case read_totals_file(filename:join(EntryDir,?totals_name)) of
+ {_,Lbl,_,_} -> {Lbl,[{EntryDir,Lbl}|Labels]};
+ _ -> {"-",[{EntryDir,"-"}|Labels]}
+ end;
+ Lbl ->
+ {Lbl,Labels}
+ end,
+ case make_one_index_entry(SuiteName, LastLogDir, Label, {true,OldDirs}, Missing) of
{Result1,Succ,Fail,USkip,ASkip,NotBuilt} ->
%% for backwards compatibility
AutoSkip1 = case catch AutoSkip+ASkip of
@@ -1437,13 +1510,13 @@ make_all_suites_index3([{SuiteName,[LastLogDir|OldDirs]}|Rest],
end,
make_all_suites_index3(Rest, [Result|Result1], TotSucc+Succ,
TotFail+Fail, UserSkip+USkip, AutoSkip1,
- TotNotBuilt+NotBuilt);
+ TotNotBuilt+NotBuilt,Labels1);
error ->
make_all_suites_index3(Rest, Result, TotSucc, TotFail,
- UserSkip, AutoSkip, TotNotBuilt)
+ UserSkip, AutoSkip, TotNotBuilt,Labels1)
end;
make_all_suites_index3([], Result, TotSucc, TotFail, UserSkip, AutoSkip,
- TotNotBuilt) ->
+ TotNotBuilt,_) ->
{ok, [Result|total_row(TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt,true)],
{TotSucc,TotFail,UserSkip,AutoSkip,TotNotBuilt}}.
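
For reference, a sketch of the totals file term formats that the updated read_totals_file/1 above accepts (informal variable names):

    %% {Node, Label, Logs, {Ok,Fail,UserSkip,AutoSkip,NotBuilt}}   current format, with label
    %% {Node, Logs, {Ok,Fail,UserSkip,AutoSkip,NotBuilt}}          written by versions without label support
    %% {Node, Logs, {Ok,Fail,AllSkip,NotBuilt}}                    older totals tuple, AutoSkip becomes undefined
    %% {Logs, Totals}                                              kept for backwards compatibility
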
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 6a9c42d1b9..c5bfd01642 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -45,7 +45,8 @@
-define(abs(Name), filename:absname(Name)).
-define(testdir(Name, Suite), ct_util:get_testdir(Name, Suite)).
--record(opts, {vts,
+-record(opts, {label,
+ vts,
shell,
cover,
coverspec,
@@ -153,14 +154,16 @@ script_start(Args) ->
Result
end,
stop_trace(Tracing),
+ timer:sleep(1000),
Res.
script_start1(Parent, Args) ->
%% read general start flags
+ Label = get_start_opt(label, fun([Lbl]) -> Lbl end, Args),
Vts = get_start_opt(vts, true, Args),
Shell = get_start_opt(shell, true, Args),
Cover = get_start_opt(cover, fun([CoverFile]) -> ?abs(CoverFile) end, Args),
- LogDir = get_start_opt(logdir, fun([LogD]) -> LogD end, ".", Args),
+ LogDir = get_start_opt(logdir, fun([LogD]) -> LogD end, Args),
MultTT = get_start_opt(multiply_timetraps,
fun([MT]) -> list_to_integer(MT) end, 1, Args),
ScaleTT = get_start_opt(scale_timetraps,
@@ -229,7 +232,7 @@ script_start1(Parent, Args) ->
application:set_env(common_test, basic_html, true)
end,
- StartOpts = #opts{vts = Vts, shell = Shell, cover = Cover,
+ StartOpts = #opts{label = Label, vts = Vts, shell = Shell, cover = Cover,
logdir = LogDir, event_handlers = EvHandlers,
include = IncludeDirs,
silent_connections = SilentConns,
@@ -288,6 +291,9 @@ script_start2(StartOpts = #opts{vts = undefined,
TS ->
SpecStartOpts = get_data_for_node(TS, node()),
+ Label = choose_val(StartOpts#opts.label,
+ SpecStartOpts#opts.label),
+
LogDir = choose_val(StartOpts#opts.logdir,
SpecStartOpts#opts.logdir),
@@ -303,7 +309,8 @@ script_start2(StartOpts = #opts{vts = undefined,
SpecStartOpts#opts.include]),
application:set_env(common_test, include, AllInclude),
- {TS,StartOpts#opts{testspecs = Specs,
+ {TS,StartOpts#opts{label = Label,
+ testspecs = Specs,
cover = Cover,
logdir = LogDir,
config = SpecStartOpts#opts.config,
@@ -317,29 +324,30 @@ script_start2(StartOpts = #opts{vts = undefined,
end,
%% read config/userconfig from start flags
InitConfig = ct_config:prepare_config_list(Args),
+ TheLogDir = which(logdir, Opts#opts.logdir),
case {TestSpec,Terms} of
{_,{error,_}=Error} ->
Error;
{[],_} ->
{error,no_testspec_specified};
{undefined,_} -> % no testspec used
- case check_and_install_configfiles(InitConfig,
- which(logdir,Opts#opts.logdir),
+ case check_and_install_configfiles(InitConfig, TheLogDir,
Opts#opts.event_handlers) of
ok -> % go on read tests from start flags
- script_start3(Opts#opts{config=InitConfig}, Args);
+ script_start3(Opts#opts{config=InitConfig,
+ logdir=TheLogDir}, Args);
Error ->
Error
end;
{_,_} -> % testspec used
%% merge config from start flags with config from testspec
AllConfig = merge_vals([InitConfig, Opts#opts.config]),
- case check_and_install_configfiles(AllConfig,
- which(logdir,Opts#opts.logdir),
+ case check_and_install_configfiles(AllConfig, TheLogDir,
Opts#opts.event_handlers) of
ok -> % read tests from spec
{Run,Skip} = ct_testspec:prepare_tests(Terms, node()),
- do_run(Run, Skip, Opts#opts{config=AllConfig}, Args);
+ do_run(Run, Skip, Opts#opts{config=AllConfig,
+ logdir=TheLogDir}, Args);
Error ->
Error
end
@@ -348,11 +356,12 @@ script_start2(StartOpts = #opts{vts = undefined,
script_start2(StartOpts, Args) ->
%% read config/userconfig from start flags
InitConfig = ct_config:prepare_config_list(Args),
- case check_and_install_configfiles(InitConfig,
- which(logdir,StartOpts#opts.logdir),
+ LogDir = which(logdir, StartOpts#opts.logdir),
+ case check_and_install_configfiles(InitConfig, LogDir,
StartOpts#opts.event_handlers) of
ok -> % go on read tests from start flags
- script_start3(StartOpts#opts{config=InitConfig}, Args);
+ script_start3(StartOpts#opts{config=InitConfig,
+ logdir=LogDir}, Args);
Error ->
Error
end.
@@ -427,8 +436,12 @@ script_start4(#opts{vts = true, config = Config, event_handlers = EvHandlers,
end, [], Config),
vts:init_data(ConfigFiles, EvHandlers, ?abs(LogDir), Tests);
-script_start4(#opts{shell = true, config = Config, event_handlers = EvHandlers,
+script_start4(#opts{label = Label, shell = true, config = Config,
+ event_handlers = EvHandlers,
logdir = LogDir, testspecs = Specs}, _Args) ->
+ %% label - used by ct_logs
+ application:set_env(common_test, test_label, Label),
+
InstallOpts = [{config,Config},{event_handler,EvHandlers}],
if Config == [] ->
ok;
@@ -613,9 +626,13 @@ run_test(StartOpts) when is_list(StartOpts) ->
end.
run_test1(StartOpts) ->
+ %% label
+ Label = get_start_opt(label, fun(Lbl) when is_list(Lbl) -> Lbl;
+ (Lbl) when is_atom(Lbl) -> atom_to_list(Lbl)
+ end, StartOpts),
%% logdir
LogDir = get_start_opt(logdir, fun(LD) when is_list(LD) -> LD end,
- ".", StartOpts),
+ StartOpts),
%% config & userconfig
CfgFiles = ct_config:get_config_file_list(StartOpts),
@@ -711,7 +728,8 @@ run_test1(StartOpts) ->
%% stepped execution
Step = get_start_opt(step, value, StartOpts),
- Opts = #opts{cover = Cover, step = Step, logdir = LogDir, config = CfgFiles,
+ Opts = #opts{label = Label,
+ cover = Cover, step = Step, logdir = LogDir, config = CfgFiles,
event_handlers = EvHandlers, include = Include,
silent_connections = SilentConns,
stylesheet = Stylesheet,
@@ -747,6 +765,8 @@ run_spec_file(Relaxed,
exit(CTReason);
TS ->
SpecOpts = get_data_for_node(TS, node()),
+ Label = choose_val(Opts#opts.label,
+ SpecOpts#opts.label),
LogDir = choose_val(Opts#opts.logdir,
SpecOpts#opts.logdir),
AllConfig = merge_vals([CfgFiles, SpecOpts#opts.config]),
@@ -766,8 +786,9 @@ run_spec_file(Relaxed,
which(logdir,LogDir),
AllEvHs) of
ok ->
- Opts1 = Opts#opts{cover = Cover,
- logdir = LogDir,
+ Opts1 = Opts#opts{label = Label,
+ cover = Cover,
+ logdir = which(logdir, LogDir),
config = AllConfig,
event_handlers = AllEvHs,
include = AllInclude,
@@ -775,7 +796,7 @@ run_spec_file(Relaxed,
multiply_timetraps = MultTT,
scale_timetraps = ScaleTT},
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
- do_run(Run, Skip, Opts1, StartOpts);
+ reformat_result(catch do_run(Run, Skip, Opts1, StartOpts));
{error,GCFReason} ->
exit(GCFReason)
end
@@ -785,9 +806,11 @@ run_prepared(Run, Skip, Opts = #opts{logdir = LogDir,
config = CfgFiles,
event_handlers = EvHandlers},
StartOpts) ->
- case check_and_install_configfiles(CfgFiles, LogDir, EvHandlers) of
+ LogDir1 = which(logdir, LogDir),
+ case check_and_install_configfiles(CfgFiles, LogDir1, EvHandlers) of
ok ->
- do_run(Run, Skip, Opts, StartOpts);
+ reformat_result(catch do_run(Run, Skip, Opts#opts{logdir = LogDir1},
+ StartOpts));
{error,Reason} ->
exit(Reason)
end.
@@ -816,6 +839,8 @@ check_config_file(Callback, File)->
run_dir(Opts = #opts{logdir = LogDir,
config = CfgFiles,
event_handlers = EvHandlers}, StartOpts) ->
+ LogDir1 = which(logdir, LogDir),
+ Opts1 = Opts#opts{logdir = LogDir1},
AbsCfgFiles =
lists:map(fun({Callback,FileList})->
case code:is_loaded(Callback) of
@@ -834,7 +859,7 @@ run_dir(Opts = #opts{logdir = LogDir,
check_config_file(Callback, File)
end, FileList)}
end, CfgFiles),
- case install([{config,AbsCfgFiles},{event_handler,EvHandlers}], LogDir) of
+ case install([{config,AbsCfgFiles},{event_handler,EvHandlers}], LogDir1) of
ok -> ok;
{error,IReason} -> exit(IReason)
end,
@@ -842,7 +867,7 @@ run_dir(Opts = #opts{logdir = LogDir,
{value,{_,Dirs=[Dir|_]}} when not is_integer(Dir),
length(Dirs)>1 ->
%% multiple dirs (no suite)
- do_run(tests(Dirs), [], Opts, StartOpts);
+ reformat_result(catch do_run(tests(Dirs), [], Opts1, StartOpts));
false -> % no dir
%% fun for converting suite name to {Dir,Mod} tuple
S2M = fun(S) when is_list(S) ->
@@ -857,12 +882,15 @@ run_dir(Opts = #opts{logdir = LogDir,
case listify(proplists:get_value(group, StartOpts, [])) ++
listify(proplists:get_value(testcase, StartOpts, [])) of
[] ->
- do_run(tests(Dir, listify(Mod)), [], Opts, StartOpts);
+ reformat_result(catch do_run(tests(Dir, listify(Mod)),
+ [], Opts1, StartOpts));
GsAndCs ->
- do_run(tests(Dir, Mod, GsAndCs), [], Opts, StartOpts)
+ reformat_result(catch do_run(tests(Dir, Mod, GsAndCs),
+ [], Opts1, StartOpts))
end;
{value,{_,Suites}} ->
- do_run(tests(lists:map(S2M, Suites)), [], Opts, StartOpts);
+ reformat_result(catch do_run(tests(lists:map(S2M, Suites)),
+ [], Opts1, StartOpts));
_ ->
exit(no_tests_specified)
end;
@@ -875,17 +903,22 @@ run_dir(Opts = #opts{logdir = LogDir,
case listify(proplists:get_value(group, StartOpts, [])) ++
listify(proplists:get_value(testcase, StartOpts, [])) of
[] ->
- do_run(tests(Dir, listify(Mod)), [], Opts, StartOpts);
+ reformat_result(catch do_run(tests(Dir, listify(Mod)),
+ [], Opts1, StartOpts));
GsAndCs ->
- do_run(tests(Dir, Mod, GsAndCs), [], Opts, StartOpts)
+ reformat_result(catch do_run(tests(Dir, Mod, GsAndCs),
+ [], Opts1, StartOpts))
end;
{value,{_,Suites=[Suite|_]}} when is_list(Suite) ->
Mods = lists:map(fun(Str) -> list_to_atom(Str) end, Suites),
- do_run(tests(delistify(Dir), Mods), [], Opts, StartOpts);
+ reformat_result(catch do_run(tests(delistify(Dir), Mods),
+ [], Opts1, StartOpts));
{value,{_,Suites}} ->
- do_run(tests(delistify(Dir), Suites), [], Opts, StartOpts);
+ reformat_result(catch do_run(tests(delistify(Dir), Suites),
+ [], Opts1, StartOpts));
false -> % no suite, only dir
- do_run(tests(listify(Dir)), [], Opts, StartOpts)
+ reformat_result(catch do_run(tests(listify(Dir)),
+ [], Opts1, StartOpts))
end
end.
@@ -925,28 +958,30 @@ run_testspec1(TestSpec) ->
EnvInclude++Opts#opts.include
end,
application:set_env(common_test, include, AllInclude),
-
- case check_and_install_configfiles(Opts#opts.config,
- which(logdir,Opts#opts.logdir),
+ LogDir1 = which(logdir,Opts#opts.logdir),
+ case check_and_install_configfiles(Opts#opts.config, LogDir1,
Opts#opts.event_handlers) of
ok ->
- Opts1 = Opts#opts{testspecs = [TestSpec],
+ Opts1 = Opts#opts{testspecs = [],
+ logdir = LogDir1,
include = AllInclude},
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
- do_run(Run, Skip, Opts1, []);
+ reformat_result(catch do_run(Run, Skip, Opts1, []));
{error,GCFReason} ->
exit(GCFReason)
end
end.
-get_data_for_node(#testspec{logdir=LogDirs,
- cover=CoverFs,
- config=Cfgs,
- userconfig=UsrCfgs,
- event_handler=EvHs,
- include=Incl,
- multiply_timetraps=MTs,
- scale_timetraps=STs}, Node) ->
+get_data_for_node(#testspec{label = Labels,
+ logdir = LogDirs,
+ cover = CoverFs,
+ config = Cfgs,
+ userconfig = UsrCfgs,
+ event_handler = EvHs,
+ include = Incl,
+ multiply_timetraps = MTs,
+ scale_timetraps = STs}, Node) ->
+ Label = proplists:get_value(Node, Labels),
LogDir = case proplists:get_value(Node, LogDirs) of
undefined -> ".";
Dir -> Dir
@@ -958,7 +993,8 @@ get_data_for_node(#testspec{logdir=LogDirs,
[CBF || {N,CBF} <- UsrCfgs, N==Node],
EvHandlers = [{H,A} || {N,H,A} <- EvHs, N==Node],
Include = [I || {N,I} <- Incl, N==Node],
- #opts{logdir = LogDir,
+ #opts{label = Label,
+ logdir = LogDir,
cover = Cover,
config = ConfigFiles,
event_handlers = EvHandlers,
@@ -1023,21 +1059,26 @@ delistify(E) -> E.
%%% @equiv ct:run/3
run(TestDir, Suite, Cases) ->
install([]),
- do_run(tests(TestDir, Suite, Cases), []).
+ reformat_result(catch do_run(tests(TestDir, Suite, Cases), [])).
%%%-----------------------------------------------------------------
%%% @hidden
%%% @equiv ct:run/2
run(TestDir, Suite) when is_list(TestDir), is_integer(hd(TestDir)) ->
install([]),
- do_run(tests(TestDir, Suite), []).
+ reformat_result(catch do_run(tests(TestDir, Suite), [])).
%%%-----------------------------------------------------------------
%%% @hidden
%%% @equiv ct:run/1
run(TestDirs) ->
install([]),
- do_run(tests(TestDirs), []).
+ reformat_result(catch do_run(tests(TestDirs), [])).
+
+reformat_result({user_error,Reason}) ->
+ {error,Reason};
+reformat_result(Result) ->
+ Result.
suite_to_test(Suite) ->
{filename:dirname(Suite),list_to_atom(filename:rootname(filename:basename(Suite)))}.
@@ -1092,7 +1133,17 @@ do_run(Tests, Misc, LogDir) when is_list(Misc) ->
do_run(Tests, [], Opts1#opts{logdir = LogDir}, []).
do_run(Tests, Skip, Opts, Args) ->
- #opts{cover = Cover} = Opts,
+ #opts{label = Label, cover = Cover} = Opts,
+
+ %% label - used by ct_logs
+ TestLabel =
+ if Label == undefined -> undefined;
+ is_atom(Label) -> atom_to_list(Label);
+ is_list(Label) -> Label;
+ true -> undefined
+ end,
+ application:set_env(common_test, test_label, TestLabel),
+
case code:which(test_server) of
non_existing ->
exit({error,no_path_to_test_server});
@@ -1156,10 +1207,19 @@ do_run(Tests, Skip, Opts, Args) ->
true ->
SavedErrors = save_make_errors(SuiteMakeErrors),
ct_repeat:log_loop_info(Args),
- {Tests1,Skip1} = final_tests(Tests,[],Skip,SavedErrors),
- R = do_run_test(Tests1, Skip1, Opts1),
- ct_util:stop(normal),
- R;
+
+ {Tests1,Skip1} = final_tests(Tests,Skip,SavedErrors),
+
+ R = (catch do_run_test(Tests1, Skip1, Opts1)),
+ case R of
+ {EType,_} = Error when EType == user_error ;
+ EType == error ->
+ ct_util:stop(clean),
+ exit(Error);
+ _ ->
+ ct_util:stop(normal),
+ R
+ end;
false ->
io:nl(),
ct_util:stop(clean),
@@ -1316,8 +1376,22 @@ suite_tuples([{TestDir,Suite,_} | Tests]) when is_atom(Suite) ->
suite_tuples([]) ->
[].
-final_tests([{TestDir,Suites,_}|Tests],
- Final, Skip, Bad) when is_list(Suites), is_atom(hd(Suites)) ->
+final_tests(Tests, Skip, Bad) ->
+
+ %%! --- Thu Jun 24 15:47:27 2010 --- peppe was here!
+ %%! io:format(user, "FINAL0 = ~p~nSKIP0 = ~p~n", [Tests, Skip]),
+
+ {Tests1,Skip1} = final_tests1(Tests, [], Skip, Bad),
+ Skip2 = final_skip(Skip1, []),
+
+
+ %%! --- Thu Jun 24 15:47:27 2010 --- peppe was here!
+ %%! io:format(user, "FINAL1 = ~p~nSKIP1 = ~p~n", [Tests1, Skip2]),
+
+ {Tests1,Skip2}.
+
+final_tests1([{TestDir,Suites,_}|Tests], Final, Skip, Bad) when
+ is_list(Suites), is_atom(hd(Suites)) ->
% Separate =
% fun(S,{DoSuite,Dont}) ->
% case lists:keymember({TestDir,S},1,Bad) of
@@ -1336,9 +1410,9 @@ final_tests([{TestDir,Suites,_}|Tests],
Skip1 = [{TD,S,"Make failed"} || {{TD,S},_} <- Bad, S1 <- Suites,
S == S1, TD == TestDir],
Final1 = [{TestDir,S,all} || S <- Suites],
- final_tests(Tests, lists:reverse(Final1)++Final, Skip++Skip1, Bad);
+ final_tests1(Tests, lists:reverse(Final1)++Final, Skip++Skip1, Bad);
-final_tests([{TestDir,all,all}|Tests], Final, Skip, Bad) ->
+final_tests1([{TestDir,all,all}|Tests], Final, Skip, Bad) ->
MissingSuites =
case lists:keysearch({TestDir,all}, 1, Bad) of
{value,{_,Failed}} ->
@@ -1348,26 +1422,58 @@ final_tests([{TestDir,all,all}|Tests], Final, Skip, Bad) ->
end,
Missing = [{TestDir,S,"Make failed"} || S <- MissingSuites],
Final1 = [{TestDir,all,all}|Final],
- final_tests(Tests, Final1, Skip++Missing, Bad);
+ final_tests1(Tests, Final1, Skip++Missing, Bad);
-final_tests([{TestDir,Suite,Cases}|Tests],
- Final, Skip, Bad) when Cases==[]; Cases==all ->
- final_tests([{TestDir,[Suite],all}|Tests], Final, Skip, Bad);
+final_tests1([{TestDir,Suite,Cases}|Tests], Final, Skip, Bad) when
+ Cases==[]; Cases==all ->
+ final_tests1([{TestDir,[Suite],all}|Tests], Final, Skip, Bad);
-final_tests([{TestDir,Suite,Cases}|Tests], Final, Skip, Bad) ->
+final_tests1([{TestDir,Suite,GrsOrCs}|Tests], Final, Skip, Bad) when
+ is_list(GrsOrCs) ->
case lists:keymember({TestDir,Suite}, 1, Bad) of
- false ->
- Do = {TestDir,Suite,Cases},
- final_tests(Tests, [Do|Final], Skip, Bad);
true ->
- Do = {TestDir,Suite,Cases},
- Skip1 = Skip ++ [{TestDir,Suite,Cases,"Make failed"}],
- final_tests(Tests, [Do|Final], Skip1, Bad)
+ Skip1 = Skip ++ [{TestDir,Suite,all,"Make failed"}],
+ final_tests1(Tests, [{TestDir,Suite,all}|Final], Skip1, Bad);
+ false ->
+ GrsOrCs1 =
+ lists:flatmap(
+ %% for now, only flat group defs are allowed as
+ %% start options and test spec terms
+ fun({all,all}) ->
+ ct_framework:make_all_conf(TestDir,
+ Suite, []);
+ ({skipped,Group,TCs}) ->
+ [ct_framework:make_conf(TestDir, Suite,
+ Group, [skipped], TCs)];
+ ({Group,TCs}) ->
+ [ct_framework:make_conf(TestDir, Suite,
+ Group, [], TCs)];
+ (TC) ->
+ [TC]
+ end, GrsOrCs),
+ Do = {TestDir,Suite,GrsOrCs1},
+ final_tests1(Tests, [Do|Final], Skip, Bad)
end;
-final_tests([], Final, Skip, _Bad) ->
+final_tests1([], Final, Skip, _Bad) ->
{lists:reverse(Final),Skip}.
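Note: as the comment in the clause above points out, only flat group definitions are accepted as start options and test spec terms for now. Each element of GrsOrCs must therefore match one of the forms handled by the flatmap fun, for example (group and case names are placeholders):

    [{all,all},                    %% all groups, all test cases
     {my_group,all},               %% one group, all of its test cases
     {my_group,[tc_1,tc_2]},       %% one group, selected test cases
     {skipped,my_group,[tc_1]},    %% group marked as skipped
     tc_3]                         %% plain test case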
+final_skip([{TestDir,Suite,{all,all},Reason}|Skips], Final) ->
+ SkipConf = ct_framework:make_conf(TestDir, Suite, all, [], all),
+ Skip = {TestDir,Suite,SkipConf,Reason},
+ final_skip(Skips, [Skip|Final]);
+
+final_skip([{TestDir,Suite,{Group,TCs},Reason}|Skips], Final) ->
+ Conf = ct_framework:make_conf(TestDir, Suite, Group, [], TCs),
+ Skip = {TestDir,Suite,Conf,Reason},
+ final_skip(Skips, [Skip|Final]);
+
+final_skip([Skip|Skips], Final) ->
+ final_skip(Skips, [Skip|Final]);
+
+final_skip([], Final) ->
+ lists:reverse(Final).
+
continue([]) ->
true;
continue(_MakeErrors) ->
@@ -1489,6 +1595,7 @@ do_run_test(Tests, Skip, Opts) ->
_ ->
false
end,
+
%% let test_server expand the test tuples and count no of cases
{Suites,NoOfCases} = count_test_cases(Tests, Skip),
Suites1 = delete_dups(Suites),
@@ -1544,19 +1651,30 @@ count_test_cases(Tests, Skip) ->
TSPid = test_server_ctrl:start_get_totals(SendResult),
Ref = erlang:monitor(process, TSPid),
add_jobs(Tests, Skip, #opts{}, []),
- {Suites,NoOfCases} = count_test_cases1(length(Tests), 0, [], Ref),
+ Counted = (catch count_test_cases1(length(Tests), 0, [], Ref)),
erlang:demonitor(Ref, [flush]),
- test_server_ctrl:stop_get_totals(),
- {Suites,NoOfCases}.
+ case Counted of
+ {error,{test_server_died}} = Error ->
+ throw(Error);
+ {error,Reason} ->
+ unlink(whereis(test_server_ctrl)),
+ test_server_ctrl:stop(),
+ throw({user_error,Reason});
+ Result ->
+ test_server_ctrl:stop_get_totals(),
+ Result
+ end.
count_test_cases1(0, N, Suites, _) ->
{lists:flatten(Suites), N};
count_test_cases1(Jobs, N, Suites, Ref) ->
receive
+ {_,{error,_Reason} = Error} ->
+ throw(Error);
{no_of_cases,{Ss,N1}} ->
count_test_cases1(Jobs-1, add_known(N,N1), [Ss|Suites], Ref);
- {'DOWN', Ref, _, _, _} ->
- {[],0}
+ {'DOWN', Ref, _, _, Info} ->
+ throw({error,{test_server_died,Info}})
end.
add_known(unknown, _) ->
@@ -1604,13 +1722,36 @@ add_jobs([{TestDir,Suite,all}|Tests], Skip, Opts, CleanUp) ->
Error
end;
-%% group
-add_jobs([{TestDir,Suite,[{GroupName,_Cases}]}|Tests], Skip, Opts, CleanUp) when
- is_atom(GroupName) ->
- add_jobs([{TestDir,Suite,GroupName}|Tests], Skip, Opts, CleanUp);
-add_jobs([{TestDir,Suite,{GroupName,_Cases}}|Tests], Skip, Opts, CleanUp) when
- is_atom(GroupName) ->
- add_jobs([{TestDir,Suite,GroupName}|Tests], Skip, Opts, CleanUp);
+%% group (= conf case in test_server)
+add_jobs([{TestDir,Suite,Confs}|Tests], Skip, Opts, CleanUp) when
+ element(1, hd(Confs)) == conf ->
+ Group = fun(Conf) -> proplists:get_value(name, element(2, Conf)) end,
+ TestCases = fun(Conf) -> element(4, Conf) end,
+ TCTestName = fun(all) -> "";
+ ([C]) when is_atom(C) -> "." ++ atom_to_list(C);
+ (Cs) when is_list(Cs) -> ".cases"
+ end,
+ GrTestName =
+ case Confs of
+ [Conf] ->
+ "." ++ atom_to_list(Group(Conf)) ++ TCTestName(TestCases(Conf));
+ _ ->
+ ".groups"
+ end,
+ TestName = get_name(TestDir) ++ "." ++ atom_to_list(Suite) ++ GrTestName,
+ case maybe_interpret(Suite, init_per_group, Opts) of
+ ok ->
+ case catch test_server_ctrl:add_conf_with_skip(TestName, Suite, Confs,
+ skiplist(TestDir,Skip)) of
+ {'EXIT',_} ->
+ CleanUp;
+ _ ->
+ wait_for_idle(),
+ add_jobs(Tests, Skip, Opts, [Suite|CleanUp])
+ end;
+ Error ->
+ Error
+ end;
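Note: the TestName constructed above encodes the test directory, the suite, and the group/case selection. Assuming get_name/1 returns the base name of the test directory, e.g. "mytests", and the suite is my_SUITE (both placeholders), the resulting names would be along these lines:

    %% one group, all cases:      "mytests.my_SUITE.my_group"
    %% one group, a single case:  "mytests.my_SUITE.my_group.tc_1"
    %% one group, several cases:  "mytests.my_SUITE.my_group.cases"
    %% more than one group:       "mytests.my_SUITE.groups"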
%% test case
add_jobs([{TestDir,Suite,[Case]}|Tests], Skip, Opts, CleanUp) when is_atom(Case) ->
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 0f68b062f6..f5069427a2 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -185,7 +185,15 @@ prepare_cases(Node,Dir,Suite,Cases) ->
{[{{Node,Dir},{Suite,all}}],SkipAll};
Skipped ->
%% note: this adds a test even if only skip is specified
- PrepC = lists:foldr(fun({C,{skip,_Cmt}},Acc) ->
+ PrepC = lists:foldr(fun({{G,Cs},{skip,_Cmt}}, Acc) when
+ is_atom(G) ->
+ case lists:keymember(G, 1, Cases) of
+ true ->
+ Acc;
+ false ->
+ [{skipped,G,Cs}|Acc]
+ end;
+ ({C,{skip,_Cmt}},Acc) ->
case lists:member(C,Cases) of
true ->
Acc;
@@ -194,7 +202,7 @@ prepare_cases(Node,Dir,Suite,Cases) ->
end;
(C,Acc) -> [C|Acc]
end, [], Cases),
- {{{Node,Dir},{Suite,PrepC}},Skipped}
+ {{{Node,Dir},{Suite,PrepC}},Skipped}
end.
get_skipped_suites(Node,Dir,Suites) ->
@@ -210,7 +218,7 @@ get_skipped_cases(Node,Dir,Suite,Cases) ->
case lists:keysearch(all,1,Cases) of
{value,{all,{skip,Cmt}}} ->
[{{Node,Dir},{Suite,Cmt}}];
- false ->
+ _ ->
get_skipped_cases1(Node,Dir,Suite,Cases)
end.
@@ -432,6 +440,15 @@ save_nodes(Nodes,Spec=#testspec{nodes=NodeRefs}) ->
list_nodes(#testspec{nodes=NodeRefs}) ->
lists:map(fun({_Ref,Node}) -> Node end, NodeRefs).
+
+
+%% ---------------------------------------------------------
+%% / \
+%% | When adding tests, remember to update valid_terms/0 also! |
+%% \ /
+%% ---------------------------------------------------------
+
+
%% Associate a "global" logdir with all nodes
%% except those with specific logdir, e.g:
%% ["/tmp/logdir",{ct1@finwe,"/tmp/logdir2"}]
@@ -457,6 +474,24 @@ add_tests([{logdir,Node,Dir}|Ts],Spec) ->
add_tests([{logdir,Dir}|Ts],Spec) ->
add_tests([{logdir,all_nodes,Dir}|Ts],Spec);
+%% --- label ---
+add_tests([{label,all_nodes,Lbl}|Ts],Spec) ->
+ Labels = Spec#testspec.label,
+ Tests = [{label,N,Lbl} || N <- list_nodes(Spec),
+ lists:keymember(ref2node(N,Spec#testspec.nodes),
+ 1,Labels) == false],
+ add_tests(Tests++Ts,Spec);
+add_tests([{label,Nodes,Lbl}|Ts],Spec) when is_list(Nodes) ->
+ Ts1 = separate(Nodes,label,[Lbl],Ts,Spec#testspec.nodes),
+ add_tests(Ts1,Spec);
+add_tests([{label,Node,Lbl}|Ts],Spec) ->
+ Labels = Spec#testspec.label,
+ Labels1 = [{ref2node(Node,Spec#testspec.nodes),Lbl} |
+ lists:keydelete(ref2node(Node,Spec#testspec.nodes),1,Labels)],
+ add_tests(Ts,Spec#testspec{label=Labels1});
+add_tests([{label,Lbl}|Ts],Spec) ->
+ add_tests([{label,all_nodes,Lbl}|Ts],Spec);
+
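Note: with the clauses above, a label can be given in a test specification either globally or per node; the node-less form is expanded to all_nodes. Example spec terms (node name is a placeholder):

    {label, "nightly_run"}.
    {label, ct1@somehost, "node_specific_label"}.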
%% --- cover ---
add_tests([{cover,all_nodes,File}|Ts],Spec) ->
Tests = lists:map(fun(N) -> {cover,N,File} end, list_nodes(Spec)),
@@ -878,8 +913,13 @@ skip_suites(_Node,_Dir,[],_Cmt,Tests) ->
skip_suites(Node,Dir,S,Cmt,Tests) ->
skip_suites(Node,Dir,[S],Cmt,Tests).
-skip_groups(Node,Dir,Suite,Group,Case,Cmt,Tests) when is_atom(Group) ->
- skip_groups(Node,Dir,Suite,[Group],[Case],Cmt,Tests);
+skip_groups(Node,Dir,Suite,Group,all,Cmt,Tests) when is_atom(Group) ->
+ skip_groups(Node,Dir,Suite,[Group],all,Cmt,Tests);
+skip_groups(Node,Dir,Suite,Group,Cases,Cmt,Tests) when is_atom(Group) ->
+ skip_groups(Node,Dir,Suite,[Group],Cases,Cmt,Tests);
+skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests) when is_atom(Case),
+ Case =/= all ->
+ skip_groups(Node,Dir,Suite,Groups,[Case],Cmt,Tests);
skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests) when
((Cases == all) or is_list(Cases)) and is_list(Groups) ->
Suites =
@@ -1001,22 +1041,34 @@ valid_terms() ->
{cover,3},
{config,2},
{config,3},
- {userconfig, 2},
- {userconfig, 3},
+ {userconfig,2},
+ {userconfig,3},
{alias,3},
{logdir,2},
{logdir,3},
+ {label,2},
+ {label,3},
{event_handler,2},
{event_handler,3},
{event_handler,4},
+ {multiply_timetraps,2},
+ {multiply_timetraps,3},
+ {scale_timetraps,2},
+ {scale_timetraps,3},
{include,2},
{include,3},
{suites,3},
{suites,4},
+ {groups,4},
+ {groups,5},
+ {groups,6},
{cases,4},
{cases,5},
{skip_suites,4},
{skip_suites,5},
+ {skip_groups,5},
+ {skip_groups,6},
+ {skip_groups,7},
{skip_cases,5},
{skip_cases,6}
].
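Note: the new entries above correspond to test specification terms of roughly the following shapes (directory, suite, group and case names are placeholders; each term also has a variant with a node name as its first element). The same forms are exercised by ct_testspec_1_SUITE further down in this diff.

    {groups, "groups_1", groups_11_SUITE, all}.
    {groups, "groups_1", groups_11_SUITE, [test_group_1a]}.
    {groups, "groups_1", groups_11_SUITE, test_group_1a, {cases,[testcase_1a]}}.
    {skip_groups, "groups_1", groups_11_SUITE, [test_group_1b], "SKIPPED!"}.
    {skip_groups, "groups_1", groups_11_SUITE, test_group_1b, {cases,all}, "SKIPPED!"}.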
diff --git a/lib/common_test/src/ct_util.erl b/lib/common_test/src/ct_util.erl
index eddaf4c8b9..0a434666fa 100644
--- a/lib/common_test/src/ct_util.erl
+++ b/lib/common_test/src/ct_util.erl
@@ -619,7 +619,6 @@ get_testdir(Dir, Suite) when is_list(Suite) ->
get_testdir(Dir, _) ->
get_testdir(Dir, all).
-
%%%-----------------------------------------------------------------
%%% @spec
%%%
diff --git a/lib/common_test/src/ct_util.hrl b/lib/common_test/src/ct_util.hrl
index 54eed29415..ee973f6220 100644
--- a/lib/common_test/src/ct_util.hrl
+++ b/lib/common_test/src/ct_util.hrl
@@ -30,6 +30,7 @@
-record(testspec, {spec_dir,
nodes=[],
init=[],
+ label=[],
logdir=["."],
cover=[],
config=[],
diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile
index 97ded5eb9a..3fb0d627a0 100644
--- a/lib/common_test/test/Makefile
+++ b/lib/common_test/test/Makefile
@@ -32,6 +32,7 @@ MODULES= \
ct_event_handler_SUITE \
ct_groups_test_1_SUITE \
ct_groups_test_2_SUITE \
+ ct_testspec_1_SUITE \
ct_skip_SUITE \
ct_error_SUITE \
ct_test_server_if_1_SUITE \
diff --git a/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_11_SUITE.erl b/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_11_SUITE.erl
index c6d50443d0..c69400e938 100644
--- a/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_11_SUITE.erl
+++ b/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_11_SUITE.erl
@@ -99,6 +99,7 @@ init_per_group(Group, Config) ->
{Grs,_} = grs_and_tcs(),
case lists:member(Group, Grs) of
true ->
+ ct:comment(Group),
init = ?config(suite,Config),
[{Group,Group} | Config];
false ->
@@ -109,6 +110,7 @@ end_per_group(Group, Config) ->
{Grs,_} = grs_and_tcs(),
case lists:member(Group, Grs) of
true ->
+ ct:comment(Group),
init = ?config(suite,Config),
Group = ?config(Group,Config),
ok;
diff --git a/lib/common_test/test/ct_groups_test_2_SUITE.erl b/lib/common_test/test/ct_groups_test_2_SUITE.erl
index 56e0ac30c7..c4371501b3 100644
--- a/lib/common_test/test/ct_groups_test_2_SUITE.erl
+++ b/lib/common_test/test/ct_groups_test_2_SUITE.erl
@@ -60,7 +60,7 @@ all(doc) ->
["Run smoke tests of Common Test."];
all(suite) ->
- [missing_conf, testspec_1, repeat_1].
+ [missing_conf, repeat_1].
%%--------------------------------------------------------------------
%% TEST CASES
@@ -88,25 +88,6 @@ missing_conf(Config) when is_list(Config) ->
%%%-----------------------------------------------------------------
%%%
-testspec_1(Config) when is_list(Config) ->
- DataDir = ?config(data_dir, Config),
-
- TestSpec = filename:join(DataDir, "specs/groups_2.1.spec"),
-
- {Opts,ERPid} = setup({spec,TestSpec}, Config),
- ok = ct_test_support:run(Opts, Config),
- Events = ct_test_support:get_events(ERPid, Config),
-
- ct_test_support:log_events(testspec_1,
- reformat(Events, ?eh),
- ?config(priv_dir, Config)),
-
- TestEvents = events_to_check(testspec_1),
- ok = ct_test_support:verify_events(TestEvents, Events, Config).
-
-%%%-----------------------------------------------------------------
-%%%
-
repeat_1(Config) when is_list(Config) ->
DataDir = ?config(data_dir, Config),
@@ -173,9 +154,6 @@ test_events(missing_conf) ->
{?eh,stop_logging,[]}
];
-test_events(testspec_1) ->
- [];
-
test_events(repeat_1) ->
[
{?eh,start_logging,{'DEF','RUNDIR'}},
diff --git a/lib/common_test/test/ct_groups_test_2_SUITE_data/specs/groups_2.1.spec b/lib/common_test/test/ct_groups_test_2_SUITE_data/specs/groups_2.1.spec
deleted file mode 100644
index 24d778ac44..0000000000
--- a/lib/common_test/test/ct_groups_test_2_SUITE_data/specs/groups_2.1.spec
+++ /dev/null
@@ -1,26 +0,0 @@
-
-{config, "../cfgs/groups_2.1.cfg"}.
-{alias, groups_2, "../groups_2"}.
-
-{suites, groups_2, groups_21_SUITE}.
-%{skip_groups, groups_2, groups_21_SUITE,
-% [test_group_1b, test_group_7], "Skip tg_1b & tg_7"}.
-{skip_cases, groups_2, groups_21_SUITE,
- [testcase_1b, testcase_3a], "Skip tc_1b & tc_3a"}.
-
-{groups, groups_2, groups_22_SUITE,
- test_group_1a}.
-{skip_cases, groups_2, groups_22_SUITE,
- testcase_1a, "Skip tc_1a"}.
-
-{groups, groups_2, groups_22_SUITE,
- test_group_1b}.
-{skip_cases, groups_2, groups_22_SUITE,
- testcase_1b, "Skip tc_1b"}.
-%{skip_groups, groups_2, groups_21_SUITE,
-% [test_group_3], "Skip tg_3"}.
-
-{groups, groups_2, groups_22_SUITE,
- test_group_5}.
-{skip_cases, groups_2, groups_22_SUITE,
- [testcase_7a, testcase_7b], "Skip tc_7a & tc_7b"}.
diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl
index 6a75c8f789..7bfb9ffb49 100644
--- a/lib/common_test/test/ct_test_support.erl
+++ b/lib/common_test/test/ct_test_support.erl
@@ -23,7 +23,7 @@
%%%
-module(ct_test_support).
--include("test_server.hrl").
+-include_lib("test_server/include/test_server.hrl").
-include_lib("common_test/include/ct_event.hrl").
-export([init_per_suite/1, init_per_suite/2, end_per_suite/1,
@@ -111,8 +111,9 @@ init_per_testcase(_TestCase, Config) ->
case lists:keysearch(master, 1, Config) of
false->
test_server:format("See Common Test logs here:\n\n"
- "<a href=\"file://~s/all_runs.html\">~s/all_runs.html</a>",
- [LogDir,LogDir]);
+ "<a href=\"file://~s/all_runs.html\">~s/all_runs.html</a>\n"
+ "<a href=\"file://~s/index.html\">~s/index.html</a>",
+ [LogDir,LogDir,LogDir,LogDir]);
{value, _}->
test_server:format("See CT Master Test logs here:\n\n"
"<a href=\"file://~s/master_runs.html\">~s/master_runs.html</a>",
diff --git a/lib/common_test/test/ct_testspec_1_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE.erl
new file mode 100644
index 0000000000..dc399bfb4c
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE.erl
@@ -0,0 +1,433 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File: ct_testspec_1_SUITE
+%%%
+%%% Description:
+%%% Tests the test specification functionality.
+%%%
+%%% The suites used for the test are located in the data directory.
+%%%-------------------------------------------------------------------
+-module(ct_testspec_1_SUITE).
+
+-compile(export_all).
+
+-include_lib("test_server/include/test_server.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+-define(eh, ct_test_support_eh).
+
+%%--------------------------------------------------------------------
+%% TEST SERVER CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Description: Since Common Test starts another Test Server
+%% instance, the tests need to be performed on a separate node (otherwise
+%% there would be clashes with logging processes etc.).
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ Config1 = ct_test_support:init_per_suite(Config),
+ Config1.
+
+end_per_suite(Config) ->
+ ct_test_support:end_per_suite(Config).
+
+init_per_testcase(TestCase, Config) ->
+ ct_test_support:init_per_testcase(TestCase, Config).
+
+end_per_testcase(TestCase, Config) ->
+ ct_test_support:end_per_testcase(TestCase, Config).
+
+all(doc) ->
+    ["Test the test specification functionality of Common Test."];
+
+all(suite) ->
+ [all_suites, skip_all_suites,
+ suite, skip_suite,
+ all_testcases, skip_all_testcases,
+ testcase, skip_testcase,
+ all_groups, skip_all_groups,
+ group, skip_group,
+ group_all_testcases, skip_group_all_testcases,
+ group_testcase, skip_group_testcase,
+ topgroup,
+ subgroup, skip_subgroup,
+ subgroup_all_testcases, skip_subgroup_all_testcases,
+ subgroup_testcase, skip_subgroup_testcase,
+ sub_skipped_by_top,
+ testcase_in_multiple_groups].
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
+
+%%%-----------------------------------------------------------------
+%%%
+
+all_suites(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{label,"all_suites"},
+ {suites,TestDir,all}],
+
+ setup_and_execute(all_suites, TestSpec, Config).
+
+skip_all_suites(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{label,skip_all_suites},
+ {suites,TestDir,all},
+ {skip_suites,TestDir,all,"SKIPPED!"}],
+
+ setup_and_execute(skip_all_suites, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+suite(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{label,undefined},
+ {suites,TestDir,simple_1_SUITE}],
+
+ setup_and_execute(suite, TestSpec, Config).
+
+skip_suite(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{suites,TestDir,[simple_1_SUITE,simple_2_SUITE]},
+ {skip_suites,TestDir,simple_1_SUITE,"SKIPPED!"}],
+
+ setup_and_execute(skip_suite, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{cases,TestDir,simple_1_SUITE,all}],
+
+ setup_and_execute(all_testcases, TestSpec, Config).
+
+skip_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{suites,TestDir,[simple_1_SUITE]},
+ {skip_cases,TestDir,simple_1_SUITE,all,"SKIPPED!"}],
+
+ setup_and_execute(skip_all_testcases, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{cases,TestDir,simple_1_SUITE,tc1}],
+
+ setup_and_execute(testcase, TestSpec, Config).
+
+skip_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "suites_1"),
+ TestSpec = [{cases,TestDir,simple_1_SUITE,[tc1,tc2]},
+ {cases,TestDir,simple_2_SUITE,[tc2,tc1]},
+ {skip_cases,TestDir,simple_1_SUITE,[tc1],"SKIPPED!"},
+ {skip_cases,TestDir,simple_2_SUITE,tc2,"SKIPPED!"}],
+
+ setup_and_execute(skip_testcase, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+all_groups(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,all}],
+
+ setup_and_execute(all_groups, TestSpec, Config).
+
+skip_all_groups(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,all},
+ {skip_groups,TestDir,groups_11_SUITE,all,"SKIPPED!"}],
+
+ setup_and_execute(skip_all_groups, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+group(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a}],
+
+ setup_and_execute(group, TestSpec, Config).
+
+skip_group(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,[test_group_1a,
+ test_group_1b]},
+ {skip_groups,TestDir,groups_11_SUITE,
+ [test_group_1b,test_group_2,test_group_7],"SKIPPED!"}],
+
+ setup_and_execute(skip_group, TestSpec, Config).
+
+
+%%%-----------------------------------------------------------------
+%%%
+
+group_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a,{cases,all}}],
+
+ setup_and_execute(group_all_testcases, TestSpec, Config).
+
+skip_group_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,[test_group_1a,
+ test_group_1b]},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1b,{cases,all},"SKIPPED!"},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1a,{cases,all},"SKIPPED!"}],
+
+ setup_and_execute(skip_group_all_testcases, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+group_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a,{cases,testcase_1a}}],
+
+ setup_and_execute(group_testcase, TestSpec, Config).
+
+skip_group_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_11_SUITE,test_group_1a,
+ {cases,[testcase_1a,testcase_1b]}},
+ {groups,TestDir,groups_11_SUITE,test_group_1b,
+ {cases,[testcase_1b,testcase_1a]}},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1a,{cases,testcase_1b},"SKIPPED!"},
+ {skip_groups,TestDir,groups_11_SUITE,
+ test_group_1b,{cases,[testcase_1a]},"SKIPPED!"}],
+
+ setup_and_execute(skip_group_testcase, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+topgroup(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_2},
+ {groups,TestDir,groups_12_SUITE,test_group_4}],
+
+ setup_and_execute(topgroup, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+subgroup(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_3}],
+
+ setup_and_execute(subgroup, TestSpec, Config).
+
+skip_subgroup(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,[test_group_4]},
+ {skip_groups,TestDir,groups_12_SUITE,
+ test_group_8,"SKIPPED!"}],
+
+ setup_and_execute(skip_subgroup, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+subgroup_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,
+ test_group_5,{cases,all}},
+ {groups,TestDir,groups_12_SUITE,
+ test_group_3,{cases,all}}],
+
+ setup_and_execute(subgroup_all_testcases, TestSpec, Config).
+
+skip_subgroup_all_testcases(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_4},
+ {skip_groups,TestDir,groups_12_SUITE,
+ test_group_5,{cases,all},"SKIPPED!"}],
+
+ setup_and_execute(skip_subgroup_all_testcases, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+subgroup_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,
+ test_group_7,{cases,testcase_7a}},
+ {groups,TestDir,groups_12_SUITE,
+ test_group_3,{cases,testcase_3b}}],
+
+ setup_and_execute(subgroup_testcase, TestSpec, Config).
+
+skip_subgroup_testcase(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_5},
+ {skip_groups,TestDir,groups_12_SUITE,
+ test_group_7,{cases,[testcase_7a,testcase_7b]},"SKIPPED!"}],
+
+ setup_and_execute(skip_subgroup_testcase, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+%%! Note: this test case currently gives a somewhat unexpected result.
+sub_skipped_by_top(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir,groups_12_SUITE,test_group_5},
+ {skip_groups,TestDir,groups_12_SUITE,test_group_4,"SKIPPED!"}],
+
+ setup_and_execute(sub_skipped_by_top, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+testcase_in_multiple_groups(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir = filename:join(DataDir, "groups_1"),
+ TestSpec = [{cases,TestDir,groups_12_SUITE,[testcase_1a,testcase_1b]},
+ {skip_cases,TestDir,groups_12_SUITE,[testcase_1b],"SKIPPED!"}],
+
+ setup_and_execute(testcase_in_multiple_groups, TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%% HELP FUNCTIONS
+%%%-----------------------------------------------------------------
+
+setup_and_execute(TCName, TestSpec, Config) ->
+ SpecFile = create_spec_file(?config(priv_dir, Config),
+ TCName, TestSpec),
+ TestTerms =
+ case lists:keymember(label, 1, TestSpec) of
+ true -> [{spec,SpecFile}];
+ false -> [{spec,SpecFile},{label,TCName}]
+ end,
+ {Opts,ERPid} = setup(TestTerms, Config),
+ ok = ct_test_support:run(Opts, Config),
+ TestSpec1 = [{logdir,proplists:get_value(logdir,Opts)},
+ {label,proplists:get_value(label,TestTerms)} | TestSpec],
+ ok = ct_test_support:run(ct, run_testspec, [TestSpec1], Config),
+ Events = ct_test_support:get_events(ERPid, Config),
+
+ ct_test_support:log_events(TCName,
+ reformat(Events, ?eh),
+ ?config(priv_dir, Config)),
+
+ TestEvents = events_to_check(TCName),
+ ok = ct_test_support:verify_events(TestEvents, Events, Config).
+
+create_spec_file(SpecDir, TCName, TestSpec) ->
+ FileName = filename:join(SpecDir,
+ atom_to_list(TCName)++".spec"),
+ {ok,Dev} = file:open(FileName, [write]),
+ [io:format(Dev, "~p.~n", [Term]) || Term <- TestSpec],
+ file:close(Dev),
+ io:format("~nTest spec created here~n~n<a href=\"file://~s\">~s</a>~n",
+ [FileName,FileName]),
+ FileName.
+
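Note: create_spec_file/3 above writes each spec term followed by a period, one term per line, so for e.g. the all_suites test case the generated file would contain roughly the following (the data dir path is a placeholder):

    {label,"all_suites"}.
    {suites,"/path/to/data_dir/suites_1",all}.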
+setup(Test, Config) when is_tuple(Test) ->
+ setup([Test], Config);
+setup(Tests, Config) ->
+ Opts0 = ct_test_support:get_opts(Config),
+ Level = ?config(trace_level, Config),
+ EvHArgs = [{cbm,ct_test_support},{trace_level,Level}],
+ Opts = Opts0 ++ Tests ++ [{event_handler,{?eh,EvHArgs}}],
+ ERPid = ct_test_support:start_event_receiver(Config),
+ {Opts,ERPid}.
+
+reformat(Events, EH) ->
+ ct_test_support:reformat(Events, EH).
+%reformat(Events, _EH) ->
+% Events.
+
+%%%-----------------------------------------------------------------
+%%% TEST EVENTS
+%%%-----------------------------------------------------------------
+events_to_check(Test) ->
+    %% 2 test runs (ct:run_test + script_start) are performed by default
+ events_to_check(Test, 2).
+
+events_to_check(_, 0) ->
+ [];
+events_to_check(Test, N) ->
+ test_events(Test) ++ events_to_check(Test, N-1).
+
+test_events(_) ->
+ [
+ ].
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_11_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_11_SUITE.erl
new file mode 100644
index 0000000000..4f11d8a0e8
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_11_SUITE.erl
@@ -0,0 +1,281 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(groups_11_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%====================================================================
+%% COMMON TEST CALLBACK FUNCTIONS
+%%====================================================================
+
+suite() ->
+ [{timetrap,{minutes,1}}].
+
+groups() ->
+ [
+ {test_group_1a, [testcase_1a,testcase_1b]},
+
+ {test_group_1b, [], [testcase_1a,testcase_1b]},
+
+ {test_group_2, [], [testcase_2a,
+
+ {test_group_3, [], [testcase_3a,
+ testcase_3b]},
+ testcase_2b]},
+
+ {test_group_4, [{test_group_5, [], [testcase_5a,
+
+ {group, test_group_6},
+
+ testcase_5b]}]},
+ {test_group_6, [{group, test_group_7}]},
+
+ {test_group_7, [testcase_7a,testcase_7b]}
+ ].
+
+all() ->
+ [testcase_1,
+ {group, test_group_1a},
+ {group, test_group_1b},
+ testcase_2,
+ {group, test_group_2},
+ testcase_3,
+ {group, test_group_4}].
+
+%% this function is for internal test purposes only
+grs_and_tcs() ->
+ {[
+ test_group_1a, test_group_1b,
+ test_group_2, test_group_3,
+ test_group_4, test_group_5,
+ test_group_6, test_group_7
+ ],
+ [
+ testcase_1,
+ testcase_1a, testcase_1b,
+ testcase_2,
+ testcase_2a, testcase_2b,
+ testcase_3a, testcase_3b,
+ testcase_3,
+ testcase_5a, testcase_5b,
+ testcase_7a, testcase_7b
+ ]}.
+
+%%--------------------------------------------------------------------
+%% Suite Configuration
+%%--------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ [{suite,init}|Config].
+
+end_per_suite(Config) ->
+ init = ?config(suite,Config).
+
+%%--------------------------------------------------------------------
+%% Group Configuration
+%%--------------------------------------------------------------------
+
+init_per_group(Group, Config) ->
+ [{name,Group}] = ?config(tc_group_properties,Config),
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ ct:comment(Group),
+ init = ?config(suite,Config),
+ [{Group,Group} | Config];
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+end_per_group(Group, Config) ->
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ ct:comment(Group),
+ init = ?config(suite,Config),
+ Group = ?config(Group,Config),
+ ok;
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+%%--------------------------------------------------------------------
+%% Testcase Configuration
+%%--------------------------------------------------------------------
+
+init_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ [{TestCase,TestCase} | Config];
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+end_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ TestCase = ?config(TestCase,Config),
+ ok;
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Testcases
+%%--------------------------------------------------------------------
+
+testcase_1() ->
+ [].
+testcase_1(Config) ->
+ init = ?config(suite,Config),
+ testcase_1 = ?config(testcase_1,Config),
+ ok.
+
+testcase_1a() ->
+ [].
+testcase_1a(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ testcase_1a = ?config(testcase_1a,Config),
+ ok.
+testcase_1b() ->
+ [].
+testcase_1b(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ undefined = ?config(testcase_1a,Config),
+ testcase_1b = ?config(testcase_1b,Config),
+ ok.
+
+testcase_2() ->
+ [].
+testcase_2(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_1a,Config),
+ undefined = ?config(test_group_1b,Config),
+ testcase_2 = ?config(testcase_2,Config),
+ ok.
+
+testcase_2a() ->
+ [].
+testcase_2a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ testcase_2a = ?config(testcase_2a,Config),
+ ok.
+testcase_2b() ->
+ [].
+testcase_2b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ undefined = ?config(testcase_2a,Config),
+ testcase_2b = ?config(testcase_2b,Config),
+ ok.
+
+testcase_3a() ->
+ [].
+testcase_3a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_2b,Config),
+ testcase_3a = ?config(testcase_3a,Config),
+ ok.
+testcase_3b() ->
+ [].
+testcase_3b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_3a,Config),
+ testcase_3b = ?config(testcase_3b,Config),
+ ok.
+
+testcase_3() ->
+ [].
+testcase_3(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_2,Config),
+ undefined = ?config(test_group_3,Config),
+ testcase_3 = ?config(testcase_3,Config),
+ ok.
+
+testcase_5a() ->
+ [].
+testcase_5a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_3,Config),
+ testcase_5a = ?config(testcase_5a,Config),
+ ok.
+testcase_5b() ->
+ [].
+testcase_5b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_5a,Config),
+ testcase_5b = ?config(testcase_5b,Config),
+ ok.
+
+testcase_7a() ->
+ [].
+testcase_7a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ testcase_7a = ?config(testcase_7a,Config),
+ ok.
+testcase_7b() ->
+ [].
+testcase_7b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ undefined = ?config(testcase_7a,Config),
+ testcase_7b = ?config(testcase_7b,Config),
+ ok.
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl
new file mode 100644
index 0000000000..69c06f9b83
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl
@@ -0,0 +1,344 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(groups_12_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%====================================================================
+%% COMMON TEST CALLBACK FUNCTIONS
+%%====================================================================
+
+suite() ->
+ [{timetrap,{minutes,1}}].
+
+groups() ->
+ [
+ {test_group_1a, [shuffle], [testcase_1a,testcase_1b,testcase_1c]},
+
+ {test_group_1b, [parallel], [testcase_1a,testcase_1b]},
+
+ {test_group_2, [parallel], [testcase_2a,
+
+ {test_group_3, [{repeat,2}],
+ [testcase_3a, testcase_3b]},
+
+ testcase_2b]},
+
+ {test_group_4, [{test_group_5, [parallel], [testcase_5a,
+
+ {group, test_group_6},
+
+ testcase_5b]}]},
+
+ {test_group_6, [parallel], [{group, test_group_7},
+ {group, test_group_8}]},
+
+ {test_group_7, [sequence], [testcase_7a,testcase_7b]},
+
+ {test_group_8, [shuffle,sequence], [testcase_8a,testcase_8b]}
+ ].
+
+all() ->
+ [{group, test_group_1a},
+ {group, test_group_1b},
+ testcase_1,
+ testcase_2,
+ {group, test_group_2},
+ testcase_3,
+ {group, test_group_4}].
+
+%% this function is for internal test purposes only
+grs_and_tcs() ->
+ {[
+ test_group_1a, test_group_1b,
+ test_group_2, test_group_3,
+ test_group_4, test_group_5,
+ test_group_6, test_group_7,
+ test_group_8
+ ],
+ [
+ testcase_1a, testcase_1b, testcase_1c,
+ testcase_1,
+ testcase_2,
+ testcase_2a, testcase_2b,
+ testcase_3a, testcase_3b,
+ testcase_3,
+ testcase_5a, testcase_5b,
+ testcase_7a, testcase_7b,
+ testcase_8a, testcase_8b
+ ]}.
+
+%%--------------------------------------------------------------------
+%% Suite Configuration
+%%--------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ [{suite,init}|Config].
+
+end_per_suite(Config) ->
+ init = ?config(suite,Config).
+
+%%--------------------------------------------------------------------
+%% Group Configuration
+%%--------------------------------------------------------------------
+
+init_per_group(Group, Config) ->
+ Cmt =
+ case {Group,?config(tc_group_properties,Config)} of
+ {test_group_1a,[{shuffle,S},{name,test_group_1a}]} ->
+ io_lib:format("shuffled, ~w", [S]);
+ {test_group_1b,[{name,test_group_1b},parallel]} -> "parallel";
+ {test_group_2,[{name,test_group_2},parallel]} -> "parallel";
+ {test_group_3,[{name,test_group_3},{repeat,2}]} -> "repeat 2";
+ {test_group_3,[{name,test_group_3}]} -> "repeat 1";
+ {test_group_4,[{name,test_group_4}]} -> ok;
+ {test_group_5,[{name,test_group_5},parallel]} -> "parallel";
+ {test_group_6,[{name,test_group_6},parallel]} -> "parallel";
+ {test_group_7,[{name,test_group_7},sequence]} -> "sequence";
+ {test_group_8,[{shuffle,_},{name,test_group_8},sequence]} ->
+ "shuffle & sequence"
+ end,
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ init = ?config(suite,Config),
+ ct:comment(io_lib:format("~w, ~s", [Group,Cmt])),
+ [{Group,Group} | Config];
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+end_per_group(Group, Config) ->
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ init = ?config(suite,Config),
+ Group = ?config(Group,Config),
+ ok;
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+%%--------------------------------------------------------------------
+%% Testcase Configuration
+%%--------------------------------------------------------------------
+
+init_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ [{TestCase,TestCase} | Config];
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+end_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ TestCase = ?config(TestCase,Config),
+ ok;
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Testcases
+%%--------------------------------------------------------------------
+
+testcase_1a() ->
+ [].
+testcase_1a(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ testcase_1a = ?config(testcase_1a,Config),
+ ok.
+testcase_1b() ->
+ [].
+testcase_1b(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ undefined = ?config(testcase_1a,Config),
+ testcase_1b = ?config(testcase_1b,Config),
+ ok.
+
+testcase_1c() ->
+ [].
+testcase_1c(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ undefined = ?config(testcase_1b,Config),
+ testcase_1c = ?config(testcase_1c,Config),
+ ok.
+
+testcase_1() ->
+ [].
+testcase_1(Config) ->
+ init = ?config(suite,Config),
+ testcase_1 = ?config(testcase_1,Config),
+ ok.
+
+testcase_2() ->
+ [].
+testcase_2(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_1a,Config),
+ undefined = ?config(test_group_1b,Config),
+ testcase_2 = ?config(testcase_2,Config),
+ ok.
+
+testcase_2a() ->
+ [].
+testcase_2a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ testcase_2a = ?config(testcase_2a,Config),
+ ok.
+testcase_2b() ->
+ [].
+testcase_2b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ undefined = ?config(testcase_2a,Config),
+ testcase_2b = ?config(testcase_2b,Config),
+ ok.
+
+testcase_3a() ->
+ [].
+testcase_3a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_2b,Config),
+ testcase_3a = ?config(testcase_3a,Config),
+ ok.
+testcase_3b() ->
+ [].
+testcase_3b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_3a,Config),
+ testcase_3b = ?config(testcase_3b,Config),
+ ok.
+
+testcase_3() ->
+ [].
+testcase_3(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_2,Config),
+ undefined = ?config(test_group_3,Config),
+ testcase_3 = ?config(testcase_3,Config),
+ ok.
+
+testcase_5a() ->
+ [].
+testcase_5a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_3,Config),
+ testcase_5a = ?config(testcase_5a,Config),
+    %% increase the chance that the 'done' event arrives while the
+    %% subgroup is still executing (this case can be tricky to handle)
+ timer:sleep(3),
+ ok.
+testcase_5b() ->
+ [].
+testcase_5b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_5a,Config),
+ testcase_5b = ?config(testcase_5b,Config),
+ ok.
+
+testcase_7a() ->
+ [].
+testcase_7a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ testcase_7a = ?config(testcase_7a,Config),
+ ok.
+testcase_7b() ->
+ [].
+testcase_7b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ undefined = ?config(testcase_7a,Config),
+ testcase_7b = ?config(testcase_7b,Config),
+ ok.
+
+testcase_8a() ->
+ [].
+testcase_8a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_8 = ?config(test_group_8,Config),
+ testcase_8a = ?config(testcase_8a,Config),
+ ok.
+testcase_8b() ->
+ [].
+testcase_8b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_8 = ?config(test_group_8,Config),
+ undefined = ?config(testcase_8a,Config),
+ testcase_8b = ?config(testcase_8b,Config),
+ ok.
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_21_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_21_SUITE.erl
new file mode 100644
index 0000000000..2533ac8e84
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_21_SUITE.erl
@@ -0,0 +1,281 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(groups_21_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%====================================================================
+%% COMMON TEST CALLBACK FUNCTIONS
+%%====================================================================
+
+suite() ->
+ [{timetrap,{minutes,1}}].
+
+groups() ->
+ [
+ {test_group_1a, [testcase_1a,testcase_1b]},
+
+ {test_group_1b, [], [testcase_1a,testcase_1b]},
+
+ {test_group_2, [], [testcase_2a,
+
+ {test_group_3, [], [testcase_3a,
+ testcase_3b]},
+ testcase_2b]},
+
+ {test_group_4, [{test_group_5, [], [testcase_5a,
+
+ {group, test_group_6},
+
+ testcase_5b]}]},
+
+ {test_group_6, [{group, test_group_7}]},
+
+ {test_group_7, [testcase_7a,testcase_7b]}
+ ].
+
+all() ->
+ [testcase_1,
+ {group, test_group_1a},
+ {group, test_group_1b},
+ testcase_2,
+ {group, test_group_2},
+ testcase_3,
+ {group, test_group_4}].
+
+%% this function is for internal test purposes only
+grs_and_tcs() ->
+ {[
+ test_group_1a, test_group_1b,
+ test_group_2, test_group_3,
+ test_group_4, test_group_5,
+ test_group_6, test_group_7
+ ],
+ [
+ testcase_1,
+ testcase_1a, testcase_1b,
+ testcase_2,
+ testcase_2a, testcase_2b,
+ testcase_3a, testcase_3b,
+ testcase_3,
+ testcase_5a, testcase_5b,
+ testcase_7a, testcase_7b
+ ]}.
+
+%%--------------------------------------------------------------------
+%% Suite Configuration
+%%--------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ [{suite,init}|Config].
+
+end_per_suite(Config) ->
+ init = ?config(suite,Config).
+
+%%--------------------------------------------------------------------
+%% Group Configuration
+%%--------------------------------------------------------------------
+
+init_per_group(Group, Config) ->
+ [{name,Group}] = ?config(tc_group_properties,Config),
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ ct:comment(io_lib:format("~w", [Group])),
+ init = ?config(suite,Config),
+ [{Group,Group} | Config];
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+end_per_group(Group, Config) ->
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ init = ?config(suite,Config),
+ Group = ?config(Group,Config),
+ ok;
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+%%--------------------------------------------------------------------
+%% Testcase Configuration
+%%--------------------------------------------------------------------
+
+init_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ [{TestCase,TestCase} | Config];
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+end_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ TestCase = ?config(TestCase,Config),
+ ok;
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Testcases
+%%--------------------------------------------------------------------
+
+testcase_1() ->
+ [].
+testcase_1(Config) ->
+ init = ?config(suite,Config),
+ testcase_1 = ?config(testcase_1,Config),
+ ok.
+
+testcase_1a() ->
+ [].
+testcase_1a(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ testcase_1a = ?config(testcase_1a,Config),
+ ok.
+testcase_1b() ->
+ [].
+testcase_1b(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ undefined = ?config(testcase_1a,Config),
+ testcase_1b = ?config(testcase_1b,Config),
+ ok.
+
+testcase_2() ->
+ [].
+testcase_2(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_1a,Config),
+ undefined = ?config(test_group_1b,Config),
+ testcase_2 = ?config(testcase_2,Config),
+ ok.
+
+testcase_2a() ->
+ [].
+testcase_2a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ testcase_2a = ?config(testcase_2a,Config),
+ ok.
+testcase_2b() ->
+ [].
+testcase_2b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ undefined = ?config(testcase_2a,Config),
+ testcase_2b = ?config(testcase_2b,Config),
+ ok.
+
+testcase_3a() ->
+ [].
+testcase_3a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_2b,Config),
+ testcase_3a = ?config(testcase_3a,Config),
+ ok.
+testcase_3b() ->
+ [].
+testcase_3b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_3a,Config),
+ testcase_3b = ?config(testcase_3b,Config),
+ ok.
+
+testcase_3() ->
+ [].
+testcase_3(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_2,Config),
+ undefined = ?config(test_group_3,Config),
+ testcase_3 = ?config(testcase_3,Config),
+ ok.
+
+testcase_5a() ->
+ [].
+testcase_5a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_3,Config),
+ testcase_5a = ?config(testcase_5a,Config),
+ ok.
+testcase_5b() ->
+ [].
+testcase_5b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_5a,Config),
+ testcase_5b = ?config(testcase_5b,Config),
+ ok.
+
+testcase_7a() ->
+ [].
+testcase_7a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ testcase_7a = ?config(testcase_7a,Config),
+ ok.
+testcase_7b() ->
+ [].
+testcase_7b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ undefined = ?config(testcase_7a,Config),
+ testcase_7b = ?config(testcase_7b,Config),
+ ok.
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl
new file mode 100644
index 0000000000..cd517876df
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl
@@ -0,0 +1,314 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(groups_22_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%====================================================================
+%% COMMON TEST CALLBACK FUNCTIONS
+%%====================================================================
+
+suite() ->
+ [{timetrap,{minutes,1}}].
+
+groups() ->
+ [
+ {test_group_1a, [shuffle], [testcase_1a,testcase_1b,testcase_1c]},
+
+ {test_group_1b, [parallel], [testcase_1a,testcase_1b]},
+
+ {test_group_2, [parallel], [testcase_2a,
+
+ {test_group_3, [{repeat,1}],
+ [testcase_3a, testcase_3b]},
+
+ testcase_2b]},
+
+ {test_group_4, [{test_group_5, [parallel], [testcase_5a,
+
+ {group, test_group_6},
+
+ testcase_5b]}]},
+
+ {test_group_6, [parallel], [{group, test_group_7}]},
+
+ {test_group_7, [sequence], [testcase_7a,testcase_7b]}
+ ].
+
+all() ->
+ [{group, test_group_1a},
+ {group, test_group_1b},
+ testcase_1,
+ testcase_2,
+ {group, test_group_2},
+ testcase_3,
+ {group, test_group_4}].
+
+%% this function is for internal test purposes only
+grs_and_tcs() ->
+ {[
+ test_group_1a, test_group_1b,
+ test_group_2, test_group_3,
+ test_group_4, test_group_5,
+ test_group_6, test_group_7
+ ],
+ [
+ testcase_1a, testcase_1b, testcase_1c,
+ testcase_1,
+ testcase_2,
+ testcase_2a, testcase_2b,
+ testcase_3a, testcase_3b,
+ testcase_3,
+ testcase_5a, testcase_5b,
+ testcase_7a, testcase_7b
+ ]}.
+
+%%--------------------------------------------------------------------
+%% Suite Configuration
+%%--------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ [{suite,init}|Config].
+
+end_per_suite(Config) ->
+ init = ?config(suite,Config).
+
+%%--------------------------------------------------------------------
+%% Group Configuration
+%%--------------------------------------------------------------------
+
+init_per_group(Group, Config) ->
+ Cmt =
+ case {Group,?config(tc_group_properties,Config)} of
+ {test_group_1a,[{shuffle,S},{name,test_group_1a}]} ->
+ io_lib:format("shuffled, ~w", [S]);
+ {test_group_1b,[{name,test_group_1b},parallel]} -> "parallel";
+ {test_group_2,[{name,test_group_2},parallel]} -> "parallel";
+ {test_group_3,[{name,test_group_3},{repeat,1}]} -> "repeat 1";
+ {test_group_3,[{name,test_group_3}]} -> "repeat 0";
+ {test_group_4,[{name,test_group_4}]} -> ok;
+ {test_group_5,[{name,test_group_5},parallel]} -> "parallel";
+ {test_group_6,[{name,test_group_6},parallel]} -> "parallel";
+ {test_group_7,[{name,test_group_7},sequence]} -> "sequence"
+ end,
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ init = ?config(suite,Config),
+ ct:comment(io_lib:format("~w, ~s", [Group,Cmt])),
+ [{Group,Group} | Config];
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+end_per_group(Group, Config) ->
+ {Grs,_} = grs_and_tcs(),
+ case lists:member(Group, Grs) of
+ true ->
+ init = ?config(suite,Config),
+ Group = ?config(Group,Config),
+ ok;
+ false ->
+ ct:fail({bad_group,Group})
+ end.
+
+%%--------------------------------------------------------------------
+%% Testcase Configuration
+%%--------------------------------------------------------------------
+
+init_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ [{TestCase,TestCase} | Config];
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+end_per_testcase(TestCase, Config) ->
+ {_,TCs} = grs_and_tcs(),
+ case lists:member(TestCase, TCs) of
+ true ->
+ init = ?config(suite,Config),
+ TestCase = ?config(TestCase,Config),
+ ok;
+ false ->
+ ct:fail({unknown_testcase,TestCase})
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Testcases
+%%--------------------------------------------------------------------
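+%% Each test case checks that the Config entries added by init_per_suite,
+%% init_per_group and init_per_testcase are visible, and that entries
+%% added for sibling test cases are not (they should read as undefined).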
+
+testcase_1a() ->
+ [].
+testcase_1a(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ testcase_1a = ?config(testcase_1a,Config),
+ ok.
+testcase_1b() ->
+ [].
+testcase_1b(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ undefined = ?config(testcase_1a,Config),
+ testcase_1b = ?config(testcase_1b,Config),
+ ok.
+
+testcase_1c() ->
+ [].
+testcase_1c(Config) ->
+ init = ?config(suite,Config),
+ case ?config(test_group_1a,Config) of
+ test_group_1a -> ok;
+ _ ->
+ case ?config(test_group_1b,Config) of
+ test_group_1b -> ok;
+ _ -> ct:fail(no_group_data)
+ end
+ end,
+ undefined = ?config(testcase_1b,Config),
+ testcase_1c = ?config(testcase_1c,Config),
+ ok.
+
+testcase_1() ->
+ [].
+testcase_1(Config) ->
+ init = ?config(suite,Config),
+ testcase_1 = ?config(testcase_1,Config),
+ ok.
+
+testcase_2() ->
+ [].
+testcase_2(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_1a,Config),
+ undefined = ?config(test_group_1b,Config),
+ testcase_2 = ?config(testcase_2,Config),
+ ok.
+
+testcase_2a() ->
+ [].
+testcase_2a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ testcase_2a = ?config(testcase_2a,Config),
+ ok.
+testcase_2b() ->
+ [].
+testcase_2b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ undefined = ?config(testcase_2a,Config),
+ testcase_2b = ?config(testcase_2b,Config),
+ ok.
+
+testcase_3a() ->
+ [].
+testcase_3a(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_2b,Config),
+ testcase_3a = ?config(testcase_3a,Config),
+ ok.
+testcase_3b() ->
+ [].
+testcase_3b(Config) ->
+ init = ?config(suite,Config),
+ test_group_2 = ?config(test_group_2,Config),
+ test_group_3 = ?config(test_group_3,Config),
+ undefined = ?config(testcase_3a,Config),
+ testcase_3b = ?config(testcase_3b,Config),
+ ok.
+
+testcase_3() ->
+ [].
+testcase_3(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_2,Config),
+ undefined = ?config(test_group_3,Config),
+ testcase_3 = ?config(testcase_3,Config),
+ ok.
+
+testcase_5a() ->
+ [].
+testcase_5a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_3,Config),
+ testcase_5a = ?config(testcase_5a,Config),
+    %% Increase the chance that the 'done' event arrives while the
+    %% subgroup is still executing (a situation that could be tricky
+    %% to handle).
+ timer:sleep(3),
+ ok.
+testcase_5b() ->
+ [].
+testcase_5b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ undefined = ?config(testcase_5a,Config),
+ testcase_5b = ?config(testcase_5b,Config),
+ ok.
+
+testcase_7a() ->
+ [].
+testcase_7a(Config) ->
+ init = ?config(suite,Config),
+ undefined = ?config(test_group_3,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ testcase_7a = ?config(testcase_7a,Config),
+ ok.
+testcase_7b() ->
+ [].
+testcase_7b(Config) ->
+ init = ?config(suite,Config),
+ test_group_4 = ?config(test_group_4,Config),
+ test_group_5 = ?config(test_group_5,Config),
+ test_group_6 = ?config(test_group_6,Config),
+ test_group_7 = ?config(test_group_7,Config),
+ undefined = ?config(testcase_7a,Config),
+ testcase_7b = ?config(testcase_7b,Config),
+ ok.
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_1_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_1_SUITE.erl
new file mode 100644
index 0000000000..b789851134
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_1_SUITE.erl
@@ -0,0 +1,146 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(simple_1_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%--------------------------------------------------------------------
+%% COMMON TEST CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Function: suite() -> Info
+%%
+%% Info = [tuple()]
+%% List of key/value pairs.
+%%
+%% Description: Returns a list of tuples that set default properties
+%%              for the suite.
+%%
+%% Note: The suite/0 function is only meant to be used to return
+%% default data values, not perform any other operations.
+%%--------------------------------------------------------------------
+suite() ->
+ [
+ {timetrap,{seconds,10}}
+ ].
+
+%%--------------------------------------------------------------------
+%% Function: init_per_suite(Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
+%%
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Reason = term()
+%% The reason for skipping the suite.
+%%
+%% Description: Initialization before the suite.
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ [{ips,ips_data} | Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_suite(Config0) -> void() | {save_config,Config1}
+%%
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Cleanup after the suite.
+%%--------------------------------------------------------------------
+end_per_suite(Config) ->
+ ips_data = ?config(ips, Config).
+
+%%--------------------------------------------------------------------
+%% Function: init_per_testcase(TestCase, Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
+%%
+%% TestCase = atom()
+%% Name of the test case that is about to run.
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Reason = term()
+%% The reason for skipping the test case.
+%%
+%% Description: Initialization before each test case.
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_testcase(TestCase, Config) ->
+ [{TestCase,{TestCase,data}} | Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_testcase(TestCase, Config0) ->
+%% void() | {save_config,Config1}
+%%
+%% TestCase = atom()
+%% Name of the test case that is finished.
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Cleanup after each test case.
+%%--------------------------------------------------------------------
+end_per_testcase(TestCase, Config) ->
+ {TestCase,data} = ?config(TestCase, Config).
+
+%%--------------------------------------------------------------------
+%% Function: all() -> TestCases | {skip,Reason}
+%%
+%% TestCases = [TestCase | {sequence,SeqName}]
+%% TestCase = atom()
+%% Name of a test case.
+%% SeqName = atom()
+%% Name of a test case sequence.
+%% Reason = term()
+%% The reason for skipping all test cases.
+%%
+%% Description: Returns the list of test cases that are to be executed.
+%%--------------------------------------------------------------------
+all() ->
+ [tc1,
+ tc2].
+
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
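+%% tc1 and tc2 check that the Config entries added by init_per_suite and
+%% init_per_testcase are present, and that tc1's entry is not carried
+%% over into tc2.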
+
+tc1() ->
+ [{userdata,{info, "This is a testcase"}}].
+
+tc1(Config) ->
+ ips_data = ?config(ips, Config),
+ {tc1,data} = ?config(tc1, Config),
+ ok.
+
+tc2() ->
+ [{timetrap,5000}].
+
+tc2(Config) ->
+ ips_data = ?config(ips, Config),
+ undefined = ?config(tc1, Config),
+ {tc2,data} = ?config(tc2, Config),
+ ok.
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_2_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_2_SUITE.erl
new file mode 100644
index 0000000000..eb7e9cdf7b
--- /dev/null
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/suites_1/simple_2_SUITE.erl
@@ -0,0 +1,146 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(simple_2_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%--------------------------------------------------------------------
+%% COMMON TEST CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Function: suite() -> Info
+%%
+%% Info = [tuple()]
+%% List of key/value pairs.
+%%
+%% Description: Returns a list of tuples that set default properties
+%%              for the suite.
+%%
+%% Note: The suite/0 function is only meant to be used to return
+%% default data values, not perform any other operations.
+%%--------------------------------------------------------------------
+suite() ->
+ [
+ {timetrap,{seconds,10}}
+ ].
+
+%%--------------------------------------------------------------------
+%% Function: init_per_suite(Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
+%%
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Reason = term()
+%% The reason for skipping the suite.
+%%
+%% Description: Initialization before the suite.
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ [{ips,ips_data} | Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_suite(Config0) -> void() | {save_config,Config1}
+%%
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Cleanup after the suite.
+%%--------------------------------------------------------------------
+end_per_suite(Config) ->
+ ips_data = ?config(ips, Config).
+
+%%--------------------------------------------------------------------
+%% Function: init_per_testcase(TestCase, Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
+%%
+%% TestCase = atom()
+%% Name of the test case that is about to run.
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Reason = term()
+%% The reason for skipping the test case.
+%%
+%% Description: Initialization before each test case.
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_testcase(TestCase, Config) ->
+ [{TestCase,{TestCase,data}} | Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_testcase(TestCase, Config0) ->
+%% void() | {save_config,Config1}
+%%
+%% TestCase = atom()
+%% Name of the test case that is finished.
+%% Config0 = Config1 = [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Cleanup after each test case.
+%%--------------------------------------------------------------------
+end_per_testcase(TestCase, Config) ->
+ {TestCase,data} = ?config(TestCase, Config).
+
+%%--------------------------------------------------------------------
+%% Function: all() -> TestCases | {skip,Reason}
+%%
+%% TestCases = [TestCase | {sequence,SeqName}]
+%% TestCase = atom()
+%% Name of a test case.
+%% SeqName = atom()
+%% Name of a test case sequence.
+%% Reason = term()
+%% The reason for skipping all test cases.
+%%
+%% Description: Returns the list of test cases that are to be executed.
+%%--------------------------------------------------------------------
+all() ->
+ [tc1,
+ tc2].
+
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
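+%% As in simple_1_SUITE, tc1 and tc2 check that the Config entries added
+%% by init_per_suite and init_per_testcase are present, and that tc1's
+%% entry is not carried over into tc2.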
+
+tc1() ->
+ [{userdata,{info, "This is a testcase"}}].
+
+tc1(Config) ->
+ ips_data = ?config(ips, Config),
+ {tc1,data} = ?config(tc1, Config),
+ ok.
+
+tc2() ->
+ [{timetrap,5000}].
+
+tc2(Config) ->
+ ips_data = ?config(ips, Config),
+ undefined = ?config(tc1, Config),
+ {tc2,data} = ?config(tc2, Config),
+ ok.
diff --git a/lib/common_test/vsn.mk b/lib/common_test/vsn.mk
index 2947c6a152..413ef21df3 100644
--- a/lib/common_test/vsn.mk
+++ b/lib/common_test/vsn.mk
@@ -1,3 +1,3 @@
-COMMON_TEST_VSN = 1.5
+COMMON_TEST_VSN = 1.5.1