Diffstat (limited to 'lib/common_test')
-rw-r--r--  lib/common_test/doc/src/install_chapter.xml                          | 63
-rw-r--r--  lib/common_test/doc/src/run_test_chapter.xml                         | 25
-rw-r--r--  lib/common_test/install.sh.in                                        | 53
-rw-r--r--  lib/common_test/priv/Makefile.in                                     |  5
-rw-r--r--  lib/common_test/priv/bin/.gitignore                                  |  0
-rw-r--r--  lib/common_test/priv/run_test.in                                     | 63
-rw-r--r--  lib/common_test/src/ct.erl                                           | 45
-rw-r--r--  lib/common_test/src/ct_framework.erl                                 | 39
-rw-r--r--  lib/common_test/src/ct_logs.erl                                      |  9
-rw-r--r--  lib/common_test/src/ct_run.erl                                       | 32
-rw-r--r--  lib/common_test/src/ct_testspec.erl                                  | 34
-rw-r--r--  lib/common_test/src/ct_util.hrl                                      |  1
-rw-r--r--  lib/common_test/test/ct_auto_compile_SUITE.erl                       | 48
-rw-r--r--  lib/common_test/test/ct_test_server_if_1_SUITE.erl                   | 25
-rw-r--r--  lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl   | 28
-rw-r--r--  lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl   |  2
-rw-r--r--  lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl   | 27
17 files changed, 262 insertions(+), 237 deletions(-)
diff --git a/lib/common_test/doc/src/install_chapter.xml b/lib/common_test/doc/src/install_chapter.xml
index 7f8c606324..31125b945c 100644
--- a/lib/common_test/doc/src/install_chapter.xml
+++ b/lib/common_test/doc/src/install_chapter.xml
@@ -34,7 +34,7 @@
<title>General information</title>
<p>The two main interfaces for running tests with Common Test
- are an executable program named ct_run and an
+ are an executable program named <c>ct_run</c> and an
erlang module named <c>ct</c>. The ct_run program
is compiled for the underlying operating system (e.g. Unix/Linux
or Windows) during the build of the Erlang/OTP system, and is
@@ -43,67 +43,10 @@
The <c>ct</c> interface functions can be called from the Erlang shell,
or from any Erlang function, on any supported platform.</p>
- <p>A legacy Bourne shell script - named run_test - exists,
- which may be manually generated and installed. This script may be used
- instead of the ct_run program mentioned above, e.g. if the user
- wishes to modify or customize the Common Test start flags in a simpler
- way than making changes to the ct_run C program.</p>
-
<p>The Common Test application is installed with the Erlang/OTP
system and no additional installation step is required to start using
- Common Test by means of the ct_run executable program, and/or the interface
- functions in the <c>ct</c> module. If you wish to use the legacy Bourne
- shell script version run_test, however, this script needs to be
- generated first, according to the instructions below.</p>
-
- <note><p>Before reading on, please note that since Common Test version
- 1.5, the run_test shell script is no longer required for starting
- tests with Common Test from the OS command line. The ct_run
- program (descibed above) is the new recommended command line interface
- for Common Test. The shell script exists mainly for legacy reasons and
- may not be updated in future releases of Common Test. It may even be removed.
- </p></note>
-
- <p>Optional step to generate a shell script for starting Common Test:</p>
- <p>To generate the run_test shell script, navigate to the
- <c><![CDATA[common_test-<vsn>]]></c> directory, located among the other
- OTP applications (under the OTP lib directory). Here execute the
- <c>install.sh</c> script with argument <c>local</c>:</p>
-
- <p><c>
- $ ./install.sh local
- </c></p>
-
- <p>This generates the executable run_test script in the
- <c><![CDATA[common_test-<vsn>/priv/bin]]></c> directory. The script
- will include absolute paths to the Common Test and Test Server
- application directories, so it's possible to copy or move the script to
- a different location on the file system, if desired, without having to
- update it. It's of course possible to leave the script under the
- <c>priv/bin</c> directory and update the PATH variable accordingly (or
- create a link or alias to it).</p>
-
- <p>If you, for any reason, have copied Common Test and Test Server
- to a different location than the default OTP lib directory, you can
- generate a run_test script with a different top level directory,
- simply by specifying the directory, instead of <c>local</c>, when running
- <c>install.sh</c>. Example:</p>
-
- <p><c>
- $ install.sh /usr/local/test_tools
- </c></p>
-
- <p>Note that the <c><![CDATA[common_test-<vsn>]]></c> and
- <c><![CDATA[test_server-<vsn>]]></c> directories must be located under the
- same top directory. Note also that the install script does not copy files
- or update environment variables. It only generates the run_test
- script.</p>
-
- <p>Whenever you install a new version of Erlang/OTP, the run_test
- script needs to be regenerated, or updated manually with new directory names
- (new version numbers), for it to "see" the latest Common Test and Test Server
- versions.</p>
-
+ Common Test by means of the <c>ct_run</c> executable program, and/or
+ the interface functions in the <c>ct</c> module.</p>
</section>
</chapter>
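A quick illustration of the two interfaces the updated chapter points to: a test run can be started with the ct_run program from the OS shell, or with ct:run_test/1 from the Erlang shell. A minimal sketch of the latter is shown below; the spec file name is made up for illustration, and the corresponding ct_run invocation would pass the same information as command line flags (e.g. -spec).

    %% from the Erlang shell; "my_server.spec" is a hypothetical test spec file
    1> ct:run_test([{spec, "my_server.spec"}]).
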
diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml
index 864f82cb63..df60e5f7f2 100644
--- a/lib/common_test/doc/src/run_test_chapter.xml
+++ b/lib/common_test/doc/src/run_test_chapter.xml
@@ -1005,6 +1005,31 @@
for starting the tests, the relaxed scanner
mode is enabled by means of the tuple: <c>{allow_user_terms,true}</c></p>
</section>
+ <section>
+ <title>Reading test specification terms</title>
+ <p>It's possible to look up terms in the current test specification
+ (i.e. the spec that's been used to configure and run the current test).
+ The function <c>get_testspec_terms()</c> returns a list of all test spec
+ terms (both config- and test terms) and <c>get_testspec_terms(Tags)</c>
+ returns the term (or a list of terms) matching the tag (or tags) in
+ <c>Tags</c>.</p>
+ <p>For example, in the test specification:</p>
+ <pre>
+ ...
+ {label, my_server_smoke_test}.
+ {config, "../../my_server_setup.cfg"}.
+ {config, "../../my_server_interface.cfg"}.
+ ...</pre>
+      <p>And, for example, in a test suite or a CT hook function:</p>
+ <pre>
+ ...
+ [{label,[{_Node,TestType}]}, {config,CfgFiles}] =
+ ct:get_testspec_terms([label,config]),
+
+ [verify_my_server_cfg(TestType, CfgFile) || {Node,CfgFile} &lt;- CfgFiles,
+ Node == node()];
+ ...</pre>
+ </section>
</section>
<section>
diff --git a/lib/common_test/install.sh.in b/lib/common_test/install.sh.in
deleted file mode 100644
index 5108c7a259..0000000000
--- a/lib/common_test/install.sh.in
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/sh
-
-CT_ROOT=$1
-CT_VSN=@CT_VSN@
-TS_VSN=@TS_VSN@
-
-if [ -z "$CT_ROOT" ]
-then
- echo "install.sh: need CT_ROOT (absolute) directory or 'local' as argument"
- exit 1
-fi
-
-if [ $CT_ROOT = "local" ]
-then
- CT_DIR=`pwd`
- cd priv
- sed -e "s,@CTPATH@,$CT_DIR/ebin," \
- -e "s,@TSPATH@,$CT_DIR/../test_server/ebin," \
- run_test.in > bin/run_test
- chmod 775 bin/run_test
- echo "install successful, start script created in " $CT_ROOT/common_test-$CT_VSN/priv/bin
-else
-
- if [ ! -d "$CT_ROOT" ]
- then
- echo "install.sh: CT_ROOT argument must be a valid directory"
- exit 1
- fi
-
- if [ `echo $CT_ROOT | awk '{ print substr($1,1,1) }'` != "/" ]
- then
- echo "install.sh: need an absolute path to CT_ROOT"
- exit 1
- fi
-
- if [ ! -d $CT_ROOT/common_test-$CT_VSN ]
- then
- echo "install.sh: The directory $CT_ROOT/common_test-$CT_VSN does not exist"
- exit 1
- fi
-
- if [ -d $CT_ROOT/common_test-$CT_VSN/priv ]
- then
- cd $CT_ROOT/common_test-$CT_VSN/priv
- sed -e "s;@CTPATH@;$CT_ROOT/common_test-$CT_VSN/ebin;" \
- -e "s;@TSPATH@;$CT_ROOT/test_server-$TS_VSN/ebin;" \
- run_test.in > bin/run_test
- chmod 775 bin/run_test
- echo "install successful, start script created in " $CT_ROOT/common_test-$CT_VSN/priv/bin
- fi
-fi
-
-
diff --git a/lib/common_test/priv/Makefile.in b/lib/common_test/priv/Makefile.in
index 1bc6b82ebb..7765b06f95 100644
--- a/lib/common_test/priv/Makefile.in
+++ b/lib/common_test/priv/Makefile.in
@@ -66,12 +66,7 @@ JS = jquery-latest.js jquery.tablesorter.min.js
# Rules
#
-include ../../test_server/vsn.mk
debug opt:
- $(V_at)sed -e 's;@CT_VSN@;$(VSN);' \
- -e 's;@TS_VSN@;$(TEST_SERVER_VSN);' \
- ../install.sh.in > install.sh
- - $(V_at)chmod -f 775 install.sh
docs:
diff --git a/lib/common_test/priv/bin/.gitignore b/lib/common_test/priv/bin/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/common_test/priv/bin/.gitignore
+++ /dev/null
diff --git a/lib/common_test/priv/run_test.in b/lib/common_test/priv/run_test.in
deleted file mode 100644
index 1508751e4f..0000000000
--- a/lib/common_test/priv/run_test.in
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/sh
-
-args=""
-
-while [ $1 ]; do
- if [ $1 = "-config" ]; then
- args="$args -ct_config";
- elif [ $1 = "-decrypt_key" ]; then
- args="$args -ct_decrypt_key";
- elif [ $1 = "-decrypt_file" ]; then
- args="$args -ct_decrypt_file";
- elif [ $1 = "-vts" ]; then
- vts=1;
- args="$args $1";
- elif [ $1 = "-browser" ]; then
- browser=$2;
- args="$args $1";
- elif [ $1 = "-shell" ]; then
- shell=1;
- args="$args $1";
- elif [ $1 = "-ctname" ]; then
- ctname=$2;
- args="$args";
- elif [ $1 = "-ctmaster" ]; then
- master=1;
- args="$args";
- else
- args="$args $1"
- fi
- shift
-done
-
-if [ $vts ]; then
- erl -sname ct \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- -s webtool script_start vts $browser \
- -s ct_run script_start \
- $args;
-elif [ $shell ]; then
- erl -sname ct \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- -s ct_run script_start \
- $args;
-elif [ $ctname ]; then
- erl -sname $ctname \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- $args;
-elif [ $master ]; then
- erl -sname ct_master \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- $args;
-else
- erl -sname ct \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- -s ct_run script_start \
- -s erlang halt \
- $args
-fi
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index 9d8fce2789..5ed1346f1e 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -79,6 +79,7 @@
%% Other interface functions
-export([get_status/0, abort_current_testcase/1,
get_event_mgr_ref/0,
+ get_testspec_terms/0, get_testspec_terms/1,
encrypt_config_file/2, encrypt_config_file/3,
decrypt_config_file/2, decrypt_config_file/3]).
@@ -463,6 +464,50 @@ reload_config(Required)->
ct_config:reload_config(Required).
%%%-----------------------------------------------------------------
+%%% @spec get_testspec_terms() -> TestSpecTerms | undefined
+%%% TestSpecTerms = [{Tag,Value}]
+%%% Value = [term()]
+%%%
+%%% @doc Get a list of all test specification terms used to
+%%% configure and run this test.
+%%%
+get_testspec_terms() ->
+ case ct_util:get_testdata(testspec) of
+ undefined ->
+ undefined;
+ CurrSpecRec ->
+ ct_testspec:testspec_rec2list(CurrSpecRec)
+ end.
+
+%%%-----------------------------------------------------------------
+%%% @spec get_testspec_terms(Tags) -> TestSpecTerms | undefined
+%%% Tags = [Tag] | Tag
+%%% Tag = atom()
+%%% TestSpecTerms = [{Tag,Value}] | {Tag,Value}
+%%% Value = [{Node,term()}] | [term()]
+%%% Node = atom()
+%%%
+%%% @doc Read one or more terms from the test specification used
+%%% to configure and run this test. Tag is any valid test specification
+%%% tag, such as <c>label</c>, <c>config</c> or <c>logdir</c>.
+%%% User specific terms are also available to read if the
+%%% <c>allow_user_terms</c> option has been set. Note that all value tuples
+%%% returned, except user terms, will have the node name as first element.
+%%% Note also that in order to read test terms, use <c>Tag = tests</c>
+%%% (rather than <c>suites</c>, <c>groups</c> or <c>cases</c>). Value is
+%%% then the list of *all* tests, of the form:
+%%% <c>[{Node,Dir,[{TestSpec,GroupsAndCases},...]},...]</c>, where
+%%% <c>GroupsAndCases = [{Group,[Case]}] | [Case]</c>.
+get_testspec_terms(Tags) ->
+ case ct_util:get_testdata(testspec) of
+ undefined ->
+ undefined;
+ CurrSpecRec ->
+ ct_testspec:testspec_rec2list(Tags, CurrSpecRec)
+ end.
+
+
+%%%-----------------------------------------------------------------
%%% @spec log(Format) -> ok
%%% @equiv log(default,50,Format,[])
log(Format) ->
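A hedged sketch of how a suite might use the two functions added above, complementing the edoc: get_testspec_terms/0 fetches the full term list (or undefined when no spec was used), and get_testspec_terms/1 fetches selected tags. The verify_label/1 helper is hypothetical.

    init_per_suite(Config) ->
        case ct:get_testspec_terms() of
            undefined ->
                %% the test was not started from a test specification
                ok;
            AllTerms when is_list(AllTerms) ->
                %% every entry is a {Tag,Value} tuple; values (except user
                %% terms) are lists of {Node,Term} tuples
                {label,Labels} = ct:get_testspec_terms(label),
                verify_label([L || {Node,L} <- Labels, Node == node()])
        end,
        Config.
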
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index ea3d7c8218..91368d3137 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -113,6 +113,7 @@ init_tc1(?MODULE,_,error_in_suite,[Config0]) when is_list(Config0) ->
ct_event:notify(#event{name=tc_start,
node=node(),
data={?MODULE,error_in_suite}}),
+ ct_suite_init(?MODULE, error_in_suite, [], Config0),
case ?val(error, Config0) of
undefined ->
{fail,"unknown_error_in_suite"};
@@ -635,7 +636,20 @@ try_set_default(Name,Key,Info,Where) ->
end_tc(Mod, Fun, Args) ->
%% Have to keep end_tc/3 for backwards compatibility issues
end_tc(Mod, Fun, Args, '$end_tc_dummy').
-end_tc(?MODULE,error_in_suite,_, _) -> % bad start!
+end_tc(?MODULE,error_in_suite,{Result,[Args]},Return) ->
+ %% this clause gets called if CT has encountered a suite that
+ %% can't be executed
+ FinalNotify =
+ case ct_hooks:end_tc(?MODULE, error_in_suite, Args, Result, Return) of
+ '$ct_no_change' ->
+ Result;
+ HookResult ->
+ HookResult
+ end,
+ Event = #event{name=tc_done,
+ node=node(),
+ data={?MODULE,error_in_suite,tag(FinalNotify)}},
+ ct_event:sync_notify(Event),
ok;
end_tc(Mod,Func,{TCPid,Result,[Args]}, Return) when is_pid(TCPid) ->
end_tc(Mod,Func,TCPid,Result,Args,Return);
@@ -1062,18 +1076,35 @@ get_all_cases1(_, []) ->
get_all(Mod, ConfTests) ->
case catch apply(Mod, all, []) of
- {'EXIT',_} ->
+ {'EXIT',{undef,[{Mod,all,[],_} | _]}} ->
Reason =
case code:which(Mod) of
non_existing ->
list_to_atom(atom_to_list(Mod)++
- " can not be compiled or loaded");
+ " can not be compiled or loaded");
_ ->
list_to_atom(atom_to_list(Mod)++":all/0 is missing")
end,
%% this makes test_server call error_in_suite as first
%% (and only) test case so we can report Reason properly
[{?MODULE,error_in_suite,[[{error,Reason}]]}];
+ {'EXIT',ExitReason} ->
+ case ct_util:get_testdata({error_in_suite,Mod}) of
+ undefined ->
+ ErrStr = io_lib:format("~n*** ERROR *** "
+ "~w:all/0 failed: ~p~n",
+ [Mod,ExitReason]),
+ io:format(user, ErrStr, []),
+ %% save the error info so it doesn't get printed twice
+ ct_util:set_testdata_async({{error_in_suite,Mod},
+ ExitReason});
+ _ExitReason ->
+ ct_util:delete_testdata({error_in_suite,Mod})
+ end,
+ Reason = list_to_atom(atom_to_list(Mod)++":all/0 failed"),
+ %% this makes test_server call error_in_suite as first
+ %% (and only) test case so we can report Reason properly
+ [{?MODULE,error_in_suite,[[{error,Reason}]]}];
AllTCs when is_list(AllTCs) ->
case catch save_seqs(Mod,AllTCs) of
{error,What} ->
@@ -1293,6 +1324,8 @@ report(What,Data) ->
end,
ct_logs:unregister_groupleader(ReportingPid),
case {Func,Result} of
+ {error_in_suite,_} when Suite == ?MODULE ->
+ ok;
{init_per_suite,_} ->
ok;
{end_per_suite,_} ->
diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl
index 4d5a75d354..7c8c720e13 100644
--- a/lib/common_test/src/ct_logs.erl
+++ b/lib/common_test/src/ct_logs.erl
@@ -2054,6 +2054,13 @@ runentry(Dir, Totals={Node,Label,Logs,
?testname_width-3)),
lists:flatten(io_lib:format("~ts...",[Trunc]))
end,
+ TotMissingStr =
+ if NotBuilt > 0 ->
+ ["<font color=\"red\">",
+ integer_to_list(NotBuilt),"</font>"];
+ true ->
+ integer_to_list(NotBuilt)
+ end,
Total = TotSucc+TotFail+AllSkip,
A = xhtml(["<td align=center><font size=\"-1\">",Node,
"</font></td>\n",
@@ -2073,7 +2080,7 @@ runentry(Dir, Totals={Node,Label,Logs,
"<td align=right>",TotFailStr,"</td>\n",
"<td align=right>",integer_to_list(AllSkip),
" (",UserSkipStr,"/",AutoSkipStr,")</td>\n",
- "<td align=right>",integer_to_list(NotBuilt),"</td>\n"],
+ "<td align=right>",TotMissingStr,"</td>\n"],
TotalsStr = A++B++C,
XHTML = [xhtml("<tr>\n", ["<tr class=\"",odd_or_even(),"\">\n"]),
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 4d74fd6a80..33c4f352b0 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -77,7 +77,8 @@
multiply_timetraps = 1,
scale_timetraps = false,
create_priv_dir,
- testspecs = [],
+ testspec_files = [],
+ current_testspec,
tests,
starter}).
@@ -485,8 +486,11 @@ execute_one_spec(TS, Opts, Args) ->
case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of
ok -> % read tests from spec
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
- do_run(Run, Skip, Opts#opts{config=AllConfig,
- logdir=TheLogDir}, Args);
+ Result = do_run(Run, Skip, Opts#opts{config=AllConfig,
+ logdir=TheLogDir,
+ current_testspec=TS}, Args),
+ ct_util:delete_testdata(testspec),
+ Result;
Error ->
Error
end.
@@ -577,7 +581,7 @@ combine_test_opts(TS, Specs, Opts) ->
Opts#opts{label = Label,
profile = Profile,
- testspecs = Specs,
+ testspec_files = Specs,
cover = Cover,
cover_stop = CoverStop,
logdir = which(logdir, LogDir),
@@ -702,7 +706,7 @@ script_start4(#opts{label = Label, profile = Profile,
logopts = LogOpts,
verbosity = Verbosity,
enable_builtin_hooks = EnableBuiltinHooks,
- logdir = LogDir, testspecs = Specs}, _Args) ->
+ logdir = LogDir, testspec_files = Specs}, _Args) ->
%% label - used by ct_logs
application:set_env(common_test, test_label, Label),
@@ -1103,7 +1107,7 @@ run_test2(StartOpts) ->
undefined ->
case lists:keysearch(prepared_tests, 1, StartOpts) of
{value,{_,{Run,Skip},Specs}} -> % use prepared tests
- run_prepared(Run, Skip, Opts#opts{testspecs = Specs},
+ run_prepared(Run, Skip, Opts#opts{testspec_files = Specs},
StartOpts);
false ->
run_dir(Opts, StartOpts)
@@ -1111,11 +1115,11 @@ run_test2(StartOpts) ->
Specs ->
Relaxed = get_start_opt(allow_user_terms, value, false, StartOpts),
%% using testspec(s) as input for test
- run_spec_file(Relaxed, Opts#opts{testspecs = Specs}, StartOpts)
+ run_spec_file(Relaxed, Opts#opts{testspec_files = Specs}, StartOpts)
end.
run_spec_file(Relaxed,
- Opts = #opts{testspecs = Specs},
+ Opts = #opts{testspec_files = Specs},
StartOpts) ->
Specs1 = case Specs of
[X|_] when is_integer(X) -> [Specs];
@@ -1154,7 +1158,10 @@ run_all_specs([{Specs,TS} | TSs], Opts, StartOpts, TotResult) ->
log_ts_names(Specs),
Combined = #opts{config = TSConfig} = combine_test_opts(TS, Specs, Opts),
AllConfig = merge_vals([Opts#opts.config, TSConfig]),
- try run_one_spec(TS, Combined#opts{config = AllConfig}, StartOpts) of
+ try run_one_spec(TS,
+ Combined#opts{config = AllConfig,
+ current_testspec=TS},
+ StartOpts) of
Result ->
run_all_specs(TSs, Opts, StartOpts, [Result | TotResult])
catch
@@ -1399,7 +1406,7 @@ run_testspec2(TestSpec) ->
case check_and_install_configfiles(
Opts#opts.config, LogDir1, Opts) of
ok ->
- Opts1 = Opts#opts{testspecs = [],
+ Opts1 = Opts#opts{testspec_files = [],
logdir = LogDir1,
include = AllInclude},
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
@@ -1706,6 +1713,9 @@ compile_and_run(Tests, Skip, Opts, Args) ->
ct_util:set_testdata({stylesheet,Opts#opts.stylesheet}),
%% save logopts
ct_util:set_testdata({logopts,Opts#opts.logopts}),
+ %% save info about current testspec (testspec record or undefined)
+ ct_util:set_testdata({testspec,Opts#opts.current_testspec}),
+
%% enable silent connections
case Opts#opts.silent_connections of
[] ->
@@ -1720,7 +1730,7 @@ compile_and_run(Tests, Skip, Opts, Args) ->
ct_logs:log("Silent connections", "~p", [Conns])
end
end,
- log_ts_names(Opts#opts.testspecs),
+ log_ts_names(Opts#opts.testspec_files),
TestSuites = suite_tuples(Tests),
{_TestSuites1,SuiteMakeErrors,AllMakeErrors} =
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 10a9bdac67..10c3f2a938 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -27,6 +27,8 @@
collect_tests_from_list/2, collect_tests_from_list/3,
collect_tests_from_file/2, collect_tests_from_file/3]).
+-export([testspec_rec2list/1, testspec_rec2list/2]).
+
-include("ct_util.hrl").
-define(testspec_fields, record_info(fields, testspec)).
@@ -973,7 +975,8 @@ add_tests([Term={Tag,all_nodes,Data}|Ts],Spec) ->
should_be_added(Tag,Node,Data,Spec)],
add_tests(Tests++Ts,Spec);
invalid -> % ignore term
- add_tests(Ts,Spec)
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Term]})
end;
%% create one test entry per node in Nodes and reinsert
add_tests([{Tag,[],Data}|Ts],Spec) ->
@@ -1001,7 +1004,8 @@ add_tests([Term={Tag,NodeOrOther,Data}|Ts],Spec) ->
handle_data(Tag,Node,Data,Spec),
add_tests(Ts,mod_field(Spec,Tag,NodeIxData));
invalid -> % ignore term
- add_tests(Ts,Spec)
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Term]})
end;
false ->
add_tests([{Tag,all_nodes,{NodeOrOther,Data}}|Ts],Spec)
@@ -1012,13 +1016,15 @@ add_tests([Term={Tag,Data}|Ts],Spec) ->
valid ->
add_tests([{Tag,all_nodes,Data}|Ts],Spec);
invalid ->
- add_tests(Ts,Spec)
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Term]})
end;
%% some other data than a tuple
add_tests([Other|Ts],Spec) ->
case get(relaxed) of
- true ->
- add_tests(Ts,Spec);
+ true ->
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Other]});
false ->
throw({error,{undefined_term_in_spec,Other}})
end;
@@ -1149,6 +1155,24 @@ per_node([N|Ns],Tag,Data,Refs) ->
per_node([],_,_,_) ->
[].
+%% Change the testspec record "back" to a list of tuples
+testspec_rec2list(Rec) ->
+ {Terms,_} = lists:mapfoldl(fun(unknown, Pos) ->
+ {element(Pos, Rec),Pos+1};
+ (F, Pos) ->
+ {{F,element(Pos, Rec)},Pos+1}
+ end,2,?testspec_fields),
+ lists:flatten(Terms).
+
+%% Extract one or more values from a testspec record and
+%% return the result as a list of tuples
+testspec_rec2list(Field, Rec) when is_atom(Field) ->
+ [Term] = testspec_rec2list([Field], Rec),
+ Term;
+testspec_rec2list(Fields, Rec) ->
+ Terms = testspec_rec2list(Rec),
+ [{Field,proplists:get_value(Field, Terms)} || Field <- Fields].
+
%% read the value for FieldName in record Rec#testspec
read_field(Rec, FieldName) ->
catch lists:foldl(fun(F, Pos) when F == FieldName ->
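For reference, testspec_rec2list/1 above relies on the record_info/2 field list to turn a record into a tagged list, splicing the collected unknown terms in untouched. A minimal standalone sketch of the same technique, using a made-up #demo{} record:

    -module(rec2list_demo).
    -export([to_list/1]).

    -record(demo, {label, config=[], unknown=[]}).

    %% Walk the field names (element 1 of the record is the tag, so data
    %% starts at position 2). Regular fields become {Field,Value} tuples;
    %% the 'unknown' field already holds complete terms, so its contents
    %% are spliced in as-is by the final flatten.
    to_list(Rec) when is_record(Rec, demo) ->
        {Terms,_} = lists:mapfoldl(fun(unknown, Pos) ->
                                           {element(Pos, Rec), Pos+1};
                                      (Field, Pos) ->
                                           {{Field, element(Pos, Rec)}, Pos+1}
                                   end, 2, record_info(fields, demo)),
        lists:flatten(Terms).

    %% to_list(#demo{label = smoke, unknown = [{foo,bar}]}) returns
    %% [{label,smoke},{config,[]},{foo,bar}]
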
diff --git a/lib/common_test/src/ct_util.hrl b/lib/common_test/src/ct_util.hrl
index 845bb55486..f4cf407856 100644
--- a/lib/common_test/src/ct_util.hrl
+++ b/lib/common_test/src/ct_util.hrl
@@ -55,6 +55,7 @@
create_priv_dir=[],
alias=[],
tests=[],
+ unknown=[],
merge_tests=true}).
-record(cover, {app=none,
diff --git a/lib/common_test/test/ct_auto_compile_SUITE.erl b/lib/common_test/test/ct_auto_compile_SUITE.erl
index cc546ed30d..3e4da31ab4 100644
--- a/lib/common_test/test/ct_auto_compile_SUITE.erl
+++ b/lib/common_test/test/ct_auto_compile_SUITE.erl
@@ -108,6 +108,8 @@ ac_spec(Config) when is_list(Config) ->
PrivDir = ?config(priv_dir, Config),
file:copy(filename:join(DataDir, "bad_SUITE.erl"),
filename:join(PrivDir, "bad_SUITE.erl")),
+ Suite = filename:join(DataDir, "dummy_SUITE"),
+ compile:file(Suite, [{outdir,PrivDir}]),
TestSpec = [{label,ac_spec},
{auto_compile,false},
{suites,PrivDir,all}],
@@ -160,28 +162,34 @@ events_to_check(Test, N) ->
test_events(ac_flag) ->
[
- {ct_test_support_eh,start_logging,{'DEF','RUNDIR'}},
- {ct_test_support_eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
- {ct_test_support_eh,start_info,{1,1,3}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,init_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
- {ct_test_support_eh,test_stats,{1,1,{1,0}}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,end_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
- {ct_test_support_eh,test_done,{'DEF','STOP_TIME'}},
- {ct_test_support_eh,stop_logging,[]}
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,start_info,{1,1,3}},
+ {?eh,tc_start,{ct_framework,error_in_suite}},
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'bad_SUITE can not be compiled or loaded'}}}},
+ {?eh,tc_start,{dummy_SUITE,init_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
+ {?eh,test_stats,{1,1,{1,0}}},
+ {?eh,tc_start,{dummy_SUITE,end_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,stop_logging,[]}
];
test_events(ac_spec) ->
[
- {ct_test_support_eh,start_logging,{'DEF','RUNDIR'}},
- {ct_test_support_eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
- {ct_test_support_eh,start_info,{1,1,3}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,init_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
- {ct_test_support_eh,test_stats,{1,1,{1,0}}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,end_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
- {ct_test_support_eh,test_done,{'DEF','STOP_TIME'}},
- {ct_test_support_eh,stop_logging,[]}
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,start_info,{1,1,3}},
+ {?eh,tc_start,{ct_framework,error_in_suite}},
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'bad_SUITE can not be compiled or loaded'}}}},
+ {?eh,tc_start,{dummy_SUITE,init_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
+ {?eh,test_stats,{1,1,{1,0}}},
+ {?eh,tc_start,{dummy_SUITE,end_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,stop_logging,[]}
].
diff --git a/lib/common_test/test/ct_test_server_if_1_SUITE.erl b/lib/common_test/test/ct_test_server_if_1_SUITE.erl
index b6ef3062d4..214cb60c0d 100644
--- a/lib/common_test/test/ct_test_server_if_1_SUITE.erl
+++ b/lib/common_test/test/ct_test_server_if_1_SUITE.erl
@@ -236,14 +236,13 @@ test_events(ts_if_1) ->
{ts_if_2_SUITE,end_per_suite,
{failed,{error,{suite0_failed,{exited,suite0_goes_boom}}}}}},
-
{?eh,tc_start,{ct_framework,error_in_suite}},
- {?eh,test_stats,{2,6,{4,7}}},
-
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'ts_if_3_SUITE:all/0 is missing'}}}},
{?eh,tc_start,{ct_framework,error_in_suite}},
- {?eh,test_stats,{2,7,{4,7}}},
-
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'Bad return value from ts_if_4_SUITE:all/0'}}}},
{?eh,tc_start,{ts_if_5_SUITE,init_per_suite}},
{?eh,tc_done,{ts_if_5_SUITE,init_per_suite,
@@ -252,7 +251,7 @@ test_events(ts_if_1) ->
{?eh,tc_auto_skip,
{ts_if_5_SUITE,my_test_case,
{require_failed_in_suite0,{not_available,undef_variable}}}},
- {?eh,test_stats,{2,7,{4,8}}},
+ {?eh,test_stats,{2,5,{4,8}}},
{?eh,tc_auto_skip,
{ts_if_5_SUITE,end_per_suite,
{require_failed_in_suite0,{not_available,undef_variable}}}},
@@ -264,7 +263,7 @@ test_events(ts_if_1) ->
{?eh,tc_auto_skip,
{ts_if_6_SUITE,tc1,
{failed,{error,{suite0_failed,{exited,suite0_byebye}}}}}},
- {?eh,test_stats,{2,7,{4,9}}},
+ {?eh,test_stats,{2,5,{4,9}}},
{?eh,tc_auto_skip,
{ct_framework,end_per_suite,
{failed,{error,{suite0_failed,{exited,suite0_byebye}}}}}},
@@ -274,13 +273,13 @@ test_events(ts_if_1) ->
{?eh,tc_done,{ct_framework,init_per_suite,ok}},
{?eh,tc_done,
{ts_if_7_SUITE,tc1,{auto_skipped,{testcase0_failed,bad_return_value}}}},
- {?eh,test_stats,{2,7,{4,10}}},
+ {?eh,test_stats,{2,5,{4,10}}},
{?eh,tc_done,{ts_if_7_SUITE,
{init_per_group,g1,[]},
{auto_skipped,{group0_failed,bad_return_value}}}},
{?eh,tc_auto_skip,
{ts_if_7_SUITE,{tc2,g1},{group0_failed,bad_return_value}}},
- {?eh,test_stats,{2,7,{4,11}}},
+ {?eh,test_stats,{2,5,{4,11}}},
{?eh,tc_auto_skip,
{ts_if_7_SUITE,{end_per_group,g1},{group0_failed,bad_return_value}}},
@@ -288,7 +287,7 @@ test_events(ts_if_1) ->
{?eh,tc_done,{ts_if_7_SUITE,{init_per_group,g2,[]},ok}},
{?eh,tc_done,{ts_if_7_SUITE,tc2,
{auto_skipped,{testcase0_failed,bad_return_value}}}},
- {?eh,test_stats,{2,7,{4,12}}},
+ {?eh,test_stats,{2,5,{4,12}}},
{?eh,tc_start,{ts_if_7_SUITE,{end_per_group,g2,[]}}},
{?eh,tc_done,{ts_if_7_SUITE,{end_per_group,g2,[]},ok}}],
@@ -300,17 +299,17 @@ test_events(ts_if_1) ->
{?eh,tc_done,{ct_framework,init_per_suite,ok}},
{?eh,tc_start,{ts_if_8_SUITE,tc1}},
{?eh,tc_done,{ts_if_8_SUITE,tc1,{failed,{error,failed_on_purpose}}}},
- {?eh,test_stats,{2,8,{4,12}}},
+ {?eh,test_stats,{2,6,{4,12}}},
{?eh,tc_start,{ct_framework,end_per_suite}},
{?eh,tc_done,{ct_framework,end_per_suite,ok}},
{?eh,tc_user_skip,{skipped_by_spec_1_SUITE,all,"should be skipped"}},
- {?eh,test_stats,{2,8,{5,12}}},
+ {?eh,test_stats,{2,6,{5,12}}},
{?eh,tc_start,{skipped_by_spec_2_SUITE,init_per_suite}},
{?eh,tc_done,{skipped_by_spec_2_SUITE,init_per_suite,ok}},
{?eh,tc_user_skip,{skipped_by_spec_2_SUITE,tc1,"should be skipped"}},
- {?eh,test_stats,{2,8,{6,12}}},
+ {?eh,test_stats,{2,6,{6,12}}},
{?eh,tc_start,{skipped_by_spec_2_SUITE,end_per_suite}},
{?eh,tc_done,{skipped_by_spec_2_SUITE,end_per_suite,ok}},
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl
index b8216c3596..cfc6fa93d7 100644
--- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl
@@ -41,8 +41,12 @@ suite() ->
%% @end
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+
+ TCName = ct:get_config(tcname),
+ CfgFiles = ct:get_config(file,undefined,[all]),
+
%% verify that expected config file can be read
- case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of
+ case {TCName,CfgFiles} of
{start_separate,[cfg11]} -> ok;
{start_join,[cfg11,cfg21]} -> ok;
{incl_separate1,[cfg11]} -> ok;
@@ -56,6 +60,28 @@ init_per_suite(Config) ->
_ -> ok
end,
+
+ %% test the get_testspec_terms functionality
+ if CfgFiles /= undefined ->
+ TSTerms = case ct:get_testspec_terms() of
+ undefined -> exit('testspec should not be undefined');
+ Result -> Result
+ end,
+ true = lists:keymember(config, 1, TSTerms),
+ {config,TSCfgFiles} = ct:get_testspec_terms(config),
+ [{config,TSCfgFiles},{tests,Tests}] =
+ ct:get_testspec_terms([config,tests]),
+ CfgNames = [list_to_atom(filename:basename(TSCfgFile)) ||
+ {Node,TSCfgFile} <- TSCfgFiles, Node == node()],
+ true = (length(CfgNames) == length(CfgFiles)),
+ [true = lists:member(CfgName,CfgFiles) || CfgName <- CfgNames],
+ true = lists:any(fun({{_Node,_Dir},Suites}) ->
+ lists:keymember(?MODULE, 1, Suites)
+ end, Tests);
+ true ->
+ ok
+ end,
+
Config.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl
index 7c51aca246..c3faebbd64 100644
--- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl
@@ -55,7 +55,7 @@ init_per_suite(Config) ->
{incl_both2,[cfg11,cfg12,cfg21]} -> ok;
{incl_both2,[cfg21]} -> ok;
_ -> ok
- end,
+ end,
Config.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl
index 36c1b4279b..e189b168c7 100644
--- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl
@@ -41,8 +41,11 @@ suite() ->
%% @end
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+ TCName = ct:get_config(tcname),
+ CfgFiles = ct:get_config(file,undefined,[all]),
+
%% verify that expected config file can be read
- case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of
+ case {TCName,CfgFiles} of
{start_separate,[cfg11]} -> ok;
{start_join,[cfg11,cfg21]} -> ok;
{incl_separate1,[cfg11]} -> ok;
@@ -55,6 +58,28 @@ init_per_suite(Config) ->
{incl_both2,[cfg11]} -> ok;
_ -> ok
end,
+
+ %% test the get_testspec_terms functionality
+ if CfgFiles /= undefined ->
+ TSTerms = case ct:get_testspec_terms() of
+ undefined -> exit('testspec should not be undefined');
+ Result -> Result
+ end,
+ true = lists:keymember(config, 1, TSTerms),
+ {config,TSCfgFiles} = ct:get_testspec_terms(config),
+ [{config,TSCfgFiles},{tests,Tests}] =
+ ct:get_testspec_terms([config,tests]),
+ CfgNames = [list_to_atom(filename:basename(TSCfgFile)) ||
+ {Node,TSCfgFile} <- TSCfgFiles, Node == node()],
+ true = (length(CfgNames) == length(CfgFiles)),
+ [true = lists:member(CfgName,CfgFiles) || CfgName <- CfgNames],
+ true = lists:any(fun({{_Node,_Dir},Suites}) ->
+ lists:keymember(?MODULE, 1, Suites)
+ end, Tests);
+ true ->
+ ok
+ end,
+
Config.
%%--------------------------------------------------------------------