author     Siri Hansen <[email protected]>     2013-04-04 15:43:06 +0200
committer  Siri Hansen <[email protected]>     2013-04-04 15:43:06 +0200
commit     a0fb34b3e6a73178e9b4c41ab2a20b3ba1762cd5 (patch)
tree       2ece9b6846f9b0e33b8c9ca22ec666066e20da9e /lib
parent     44a2884a7df320596039b8c8563bf73f9eb5aa6b (diff)
parent     b74b21ba20e1e5aab7887891bf6672abb33f71fc (diff)
Merge branch 'maint'
Diffstat (limited to 'lib')
-rw-r--r--  lib/common_test/doc/src/ct_run.xml                                    |  12
-rw-r--r--  lib/common_test/doc/src/run_test_chapter.xml                          |  31
-rw-r--r--  lib/common_test/src/ct.erl                                            |   3
-rw-r--r--  lib/common_test/src/ct_framework.erl                                  |  70
-rw-r--r--  lib/common_test/src/ct_repeat.erl                                     |  38
-rw-r--r--  lib/common_test/src/ct_run.erl                                        |  14
-rw-r--r--  lib/common_test/test/Makefile                                         |   1
-rw-r--r--  lib/common_test/test/ct_repeat_testrun_SUITE.erl                      | 378
-rw-r--r--  lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl |  75
-rw-r--r--  lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl |  75
-rw-r--r--  lib/common_test/test/ct_test_support.erl                              |  70
-rw-r--r--  lib/test_server/src/test_server_gl.erl                                |   2
-rw-r--r--  lib/test_server/test/test_server_SUITE.erl                            |   2
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl     |  13
14 files changed, 689 insertions(+), 95 deletions(-)
diff --git a/lib/common_test/doc/src/ct_run.xml b/lib/common_test/doc/src/ct_run.xml
index d871908952..c87c765ae7 100644
--- a/lib/common_test/doc/src/ct_run.xml
+++ b/lib/common_test/doc/src/ct_run.xml
@@ -113,9 +113,9 @@
[-muliply_timetraps Multiplier]
[-scale_timetraps]
[-create_priv_dir auto_per_run | auto_per_tc | manual_per_tc]
- [-repeat N [-force_stop]] |
- [-duration HHMMSS [-force_stop]] |
- [-until [YYMoMoDD]HHMMSS [-force_stop]]
+ [-repeat N] |
+ [-duration HHMMSS [-force_stop [skip_rest]]] |
+ [-until [YYMoMoDD]HHMMSS [-force_stop [skip_rest]]]
[-basic_html]
[-ct_hooks CTHModule1 CTHOpts1 and CTHModule2 CTHOpts2 and ..
CTHModuleN CTHOptsN]
@@ -149,9 +149,9 @@
[-muliply_timetraps Multiplier]
[-scale_timetraps]
[-create_priv_dir auto_per_run | auto_per_tc | manual_per_tc]
- [-repeat N [-force_stop]] |
- [-duration HHMMSS [-force_stop]] |
- [-until [YYMoMoDD]HHMMSS [-force_stop]]
+ [-repeat N] |
+ [-duration HHMMSS [-force_stop [skip_rest]]] |
+ [-until [YYMoMoDD]HHMMSS [-force_stop [skip_rest]]]
[-basic_html]
[-ct_hooks CTHModule1 CTHOpts1 and CTHModule2 CTHOpts2 and ..
CTHModuleN CTHOptsN]
diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml
index 35f89153d3..afaed29626 100644
--- a/lib/common_test/doc/src/run_test_chapter.xml
+++ b/lib/common_test/doc/src/run_test_chapter.xml
@@ -174,7 +174,7 @@
<item><c><![CDATA[-repeat <n>]]></c>, tells Common Test to repeat the tests n times (see below).</item>
<item><c><![CDATA[-duration <time>]]></c>, tells Common Test to repeat the tests for duration of time (see below).</item>
<item><c><![CDATA[-until <stop_time>]]></c>, tells Common Test to repeat the tests until stop_time (see below).</item>
- <item><c>-force_stop</c>, on timeout, the test run will be aborted when current test job is finished (see below).</item>
+ <item><c>-force_stop [skip_rest]</c>, on timeout, the test run will be aborted when current test job is finished. If <c>skip_rest</c> is provided the rest of the test cases in the current test job will be skipped (see below).</item>
<item><c><![CDATA[-decrypt_key <key>]]></c>, provides a decryption key for
<seealso marker="config_file_chapter#encrypted_config_files">encrypted configuration files</seealso>.</item>
<item><c><![CDATA[-decrypt_file <key_file>]]></c>, points out a file containing a decryption key for
@@ -1273,6 +1273,7 @@
<item><c>-duration DurTime ({duration,DurTime})</c>, where <c>DurTime</c> is the duration, see below.</item>
<item><c>-until StopTime ({until,StopTime})</c>, where <c>StopTime</c> is finish time, see below.</item>
<item><c>-force_stop ({force_stop,true})</c></item>
+ <item><c>-force_stop skip_rest ({force_stop,skip_rest})</c></item>
</list>
<p>The duration time, <c>DurTime</c>, is specified as <c>HHMMSS</c>. Example:
<c>-duration 012030</c> or <c>{duration,"012030"}</c>, means the tests will
@@ -1283,12 +1284,16 @@
Example: <c>-until 071001120000</c> or <c>{until,"071001120000"}</c>, which means the tests
will be executed and (if time allows) repeated, until 12 o'clock on the 1st of Oct 2007.</p>
- <p>When timeout occurs, Common Test will never abort the test run immediately, since
+ <p>When timeout occurs, Common Test will never abort the ongoing test case, since
this might leave the system under test in an undefined, and possibly bad, state.
- Instead Common Test will finish the current test job, or the complete test
- run, before stopping. The latter is the default behaviour. The <c>force_stop</c>
- flag/option tells Common Test to stop as soon as the current test job is finished.
- Note that since Common Test always finishes off the current test job or test session,
+ Instead Common Test will by default finish the current test
+ run before stopping. If the <c>force_stop</c> flag is
+ given, Common Test will stop as soon as the current test job
+ is finished, and if the <c>force_stop</c> flag is given with
+ <c>skip_rest</c> Common Test will only complete the current
+ test case and skip the rest of the tests in the test job.
+ Note that since Common Test always finishes off at least the
+ current test case,
the time specified with <c>duration</c> or <c>until</c> is never definitive!</p>
<p>Log files from every single repeated test run is saved in normal Common Test fashion (see above).
@@ -1312,6 +1317,18 @@
<p>Example 2:</p>
<pre>
+	$ ct_run -dir $TEST_ROOT/to1 $TEST_ROOT/to2 -duration 001000 -force_stop skip_rest</pre>
+ <p>Here the same test run as in Example 1, but with the
+ <c>force_stop</c> flag set to <c>skip_rest</c>. If the timeout
+ occurs while executing tests in directory to1, the rest of the
+ test cases in to1 will be skipped and then the test will be
+ aborted without running the tests in to2 another time. If the
+ timeout occurs while executing tests in directory to2, then the
+ rest of the test cases in to2 will be skipped and then the test
+ will be aborted.</p>
+
+ <p>Example 3:</p>
+ <pre>
$ date
Fri Sep 28 15:00:00 MEST 2007
@@ -1321,7 +1338,7 @@
Common Test will finish the entire test run before stopping (i.e. the to1 and to2 test
will always both be executed in the same test run).</p>
- <p>Example 3:</p>
+ <p>Example 4:</p>
<pre>
$ ct_run -dir $TEST_ROOT/to1 $TEST_ROOT/to2 -repeat 5</pre>
<p>Here the test run, including both the to1 and the to2 test, will be repeated 5 times.</p>
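
Note: the chapter above documents force_stop only via the ct_run command line. A minimal sketch (not part of the patch) of the same kind of run started from the Erlang shell, using the {force_stop,skip_rest} option listed above; the directory paths are placeholders:

    %% Repeat the tests for 10 minutes; when the time expires, skip the
    %% remaining cases of the current test job and stop.
    Opts = [{dir,["/ldisk/tests/to1","/ldisk/tests/to2"]},
            {duration,"001000"},
            {force_stop,skip_rest}],
    ct:run_test(Opts).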
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index 04a95a53fa..e6732f7fc7 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -153,7 +153,7 @@ run(TestDirs) ->
%%% {auto_compile,Bool} | {create_priv_dir,CreatePrivDir} |
%%% {multiply_timetraps,M} | {scale_timetraps,Bool} |
%%% {repeat,N} | {duration,DurTime} | {until,StopTime} |
-%%% {force_stop,Bool} | {decrypt,DecryptKeyOrFile} |
+%%% {force_stop,ForceStop} | {decrypt,DecryptKeyOrFile} |
%%% {refresh_logs,LogDir} | {logopts,LogOpts} |
%%% {verbosity,VLevels} | {basic_html,Bool} |
%%% {ct_hooks, CTHs} | {enable_builtin_hooks,Bool} |
@@ -184,6 +184,7 @@ run(TestDirs) ->
%%% N = integer()
%%% DurTime = string(HHMMSS)
%%% StopTime = string(YYMoMoDDHHMMSS) | string(HHMMSS)
+%%% ForceStop = skip_rest | Bool
%%% DecryptKeyOrFile = {key,DecryptKey} | {file,DecryptFile}
%%% DecryptKey = string()
%%% DecryptFile = string()
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index 5fe4eaf511..b92fe1555f 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -64,38 +64,46 @@ init_tc(Mod,Func,Config) ->
ok
end,
- case ct_util:get_testdata(curr_tc) of
- {Suite,{suite0_failed,{require,Reason}}} ->
- {skip,{require_failed_in_suite0,Reason}};
- {Suite,{suite0_failed,_}=Failure} ->
- {skip,Failure};
+ case Func=/=end_per_suite
+ andalso Func=/=end_per_group
+ andalso ct_util:get_testdata(skip_rest) of
+ true ->
+ {skip,"Repeated test stopped by force_stop option"};
_ ->
- ct_util:update_testdata(curr_tc,
- fun(undefined) ->
- [{Suite,Func}];
- (Running) ->
- [{Suite,Func}|Running]
- end, [create]),
- case ct_util:read_suite_data({seq,Suite,Func}) of
- undefined ->
- init_tc1(Mod,Suite,Func,Config);
- Seq when is_atom(Seq) ->
- case ct_util:read_suite_data({seq,Suite,Seq}) of
- [Func|TCs] -> % this is the 1st case in Seq
- %% make sure no cases in this seq are
- %% marked as failed from an earlier execution
- %% in the same suite
- lists:foreach(
- fun(TC) ->
- ct_util:save_suite_data({seq,Suite,TC},
- Seq)
- end, TCs);
- _ ->
- ok
- end,
- init_tc1(Mod,Suite,Func,Config);
- {failed,Seq,BadFunc} ->
- {skip,{sequence_failed,Seq,BadFunc}}
+ case ct_util:get_testdata(curr_tc) of
+ {Suite,{suite0_failed,{require,Reason}}} ->
+ {skip,{require_failed_in_suite0,Reason}};
+ {Suite,{suite0_failed,_}=Failure} ->
+ {skip,Failure};
+ _ ->
+ ct_util:update_testdata(curr_tc,
+ fun(undefined) ->
+ [{Suite,Func}];
+ (Running) ->
+ [{Suite,Func}|Running]
+ end, [create]),
+ case ct_util:read_suite_data({seq,Suite,Func}) of
+ undefined ->
+ init_tc1(Mod,Suite,Func,Config);
+ Seq when is_atom(Seq) ->
+ case ct_util:read_suite_data({seq,Suite,Seq}) of
+ [Func|TCs] -> % this is the 1st case in Seq
+ %% make sure no cases in this seq are
+ %% marked as failed from an earlier execution
+ %% in the same suite
+ lists:foreach(
+ fun(TC) ->
+ ct_util:save_suite_data(
+ {seq,Suite,TC},
+ Seq)
+ end, TCs);
+ _ ->
+ ok
+ end,
+ init_tc1(Mod,Suite,Func,Config);
+ {failed,Seq,BadFunc} ->
+ {skip,{sequence_failed,Seq,BadFunc}}
+ end
end
end.
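
Note: the restructured init_tc/3 above wraps the old logic in one new test: once the skip_rest flag is set, every remaining function except end_per_suite and end_per_group is skipped. A condensed sketch of that decision stated as a predicate (not a drop-in replacement for the code above):

    %% skip_rest is stored with ct_util:set_testdata({skip_rest,true})
    %% (see ct_repeat below); end_per_* must still run so that suites
    %% and groups are cleaned up properly.
    should_skip_remaining(Func) ->
        Func =/= end_per_suite
            andalso Func =/= end_per_group
            andalso ct_util:get_testdata(skip_rest) =:= true.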
diff --git a/lib/common_test/src/ct_repeat.erl b/lib/common_test/src/ct_repeat.erl
index a47309c6ee..f4d9949776 100644
--- a/lib/common_test/src/ct_repeat.erl
+++ b/lib/common_test/src/ct_repeat.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -23,7 +23,7 @@
%%% start flags (or equivalent ct:run_test/1 options) are supported:
%%% -until <StopTime>, StopTime = YYMoMoDDHHMMSS | HHMMSS
%%% -duration <DurTime>, DurTime = HHMMSS
-%%% -force_stop
+%%% -force_stop [skip_rest]
%%% -repeat <N>, N = integer()</p>
-module(ct_repeat).
@@ -62,12 +62,15 @@ loop_test(If,Args) when is_list(Args) ->
io:format("\nCommon Test: "
"Will repeat tests for ~s.\n\n",[ts(Secs)]),
TPid =
- case lists:keymember(force_stop,1,Args) of
- true ->
+ case proplists:get_value(force_stop,Args) of
+ False when False==false; False==undefined ->
+ undefined;
+ ForceStop ->
CtrlPid = self(),
- spawn(fun() -> stop_after(CtrlPid,Secs) end);
- false ->
- undefined
+ spawn(
+ fun() ->
+ stop_after(CtrlPid,Secs,ForceStop)
+ end)
end,
Args1 = [{loop_info,[{stop_time,Secs,StopTime,1}]} | Args],
loop(If,stop_time,0,Secs,StopTime,Args1,TPid,[])
@@ -212,7 +215,7 @@ get_stop_time(until,[Y1,Y2,Mo1,Mo2,D1,D2,H1,H2,Mi1,Mi2,S1,S2]) ->
list_to_integer([S1,S2])},
calendar:datetime_to_gregorian_seconds({Date,Time});
-get_stop_time(until,Time) ->
+get_stop_time(until,Time=[_,_,_,_,_,_]) ->
get_stop_time(until,"000000"++Time);
get_stop_time(duration,[H1,H2,Mi1,Mi2,S1,S2]) ->
@@ -227,10 +230,17 @@ cancel(Pid) ->
%% After Secs, abort will make the test_server finish the current
%% job, then empty the job queue and stop.
-stop_after(_CtrlPid,Secs) ->
+stop_after(_CtrlPid,Secs,ForceStop) ->
timer:sleep(Secs*1000),
+ case ForceStop of
+ SkipRest when SkipRest==skip_rest; SkipRest==["skip_rest"] ->
+ ct_util:set_testdata({skip_rest,true});
+ _ ->
+ ok
+ end,
test_server_ctrl:abort().
+
%% Callback from ct_run to print loop info to system log.
log_loop_info(Args) ->
case lists:keysearch(loop_info,1,Args) of
@@ -259,11 +269,11 @@ log_loop_info(Args) ->
io_lib:format("Test time remaining: ~w secs (~w%)\n",
[Secs,trunc((Secs/Secs0)*100)]),
LogStr4 =
- case lists:keymember(force_stop,1,Args) of
- true ->
- io_lib:format("force_stop is enabled",[]);
- _ ->
- ""
+ case proplists:get_value(force_stop,Args) of
+ False when False==false; False==undefined ->
+ "";
+ ForceStop ->
+ io_lib:format("force_stop is set to: ~w",[ForceStop])
end,
ct_logs:log("Test loop info",LogStr1++LogStr2++LogStr3++LogStr4,[])
end.
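
Note: the lookups in loop_test/2 and log_loop_info/1 change from lists:keymember/3 to proplists:get_value/2 because the force_stop value itself now matters, not just its presence. A small shell example of the distinction (when the flag comes from the ct_run command line it arrives as ["skip_rest"], which stop_after/3 also accepts):

    1> proplists:get_value(force_stop, [{force_stop,skip_rest}]).
    skip_rest
    2> proplists:get_value(force_stop, [{force_stop,true}]).
    true
    3> proplists:get_value(force_stop, []).
    undefined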
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 49f00429ae..57cfab532e 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -771,9 +771,9 @@ script_usage() ->
"\n\t[-scale_timetraps]"
"\n\t[-create_priv_dir auto_per_run | auto_per_tc | manual_per_tc]"
"\n\t[-basic_html]"
- "\n\t[-repeat N [-force_stop]] |"
- "\n\t[-duration HHMMSS [-force_stop]] |"
- "\n\t[-until [YYMoMoDD]HHMMSS [-force_stop]]\n\n"),
+ "\n\t[-repeat N] |"
+ "\n\t[-duration HHMMSS [-force_stop [skip_rest]]] |"
+ "\n\t[-until [YYMoMoDD]HHMMSS [-force_stop [skip_rest]]]\n\n"),
io:format("Run tests using test specification:\n\n"
"\tct_run -spec TestSpec1 TestSpec2 .. TestSpecN"
"\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
@@ -795,9 +795,9 @@ script_usage() ->
"\n\t[-scale_timetraps]"
"\n\t[-create_priv_dir auto_per_run | auto_per_tc | manual_per_tc]"
"\n\t[-basic_html]"
- "\n\t[-repeat N [-force_stop]] |"
- "\n\t[-duration HHMMSS [-force_stop]] |"
- "\n\t[-until [YYMoMoDD]HHMMSS [-force_stop]]\n\n"),
+ "\n\t[-repeat N] |"
+ "\n\t[-duration HHMMSS [-force_stop [skip_rest]]] |"
+ "\n\t[-until [YYMoMoDD]HHMMSS [-force_stop [skip_rest]]]\n\n"),
io:format("Refresh the HTML index files:\n\n"
"\tct_run -refresh_logs [LogDir]"
"[-logdir LogDir] "
@@ -2933,6 +2933,8 @@ opts2args(EnvStartOpts) ->
[];
({create_priv_dir,PD}) when is_atom(PD) ->
[{create_priv_dir,[atom_to_list(PD)]}];
+ ({force_stop,skip_rest}) ->
+ [{force_stop,["skip_rest"]}];
({force_stop,true}) ->
[{force_stop,[]}];
({force_stop,false}) ->
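
Note: the new opts2args/1 clause keeps the simulated command line in step with the Erlang option: {force_stop,skip_rest} is rendered as the -force_stop flag with one string argument. A hypothetical helper summarising only the two clauses shown above:

    %% Not part of the patch; just the mapping performed by the clauses above.
    force_stop_args(skip_rest) -> [{force_stop,["skip_rest"]}];
    force_stop_args(true)      -> [{force_stop,[]}].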
diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile
index a9ebd8f1d3..94569fa87f 100644
--- a/lib/common_test/test/Makefile
+++ b/lib/common_test/test/Makefile
@@ -38,6 +38,7 @@ MODULES= \
ct_groups_spec_SUITE \
ct_sequence_1_SUITE \
ct_repeat_1_SUITE \
+ ct_repeat_testrun_SUITE \
ct_testspec_1_SUITE \
ct_testspec_2_SUITE \
ct_testspec_3_SUITE \
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
new file mode 100644
index 0000000000..7ec384c932
--- /dev/null
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
@@ -0,0 +1,378 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File: ct_repeat_test_SUITE
+%%%
+%%% Description:
+%%% Test different options for repeating test runs:
+%%% -repeat N
+%%% -duration T [-force_stop [skip_rest]]
+%%% -until T [-force_stop [skip_rest]]
+%%%
+%%%-------------------------------------------------------------------
+-module(ct_repeat_testrun_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+-define(eh, ct_test_support_eh).
+-define(skip_reason, "Repeated test stopped by force_stop option").
+-define(skipped, {skipped, ?skip_reason}).
+
+
+%% Timers used in this test.
+%% Each test suite consists of
+%%
+%% [tc1,tc2,{group,g,[tc1,tc2]},tc2]
+%%
+%% In r1_SUITE tc1 has a sleep of 10 sec - all other test cases just
+%% return ok.
+%%
+%% => One complete test run of two suites r1_SUITE + r2_SUITE is at
+%% least 20 seconds (10 sec for each r1_SUITE:tc1)
+%%
+-define(t1,30). % time shall expire during second run of r1_SUITE
+-define(t2,6). % time shall expire during first run of tc1
+-define(t3,16). % time shall expire during second run of tc1
+
+
+%%--------------------------------------------------------------------
+%% TEST SERVER CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Description: Since Common Test starts another Test Server
+%% instance, the tests need to be performed on a separate node (or
+%% there will be clashes with logging processes etc).
+%%--------------------------------------------------------------------
+init_per_suite(Config0) ->
+ Config = ct_test_support:init_per_suite(Config0),
+ DataDir = ?config(data_dir, Config),
+ Suite1 = filename:join([DataDir,"a_test","r1_SUITE"]),
+ Suite2 = filename:join([DataDir,"b_test","r2_SUITE"]),
+ Opts0 = ct_test_support:get_opts(Config),
+ Opts1 = Opts0 ++ [{suite,Suite1},{testcase,tc2},{label,timing1}],
+ Opts2 = Opts0 ++ [{suite,Suite2},{testcase,tc2},{label,timing2}],
+
+ %% Make sure both suites are compiled
+ {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts1],Config),
+ {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts2],Config),
+
+ %% Time the shortest testcase to use for offset
+ {T0,{1,0,{0,0}}} = timer:tc(ct_test_support,run,[ct,run_test,[Opts1],Config]),
+
+ %% -2 is to ensure we hit inside the target test case and not after
+% T = round(T0/1000000)-2,
+ T=0,
+ [{offset,T}|Config].
+
+end_per_suite(Config) ->
+ ct_test_support:end_per_suite(Config).
+
+init_per_testcase(TestCase, Config) ->
+ ct_test_support:init_per_testcase(TestCase, Config).
+
+end_per_testcase(TestCase, Config) ->
+ ct_test_support:end_per_testcase(TestCase, Config).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [
+ repeat_n,
+ duration,
+ duration_force_stop,
+ duration_force_stop_skip_rest,
+ duration_force_stop_skip_rest_group,
+ until,
+ until_force_stop,
+ until_force_stop_skip_rest,
+ until_force_stop_skip_rest_group
+ ].
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
+
+%%%-----------------------------------------------------------------
+%%%
+repeat_n(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,repeat_n},
+ {repeat,2}],
+ Config),
+ ok = execute(repeat_n, Opts, ERPid, Config).
+
+duration(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,duration},
+ {duration,duration_str(?t1,2,Config)}],
+ Config),
+ ok = execute(duration, Opts, ERPid, Config).
+
+duration_force_stop(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,duration_force_stop},
+ {duration,duration_str(?t1,2,Config)},
+ {force_stop,true}],
+ Config),
+ ok = execute(duration_force_stop, Opts, ERPid, Config).
+
+duration_force_stop_skip_rest(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,duration_force_stop_skip_rest},
+ {duration,duration_str(?t2,1,Config)},
+ {force_stop,skip_rest}],
+ Config),
+ ok = execute(duration_force_stop_skip_rest, Opts, ERPid, Config).
+
+duration_force_stop_skip_rest_group(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,duration_force_stop_skip_rest_group},
+ {duration,duration_str(?t3,1,Config)},
+ {force_stop,skip_rest}],
+ Config),
+ ok = execute(duration_force_stop_skip_rest_group, Opts, ERPid, Config).
+
+until(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,until}],
+ Config),
+ ExecuteFun =
+ fun() ->
+ [_,_] = ct_test_support:run_ct_run_test(
+ Opts++[{until,until_str(?t1,2,Config)}],Config),
+ 0 = ct_test_support:run_ct_script_start(
+ Opts++[{until,until_str(?t1,2,Config)}],Config)
+ end,
+ ok = execute(ExecuteFun, until, Opts, ERPid, Config).
+
+until_force_stop(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,until_force_stop},
+ {force_stop,true}],
+ Config),
+ ExecuteFun =
+ fun() ->
+ [_,_] = ct_test_support:run_ct_run_test(
+ Opts++[{until,until_str(?t1,2,Config)}],Config),
+ 0 = ct_test_support:run_ct_script_start(
+ Opts++[{until,until_str(?t1,2,Config)}],Config)
+ end,
+ ok = execute(ExecuteFun, until_force_stop, Opts, ERPid, Config).
+
+until_force_stop_skip_rest(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,until_force_stop_skip_rest},
+ {force_stop,skip_rest}],
+ Config),
+ ExecuteFun =
+ fun() ->
+ [_] = ct_test_support:run_ct_run_test(
+ Opts++[{until,until_str(?t2,1,Config)}],Config),
+ 1 = ct_test_support:run_ct_script_start(
+ Opts++[{until,until_str(?t2,1,Config)}],Config)
+ end,
+ ok = execute(ExecuteFun, until_force_stop_skip_rest,
+ Opts, ERPid, Config).
+
+until_force_stop_skip_rest_group(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+ Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+ {Opts,ERPid} = setup([{dir,Dirs},
+ {label,until_force_stop_skip_rest_group},
+ {force_stop,skip_rest}],
+ Config),
+ ExecuteFun =
+ fun() ->
+ [_] = ct_test_support:run_ct_run_test(
+ Opts++[{until,until_str(?t3,1,Config)}],Config),
+ 0 = ct_test_support:run_ct_script_start(
+ Opts++[{until,until_str(?t3,1,Config)}],Config)
+ end,
+ ok = execute(ExecuteFun,
+ until_force_stop_skip_rest_group,
+ Opts, ERPid, Config).
+
+
+%%%-----------------------------------------------------------------
+%%% HELP FUNCTIONS
+%%%-----------------------------------------------------------------
+
+setup(Test, Config) ->
+ Opts0 = ct_test_support:get_opts(Config),
+ Level = ?config(trace_level, Config),
+ EvHArgs = [{cbm,ct_test_support},{trace_level,Level}],
+ Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test],
+ ERPid = ct_test_support:start_event_receiver(Config),
+ {Opts,ERPid}.
+
+%% Execute test, first with ct:run_test, then with ct:script_start
+execute(Name, Opts, ERPid, Config) ->
+ ExecuteFun = fun() -> ok = ct_test_support:run(Opts, Config) end,
+ execute(ExecuteFun, Name, Opts, ERPid, Config).
+
+execute(ExecuteFun, Name, Opts, ERPid, Config) ->
+ ExecuteFun(),
+ Events = ct_test_support:get_events(ERPid, Config),
+
+ ct_test_support:log_events(Name,
+ reformat(Events, ?eh),
+ ?config(priv_dir, Config),
+ Opts),
+
+ TestEvents = events_to_check(Name),
+ ct_test_support:verify_events(TestEvents, Events, Config).
+
+reformat(Events, EH) ->
+ ct_test_support:reformat(Events, EH).
+
+%% N is the expected number of repeats
+until_str(Secs0,N,Config) ->
+ Offset = ?config(offset,Config),
+ Secs = Secs0 + N*Offset,
+ Now = calendar:datetime_to_gregorian_seconds(calendar:local_time()),
+ {{Y,Mo,D},{H,M,S}} = calendar:gregorian_seconds_to_datetime(Now+Secs),
+ lists:flatten(io_lib:format("~2..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
+ [Y rem 100, Mo, D, H, M, S])).
+
+%% N is the expected number of repeats
+duration_str(Secs0,N,Config) ->
+ Offset = ?config(offset,Config),
+ Secs = Secs0 + N*Offset,
+ "0000" ++ lists:flatten(io_lib:format("~2..0w",[Secs])).
+
+%%%-----------------------------------------------------------------
+%%% TEST EVENTS
+%%%-----------------------------------------------------------------
+%% 2 tests (ct:run_test + script_start) is default
+events_to_check(C) when C==repeat_n; C==duration; C==until ->
+ dupl(4, start_logging() ++ all_succ() ++ stop_logging());
+events_to_check(C) when C==duration_force_stop; C==until_force_stop ->
+ dupl(2, start_logging() ++
+ all_succ() ++
+ stop_logging() ++
+ start_logging() ++
+ all_succ(r1_SUITE) ++
+ stop_logging());
+events_to_check(C) when C==duration_force_stop_skip_rest;
+ C==until_force_stop_skip_rest ->
+ dupl(2, start_logging() ++ skip_first_tc1(r1_SUITE) ++ stop_logging());
+events_to_check(C) when C==duration_force_stop_skip_rest_group;
+ C==until_force_stop_skip_rest_group ->
+ dupl(2, start_logging() ++ skip_tc1_in_group(r1_SUITE) ++ stop_logging()).
+
+dupl(N,List) ->
+ lists:flatten(lists:duplicate(N,List)).
+
+start_logging() ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}}].
+stop_logging() ->
+ [{?eh,stop_logging,[]}].
+
+
+all_succ() ->
+ all_succ(r1_SUITE) ++ all_succ(r2_SUITE).
+
+all_succ(Suite) ->
+ [{?eh,tc_start,{Suite,init_per_suite}},
+ {?eh,tc_done,{Suite,init_per_suite,ok}},
+ {?eh,tc_start,{Suite,tc1}},
+ {?eh,tc_done,{Suite,tc1,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,tc2}},
+ {?eh,tc_done,{Suite,tc2,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ [{?eh,tc_start,{Suite,{init_per_group,g,[]}}},
+ {?eh,tc_done,{Suite,{init_per_group,g,[]},ok}},
+ {?eh,tc_start,{Suite,tc1}},
+ {?eh,tc_done,{Suite,tc1,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,tc2}},
+ {?eh,tc_done,{Suite,tc2,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,{end_per_group,g,[]}}},
+ {?eh,tc_done,{Suite,{end_per_group,g,[]},ok}}],
+ {?eh,tc_start,{Suite,tc2}},
+ {?eh,tc_done,{Suite,tc2,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,end_per_suite}},
+ {?eh,tc_done,{Suite,end_per_suite,ok}}].
+
+skip_first_tc1(Suite) ->
+ [{?eh,tc_start,{Suite,init_per_suite}},
+ {?eh,tc_done,{Suite,init_per_suite,ok}},
+ {?eh,tc_start,{Suite,tc1}},
+ {?eh,tc_done,{Suite,tc1,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_done,{Suite,tc2,?skipped}},
+ {?eh,test_stats,{'_',0,{1,0}}},
+ {?eh,tc_done,{Suite,{init_per_group,g,[]},?skipped}},
+ {?eh,tc_auto_skip,{Suite,tc1,?skip_reason}},
+ {?eh,test_stats,{'_',0,{1,1}}},
+ {?eh,tc_auto_skip,{Suite,tc2,?skip_reason}},
+ {?eh,test_stats,{'_',0,{1,2}}},
+ {?eh,tc_auto_skip,{Suite,end_per_group,?skip_reason}},
+ {?eh,tc_done,{Suite,tc2,?skipped}},
+ {?eh,test_stats,{'_',0,{2,2}}},
+ {?eh,tc_start,{Suite,end_per_suite}},
+ {?eh,tc_done,{Suite,end_per_suite,ok}}].
+
+
+skip_tc1_in_group(Suite) ->
+ [{?eh,tc_start,{Suite,init_per_suite}},
+ {?eh,tc_done,{Suite,init_per_suite,ok}},
+ {?eh,tc_start,{Suite,tc1}},
+ {?eh,tc_done,{Suite,tc1,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,tc2}},
+ {?eh,tc_done,{Suite,tc2,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ [{?eh,tc_start,{Suite,{init_per_group,g,[]}}},
+ {?eh,tc_done,{Suite,{init_per_group,g,[]},ok}},
+ {?eh,tc_start,{Suite,tc1}},
+ {?eh,tc_done,{Suite,tc1,ok}},
+ {?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_done,{Suite,tc2,?skipped}},
+ {?eh,test_stats,{'_',0,{1,0}}},
+ {?eh,tc_start,{Suite,{end_per_group,g,[]}}},
+ {?eh,tc_done,{Suite,{end_per_group,g,[]},ok}}],
+ {?eh,tc_done,{Suite,tc2,?skipped}},
+ {?eh,test_stats,{'_',0,{2,0}}},
+ {?eh,tc_start,{Suite,end_per_suite}},
+ {?eh,tc_done,{Suite,end_per_suite,ok}}].
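
Note: the helpers until_str/3 and duration_str/3 in the new suite build their time arguments from a base time plus N times the measured offset. A worked example, assuming the offset stored by init_per_suite is 0 (as in the code above, where T=0):

    1> Secs = 6 + 1*0.   % ?t2 with one expected repeat, offset 0
    6
    2> "0000" ++ lists:flatten(io_lib:format("~2..0w",[Secs])).
    "000006"             % i.e. a -duration value of 6 seconds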
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
new file mode 100644
index 0000000000..3fd5943691
--- /dev/null
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
@@ -0,0 +1,75 @@
+%%--------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%----------------------------------------------------------------------
+%% File: r1_SUITE.erl
+%%
+%% Description:
+%%
+%%
+%% @author Support
+%% @doc
+%% @end
+%%----------------------------------------------------------------------
+%%----------------------------------------------------------------------
+-module(r1_SUITE).
+-include_lib("common_test/include/ct.hrl").
+
+-compile(export_all).
+
+%% Default timetrap timeout (set in init_per_testcase).
+-define(default_timeout, ?t:seconds(30)).
+
+all() ->
+ testcases() ++ [{group,g}, tc2].
+
+groups() ->
+ [{g,testcases()}].
+
+testcases() ->
+ [tc1,tc2].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ Config.
+
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(_Group, Config) ->
+ Config.
+
+init_per_testcase(_Case, Config) ->
+ Dog = test_server:timetrap(?default_timeout),
+ [{watchdog, Dog}|Config].
+
+end_per_testcase(_Case, Config) ->
+ Dog=?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+%%%-----------------------------------------------------------------
+%%% Test cases
+tc1(_Config) ->
+ timer:sleep(10000),
+ ok.
+
+tc2(_Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
new file mode 100644
index 0000000000..dc9abc2863
--- /dev/null
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
@@ -0,0 +1,75 @@
+%%--------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%----------------------------------------------------------------------
+%% File: r2_SUITE.erl
+%%
+%% Description:
+%%
+%%
+%% @author Support
+%% @doc
+%% @end
+%%----------------------------------------------------------------------
+%%----------------------------------------------------------------------
+-module(r2_SUITE).
+-include_lib("common_test/include/ct.hrl").
+
+-compile(export_all).
+
+%% Default timetrap timeout (set in init_per_testcase).
+-define(default_timeout, ?t:seconds(30)).
+
+all() ->
+ testcases() ++ [{group,g}, tc2].
+
+groups() ->
+ [{g,testcases()}].
+
+testcases() ->
+ [tc1,tc2].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ Config.
+
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(_Group, Config) ->
+ Config.
+
+init_per_testcase(_Case, Config) ->
+ Dog = test_server:timetrap(?default_timeout),
+ [{watchdog, Dog}|Config].
+
+end_per_testcase(_Case, Config) ->
+ Dog=?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+%%%-----------------------------------------------------------------
+%%% Test cases
+tc1(_Config) ->
+ %% timer:sleep(3000),
+ ok.
+
+tc2(_Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl
index 5e109e98e9..70dd087358 100644
--- a/lib/common_test/test/ct_test_support.erl
+++ b/lib/common_test/test/ct_test_support.erl
@@ -29,7 +29,8 @@
-export([init_per_suite/1, init_per_suite/2, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
write_testspec/2, write_testspec/3,
- run/2, run/3, run/4, get_opts/1, wait_for_ct_stop/1]).
+ run/2, run/3, run/4, run_ct_run_test/2, run_ct_script_start/2,
+ get_opts/1, wait_for_ct_stop/1]).
-export([handle_event/2, start_event_receiver/1, get_events/2,
verify_events/3, verify_events/4, reformat/2, log_events/4,
@@ -224,9 +225,15 @@ get_opts(Config) ->
%%%-----------------------------------------------------------------
%%%
run(Opts, Config) when is_list(Opts) ->
+ %% use ct interface
+ CtRunTestResult=run_ct_run_test(Opts,Config),
+ %% use run_test interface (simulated)
+ ExitStatus=run_ct_script_start(Opts,Config),
+ check_result(CtRunTestResult,ExitStatus,Opts).
+
+run_ct_run_test(Opts,Config) ->
CTNode = proplists:get_value(ct_node, Config),
Level = proplists:get_value(trace_level, Config),
- %% use ct interface
test_server:format(Level, "~n[RUN #1] Calling ct:run_test(~p) on ~p~n",
[Opts, CTNode]),
CtRunTestResult = rpc:call(CTNode, ct, run_test, [Opts]),
@@ -242,7 +249,11 @@ run(Opts, Config) when is_list(Opts) ->
timer:sleep(5000),
undefined = rpc:call(CTNode, erlang, whereis, [ct_util_server])
end,
- %% use run_test interface (simulated)
+ CtRunTestResult.
+
+run_ct_script_start(Opts, Config) ->
+ CTNode = proplists:get_value(ct_node, Config),
+ Level = proplists:get_value(trace_level, Config),
Opts1 = [{halt_with,{?MODULE,ct_test_halt}} | Opts],
test_server:format(Level, "Saving start opts on ~p: ~p~n",
[CTNode, Opts1]),
@@ -253,27 +264,38 @@ run(Opts, Config) when is_list(Opts) ->
ExitStatus = rpc:call(CTNode, ct_run, script_start, []),
test_server:format(Level, "[RUN #2] Got exit status value ~p~n",
[ExitStatus]),
- case {CtRunTestResult,ExitStatus} of
- {{_Ok,Failed,{_UserSkipped,_AutoSkipped}},1} when Failed > 0 ->
- ok;
- {{_Ok,0,{_UserSkipped,AutoSkipped}},ExitStatus} when AutoSkipped > 0 ->
- case proplists:get_value(exit_status, Opts1) of
- ignore_config when ExitStatus == 1 ->
- {error,{wrong_exit_status,ExitStatus}};
- _ ->
- ok
- end;
- {{error,_}=Error,ExitStatus} ->
- if ExitStatus /= 2 ->
- {error,{wrong_exit_status,ExitStatus}};
- ExitStatus == 2 ->
- Error
- end;
- {{_Ok,0,{_UserSkipped,_AutoSkipped}},0} ->
- ok;
- Unexpected ->
- {error,{unexpected_return_value,Unexpected}}
- end.
+ ExitStatus.
+
+check_result({_Ok,Failed,{_UserSkipped,_AutoSkipped}},1,_Opts)
+ when Failed > 0 ->
+ ok;
+check_result({_Ok,0,{_UserSkipped,AutoSkipped}},ExitStatus,Opts)
+ when AutoSkipped > 0 ->
+ case proplists:get_value(exit_status, Opts) of
+ ignore_config when ExitStatus == 1 ->
+ {error,{wrong_exit_status,ExitStatus}};
+ _ ->
+ ok
+ end;
+check_result({error,_}=Error,2,_Opts) ->
+ Error;
+check_result({error,_},ExitStatus,_Opts) ->
+ {error,{wrong_exit_status,ExitStatus}};
+check_result({_Ok,0,{_UserSkipped,_AutoSkipped}},0,_Opts) ->
+ ok;
+check_result(CtRunTestResult,ExitStatus,Opts)
+ when is_list(CtRunTestResult) -> % repeated testruns
+ try check_result(sum_testruns(CtRunTestResult,0,0,0,0),ExitStatus,Opts)
+ catch _:_ ->
+ {error,{unexpected_return_value,{CtRunTestResult,ExitStatus}}}
+ end;
+check_result(CtRunTestResult,ExitStatus,_Opts) ->
+ {error,{unexpected_return_value,{CtRunTestResult,ExitStatus}}}.
+
+sum_testruns([{O,F,{US,AS}}|T],Ok,Failed,UserSkipped,AutoSkipped) ->
+ sum_testruns(T,Ok+O,Failed+F,UserSkipped+US,AutoSkipped+AS);
+sum_testruns([],Ok,Failed,UserSkipped,AutoSkipped) ->
+ {Ok,Failed,{UserSkipped,AutoSkipped}}.
run(M, F, A, Config) ->
run({M,F,A}, [], Config).
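
Note: check_result/3 now also accepts a list of result tuples, which is what repeated test runs return, and folds it into a single tuple before checking. A worked example of the fold performed by sum_testruns/5, with hypothetical numbers:

    %% sum_testruns([{8,0,{1,0}}, {4,0,{0,2}}], 0, 0, 0, 0)
    %%   => {12,0,{1,2}}   i.e. 12 ok, 0 failed, 1 user skipped, 2 auto skipped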
diff --git a/lib/test_server/src/test_server_gl.erl b/lib/test_server/src/test_server_gl.erl
index 766a4537a2..2e4f223811 100644
--- a/lib/test_server/src/test_server_gl.erl
+++ b/lib/test_server/src/test_server_gl.erl
@@ -197,7 +197,7 @@ handle_info({io_request,From,ReplyAs,Req}=IoReq, St) ->
From ! {io_reply,ReplyAs,ok}
catch
_:_ ->
- {io_reply,ReplyAs,{error,arguments}}
+ From ! {io_reply,ReplyAs,{error,arguments}}
end,
{noreply,St};
handle_info({structured_io,ClientPid,{Detail,Str}}, St) ->
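
Note: the one-line fix above restores the I/O-protocol contract: the error reply tuple was built but never sent, so a caller such as io:put_chars/1 waited forever for the {io_reply,...} message. The new io_invalid_data test case further down exercises exactly this path; with the fix, invalid data fails fast instead of hanging until the timetrap fires:

    %% From the new test case below: a float is not valid character data,
    %% so the request now returns badarg instead of timing out.
    {'EXIT',{badarg,_}} = (catch io:put_chars("invalid: " ++ [42.0])).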
diff --git a/lib/test_server/test/test_server_SUITE.erl b/lib/test_server/test/test_server_SUITE.erl
index 1a2fc632da..cf1df6df34 100644
--- a/lib/test_server/test/test_server_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE.erl
@@ -104,7 +104,7 @@ test_server_SUITE(Config) ->
% rpc:call(Node,dbg, tpl,[test_server_ctrl,x]),
run_test_server_tests("test_server_SUITE",
[{test_server_SUITE,skip_case7,"SKIPPED!"}],
- 38, 1, 30, 19, 9, 1, 11, 2, 25, Config).
+ 39, 1, 31, 20, 9, 1, 11, 2, 26, Config).
test_server_parallel01_SUITE(Config) ->
run_test_server_tests("test_server_parallel01_SUITE", [],
diff --git a/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl
index fc2adcd651..6c50efa712 100644
--- a/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2012. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,8 @@
conf_init/1, check_new_conf/1, conf_cleanup/1,
check_old_conf/1, conf_init_fail/1, start_stop_node/1,
cleanup_nodes_init/1, check_survive_nodes/1, cleanup_nodes_fin/1,
- commercial/1]).
+ commercial/1,
+ io_invalid_data/1]).
-export([dummy_function/0,dummy_function/1,doer/1]).
@@ -47,7 +48,7 @@ all(suite) ->
[config, comment, timetrap, timetrap_cancel, multiply_timetrap,
init_per_s, init_per_tc, end_per_tc,
timeconv, msgs, capture, timecall, do_times, skip_cases,
- commercial,
+ commercial, io_invalid_data,
{conf, conf_init, [check_new_conf], conf_cleanup},
check_old_conf,
{conf, conf_init_fail,[conf_member_skip],conf_cleanup_skip},
@@ -497,4 +498,8 @@ commercial(Config) when is_list(Config) ->
true -> {comment,"Commercial build"}
end.
-
+io_invalid_data(Config) when is_list(Config) ->
+ ok = io:put_chars("valid: " ++ [42]),
+ %% OTP-10991 caused this to hang and produce a timetrap timeout:
+ {'EXIT',{badarg,_}} = (catch io:put_chars("invalid: " ++ [42.0])),
+ ok.