author    Peter Andersson <[email protected]>  2016-06-08 12:26:55 +0200
committer Peter Andersson <[email protected]>  2016-06-08 12:26:55 +0200
commit    dc653904381551a3db55381a286fc06b37f16063 (patch)
tree      8ffc18530ff76c247a78bc1b8c937ff17662b371
parent    e7588998f05f92f1056e4259452f8a8363d732b5 (diff)
parent    3d5e91fada367fee723563fe133a45cc81dd3a73 (diff)
Merge branch 'peppe/common_test/fix_failing_tests'
* peppe/common_test/fix_failing_tests:
  Make sure test node shuts down before skipping suite
  Measure file i/o overhead and skip test if the speed is too slow
  Skip test cases if cover or debug is running
-rw-r--r--  lib/common_test/test/ct_repeat_testrun_SUITE.erl | 60
 1 file changed, 41 insertions(+), 19 deletions(-)
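
The patch below adds two guards to init_per_suite/1 in ct_repeat_testrun_SUITE: the suite is skipped when Common Test reports a timetrap scale factor above 1 (the case when e.g. cover or debug is running), and a reference test run is timed so the suite can also be skipped when file I/O is too slow for its timing assumptions. As a minimal standalone sketch of the first guard (check_scaling/1 is a hypothetical helper name; the only assumption is the {Time,{Scaled,ScaleVal}} return shape of ct:get_timetrap_info/0 that the patch itself matches on):

%% Return {skip,Reason} if timetraps are scaled, i.e. cover, debug or
%% similar is running and timing measurements cannot be trusted;
%% otherwise pass Config through unchanged.
check_scaling(Config) ->
    case ct:get_timetrap_info() of
        {_Time, {_Scaled, ScaleVal}} when ScaleVal > 1 ->
            {skip, "Timetrap scaling is active"};
        _ ->
            Config
    end.

Returning {skip,Reason} from init_per_suite/1 makes Common Test skip every test case in the suite, which is how the patch bails out on scaled systems.
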
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
index 632597c214..f8b6a379f6 100644
--- a/lib/common_test/test/ct_repeat_testrun_SUITE.erl
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
@@ -66,25 +66,47 @@
%% there will be clashes with logging processes etc).
%%--------------------------------------------------------------------
init_per_suite(Config0) ->
- Config = ct_test_support:init_per_suite(Config0),
- DataDir = ?config(data_dir, Config),
- Suite1 = filename:join([DataDir,"a_test","r1_SUITE"]),
- Suite2 = filename:join([DataDir,"b_test","r2_SUITE"]),
- Opts0 = ct_test_support:get_opts(Config),
- Opts1 = Opts0 ++ [{suite,Suite1},{testcase,tc2},{label,timing1}],
- Opts2 = Opts0 ++ [{suite,Suite2},{testcase,tc2},{label,timing2}],
-
- %% Make sure both suites are compiled
- {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts1],Config),
- {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts2],Config),
-
- %% Time the shortest testcase to use for offset
- {_T0,{1,0,{0,0}}} = timer:tc(ct_test_support,run,[ct,run_test,[Opts1],Config]),
-
- %% -2 is to ensure we hit inside the target test case and not after
-% T = round(T0/1000000)-2,
- T=0,
- [{offset,T}|Config].
+ TTInfo = {_T,{_Scaled,ScaleVal}} = ct:get_timetrap_info(),
+ ct:pal("Timetrap info = ~w", [TTInfo]),
+ if ScaleVal > 1 ->
+ {skip,"Skip on systems running e.g. cover or debug!"};
+ ScaleVal =< 1 ->
+ Config = ct_test_support:init_per_suite(Config0),
+ DataDir = ?config(data_dir, Config),
+ Suite1 = filename:join([DataDir,"a_test","r1_SUITE"]),
+ Suite2 = filename:join([DataDir,"b_test","r2_SUITE"]),
+ Opts0 = ct_test_support:get_opts(Config),
+ Opts1 = Opts0 ++ [{suite,Suite1},{testcase,tc2},{label,timing1}],
+ Opts2 = Opts0 ++ [{suite,Suite2},{testcase,tc2},{label,timing2}],
+
+ %% Make sure both suites are compiled
+ {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts1],Config),
+ {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts2],Config),
+
+ %% Check if file i/o is too slow for correct measurements
+ Opts3 = Opts0 ++ [{suite,Suite1},{testcase,tc1},{label,timing3}],
+ {T,_} =
+ timer:tc(
+ fun() ->
+ {1,0,{0,0}} = ct_test_support:run(ct,run_test,
+ [Opts3],Config),
+ {1,0,{0,0}} = ct_test_support:run(ct,run_test,
+ [Opts3],Config)
+ end),
+ %% The time to compare with here must match the timeout value
+ %% in the test suite. Accept 30% logging overhead (26 sec total).
+ if T > 26000000 ->
+ ct:pal("Timing test took ~w sec (< 27 sec expected). "
+ "Skipping the suite!",
+ [trunc(T/1000000)]),
+ ct_test_support:end_per_suite(Config),
+ {skip,"File I/O too slow for this suite"};
+ true ->
+ ct:pal("Timing test took ~w sec. Proceeding...",
+ [trunc(T/1000000)]),
+ [{offset,0}|Config]
+ end
+ end.

end_per_suite(Config) ->
ct_test_support:end_per_suite(Config).
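
The file I/O guard in the new init_per_suite/1 is built on timer:tc/1, which runs a fun and returns {ElapsedMicroseconds, Result}. A minimal sketch of that pattern, with hypothetical names (io_fast_enough/2, Workload, MaxMicros):

%% Run Workload (a fun of arity 0) and report whether it finished
%% within MaxMicros microseconds; timer:tc/1 returns {Elapsed, Result}.
io_fast_enough(Workload, MaxMicros) ->
    {Elapsed, _Result} = timer:tc(Workload),
    Elapsed =< MaxMicros.

In the patch the workload is two consecutive ct_test_support:run/4 invocations of a single test case, and the budget is 26000000 microseconds, i.e. the timeout value assumed by the test suite plus the accepted 30% logging overhead.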