From adc50b681c3d3bd6d98f6c3f88dad4a306d3a239 Mon Sep 17 00:00:00 2001 From: Peter Andersson Date: Fri, 11 Jan 2013 13:13:32 +0100 Subject: Make it possible to execute one test run per test specification OTP-9881 --- lib/common_test/src/ct_run.erl | 307 ++++++++++++++++++++---------------- lib/common_test/src/ct_testspec.erl | 32 +++- 2 files changed, 194 insertions(+), 145 deletions(-) diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl index eb05c90ba8..50723a763c 100644 --- a/lib/common_test/src/ct_run.erl +++ b/lib/common_test/src/ct_run.erl @@ -378,7 +378,8 @@ run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) -> {error,{all_suites_index,ASReason}}; _ -> file:set_cwd(Cwd), - io:format("Logs in ~s refreshed!~n~n", [LogDir1]), + io:format("Logs in ~s refreshed!~n~n", + [LogDir1]), timer:sleep(500), % time to flush io before quitting ok end @@ -387,145 +388,29 @@ run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) -> script_start2(StartOpts = #opts{vts = undefined, shell = undefined}, Args) -> - TestSpec = proplists:get_value(spec, Args), - {Terms,Opts} = - case TestSpec of - Specs when Specs =/= [], Specs =/= undefined -> - %% using testspec as input for test - Relaxed = get_start_opt(allow_user_terms, true, false, Args), - case catch ct_testspec:collect_tests_from_file(Specs, Relaxed) of - {E,Reason} when E == error ; E == 'EXIT' -> - {{error,Reason},StartOpts}; - TS -> - SpecStartOpts = get_data_for_node(TS, node()), - - Label = choose_val(StartOpts#opts.label, - SpecStartOpts#opts.label), - - Profile = choose_val(StartOpts#opts.profile, - SpecStartOpts#opts.profile), - - LogDir = choose_val(StartOpts#opts.logdir, - SpecStartOpts#opts.logdir), - - AllLogOpts = merge_vals([StartOpts#opts.logopts, - SpecStartOpts#opts.logopts]), - AllVerbosity = - merge_keyvals([StartOpts#opts.verbosity, - SpecStartOpts#opts.verbosity]), - AllSilentConns = - merge_vals([StartOpts#opts.silent_connections, - 
SpecStartOpts#opts.silent_connections]), - Cover = - choose_val(StartOpts#opts.cover, - SpecStartOpts#opts.cover), - CoverStop = - choose_val(StartOpts#opts.cover_stop, - SpecStartOpts#opts.cover_stop), - MultTT = - choose_val(StartOpts#opts.multiply_timetraps, - SpecStartOpts#opts.multiply_timetraps), - ScaleTT = - choose_val(StartOpts#opts.scale_timetraps, - SpecStartOpts#opts.scale_timetraps), - - CreatePrivDir = - choose_val(StartOpts#opts.create_priv_dir, - SpecStartOpts#opts.create_priv_dir), - - AllEvHs = - merge_vals([StartOpts#opts.event_handlers, - SpecStartOpts#opts.event_handlers]), - - AllCTHooks = merge_vals( - [StartOpts#opts.ct_hooks, - SpecStartOpts#opts.ct_hooks]), - - EnableBuiltinHooks = - choose_val( - StartOpts#opts.enable_builtin_hooks, - SpecStartOpts#opts.enable_builtin_hooks), - - Stylesheet = - choose_val(StartOpts#opts.stylesheet, - SpecStartOpts#opts.stylesheet), - - AllInclude = merge_vals([StartOpts#opts.include, - SpecStartOpts#opts.include]), - application:set_env(common_test, include, AllInclude), - - AutoCompile = - case choose_val(StartOpts#opts.auto_compile, - SpecStartOpts#opts.auto_compile) of - undefined -> - true; - ACBool -> - application:set_env(common_test, - auto_compile, - ACBool), - ACBool - end, - - BasicHtml = - case choose_val(StartOpts#opts.basic_html, - SpecStartOpts#opts.basic_html) of - undefined -> - false; - BHBool -> - application:set_env(common_test, basic_html, - BHBool), - BHBool - end, - - {TS,StartOpts#opts{label = Label, - profile = Profile, - testspecs = Specs, - cover = Cover, - cover_stop = CoverStop, - logdir = LogDir, - logopts = AllLogOpts, - basic_html = BasicHtml, - verbosity = AllVerbosity, - silent_connections = AllSilentConns, - config = SpecStartOpts#opts.config, - event_handlers = AllEvHs, - ct_hooks = AllCTHooks, - enable_builtin_hooks = - EnableBuiltinHooks, - stylesheet = Stylesheet, - auto_compile = AutoCompile, - include = AllInclude, - multiply_timetraps = MultTT, - scale_timetraps = 
ScaleTT, - create_priv_dir = CreatePrivDir}} - end; - _ -> - {undefined,StartOpts} - end, - %% read config/userconfig from start flags - InitConfig = ct_config:prepare_config_list(Args), - TheLogDir = which(logdir, Opts#opts.logdir), - case {TestSpec,Terms} of - {_,{error,_}=Error} -> - Error; - {[],_} -> + case proplists:get_value(spec, Args) of + Specs when Specs =/= [], Specs =/= undefined -> + Specs1 = get_start_opt(join_specs, [Specs], Specs, Args), + %% using testspec as input for test + Relaxed = get_start_opt(allow_user_terms, true, false, Args), + case catch ct_testspec:collect_tests_from_file(Specs1, Relaxed) of + {E,Reason} when E == error ; E == 'EXIT' -> + {error,Reason}; + TestSpecData -> + execute_testspecs(TestSpecData, StartOpts, Args, []) + end; + [] -> {error,no_testspec_specified}; - {undefined,_} -> % no testspec used - case check_and_install_configfiles(InitConfig, TheLogDir, Opts) of + _ -> % no testspec used + %% read config/userconfig from start flags + InitConfig = ct_config:prepare_config_list(Args), + TheLogDir = which(logdir, StartOpts#opts.logdir), + case check_and_install_configfiles(InitConfig, + TheLogDir, + StartOpts) of ok -> % go on read tests from start flags - script_start3(Opts#opts{config=InitConfig, - logdir=TheLogDir}, Args); - Error -> - Error - end; - {_,_} -> % testspec used - %% merge config from start flags with config from testspec - AllConfig = merge_vals([InitConfig, Opts#opts.config]), - case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of - ok -> % read tests from spec - {Run,Skip} = ct_testspec:prepare_tests(Terms, node()), - do_run(Run, Skip, Opts#opts{config=AllConfig, - logdir=TheLogDir}, Args); + script_start3(StartOpts#opts{config=InitConfig, + logdir=TheLogDir}, Args); Error -> Error end @@ -543,6 +428,152 @@ script_start2(StartOpts, Args) -> Error end. 
+execute_testspecs([], _, _, Result) -> + Result1 = lists:reverse(Result), + case lists:keysearch('EXIT', 1, Result1) of + {value,{_,_,ExitReason}} -> + exit(ExitReason); + false -> + case lists:keysearch(error, 1, Result1) of + {value,Error} -> + Error; + false -> + lists:foldl(fun({Ok,Fail,{UserSkip,AutoSkip}}, + {Ok1,Fail1,{UserSkip1,AutoSkip1}}) -> + {Ok1+Ok,Fail1+Fail, + {UserSkip1+UserSkip, + AutoSkip1+AutoSkip}} + end, {0,0,{0,0}}, Result1) + end + end; + +execute_testspecs([{Specs,TS} | TSs], StartOpts, Args, Result) -> + SpecStartOpts = get_data_for_node(TS, node()), + + Label = choose_val(StartOpts#opts.label, + SpecStartOpts#opts.label), + + Profile = choose_val(StartOpts#opts.profile, + SpecStartOpts#opts.profile), + + LogDir = choose_val(StartOpts#opts.logdir, + SpecStartOpts#opts.logdir), + + AllLogOpts = merge_vals([StartOpts#opts.logopts, + SpecStartOpts#opts.logopts]), + AllVerbosity = + merge_keyvals([StartOpts#opts.verbosity, + SpecStartOpts#opts.verbosity]), + AllSilentConns = + merge_vals([StartOpts#opts.silent_connections, + SpecStartOpts#opts.silent_connections]), + Cover = + choose_val(StartOpts#opts.cover, + SpecStartOpts#opts.cover), + CoverStop = + choose_val(StartOpts#opts.cover_stop, + SpecStartOpts#opts.cover_stop), + MultTT = + choose_val(StartOpts#opts.multiply_timetraps, + SpecStartOpts#opts.multiply_timetraps), + ScaleTT = + choose_val(StartOpts#opts.scale_timetraps, + SpecStartOpts#opts.scale_timetraps), + + CreatePrivDir = + choose_val(StartOpts#opts.create_priv_dir, + SpecStartOpts#opts.create_priv_dir), + + AllEvHs = + merge_vals([StartOpts#opts.event_handlers, + SpecStartOpts#opts.event_handlers]), + + AllCTHooks = merge_vals( + [StartOpts#opts.ct_hooks, + SpecStartOpts#opts.ct_hooks]), + + EnableBuiltinHooks = + choose_val( + StartOpts#opts.enable_builtin_hooks, + SpecStartOpts#opts.enable_builtin_hooks), + + Stylesheet = + choose_val(StartOpts#opts.stylesheet, + SpecStartOpts#opts.stylesheet), + + AllInclude = 
merge_vals([StartOpts#opts.include, + SpecStartOpts#opts.include]), + application:set_env(common_test, include, AllInclude), + + AutoCompile = + case choose_val(StartOpts#opts.auto_compile, + SpecStartOpts#opts.auto_compile) of + undefined -> + true; + ACBool -> + application:set_env(common_test, + auto_compile, + ACBool), + ACBool + end, + + BasicHtml = + case choose_val(StartOpts#opts.basic_html, + SpecStartOpts#opts.basic_html) of + undefined -> + false; + BHBool -> + application:set_env(common_test, basic_html, + BHBool), + BHBool + end, + + Opts = StartOpts#opts{label = Label, + profile = Profile, + testspecs = Specs, + cover = Cover, + cover_stop = CoverStop, + logdir = LogDir, + logopts = AllLogOpts, + basic_html = BasicHtml, + verbosity = AllVerbosity, + silent_connections = AllSilentConns, + config = SpecStartOpts#opts.config, + event_handlers = AllEvHs, + ct_hooks = AllCTHooks, + enable_builtin_hooks = + EnableBuiltinHooks, + stylesheet = Stylesheet, + auto_compile = AutoCompile, + include = AllInclude, + multiply_timetraps = MultTT, + scale_timetraps = ScaleTT, + create_priv_dir = CreatePrivDir}, + + try execute_testspec(TS, Opts, Args) of + ExecResult -> + execute_testspecs(TSs, StartOpts, Args, [ExecResult | Result]) + catch + _ : ExitReason -> + execute_testspecs(TSs, StartOpts, Args, + [{'EXIT',self(),ExitReason} | Result]) + end. + +execute_testspec(TS, Opts, Args) -> + %% read config/userconfig from start flags + InitConfig = ct_config:prepare_config_list(Args), + TheLogDir = which(logdir, Opts#opts.logdir), + %% merge config from start flags with config from testspec + AllConfig = merge_vals([InitConfig, Opts#opts.config]), + case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of + ok -> % read tests from spec + {Run,Skip} = ct_testspec:prepare_tests(TS, node()), + do_run(Run, Skip, Opts#opts{config=AllConfig, + logdir=TheLogDir}, Args); + Error -> + Error + end. 
+ check_and_install_configfiles( Configs, LogDir, #opts{ event_handlers = EvHandlers, diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl index 202d8f9373..9a7a384ebb 100644 --- a/lib/common_test/src/ct_testspec.erl +++ b/lib/common_test/src/ct_testspec.erl @@ -246,22 +246,40 @@ collect_tests_from_file(Specs, Relaxed) -> collect_tests_from_file(Specs,Nodes,Relaxed) when is_list(Nodes) -> NodeRefs = lists:map(fun(N) -> {undefined,N} end, Nodes), - catch collect_tests_from_file1(Specs,#testspec{nodes=NodeRefs},Relaxed). - -collect_tests_from_file1([Spec|Specs],TestSpec,Relaxed) -> + %% [Spec1,Spec2,...] means create one testpec record per Spec file + %% [[Spec1,Spec2,...]] means merge all specs into one testspec record + {MergeSpecs,Specs1} = if is_list(hd(hd(Specs))) -> {true,hd(Specs)}; + true -> {false,Specs} + end, + catch create_specs(Specs1,Specs1,#testspec{nodes=NodeRefs}, + Relaxed,MergeSpecs,[]). + +create_specs([Spec|Ss],Specs,TestSpec,Relaxed,MergeSpecs,Saved) -> SpecDir = filename:dirname(filename:absname(Spec)), case file:consult(Spec) of {ok,Terms} -> case collect_tests(Terms, TestSpec#testspec{spec_dir=SpecDir}, Relaxed) of - TS = #testspec{tests=Tests, logdir=LogDirs} when Specs == [] -> + TS = #testspec{tests=Tests, logdir=LogDirs} when + Ss == [], MergeSpecs == true -> + LogDirs1 = lists:delete(".",LogDirs) ++ ["."], + [{Specs,TS#testspec{tests=lists:flatten(Tests), + logdir=LogDirs1}}]; + TS = #testspec{tests=Tests, logdir=LogDirs} when + Ss == [], MergeSpecs == false -> LogDirs1 = lists:delete(".",LogDirs) ++ ["."], - TS#testspec{tests=lists:flatten(Tests), logdir=LogDirs1}; - TS = #testspec{alias = As, nodes = Ns} -> + TSRet = {[Spec],TS#testspec{tests=lists:flatten(Tests), + logdir=LogDirs1}}, + lists:reverse([TSRet|Saved]); + TS = #testspec{alias = As, nodes = Ns} when + MergeSpecs == true -> TS1 = TS#testspec{alias = lists:reverse(As), nodes = lists:reverse(Ns)}, - collect_tests_from_file1(Specs,TS1,Relaxed) 
+ create_specs(Ss,Specs,TS1,Relaxed,MergeSpecs,[]); + TS when MergeSpecs == false -> + create_specs(Ss,Specs,TestSpec,Relaxed,MergeSpecs, + [{[Spec],TS}|Saved]) end; {error,Reason} -> ReasonStr = -- cgit v1.2.3 From 102c8167c6960233102aa6f2fabec5416882b74f Mon Sep 17 00:00:00 2001 From: Peter Andersson Date: Mon, 14 Jan 2013 15:12:01 +0100 Subject: Implement support for including test specifications OTP-9881 --- lib/common_test/src/ct_run.erl | 423 ++++++++++++--------------- lib/common_test/src/ct_testspec.erl | 134 ++++++--- lib/common_test/test/ct_testspec_2_SUITE.erl | 18 +- 3 files changed, 303 insertions(+), 272 deletions(-) diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl index 50723a763c..742e287493 100644 --- a/lib/common_test/src/ct_run.erl +++ b/lib/common_test/src/ct_run.erl @@ -330,33 +330,33 @@ script_start1(Parent, Args) -> true end, - StartOpts = #opts{label = Label, profile = Profile, - vts = Vts, shell = Shell, - cover = Cover, cover_stop = CoverStop, - logdir = LogDir, logopts = LogOpts, - basic_html = BasicHtml, - verbosity = Verbosity, - event_handlers = EvHandlers, - ct_hooks = CTHooks, - enable_builtin_hooks = EnableBuiltinHooks, - auto_compile = AutoCompile, - include = IncludeDirs, - silent_connections = SilentConns, - stylesheet = Stylesheet, - multiply_timetraps = MultTT, - scale_timetraps = ScaleTT, - create_priv_dir = CreatePrivDir, - starter = script}, - + Opts = #opts{label = Label, profile = Profile, + vts = Vts, shell = Shell, + cover = Cover, cover_stop = CoverStop, + logdir = LogDir, logopts = LogOpts, + basic_html = BasicHtml, + verbosity = Verbosity, + event_handlers = EvHandlers, + ct_hooks = CTHooks, + enable_builtin_hooks = EnableBuiltinHooks, + auto_compile = AutoCompile, + include = IncludeDirs, + silent_connections = SilentConns, + stylesheet = Stylesheet, + multiply_timetraps = MultTT, + scale_timetraps = ScaleTT, + create_priv_dir = CreatePrivDir, + starter = script}, + %% check if log files 
should be refreshed or go on to run tests... - Result = run_or_refresh(StartOpts, Args), + Result = run_or_refresh(Opts, Args), %% send final results to starting process waiting in script_start/0 Parent ! {self(), Result}. -run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) -> +run_or_refresh(Opts = #opts{logdir = LogDir}, Args) -> case proplists:get_value(refresh_logs, Args) of undefined -> - script_start2(StartOpts, Args); + script_start2(Opts, Args); Refresh -> LogDir1 = case Refresh of [] -> which(logdir,LogDir); @@ -386,8 +386,8 @@ run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) -> end end. -script_start2(StartOpts = #opts{vts = undefined, - shell = undefined}, Args) -> +script_start2(Opts = #opts{vts = undefined, + shell = undefined}, Args) -> case proplists:get_value(spec, Args) of Specs when Specs =/= [], Specs =/= undefined -> Specs1 = get_start_opt(join_specs, [Specs], Specs, Args), @@ -397,38 +397,38 @@ script_start2(StartOpts = #opts{vts = undefined, {E,Reason} when E == error ; E == 'EXIT' -> {error,Reason}; TestSpecData -> - execute_testspecs(TestSpecData, StartOpts, Args, []) + execute_all_specs(TestSpecData, Opts, Args, []) end; [] -> {error,no_testspec_specified}; _ -> % no testspec used %% read config/userconfig from start flags InitConfig = ct_config:prepare_config_list(Args), - TheLogDir = which(logdir, StartOpts#opts.logdir), + TheLogDir = which(logdir, Opts#opts.logdir), case check_and_install_configfiles(InitConfig, TheLogDir, - StartOpts) of + Opts) of ok -> % go on read tests from start flags - script_start3(StartOpts#opts{config=InitConfig, - logdir=TheLogDir}, Args); + script_start3(Opts#opts{config=InitConfig, + logdir=TheLogDir}, Args); Error -> Error end end; -script_start2(StartOpts, Args) -> +script_start2(Opts, Args) -> %% read config/userconfig from start flags InitConfig = ct_config:prepare_config_list(Args), - LogDir = which(logdir, StartOpts#opts.logdir), - case check_and_install_configfiles(InitConfig, LogDir, 
StartOpts) of + LogDir = which(logdir, Opts#opts.logdir), + case check_and_install_configfiles(InitConfig, LogDir, Opts) of ok -> % go on read tests from start flags - script_start3(StartOpts#opts{config=InitConfig, - logdir=LogDir}, Args); + script_start3(Opts#opts{config=InitConfig, + logdir=LogDir}, Args); Error -> Error end. -execute_testspecs([], _, _, Result) -> +execute_all_specs([], _, _, Result) -> Result1 = lists:reverse(Result), case lists:keysearch('EXIT', 1, Result1) of {value,{_,_,ExitReason}} -> @@ -447,67 +447,93 @@ execute_testspecs([], _, _, Result) -> end end; -execute_testspecs([{Specs,TS} | TSs], StartOpts, Args, Result) -> - SpecStartOpts = get_data_for_node(TS, node()), - - Label = choose_val(StartOpts#opts.label, - SpecStartOpts#opts.label), - - Profile = choose_val(StartOpts#opts.profile, - SpecStartOpts#opts.profile), - - LogDir = choose_val(StartOpts#opts.logdir, - SpecStartOpts#opts.logdir), - - AllLogOpts = merge_vals([StartOpts#opts.logopts, - SpecStartOpts#opts.logopts]), +execute_all_specs([{Specs,TS} | TSs], Opts, Args, Result) -> + CombinedOpts = combine_test_opts(TS, Specs, Opts), + try execute_one_spec(TS, CombinedOpts, Args) of + ExecResult -> + execute_all_specs(TSs, Opts, Args, [ExecResult|Result]) + catch + _ : ExitReason -> + execute_all_specs(TSs, Opts, Args, + [{'EXIT',self(),ExitReason}|Result]) + end. + +execute_one_spec(TS, Opts, Args) -> + %% read config/userconfig from start flags + InitConfig = ct_config:prepare_config_list(Args), + TheLogDir = which(logdir, Opts#opts.logdir), + %% merge config from start flags with config from testspec + AllConfig = merge_vals([InitConfig, Opts#opts.config]), + case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of + ok -> % read tests from spec + {Run,Skip} = ct_testspec:prepare_tests(TS, node()), + do_run(Run, Skip, Opts#opts{config=AllConfig, + logdir=TheLogDir}, Args); + Error -> + Error + end. 
+ +combine_test_opts(TS, Specs, Opts) -> + TSOpts = get_data_for_node(TS, node()), + + Label = choose_val(Opts#opts.label, + TSOpts#opts.label), + + Profile = choose_val(Opts#opts.profile, + TSOpts#opts.profile), + + LogDir = choose_val(Opts#opts.logdir, + TSOpts#opts.logdir), + + AllLogOpts = merge_vals([Opts#opts.logopts, + TSOpts#opts.logopts]), AllVerbosity = - merge_keyvals([StartOpts#opts.verbosity, - SpecStartOpts#opts.verbosity]), + merge_keyvals([Opts#opts.verbosity, + TSOpts#opts.verbosity]), AllSilentConns = - merge_vals([StartOpts#opts.silent_connections, - SpecStartOpts#opts.silent_connections]), + merge_vals([Opts#opts.silent_connections, + TSOpts#opts.silent_connections]), Cover = - choose_val(StartOpts#opts.cover, - SpecStartOpts#opts.cover), + choose_val(Opts#opts.cover, + TSOpts#opts.cover), CoverStop = - choose_val(StartOpts#opts.cover_stop, - SpecStartOpts#opts.cover_stop), + choose_val(Opts#opts.cover_stop, + TSOpts#opts.cover_stop), MultTT = - choose_val(StartOpts#opts.multiply_timetraps, - SpecStartOpts#opts.multiply_timetraps), + choose_val(Opts#opts.multiply_timetraps, + TSOpts#opts.multiply_timetraps), ScaleTT = - choose_val(StartOpts#opts.scale_timetraps, - SpecStartOpts#opts.scale_timetraps), - + choose_val(Opts#opts.scale_timetraps, + TSOpts#opts.scale_timetraps), + CreatePrivDir = - choose_val(StartOpts#opts.create_priv_dir, - SpecStartOpts#opts.create_priv_dir), - + choose_val(Opts#opts.create_priv_dir, + TSOpts#opts.create_priv_dir), + AllEvHs = - merge_vals([StartOpts#opts.event_handlers, - SpecStartOpts#opts.event_handlers]), - + merge_vals([Opts#opts.event_handlers, + TSOpts#opts.event_handlers]), + AllCTHooks = merge_vals( - [StartOpts#opts.ct_hooks, - SpecStartOpts#opts.ct_hooks]), - + [Opts#opts.ct_hooks, + TSOpts#opts.ct_hooks]), + EnableBuiltinHooks = choose_val( - StartOpts#opts.enable_builtin_hooks, - SpecStartOpts#opts.enable_builtin_hooks), - + Opts#opts.enable_builtin_hooks, + TSOpts#opts.enable_builtin_hooks), + 
Stylesheet = - choose_val(StartOpts#opts.stylesheet, - SpecStartOpts#opts.stylesheet), - - AllInclude = merge_vals([StartOpts#opts.include, - SpecStartOpts#opts.include]), + choose_val(Opts#opts.stylesheet, + TSOpts#opts.stylesheet), + + AllInclude = merge_vals([Opts#opts.include, + TSOpts#opts.include]), application:set_env(common_test, include, AllInclude), - + AutoCompile = - case choose_val(StartOpts#opts.auto_compile, - SpecStartOpts#opts.auto_compile) of + case choose_val(Opts#opts.auto_compile, + TSOpts#opts.auto_compile) of undefined -> true; ACBool -> @@ -518,8 +544,8 @@ execute_testspecs([{Specs,TS} | TSs], StartOpts, Args, Result) -> end, BasicHtml = - case choose_val(StartOpts#opts.basic_html, - SpecStartOpts#opts.basic_html) of + case choose_val(Opts#opts.basic_html, + TSOpts#opts.basic_html) of undefined -> false; BHBool -> @@ -527,52 +553,27 @@ execute_testspecs([{Specs,TS} | TSs], StartOpts, Args, Result) -> BHBool), BHBool end, - - Opts = StartOpts#opts{label = Label, - profile = Profile, - testspecs = Specs, - cover = Cover, - cover_stop = CoverStop, - logdir = LogDir, - logopts = AllLogOpts, - basic_html = BasicHtml, - verbosity = AllVerbosity, - silent_connections = AllSilentConns, - config = SpecStartOpts#opts.config, - event_handlers = AllEvHs, - ct_hooks = AllCTHooks, - enable_builtin_hooks = - EnableBuiltinHooks, - stylesheet = Stylesheet, - auto_compile = AutoCompile, - include = AllInclude, - multiply_timetraps = MultTT, - scale_timetraps = ScaleTT, - create_priv_dir = CreatePrivDir}, - - try execute_testspec(TS, Opts, Args) of - ExecResult -> - execute_testspecs(TSs, StartOpts, Args, [ExecResult | Result]) - catch - _ : ExitReason -> - execute_testspecs(TSs, StartOpts, Args, - [{'EXIT',self(),ExitReason} | Result]) - end. 
- -execute_testspec(TS, Opts, Args) -> - %% read config/userconfig from start flags - InitConfig = ct_config:prepare_config_list(Args), - TheLogDir = which(logdir, Opts#opts.logdir), - %% merge config from start flags with config from testspec - AllConfig = merge_vals([InitConfig, Opts#opts.config]), - case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of - ok -> % read tests from spec - {Run,Skip} = ct_testspec:prepare_tests(TS, node()), - do_run(Run, Skip, Opts#opts{config=AllConfig, - logdir=TheLogDir}, Args); - Error -> - Error - end. + + Opts#opts{label = Label, + profile = Profile, + testspecs = Specs, + cover = Cover, + cover_stop = CoverStop, + logdir = which(logdir, LogDir), + logopts = AllLogOpts, + basic_html = BasicHtml, + verbosity = AllVerbosity, + silent_connections = AllSilentConns, + config = TSOpts#opts.config, + event_handlers = AllEvHs, + ct_hooks = AllCTHooks, + enable_builtin_hooks = EnableBuiltinHooks, + stylesheet = Stylesheet, + auto_compile = AutoCompile, + include = AllInclude, + multiply_timetraps = MultTT, + scale_timetraps = ScaleTT, + create_priv_dir = CreatePrivDir}. check_and_install_configfiles( Configs, LogDir, #opts{ @@ -593,12 +594,12 @@ check_and_install_configfiles( {error,{cant_load_callback_module,Info}} end. 
-script_start3(StartOpts, Args) -> - StartOpts1 = get_start_opt(step, - fun(Step) -> - StartOpts#opts{step = Step, - cover = undefined} - end, StartOpts, Args), +script_start3(Opts, Args) -> + Opts1 = get_start_opt(step, + fun(Step) -> + Opts#opts{step = Step, + cover = undefined} + end, Opts, Args), case {proplists:get_value(dir, Args), proplists:get_value(suite, Args), groups_and_cases(proplists:get_value(group, Args), @@ -612,17 +613,17 @@ script_start3(StartOpts, Args) -> {error,no_dir_specified}; {Dirs,undefined,[]} when is_list(Dirs) -> - script_start4(StartOpts#opts{tests = tests(Dirs)}, Args); + script_start4(Opts#opts{tests = tests(Dirs)}, Args); {undefined,Suites,[]} when is_list(Suites) -> Ts = tests([suite_to_test(S) || S <- Suites]), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); {undefined,Suite,GsAndCs} when is_list(Suite) -> case [suite_to_test(S) || S <- Suite] of DirMods = [_] -> Ts = tests(DirMods, GsAndCs), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); [_,_|_] -> {error,multiple_suites_and_cases}; _ -> @@ -636,10 +637,10 @@ script_start3(StartOpts, Args) -> case [suite_to_test(Dir,S) || S <- Suite] of DirMods when GsAndCs == [] -> Ts = tests(DirMods), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); DirMods = [_] when GsAndCs /= [] -> Ts = tests(DirMods, GsAndCs), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); [_,_|_] when GsAndCs /= [] -> {error,multiple_suites_and_cases}; _ -> @@ -650,8 +651,8 @@ script_start3(StartOpts, Args) -> {error,incorrect_start_options}; {undefined,undefined,_} -> - if StartOpts#opts.vts ; StartOpts#opts.shell -> - script_start4(StartOpts#opts{tests = []}, Args); + if Opts#opts.vts ; Opts#opts.shell -> + script_start4(Opts#opts{tests = []}, Args); true -> script_usage(), {error,missing_start_options} @@ -781,6 
+782,7 @@ script_usage() -> "\n\t[-logopts LogOpt1 LogOpt2 .. LogOptN]" "\n\t[-verbosity GenVLvl | [CategoryVLvl1 .. CategoryVLvlN]]" "\n\t[-allow_user_terms]" + "\n\t[-join_specs]" "\n\t[-silent_connections [ConnType1 ConnType2 .. ConnTypeN]]" "\n\t[-stylesheet CSSFile]" "\n\t[-cover CoverCfgFile]" @@ -1075,105 +1077,60 @@ run_test2(StartOpts) -> end. run_spec_file(Relaxed, - Opts = #opts{testspecs = Specs, config = CfgFiles}, + Opts = #opts{testspecs = Specs}, StartOpts) -> Specs1 = case Specs of [X|_] when is_integer(X) -> [Specs]; _ -> Specs end, AbsSpecs = lists:map(fun(SF) -> ?abs(SF) end, Specs1), - log_ts_names(AbsSpecs), - case catch ct_testspec:collect_tests_from_file(AbsSpecs, Relaxed) of + AbsSpecs1 = get_start_opt(join_specs, [AbsSpecs], AbsSpecs, StartOpts), + case catch ct_testspec:collect_tests_from_file(AbsSpecs1, Relaxed) of {Error,CTReason} when Error == error ; Error == 'EXIT' -> exit({error,CTReason}); - TS -> - SpecOpts = get_data_for_node(TS, node()), - Label = choose_val(Opts#opts.label, - SpecOpts#opts.label), - Profile = choose_val(Opts#opts.profile, - SpecOpts#opts.profile), - LogDir = choose_val(Opts#opts.logdir, - SpecOpts#opts.logdir), - AllLogOpts = merge_vals([Opts#opts.logopts, - SpecOpts#opts.logopts]), - Stylesheet = choose_val(Opts#opts.stylesheet, - SpecOpts#opts.stylesheet), - AllVerbosity = merge_keyvals([Opts#opts.verbosity, - SpecOpts#opts.verbosity]), - AllSilentConns = merge_vals([Opts#opts.silent_connections, - SpecOpts#opts.silent_connections]), - AllConfig = merge_vals([CfgFiles, SpecOpts#opts.config]), - Cover = choose_val(Opts#opts.cover, - SpecOpts#opts.cover), - CoverStop = choose_val(Opts#opts.cover_stop, - SpecOpts#opts.cover_stop), - MultTT = choose_val(Opts#opts.multiply_timetraps, - SpecOpts#opts.multiply_timetraps), - ScaleTT = choose_val(Opts#opts.scale_timetraps, - SpecOpts#opts.scale_timetraps), - CreatePrivDir = choose_val(Opts#opts.create_priv_dir, - SpecOpts#opts.create_priv_dir), - AllEvHs = 
merge_vals([Opts#opts.event_handlers, - SpecOpts#opts.event_handlers]), - AllInclude = merge_vals([Opts#opts.include, - SpecOpts#opts.include]), - AllCTHooks = merge_vals([Opts#opts.ct_hooks, - SpecOpts#opts.ct_hooks]), - EnableBuiltinHooks = choose_val(Opts#opts.enable_builtin_hooks, - SpecOpts#opts.enable_builtin_hooks), - - application:set_env(common_test, include, AllInclude), - - AutoCompile = case choose_val(Opts#opts.auto_compile, - SpecOpts#opts.auto_compile) of - undefined -> - true; - ACBool -> - application:set_env(common_test, auto_compile, - ACBool), - ACBool - end, + TestSpecData -> + run_all_specs(TestSpecData, Opts, StartOpts, []) + end. - BasicHtml = case choose_val(Opts#opts.basic_html, - SpecOpts#opts.basic_html) of - undefined -> - false; - BHBool -> - application:set_env(common_test, basic_html, - BHBool), - BHBool - end, - - Opts1 = Opts#opts{label = Label, - profile = Profile, - cover = Cover, - cover_stop = CoverStop, - logdir = which(logdir, LogDir), - logopts = AllLogOpts, - stylesheet = Stylesheet, - basic_html = BasicHtml, - verbosity = AllVerbosity, - silent_connections = AllSilentConns, - config = AllConfig, - event_handlers = AllEvHs, - auto_compile = AutoCompile, - include = AllInclude, - testspecs = AbsSpecs, - multiply_timetraps = MultTT, - scale_timetraps = ScaleTT, - create_priv_dir = CreatePrivDir, - ct_hooks = AllCTHooks, - enable_builtin_hooks = EnableBuiltinHooks - }, - - case check_and_install_configfiles(AllConfig,Opts1#opts.logdir, - Opts1) of - ok -> - {Run,Skip} = ct_testspec:prepare_tests(TS, node()), - reformat_result(catch do_run(Run, Skip, Opts1, StartOpts)); - {error,GCFReason} -> - exit({error,GCFReason}) +run_all_specs([], _, _, TotResult) -> + TotResult1 = lists:reverse(TotResult), + case lists:keysearch('EXIT', 1, TotResult1) of + {value,{_,_,ExitReason}} -> + exit(ExitReason); + false -> + case lists:keysearch(error, 1, TotResult1) of + {value,Error} -> + Error; + false -> + 
lists:foldl(fun({Ok,Fail,{UserSkip,AutoSkip}}, + {Ok1,Fail1,{UserSkip1,AutoSkip1}}) -> + {Ok1+Ok,Fail1+Fail, + {UserSkip1+UserSkip, + AutoSkip1+AutoSkip}} + end, {0,0,{0,0}}, TotResult1) end + end; + +run_all_specs([{Specs,TS} | TSs], Opts, StartOpts, TotResult) -> + log_ts_names(Specs), + Combined = #opts{config = TSConfig} = combine_test_opts(TS, Specs, Opts), + AllConfig = merge_vals([Opts#opts.config, TSConfig]), + try run_one_spec(TS, Combined#opts{config = AllConfig}, StartOpts) of + Result -> + run_all_specs(TSs, Opts, StartOpts, [Result | TotResult]) + catch + _ : Reason -> + run_all_specs(TSs, Opts, StartOpts, [{error,Reason} | TotResult]) + end. + +run_one_spec(TS, CombinedOpts, StartOpts) -> + #opts{logdir = Logdir, config = Config} = CombinedOpts, + case check_and_install_configfiles(Config, Logdir, CombinedOpts) of + ok -> + {Run,Skip} = ct_testspec:prepare_tests(TS, node()), + reformat_result(catch do_run(Run, Skip, CombinedOpts, StartOpts)); + Error -> + Error end. run_prepared(Run, Skip, Opts = #opts{logdir = LogDir, @@ -2927,6 +2884,10 @@ opts2args(EnvStartOpts) -> [{allow_user_terms,[]}]; ({allow_user_terms,false}) -> []; + ({join_specs,true}) -> + [{join_specs,[]}]; + ({join_specs,false}) -> + []; ({auto_compile,false}) -> [{no_auto_compile,[]}]; ({auto_compile,true}) -> diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl index 9a7a384ebb..3d9351b71b 100644 --- a/lib/common_test/src/ct_testspec.erl +++ b/lib/common_test/src/ct_testspec.erl @@ -241,45 +241,71 @@ get_skipped_cases1(_,_,_,[]) -> %%% collect_tests_from_file reads a testspec file and returns a record %%% containing the data found. -collect_tests_from_file(Specs, Relaxed) -> +collect_tests_from_file(Specs,Relaxed) -> collect_tests_from_file(Specs,[node()],Relaxed). collect_tests_from_file(Specs,Nodes,Relaxed) when is_list(Nodes) -> NodeRefs = lists:map(fun(N) -> {undefined,N} end, Nodes), %% [Spec1,Spec2,...] 
means create one testpec record per Spec file %% [[Spec1,Spec2,...]] means merge all specs into one testspec record - {MergeSpecs,Specs1} = if is_list(hd(hd(Specs))) -> {true,hd(Specs)}; + {JoinSpecs,Specs1} = if is_list(hd(hd(Specs))) -> {true,hd(Specs)}; true -> {false,Specs} end, - catch create_specs(Specs1,Specs1,#testspec{nodes=NodeRefs}, - Relaxed,MergeSpecs,[]). + TS0 = #testspec{nodes=NodeRefs}, + %% remove specs without tests + Filter = fun({_,#testspec{tests=[]}}) -> false; + (_) -> true + end, + try create_specs(Specs1,TS0,Relaxed,JoinSpecs,{[],TS0},[]) of + {{[],_},AdditionalTestSpecs} -> + lists:filter(Filter,AdditionalTestSpecs); + {{_,#testspec{tests=[]}},AdditionalTestSpecs} -> + lists:filter(Filter,AdditionalTestSpecs); + {{JoinedSpecs,JoinedTestSpec},AdditionalTestSpecs} -> + [{JoinedSpecs,JoinedTestSpec} | + lists:filter(Filter,AdditionalTestSpecs)] + catch + _:Error -> + Error + end. -create_specs([Spec|Ss],Specs,TestSpec,Relaxed,MergeSpecs,Saved) -> +create_specs([],_,_,_,Joined,Additional) -> + {Joined,Additional}; +create_specs([Spec|Ss],TestSpec,Relaxed,JoinSpecs, + Joined={JSpecs,_},Additional) -> SpecDir = filename:dirname(filename:absname(Spec)), + TestSpec1 = TestSpec#testspec{spec_dir=SpecDir}, case file:consult(Spec) of - {ok,Terms} -> - case collect_tests(Terms, - TestSpec#testspec{spec_dir=SpecDir}, - Relaxed) of - TS = #testspec{tests=Tests, logdir=LogDirs} when - Ss == [], MergeSpecs == true -> - LogDirs1 = lists:delete(".",LogDirs) ++ ["."], - [{Specs,TS#testspec{tests=lists:flatten(Tests), - logdir=LogDirs1}}]; - TS = #testspec{tests=Tests, logdir=LogDirs} when - Ss == [], MergeSpecs == false -> - LogDirs1 = lists:delete(".",LogDirs) ++ ["."], - TSRet = {[Spec],TS#testspec{tests=lists:flatten(Tests), - logdir=LogDirs1}}, - lists:reverse([TSRet|Saved]); - TS = #testspec{alias = As, nodes = Ns} when - MergeSpecs == true -> - TS1 = TS#testspec{alias = lists:reverse(As), - nodes = lists:reverse(Ns)}, - 
create_specs(Ss,Specs,TS1,Relaxed,MergeSpecs,[]); - TS when MergeSpecs == false -> - create_specs(Ss,Specs,TestSpec,Relaxed,MergeSpecs, - [{[Spec],TS}|Saved]) + {ok,Terms} -> + Terms1 = replace_names(Terms), + {Specs2Join,Specs2Add} = get_included_specs(Terms1,TestSpec1), + TestSpec2 = create_spec(Terms1,TestSpec1, + Relaxed,JoinSpecs), + case {JoinSpecs,Specs2Join,Specs2Add} of + {true,[],[]} -> + create_specs(Ss,TestSpec2,Relaxed,JoinSpecs, + {JSpecs++[get_absdir(Spec,TestSpec2)], + TestSpec2},Additional); + {false,[],[]} -> + create_specs(Ss,TestSpec,Relaxed,JoinSpecs,Joined, + Additional++[{[get_absdir(Spec,TestSpec2)], + TestSpec2}]); + _ -> + {{JSpecs1,JTS1},Additional1} = + create_specs(Specs2Join,TestSpec2,Relaxed,true, + {[get_absdir(Spec,TestSpec2)], + TestSpec2},[]), + {Joined2,Additional2} = + create_specs(Specs2Add,TestSpec,Relaxed,false, + {[],TestSpec1},[]), + NewJoined = {JSpecs++JSpecs1,JTS1}, + NewAdditional = Additional++Additional1++ + [Joined2 | Additional2], + NextTestSpec = if not JoinSpecs -> TestSpec; + true -> JTS1 + end, + create_specs(Ss,NextTestSpec,Relaxed,JoinSpecs, + NewJoined,NewAdditional) end; {error,Reason} -> ReasonStr = @@ -287,6 +313,14 @@ create_specs([Spec|Ss],Specs,TestSpec,Relaxed,MergeSpecs,Saved) -> [file:format_error(Reason)])), throw({error,{Spec,ReasonStr}}) end. + +create_spec(Terms,TestSpec,Relaxed,JoinSpecs) -> + TS = #testspec{tests=Tests, logdir=LogDirs} = + collect_tests({false,Terms},TestSpec,Relaxed), + + LogDirs1 = lists:delete(".",LogDirs) ++ ["."], + TS#testspec{tests=lists:flatten(Tests), + logdir=LogDirs1}. collect_tests_from_list(Terms,Relaxed) -> collect_tests_from_list(Terms,[node()],Relaxed). 
@@ -294,8 +328,8 @@ collect_tests_from_list(Terms,Relaxed) -> collect_tests_from_list(Terms,Nodes,Relaxed) when is_list(Nodes) -> {ok,Cwd} = file:get_cwd(), NodeRefs = lists:map(fun(N) -> {undefined,N} end, Nodes), - case catch collect_tests(Terms,#testspec{nodes=NodeRefs, - spec_dir=Cwd}, + case catch collect_tests({true,Terms},#testspec{nodes=NodeRefs, + spec_dir=Cwd}, Relaxed) of E = {error,_} -> E; @@ -305,10 +339,15 @@ collect_tests_from_list(Terms,Nodes,Relaxed) when is_list(Nodes) -> TS#testspec{tests=lists:flatten(Tests), logdir=LogDirs1} end. -collect_tests(Terms,TestSpec,Relaxed) -> +collect_tests({Replace,Terms},TestSpec=#testspec{alias=As,nodes=Ns},Relaxed) -> put(relaxed,Relaxed), - Terms1 = replace_names(Terms), - TestSpec1 = get_global(Terms1,TestSpec), + Terms1 = if Replace -> replace_names(Terms); + true -> Terms + end, + %% reverse nodes and aliases initially to get the order of them right + %% in case this spec is being joined with a previous one + TestSpec1 = get_global(Terms1,TestSpec#testspec{alias = lists:reverse(As), + nodes = lists:reverse(Ns)}), TestSpec2 = get_all_nodes(Terms1,TestSpec1), {Terms2, TestSpec3} = filter_init_terms(Terms1, [], TestSpec2), add_tests(Terms2,TestSpec3). @@ -438,9 +477,30 @@ replace_names_in_node1(NodeStr,Defs=[{Name,Replacement}|Ds]) -> replace_names_in_node1(NodeStr,[]) -> NodeStr. +%% look for other specification files, either to join with the +%% current spec, or execute as additional test runs +get_included_specs(Terms,TestSpec) -> + get_included_specs(Terms,TestSpec,[],[]). 
+ +get_included_specs([{specs,How,SpecOrSpecs}|Ts],TestSpec,Join,Add) -> + Specs = case SpecOrSpecs of + [File|_] when is_list(File) -> + [get_absdir(Spec,TestSpec) || Spec <- SpecOrSpecs]; + [Ch|_] when is_integer(Ch) -> + [get_absdir(SpecOrSpecs,TestSpec)] + end, + if How == join -> + get_included_specs(Ts,TestSpec,Join++Specs,Add); + true -> + get_included_specs(Ts,TestSpec,Join,Add++Specs) + end; +get_included_specs([_|Ts],TestSpec,Join,Add) -> + get_included_specs(Ts,TestSpec,Join,Add); +get_included_specs([],_,Join,Add) -> + {Join,Add}. %% global terms that will be used for analysing all other terms in the spec -get_global([{merge_tests,Bool} | Ts], Spec) -> +get_global([{merge_tests,Bool}|Ts],Spec) -> get_global(Ts,Spec#testspec{merge_tests=Bool}); %% the 'define' term replaces the 'alias' and 'node' terms, but we need to keep @@ -810,7 +870,10 @@ add_tests([{alias,_,_}|Ts],Spec) -> % handled add_tests([{node,_,_}|Ts],Spec) -> % handled add_tests(Ts,Spec); -add_tests([{merge_tests, _} | Ts], Spec) -> % handled +add_tests([{merge_tests,_} | Ts], Spec) -> % handled + add_tests(Ts,Spec); + +add_tests([{specs,_,_} | Ts], Spec) -> % handled add_tests(Ts,Spec); %% -------------------------------------------------- @@ -1279,6 +1342,7 @@ is_node([],_) -> valid_terms() -> [ {define,3}, + {specs,3}, {node,3}, {cover,2}, {cover,3}, diff --git a/lib/common_test/test/ct_testspec_2_SUITE.erl b/lib/common_test/test/ct_testspec_2_SUITE.erl index 9d2dc84ad3..c150686482 100644 --- a/lib/common_test/test/ct_testspec_2_SUITE.erl +++ b/lib/common_test/test/ct_testspec_2_SUITE.erl @@ -479,7 +479,7 @@ multiple_specs(_Config) -> "multiple_specs.1.spec"), SpecFile2 = ct_test_support:write_testspec(Spec2,SpecDir, "multiple_specs.2.spec"), - FileResult = ct_testspec:collect_tests_from_file([SpecFile1,SpecFile2], + FileResult = ct_testspec:collect_tests_from_file([[SpecFile1,SpecFile2]], false), ct:pal("TESTSPEC RECORD FROM FILE:~n~p~n", [rec2proplist(FileResult)]), @@ -524,7 +524,7 @@ 
multiple_specs(_Config) -> %%% misc_config_terms(_Config) -> CfgDir = "../cfgs/to1", - + TODir = "../tests/to1", Spec = [{node,x,n1@h1},{node,y,n2@h2}, @@ -554,7 +554,9 @@ misc_config_terms(_Config) -> {create_priv_dir,[auto_per_tc]}, {create_priv_dir,n1@h1,[manual_per_tc]}, - {create_priv_dir,n2@h2,[auto_per_run]} + {create_priv_dir,n2@h2,[auto_per_run]}, + + {suites,n1@h1,TODir,[x_SUITE]} ], {ok,SpecDir} = file:get_cwd(), @@ -599,7 +601,9 @@ misc_config_terms(_Config) -> {n2@h2,CSS2}], create_priv_dir = [{Node,[auto_per_tc]}, {n1@h1,[manual_per_tc]}, - {n2@h2,[auto_per_run]}] + {n2@h2,[auto_per_run]}], + tests = [{{n1@h1,get_absdir(filename:join(SpecDir,TODir))}, + [{x_SUITE,[all]}]}] }, verify_result(Verify,ListResult,FileResult). @@ -688,10 +692,10 @@ define_names_1(_Config) -> %%% HELP FUNCTIONS %%%----------------------------------------------------------------- -verify_result(Verify,ListResult,FileResult) -> +verify_result(VerificationRec,ListResult,FileResult) -> {_,TSLTuples} = rec2proplist(ListResult), {_,TSFTuples} = rec2proplist(FileResult), - {_,VTuples} = rec2proplist(Verify), + {_,VTuples} = rec2proplist(VerificationRec), VResult = (catch lists:foldl(fun({Tag,Val},{[{Tag,Val}|TSL],[{Tag,Val}|TSF]}) -> {TSL,TSF}; @@ -720,6 +724,8 @@ read_config(S) -> rec2proplist(E={error,_What}) -> exit({invalid_testspec_record,E}); +rec2proplist([{Specs,Rec}]) when is_list(Specs) -> + rec2proplist(Rec); rec2proplist(Rec) -> [RecName|RecList] = tuple_to_list(Rec), FieldNames = -- cgit v1.2.3 From 6e8de699f548762e2e6894d347e9dc285db11841 Mon Sep 17 00:00:00 2001 From: Peter Andersson Date: Mon, 21 Jan 2013 17:14:55 +0100 Subject: Update CT Master OTP-9881 --- lib/common_test/src/ct_master.erl | 158 ++++++++++++++++++------------- lib/common_test/src/ct_testspec.erl | 88 +++++++++-------- lib/common_test/test/ct_master_SUITE.erl | 14 +-- 3 files changed, 143 insertions(+), 117 deletions(-) diff --git a/lib/common_test/src/ct_master.erl 
b/lib/common_test/src/ct_master.erl index f29eba605c..d58395b692 100644 --- a/lib/common_test/src/ct_master.erl +++ b/lib/common_test/src/ct_master.erl @@ -82,39 +82,48 @@ run_test(NodeOptsList) when is_list(NodeOptsList) -> %%% ExclNodes = [atom()] %%% %%% @doc Tests are spawned on the nodes as specified in TestSpecs. -%%% Each specification in TestSpec will be handled separately. It is however possible -%%% to also specify a list of specifications that should be merged into one before -%%% the tests are executed. Any test without a particular node specification will -%%% also be executed on the nodes in InclNodes. Nodes in the -%%% ExclNodes list will be excluded from the test. +%%% Each specification in TestSpec will be handled separately. It is however +%%% possible to also specify a list of specifications that should be merged +%%% into one before the tests are executed. Any test without a particular node +%%% specification will also be executed on the nodes in InclNodes. +%%% Nodes in the ExclNodes list will be excluded from the test. 
run([TS|TestSpecs],AllowUserTerms,InclNodes,ExclNodes) when is_list(TS), is_list(InclNodes), is_list(ExclNodes) -> - TS1 = - case TS of - List=[S|_] when is_list(S) -> List; - Spec -> [Spec] - end, - Result = - case catch ct_testspec:collect_tests_from_file(TS1,InclNodes,AllowUserTerms) of - {error,Reason} -> - {error,Reason}; - TSRec=#testspec{logdir=AllLogDirs, - config=StdCfgFiles, - userconfig=UserCfgFiles, - include=AllIncludes, - init=AllInitOpts, - event_handler=AllEvHs} -> - AllCfgFiles = {StdCfgFiles, UserCfgFiles}, - RunSkipPerNode = ct_testspec:prepare_tests(TSRec), - RunSkipPerNode2 = exclude_nodes(ExclNodes,RunSkipPerNode), - run_all(RunSkipPerNode2,AllLogDirs,AllCfgFiles,AllEvHs, - AllIncludes,[],[],AllInitOpts,TS1) - end, - [{TS,Result} | run(TestSpecs,AllowUserTerms,InclNodes,ExclNodes)]; + %% Note: [Spec] means run one test with Spec + %% [Spec1,Spec2] means run two tests separately + %% [[Spec1,Spec2]] means run one test, with the two specs merged + case catch ct_testspec:collect_tests_from_file([TS],InclNodes, + AllowUserTerms) of + {error,Reason} -> + [{error,Reason} | run(TestSpecs,AllowUserTerms,InclNodes,ExclNodes)]; + Tests -> + RunResult = + lists:map( + fun({Specs,TSRec=#testspec{logdir=AllLogDirs, + config=StdCfgFiles, + userconfig=UserCfgFiles, + include=AllIncludes, + init=AllInitOpts, + event_handler=AllEvHs}}) -> + AllCfgFiles = + {StdCfgFiles,UserCfgFiles}, + RunSkipPerNode = + ct_testspec:prepare_tests(TSRec), + RunSkipPerNode2 = + exclude_nodes(ExclNodes,RunSkipPerNode), + TSList = if is_integer(hd(TS)) -> [TS]; + true -> TS end, + {Specs,run_all(RunSkipPerNode2,AllLogDirs, + AllCfgFiles,AllEvHs, + AllIncludes,[],[],AllInitOpts,TSList)} + end, Tests), + RunResult ++ run(TestSpecs,AllowUserTerms,InclNodes,ExclNodes) + end; run([],_,_,_) -> []; -run(TS,AllowUserTerms,InclNodes,ExclNodes) when is_list(InclNodes), is_list(ExclNodes) -> +run(TS,AllowUserTerms,InclNodes,ExclNodes) when is_list(InclNodes), + is_list(ExclNodes) -> 
run([TS],AllowUserTerms,InclNodes,ExclNodes). %%%----------------------------------------------------------------- @@ -152,29 +161,32 @@ exclude_nodes([],RunSkipPerNode) -> %%% AllowUserTerms = bool() %%% Node = atom() %%% -%%% @doc Tests are spawned on Node according to TestSpecs. +%%% @doc Tests are spawned on Node according to +%%% TestSpecs. run_on_node([TS|TestSpecs],AllowUserTerms,Node) when is_list(TS),is_atom(Node) -> - TS1 = - case TS of - [List|_] when is_list(List) -> List; - Spec -> [Spec] - end, - Result = - case catch ct_testspec:collect_tests_from_file(TS1,[Node],AllowUserTerms) of - {error,Reason} -> - {error,Reason}; - TSRec=#testspec{logdir=AllLogDirs, - config=StdCfgFiles, - init=AllInitOpts, - include=AllIncludes, - userconfig=UserCfgFiles, - event_handler=AllEvHs} -> - AllCfgFiles = {StdCfgFiles, UserCfgFiles}, - {Run,Skip} = ct_testspec:prepare_tests(TSRec,Node), - run_all([{Node,Run,Skip}],AllLogDirs,AllCfgFiles,AllEvHs, - AllIncludes, [],[],AllInitOpts,TS1) - end, - [{TS,Result} | run_on_node(TestSpecs,AllowUserTerms,Node)]; + case catch ct_testspec:collect_tests_from_file([TS],[Node], + AllowUserTerms) of + {error,Reason} -> + [{error,Reason} | run_on_node(TestSpecs,AllowUserTerms,Node)]; + Tests -> + RunResult = + lists:map( + fun({Specs,TSRec=#testspec{logdir=AllLogDirs, + config=StdCfgFiles, + init=AllInitOpts, + include=AllIncludes, + userconfig=UserCfgFiles, + event_handler=AllEvHs}}) -> + AllCfgFiles = {StdCfgFiles,UserCfgFiles}, + {Run,Skip} = ct_testspec:prepare_tests(TSRec,Node), + TSList = if is_integer(hd(TS)) -> [TS]; + true -> TS end, + {Specs,run_all([{Node,Run,Skip}],AllLogDirs, + AllCfgFiles,AllEvHs, + AllIncludes, [],[],AllInitOpts,TSList)} + end, Tests), + RunResult ++ run_on_node(TestSpecs,AllowUserTerms,Node) + end; run_on_node([],_,_) -> []; run_on_node(TS,AllowUserTerms,Node) when is_atom(Node) -> @@ -245,7 +257,8 @@ run_all([],AllLogDirs,_,AllEvHs,_AllIncludes, false -> "." 
end, log(tty,"Master Logdir","~s",[MasterLogDir]), - start_master(lists:reverse(NodeOpts),Handlers,MasterLogDir,LogDirs,InitOptions,Specs), + start_master(lists:reverse(NodeOpts),Handlers,MasterLogDir, + LogDirs,InitOptions,Specs), ok. @@ -297,13 +310,15 @@ start_master(NodeOptsList) -> start_master(NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Specs) -> Master = spawn_link(?MODULE,init_master,[self(),NodeOptsList,EvHandlers, - MasterLogDir,LogDirs,InitOptions,Specs]), + MasterLogDir,LogDirs, + InitOptions,Specs]), receive {Master,Result} -> Result end. %%% @hidden -init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Specs) -> +init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs, + InitOptions,Specs) -> case whereis(ct_master) of undefined -> register(ct_master,self()), @@ -325,6 +340,7 @@ init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Spec {MLPid,_} = ct_master_logs:start(MasterLogDir, [N || {N,_} <- NodeOptsList]), log(all,"Master Logger process started","~w",[MLPid]), + case Specs of [] -> ok; _ -> @@ -359,7 +375,8 @@ init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Spec init_master1(Parent,NodeOptsList,InitOptions,LogDirs). 
init_master1(Parent,NodeOptsList,InitOptions,LogDirs) -> - {Inaccessible,NodeOptsList1,InitOptions1} = init_nodes(NodeOptsList,InitOptions), + {Inaccessible,NodeOptsList1,InitOptions1} = init_nodes(NodeOptsList, + InitOptions), case Inaccessible of [] -> init_master2(Parent,NodeOptsList,LogDirs); @@ -392,7 +409,8 @@ init_master2(Parent,NodeOptsList,LogDirs) -> fun({Node,Opts}) -> monitor_node(Node,true), log(all,"Test Info","Starting test(s) on ~p...",[Node]), - {spawn_link(Node,?MODULE,init_node_ctrl,[self(),Cookie,Opts]),Node} + {spawn_link(Node,?MODULE,init_node_ctrl,[self(),Cookie,Opts]), + Node} end, NodeCtrlPids = lists:map(SpawnAndMon,NodeOptsList), Result = master_loop(#state{node_ctrl_pids=NodeCtrlPids, @@ -404,7 +422,8 @@ master_loop(#state{node_ctrl_pids=[], results=Finished}) -> Str = lists:map(fun({Node,Result}) -> - io_lib:format("~-40.40.*s~p\n",[$_,atom_to_list(Node),Result]) + io_lib:format("~-40.40.*s~p\n", + [$_,atom_to_list(Node),Result]) end,lists:reverse(Finished)), log(all,"TEST RESULTS",Str,[]), log(all,"Info","Updating log files",[]), @@ -437,11 +456,13 @@ master_loop(State=#state{node_ctrl_pids=NodeCtrlPids, Bad end, log(all,"Test Info", - "Test on node ~w failed! Reason: ~p",[Node,Error]), + "Test on node ~w failed! 
Reason: ~p", + [Node,Error]), {Locks1,Blocked1} = update_queue(exit,Node,Locks,Blocked), master_loop(State#state{node_ctrl_pids=NodeCtrlPids1, - results=[{Node,Error}|Results], + results=[{Node, + Error}|Results], locks=Locks1, blocked=Blocked1}) end; @@ -752,7 +773,8 @@ start_nodes(InitOptions)-> IsAlive = lists:member(NodeName, nodes()), case {HasNodeStart, IsAlive} of {false, false}-> - io:format("WARNING: Node ~p is not alive but has no node_start option~n", [NodeName]); + io:format("WARNING: Node ~p is not alive but has no " + "node_start option~n", [NodeName]); {false, true}-> io:format("Node ~p is alive~n", [NodeName]); {true, false}-> @@ -761,12 +783,15 @@ start_nodes(InitOptions)-> lists:keytake(callback_module, 1, NodeStart), case Callback:start(Host, Node, NodeStart2) of {ok, NodeName} -> - io:format("Node ~p started successfully with callback ~p~n", [NodeName,Callback]); + io:format("Node ~p started successfully " + "with callback ~p~n", [NodeName,Callback]); {error, Reason, _NodeName} -> - io:format("Failed to start node ~p with callback ~p! Reason: ~p~n", [NodeName, Callback, Reason]) + io:format("Failed to start node ~p with callback ~p! " + "Reason: ~p~n", [NodeName, Callback, Reason]) end; {true, true}-> - io:format("WARNING: Node ~p is alive but has node_start option~n", [NodeName]) + io:format("WARNING: Node ~p is alive but has node_start " + "option~n", [NodeName]) end end, InitOptions). 
@@ -779,7 +804,8 @@ eval_on_nodes(InitOptions)-> {false,_}-> ok; {true,false}-> - io:format("WARNING: Node ~p is not alive but has eval option ~n", [NodeName]); + io:format("WARNING: Node ~p is not alive but has eval " + "option ~n", [NodeName]); {true,true}-> {eval, MFAs} = lists:keyfind(eval, 1, Options), evaluate(NodeName, MFAs) @@ -790,9 +816,11 @@ eval_on_nodes(InitOptions)-> evaluate(Node, [{M,F,A}|MFAs])-> case rpc:call(Node, M, F, A) of {badrpc,Reason}-> - io:format("WARNING: Failed to call ~p:~p/~p on node ~p due to ~p~n", [M,F,length(A),Node,Reason]); + io:format("WARNING: Failed to call ~p:~p/~p on node ~p " + "due to ~p~n", [M,F,length(A),Node,Reason]); Result-> - io:format("Called ~p:~p/~p on node ~p, result: ~p~n", [M,F,length(A),Node,Result]) + io:format("Called ~p:~p/~p on node ~p, result: ~p~n", + [M,F,length(A),Node,Result]) end, evaluate(Node, MFAs); evaluate(_Node, [])-> diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl index 3d9351b71b..e698f13b9a 100644 --- a/lib/common_test/src/ct_testspec.erl +++ b/lib/common_test/src/ct_testspec.erl @@ -257,55 +257,54 @@ collect_tests_from_file(Specs,Nodes,Relaxed) when is_list(Nodes) -> (_) -> true end, try create_specs(Specs1,TS0,Relaxed,JoinSpecs,{[],TS0},[]) of - {{[],_},AdditionalTestSpecs} -> - lists:filter(Filter,AdditionalTestSpecs); - {{_,#testspec{tests=[]}},AdditionalTestSpecs} -> - lists:filter(Filter,AdditionalTestSpecs); - {{JoinedSpecs,JoinedTestSpec},AdditionalTestSpecs} -> + {{[],_},SeparateTestSpecs} -> + lists:filter(Filter,SeparateTestSpecs); + {{_,#testspec{tests=[]}},SeparateTestSpecs} -> + lists:filter(Filter,SeparateTestSpecs); + {{JoinedSpecs,JoinedTestSpec},SeparateTestSpecs} -> [{JoinedSpecs,JoinedTestSpec} | - lists:filter(Filter,AdditionalTestSpecs)] + lists:filter(Filter,SeparateTestSpecs)] catch _:Error -> Error end. 
-create_specs([],_,_,_,Joined,Additional) -> - {Joined,Additional}; +create_specs([],_,_,_,Joined,Separate) -> + {Joined,Separate}; create_specs([Spec|Ss],TestSpec,Relaxed,JoinSpecs, - Joined={JSpecs,_},Additional) -> + Joined={JSpecs,_},Separate) -> SpecDir = filename:dirname(filename:absname(Spec)), TestSpec1 = TestSpec#testspec{spec_dir=SpecDir}, case file:consult(Spec) of {ok,Terms} -> Terms1 = replace_names(Terms), - {Specs2Join,Specs2Add} = get_included_specs(Terms1,TestSpec1), - TestSpec2 = create_spec(Terms1,TestSpec1, - Relaxed,JoinSpecs), - case {JoinSpecs,Specs2Join,Specs2Add} of + {Specs2Join,SepSpecs} = get_included_specs(Terms1,TestSpec1), + TestSpec2 = create_spec(Terms1,TestSpec1,Relaxed), + case {JoinSpecs,Specs2Join,SepSpecs} of {true,[],[]} -> create_specs(Ss,TestSpec2,Relaxed,JoinSpecs, {JSpecs++[get_absdir(Spec,TestSpec2)], - TestSpec2},Additional); + TestSpec2},Separate); {false,[],[]} -> create_specs(Ss,TestSpec,Relaxed,JoinSpecs,Joined, - Additional++[{[get_absdir(Spec,TestSpec2)], + Separate++[{[get_absdir(Spec,TestSpec2)], TestSpec2}]); _ -> - {{JSpecs1,JTS1},Additional1} = + {{JSpecs1,JTS1},Separate1} = create_specs(Specs2Join,TestSpec2,Relaxed,true, {[get_absdir(Spec,TestSpec2)], TestSpec2},[]), - {Joined2,Additional2} = - create_specs(Specs2Add,TestSpec,Relaxed,false, + {Joined2,Separate2} = + create_specs(SepSpecs,TestSpec,Relaxed,false, {[],TestSpec1},[]), NewJoined = {JSpecs++JSpecs1,JTS1}, - NewAdditional = Additional++Additional1++ - [Joined2 | Additional2], + NewSeparate = Separate++Separate1++ + [Joined2 | Separate2], NextTestSpec = if not JoinSpecs -> TestSpec; true -> JTS1 end, create_specs(Ss,NextTestSpec,Relaxed,JoinSpecs, - NewJoined,NewAdditional) + NewJoined,NewSeparate) end; {error,Reason} -> ReasonStr = @@ -314,10 +313,9 @@ create_specs([Spec|Ss],TestSpec,Relaxed,JoinSpecs, throw({error,{Spec,ReasonStr}}) end. 
-create_spec(Terms,TestSpec,Relaxed,JoinSpecs) -> +create_spec(Terms,TestSpec,Relaxed) -> TS = #testspec{tests=Tests, logdir=LogDirs} = collect_tests({false,Terms},TestSpec,Relaxed), - LogDirs1 = lists:delete(".",LogDirs) ++ ["."], TS#testspec{tests=lists:flatten(Tests), logdir=LogDirs1}. @@ -478,11 +476,11 @@ replace_names_in_node1(NodeStr,[]) -> NodeStr. %% look for other specification files, either to join with the -%% current spec, or execute as additional test runs +%% current spec, or execute as separate test runs get_included_specs(Terms,TestSpec) -> get_included_specs(Terms,TestSpec,[],[]). -get_included_specs([{specs,How,SpecOrSpecs}|Ts],TestSpec,Join,Add) -> +get_included_specs([{specs,How,SpecOrSpecs}|Ts],TestSpec,Join,Sep) -> Specs = case SpecOrSpecs of [File|_] when is_list(File) -> [get_absdir(Spec,TestSpec) || Spec <- SpecOrSpecs]; @@ -490,14 +488,14 @@ get_included_specs([{specs,How,SpecOrSpecs}|Ts],TestSpec,Join,Add) -> [get_absdir(SpecOrSpecs,TestSpec)] end, if How == join -> - get_included_specs(Ts,TestSpec,Join++Specs,Add); + get_included_specs(Ts,TestSpec,Join++Specs,Sep); true -> - get_included_specs(Ts,TestSpec,Join,Add++Specs) + get_included_specs(Ts,TestSpec,Join,Sep++Specs) end; -get_included_specs([_|Ts],TestSpec,Join,Add) -> - get_included_specs(Ts,TestSpec,Join,Add); -get_included_specs([],_,Join,Add) -> - {Join,Add}. +get_included_specs([_|Ts],TestSpec,Join,Sep) -> + get_included_specs(Ts,TestSpec,Join,Sep); +get_included_specs([],_,Join,Sep) -> + {Join,Sep}. 
%% global terms that will be used for analysing all other terms in the spec get_global([{merge_tests,Bool}|Ts],Spec) -> @@ -715,7 +713,7 @@ add_tests([{suites,all_nodes,Dir,Ss}|Ts],Spec) -> add_tests([{suites,Dir,Ss}|Ts],Spec) -> add_tests([{suites,all_nodes,Dir,Ss}|Ts],Spec); add_tests([{suites,Nodes,Dir,Ss}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,suites,[Dir,Ss],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,suites,[Dir,Ss],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{suites,Node,Dir,Ss}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -738,11 +736,11 @@ add_tests([{groups,Dir,Suite,Gs}|Ts],Spec) -> add_tests([{groups,Dir,Suite,Gs,{cases,TCs}}|Ts],Spec) -> add_tests([{groups,all_nodes,Dir,Suite,Gs,{cases,TCs}}|Ts],Spec); add_tests([{groups,Nodes,Dir,Suite,Gs}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,groups,[Dir,Suite,Gs],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,groups,[Dir,Suite,Gs],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{groups,Nodes,Dir,Suite,Gs,{cases,TCs}}|Ts], Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,groups,[Dir,Suite,Gs,{cases,TCs}],Ts, + Ts1 = per_node(Nodes,groups,[Dir,Suite,Gs,{cases,TCs}],Ts, Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{groups,Node,Dir,Suite,Gs}|Ts],Spec) -> @@ -766,7 +764,7 @@ add_tests([{cases,all_nodes,Dir,Suite,Cs}|Ts],Spec) -> add_tests([{cases,Dir,Suite,Cs}|Ts],Spec) -> add_tests([{cases,all_nodes,Dir,Suite,Cs}|Ts],Spec); add_tests([{cases,Nodes,Dir,Suite,Cs}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,cases,[Dir,Suite,Cs],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,cases,[Dir,Suite,Cs],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{cases,Node,Dir,Suite,Cs}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -781,7 +779,7 @@ add_tests([{skip_suites,all_nodes,Dir,Ss,Cmt}|Ts],Spec) -> add_tests([{skip_suites,Dir,Ss,Cmt}|Ts],Spec) -> add_tests([{skip_suites,all_nodes,Dir,Ss,Cmt}|Ts],Spec); 
add_tests([{skip_suites,Nodes,Dir,Ss,Cmt}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_suites,[Dir,Ss,Cmt],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,skip_suites,[Dir,Ss,Cmt],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_suites,Node,Dir,Ss,Cmt}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -802,11 +800,11 @@ add_tests([{skip_groups,Dir,Suite,Gs,Cmt}|Ts],Spec) -> add_tests([{skip_groups,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts],Spec) -> add_tests([{skip_groups,all_nodes,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts],Spec); add_tests([{skip_groups,Nodes,Dir,Suite,Gs,Cmt}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_groups,[Dir,Suite,Gs,Cmt],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,skip_groups,[Dir,Suite,Gs,Cmt],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_groups,Nodes,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts], Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_groups,[Dir,Suite,Gs,{cases,TCs},Cmt],Ts, + Ts1 = per_node(Nodes,skip_groups,[Dir,Suite,Gs,{cases,TCs},Cmt],Ts, Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_groups,Node,Dir,Suite,Gs,Cmt}|Ts],Spec) -> @@ -830,7 +828,7 @@ add_tests([{skip_cases,all_nodes,Dir,Suite,Cs,Cmt}|Ts],Spec) -> add_tests([{skip_cases,Dir,Suite,Cs,Cmt}|Ts],Spec) -> add_tests([{skip_cases,all_nodes,Dir,Suite,Cs,Cmt}|Ts],Spec); add_tests([{skip_cases,Nodes,Dir,Suite,Cs,Cmt}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_cases,[Dir,Suite,Cs,Cmt],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,skip_cases,[Dir,Suite,Cs,Cmt],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_cases,Node,Dir,Suite,Cs,Cmt}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -902,7 +900,7 @@ add_tests([{Tag,NodesOrOther,Data}|Ts],Spec) when is_list(NodesOrOther) -> case lists:all(fun(Test) -> is_node(Test,Spec#testspec.nodes) end, NodesOrOther) of true -> - Ts1 = separate(NodesOrOther,Tag,[Data],Ts,Spec#testspec.nodes), + Ts1 = 
per_node(NodesOrOther,Tag,[Data],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); false -> add_tests([{Tag,all_nodes,{NodesOrOther,Data}}|Ts],Spec) @@ -1058,12 +1056,12 @@ update_recorded(Tag,Node,Spec) -> end. %% create one test term per node -separate(Nodes,Tag,Data,Tests,Refs) -> - Separated = separate(Nodes,Tag,Data,Refs), +per_node(Nodes,Tag,Data,Tests,Refs) -> + Separated = per_node(Nodes,Tag,Data,Refs), Separated ++ Tests. -separate([N|Ns],Tag,Data,Refs) -> - [list_to_tuple([Tag,ref2node(N,Refs)|Data])|separate(Ns,Tag,Data,Refs)]; -separate([],_,_,_) -> +per_node([N|Ns],Tag,Data,Refs) -> + [list_to_tuple([Tag,ref2node(N,Refs)|Data])|per_node(Ns,Tag,Data,Refs)]; +per_node([],_,_,_) -> []. %% read the value for FieldName in record Rec#testspec diff --git a/lib/common_test/test/ct_master_SUITE.erl b/lib/common_test/test/ct_master_SUITE.erl index 56a343a96f..64d34a0c9b 100644 --- a/lib/common_test/test/ct_master_SUITE.erl +++ b/lib/common_test/test/ct_master_SUITE.erl @@ -109,7 +109,7 @@ ct_master_test(Config) when is_list(Config) -> ERPid = ct_test_support:start_event_receiver(Config), - [{TSFile,ok}] = run_test(ct_master_test, FileName, Config), + [{[TSFile],ok}] = run_test(ct_master_test, FileName, Config), Events = ct_test_support:get_events(ERPid, Config), @@ -192,12 +192,12 @@ get_log_dir(_,PrivDir,NodeName) -> run_test(_Name, FileName, Config) -> %% run the test twice, using different html versions - [{FileName,ok}] = ct_test_support:run({ct_master,run,[FileName]}, - [{ct_master,basic_html,[true]}], - Config), - [{FileName,ok}] = ct_test_support:run({ct_master,run,[FileName]}, - [{ct_master,basic_html,[false]}], - Config). + [{[FileName],ok}] = ct_test_support:run({ct_master,run,[FileName]}, + [{ct_master,basic_html,[true]}], + Config), + [{[FileName],ok}] = ct_test_support:run({ct_master,run,[FileName]}, + [{ct_master,basic_html,[false]}], + Config). reformat(Events, EH) -> ct_test_support:reformat(Events, EH). 
-- cgit v1.2.3 From f48550f96253588fcc643fcd5774a3c3ccb49b8b Mon Sep 17 00:00:00 2001 From: Peter Andersson Date: Tue, 22 Jan 2013 01:23:41 +0100 Subject: Add tests and correct errors OTP-9881 --- lib/common_test/src/ct_config.erl | 5 +- lib/common_test/src/ct_testspec.erl | 266 +++++---- lib/common_test/test/Makefile | 1 + lib/common_test/test/ct_test_support.erl | 2 + lib/common_test/test/ct_testspec_2_SUITE.erl | 2 +- lib/common_test/test/ct_testspec_3_SUITE.erl | 607 +++++++++++++++++++++ .../test/ct_testspec_3_SUITE_data/config1/cfg11 | 1 + .../test/ct_testspec_3_SUITE_data/config1/cfg12 | 1 + .../test/ct_testspec_3_SUITE_data/config1/cfg13 | 1 + .../test/ct_testspec_3_SUITE_data/config2/cfg21 | 1 + .../ct_testspec_3_SUITE_data/specs1/flat_spec1 | 4 + .../ct_testspec_3_SUITE_data/specs1/rec_spec_join1 | 2 + .../specs1/rec_spec_join_sep1 | 5 + .../ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 | 2 + .../specs1/rec_spec_sep_join1 | 2 + .../ct_testspec_3_SUITE_data/specs1/spec_both1 | 2 + .../specs1/spec_both_join1 | 6 + .../ct_testspec_3_SUITE_data/specs1/spec_join1 | 2 + .../test/ct_testspec_3_SUITE_data/specs1/spec_sep1 | 3 + .../ct_testspec_3_SUITE_data/specs2/flat_spec2 | 5 + .../ct_testspec_3_SUITE_data/specs2/rec_spec_join2 | 5 + .../specs2/rec_spec_join_sep2 | 5 + .../ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 | 5 + .../specs2/rec_spec_sep_join2 | 5 + .../ct_testspec_3_SUITE_data/specs2/spec_both2 | 4 + .../specs2/spec_both_join2 | 9 + .../ct_testspec_3_SUITE_data/specs2/spec_join2 | 5 + .../test/ct_testspec_3_SUITE_data/specs2/spec_sep2 | 5 + .../ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl | 172 ++++++ .../ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl | 171 ++++++ .../ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl | 171 ++++++ .../ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl | 173 ++++++ .../ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl | 158 ++++++ 33 files changed, 1714 insertions(+), 94 deletions(-) create mode 100644 
lib/common_test/test/ct_testspec_3_SUITE.erl create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl 
create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl create mode 100644 lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl diff --git a/lib/common_test/src/ct_config.erl b/lib/common_test/src/ct_config.erl index b1d709bc75..ac4ffbb236 100644 --- a/lib/common_test/src/ct_config.erl +++ b/lib/common_test/src/ct_config.erl @@ -266,7 +266,10 @@ read_config_files_int([{Callback, File}|Files], FunToSave) -> read_config_files_int([], _FunToSave) -> ok. -store_config(Config, Callback, File) -> +store_config(Config, Callback, File) when is_tuple(Config) -> + store_config([Config], Callback, File); + +store_config(Config, Callback, File) when is_list(Config) -> [ets:insert(?attr_table, #ct_conf{key=Key, value=Val, diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl index e698f13b9a..abd82a3176 100644 --- a/lib/common_test/src/ct_testspec.erl +++ b/lib/common_test/src/ct_testspec.erl @@ -93,7 +93,7 @@ prepare_tests(TestSpec) when is_record(TestSpec,testspec) -> %% run_per_node/2 takes the Run list as input and returns a list %% of {Node,RunPerNode,[]} tuples where the tests have been sorted %% on a per node basis. 
-run_per_node([{{Node,Dir},Test}|Ts],Result, MergeTests) -> +run_per_node([{{Node,Dir},Test}|Ts],Result,MergeTests) -> {value,{Node,{Run,Skip}}} = lists:keysearch(Node,1,Result), Run1 = case MergeTests of false -> @@ -190,7 +190,7 @@ prepare_suites(_Node,_Dir,[],Run,Skip) -> prepare_cases(Node,Dir,Suite,Cases) -> case get_skipped_cases(Node,Dir,Suite,Cases) of - SkipAll=[{{Node,Dir},{Suite,_Cmt}}] -> % all cases to be skipped + SkipAll=[{{Node,Dir},{Suite,_Cmt}}] -> % all cases to be skipped %% note: this adds an 'all' test even if only skip is specified {[{{Node,Dir},{Suite,all}}],SkipAll}; Skipped -> @@ -248,70 +248,125 @@ collect_tests_from_file(Specs,Nodes,Relaxed) when is_list(Nodes) -> NodeRefs = lists:map(fun(N) -> {undefined,N} end, Nodes), %% [Spec1,Spec2,...] means create one testpec record per Spec file %% [[Spec1,Spec2,...]] means merge all specs into one testspec record - {JoinSpecs,Specs1} = if is_list(hd(hd(Specs))) -> {true,hd(Specs)}; + {Join,Specs1} = if is_list(hd(hd(Specs))) -> {true,hd(Specs)}; true -> {false,Specs} end, + Specs2 = [filename:absname(S) || S <- Specs1], TS0 = #testspec{nodes=NodeRefs}, - %% remove specs without tests - Filter = fun({_,#testspec{tests=[]}}) -> false; - (_) -> true - end, - try create_specs(Specs1,TS0,Relaxed,JoinSpecs,{[],TS0},[]) of + + try create_specs(Specs2,TS0,Relaxed,Join) of {{[],_},SeparateTestSpecs} -> - lists:filter(Filter,SeparateTestSpecs); + filter_and_convert(SeparateTestSpecs); {{_,#testspec{tests=[]}},SeparateTestSpecs} -> - lists:filter(Filter,SeparateTestSpecs); - {{JoinedSpecs,JoinedTestSpec},SeparateTestSpecs} -> - [{JoinedSpecs,JoinedTestSpec} | - lists:filter(Filter,SeparateTestSpecs)] + filter_and_convert(SeparateTestSpecs); + {Joined,SeparateTestSpecs} -> + [filter_and_convert(Joined) | + filter_and_convert(SeparateTestSpecs)] catch _:Error -> Error end. 
-create_specs([],_,_,_,Joined,Separate) -> - {Joined,Separate}; -create_specs([Spec|Ss],TestSpec,Relaxed,JoinSpecs, - Joined={JSpecs,_},Separate) -> +filter_and_convert(Joined) when is_tuple(Joined) -> + hd(filter_and_convert([Joined])); +filter_and_convert([{_,#testspec{tests=[]}}|TSs]) -> + filter_and_convert(TSs); +filter_and_convert([{[{SpecFile,MergeTests}|SMs],TestSpec}|TSs]) -> + #testspec{config = CfgFiles} = TestSpec, + TestSpec1 = TestSpec#testspec{config = delete_dups(CfgFiles), + merge_tests = MergeTests}, + %% set the merge_tests value for the testspec to the value + %% of the first test spec in the set + [{[SpecFile | [SF || {SF,_} <- SMs]], TestSpec1} | filter_and_convert(TSs)]; +filter_and_convert([]) -> + []. + +delete_dups(Elems) -> + delete_dups1(lists:reverse(Elems),[]). + +delete_dups1([E|Es],Keep) -> + case lists:member(E,Es) of + true -> + delete_dups1(Es,Keep); + false -> + delete_dups1(Es,[E|Keep]) + end; +delete_dups1([],Keep) -> + Keep. + +create_specs(Specs,TestSpec,Relaxed,Join) -> + SpecsTree = create_spec_tree(Specs,TestSpec,Join,[]), + create_specs(SpecsTree,TestSpec,Relaxed). 
+ +create_spec_tree([Spec|Specs],TS,JoinWithNext,Known) -> SpecDir = filename:dirname(filename:absname(Spec)), - TestSpec1 = TestSpec#testspec{spec_dir=SpecDir}, - case file:consult(Spec) of - {ok,Terms} -> - Terms1 = replace_names(Terms), - {Specs2Join,SepSpecs} = get_included_specs(Terms1,TestSpec1), - TestSpec2 = create_spec(Terms1,TestSpec1,Relaxed), - case {JoinSpecs,Specs2Join,SepSpecs} of - {true,[],[]} -> - create_specs(Ss,TestSpec2,Relaxed,JoinSpecs, - {JSpecs++[get_absdir(Spec,TestSpec2)], - TestSpec2},Separate); - {false,[],[]} -> - create_specs(Ss,TestSpec,Relaxed,JoinSpecs,Joined, - Separate++[{[get_absdir(Spec,TestSpec2)], - TestSpec2}]); - _ -> - {{JSpecs1,JTS1},Separate1} = - create_specs(Specs2Join,TestSpec2,Relaxed,true, - {[get_absdir(Spec,TestSpec2)], - TestSpec2},[]), - {Joined2,Separate2} = - create_specs(SepSpecs,TestSpec,Relaxed,false, - {[],TestSpec1},[]), - NewJoined = {JSpecs++JSpecs1,JTS1}, - NewSeparate = Separate++Separate1++ - [Joined2 | Separate2], - NextTestSpec = if not JoinSpecs -> TestSpec; - true -> JTS1 - end, - create_specs(Ss,NextTestSpec,Relaxed,JoinSpecs, - NewJoined,NewSeparate) - end; - {error,Reason} -> - ReasonStr = - lists:flatten(io_lib:format("~s", - [file:format_error(Reason)])), - throw({error,{Spec,ReasonStr}}) - end. 
+ TS1 = TS#testspec{spec_dir=SpecDir}, + SpecAbsName = get_absfile(Spec,TS1), + case lists:member(SpecAbsName,Known) of + true -> + throw({error,{cyclic_reference,SpecAbsName}}); + false -> + case file:consult(SpecAbsName) of + {ok,Terms} -> + Terms1 = replace_names(Terms), + {InclJoin,InclSep} = get_included_specs(Terms1,TS1), + {SpecAbsName,Terms1, + create_spec_tree(InclJoin,TS,true,[SpecAbsName|Known]), + create_spec_tree(InclSep,TS,false,[SpecAbsName|Known]), + JoinWithNext, + create_spec_tree(Specs,TS,JoinWithNext,Known)}; + {error,Reason} -> + ReasonStr = + lists:flatten(io_lib:format("~s", + [file:format_error(Reason)])), + throw({error,{SpecAbsName,ReasonStr}}) + end + end; +create_spec_tree([],_TS,_JoinWithNext,_Known) -> + []. + +create_specs({Spec,Terms,InclJoin,InclSep,JoinWithNext,NextSpec}, + TestSpec,Relaxed) -> + SpecDir = filename:dirname(filename:absname(Spec)), + TestSpec1 = create_spec(Terms,TestSpec#testspec{spec_dir=SpecDir},Relaxed), + + {{JoinSpecs1,JoinTS1},Separate1} = create_specs(InclJoin,TestSpec1,Relaxed), + {{JoinSpecs2,JoinTS2},Separate2} = + case JoinWithNext of + true -> + create_specs(NextSpec,JoinTS1,Relaxed); + false -> + {{[],JoinTS1},[]} + end, + {SepJoinSpecs,Separate3} = create_specs(InclSep,TestSpec,Relaxed), + {SepJoinSpecs1,Separate4} = + case JoinWithNext of + true -> + {{[],TestSpec},[]}; + false -> + create_specs(NextSpec,TestSpec,Relaxed) + end, + + SpecInfo = {Spec,TestSpec1#testspec.merge_tests}, + AllSeparate = + [TSData || TSData = {Ss,_TS} <- Separate3++Separate1++ + [SepJoinSpecs]++Separate2++ + Separate4++[SepJoinSpecs1], + Ss /= []], + + case {JoinWithNext,JoinSpecs1} of + {true,_} -> + {{[SpecInfo|(JoinSpecs1++JoinSpecs2)],JoinTS2}, + AllSeparate}; + {false,[]} -> + {{[],TestSpec}, + [{[SpecInfo],TestSpec1}|AllSeparate]}; + {false,_} -> + {{[SpecInfo|(JoinSpecs1++JoinSpecs2)],JoinTS2}, + AllSeparate} + end; +create_specs([],TestSpec,_Relaxed) -> + {{[],TestSpec},[]}. 
create_spec(Terms,TestSpec,Relaxed) -> TS = #testspec{tests=Tests, logdir=LogDirs} = @@ -345,7 +400,8 @@ collect_tests({Replace,Terms},TestSpec=#testspec{alias=As,nodes=Ns},Relaxed) -> %% reverse nodes and aliases initially to get the order of them right %% in case this spec is being joined with a previous one TestSpec1 = get_global(Terms1,TestSpec#testspec{alias = lists:reverse(As), - nodes = lists:reverse(Ns)}), + nodes = lists:reverse(Ns), + merge_tests = true}), TestSpec2 = get_all_nodes(Terms1,TestSpec1), {Terms2, TestSpec3} = filter_init_terms(Terms1, [], TestSpec2), add_tests(Terms2,TestSpec3). @@ -483,9 +539,9 @@ get_included_specs(Terms,TestSpec) -> get_included_specs([{specs,How,SpecOrSpecs}|Ts],TestSpec,Join,Sep) -> Specs = case SpecOrSpecs of [File|_] when is_list(File) -> - [get_absdir(Spec,TestSpec) || Spec <- SpecOrSpecs]; + [get_absfile(Spec,TestSpec) || Spec <- SpecOrSpecs]; [Ch|_] when is_integer(Ch) -> - [get_absdir(SpecOrSpecs,TestSpec)] + [get_absfile(SpecOrSpecs,TestSpec)] end, if How == join -> get_included_specs(Ts,TestSpec,Join++Specs,Sep); @@ -1118,14 +1174,21 @@ insert_groups(Node,Dir,Suite,Groups,Cases,Tests,true) when {[Gr],Cases}; true -> {Gr,Cases} end || Gr <- Groups], - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},[{all,_}]}} -> - Tests; - {value,{{Node,Dir},Suites0}} -> - Suites1 = insert_groups1(Suite,Groups1,Suites0), - insert_in_order({{Node,Dir},Suites1},Tests); - false -> - insert_in_order({{Node,Dir},[{Suite,Groups1}]},Tests) + {Tests1,Done} = + lists:foldr(fun(All={{N,D},[{all,_}]},{Replaced,_}) when N == Node, + D == Dir -> + {[All|Replaced],true}; + ({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = insert_groups1(Suite,Groups1,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},[{Suite,Groups1}]}]; + true -> + Tests1 end; insert_groups(Node,Dir,Suite,Groups,Case,Tests, 
MergeTests) when is_atom(Case) -> @@ -1163,14 +1226,21 @@ insert_groups2([],GrAndCases) -> insert_cases(Node,Dir,Suite,Cases,Tests,false) when is_list(Cases) -> append({{Node,Dir},[{Suite,Cases}]},Tests); insert_cases(Node,Dir,Suite,Cases,Tests,true) when is_list(Cases) -> - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},[{all,_}]}} -> - Tests; - {value,{{Node,Dir},Suites0}} -> - Suites1 = insert_cases1(Suite,Cases,Suites0), - insert_in_order({{Node,Dir},Suites1},Tests); - false -> - insert_in_order({{Node,Dir},[{Suite,Cases}]},Tests) + {Tests1,Done} = + lists:foldr(fun(All={{N,D},[{all,_}]},{Replaced,_}) when N == Node, + D == Dir -> + {[All|Replaced],true}; + ({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = insert_cases1(Suite,Cases,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},[{Suite,Cases}]}]; + true -> + Tests1 end; insert_cases(Node,Dir,Suite,Case,Tests,MergeTests) when is_atom(Case) -> insert_cases(Node,Dir,Suite,[Case],Tests,MergeTests). 
@@ -1211,15 +1281,23 @@ skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,false) when append({{Node,Dir},Suites1},Tests); skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,true) when ((Cases == all) or is_list(Cases)) and is_list(Groups) -> - Suites = - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},Suites0}} -> - Suites0; - false -> - [] - end, - Suites1 = skip_groups1(Suite,[{Gr,Cases} || Gr <- Groups],Cmt,Suites), - insert_in_order({{Node,Dir},Suites1},Tests); + {Tests1,Done} = + lists:foldr(fun({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = skip_groups1(Suite, + [{Gr,Cases} || Gr <- Groups], + Cmt,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},skip_groups1(Suite, + [{Gr,Cases} || Gr <- Groups], + Cmt,[])}]; + true -> + Tests1 + end; skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests,MergeTests) when is_atom(Case) -> Cases = if Case == all -> all; true -> [Case] end, @@ -1241,15 +1319,19 @@ skip_cases(Node,Dir,Suite,Cases,Cmt,Tests,false) when is_list(Cases) -> Suites1 = skip_cases1(Suite,Cases,Cmt,[]), append({{Node,Dir},Suites1},Tests); skip_cases(Node,Dir,Suite,Cases,Cmt,Tests,true) when is_list(Cases) -> - Suites = - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},Suites0}} -> - Suites0; - false -> - [] - end, - Suites1 = skip_cases1(Suite,Cases,Cmt,Suites), - insert_in_order({{Node,Dir},Suites1},Tests); + {Tests1,Done} = + lists:foldr(fun({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = skip_cases1(Suite,Cases,Cmt,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},skip_cases1(Suite,Cases,Cmt,[])}]; + true -> + Tests1 + end; skip_cases(Node,Dir,Suite,Case,Cmt,Tests,MergeTests) when is_atom(Case) -> skip_cases(Node,Dir,Suite,[Case],Cmt,Tests,MergeTests). 
diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile index d469d03e04..760cc20410 100644 --- a/lib/common_test/test/Makefile +++ b/lib/common_test/test/Makefile @@ -40,6 +40,7 @@ MODULES= \ ct_repeat_1_SUITE \ ct_testspec_1_SUITE \ ct_testspec_2_SUITE \ + ct_testspec_3_SUITE \ ct_skip_SUITE \ ct_error_SUITE \ ct_test_server_if_1_SUITE \ diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl index fc572aa82f..1319fa4154 100644 --- a/lib/common_test/test/ct_test_support.erl +++ b/lib/common_test/test/ct_test_support.erl @@ -1132,6 +1132,8 @@ reformat([{_EH,#event{name=test_start,data=_}} | Events], EH) -> [{EH,test_start,{'DEF',{'START_TIME','LOGDIR'}}} | reformat(Events, EH)]; reformat([{_EH,#event{name=test_done,data=_}} | Events], EH) -> [{EH,test_done,{'DEF','STOP_TIME'}} | reformat(Events, EH)]; +reformat([{_EH,#event{name=tc_logfile,data=_}} | Events], EH) -> + reformat(Events, EH); reformat([{_EH,#event{name=test_stats,data=Data}} | Events], EH) -> [{EH,test_stats,Data} | reformat(Events, EH)]; %% use this to only print the last test_stats event: diff --git a/lib/common_test/test/ct_testspec_2_SUITE.erl b/lib/common_test/test/ct_testspec_2_SUITE.erl index c150686482..ea22312a7d 100644 --- a/lib/common_test/test/ct_testspec_2_SUITE.erl +++ b/lib/common_test/test/ct_testspec_2_SUITE.erl @@ -490,7 +490,7 @@ multiple_specs(_Config) -> [{Node2,get_absdir(filename:join(SpecDir,CfgDir))} || CfgDir <- CfgDir2]], LogDirV = get_absdir(filename:join(SpecDir,"../logs")), - Verify = #testspec{merge_tests = false, + Verify = #testspec{merge_tests = true, spec_dir = SpecDir, nodes = [{undefined,Node},{n1,Node1},{n2,Node2}], alias = [{to1,TO1V},{to2,TO2V}], diff --git a/lib/common_test/test/ct_testspec_3_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE.erl new file mode 100644 index 0000000000..c8774fae7b --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE.erl @@ -0,0 +1,607 @@ +%% +%% 
%CopyrightBegin% +%% +%% Copyright Ericsson AB 2009-2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File: ct_testspec_3_SUITE +%%% +%%% Description: +%%% Test test specifications +%%% +%%% The suites used for the test are located in the data directory. +%%%------------------------------------------------------------------- +-module(ct_testspec_3_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +-include_lib("common_test/include/ct_event.hrl"). + +-define(eh, ct_test_support_eh). + +%%-------------------------------------------------------------------- +%% TEST SERVER CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- + +%%-------------------------------------------------------------------- +%% Description: Since Common Test starts another Test Server +%% instance, the tests need to be performed on a separate node (or +%% there will be clashes with logging processes etc). +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + DataDir = ?config(data_dir, Config), + Config1 = ct_test_support:init_per_suite(Config), + SpecsDir1 = filename:join(DataDir, "specs1"), + SpecsDir2 = filename:join(DataDir, "specs2"), + [{specs_dir1,SpecsDir1},{specs_dir2,SpecsDir2} | Config1]. 
+ +end_per_suite(Config) -> + ct_test_support:end_per_suite(Config). + +init_per_testcase(TestCase, Config) -> + ct_test_support:init_per_testcase(TestCase, Config). + +end_per_testcase(TestCase, Config) -> + ct_test_support:end_per_testcase(TestCase, Config). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [start_separate, + start_join, + incl_separate1, + incl_separate2, + incl_join1, + incl_join2, + incl_both1, + incl_both2, + incl_both_and_join1, + incl_both_and_join2, + rec_incl_separate1, + rec_incl_separate2, + rec_incl_join1, + rec_incl_join2, + rec_incl_separate_join1, + rec_incl_separate_join2, + rec_incl_join_separate1, + rec_incl_join_separate2 + ]. + +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%%%----------------------------------------------------------------- +%%% + +start_separate(Config) -> + Specs = [fname(specs_dir1, "flat_spec1", Config), + fname(specs_dir2, "flat_spec2", Config)], + setup_and_execute(start_separate, Specs, [], Config). + +%%%----------------------------------------------------------------- +%%% + +start_join(Config) -> + Specs = [fname(specs_dir1, "flat_spec1", Config), + fname(specs_dir2, "flat_spec2", Config)], + setup_and_execute(start_join, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_separate1(Config) -> + Specs = [fname(specs_dir1, "spec_sep1", Config), + fname(specs_dir2, "spec_sep2", Config)], + setup_and_execute(incl_separate1, Specs, [], Config). + +incl_separate2(Config) -> + Specs = [fname(specs_dir1, "spec_sep1", Config), + fname(specs_dir2, "spec_sep2", Config)], + setup_and_execute(incl_separate2, Specs, [{join_specs,true}], Config). 
+ +%%%----------------------------------------------------------------- +%%% + +incl_join1(Config) -> + Specs = [fname(specs_dir1, "spec_join1", Config), + fname(specs_dir2, "spec_join2", Config)], + setup_and_execute(incl_join1, Specs, [], Config). + +incl_join2(Config) -> + Specs = [fname(specs_dir1, "spec_join1", Config), + fname(specs_dir2, "spec_join2", Config)], + setup_and_execute(incl_join2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_both1(Config) -> + Specs = [fname(specs_dir1, "spec_both1", Config), + fname(specs_dir2, "spec_both2", Config)], + setup_and_execute(incl_both1, Specs, [], Config). + +incl_both2(Config) -> + Specs = [fname(specs_dir1, "spec_both1", Config), + fname(specs_dir2, "spec_both2", Config)], + setup_and_execute(incl_both2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_both_and_join1(Config) -> + Specs = [fname(specs_dir1, "spec_both_join1", Config), + fname(specs_dir2, "spec_both_join2", Config)], + setup_and_execute(incl_both_and_join1, Specs, [], Config). + +incl_both_and_join2(Config) -> + Specs = [fname(specs_dir1, "spec_both_join1", Config), + fname(specs_dir2, "spec_both_join2", Config)], + setup_and_execute(incl_both_and_join2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +rec_incl_separate1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep1", Config), + fname(specs_dir2, "rec_spec_sep2", Config)], + setup_and_execute(rec_incl_separate1, Specs, [], Config). + +rec_incl_separate2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep1", Config), + fname(specs_dir2, "rec_spec_sep2", Config)], + setup_and_execute(rec_incl_separate2, Specs, [{join_specs,true}], Config). 
+ +%%%----------------------------------------------------------------- +%%% + +rec_incl_join1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join1", Config), + fname(specs_dir2, "rec_spec_join2", Config)], + setup_and_execute(rec_incl_join1, Specs, [], Config). + +rec_incl_join2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join1", Config), + fname(specs_dir2, "rec_spec_join2", Config)], + setup_and_execute(rec_incl_join2, Specs, [{join_specs,true}], Config). + + +%%%----------------------------------------------------------------- +%%% + +rec_incl_separate_join1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep_join1", Config), + fname(specs_dir2, "rec_spec_sep_join2", Config)], + setup_and_execute(rec_incl_separate_join1, Specs, [], Config). + +rec_incl_separate_join2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep_join1", Config), + fname(specs_dir2, "rec_spec_sep_join2", Config)], + setup_and_execute(rec_incl_separate_join2, Specs, + [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +rec_incl_join_separate1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join_sep1", Config), + fname(specs_dir2, "rec_spec_join_sep2", Config)], + setup_and_execute(rec_incl_join_separate1, Specs, [], Config). + +rec_incl_join_separate2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join_sep1", Config), + fname(specs_dir2, "rec_spec_join_sep2", Config)], + setup_and_execute(rec_incl_join_separate2, Specs, + [{join_specs,true}], Config). + + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- + +fname(Tag, File, Config) -> + filename:join(?config(Tag, Config), File). + +check_parameter(TCID) -> + {ok,{config,TCID}}. + +read_config(TCID) -> + {ok,[{tcname,list_to_atom(TCID)}]}. 
+ +setup_and_execute(TCName, Specs, TestOpts, Config) -> + + TestID = {userconfig,{?MODULE,atom_to_list(TCName)}}, + TestTerms = [TestID,{spec,Specs},{label,TCName}] ++ TestOpts, + + {Opts,ERPid} = setup(TestTerms, Config), + + case ct_test_support:run(Opts, Config) of + ok -> + ok; + Error -> + ct:pal("Error executing with opts: ~p", [Opts]), + exit(Error) + end, + + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(TCName, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(TCName), + ok = ct_test_support:verify_events(TestEvents, Events, Config). + +setup(Test, Config) when is_tuple(Test) -> + setup([Test], Config); +setup(Tests, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts0 ++ Tests ++ [{event_handler,{?eh,EvHArgs}}], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). +%reformat(Events, _EH) -> +% Events. + +%%%----------------------------------------------------------------- +%%% TEST EVENTS +%%%----------------------------------------------------------------- +events_to_check(Test) -> + %% 2 tests (ct:run_test + script_start) is default + events_to_check(Test, 2). + +events_to_check(_, 0) -> + []; +events_to_check(Test, N) -> + test_events(Test) ++ events_to_check(Test, N-1). 
+ +test_events(start_separate) -> + [{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(start_join) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + 
{?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_separate1) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + 
{?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{1,1,5}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_separate2) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + 
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{1,1,5}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + 
{?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_join1) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + 
{?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_join2) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,tc_start,{t11_SUITE,ok_tc}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + 
{?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(_) -> + []. + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 new file mode 100644 index 0000000000..bc672568da --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 @@ -0,0 +1 @@ +{file, cfg11}. \ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 new file mode 100644 index 0000000000..30f2cf6857 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 @@ -0,0 +1 @@ +{file, cfg12}. \ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 new file mode 100644 index 0000000000..1860ec78e5 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 @@ -0,0 +1 @@ +{file, cfg13}. \ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 b/lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 new file mode 100644 index 0000000000..b18d35443e --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 @@ -0,0 +1 @@ +{file, cfg21}. 
\ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 new file mode 100644 index 0000000000..eff87222ea --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 @@ -0,0 +1,4 @@ +{config, "../config1/cfg11"}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests2", t21_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 new file mode 100644 index 0000000000..a3387f48a3 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 @@ -0,0 +1,2 @@ +{specs,join,"spec_join1"}. +{specs,join,"../specs2/spec_join2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 new file mode 100644 index 0000000000..fe127eb4b9 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 @@ -0,0 +1,5 @@ +{specs,join,"spec_sep1"}. +{specs,join,"../specs2/spec_sep2"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 new file mode 100644 index 0000000000..c778aa68a6 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 @@ -0,0 +1,2 @@ +{specs,separate,"spec_sep1"}. +{specs,separate,"../specs2/spec_sep2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 new file mode 100644 index 0000000000..7cb5a05fff --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 @@ -0,0 +1,2 @@ +{specs,separate,"spec_join1"}. 
+{specs,separate,"../specs2/spec_join2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 new file mode 100644 index 0000000000..46111614dc --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 @@ -0,0 +1,2 @@ +{specs, join, "../specs1/flat_spec1"}. +{specs, separate, "../specs2/flat_spec2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 new file mode 100644 index 0000000000..f52b3ed030 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 @@ -0,0 +1,6 @@ +{specs, join, "../specs1/flat_spec1"}. +{specs, separate, "../specs2/flat_spec2"}. +{merge_tests,false}. +{config, "../config1/cfg12"}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests2", t21_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 new file mode 100644 index 0000000000..736b380b0b --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 @@ -0,0 +1,2 @@ +{specs, join, "../specs2/flat_spec2"}. +{specs, join, "flat_spec1"}. \ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 new file mode 100644 index 0000000000..89456c35e0 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 @@ -0,0 +1,3 @@ +{specs, separate, "../specs2/flat_spec2"}. +{specs, separate, "flat_spec1"}. 
+ diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 new file mode 100644 index 0000000000..758d1e2514 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 @@ -0,0 +1,5 @@ +{merge_tests, false}. +{config, "../config2/cfg21"}. +{suites, "../tests1", t12_SUITE}. +{suites, "../tests1", t12_SUITE}. +{suites, "../tests2", t22_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 new file mode 100644 index 0000000000..19d3a3d8e2 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 @@ -0,0 +1,5 @@ +{specs,join,"spec_join2"}. +{specs,join,"../specs1/spec_join1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 new file mode 100644 index 0000000000..930e68c847 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 @@ -0,0 +1,5 @@ +{specs,join,"spec_sep2"}. +{specs,join,"../specs1/spec_sep1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 new file mode 100644 index 0000000000..5026f329a7 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 @@ -0,0 +1,5 @@ +{specs,separate,"spec_sep2"}. +{specs,separate,"../specs1/spec_sep1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. 
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 new file mode 100644 index 0000000000..17057088b4 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 @@ -0,0 +1,5 @@ +{specs,separate,"spec_join2"}. +{specs,separate,"../specs1/spec_join1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 new file mode 100644 index 0000000000..4c83115d23 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 @@ -0,0 +1,4 @@ +{specs, separate, "../specs1/flat_spec1"}. +{specs, join, "../specs2/flat_spec2"}. +{config, "../config1/cfg12"}. + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 new file mode 100644 index 0000000000..ad81bfb4cc --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 @@ -0,0 +1,9 @@ +{merge_tests,true}. +{config, "../config1/cfg12"}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests2", t21_SUITE}. +{suites, "../tests1", t12_SUITE}. +{suites, "../tests2", t22_SUITE}. + +{specs, separate, "../specs1/flat_spec1"}. +{specs, join, "../specs2/flat_spec2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 new file mode 100644 index 0000000000..47c0286052 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 @@ -0,0 +1,5 @@ +{specs, join, "../specs1/flat_spec1"}. +{specs, join, "flat_spec2"}. +{config, "../config1/cfg12"}. +{suites, "../tests2", t22_SUITE}. 
+ diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 new file mode 100644 index 0000000000..8d37f508b8 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 @@ -0,0 +1,5 @@ +{specs, separate, "../specs1/flat_spec1"}. +{specs, separate, "flat_spec2"}. +{config, "../config1/cfg12"}. +{suites, "../tests2", t22_SUITE}. + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl new file mode 100644 index 0000000000..4026273700 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl @@ -0,0 +1,172 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t11_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,1}}]. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg11]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg11]} -> ok; + {incl_separate2,[cfg11]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + _ -> ok + + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. 
+ +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl new file mode 100644 index 0000000000..25692d3e00 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl @@ -0,0 +1,171 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t12_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,30}}]. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg21]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg21]} -> ok; + {incl_separate2,[cfg21]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + _ -> ok + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. 
+ +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl new file mode 100644 index 0000000000..4850151fa5 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl @@ -0,0 +1,171 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t21_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,1}}]. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg11]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg11]} -> ok; + {incl_separate2,[cfg11]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + _ -> ok + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. 
+ +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl new file mode 100644 index 0000000000..78c5d7ca31 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl @@ -0,0 +1,173 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t22_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,30}}]. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg21]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg12]} -> ok; + {incl_separate1,[cfg21]} -> ok; + {incl_separate2,[cfg12]} -> ok; + {incl_separate2,[cfg21]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + _ -> ok + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. 
+ +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl new file mode 100644 index 0000000000..d01fac3144 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl @@ -0,0 +1,158 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t23_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{timetrap,{seconds,30}}]. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. 
+ +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. + +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. 
+ + + + -- cgit v1.2.3 From 9230b38f318d916258168fafb2878f225fb3052b Mon Sep 17 00:00:00 2001 From: Peter Andersson Date: Thu, 24 Jan 2013 12:20:02 +0100 Subject: Update documentation OTP-9881 --- lib/common_test/doc/src/ct_run.xml | 1 + lib/common_test/doc/src/run_test_chapter.xml | 817 ++++++++++++--------- lib/common_test/src/ct.erl | 4 +- .../ct_testspec_3_SUITE_data/specs1/spec_join1 | 3 +- .../ct_testspec_3_SUITE_data/specs2/spec_join2 | 4 +- 5 files changed, 462 insertions(+), 367 deletions(-) diff --git a/lib/common_test/doc/src/ct_run.xml b/lib/common_test/doc/src/ct_run.xml index 0750f560b3..198290c1be 100644 --- a/lib/common_test/doc/src/ct_run.xml +++ b/lib/common_test/doc/src/ct_run.xml @@ -126,6 +126,7 @@ Run tests using test specification
 	ct_run -spec TestSpec1 TestSpec2 .. TestSpecN
+	[-join_specs]
 	[-config ConfigFile1 ConfigFile2 .. ConfigFileN]
 	[-userconfig CallbackModule1 ConfigString1 and CallbackModule2
          ConfigString2 and .. and CallbackModuleN ConfigStringN]
diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml
index d5f5d89e05..35f89153d3 100644
--- a/lib/common_test/doc/src/run_test_chapter.xml
+++ b/lib/common_test/doc/src/run_test_chapter.xml
@@ -530,374 +530,469 @@
   
   
Test Specifications - -

The most flexible way to specify what to test, is to use a so - called test specification. A test specification is a sequence of - Erlang terms. The terms are normally declared in a text file (see - ct:run_test/1), but - may also be passed to Common Test on the form of a list (see - ct:run_testspec/1). - There are two general types of terms: configuration terms and test - specification terms.

-

With configuration terms it is possible to e.g. label the test - run (similar to ct_run -label), evaluate arbitrary expressions - before starting the test, import configuration data (similar to - ct_run -config/-userconfig), specify the top level HTML log - directory (similar to ct_run -logdir), enable code coverage - analysis (similar to ct_run -cover), install Common Test Hooks - (similar to ct_run -ch_hooks), install event_handler plugins - (similar to ct_run -event_handler), specify include directories - that should be passed to the compiler for automatic compilation - (similar to ct_run -include), disable the auto compilation - feature (similar to ct_run -no_auto_compile), set verbosity - levels (similar to ct_run -verbosity), and more.

-

Configuration terms can be combined with ct_run start flags, - or ct:run_test/1 options. The result will for some flags/options - and terms be that the values are merged (e.g. configuration files, - include directories, verbosity levels, silent connections), and for - others that the start flags/options override the test specification - terms (e.g. log directory, label, style sheet, auto compilation).

-

With test specification terms it is possible to state exactly - which tests should run and in which order. A test term specifies - either one or more suites, one or more test case groups (possibly nested), - or one or more test cases in a group (or in multiple groups) or in a suite.

-

An arbitrary number of test terms may be declared in sequence. - Common Test will by default compile the terms into one or more tests - to be performed in one resulting test run. Note that a term that - specifies a set of test cases will "swallow" one that only - specifies a subset of these cases. E.g. the result of merging - one term that specifies that all cases in suite S should be - executed, with another term specifying only test case X and Y in - S, is a test of all cases in S. However, if a term specifying - test case X and Y in S is merged with a term specifying case Z - in S, the result is a test of X, Y and Z in S. To disable this - behaviour, i.e. to instead perform each test sequentially in a "script-like" - manner, the term merge_tests can be set to false in - the test specification.

-

A test term can also specify one or more test suites, groups, - or test cases to be skipped. Skipped suites, groups and cases - are not executed and show up in the HTML log files as - SKIPPED.

-

When a test case group is specified, the resulting test - executes the init_per_group function, followed by all test - cases and sub groups (including their configuration functions), and - finally the end_per_group function. Also if particular - test cases in a group are specified, init_per_group - and end_per_group for the group in question are - called. If a group which is defined (in Suite:group/0) to - be a sub group of another group, is specified (or if particular test - cases of a sub group are), Common Test will call the configuration - functions for the top level groups as well as for the sub group - in question (making it possible to pass configuration data all - the way from init_per_suite down to the test cases in the - sub group).

-

The test specification utilizes the same mechanism for specifying - test case groups by means of names and paths, as explained in the - Group Execution - section above, with the addition of the GroupSpec element - described next.

-

The GroupSpec element makes it possible to specify - group execution properties that will override those in the - group definition (i.e. in groups/0). Execution properties for - sub-groups may be overridden as well. This feature makes it possible to - change properties of groups at the time of execution, - without even having to edit the test suite. The very same - feature is available for group elements in the Suite:all/0 - list. Therefore, more detailed documentation, and examples, can be - found in the - Test case groups chapter.

- -

Below is the test specification syntax. Test specifications can - be used to run tests both in a single test host environment and - in a distributed Common Test environment (Large Scale - Testing). The node parameters in the init term are only - relevant in the latter (see the - Large - Scale Testing chapter for information). For more information - about the various terms, please see the corresponding sections in the - User's Guide, such as e.g. the - ct_run - program for an overview of available start flags - (since most flags have a corresponding configuration term), and - more detailed explanation of e.g. - Logging - (for the verbosity, stylesheet and basic_html terms), - External Configuration Data - (for the config and userconfig terms), - Event - Handling (for the event_handler term), - Common Test Hooks - (for the ct_hooks term), etc.

-

Config terms:

-
-      {merge_tests, Bool}.
-
-      {define, Constant, Value}.
-
-      {node, NodeAlias, Node}.
-
-      {init, InitOptions}.
-      {init, [NodeAlias], InitOptions}.
-
-      {label, Label}.
-      {label, NodeRefs, Label}.
-
-      {verbosity, VerbosityLevels}.
-      {verbosity, NodeRefs, VerbosityLevels}.
-
-      {stylesheet, CSSFile}.
-      {stylesheet, NodeRefs, CSSFile}.
-
-      {silent_connections, ConnTypes}.
-      {silent_connections, NodeRefs, ConnTypes}.
-
-      {multiply_timetraps, N}.
-      {multiply_timetraps, NodeRefs, N}.
-
-      {scale_timetraps, Bool}.
-      {scale_timetraps, NodeRefs, Bool}.
- 
-      {cover, CoverSpecFile}.
-      {cover, NodeRefs, CoverSpecFile}.
-      
-      {cover_stop, Bool}.
-      {cover_stop, NodeRefs, Bool}.
-
-      {include, IncludeDirs}.
-      {include, NodeRefs, IncludeDirs}.
-
-      {auto_compile, Bool},
-      {auto_compile, NodeRefs, Bool},
-
-      {config, ConfigFiles}.
-      {config, ConfigDir, ConfigBaseNames}.
-      {config, NodeRefs, ConfigFiles}.
-      {config, NodeRefs, ConfigDir, ConfigBaseNames}.
-
-      {userconfig, {CallbackModule, ConfigStrings}}.
-      {userconfig, NodeRefs, {CallbackModule, ConfigStrings}}.
-      
-      {logdir, LogDir}.                                        
-      {logdir, NodeRefs, LogDir}.
-
-      {logopts, LogOpts}.
-      {logopts, NodeRefs, LogOpts}.
-
-      {create_priv_dir, PrivDirOption}.
-      {create_priv_dir, NodeRefs, PrivDirOption}.
-      
-      {event_handler, EventHandlers}.
-      {event_handler, NodeRefs, EventHandlers}.
-      {event_handler, EventHandlers, InitArgs}.
-      {event_handler, NodeRefs, EventHandlers, InitArgs}.
-
-      {ct_hooks, CTHModules}.
-      {ct_hooks, NodeRefs, CTHModules}.
-
-      {enable_builtin_hooks, Bool}.
-      
-      {basic_html, Bool}.
-      {basic_html, NodeRefs, Bool}.
+    
+ General description +

The most flexible way to specify what to test, is to use a so + called test specification. A test specification is a sequence of + Erlang terms. The terms are normally declared in one or more text files + (see ct:run_test/1), but + may also be passed to Common Test on the form of a list (see + ct:run_testspec/1). + There are two general types of terms: configuration terms and test + specification terms.

+

With configuration terms it is possible to e.g. label the test + run (similar to ct_run -label), evaluate arbitrary expressions + before starting the test, import configuration data (similar to + ct_run -config/-userconfig), specify the top level HTML log + directory (similar to ct_run -logdir), enable code coverage + analysis (similar to ct_run -cover), install Common Test Hooks + (similar to ct_run -ch_hooks), install event_handler plugins + (similar to ct_run -event_handler), specify include directories + that should be passed to the compiler for automatic compilation + (similar to ct_run -include), disable the auto compilation + feature (similar to ct_run -no_auto_compile), set verbosity + levels (similar to ct_run -verbosity), and more.

+

Configuration terms can be combined with ct_run start flags, + or ct:run_test/1 options. The result will for some flags/options + and terms be that the values are merged (e.g. configuration files, + include directories, verbosity levels, silent connections), and for + others that the start flags/options override the test specification + terms (e.g. log directory, label, style sheet, auto compilation).

+

With test specification terms it is possible to state exactly + which tests should run and in which order. A test term specifies + either one or more suites, one or more test case groups (possibly nested), + or one or more test cases in a group (or in multiple groups) or in a suite.

+

An arbitrary number of test terms may be declared in sequence. + Common Test will by default compile the terms into one or more tests + to be performed in one resulting test run. Note that a term that + specifies a set of test cases will "swallow" one that only + specifies a subset of these cases. E.g. the result of merging + one term that specifies that all cases in suite S should be + executed, with another term specifying only test case X and Y in + S, is a test of all cases in S. However, if a term specifying + test case X and Y in S is merged with a term specifying case Z + in S, the result is a test of X, Y and Z in S. To disable this + behaviour, i.e. to instead perform each test sequentially in a "script-like" + manner, the term merge_tests can be set to false in + the test specification.

+

A test term can also specify one or more test suites, groups, + or test cases to be skipped. Skipped suites, groups and cases + are not executed and show up in the HTML log files as + SKIPPED.

+
+
+ Using multiple test specification files + +

When multiple test specification files are given at startup (either + with ct_run -spec file1 file2 ... or + ct:run_test([{spec, [File1,File2,...]}])), + Common Test will either execute one test run per specification file, or + join the files and perform all tests within one single test run. The first + behaviour is the default one. The latter requires that the start + flag/option join_specs is provided, e.g. + run_test -spec ./my_tests1.ts ./my_tests2.ts -join_specs.

+ +

Joining a number of specifications, or running them separately, can + also be accomplished with (and may be combined with) test specification + file inclusion, described next.

+
+
+ Test specification file inclusion +

With the specs term (see syntax below), it's possible to have + a test specification include other specifications. An included + specification may either be joined with the source specification, + or used to produce a separate test run (like with the join_specs + start flag/option above). Example:

+
+	%% In specification file "a.spec"
+	{specs, join, ["b.spec", "c.spec"]}.
+	{specs, separate, ["d.spec", "e.spec"]}.
+	%% Config and test terms follow
+	...
+

In this example, the test terms defined in files "b.spec" and "c.spec" + will be joined with the terms in the source specification "a.spec" + (if any). The inclusion of specifications "d.spec" and + "e.spec" will result in two separate, and independent, test runs (i.e. + one for each included specification).

+

Note that the join option does not imply that the test terms + will be merged (see merge_tests above), only that all tests are + executed in one single test run.

+

Joined specifications share common configuration settings, such as + the list of config files or include directories. + For configuration that can not be combined, such as settings for logdir + or verbosity, it is up to the user to ensure there are no clashes + when the test specifications are joined. Specifications included with + the separate option, do not share configuration settings with the + source specification. This is useful e.g. if there are clashing + configuration settings in included specifications, making it impossible + to join them.

+

If {merge_tests,true} is set in the source specification + (which is the default setting), terms in joined specifications will be + merged with terms in the source specification (according to the + description of merge_tests above).

+

Note that it is always the merge_tests setting in the source + specification that is used when joined with other specifications. + Say e.g. that a source specification A, with tests TA1 and TA2, has + {merge_tests,false} set, and it includes another specification, + B, with tests TB1 and TB2, that has {merge_tests,true} set. + The result will be that the test series: TA1,TA2,merge(TB1,TB2), + is executed. The opposite merge_tests settings would result in the + following test series: merge(merge(TA1,TA2),TB1,TB2).

+

The specs term may of course be used to nest specifications, + i.e. have one specification include other specifications, which in turn + include others, etc.

+
+
+ Test case groups + +

When a test case group is specified, the resulting test + executes the init_per_group function, followed by all test + cases and sub groups (including their configuration functions), and + finally the end_per_group function. Also if particular + test cases in a group are specified, init_per_group + and end_per_group for the group in question are + called. If a group which is defined (in Suite:group/0) to + be a sub group of another group, is specified (or if particular test + cases of a sub group are), Common Test will call the configuration + functions for the top level groups as well as for the sub group + in question (making it possible to pass configuration data all + the way from init_per_suite down to the test cases in the + sub group).

+

The test specification utilizes the same mechanism for specifying + test case groups by means of names and paths, as explained in the + Group Execution + section above, with the addition of the GroupSpec element + described next.

+

The GroupSpec element makes it possible to specify + group execution properties that will override those in the + group definition (i.e. in groups/0). Execution properties for + sub-groups may be overridden as well. This feature makes it possible to + change properties of groups at the time of execution, + without even having to edit the test suite. The very same + feature is available for group elements in the Suite:all/0 + list. Therefore, more detailed documentation, and examples, can be + found in the + Test case groups chapter.

+
- {release_shell, Bool}.
+
+ Test specification syntax + +

Below is the test specification syntax. Test specifications can + be used to run tests both in a single test host environment and + in a distributed Common Test environment (Large Scale + Testing). The node parameters in the init term are only + relevant in the latter (see the + Large + Scale Testing chapter for information). For more information + about the various terms, please see the corresponding sections in the + User's Guide, such as e.g. the + ct_run + program for an overview of available start flags + (since most flags have a corresponding configuration term), and + more detailed explanation of e.g. + Logging + (for the verbosity, stylesheet and basic_html terms), + External Configuration Data + (for the config and userconfig terms), + Event + Handling (for the event_handler term), + Common Test Hooks + (for the ct_hooks term), etc.

+
+

Config terms:

+
+	{merge_tests, Bool}.
+	
+	{define, Constant, Value}.
+	
+	{specs, InclSpecsOption, TestSpecs}.
+	
+	{node, NodeAlias, Node}.
+	
+	{init, InitOptions}.
+	{init, [NodeAlias], InitOptions}.
+	
+	{label, Label}.
+	{label, NodeRefs, Label}.
+	
+	{verbosity, VerbosityLevels}.
+	{verbosity, NodeRefs, VerbosityLevels}.
+	
+	{stylesheet, CSSFile}.
+	{stylesheet, NodeRefs, CSSFile}.
+	
+	{silent_connections, ConnTypes}.
+	{silent_connections, NodeRefs, ConnTypes}.
+	
+	{multiply_timetraps, N}.
+	{multiply_timetraps, NodeRefs, N}.
+	
+	{scale_timetraps, Bool}.
+	{scale_timetraps, NodeRefs, Bool}.
+	
+	{cover, CoverSpecFile}.
+	{cover, NodeRefs, CoverSpecFile}.
+	
+	{cover_stop, Bool}.
+	{cover_stop, NodeRefs, Bool}.
+	
+	{include, IncludeDirs}.
+	{include, NodeRefs, IncludeDirs}.
+	
+	{auto_compile, Bool},
+	{auto_compile, NodeRefs, Bool},
+	
+	{config, ConfigFiles}.
+	{config, ConfigDir, ConfigBaseNames}.
+	{config, NodeRefs, ConfigFiles}.
+	{config, NodeRefs, ConfigDir, ConfigBaseNames}.
+	
+	{userconfig, {CallbackModule, ConfigStrings}}.
+	{userconfig, NodeRefs, {CallbackModule, ConfigStrings}}.
+	
+	{logdir, LogDir}.                                        
+	{logdir, NodeRefs, LogDir}.
+	
+	{logopts, LogOpts}.
+	{logopts, NodeRefs, LogOpts}.
+	
+	{create_priv_dir, PrivDirOption}.
+	{create_priv_dir, NodeRefs, PrivDirOption}.
+	
+	{event_handler, EventHandlers}.
+	{event_handler, NodeRefs, EventHandlers}.
+	{event_handler, EventHandlers, InitArgs}.
+	{event_handler, NodeRefs, EventHandlers, InitArgs}.
+	
+	{ct_hooks, CTHModules}.
+	{ct_hooks, NodeRefs, CTHModules}.
+	
+	{enable_builtin_hooks, Bool}.
+	
+	{basic_html, Bool}.
+	{basic_html, NodeRefs, Bool}.
+	
+        {release_shell, Bool}.
+

Test terms:

-
-      {suites, Dir, Suites}.                                
-      {suites, NodeRefs, Dir, Suites}.
-      
-      {groups, Dir, Suite, Groups}.
-      {groups, NodeRefs, Dir, Suite, Groups}.
-
-      {groups, Dir, Suite, Groups, {cases,Cases}}.
-      {groups, NodeRefs, Dir, Suite, Groups, {cases,Cases}}.
-
-      {cases, Dir, Suite, Cases}.                           
-      {cases, NodeRefs, Dir, Suite, Cases}.
-
-      {skip_suites, Dir, Suites, Comment}.
-      {skip_suites, NodeRefs, Dir, Suites, Comment}.
-
-      {skip_groups, Dir, Suite, GroupNames, Comment}.
-      {skip_groups, NodeRefs, Dir, Suite, GroupNames, Comment}.
-      
-      {skip_cases, Dir, Suite, Cases, Comment}.
-      {skip_cases, NodeRefs, Dir, Suite, Cases, Comment}.
- +
+	{suites, Dir, Suites}.                                
+	{suites, NodeRefs, Dir, Suites}.
+	
+	{groups, Dir, Suite, Groups}.
+	{groups, NodeRefs, Dir, Suite, Groups}.
+	
+	{groups, Dir, Suite, Groups, {cases,Cases}}.
+	{groups, NodeRefs, Dir, Suite, Groups, {cases,Cases}}.
+	
+	{cases, Dir, Suite, Cases}.                           
+	{cases, NodeRefs, Dir, Suite, Cases}.
+	
+	{skip_suites, Dir, Suites, Comment}.
+	{skip_suites, NodeRefs, Dir, Suites, Comment}.
+	
+	{skip_groups, Dir, Suite, GroupNames, Comment}.
+	{skip_groups, NodeRefs, Dir, Suite, GroupNames, Comment}.
+	
+	{skip_cases, Dir, Suite, Cases, Comment}.
+        {skip_cases, NodeRefs, Dir, Suite, Cases, Comment}.
+

Types:

-
-      Bool            = true | false
-      Constant        = atom()
-      Value           = term()
-      NodeAlias       = atom()
-      Node            = node()
-      NodeRef         = NodeAlias | Node | master
-      NodeRefs        = all_nodes | [NodeRef] | NodeRef
-      InitOptions     = term()
-      Label           = atom() | string()
-      VerbosityLevels = integer() | [{Category,integer()}]
-      Category        = atom()
-      CSSFile         = string()
-      ConnTypes       = all | [atom()]
-      N               = integer()
-      CoverSpecFile   = string()
-      IncludeDirs     = string() | [string()]
-      ConfigFiles     = string() | [string()]
-      ConfigDir       = string()
-      ConfigBaseNames = string() | [string()]
-      CallbackModule  = atom()
-      ConfigStrings   = string() | [string()]
-      LogDir          = string()
-      LogOpts         = [term()]
-      PrivDirOption   = auto_per_run | auto_per_tc | manual_per_tc
-      EventHandlers   = atom() | [atom()]
-      InitArgs        = [term()]
-      CTHModules      = [CTHModule |
-                         {CTHModule, CTHInitArgs} |
-                         {CTHModule, CTHInitArgs, CTHPriority}]
-      CTHModule       = atom()
-      CTHInitArgs     = term()
-      Dir             = string()
-      Suites          = atom() | [atom()] | all
-      Suite           = atom()
-      Groups          = GroupPath | [GroupPath] | GroupSpec | [GroupSpec] | all
-      GroupPath       = [GroupName]
-      GroupSpec       = GroupName | {GroupName,Properties} | {GroupName,Properties,GroupSpec}
-      GroupName       = atom()
-      GroupNames      = GroupName | [GroupName]
-      Cases           = atom() | [atom()] | all
-      Comment         = string() | ""
- -

The difference between the config terms above, is that with - ConfigDir, ConfigBaseNames is a list of base names, - i.e. without directory paths. ConfigFiles must be full names, - including paths. E.g, these two terms have the same meaning:

-
-      {config, ["/home/testuser/tests/config/nodeA.cfg",
-                "/home/testuser/tests/config/nodeB.cfg"]}.
-
-      {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}.
- -

Any relative paths specified in the test specification, will be - relative to the directory which contains the test specification file, if - ct_run -spec TestSpecFile ... or - ct:run:test([{spec,TestSpecFile},...]) - executes the test. The path will be relative to the top level log directory, if - ct:run:testspec(TestSpec) executes the test.

- -

The define term introduces a constant, which is used to - replace the name Constant with Value, wherever it's found in - the test specification. This replacement happens during an initial iteration - through the test specification. Constants may be used anywhere in the test - specification, e.g. in arbitrary lists and tuples, and even in strings - and inside the value part of other constant definitions! A constant can - also be part of a node name, but that is the only place where a constant - can be part of an atom.

- -

For the sake of readability, the name of the constant must always - begin with an upper case letter, or a $, ?, or _. - This also means that it must always be single quoted (obviously, since - the constant name is actually an atom, not text).

- -

The main benefit of constants is that they can be used to reduce the size - (and avoid repetition) of long strings, such as file paths. Compare these - terms:

- -
-      %% 1a. no constant
-      {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}.
-      {suites, "/home/testuser/tests/suites", all}.
-      
-      %% 1b. with constant
-      {define, 'TESTDIR', "/home/testuser/tests"}.
-      {config, "'TESTDIR'/config", ["nodeA.cfg","nodeB.cfg"]}.
-      {suites, "'TESTDIR'/suites", all}.
-
-      %% 2a. no constants
-      {config, [testnode@host1, testnode@host2], "../config", ["nodeA.cfg","nodeB.cfg"]}.
-      {suites, [testnode@host1, testnode@host2], "../suites", [x_SUITE, y_SUITE]}.
-
-      %% 2b. with constants
-      {define, 'NODE', testnode}.
-      {define, 'NODES', ['NODE'@host1, 'NODE'@host2]}.
-      {config, 'NODES', "../config", ["nodeA.cfg","nodeB.cfg"]}.
-      {suites, 'NODES', "../suites", [x_SUITE, y_SUITE]}.
- -

Constants make the test specification term alias, in previous - versions of Common Test, redundant. This term has been deprecated but will - remain supported in upcoming Common Test releases. Replacing alias - terms with define is strongly recommended though! Here's an example - of such a replacement:

+
+	Bool            = true | false
+	Constant        = atom()
+	Value           = term()
+	InclSpecsOption = join | separate
+	TestSpecs       = string() | [string()]
+	NodeAlias       = atom()
+	Node            = node()
+	NodeRef         = NodeAlias | Node | master
+	NodeRefs        = all_nodes | [NodeRef] | NodeRef
+	InitOptions     = term()
+	Label           = atom() | string()
+	VerbosityLevels = integer() | [{Category,integer()}]
+	Category        = atom()
+	CSSFile         = string()
+	ConnTypes       = all | [atom()]
+	N               = integer()
+	CoverSpecFile   = string()
+	IncludeDirs     = string() | [string()]
+	ConfigFiles     = string() | [string()]
+	ConfigDir       = string()
+	ConfigBaseNames = string() | [string()]
+	CallbackModule  = atom()
+	ConfigStrings   = string() | [string()]
+	LogDir          = string()
+	LogOpts         = [term()]
+	PrivDirOption   = auto_per_run | auto_per_tc | manual_per_tc
+	EventHandlers   = atom() | [atom()]
+	InitArgs        = [term()]
+	CTHModules      = [CTHModule |
+	                   {CTHModule, CTHInitArgs} |
+	                   {CTHModule, CTHInitArgs, CTHPriority}]
+	CTHModule       = atom()
+	CTHInitArgs     = term()
+	Dir             = string()
+	Suites          = atom() | [atom()] | all
+	Suite           = atom()
+	Groups          = GroupPath | [GroupPath] | GroupSpec | [GroupSpec] | all
+	GroupPath       = [GroupName]
+	GroupSpec       = GroupName | {GroupName,Properties} | {GroupName,Properties,GroupSpec}
+	GroupName       = atom()
+	GroupNames      = GroupName | [GroupName]
+	Cases           = atom() | [atom()] | all
+        Comment         = string() | ""
+ +
+

The difference between the config terms above, is that with + ConfigDir, ConfigBaseNames is a list of base names, + i.e. without directory paths. ConfigFiles must be full names, + including paths. E.g, these two terms have the same meaning:

+
+	  {config, ["/home/testuser/tests/config/nodeA.cfg",
+	            "/home/testuser/tests/config/nodeB.cfg"]}.
+	  
+	  {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}.
+ +

Any relative paths specified in the test specification, will be + relative to the directory which contains the test specification file, if + ct_run -spec TestSpecFile ... or + ct:run_test([{spec,TestSpecFile},...]) + executes the test. The path will be relative to the top level log directory, if + ct:run_testspec(TestSpec) executes the test.

+
-
-      %% using the old alias term
-      {config, "/home/testuser/tests/config/nodeA.cfg"}.
-      {alias, suite_dir, "/home/testuser/tests/suites"}.
-      {groups, suite_dir, x_SUITE, group1}.
-
-      %% replacing with constants
-      {define, 'TestDir', "/home/testuser/tests"}.
-      {define, 'CfgDir', "'TestDir'/config"}.
-      {define, 'SuiteDir', "'TestDir'/suites"}.
-      {config, 'CfgDir', "nodeA.cfg"}.
-      {groups, 'SuiteDir', x_SUITE, group1}.
- -

Actually, constants could well replace the node term too, but - this still has declarative value, mainly when used in combination - with NodeRefs == all_nodes (see types above).

- -

Here follows a simple test specification example:

-
-      {define, 'Top', "/home/test"}.
-      {define, 'T1', "'Top'/t1"}.
-      {define, 'T2', "'Top'/t2"}.
-      {define, 'T3', "'Top'/t3"}.
-      {define, 'CfgFile', "config.cfg"}.
+	
+ Constants + +

The define term introduces a constant, which is used to + replace the name Constant with Value, wherever it's found in + the test specification. This replacement happens during an initial iteration + through the test specification. Constants may be used anywhere in the test + specification, e.g. in arbitrary lists and tuples, and even in strings + and inside the value part of other constant definitions! A constant can + also be part of a node name, but that is the only place where a constant + can be part of an atom.

+ +

For the sake of readability, the name of the constant must always + begin with an upper case letter, or a $, ?, or _. + This also means that it must always be single quoted (obviously, since + the constant name is actually an atom, not text).

+ +

The main benefit of constants is that they can be used to reduce the size + (and avoid repetition) of long strings, such as file paths. Compare these + terms:

+ +
+	    %% 1a. no constant
+	    {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}.
+	    {suites, "/home/testuser/tests/suites", all}.
+	    
+	    %% 1b. with constant
+	    {define, 'TESTDIR', "/home/testuser/tests"}.
+	    {config, "'TESTDIR'/config", ["nodeA.cfg","nodeB.cfg"]}.
+	    {suites, "'TESTDIR'/suites", all}.
+	    
+	    %% 2a. no constants
+	    {config, [testnode@host1, testnode@host2], "../config", ["nodeA.cfg","nodeB.cfg"]}.
+	    {suites, [testnode@host1, testnode@host2], "../suites", [x_SUITE, y_SUITE]}.
+	    
+	    %% 2b. with constants
+	    {define, 'NODE', testnode}.
+	    {define, 'NODES', ['NODE'@host1, 'NODE'@host2]}.
+	    {config, 'NODES', "../config", ["nodeA.cfg","nodeB.cfg"]}.
+	    {suites, 'NODES', "../suites", [x_SUITE, y_SUITE]}.
+ +

Constants make the test specification term alias, in previous + versions of Common Test, redundant. This term has been deprecated but will + remain supported in upcoming Common Test releases. Replacing alias + terms with define is strongly recommended though! Here's an example + of such a replacement:

+ +
+	      %% using the old alias term
+	      {config, "/home/testuser/tests/config/nodeA.cfg"}.
+	      {alias, suite_dir, "/home/testuser/tests/suites"}.
+	      {groups, suite_dir, x_SUITE, group1}.
+	      
+	      %% replacing with constants
+	      {define, 'TestDir', "/home/testuser/tests"}.
+	      {define, 'CfgDir', "'TestDir'/config"}.
+	      {define, 'SuiteDir', "'TestDir'/suites"}.
+	      {config, 'CfgDir', "nodeA.cfg"}.
+	      {groups, 'SuiteDir', x_SUITE, group1}.
+ +

Actually, constants could well replace the node term too, but + this still has declarative value, mainly when used in combination + with NodeRefs == all_nodes (see types above).

+
- {logdir, "'Top'/logs"}. - - {config, ["'T1'/'CfgFile'", "'T2'/'CfgFile'", "'T3'/'CfgFile'"]}. - - {suites, 'T1', all}. - {skip_suites, 'T1', [t1B_SUITE,t1D_SUITE], "Not implemented"}. - {skip_cases, 'T1', t1A_SUITE, [test3,test4], "Irrelevant"}. - {skip_cases, 'T1', t1C_SUITE, [test1], "Ignore"}. - - {suites, 'T2', [t2B_SUITE,t2C_SUITE]}. - {cases, 'T2', t2A_SUITE, [test4,test1,test7]}. - - {skip_suites, 'T3', all, "Not implemented"}.
+
+ Example + +

Here follows a simple test specification example:

+
+	    {define, 'Top', "/home/test"}.
+	    {define, 'T1', "'Top'/t1"}.
+	    {define, 'T2', "'Top'/t2"}.
+	    {define, 'T3', "'Top'/t3"}.
+	    {define, 'CfgFile', "config.cfg"}.
+	    
+	    {logdir, "'Top'/logs"}.
+	    
+	    {config, ["'T1'/'CfgFile'", "'T2'/'CfgFile'", "'T3'/'CfgFile'"]}.
+	    
+	    {suites, 'T1', all}.
+	    {skip_suites, 'T1', [t1B_SUITE,t1D_SUITE], "Not implemented"}.
+	    {skip_cases, 'T1', t1A_SUITE, [test3,test4], "Irrelevant"}.
+	    {skip_cases, 'T1', t1C_SUITE, [test1], "Ignore"}.
+	    
+	    {suites, 'T2', [t2B_SUITE,t2C_SUITE]}.
+	    {cases, 'T2', t2A_SUITE, [test4,test1,test7]}.
+	    
+	    {skip_suites, 'T3', all, "Not implemented"}.
+ +

The example specifies the following:

+ + The specified logdir directory will be used for storing + the HTML log files (in subdirectories tagged with node name, + date and time). + The variables in the specified test system config files will be + imported for the test. + The first test to run includes all suites for system t1. Excluded from + the test are however the t1B and t1D suites. Also test cases test3 and + test4 in t1A as well as the test1 case in t1C are excluded from + the test. + Secondly, the test for system t2 should run. The included suites are + t2B and t2C. Included are also test cases test4, test1 and test7 in suite + t2A. Note that the test cases will be executed in the specified order. + Lastly, all suites for systems t3 are to be completely skipped and this + should be explicitly noted in the log files. + +
-

The example specifies the following:

- - The specified logdir directory will be used for storing - the HTML log files (in subdirectories tagged with node name, - date and time). - The variables in the specified test system config files will be - imported for the test. - The first test to run includes all suites for system t1. Excluded from - the test are however the t1B and t1D suites. Also test cases test3 and - test4 in t1A as well as the test1 case in t1C are excluded from - the test. - Secondly, the test for system t2 should run. The included suites are - t2B and t2C. Included are also test cases test4, test1 and test7 in suite - t2A. Note that the test cases will be executed in the specified order. - Lastly, all suites for systems t3 are to be completely skipped and this - should be explicitly noted in the log files. - -

With the init term it's possible to specify initialization options - for nodes defined in the test specification. Currently, there are options - to start the node and/or to evaluate any function on the node. - See the Automatic startup of - the test target nodes chapter for details.

-

It is possible for the user to provide a test specification that - includes (for Common Test) unrecognizable terms. If this is desired, - the -allow_user_terms flag should be used when starting tests with - ct_run. This forces Common Test to ignore unrecognizable terms. - Note that in this mode, Common Test is not able to check the specification - for errors as efficiently as if the scanner runs in default mode. - If ct:run_test/1 is used for starting the tests, the relaxed scanner - mode is enabled by means of the tuple: {allow_user_terms,true}

+
+ The init term +

With the init term it's possible to specify initialization options + for nodes defined in the test specification. Currently, there are options + to start the node and/or to evaluate any function on the node. + See the Automatic startup of + the test target nodes chapter for details.

+
+
+ User specific terms +

It is possible for the user to provide a test specification that + includes (for Common Test) unrecognizable terms. If this is desired, + the -allow_user_terms flag should be used when starting tests with + ct_run. This forces Common Test to ignore unrecognizable terms. + Note that in this mode, Common Test is not able to check the specification + for errors as efficiently as if the scanner runs in default mode. + If ct:run_test/1 is used + for starting the tests, the relaxed scanner + mode is enabled by means of the tuple: {allow_user_terms,true}

+
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl index 8eafdff29f..853a1582cf 100644 --- a/lib/common_test/src/ct.erl +++ b/lib/common_test/src/ct.erl @@ -144,8 +144,8 @@ run(TestDirs) -> %%% @spec run_test(Opts) -> Result %%% Opts = [OptTuples] %%% OptTuples = {dir,TestDirs} | {suite,Suites} | {group,Groups} | -%%% {testcase,Cases} | {spec,TestSpecs} | {label,Label} | -%%% {config,CfgFiles} | {userconfig, UserConfig} | +%%% {testcase,Cases} | {spec,TestSpecs} | {join_specs,Bool} | +%%% {label,Label} | {config,CfgFiles} | {userconfig, UserConfig} | %%% {allow_user_terms,Bool} | {logdir,LogDir} | %%% {silent_connections,Conns} | {stylesheet,CSSFile} | %%% {cover,CoverSpecFile} | {cover_stop,Bool} | {step,StepOpts} | diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 index 736b380b0b..baaaf35be4 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 @@ -1,2 +1 @@ -{specs, join, "../specs2/flat_spec2"}. -{specs, join, "flat_spec1"}. \ No newline at end of file +{specs, join, ["../specs2/flat_spec2", "flat_spec1"]}. \ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 index 47c0286052..d652dbd78f 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 @@ -1,5 +1,5 @@ -{specs, join, "../specs1/flat_spec1"}. -{specs, join, "flat_spec2"}. +{specs, join, ["../specs1/flat_spec1"]}. +{specs, join, ["flat_spec2"]}. {config, "../config1/cfg12"}. {suites, "../tests2", t22_SUITE}. 
-- cgit v1.2.3 From c88b50bdf301b5ce17ce8f4f0d50d838e94292c6 Mon Sep 17 00:00:00 2001 From: Peter Andersson Date: Sun, 27 Jan 2013 00:57:10 +0100 Subject: Add more tests --- lib/common_test/test/ct_testspec_3_SUITE.erl | 142 +++++++++++++++++++++ .../ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl | 3 + .../ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl | 4 + .../ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl | 3 + .../ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl | 4 + 5 files changed, 156 insertions(+) diff --git a/lib/common_test/test/ct_testspec_3_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE.erl index c8774fae7b..8b84b563ab 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE.erl +++ b/lib/common_test/test/ct_testspec_3_SUITE.erl @@ -601,6 +601,148 @@ test_events(incl_join2) -> {?eh,test_done,{'DEF','STOP_TIME'}}, {?eh,stop_logging,[]}]; +test_events(incl_both1) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + 
{?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_both2) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + 
{?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + 
{?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_both_and_join1) -> []; +test_events(incl_both_and_join2) -> []; +test_events(rec_incl_separate1) -> []; +test_events(rec_incl_separate2) -> []; +test_events(rec_incl_join1) -> []; +test_events(rec_incl_join2) -> []; +test_events(rec_incl_separate_join1) -> []; +test_events(rec_incl_separate_join2) -> []; +test_events(rec_incl_join_separate1) -> []; +test_events(rec_incl_join_separate2) -> []; + test_events(_) -> []. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl index 4026273700..bbe79ed3fe 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl @@ -50,6 +50,9 @@ init_per_suite(Config) -> {incl_join1,[cfg21,cfg11]} -> ok; {incl_join1,[cfg12,cfg11,cfg21]} -> ok; {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg11]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg11]} -> ok; _ -> ok end, diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl index 25692d3e00..810298d348 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl @@ -50,6 +50,10 @@ init_per_suite(Config) -> {incl_join1,[cfg21,cfg11]} -> ok; {incl_join1,[cfg12,cfg11,cfg21]} -> ok; {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg21]} -> ok; + {incl_both1,[cfg12,cfg21]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg21]} -> ok; _ -> ok end, Config. 
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl index 4850151fa5..9348cd8caf 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl @@ -50,6 +50,9 @@ init_per_suite(Config) -> {incl_join1,[cfg21,cfg11]} -> ok; {incl_join1,[cfg12,cfg11,cfg21]} -> ok; {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg11]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg11]} -> ok; _ -> ok end, Config. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl index 78c5d7ca31..a92018ec70 100644 --- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl @@ -52,6 +52,10 @@ init_per_suite(Config) -> {incl_join1,[cfg21,cfg11]} -> ok; {incl_join1,[cfg12,cfg11,cfg21]} -> ok; {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg21]} -> ok; + {incl_both1,[cfg12,cfg21]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg21]} -> ok; _ -> ok end, Config. -- cgit v1.2.3