Diffstat (limited to 'lib/common_test/test')
11 files changed, 913 insertions, 74 deletions
diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile
index bd746f87a7..94569fa87f 100644
--- a/lib/common_test/test/Makefile
+++ b/lib/common_test/test/Makefile
@@ -38,6 +38,7 @@ MODULES= \
 	ct_groups_spec_SUITE \
 	ct_sequence_1_SUITE \
 	ct_repeat_1_SUITE \
+	ct_repeat_testrun_SUITE \
 	ct_testspec_1_SUITE \
 	ct_testspec_2_SUITE \
 	ct_testspec_3_SUITE \
@@ -58,7 +59,8 @@ MODULES= \
 	ct_group_leader_SUITE \
 	ct_cover_SUITE \
 	ct_groups_search_SUITE \
-	ct_surefire_SUITE
+	ct_surefire_SUITE \
+	ct_telnet_SUITE

 ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/common_test/test/ct_cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE.erl
index cb49dc423f..ec2680f664 100644
--- a/lib/common_test/test/ct_cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE.erl
@@ -1,7 +1,7 @@
 %%
 %% %CopyrightBegin%
 %%
-%% Copyright Ericsson AB 2012. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2013. All Rights Reserved.
 %%
 %% The contents of this file are subject to the Erlang Public License,
 %% Version 1.1, (the "License"); you may not use this file except in
@@ -59,10 +59,8 @@ init_per_testcase(TestCase, Config) ->
     ct_test_support:init_per_testcase(TestCase, Config).

 end_per_testcase(TestCase, Config) ->
-    Node = fullname(existing_node),
-    case lists:member(Node,nodes()) of
-	true -> rpc:call(Node,erlang,halt,[]);
-	false -> ok
+    try apply(?MODULE,TestCase,[cleanup,Config])
+    catch error:undef -> ok
     end,
     ct_test_support:end_per_testcase(TestCase, Config).

@@ -125,33 +123,35 @@ slave_start_slave(Config) ->
 %% spec file.
 %% Check that cover is collected from test node and slave node.
 cover_node_option(Config) ->
-    {ok, HostStr}=inet:gethostname(),
-    Host = list_to_atom(HostStr),
     DataDir = ?config(data_dir,Config),
-    {ok,Node} = ct_slave:start(Host,existing_node,
-			       [{erl_flags,"-pa " ++ DataDir}]),
+    {ok,Node} = start_slave(existing_node_1, "-pa " ++ DataDir),
     false = check_cover(Node),
     CoverSpec = default_cover_file_content() ++ [{nodes,[Node]}],
     CoverFile = create_cover_file(cover_node_option,CoverSpec,Config),
     {ok,Events} = run_test(cover_node_option,cover_node_option,
			    [{cover,CoverFile}],Config),
     check_calls(Events,2),
-    {ok,Node} = ct_slave:stop(existing_node),
+    {ok,Node} = ct_slave:stop(existing_node_1),
+    ok.
+
+cover_node_option(cleanup,_Config) ->
+    _ = ct_slave:stop(existing_node_1),
     ok.

 %% Test ct_cover:add_nodes/1 and ct_cover:remove_nodes/1
 %% Check that cover is collected from added node
 ct_cover_add_remove_nodes(Config) ->
-    {ok, HostStr}=inet:gethostname(),
-    Host = list_to_atom(HostStr),
     DataDir = ?config(data_dir,Config),
-    {ok,Node} = ct_slave:start(Host,existing_node,
-			       [{erl_flags,"-pa " ++ DataDir}]),
+    {ok,Node} = start_slave(existing_node_2, "-pa " ++ DataDir),
     false = check_cover(Node),
     {ok,Events} = run_test(ct_cover_add_remove_nodes,ct_cover_add_remove_nodes,
			    [],Config),
     check_calls(Events,2),
-    {ok,Node} = ct_slave:stop(existing_node),
+    {ok,Node} = ct_slave:stop(existing_node_2),
+    ok.
+
+ct_cover_add_remove_nodes(cleanup,_Config) ->
+    _ = ct_slave:stop(existing_node_2),
     ok.

 %% Test that the test suite itself can be cover compiled and that
@@ -310,3 +310,12 @@ create_cover_file(Filename,Terms,Config) ->
		  end,Terms),
     ok = file:close(Fd),
     File.
+
+start_slave(Name,Args) ->
+    {ok, HostStr}=inet:gethostname(),
+    Host = list_to_atom(HostStr),
+    ct_slave:start(Host,Name,
+		   [{erl_flags,Args},
+		    {boot_timeout,10}, % extending some timers for slow test hosts
+		    {init_timeout,10},
+		    {startup_timeout,10}]).
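The end_per_testcase/2 change above relies on an optional-cleanup pattern: if the suite exports a TestCase(cleanup, Config) clause it is called, otherwise the resulting error:undef is swallowed. A minimal sketch of that pattern, using a hypothetical my_SUITE module that is not part of this patch:

-module(my_SUITE).
-compile(export_all).

%% Run TestCase(cleanup, Config) if the suite defines such a clause;
%% cases without one fall through to ok via the error:undef catch.
end_per_testcase(TestCase, Config) ->
    try apply(?MODULE, TestCase, [cleanup, Config])
    catch error:undef -> ok
    end,
    ok.

some_case(Config) when is_list(Config) ->
    ok.

%% Optional cleanup clause, e.g. for stopping slave nodes started by
%% some_case/1, even when the test case itself failed.
some_case(cleanup, _Config) ->
    ok.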
diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
index fdc3323f0a..83d368c53d 100644
--- a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
@@ -1,7 +1,7 @@
 %%--------------------------------------------------------------------
 %% %CopyrightBegin%
 %%
-%% Copyright Ericsson AB 2012. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2013. All Rights Reserved.
 %%
 %% The contents of this file are subject to the Erlang Public License,
 %% Version 1.1, (the "License"); you may not use this file except in
@@ -52,11 +52,10 @@ init_per_testcase(_Case, Config) ->
     [{watchdog, Dog}|Config].

 end_per_testcase(Case, Config) ->
-    %% try apply(?MODULE,Case,[cleanup,Config])
-    %% catch error:undef -> ok
-    %% end,
+    try apply(?MODULE,Case,[cleanup,Config])
+    catch error:undef -> ok
+    end,
-    kill_slaves(Case,nodes()),
     Dog=?config(watchdog, Config),
     test_server:timetrap_cancel(Dog),
     ok.
@@ -67,47 +66,52 @@ break(_Config) ->
     test_server:break(""),
     ok.

-default(Config) ->
+default(_Config) ->
     cover_compiled = code:which(cover_test_mod),
     cover_test_mod:foo(),
     ok.

-slave(Config) ->
+slave(_Config) ->
     cover_compiled = code:which(cover_test_mod),
     cover_test_mod:foo(),
     N1 = nodename(slave,1),
-    {ok,Node} = ct_slave:start(N1),
+    {ok,Node} = start_slave(N1),
     cover_compiled = rpc:call(Node,code,which,[cover_test_mod]),
     rpc:call(Node,cover_test_mod,foo,[]),
     {ok,Node} = ct_slave:stop(N1),
     ok.
+slave(cleanup,_Config) ->
+    kill_slaves([nodename(slave,1)]).

-slave_start_slave(Config) ->
+slave_start_slave(_Config) ->
     cover_compiled = code:which(cover_test_mod),
     cover_test_mod:foo(),
     N1 = nodename(slave_start_slave,1),
     N2 = nodename(slave_start_slave,2),
-    {ok,Node} = ct_slave:start(N1),
+    {ok,Node} = start_slave(N1),
     cover_compiled = rpc:call(Node,code,which,[cover_test_mod]),
     rpc:call(Node,cover_test_mod,foo,[]),
-    {ok,Node2} = rpc:call(Node,ct_slave,start,[N2]),
+    {ok,Node2} = start_slave(Node,N2), % start slave N2 from node Node
     rpc:call(Node2,cover_test_mod,foo,[]),
     {ok,Node2} = rpc:call(Node,ct_slave,stop,[N2]),
     {ok,Node} = ct_slave:stop(N1),
     ok.
+slave_start_slave(cleanup,_Config) ->
+    kill_slaves([nodename(slave_start_slave,1),
+		 nodename(slave_start_slave,2)]).

-cover_node_option(Config) ->
+cover_node_option(_Config) ->
     cover_compiled = code:which(cover_test_mod),
     cover_test_mod:foo(),
-    Node = fullname(existing_node),
+    Node = fullname(existing_node_1),
     cover_compiled = rpc:call(Node,code,which,[cover_test_mod]),
     rpc:call(Node,cover_test_mod,foo,[]),
     ok.

-ct_cover_add_remove_nodes(Config) ->
+ct_cover_add_remove_nodes(_Config) ->
     cover_compiled = code:which(cover_test_mod),
     cover_test_mod:foo(),
-    Node = fullname(existing_node),
+    Node = fullname(existing_node_2),
     Beam = rpc:call(Node,code,which,[cover_test_mod]),
     false = (Beam == cover_compiled),
@@ -143,14 +147,20 @@ fullname(Name) ->
     {ok,Host} = inet:gethostname(),
     list_to_atom(atom_to_list(Name) ++ "@" ++ Host).

-kill_slaves(Case, [Node|Nodes]) ->
-    Prefix = nodeprefix(Case),
-    case lists:prefix(Prefix,atom_to_list(Node)) of
-	true ->
-	    rpc:call(Node,erlang,halt,[]);
-	_ ->
-	    ok
-    end,
-    kill_slaves(Case,Nodes);
-kill_slaves(_,[]) ->
+kill_slaves([Name|Names]) ->
+    _ = rpc:call(fullname(Name),erlang,halt,[]),
+    kill_slaves(Names);
+kill_slaves([]) ->
     ok.
+
+start_slave(Name) ->
+    start_slave(node(),Name).
+
+start_slave(FromNode,Name) ->
+    {ok, HostStr}=inet:gethostname(),
+    Host = list_to_atom(HostStr),
+    rpc:call(FromNode,ct_slave,start,
+	     [Host,Name,
+	      [{boot_timeout,15}, % extending some timers for slow test hosts
+	       {init_timeout,15},
+	       {startup_timeout,15}]]).
diff --git a/lib/common_test/test/ct_group_leader_SUITE.erl b/lib/common_test/test/ct_group_leader_SUITE.erl
index cde3061d6a..6d54a4c004 100644
--- a/lib/common_test/test/ct_group_leader_SUITE.erl
+++ b/lib/common_test/test/ct_group_leader_SUITE.erl
@@ -1,7 +1,7 @@
 %%
 %% %CopyrightBegin%
 %%
-%% Copyright Ericsson AB 2012. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2013. All Rights Reserved.
 %%
 %% The contents of this file are subject to the Erlang Public License,
 %% Version 1.1, (the "License"); you may not use this file except in
@@ -176,6 +176,10 @@ events_to_check(_Test) ->
      {?eh,tc_done,{group_leader_SUITE,cap1,ok}},
      {?eh,tc_start,{group_leader_SUITE,cap2}},
      {?eh,tc_done,{group_leader_SUITE,cap2,ok}}]},
+    {parallel,[{?eh,tc_start,{group_leader_SUITE,unexp1}},
+	       {?eh,tc_done,{group_leader_SUITE,unexp1,ok}},
+	       {?eh,tc_start,{group_leader_SUITE,unexp2}},
+	       {?eh,tc_done,{group_leader_SUITE,unexp2,ok}}]},
     {?eh,test_done,{'DEF','STOP_TIME'}},
     {?eh,stop_logging,[]}
    ].
diff --git a/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl b/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl
index 3f1844b4ae..804f722081 100644
--- a/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl
+++ b/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl
@@ -1,7 +1,7 @@
 %%
 %% %CopyrightBegin%
 %%
-%% Copyright Ericsson AB 2012. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2013. All Rights Reserved.
 %%
 %% The contents of this file are subject to the Erlang Public License,
 %% Version 1.1, (the "License"); you may not use this file except in
@@ -112,7 +112,8 @@ groups() ->
      {seq,[],[s1,s2,s3]},
      {seq2,[],[s4,s5]},
      {seq_in_par,[parallel],[p10,p11,{group,seq},p12,{group,seq2},p13]},
-     {capture_io,[parallel],[cap1,cap2]}].
+     {capture_io,[parallel],[cap1,cap2]},
+     {unexpected_io,[parallel],[unexp1,unexp2]}].

 %%--------------------------------------------------------------------
 %% @spec all() -> GroupsAndTestCases | {skip,Reason}
@@ -126,7 +127,8 @@ all() ->
     [tc1,{group,p},{group,p_restart},p3,
      {group,seq_in_par},
      cap1,cap2,
-     {group,capture_io}].
+     {group,capture_io},
+     {group,unexpected_io}].

 tc1(_C) ->
     ok.
@@ -250,3 +252,36 @@ gen_io(Label, N, Acc) ->
     S = lists:flatten(io_lib:format("~s: ~p\n", [Label,N])),
     io:put_chars(S),
     gen_io(Label, N-1, [S|Acc]).
+
+%% Test that unexpected I/O is sent to test_server's unexpected_io log.
+%% To trigger this, run two test cases in parallel and send a printout
+%% (via ct logging functions) from an external process which has a
+%% different group leader than the test cases.
+unexp1(Config) ->
+    timer:sleep(1000),
+    gen_unexp_io(),
+    timer:sleep(1000),
+    check_unexp_io(Config),
+    ok.
+
+unexp2(_) ->
+    timer:sleep(2000),
+    ok.
+
+gen_unexp_io() ->
+    spawn(fun() ->
+		  group_leader(whereis(user),self()),
+		  ct:log("-x- Unexpected io ct:log -x-",[]),
+		  ct:pal("-x- Unexpected io ct:pal -x-",[]),
+		  ok
+	  end).
+
+check_unexp_io(Config) ->
+    SuiteLog = ?config(tc_logfile,Config),
+    Dir = filename:dirname(SuiteLog),
+    UnexpLog = filename:join(Dir,"unexpected_io.log.html"),
+    {ok,SuiteBin} = file:read_file(SuiteLog),
+    nomatch = re:run(SuiteBin,"-x- Unexpected io ",[global,{capture,none}]),
+    {ok,UnexpBin} = file:read_file(UnexpLog),
+    {match,[_,_]} = re:run(UnexpBin,"-x- Unexpected io ",[global]),
+    ok.
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
new file mode 100644
index 0000000000..35d67a10f2
--- /dev/null
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
@@ -0,0 +1,378 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File: ct_repeat_testrun_SUITE
+%%%
+%%% Description:
+%%% Test different options for repeating test runs:
+%%%   -repeat N
+%%%   -duration T [-force_stop [skip_rest]]
+%%%   -until T [-force_stop [skip_rest]]
+%%%
+%%%-------------------------------------------------------------------
+-module(ct_repeat_testrun_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+-define(eh, ct_test_support_eh).
+-define(skip_reason, "Repeated test stopped by force_stop option").
+-define(skipped, {skipped, ?skip_reason}).
+
+
+%% Timers used in this test.
+%% Each test suite consists of
+%%
+%%   [tc1,tc2,{group,g,[tc1,tc2]},tc2]
+%%
+%% In r1_SUITE tc1 has a sleep of 10 sec - all other test cases just
+%% return ok.
+%%
+%% => One complete test run of two suites r1_SUITE + r2_SUITE is at
+%%    least 20 seconds (10 sec for each r1_SUITE:tc1)
+%%
+-define(t1,30). % time shall expire during second run of r1_SUITE
+-define(t2,9).  % time shall expire during first run of tc1
+-define(t3,19). % time shall expire during second run of tc1
+
+
+%%--------------------------------------------------------------------
+%% TEST SERVER CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Description: Since Common Test starts another Test Server
+%% instance, the tests need to be performed on a separate node (or
+%% there will be clashes with logging processes etc).
+%%--------------------------------------------------------------------
+init_per_suite(Config0) ->
+    Config = ct_test_support:init_per_suite(Config0),
+    DataDir = ?config(data_dir, Config),
+    Suite1 = filename:join([DataDir,"a_test","r1_SUITE"]),
+    Suite2 = filename:join([DataDir,"b_test","r2_SUITE"]),
+    Opts0 = ct_test_support:get_opts(Config),
+    Opts1 = Opts0 ++ [{suite,Suite1},{testcase,tc2},{label,timing1}],
+    Opts2 = Opts0 ++ [{suite,Suite2},{testcase,tc2},{label,timing2}],
+
+    %% Make sure both suites are compiled
+    {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts1],Config),
+    {1,0,{0,0}} = ct_test_support:run(ct,run_test,[Opts2],Config),
+
+    %% Time the shortest testcase to use for offset
+    {T0,{1,0,{0,0}}} = timer:tc(ct_test_support,run,[ct,run_test,[Opts1],Config]),
+
+    %% -2 is to ensure we hit inside the target test case and not after
+%    T = round(T0/1000000)-2,
+    T=0,
+    [{offset,T}|Config].
+
+end_per_suite(Config) ->
+    ct_test_support:end_per_suite(Config).
+
+init_per_testcase(TestCase, Config) ->
+    ct_test_support:init_per_testcase(TestCase, Config).
+
+end_per_testcase(TestCase, Config) ->
+    ct_test_support:end_per_testcase(TestCase, Config).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+    [
+     repeat_n,
+     duration,
+     duration_force_stop,
+     duration_force_stop_skip_rest,
+     duration_force_stop_skip_rest_group,
+     until,
+     until_force_stop,
+     until_force_stop_skip_rest,
+     until_force_stop_skip_rest_group
+    ].
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
+
+%%%-----------------------------------------------------------------
+%%%
+repeat_n(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,repeat_n},
+			  {repeat,2}],
+			 Config),
+    ok = execute(repeat_n, Opts, ERPid, Config).
+
+duration(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,duration},
+			  {duration,duration_str(?t1,2,Config)}],
+			 Config),
+    ok = execute(duration, Opts, ERPid, Config).
+
+duration_force_stop(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,duration_force_stop},
+			  {duration,duration_str(?t1,2,Config)},
+			  {force_stop,true}],
+			 Config),
+    ok = execute(duration_force_stop, Opts, ERPid, Config).
+
+duration_force_stop_skip_rest(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,duration_force_stop_skip_rest},
+			  {duration,duration_str(?t2,1,Config)},
+			  {force_stop,skip_rest}],
+			 Config),
+    ok = execute(duration_force_stop_skip_rest, Opts, ERPid, Config).
+
+duration_force_stop_skip_rest_group(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,duration_force_stop_skip_rest_group},
+			  {duration,duration_str(?t3,1,Config)},
+			  {force_stop,skip_rest}],
+			 Config),
+    ok = execute(duration_force_stop_skip_rest_group, Opts, ERPid, Config).
+
+until(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,until}],
+			 Config),
+    ExecuteFun =
+	fun() ->
+		[_,_] = ct_test_support:run_ct_run_test(
+			  Opts++[{until,until_str(?t1,2,Config)}],Config),
+		0 = ct_test_support:run_ct_script_start(
+		      Opts++[{until,until_str(?t1,2,Config)}],Config)
+	end,
+    ok = execute(ExecuteFun, until, Opts, ERPid, Config).
+
+until_force_stop(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,until_force_stop},
+			  {force_stop,true}],
+			 Config),
+    ExecuteFun =
+	fun() ->
+		[_,_] = ct_test_support:run_ct_run_test(
+			  Opts++[{until,until_str(?t1,2,Config)}],Config),
+		0 = ct_test_support:run_ct_script_start(
+		      Opts++[{until,until_str(?t1,2,Config)}],Config)
+	end,
+    ok = execute(ExecuteFun, until_force_stop, Opts, ERPid, Config).
+
+until_force_stop_skip_rest(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,until_force_stop_skip_rest},
+			  {force_stop,skip_rest}],
+			 Config),
+    ExecuteFun =
+	fun() ->
+		[_] = ct_test_support:run_ct_run_test(
+			Opts++[{until,until_str(?t2,1,Config)}],Config),
+		1 = ct_test_support:run_ct_script_start(
+		      Opts++[{until,until_str(?t2,1,Config)}],Config)
+	end,
+    ok = execute(ExecuteFun, until_force_stop_skip_rest,
+		 Opts, ERPid, Config).
+
+until_force_stop_skip_rest_group(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Dirs = filelib:wildcard(filename:join(DataDir,"*")),
+    {Opts,ERPid} = setup([{dir,Dirs},
+			  {label,until_force_stop_skip_rest_group},
+			  {force_stop,skip_rest}],
+			 Config),
+    ExecuteFun =
+	fun() ->
+		[_] = ct_test_support:run_ct_run_test(
+			Opts++[{until,until_str(?t3,1,Config)}],Config),
+		0 = ct_test_support:run_ct_script_start(
+		      Opts++[{until,until_str(?t3,1,Config)}],Config)
+	end,
+    ok = execute(ExecuteFun,
+		 until_force_stop_skip_rest_group,
+		 Opts, ERPid, Config).
+
+
+%%%-----------------------------------------------------------------
+%%% HELP FUNCTIONS
+%%%-----------------------------------------------------------------
+
+setup(Test, Config) ->
+    Opts0 = ct_test_support:get_opts(Config),
+    Level = ?config(trace_level, Config),
+    EvHArgs = [{cbm,ct_test_support},{trace_level,Level}],
+    Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test],
+    ERPid = ct_test_support:start_event_receiver(Config),
+    {Opts,ERPid}.
+
+%% Execute test, first with ct:run_test, then with ct:script_start
+execute(Name, Opts, ERPid, Config) ->
+    ExecuteFun = fun() -> ok = ct_test_support:run(Opts, Config) end,
+    execute(ExecuteFun, Name, Opts, ERPid, Config).
+
+execute(ExecuteFun, Name, Opts, ERPid, Config) ->
+    ExecuteFun(),
+    Events = ct_test_support:get_events(ERPid, Config),
+
+    ct_test_support:log_events(Name,
+			       reformat(Events, ?eh),
+			       ?config(priv_dir, Config),
+			       Opts),
+
+    TestEvents = events_to_check(Name),
+    ct_test_support:verify_events(TestEvents, Events, Config).
+
+reformat(Events, EH) ->
+    ct_test_support:reformat(Events, EH).
+
+%% N is the expected number of repeats
+until_str(Secs0,N,Config) ->
+    Offset = ?config(offset,Config),
+    Secs = Secs0 + N*Offset,
+    Now = calendar:datetime_to_gregorian_seconds(calendar:local_time()),
+    {{Y,Mo,D},{H,M,S}} = calendar:gregorian_seconds_to_datetime(Now+Secs),
+    lists:flatten(io_lib:format("~2..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
+				[Y rem 100, Mo, D, H, M, S])).
+
+%% N is the expected number of repeats
+duration_str(Secs0,N,Config) ->
+    Offset = ?config(offset,Config),
+    Secs = Secs0 + N*Offset,
+    "0000" ++ lists:flatten(io_lib:format("~2..0w",[Secs])).
+
+%%%-----------------------------------------------------------------
+%%% TEST EVENTS
+%%%-----------------------------------------------------------------
+%% 2 tests (ct:run_test + script_start) is default
+events_to_check(C) when C==repeat_n; C==duration; C==until ->
+    dupl(4, start_logging() ++ all_succ() ++ stop_logging());
+events_to_check(C) when C==duration_force_stop; C==until_force_stop ->
+    dupl(2, start_logging() ++
+	     all_succ() ++
+	     stop_logging() ++
+	     start_logging() ++
+	     all_succ(r1_SUITE) ++
+	     stop_logging());
+events_to_check(C) when C==duration_force_stop_skip_rest;
+			C==until_force_stop_skip_rest ->
+    dupl(2, start_logging() ++ skip_first_tc1(r1_SUITE) ++ stop_logging());
+events_to_check(C) when C==duration_force_stop_skip_rest_group;
+			C==until_force_stop_skip_rest_group ->
+    dupl(2, start_logging() ++ skip_tc1_in_group(r1_SUITE) ++ stop_logging()).
+
+dupl(N,List) ->
+    lists:flatten(lists:duplicate(N,List)).
+
+start_logging() ->
+    [{?eh,start_logging,{'DEF','RUNDIR'}}].
+stop_logging() ->
+    [{?eh,stop_logging,[]}].
+
+
+all_succ() ->
+    all_succ(r1_SUITE) ++ all_succ(r2_SUITE).
+
+all_succ(Suite) ->
+    [{?eh,tc_start,{Suite,init_per_suite}},
+     {?eh,tc_done,{Suite,init_per_suite,ok}},
+     {?eh,tc_start,{Suite,tc1}},
+     {?eh,tc_done,{Suite,tc1,ok}},
+     {?eh,test_stats,{'_',0,{0,0}}},
+     {?eh,tc_start,{Suite,tc2}},
+     {?eh,tc_done,{Suite,tc2,ok}},
+     {?eh,test_stats,{'_',0,{0,0}}},
+     [{?eh,tc_start,{Suite,{init_per_group,g,[]}}},
+      {?eh,tc_done,{Suite,{init_per_group,g,[]},ok}},
+      {?eh,tc_start,{Suite,tc1}},
+      {?eh,tc_done,{Suite,tc1,ok}},
+      {?eh,test_stats,{'_',0,{0,0}}},
+      {?eh,tc_start,{Suite,tc2}},
+      {?eh,tc_done,{Suite,tc2,ok}},
+      {?eh,test_stats,{'_',0,{0,0}}},
+      {?eh,tc_start,{Suite,{end_per_group,g,[]}}},
+      {?eh,tc_done,{Suite,{end_per_group,g,[]},ok}}],
+     {?eh,tc_start,{Suite,tc2}},
+     {?eh,tc_done,{Suite,tc2,ok}},
+     {?eh,test_stats,{'_',0,{0,0}}},
+     {?eh,tc_start,{Suite,end_per_suite}},
+     {?eh,tc_done,{Suite,end_per_suite,ok}}].
+
+skip_first_tc1(Suite) ->
+    [{?eh,tc_start,{Suite,init_per_suite}},
+     {?eh,tc_done,{Suite,init_per_suite,ok}},
+     {?eh,tc_start,{Suite,tc1}},
+     {?eh,tc_done,{Suite,tc1,ok}},
+     {?eh,test_stats,{'_',0,{0,0}}},
+     {?eh,tc_done,{Suite,tc2,?skipped}},
+     {?eh,test_stats,{'_',0,{1,0}}},
+     {?eh,tc_done,{Suite,{init_per_group,g,[]},?skipped}},
+     {?eh,tc_auto_skip,{Suite,tc1,?skip_reason}},
+     {?eh,test_stats,{'_',0,{1,1}}},
+     {?eh,tc_auto_skip,{Suite,tc2,?skip_reason}},
+     {?eh,test_stats,{'_',0,{1,2}}},
+     {?eh,tc_auto_skip,{Suite,end_per_group,?skip_reason}},
+     {?eh,tc_done,{Suite,tc2,?skipped}},
+     {?eh,test_stats,{'_',0,{2,2}}},
+     {?eh,tc_start,{Suite,end_per_suite}},
+     {?eh,tc_done,{Suite,end_per_suite,ok}}].
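The until_str/3 and duration_str/3 helpers above build the two time strings used by the until and duration start options: an absolute [YYMMDD]HHMMSS stop time and an HHMMSS duration. A standalone sketch of the same formatting, with hypothetical function names and no test-specific offset, not part of the patch itself:

%% Absolute stop time, SecsFromNow seconds ahead, as "YYMMDDHHMMSS".
until_string(SecsFromNow) ->
    Now = calendar:datetime_to_gregorian_seconds(calendar:local_time()),
    {{Y,Mo,D},{H,M,S}} =
        calendar:gregorian_seconds_to_datetime(Now + SecsFromNow),
    lists:flatten(io_lib:format("~2..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
                                [Y rem 100, Mo, D, H, M, S])).

%% Duration as "HHMMSS"; hours and minutes are zero here, so for
%% example 30 seconds becomes "000030".
duration_string(Secs) when Secs >= 0, Secs < 60 ->
    "0000" ++ lists:flatten(io_lib:format("~2..0w", [Secs])).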
+
+
+skip_tc1_in_group(Suite) ->
+    [{?eh,tc_start,{Suite,init_per_suite}},
+     {?eh,tc_done,{Suite,init_per_suite,ok}},
+     {?eh,tc_start,{Suite,tc1}},
+     {?eh,tc_done,{Suite,tc1,ok}},
+     {?eh,test_stats,{'_',0,{0,0}}},
+     {?eh,tc_start,{Suite,tc2}},
+     {?eh,tc_done,{Suite,tc2,ok}},
+     {?eh,test_stats,{'_',0,{0,0}}},
+     [{?eh,tc_start,{Suite,{init_per_group,g,[]}}},
+      {?eh,tc_done,{Suite,{init_per_group,g,[]},ok}},
+      {?eh,tc_start,{Suite,tc1}},
+      {?eh,tc_done,{Suite,tc1,ok}},
+      {?eh,test_stats,{'_',0,{0,0}}},
+      {?eh,tc_done,{Suite,tc2,?skipped}},
+      {?eh,test_stats,{'_',0,{1,0}}},
+      {?eh,tc_start,{Suite,{end_per_group,g,[]}}},
+      {?eh,tc_done,{Suite,{end_per_group,g,[]},ok}}],
+     {?eh,tc_done,{Suite,tc2,?skipped}},
+     {?eh,test_stats,{'_',0,{2,0}}},
+     {?eh,tc_start,{Suite,end_per_suite}},
+     {?eh,tc_done,{Suite,end_per_suite,ok}}].
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
new file mode 100644
index 0000000000..3fd5943691
--- /dev/null
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
@@ -0,0 +1,75 @@
+%%--------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%----------------------------------------------------------------------
+%% File: r1_SUITE.erl
+%%
+%% Description:
+%%
+%%
+%% @author Support
+%% @doc
+%% @end
+%%----------------------------------------------------------------------
+%%----------------------------------------------------------------------
+-module(r1_SUITE).
+-include_lib("common_test/include/ct.hrl").
+
+-compile(export_all).
+
+%% Default timetrap timeout (set in init_per_testcase).
+-define(default_timeout, ?t:seconds(30)).
+
+all() ->
+    testcases() ++ [{group,g}, tc2].
+
+groups() ->
+    [{g,testcases()}].
+
+testcases() ->
+    [tc1,tc2].
+
+init_per_suite(Config) ->
+    Config.
+
+end_per_suite(Config) ->
+    Config.
+
+init_per_group(_, Config) ->
+    Config.
+
+end_per_group(_Group, Config) ->
+    Config.
+
+init_per_testcase(_Case, Config) ->
+    Dog = test_server:timetrap(?default_timeout),
+    [{watchdog, Dog}|Config].
+
+end_per_testcase(_Case, Config) ->
+    Dog=?config(watchdog, Config),
+    test_server:timetrap_cancel(Dog),
+    ok.
+
+%%%-----------------------------------------------------------------
+%%% Test cases
+tc1(_Config) ->
+    timer:sleep(10000),
+    ok.
+
+tc2(_Config) ->
+    ok.
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
new file mode 100644
index 0000000000..dc9abc2863
--- /dev/null
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
@@ -0,0 +1,75 @@
+%%--------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%----------------------------------------------------------------------
+%% File: r2_SUITE.erl
+%%
+%% Description:
+%%
+%%
+%% @author Support
+%% @doc
+%% @end
+%%----------------------------------------------------------------------
+%%----------------------------------------------------------------------
+-module(r2_SUITE).
+-include_lib("common_test/include/ct.hrl").
+
+-compile(export_all).
+
+%% Default timetrap timeout (set in init_per_testcase).
+-define(default_timeout, ?t:seconds(30)).
+
+all() ->
+    testcases() ++ [{group,g}, tc2].
+
+groups() ->
+    [{g,testcases()}].
+
+testcases() ->
+    [tc1,tc2].
+
+init_per_suite(Config) ->
+    Config.
+
+end_per_suite(Config) ->
+    Config.
+
+init_per_group(_, Config) ->
+    Config.
+
+end_per_group(_Group, Config) ->
+    Config.
+
+init_per_testcase(_Case, Config) ->
+    Dog = test_server:timetrap(?default_timeout),
+    [{watchdog, Dog}|Config].
+
+end_per_testcase(_Case, Config) ->
+    Dog=?config(watchdog, Config),
+    test_server:timetrap_cancel(Dog),
+    ok.
+
+%%%-----------------------------------------------------------------
+%%% Test cases
+tc1(_Config) ->
+    %% timer:sleep(3000),
+    ok.
+
+tc2(_Config) ->
+    ok.
diff --git a/lib/common_test/test/ct_telnet_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE.erl
new file mode 100644
index 0000000000..b4f24baa0c
--- /dev/null
+++ b/lib/common_test/test/ct_telnet_SUITE.erl
@@ -0,0 +1,122 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File: ct_telnet_SUITE
+%%%
+%%% Description:
+%%% Edit your ts.unix.config or ts.win32.config before running these tests
+%%% Test ct_telnet_SUITE module
+%%%
+%%%-------------------------------------------------------------------
+-module(ct_telnet_SUITE).
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+-define(eh, ct_test_support_eh).
+
+%%--------------------------------------------------------------------
+%% TEST SERVER CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Description: Since Common Test starts another Test Server
+%% instance, the tests need to be performed on a separate node (or
+%% there will be clashes with logging processes etc).
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+    ct_test_support:init_per_suite(Config).
+
+end_per_suite(Config) ->
+    ct_test_support:end_per_suite(Config).
+
+init_per_testcase(TestCase, Config) ->
+    ct_test_support:init_per_testcase(TestCase, Config).
+
+end_per_testcase(TestCase, Config) ->
+    ct_test_support:end_per_testcase(TestCase, Config).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+    [
+     default
+    ].
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
+
+%%%-----------------------------------------------------------------
+%%%
+default(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Suite = filename:join(DataDir, "ct_telnet_basic_SUITE"),
+    Cfg = {unix, ct:get_config(unix)},
+    ok = file:write_file(filename:join(DataDir, "telnet.cfg"), io_lib:write(Cfg) ++ "."),
+    CfgFile = filename:join(DataDir, "telnet.cfg"),
+    {Opts,ERPid} = setup([{suite,Suite},{label,default}, {config, CfgFile}], Config),
+    ok = execute(default, Opts, ERPid, Config).
+
+%%%-----------------------------------------------------------------
+%%% HELP FUNCTIONS
+%%%-----------------------------------------------------------------
+
+setup(Test, Config) ->
+    Opts0 = ct_test_support:get_opts(Config),
+    Level = ?config(trace_level, Config),
+    EvHArgs = [{cbm,ct_test_support},{trace_level,Level}],
+    Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test],
+    ERPid = ct_test_support:start_event_receiver(Config),
+    {Opts,ERPid}.
+
+execute(Name, Opts, ERPid, Config) ->
+    ok = ct_test_support:run(Opts, Config),
+    Events = ct_test_support:get_events(ERPid, Config),
+
+    ct_test_support:log_events(Name,
+			       reformat(Events, ?eh),
+			       ?config(priv_dir, Config),
+			       Opts),
+
+    TestEvents = events_to_check(Name,Config),
+    ct_test_support:verify_events(TestEvents, Events, Config).
+
+reformat(Events, EH) ->
+    ct_test_support:reformat(Events, EH).
+
+%%%-----------------------------------------------------------------
+%%% TEST EVENTS
+%%%-----------------------------------------------------------------
+events_to_check(default,Config) ->
+    {module,_} = code:load_abs(filename:join(?config(data_dir,Config),
+					     ct_telnet_basic_SUITE)),
+    TCs = ct_telnet_basic_SUITE:all(),
+    code:purge(ct_telnet_basic_SUITE),
+    code:delete(ct_telnet_basic_SUITE),
+
+    OneTest =
+	[{?eh,start_logging,{'DEF','RUNDIR'}}] ++
+	[{?eh,tc_done,{ct_telnet_basic_SUITE,TC,ok}} || TC <- TCs] ++
+	[{?eh,stop_logging,[]}],
+
+    %% 2 tests (ct:run_test + script_start) is default
+    OneTest ++ OneTest.
diff --git a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_basic_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_basic_SUITE.erl
new file mode 100644
index 0000000000..914b95f9cf
--- /dev/null
+++ b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_basic_SUITE.erl
@@ -0,0 +1,78 @@
+%% Modify your ts.unix.config or ts.win32.config file before running these tests
+-module(ct_telnet_basic_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+%%--------------------------------------------------------------------
+%% TEST SERVER CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+init_per_suite(Config) ->
+    Config.
+
+end_per_suite(_Config) ->
+    ok.
+
+
+suite() -> [{require,telnet_temp,{unix,[telnet]}}].
+
+all() ->
+    [start_stop, send_and_get, expect, already_closed,
+     cmd, sendf, close_wrong_type].
+
+groups() ->
+    [].
+
+init_per_group(_GroupName, Config) ->
+    Config.
+
+end_per_group(_GroupName, Config) ->
+    Config.
+
+start_stop(_Config) ->
+    {ok, Handle} = ct_telnet:open(telnet_temp),
+    ok = ct_telnet:close(Handle),
+    ok.
+send_and_get(_) ->
+    {ok, Handle} = ct_telnet:open(telnet_temp),
+    ok = ct_telnet:send(Handle, "ayt"),
+    {ok, _Data} = ct_telnet:get_data(Handle),
+    ok = ct_telnet:close(Handle),
+    ok.
+
+expect(_) ->
+    {ok, Handle} = ct_telnet:open(telnet_temp),
+    ok = ct_telnet:send(Handle, "echo ayt"),
+    ok = case ct_telnet:expect(Handle, ["ayt"]) of
+	     {ok, _} ->
+		 ok;
+	     {error, {prompt, _}} ->
+		 ok
+	 end,
+    ok = ct_telnet:close(Handle),
+    ok.
+
+already_closed(_) ->
+    {ok, Handle} = ct_telnet:open(telnet_temp),
+    ok = ct_telnet:close(Handle),
+    {error, already_closed} = ct_telnet:close(Handle),
+    ok.
+
+cmd(_) ->
+    {ok, Handle} = ct_telnet:open(telnet_temp),
+    {ok, _} = ct_telnet:cmd(Handle, "display"),
+    {ok, _} = ct_telnet:cmdf(Handle, "~s ~s", ["set", "bsasdel"]),
+    ok = ct_telnet:close(Handle),
+    ok.
+
+sendf(_) ->
+    {ok, Handle} = ct_telnet:open(telnet_temp),
+    ok = ct_telnet:sendf(Handle, "~s", ["ayt"]),
+    ok = ct_telnet:close(Handle),
+    ok.
+
+close_wrong_type(_) ->
+    {error, _} = ct_telnet:close(whatever),
+    ok.
diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl
index 5e109e98e9..6bcac12326 100644
--- a/lib/common_test/test/ct_test_support.erl
+++ b/lib/common_test/test/ct_test_support.erl
@@ -29,7 +29,8 @@
 -export([init_per_suite/1, init_per_suite/2, end_per_suite/1,
	 init_per_testcase/2, end_per_testcase/2,
	 write_testspec/2, write_testspec/3,
-	 run/2, run/3, run/4, get_opts/1, wait_for_ct_stop/1]).
+	 run/2, run/3, run/4, run_ct_run_test/2, run_ct_script_start/2,
+	 get_opts/1, wait_for_ct_stop/1]).

 -export([handle_event/2, start_event_receiver/1, get_events/2,
	 verify_events/3, verify_events/4, reformat/2, log_events/4,
@@ -223,15 +224,49 @@ get_opts(Config) ->

 %%%-----------------------------------------------------------------
 %%%
-run(Opts, Config) when is_list(Opts) ->
+run(Opts0, Config) when is_list(Opts0) ->
+    Opts =
+	%% read (and override) opts from env variable, the form expected:
+	%% "[{some_key1,SomeVal2}, {some_key2,SomeVal2}]"
+	case os:getenv("CT_TEST_OPTS") of
+	    false -> Opts0;
+	    ""    -> Opts0;
+	    Terms ->
+		case erl_scan:string(Terms++".", 0) of
+		    {ok,Tokens,_} ->
+			case erl_parse:parse_term(Tokens) of
+			    {ok,OROpts} ->
+				Override =
+				    fun(O={Key,_}, Os) ->
+					    io:format(user, "ADDING START "
+						      "OPTION: ~p~n", [O]),
+					    [O | lists:keydelete(Key, 1, Os)]
+				    end,
+				lists:foldl(Override, Opts0, OROpts);
+			    _ ->
+				Opts0
+			end;
+		    _ ->
+			Opts0
+		end
+	end,
+
+    %% use ct interface
+    CtRunTestResult=run_ct_run_test(Opts,Config),
+    %% use run_test interface (simulated)
+    ExitStatus=run_ct_script_start(Opts,Config),
+
+    check_result(CtRunTestResult,ExitStatus,Opts).
+
+run_ct_run_test(Opts,Config) ->
     CTNode = proplists:get_value(ct_node, Config),
     Level = proplists:get_value(trace_level, Config),
-    %% use ct interface
     test_server:format(Level, "~n[RUN #1] Calling ct:run_test(~p) on ~p~n",
		       [Opts, CTNode]),
+    T0 = now(),
     CtRunTestResult = rpc:call(CTNode, ct, run_test, [Opts]),
-    test_server:format(Level, "~n[RUN #1] Got return value ~p~n",
-		       [CtRunTestResult]),
+    test_server:format(Level, "~n[RUN #1] Got return value ~p after ~p ms~n",
+		       [CtRunTestResult,trunc(timer:now_diff(now(), T0)/1000)]),
     case rpc:call(CTNode, erlang, whereis, [ct_util_server]) of
	 undefined ->
	     ok;
@@ -242,7 +277,11 @@ run(Opts, Config) when is_list(Opts) ->
	     timer:sleep(5000),
	     undefined = rpc:call(CTNode, erlang, whereis, [ct_util_server])
     end,
-    %% use run_test interface (simulated)
+    CtRunTestResult.
+
+run_ct_script_start(Opts, Config) ->
+    CTNode = proplists:get_value(ct_node, Config),
+    Level = proplists:get_value(trace_level, Config),
     Opts1 = [{halt_with,{?MODULE,ct_test_halt}} | Opts],
     test_server:format(Level, "Saving start opts on ~p: ~p~n",
		       [CTNode, Opts1]),
@@ -250,30 +289,42 @@ run(Opts, Config) when is_list(Opts) ->
		      [common_test, run_test_start_opts, Opts1]),
     test_server:format(Level, "[RUN #2] Calling ct_run:script_start() on ~p~n",
		       [CTNode]),
+    T0 = now(),
     ExitStatus = rpc:call(CTNode, ct_run, script_start, []),
-    test_server:format(Level, "[RUN #2] Got exit status value ~p~n",
-		       [ExitStatus]),
-    case {CtRunTestResult,ExitStatus} of
-	{{_Ok,Failed,{_UserSkipped,_AutoSkipped}},1} when Failed > 0 ->
-	    ok;
-	{{_Ok,0,{_UserSkipped,AutoSkipped}},ExitStatus} when AutoSkipped > 0 ->
-	    case proplists:get_value(exit_status, Opts1) of
-		ignore_config when ExitStatus == 1 ->
-		    {error,{wrong_exit_status,ExitStatus}};
-		_ ->
-		    ok
-	    end;
-	{{error,_}=Error,ExitStatus} ->
-	    if ExitStatus /= 2 ->
-		    {error,{wrong_exit_status,ExitStatus}};
-	       ExitStatus == 2 ->
-		    Error
-	    end;
-	{{_Ok,0,{_UserSkipped,_AutoSkipped}},0} ->
-	    ok;
-	Unexpected ->
-	    {error,{unexpected_return_value,Unexpected}}
-    end.
+    test_server:format(Level, "[RUN #2] Got exit status value ~p after ~p ms~n",
+		       [ExitStatus,trunc(timer:now_diff(now(), T0)/1000)]),
+    ExitStatus.
+
+check_result({_Ok,Failed,{_UserSkipped,_AutoSkipped}},1,_Opts)
+  when Failed > 0 ->
+    ok;
+check_result({_Ok,0,{_UserSkipped,AutoSkipped}},ExitStatus,Opts)
+  when AutoSkipped > 0 ->
+    case proplists:get_value(exit_status, Opts) of
+	ignore_config when ExitStatus == 1 ->
+	    {error,{wrong_exit_status,ExitStatus}};
+	_ ->
+	    ok
+    end;
+check_result({error,_}=Error,2,_Opts) ->
+    Error;
+check_result({error,_},ExitStatus,_Opts) ->
+    {error,{wrong_exit_status,ExitStatus}};
+check_result({_Ok,0,{_UserSkipped,_AutoSkipped}},0,_Opts) ->
+    ok;
+check_result(CtRunTestResult,ExitStatus,Opts)
+  when is_list(CtRunTestResult) -> % repeated testruns
+    try check_result(sum_testruns(CtRunTestResult,0,0,0,0),ExitStatus,Opts)
+    catch _:_ ->
+	    {error,{unexpected_return_value,{CtRunTestResult,ExitStatus}}}
+    end;
+check_result(CtRunTestResult,ExitStatus,_Opts) ->
+    {error,{unexpected_return_value,{CtRunTestResult,ExitStatus}}}.
+
+sum_testruns([{O,F,{US,AS}}|T],Ok,Failed,UserSkipped,AutoSkipped) ->
+    sum_testruns(T,Ok+O,Failed+F,UserSkipped+US,AutoSkipped+AS);
+sum_testruns([],Ok,Failed,UserSkipped,AutoSkipped) ->
+    {Ok,Failed,{UserSkipped,AutoSkipped}}.

 run(M, F, A, Config) ->
     run({M,F,A}, [], Config).
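The run/2 change in ct_test_support above reads a CT_TEST_OPTS environment variable of the form "[{key1,Val1},{key2,Val2}]" and lets its entries override the start options. A minimal standalone sketch of that parsing and override step, with hypothetical function names that are not part of the patch:

%% Parse a "[{Key,Value},...]" string from the environment into a
%% proplist; an unset or empty variable yields [].
parse_ct_test_opts() ->
    case os:getenv("CT_TEST_OPTS") of
        false -> [];
        ""    -> [];
        Str ->
            {ok, Tokens, _} = erl_scan:string(Str ++ ".", 0),
            {ok, Opts} = erl_parse:parse_term(Tokens),
            Opts
    end.

%% Each parsed {Key,Value} replaces any existing Key in the original
%% option list, mirroring the fold in ct_test_support:run/2.
override_opts(Overrides, Opts0) ->
    lists:foldl(fun({Key,_}=O, Os) ->
                        [O | lists:keydelete(Key, 1, Os)]
                end, Opts0, Overrides).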