path: root/lib/common_test/src/vts.erl
author      Lukas Larsson <[email protected]>    2011-10-04 10:16:09 +0200
committer   Lukas Larsson <[email protected]>    2011-10-04 10:16:09 +0200
commit      2d25f96f904bfa33c4bba4f3026f9fd2117ed8ab (patch)
tree        26acbda51fa2ac4dd5f809e400d05aa1021eba20 /lib/common_test/src/vts.erl
parent      622daf3b96666f8bbabec44a15b26a188a83b95e (diff)
parent      1a42cde6d62a8ec0f9e22215d3c3ec655201c78f (diff)
download    otp-2d25f96f904bfa33c4bba4f3026f9fd2117ed8ab.tar.gz
            otp-2d25f96f904bfa33c4bba4f3026f9fd2117ed8ab.tar.bz2
            otp-2d25f96f904bfa33c4bba4f3026f9fd2117ed8ab.zip
Merge branch 'dev' into major
* dev: (38 commits)
  Update documentation
  Rid ct_telnet of doc build warnings
  Create temporary fix for problem with parallel test cases
  Update primary bootstrap
  Correct "Missing Suites" link
  Add documentation on timetraps and start flags
  Add missing tests for timetrap handling and fix remaining errors
  Solve problem with ct_init/end_per_group being counted as test cases
  Fix errors in test suites
  Fix invalid call to undefined function
  Fix problem with test_server_ctrl creating invalid conf test
  Improve info in CT framework log
  Update vsn.mk for common_test and test_server
  Enhance logging performance
  Change order of include files
  Add link to last executed test suite on index page
  Fix problem with location value when init config func calls help func
  Fix crash when CTHook init fails
  Correct error in test suite
  Fix error with incorrect notification after end_per_testcase crashes
  ...

Conflicts:
    bootstrap/bin/start.boot
    bootstrap/bin/start_clean.boot
    bootstrap/lib/compiler/ebin/beam_asm.beam
    bootstrap/lib/compiler/ebin/beam_disasm.beam
    bootstrap/lib/compiler/ebin/compile.beam
    bootstrap/lib/compiler/ebin/sys_pre_expand.beam
    bootstrap/lib/kernel/ebin/code.beam
    bootstrap/lib/kernel/ebin/code_server.beam
    bootstrap/lib/kernel/ebin/hipe_unified_loader.beam
    bootstrap/lib/kernel/ebin/inet.beam
    bootstrap/lib/kernel/ebin/inet_config.beam
    bootstrap/lib/kernel/ebin/inet_dns.beam
    bootstrap/lib/stdlib/ebin/beam_lib.beam
    bootstrap/lib/stdlib/ebin/dets.beam
    bootstrap/lib/stdlib/ebin/erl_compile.beam
    bootstrap/lib/stdlib/ebin/erl_internal.beam
    bootstrap/lib/stdlib/ebin/erl_scan.beam
    bootstrap/lib/stdlib/ebin/erl_tar.beam
    bootstrap/lib/stdlib/ebin/io_lib_fread.beam
    bootstrap/lib/stdlib/ebin/otp_internal.beam
    bootstrap/lib/stdlib/ebin/sofs.beam
    bootstrap/lib/stdlib/ebin/supervisor.beam
    bootstrap/lib/stdlib/ebin/zip.beam
    lib/common_test/src/ct.erl
    lib/common_test/src/ct_run.erl
    lib/common_test/test/ct_error_SUITE.erl
    lib/common_test/test/ct_repeat_1_SUITE.erl
    lib/common_test/test/ct_skip_SUITE.erl
    lib/test_server/src/test_server.erl
Diffstat (limited to 'lib/common_test/src/vts.erl')
-rw-r--r--    lib/common_test/src/vts.erl    133
1 file changed, 83 insertions(+), 50 deletions(-)
diff --git a/lib/common_test/src/vts.erl b/lib/common_test/src/vts.erl
index f0bf090804..cc8a932887 100644
--- a/lib/common_test/src/vts.erl
+++ b/lib/common_test/src/vts.erl
@@ -20,7 +20,7 @@
-module(vts).
-export([start/0,
- init_data/4,
+ init_data/5,
stop/0,
report/2]).
@@ -32,6 +32,7 @@
menu_frame/2,
welcome_frame/2,
config_frame/2,
+ browse_config_file/2,
add_config_file/2,
remove_config_file/2,
run_frame/2,
@@ -56,7 +57,7 @@
-record(state,{tests=[],config=[],event_handler=[],test_runner,
running=0,reload_results=false,start_dir,current_log_dir,
- total=0,ok=0,fail=0,skip=0,testruns=[]}).
+ logopts=[],total=0,ok=0,fail=0,skip=0,testruns=[]}).
%%%-----------------------------------------------------------------
@@ -65,8 +66,8 @@ start() ->
webtool:start(),
webtool:start_tools([],"app=vts").
-init_data(ConfigFiles,EvHandlers,LogDir,Tests) ->
- call({init_data,ConfigFiles,EvHandlers,LogDir,Tests}).
+init_data(ConfigFiles,EvHandlers,LogDir,LogOpts,Tests) ->
+ call({init_data,ConfigFiles,EvHandlers,LogDir,LogOpts,Tests}).
stop() ->
webtool:stop_tools([],"app=vts"),
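
The arity bump from init_data/4 to init_data/5 above means callers now pass the common_test log options through to the web tool. A minimal sketch of what a call site could look like after this change; the concrete values (and the no_src option) are illustrative assumptions, not taken from this commit:

%% Illustrative only: invoking the new init_data/5. All argument values
%% below are made up; no_src is assumed to be a valid common_test log option.
init_vts_example() ->
    ConfigFiles = ["vts.cfg"],
    EvHandlers  = [],
    LogDir      = ".",
    LogOpts     = [no_src],
    Tests       = [],
    ok = vts:init_data(ConfigFiles, EvHandlers, LogDir, LogOpts, Tests).
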
@@ -119,6 +120,8 @@ menu_frame(_Env,_Input) ->
call(menu_frame).
config_frame(_Env,_Input) ->
call(config_frame).
+browse_config_file(_Env,Input) ->
+ call({browse_config_file,Input}).
add_config_file(_Env,Input) ->
call({add_config_file,Input}).
remove_config_file(_Env,Input) ->
@@ -160,10 +163,11 @@ init(Parent) ->
loop(State) ->
receive
- {{init_data,Config,EvHandlers,LogDir,Tests},From} ->
+ {{init_data,Config,EvHandlers,LogDir,LogOpts,Tests},From} ->
%% ct:pal("State#state.current_log_dir=~p", [State#state.current_log_dir]),
NewState = State#state{config=Config,event_handler=EvHandlers,
- current_log_dir=LogDir,tests=Tests},
+ current_log_dir=LogDir,
+ logopts=LogOpts,tests=Tests},
ct_install(NewState),
return(From,ok),
loop(NewState);
@@ -182,6 +186,9 @@ loop(State) ->
{config_frame,From} ->
return(From,config_frame1(State)),
loop(State);
+ {{browse_config_file,_Input},From} ->
+ return(From,ok),
+ loop(State);
{{add_config_file,Input},From} ->
{Return,State1} = add_config_file1(Input,State),
ct_install(State1),
@@ -241,10 +248,12 @@ loop(State) ->
return(From,ok);
{'EXIT',Pid,Reason} ->
case State#state.test_runner of
- Pid -> io:format("ERROR: test runner crashed: ~p\n",[Reason]);
- _ -> ignore
- end,
- loop(State);
+ Pid ->
+ io:format("Test run error: ~p\n",[Reason]),
+ loop(State);
+ _ ->
+ loop(State)
+ end;
{{test_info,_Type,_Data},From} ->
return(From,ok),
loop(State)
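
The restructured 'EXIT' clause above logs only when the known test-runner pid dies and keeps the server loop alive in every case. A standalone sketch of the same pattern, assuming the process traps exits as the vts server does (the function name is made up):

%% Illustration of the pattern above: match the bound RunnerPid to decide
%% whether to log, but keep looping either way.
watch_runner(RunnerPid) ->
    receive
        {'EXIT', RunnerPid, Reason} ->
            io:format("Test run error: ~p\n", [Reason]),
            watch_runner(RunnerPid);
        {'EXIT', _OtherPid, _Reason} ->
            watch_runner(RunnerPid)
    end.
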
@@ -270,10 +279,11 @@ return({To,Ref},Result) ->
To ! {Ref, Result}.
-run_test1(State=#state{tests=Tests,current_log_dir=LogDir}) ->
+run_test1(State=#state{tests=Tests,current_log_dir=LogDir,
+ logopts=LogOpts}) ->
Self=self(),
RunTest = fun() ->
- case ct_run:do_run(Tests,[],LogDir) of
+ case ct_run:do_run(Tests,[],LogDir,LogOpts) of
{error,_Reason} ->
aborted();
_ ->
@@ -282,17 +292,18 @@ run_test1(State=#state{tests=Tests,current_log_dir=LogDir}) ->
unlink(Self)
end,
Pid = spawn_link(RunTest),
- Total =
+ {Total,Tests1} =
receive
{{test_info,start_info,{_,_,Cases}},From} ->
return(From,ok),
- Cases;
+ {Cases,Tests};
EXIT = {'EXIT',_,_} ->
- self() ! EXIT
+ self() ! EXIT,
+ {0,[]}
after 30000 ->
- 0
+ {0,[]}
end,
- State#state{test_runner=Pid,running=length(Tests),
+ State#state{test_runner=Pid,running=length(Tests1),
total=Total,ok=0,fail=0,skip=0,testruns=[]}.
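
With this change the receive always evaluates to a {Total,Tests1} pair, so an early 'EXIT' or the 30-second timeout leaves the counters at zero instead of binding Total to a stray value. A condensed, self-contained sketch of that construct, assuming From is a plain pid (vts itself replies through return/2):

%% Condensed illustration of the receive above: always yield {Total, Tests},
%% defaulting to {0, []} on an early exit or a 30 s timeout.
await_start_info(Tests) ->
    receive
        {{test_info, start_info, {_, _, Cases}}, From} ->
            From ! ok,                 %% simplified reply; vts uses return/2
            {Cases, Tests};
        Exit = {'EXIT', _, _} ->
            self() ! Exit,             %% requeue so the main loop handles it
            {0, []}
    after 30000 ->
            {0, []}
    end.
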
@@ -356,22 +367,32 @@ config_frame1(State) ->
config_body(State) ->
Entry = [input("TYPE=file NAME=browse SIZE=40"),
input("TYPE=hidden NAME=file")],
+ BrowseForm =
+ form(
+ "NAME=read_file_form METHOD=post ACTION=\"./browse_config_file\"",
+ table(
+ "BORDER=0",
+ [tr(td("1. Locate config file")),
+ tr(td(Entry))])),
AddForm =
form(
- "NAME=read_file_form METHOD=post ACTION=\"./add_config_file\"",
+ "NAME=add_file_form METHOD=post ACTION=\"./add_config_file\"",
table(
"BORDER=0",
- [tr(
- [td(Entry),
+ [tr(td("2. Paste full config file name here")),
+ tr(
+ [td(input("TYPE=text NAME=file SIZE=40")),
td("ALIGN=center",
input("TYPE=submit onClick=\"file.value=browse.value;\""
" VALUE=\"Add\""))])])),
+
{Text,RemoveForm} =
case State#state.config of
[] ->
- T = "To be able to run any tests, one or more configuration "
- "files must be added. Enter the name of the configuration "
- "file below and click the \"Add\" button.",
+ T = "Before running the tests, one or more configuration "
+ "files may be added. Locate the config file, copy its "
+ "full name, paste this into the text field below, then "
+ "click the \"Add\" button.",
R = "",
{T,R};
Files ->
@@ -394,20 +415,24 @@ config_body(State) ->
input("TYPE=submit VALUE=\"Remove\"")))])),
{T,R}
end,
-
+
[h1("ALIGN=center","Config"),
table(
- "WIDTH=600 ALIGN=center CELLPADDING=5",
+ "WIDTH=450 ALIGN=center CELLPADDING=5",
[tr(td(["BGCOLOR=",?INFO_BG_COLOR],Text)),
- tr(td("ALIGN=center",AddForm)),
- tr(td("ALIGN=center",RemoveForm))])].
-
+ tr(td("")),
+ tr(td("")),
+ tr(td("ALIGN=left",BrowseForm)),
+ tr(td("ALIGN=left",AddForm)),
+ tr(td("ALIGN=left",RemoveForm))])].
add_config_file1(Input,State) ->
State1 =
case get_input_data(Input,"file") of
- "" -> State;
- File -> State#state{config=[File|State#state.config]}
+ "" ->
+ State;
+ File ->
+ State#state{config=[File|State#state.config]}
end,
Return = config_frame1(State1),
{Return,State1}.
@@ -427,10 +452,17 @@ run_body(#state{running=Running}) when Running>0 ->
[h1("ALIGN=center","Run Test"),
p(["Test are ongoing: ",href("./result_frameset","Results")])];
run_body(State) ->
- ConfigList = ul([li(File) || File <- State#state.config]),
+ ConfigList =
+ case State#state.config of
+ [] ->
+ ul(["none"]);
+ CfgFiles ->
+ ul([li(File) || File <- CfgFiles])
+ end,
ConfigFiles = [h3("Config Files"),
ConfigList],
-
+ {ok,CWD} = file:get_cwd(),
+ CurrWD = [h3("Current Working Directory"), ul(CWD)],
AddDirForm =
form(
"NAME=add_dir_form METHOD=post ACTION=\"./add_test_dir\"",
@@ -442,7 +474,6 @@ run_body(State) ->
td("ALIGN=center",
input("TYPE=submit onClick=\"dir.value=browse.value;\""
" VALUE=\"Add Test Dir\""))])])),
-
{LoadedTestsTable,Submit} =
case create_testdir_entries(State#state.tests,1) of
[] -> {"",""};
@@ -454,22 +485,20 @@ run_body(State) ->
{table("CELLPADDING=5",[Heading,TestDirs]),
submit_button()}
end,
-
- %% It should be ok to have no config-file!
Body =
- %% case State#state.config of %% [] -> %% p("ALIGN=center",
- %% href("./config_frame","Please select one or
- %% more config files")); %% _ ->
table(
- "WIDTH=100%",
- [tr(td(ConfigFiles)),
+ "WIDTH=450 ALIGN=center",
+ [tr(td("")),
+ tr(td("")),
+ tr(td(ConfigFiles)),
+ tr(td("")),
+ tr(td(CurrWD)),
tr(td("")),
tr(td(AddDirForm)),
tr(td("")),
tr(td(LoadedTestsTable)),
- tr(td(Submit))]),
- %% end,
-
+ tr(td(Submit))
+ ]),
[h1("ALIGN=center","Run Test"), Body].
create_testdir_entries([{Dir,Suite,Case}|Tests],N) ->
@@ -556,18 +585,17 @@ options([Element|Elements],Selected,N,Func) ->
options([],_Selected,_N,_Func) ->
[].
-add_test_dir1(Input,State) ->
+add_test_dir1(Input, State) ->
State1 =
case get_input_data(Input,"dir") of
"" -> State;
Dir0 ->
Dir = case ct_util:is_test_dir(Dir0) of
- true ->
- Dir0;
- false -> filename:join(Dir0,"test")
+ true -> Dir0;
+ false -> ct_util:get_testdir(Dir0, all)
end,
case filelib:is_dir(Dir) of
- true ->
+ true ->
Test = ct_run:tests(Dir),
State#state{tests=State#state.tests++Test};
false ->
@@ -577,8 +605,6 @@ add_test_dir1(Input,State) ->
Return = run_frame1(State1),
{Return,State1}.
-
-
remove_test_dir1(Input,State) ->
N = list_to_integer(get_input_data(Input,"dir")),
State1 = State#state{tests=delete_test(N,State#state.tests)},
@@ -641,6 +667,9 @@ result_frameset2(State) ->
"./redirect_to_result_log_frame";
{_Dir,0} ->
filename:join(["/log_dir","index.html"]);
+ {_Dir,_} when State#state.testruns == [] ->
+ %% crash before first test
+ "./no_result_log_frame";
{_Dir,_} ->
{_,CurrentLog} = hd(State#state.testruns),
CurrentLog
@@ -749,6 +778,8 @@ report1(tc_done,{_Suite,_Case,{skipped,_Reason}},State) ->
State#state{skip=State#state.skip+1};
report1(tc_user_skip,{_Suite,_Case,_Reason},State) ->
State#state{skip=State#state.skip+1};
+report1(tc_auto_skip,{_Suite,_Case,_Reason},State) ->
+ State#state{skip=State#state.skip+1};
report1(loginfo,_,State) ->
State.
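
The new tc_auto_skip clause counts auto-skipped cases (for instance, cases skipped after a failed init_per_suite) in the same skip counter as user skips. A hypothetical check, assuming it sits inside vts.erl where report1/3 and the #state record are visible; the event data shape is modeled on the surrounding clauses:

%% Hypothetical: an auto-skip event bumps the same skip counter as a user
%% skip. The suite, case and reason terms are made up for illustration.
count_auto_skip_example(State0) ->
    State1 = report1(tc_auto_skip,
                     {some_SUITE, some_case,
                      {failed, {some_SUITE, init_per_suite}}},
                     State0),
    State1#state.skip =:= State0#state.skip + 1.
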
@@ -850,6 +881,8 @@ h2(Text) ->
["<H2>",Text,"</H2>\n"].
h3(Text) ->
["<H3>",Text,"</H3>\n"].
+%%h4(Text) ->
+%% ["<H4>",Text,"</H4>\n"].
font(Args,Text) ->
["<FONT ",Args,">\n",Text,"\n</FONT>\n"].
p(Text) ->