Diffstat (limited to 'lib/common_test')
105 files changed, 7645 insertions, 1411 deletions
diff --git a/lib/common_test/doc/src/common_test_app.xml b/lib/common_test/doc/src/common_test_app.xml index b6d4a633cb..151159ad69 100644 --- a/lib/common_test/doc/src/common_test_app.xml +++ b/lib/common_test/doc/src/common_test_app.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>2003</year><year>2012</year> + <year>2003</year><year>2013</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -170,7 +170,9 @@ <v> UserData = term()</v> <v> Conns = [atom()]</v> <v> CSSFile = string()</v> - <v> CTHs = [CTHModule | {CTHModule, CTHInitArgs} | {CTHModule, CTHInitArgs, CTHPriority}]</v> + <v> CTHs = [CTHModule |</v> + <v> {CTHModule, CTHInitArgs} |</v> + <v> {CTHModule, CTHInitArgs, CTHPriority}]</v> <v> CTHModule = atom()</v> <v> CTHInitArgs = term()</v> </type> @@ -297,8 +299,9 @@ <v> UserData = term()</v> <v> Conns = [atom()]</v> <v> CSSFile = string()</v> - <v> CTHs = [CTHModule | {CTHModule, CTHInitArgs} | - {CTHModule, CTHInitArgs, CTHPriority}]</v> + <v> CTHs = [CTHModule |</v> + <v> {CTHModule, CTHInitArgs} |</v> + <v> {CTHModule, CTHInitArgs, CTHPriority}]</v> <v> CTHModule = atom()</v> <v> CTHInitArgs = term()</v> </type> diff --git a/lib/common_test/doc/src/cover_chapter.xml b/lib/common_test/doc/src/cover_chapter.xml index 803a71de07..4fa92d5583 100644 --- a/lib/common_test/doc/src/cover_chapter.xml +++ b/lib/common_test/doc/src/cover_chapter.xml @@ -108,6 +108,33 @@ specifications</seealso>).</p> </section> + <marker id="cover_stop"></marker> + <section> + <title>Stopping the cover tool when tests are completed</title> + <p>By default the Cover tool is automatically stopped when the + tests are completed. This causes the original (non cover + compiled) modules to be loaded back in to the test node. If a + process at this point is still running old code of any of the + modules that are cover compiled, meaning that it has not done + any fully qualified function call after the cover compilation, + the process will now be killed. To avoid this it is possible to + set the value of the <c>cover_stop</c> option to + <c>false</c>. This means that the modules will stay cover + compiled, and it is therefore only recommended if the erlang + node(s) under test is terminated after the test is completed + or if cover can be manually stopped.</p> + + <p>The option can be set by using the <c>-cover_stop</c> flag with + <c>ct_run</c>, by adding <c>{cover_stop,true|false}</c> to the + Opts argument to <c><seealso + marker="ct#run_test-1">ct:run_test/1</seealso></c>, or by adding + a <c>cover_stop</c> term in your test specification (see chapter + about <seealso + marker="run_test_chapter#test_specifications">test + specifications</seealso>).</p> + + </section> + <section> <title>The cover specification file</title> <p>These are the terms allowed in a cover specification file:</p> @@ -148,6 +175,11 @@ %% Specific modules to exclude in cover. {excl_mods, Mods}. + + %% Cross cover compilation + %% Tag = atom(), an identifier for a test run + %% Mod = [atom()], modules to compile for accumulated analysis + {cross,[{Tag,Mods}]}. </pre> <p>The <c>incl_dirs_r</c> and <c>excl_dirs_r</c> terms tell Common @@ -163,6 +195,81 @@ specification file for Common Test).</p> </section> + <marker id="cross_cover"/> + <section> + <title>Cross cover analysis</title> + <p>The cross cover mechanism allows cover analysis of modules + across multiple tests. It is useful if some code, e.g. 
a library + module, is used by many different tests and the accumulated cover + result is desirable.</p> + + <p>This can of course also be achieved in a more customized way by + using the <c>export</c> parameter in the cover specification and + analysing the result off line, but the cross cover mechanism is a + build in solution which also provides the logging.</p> + + <p>The mechanism is easiest explained via an example:</p> + + <p>Let's say that there are two systems, <c>s1</c> and <c>s2</c>, + which are tested in separate test runs. System <c>s1</c> contains + a library module <c>m1</c> which is tested by the <c>s1</c> test + run and is included in <c>s1</c>'s cover specification:</p> + +<code type="none"> +s1.cover: + {incl_mods,[m1]}.</code> + + <p>When analysing code coverage, the result for <c>m1</c> can be + seen in the cover log in the <c>s1</c> test result.</p> + + <p>Now, let's imagine that since <c>m1</c> is a library module, it + is also used quite a bit by system <c>s2</c>. The <c>s2</c> test + run does not specifically test <c>m1</c>, but it might still be + interesting to see which parts of <c>m1</c> is actually covered by + the <c>s2</c> tests. To do this, <c>m1</c> could be included also + in <c>s2</c>'s cover specification:</p> + +<code type="none"> +s2.cover: + {incl_mods,[m1]}.</code> + + <p>This would give an entry for <c>m1</c> also in the cover log + for the <c>s2</c> test run. The problem is that this would only + reflect the coverage by <c>s2</c> tests, not the accumulated + result over <c>s1</c> and <c>s2</c>. And this is where the cross + cover mechanism comes in handy.</p> + + <p>If instead the cover specification for <c>s2</c> was like + this:</p> + +<code type="none"> +s2.cover: + {cross,[{s1,[m1]}]}.</code> + + <p>then <c>m1</c> would be cover compiled in the <c>s2</c> test + run, but not shown in the coverage log. Instead, if + <c>ct_cover:cross_cover_analyse/2</c> is called after both + <c>s1</c> and <c>s2</c> test runs are completed, the accumulated + result for <c>m1</c> would be available in the cross cover log for + the <c>s1</c> test run.</p> + + <p>The call to the analyse function must be like this:</p> + +<code type="none"> +ct_cover:cross_cover_analyse(Level, [{s1,S1LogDir},{s2,S2LogDir}]).</code> + + <p>where <c>S1LogDir</c> and <c>S2LogDir</c> are the directories + named <c><TestName>.logs</c> for each test respectively.</p> + + <p>Note the tags <c>s1</c> and <c>s2</c> which are used in the + cover specification file and in the call to + <c>ct_cover:cross_cover_analyse/2</c>. The point of these are only + to map the modules specified in the cover specification to the log + directory specified in the call to the analyse function. The name + of the tag has no meaning beyond this.</p> + + </section> + <section> <title>Logging</title> <p>To view the result of a code coverage test, follow the @@ -170,6 +277,11 @@ takes you to the code coverage overview page. 
If you have successfully performed a detailed coverage analysis, you find links to each individual module coverage page here.</p> + + <p>If cross cover analysis has been performed, and there are + accumulated coverage results for the current test, then the - + "Coverdata collected over all tests" link will take you to these + results.</p> </section> </chapter> diff --git a/lib/common_test/doc/src/ct_hooks_chapter.xml b/lib/common_test/doc/src/ct_hooks_chapter.xml index 86237f5fc1..fe871eb516 100644 --- a/lib/common_test/doc/src/ct_hooks_chapter.xml +++ b/lib/common_test/doc/src/ct_hooks_chapter.xml @@ -4,7 +4,7 @@ <chapter> <header> <copyright> - <year>2011</year><year>2012</year> + <year>2011</year><year>2013</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -439,14 +439,14 @@ terminate(State) -> <table> <row> - <cell><em>CTH Name</em></cell> - <cell><em>Is Built-in</em></cell> - <cell><em>Description</em></cell> + <cell align="left"><em>CTH Name</em></cell> + <cell align="left"><em>Is Built-in</em></cell> + <cell align="left"><em>Description</em></cell> </row> <row> - <cell>cth_log_redirect</cell> - <cell>yes</cell> - <cell>Captures all error_logger and SASL logging events and prints them + <cell align="left">cth_log_redirect</cell> + <cell align="left">yes</cell> + <cell align="left">Captures all error_logger and SASL logging events and prints them to the current test case log. If an event can not be associated with a testcase it will be printed in the common test framework log. This will happen for testcases which are run in parallel and events which occur @@ -455,14 +455,29 @@ terminate(State) -> using the normal SASL mechanisms. </cell> </row> <row> - <cell>cth_surefire</cell> - <cell>no</cell> - <cell>Captures all test results and outputs them as surefire XML into - a file. The file which is created is by default called junit_report.xml. - The name can be by setting the path option for this hook. e.g. + <cell align="left">cth_surefire</cell> + <cell align="left">no</cell> + <cell align="left"><p>Captures all test results and outputs them as surefire + XML into a file. The file which is created is by default + called junit_report.xml. The file name can be changed by + setting the <c>path</c> option for this hook, e.g.</p> + <code>-ct_hooks cth_surefire [{path,"/tmp/report.xml"}]</code> - Surefire XML can forinstance be used by Jenkins to display test - results.</cell> + + <p>If the <c>url_base</c> option is set, an additional + attribute named <c>url</c> will be added to each + <c>testsuite</c> and <c>testcase</c> XML element. The value will + be constructed from the <c>url_base</c> and a relative path + to the test suite or test case log respectively, e.g.</p> + + <code>-ct_hooks cth_surefire [{url_base, "http://myserver.com/"}]</code> + <p>will give a url attribute value similar to</p> + + <code>"http://myserver.com/[email protected]_11.19.39/ +x86_64-unknown-linux-gnu.my_test.logs/run.2012-12-12_11.19.39/suite.log.html"</code> + + <p>Surefire XML can for instance be used by Jenkins to display test + results.</p></cell> </row> </table> diff --git a/lib/common_test/doc/src/ct_run.xml b/lib/common_test/doc/src/ct_run.xml index c6749d6960..198290c1be 100644 --- a/lib/common_test/doc/src/ct_run.xml +++ b/lib/common_test/doc/src/ct_run.xml @@ -104,6 +104,7 @@ [-silent_connections [ConnType1 ConnType2 .. ConnTypeN]] [-stylesheet CSSFile] [-cover CoverCfgFile] + [-cover_stop Bool] [-event_handler EvHandler1 EvHandler2 .. 
EvHandlerN] | [-event_handler_init EvHandler1 InitArg1 and EvHandler2 InitArg2 and .. EvHandlerN InitArgN] @@ -125,6 +126,7 @@ <title>Run tests using test specification</title> <pre> ct_run -spec TestSpec1 TestSpec2 .. TestSpecN + [-join_specs] [-config ConfigFile1 ConfigFile2 .. ConfigFileN] [-userconfig CallbackModule1 ConfigString1 and CallbackModule2 ConfigString2 and .. and CallbackModuleN ConfigStringN] @@ -138,6 +140,7 @@ [-silent_connections [ConnType1 ConnType2 .. ConnTypeN]] [-stylesheet CSSFile] [-cover CoverCfgFile] + [-cover_stop Bool] [-event_handler EvHandler1 EvHandler2 .. EvHandlerN] | [-event_handler_init EvHandler1 InitArg1 and EvHandler2 InitArg2 and .. EvHandlerN InitArgN] diff --git a/lib/common_test/doc/src/notes.xml b/lib/common_test/doc/src/notes.xml index 7e33b71de1..8cbcbad8b2 100644 --- a/lib/common_test/doc/src/notes.xml +++ b/lib/common_test/doc/src/notes.xml @@ -4,7 +4,7 @@ <chapter> <header> <copyright> - <year>2004</year><year>2012</year> + <year>2004</year><year>2013</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -32,6 +32,282 @@ <file>notes.xml</file> </header> +<section><title>Common_Test 1.7</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Severe errors detected by <c>test_server</c> (e.g. if log + files directories cannot be created) will now be reported + to <c>common_test</c> and noted in the <c>common_test</c> + logs.</p> + <p> + Own Id: OTP-9769 Aux Id: kunagi-202 [113] </p> + </item> + <item> + <p> + The earlier undocumented cross cover feature for + accumulating cover data over multiple tests has now been + fixed and documented.</p> + <p> + Own Id: OTP-9870 Aux Id: kunagi-206 [117] </p> + </item> + <item> + <p> + If a busy test case generated lots of error messages, + cth_log_redirect:post_end_per_testcase would crash with a + timeout while waiting for the error logger to finish + handling all error reports. The default timer was 5 + seconds. This has now been extended to 5 minutes.</p> + <p> + Own Id: OTP-10040 Aux Id: kunagi-173 [84] </p> + </item> + <item> + <p>When a test case failed because of a timetrap time + out, the <c>Config</c> data for the case was lost in the + following call to <c>end_per_testcase/2</c>, and also in + calls to the CT Hook function + <c>post_end_per_testcase/4</c>. This problem has been + solved and the <c>Config</c> data is now correctly passed + to the above functions after a timetrap timeout + failure.</p> + <p> + Own Id: OTP-10070 Aux Id: kunagi-175 [86] </p> + </item> + <item> + <p> + Some calls to deprecated and removed functions in snmp + are removed from ct_snmp.</p> + <p> + Own Id: OTP-10088 Aux Id: kunagi-176 [87] </p> + </item> + <item> + <p>In test_server, the same process would supervise the + currently running test case and be group leader (and IO + server) for the test case. Furthermore, when running + parallel test cases, new temporary supervisor/group + leader processes were spawned and the process that was + group leader for sequential test cases would not be + active. That would lead to several problems:</p> + <p>* Processes started by init_per_suite will inherit the + group leader of the init_per_suite process (and that + group leader would not process IO requests when parallel + test cases was running). 
If later a parallel test case + caused such a processto print using (for example) + io:format/2, the calling would hang.</p> + <p>* Similarly, if a process was spawned from a parallel + test case, it would inherit the temporary group leader + for that parallel test case. If that spawned process + later - when the group of parallel tests have finished - + attempted to print something, its group leader would be + dead and there would be <c>badarg</c> exception.</p> + <p>Those problems have been solved by having group + leaders separate from the processes that supervises the + test cases, and keeping temporary group leader process + for parallel test cases alive until no more process in + the system use them as group leaders.</p> + <p>Also, a new <c>unexpected_io.log</c> log file + (reachable from the summary page of each test suite) has + been introduced. All unexpected IO will be printed into + it(for example, IO to a group leader for a parallel test + case that has finished).</p> + <p> + Own Id: OTP-10101 Aux Id: OTP-10125 </p> + </item> + <item> + <p> + Some bugfixes in <c>ct_snmp:</c></p> + <p> + <list> <item> ct_snmp will now use the value of the + 'agent_vsns' config variable when setting the 'variables' + parameter to snmp application agent configuration. + Earlier this had to be done separately - i.e. the + supported versions had to be specified twice. </item> + <item> Snmp application failed to write notify.conf since + ct_snmp gave the notify type as a string instead of an + atom. This has been corrected. </item> </list></p> + <p> + Own Id: OTP-10432</p> + </item> + <item> + <p> + Some bugfixes in <c>ct_snmp</c>:</p> + <p> + <list> <item> Functions <c>register_users/2</c>, + <c>register_agents/2</c> and <c>register_usm_users/2</c>, + and the corresponding <c>unregister_*/1</c> functions + were not executable. These are corrected/rewritten. + </item> <item> Function <c>update_usm_users/2</c> is + removed, and an unregister function is added instead. + Update can now be done with unregister_usm_users and then + register_usm_users. </item> <item> Functions + <c>unregister_*/2</c> are added, so specific + users/agents/usm users can be unregistered. </item> + <item> Function <c>unload_mibs/1</c> is added for + completeness. </item> <item> Overriding configuration + files did not work, since the files were written in + priv_dir instead of in the configuration dir + (priv_dir/conf). This has been corrected. </item> <item> + Arguments to <c>register_usm_users/2</c> were faulty + documented. This has been corrected. </item> </list></p> + <p> + Own Id: OTP-10434 Aux Id: kunagi-264 [175] </p> + </item> + <item> + <p> + Faulty exported specs in common test has been corrected + to <c>ct_netconfc:hook_options/0</c> and + <c>inet:hostname/0</c></p> + <p> + Own Id: OTP-10601</p> + </item> + <item> + <p> + The netconf client in common_test did not adjust the + window after receiving data. Due to this, the client + stopped receiving data after a while. This has been + corrected.</p> + <p> + Own Id: OTP-10646</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p>It is now possible to let a test specification include + other test specifications. Included specs can either be + joined with the source spec (and all other joined specs), + resulting in one single test run, or they can be executed + in separate test runs. 
Also, a start flag/option, + <c>join_specs</c>, has been introduced, to be used in + combination with the <c>spec</c> option. With + <c>join_specs</c>, Common Test can be told to either join + multiple test specifications, or run them separately. + Without <c>join_specs</c>, the latter behaviour is + default. Note that this is a change compared to earlier + versions of Common Test, where specifications could only + be joined. More information can be found in the Running + Tests chapter in the User's Guide (see the Test + Specifications section).</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-9881 Aux Id: kunagi-350 [261] </p> + </item> + <item> + <p> + The <c>ct_slave:start/3</c> function now supports an + <c>{env,[{Var,Value}]}</c> option to extend environment + for the slave node.</p> + <p> + Own Id: OTP-10469 Aux Id: kunagi-317 [228] </p> + </item> + <item> + <p> Some examples overflowing the width of PDF pages have + been corrected. </p> + <p> + Own Id: OTP-10665</p> + </item> + <item> + <p> + Update common test modules to handle unicode <list> + <item> Use UTF-8 encoding for all HTML files, except the + HTML version of the test suite generated with + erl2html2:convert, which will have the same encoding as + the original test suite (.erl) file. </item> <item> + Encode link targets in HTML files with + test_server_ctrl:uri_encode/1. </item> <item> Use unicode + modifier 't' with ~s when appropriate. </item> <item> Use + unicode:characters_to_list and + unicode:characters_to_binary for conversion between + binaries and strings instead of binary_to_list and + list_to_binary. </item> </list></p> + <p> + Own Id: OTP-10783</p> + </item> + </list> + </section> + + + <section><title>Known Bugs and Problems</title> + <list> + <item> + <p> + CT drops error reason when groups/0 crashes.</p> + <p> + Own Id: OTP-10631 Aux Id: kunagi-345 [256] </p> + </item> + <item> + <p> + Event handler on a ct_master node causes hanging.</p> + <p> + Own Id: OTP-10634 Aux Id: kunagi-347 [258] </p> + </item> + <item> + <p> + CT fails to open telnet conn after a timetrap timeout.</p> + <p> + Own Id: OTP-10648 Aux Id: seq12212 </p> + </item> + </list> + </section> + +</section> + +<section><title>Common_Test 1.6.3.1</title> + + <section><title>Known Bugs and Problems</title> + <list> + <item> + <p> + The following corrections/changes are done in the + cth_surefire hook:</p> + <p> + <list> <item> Earlier there would always be a + 'properties' element under the 'testsuites' element. This + would exist even if there were no 'property' element + inside it. This has been changed so if there are no + 'property' elements to display, then there will not be a + 'properties' element either. </item> <item> The XML file + will now (unless other is specified) be stored in the top + log directory. Earlier, the default directory would be + the current working directory for the erlang node, which + would mostly, but not always, be the top log directory. + </item> <item> The 'hostname' attribute in the + 'testsuite' element would earlier never have the correct + value. This has been corrected. </item> <item> The + 'errors' attribute in the 'testsuite' element would + earlier display the number of failed testcases. This has + been changed and will now always have the value 0, while + the 'failures' attribute will show the number of failed + testcases. </item> <item> A new attribute 'skipped' is + added to the 'testsuite' element. This will display the + number of skipped testcases. 
These would earlier be + included in the number of failed test cases. </item> + <item> The total number of tests displayed by the 'tests' + attribute in the 'testsuite' element would earlier + include init/end_per_suite and init/end_per_group. This + is no longer the case. The 'tests' attribute will now + only count "real" test cases. </item> <item> Earlier, + auto skipped test cases would have no value in the 'log' + attribute. This is now corrected. </item> <item> A new + attributes 'log' is added to the 'testsuite' element. + </item> <item> A new option named 'url_base' is added for + this hook. If this option is used, a new attribute named + 'url' will be added to the 'testcase' and 'testsuite' + elements. </item> </list></p> + <p> + Own Id: OTP-10589</p> + </item> + </list> + </section> + +</section> + <section><title>Common_Test 1.6.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml index a0b2c96006..35f89153d3 100644 --- a/lib/common_test/doc/src/run_test_chapter.xml +++ b/lib/common_test/doc/src/run_test_chapter.xml @@ -4,7 +4,7 @@ <chapter> <header> <copyright> - <year>2003</year><year>2012</year> + <year>2003</year><year>2013</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -155,6 +155,8 @@ <item><c><![CDATA[-stylesheet <css_file>]]></c>, points out a user HTML style sheet (see below).</item> <item><c><![CDATA[-cover <cover_cfg_file>]]></c>, to perform code coverage test (see <seealso marker="cover_chapter#cover">Code Coverage Analysis</seealso>).</item> + <item><c><![CDATA[-cover_stop <bool>]]></c>, to specify if the cover tool shall be stopped after the test is completed (see + <seealso marker="cover_chapter#cover_stop">Code Coverage Analysis</seealso>).</item> <item><c><![CDATA[-event_handler <event_handlers>]]></c>, to install <seealso marker="event_handler_chapter#event_handling">event handlers</seealso>.</item> <item><c><![CDATA[-event_handler_init <event_handlers>]]></c>, to install @@ -528,369 +530,469 @@ <marker id="test_specifications"></marker> <section> <title>Test Specifications</title> - - <p>The most flexible way to specify what to test, is to use a so - called test specification. A test specification is a sequence of - Erlang terms. The terms are normally declared in a text file (see - <c><seealso marker="ct#run_test-1">ct:run_test/1</seealso></c>), but - may also be passed to Common Test on the form of a list (see - <c><seealso marker="ct#run_testspec-1">ct:run_testspec/1</seealso></c>). - There are two general types of terms: configuration terms and test - specification terms.</p> - <p>With configuration terms it is possible to e.g. 
label the test - run (similar to <c>ct_run -label</c>), evaluate arbitrary expressions - before starting the test, import configuration data (similar to - <c>ct_run -config/-userconfig</c>), specify the top level HTML log - directory (similar to <c>ct_run -logdir</c>), enable code coverage - analysis (similar to <c>ct_run -cover</c>), install Common Test Hooks - (similar to <c>ct_run -ch_hooks</c>), install event_handler plugins - (similar to <c>ct_run -event_handler</c>), specify include directories - that should be passed to the compiler for automatic compilation - (similar to <c>ct_run -include</c>), disable the auto compilation - feature (similar to <c>ct_run -no_auto_compile</c>), set verbosity - levels (similar to <c>ct_run -verbosity</c>), and more.</p> - <p>Configuration terms can be combined with <c>ct_run</c> start flags, - or <c>ct:run_test/1</c> options. The result will for some flags/options - and terms be that the values are merged (e.g. configuration files, - include directories, verbosity levels, silent connections), and for - others that the start flags/options override the test specification - terms (e.g. log directory, label, style sheet, auto compilation).</p> - <p>With test specification terms it is possible to state exactly - which tests should run and in which order. A test term specifies - either one or more suites, one or more test case groups (possibly nested), - or one or more test cases in a group (or in multiple groups) or in a suite.</p> - <p>An arbitrary number of test terms may be declared in sequence. - Common Test will by default compile the terms into one or more tests - to be performed in one resulting test run. Note that a term that - specifies a set of test cases will "swallow" one that only - specifies a subset of these cases. E.g. the result of merging - one term that specifies that all cases in suite S should be - executed, with another term specifying only test case X and Y in - S, is a test of all cases in S. However, if a term specifying - test case X and Y in S is merged with a term specifying case Z - in S, the result is a test of X, Y and Z in S. To disable this - behaviour, i.e. to instead perform each test sequentially in a "script-like" - manner, the term <c>merge_tests</c> can be set to <c>false</c> in - the test specification.</p> - <p>A test term can also specify one or more test suites, groups, - or test cases to be skipped. Skipped suites, groups and cases - are not executed and show up in the HTML log files as - SKIPPED.</p> - <p>When a test case group is specified, the resulting test - executes the <c>init_per_group</c> function, followed by all test - cases and sub groups (including their configuration functions), and - finally the <c>end_per_group</c> function. Also if particular - test cases in a group are specified, <c>init_per_group</c> - and <c>end_per_group</c> for the group in question are - called. 
If a group which is defined (in <c>Suite:group/0</c>) to - be a sub group of another group, is specified (or if particular test - cases of a sub group are), Common Test will call the configuration - functions for the top level groups as well as for the sub group - in question (making it possible to pass configuration data all - the way from <c>init_per_suite</c> down to the test cases in the - sub group).</p> - <p>The test specification utilizes the same mechanism for specifying - test case groups by means of names and paths, as explained in the - <seealso marker="run_test_chapter#group_execution">Group Execution</seealso> - section above, with the addition of the <c>GroupSpec</c> element - described next.</p> - <p>The <c>GroupSpec</c> element makes it possible to specify - group execution properties that will override those in the - group definition (i.e. in <c>groups/0</c>). Execution properties for - sub-groups may be overridden as well. This feature makes it possible to - change properties of groups at the time of execution, - without even having to edit the test suite. The very same - feature is available for <c>group</c> elements in the <c>Suite:all/0</c> - list. Therefore, more detailed documentation, and examples, can be - found in the <seealso marker="write_test_chapter#test_case_groups"> - Test case groups</seealso> chapter.</p> - - <p>Below is the test specification syntax. Test specifications can - be used to run tests both in a single test host environment and - in a distributed Common Test environment (Large Scale - Testing). The node parameters in the <c>init</c> term are only - relevant in the latter (see the - <seealso marker="ct_master_chapter#test_specifications">Large - Scale Testing</seealso> chapter for information). For more information - about the various terms, please see the corresponding sections in the - User's Guide, such as e.g. the - <seealso marker="run_test_chapter#ct_run"><c>ct_run</c> - program</seealso> for an overview of available start flags - (since most flags have a corresponding configuration term), and - more detailed explanation of e.g. - <seealso marker="write_test_chapter#logging">Logging</seealso> - (for the <c>verbosity</c>, <c>stylesheet</c> and <c>basic_html</c> terms), - <seealso marker="config_file_chapter#top">External Configuration Data</seealso> - (for the <c>config</c> and <c>userconfig</c> terms), - <seealso marker="event_handler_chapter#event_handling">Event - Handling</seealso> (for the <c>event_handler</c> term), - <seealso marker="ct_hooks_chapter#installing">Common Test Hooks</seealso> - (for the <c>ct_hooks</c> term), etc.</p> - <p>Config terms:</p> - <pre> - {merge_tests, Bool}. - - {define, Constant, Value}. - - {node, NodeAlias, Node}. - - {init, InitOptions}. - {init, [NodeAlias], InitOptions}. - - {label, Label}. - {label, NodeRefs, Label}. - - {verbosity, VerbosityLevels}. - {verbosity, NodeRefs, VerbosityLevels}. - - {stylesheet, CSSFile}. - {stylesheet, NodeRefs, CSSFile}. - - {silent_connections, ConnTypes}. - {silent_connections, NodeRefs, ConnTypes}. - - {multiply_timetraps, N}. - {multiply_timetraps, NodeRefs, N}. - - {scale_timetraps, Bool}. - {scale_timetraps, NodeRefs, Bool}. - - {cover, CoverSpecFile}. - {cover, NodeRefs, CoverSpecFile}. - - {include, IncludeDirs}. - {include, NodeRefs, IncludeDirs}. - - {auto_compile, Bool}, - {auto_compile, NodeRefs, Bool}, - - {config, ConfigFiles}. - {config, ConfigDir, ConfigBaseNames}. - {config, NodeRefs, ConfigFiles}. - {config, NodeRefs, ConfigDir, ConfigBaseNames}. 
- - {userconfig, {CallbackModule, ConfigStrings}}. - {userconfig, NodeRefs, {CallbackModule, ConfigStrings}}. - - {logdir, LogDir}. - {logdir, NodeRefs, LogDir}. - - {logopts, LogOpts}. - {logopts, NodeRefs, LogOpts}. - - {create_priv_dir, PrivDirOption}. - {create_priv_dir, NodeRefs, PrivDirOption}. - - {event_handler, EventHandlers}. - {event_handler, NodeRefs, EventHandlers}. - {event_handler, EventHandlers, InitArgs}. - {event_handler, NodeRefs, EventHandlers, InitArgs}. - - {ct_hooks, CTHModules}. - {ct_hooks, NodeRefs, CTHModules}. - - {enable_builtin_hooks, Bool}. - - {basic_html, Bool}. - {basic_html, NodeRefs, Bool}. + <section> + <title>General description</title> + <p>The most flexible way to specify what to test, is to use a so + called test specification. A test specification is a sequence of + Erlang terms. The terms are normally declared in one or more text files + (see <c><seealso marker="ct#run_test-1">ct:run_test/1</seealso></c>), but + may also be passed to Common Test on the form of a list (see + <c><seealso marker="ct#run_testspec-1">ct:run_testspec/1</seealso></c>). + There are two general types of terms: configuration terms and test + specification terms.</p> + <p>With configuration terms it is possible to e.g. label the test + run (similar to <c>ct_run -label</c>), evaluate arbitrary expressions + before starting the test, import configuration data (similar to + <c>ct_run -config/-userconfig</c>), specify the top level HTML log + directory (similar to <c>ct_run -logdir</c>), enable code coverage + analysis (similar to <c>ct_run -cover</c>), install Common Test Hooks + (similar to <c>ct_run -ch_hooks</c>), install event_handler plugins + (similar to <c>ct_run -event_handler</c>), specify include directories + that should be passed to the compiler for automatic compilation + (similar to <c>ct_run -include</c>), disable the auto compilation + feature (similar to <c>ct_run -no_auto_compile</c>), set verbosity + levels (similar to <c>ct_run -verbosity</c>), and more.</p> + <p>Configuration terms can be combined with <c>ct_run</c> start flags, + or <c>ct:run_test/1</c> options. The result will for some flags/options + and terms be that the values are merged (e.g. configuration files, + include directories, verbosity levels, silent connections), and for + others that the start flags/options override the test specification + terms (e.g. log directory, label, style sheet, auto compilation).</p> + <p>With test specification terms it is possible to state exactly + which tests should run and in which order. A test term specifies + either one or more suites, one or more test case groups (possibly nested), + or one or more test cases in a group (or in multiple groups) or in a suite.</p> + <p>An arbitrary number of test terms may be declared in sequence. + Common Test will by default compile the terms into one or more tests + to be performed in one resulting test run. Note that a term that + specifies a set of test cases will "swallow" one that only + specifies a subset of these cases. E.g. the result of merging + one term that specifies that all cases in suite S should be + executed, with another term specifying only test case X and Y in + S, is a test of all cases in S. However, if a term specifying + test case X and Y in S is merged with a term specifying case Z + in S, the result is a test of X, Y and Z in S. To disable this + behaviour, i.e. 
to instead perform each test sequentially in a "script-like" + manner, the term <c>merge_tests</c> can be set to <c>false</c> in + the test specification.</p> + <p>A test term can also specify one or more test suites, groups, + or test cases to be skipped. Skipped suites, groups and cases + are not executed and show up in the HTML log files as + SKIPPED.</p> + </section> + <section> + <title>Using multiple test specification files</title> + + <p>When multiple test specification files are given at startup (either + with <c>ct_run -spec file1 file2 ...</c> or + <c>ct:run_test([{spec, [File1,File2,...]}])</c>), + Common Test will either execute one test run per specification file, or + join the files and perform all tests within one single test run. The first + behaviour is the default one. The latter requires that the start + flag/option <c>join_suites</c> is provided, e.g. + <c>run_test -spec ./my_tests1.ts ./my_tests2.ts -join_suites</c>.</p> + + <p>Joining a number of specifications, or running them separately, can + also be accomplished with (and may be combined with) test specification + file inclusion, described next.</p> + </section> + <section> + <title>Test specification file inclusion</title> + <p>With the <c>specs</c> term (see syntax below), it's possible to have + a test specification include other specifications. An included + specification may either be joined with the source specification, + or used to produce a separate test run (like with the <c>join_specs</c> + start flag/option above). Example:</p> + <pre> + %% In specification file "a.spec" + {specs, join, ["b.spec", "c.spec"]}. + {specs, separate, ["d.spec", "e.spec"]}. + %% Config and test terms follow + ...</pre> + <p>In this example, the test terms defined in files "b.spec" and "c.spec" + will be joined with the terms in the source specification "a.spec" + (if any). The inclusion of specifications "d.spec" and + "e.spec" will result in two separate, and independent, test runs (i.e. + one for each included specification).</p> + <p>Note that the <c>join</c> option does not imply that the test terms + will be merged (see <c>merge_tests</c> above), only that all tests are + executed in one single test run.</p> + <p>Joined specifications share common configuration settings, such as + the list of <c>config</c> files or <c>include</c> directories. + For configuration that can not be combined, such as settings for <c>logdir</c> + or <c>verbosity</c>, it is up to the user to ensure there are no clashes + when the test specifications are joined. Specifications included with + the <c>separate</c> option, do not share configuration settings with the + source specification. This is useful e.g. if there are clashing + configuration settings in included specifications, making it impossible + to join them.</p> + <p>If <c>{merge_tests,true}</c> is set in the source specification + (which is the default setting), terms in joined specifications will be + merged with terms in the source specification (according to the + description of <c>merge_tests</c> above).</p> + <p>Note that it is always the <c>merge_tests</c> setting in the source + specification that is used when joined with other specifications. + Say e.g. that a source specification A, with tests TA1 and TA2, has + <c>{merge_tests,false}</c> set, and it includes another specification, + B, with tests TB1 and TB2, that has <c>{merge_tests,true}</c> set. + The result will be that the test series: <c>TA1,TA2,merge(TB1,TB2)</c>, + is executed. 
The opposite <c>merge_tests</c> settings would result in the + following the test series: <c>merge(merge(TA1,TA2),TB1,TB2)</c>.</p> + <p>The <c>specs</c> term may of course be used to nest specifications, + i.e. have one specification include other specifications, which in turn + include others, etc.</p> + </section> + <section> + <title>Test case groups</title> + + <p>When a test case group is specified, the resulting test + executes the <c>init_per_group</c> function, followed by all test + cases and sub groups (including their configuration functions), and + finally the <c>end_per_group</c> function. Also if particular + test cases in a group are specified, <c>init_per_group</c> + and <c>end_per_group</c> for the group in question are + called. If a group which is defined (in <c>Suite:group/0</c>) to + be a sub group of another group, is specified (or if particular test + cases of a sub group are), Common Test will call the configuration + functions for the top level groups as well as for the sub group + in question (making it possible to pass configuration data all + the way from <c>init_per_suite</c> down to the test cases in the + sub group).</p> + <p>The test specification utilizes the same mechanism for specifying + test case groups by means of names and paths, as explained in the + <seealso marker="run_test_chapter#group_execution">Group Execution</seealso> + section above, with the addition of the <c>GroupSpec</c> element + described next.</p> + <p>The <c>GroupSpec</c> element makes it possible to specify + group execution properties that will override those in the + group definition (i.e. in <c>groups/0</c>). Execution properties for + sub-groups may be overridden as well. This feature makes it possible to + change properties of groups at the time of execution, + without even having to edit the test suite. The very same + feature is available for <c>group</c> elements in the <c>Suite:all/0</c> + list. Therefore, more detailed documentation, and examples, can be + found in the <seealso marker="write_test_chapter#test_case_groups"> + Test case groups</seealso> chapter.</p> + </section> - {release_shell, Bool}.</pre> + <section> + <title>Test specification syntax</title> + + <p>Below is the test specification syntax. Test specifications can + be used to run tests both in a single test host environment and + in a distributed Common Test environment (Large Scale + Testing). The node parameters in the <c>init</c> term are only + relevant in the latter (see the + <seealso marker="ct_master_chapter#test_specifications">Large + Scale Testing</seealso> chapter for information). For more information + about the various terms, please see the corresponding sections in the + User's Guide, such as e.g. the + <seealso marker="run_test_chapter#ct_run"><c>ct_run</c> + program</seealso> for an overview of available start flags + (since most flags have a corresponding configuration term), and + more detailed explanation of e.g. 
+ <seealso marker="write_test_chapter#logging">Logging</seealso> + (for the <c>verbosity</c>, <c>stylesheet</c> and <c>basic_html</c> terms), + <seealso marker="config_file_chapter#top">External Configuration Data</seealso> + (for the <c>config</c> and <c>userconfig</c> terms), + <seealso marker="event_handler_chapter#event_handling">Event + Handling</seealso> (for the <c>event_handler</c> term), + <seealso marker="ct_hooks_chapter#installing">Common Test Hooks</seealso> + (for the <c>ct_hooks</c> term), etc.</p> + </section> + <p>Config terms:</p> + <pre> + {merge_tests, Bool}. + + {define, Constant, Value}. + + {specs, InclSpecsOption, TestSpecs}. + + {node, NodeAlias, Node}. + + {init, InitOptions}. + {init, [NodeAlias], InitOptions}. + + {label, Label}. + {label, NodeRefs, Label}. + + {verbosity, VerbosityLevels}. + {verbosity, NodeRefs, VerbosityLevels}. + + {stylesheet, CSSFile}. + {stylesheet, NodeRefs, CSSFile}. + + {silent_connections, ConnTypes}. + {silent_connections, NodeRefs, ConnTypes}. + + {multiply_timetraps, N}. + {multiply_timetraps, NodeRefs, N}. + + {scale_timetraps, Bool}. + {scale_timetraps, NodeRefs, Bool}. + + {cover, CoverSpecFile}. + {cover, NodeRefs, CoverSpecFile}. + + {cover_stop, Bool}. + {cover_stop, NodeRefs, Bool}. + + {include, IncludeDirs}. + {include, NodeRefs, IncludeDirs}. + + {auto_compile, Bool}, + {auto_compile, NodeRefs, Bool}, + + {config, ConfigFiles}. + {config, ConfigDir, ConfigBaseNames}. + {config, NodeRefs, ConfigFiles}. + {config, NodeRefs, ConfigDir, ConfigBaseNames}. + + {userconfig, {CallbackModule, ConfigStrings}}. + {userconfig, NodeRefs, {CallbackModule, ConfigStrings}}. + + {logdir, LogDir}. + {logdir, NodeRefs, LogDir}. + + {logopts, LogOpts}. + {logopts, NodeRefs, LogOpts}. + + {create_priv_dir, PrivDirOption}. + {create_priv_dir, NodeRefs, PrivDirOption}. + + {event_handler, EventHandlers}. + {event_handler, NodeRefs, EventHandlers}. + {event_handler, EventHandlers, InitArgs}. + {event_handler, NodeRefs, EventHandlers, InitArgs}. + + {ct_hooks, CTHModules}. + {ct_hooks, NodeRefs, CTHModules}. + + {enable_builtin_hooks, Bool}. + + {basic_html, Bool}. + {basic_html, NodeRefs, Bool}. + + {release_shell, Bool}.</pre> + <p>Test terms:</p> - <pre> - {suites, Dir, Suites}. - {suites, NodeRefs, Dir, Suites}. - - {groups, Dir, Suite, Groups}. - {groups, NodeRefs, Dir, Suite, Groups}. - - {groups, Dir, Suite, Groups, {cases,Cases}}. - {groups, NodeRefs, Dir, Suite, Groups, {cases,Cases}}. - - {cases, Dir, Suite, Cases}. - {cases, NodeRefs, Dir, Suite, Cases}. - - {skip_suites, Dir, Suites, Comment}. - {skip_suites, NodeRefs, Dir, Suites, Comment}. - - {skip_groups, Dir, Suite, GroupNames, Comment}. - {skip_groups, NodeRefs, Dir, Suite, GroupNames, Comment}. - - {skip_cases, Dir, Suite, Cases, Comment}. - {skip_cases, NodeRefs, Dir, Suite, Cases, Comment}.</pre> - + <pre> + {suites, Dir, Suites}. + {suites, NodeRefs, Dir, Suites}. + + {groups, Dir, Suite, Groups}. + {groups, NodeRefs, Dir, Suite, Groups}. + + {groups, Dir, Suite, Groups, {cases,Cases}}. + {groups, NodeRefs, Dir, Suite, Groups, {cases,Cases}}. + + {cases, Dir, Suite, Cases}. + {cases, NodeRefs, Dir, Suite, Cases}. + + {skip_suites, Dir, Suites, Comment}. + {skip_suites, NodeRefs, Dir, Suites, Comment}. + + {skip_groups, Dir, Suite, GroupNames, Comment}. + {skip_groups, NodeRefs, Dir, Suite, GroupNames, Comment}. + + {skip_cases, Dir, Suite, Cases, Comment}. 
+ {skip_cases, NodeRefs, Dir, Suite, Cases, Comment}.</pre> + <p>Types:</p> - <pre> - Bool = true | false - Constant = atom() - Value = term() - NodeAlias = atom() - Node = node() - NodeRef = NodeAlias | Node | master - NodeRefs = all_nodes | [NodeRef] | NodeRef - InitOptions = term() - Label = atom() | string() - VerbosityLevels = integer() | [{Category,integer()}] - Category = atom() - CSSFile = string() - ConnTypes = all | [atom()] - N = integer() - CoverSpecFile = string() - IncludeDirs = string() | [string()] - ConfigFiles = string() | [string()] - ConfigDir = string() - ConfigBaseNames = string() | [string()] - CallbackModule = atom() - ConfigStrings = string() | [string()] - LogDir = string() - LogOpts = [term()] - PrivDirOption = auto_per_run | auto_per_tc | manual_per_tc - EventHandlers = atom() | [atom()] - InitArgs = [term()] - CTHModules = [CTHModule | {CTHModule, CTHInitArgs} | {CTHModule, CTHInitArgs, CTHPriority}] - CTHModule = atom() - CTHInitArgs = term() - Dir = string() - Suites = atom() | [atom()] | all - Suite = atom() - Groups = GroupPath | [GroupPath] | GroupSpec | [GroupSpec] | all - GroupPath = [GroupName] - GroupSpec = GroupName | {GroupName,Properties} | {GroupName,Properties,GroupSpec} - GroupName = atom() - GroupNames = GroupName | [GroupName] - Cases = atom() | [atom()] | all - Comment = string() | ""</pre> - - <p>The difference between the <c>config</c> terms above, is that with - <c>ConfigDir</c>, <c>ConfigBaseNames</c> is a list of base names, - i.e. without directory paths. <c>ConfigFiles</c> must be full names, - including paths. E.g, these two terms have the same meaning:</p> - <pre> - {config, ["/home/testuser/tests/config/nodeA.cfg", - "/home/testuser/tests/config/nodeB.cfg"]}. - - {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}.</pre> - - <note><p>Any relative paths specified in the test specification, will be - relative to the directory which contains the test specification file, if - <c>ct_run -spec TestSpecFile ...</c> or - <c>ct:run:test([{spec,TestSpecFile},...])</c> - executes the test. The path will be relative to the top level log directory, if - <c>ct:run:testspec(TestSpec)</c> executes the test.</p></note> - - <p>The <c>define</c> term introduces a constant, which is used to - replace the name <c>Constant</c> with <c>Value</c>, wherever it's found in - the test specification. This replacement happens during an initial iteration - through the test specification. Constants may be used anywhere in the test - specification, e.g. in arbitrary lists and tuples, and even in strings - and inside the value part of other constant definitions! A constant can - also be part of a node name, but that is the only place where a constant - can be part of an atom.</p> - - <note><p>For the sake of readability, the name of the constant must always - begin with an upper case letter, or a <c>$</c>, <c>?</c>, or <c>_</c>. - This also means that it must always be single quoted (obviously, since - the constant name is actually an atom, not text).</p></note> - - <p>The main benefit of constants is that they can be used to reduce the size - (and avoid repetition) of long strings, such as file paths. Compare these - terms:</p> - - <pre> - %% 1a. no constant - {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}. - {suites, "/home/testuser/tests/suites", all}. - - %% 1b. with constant - {define, 'TESTDIR', "/home/testuser/tests"}. - {config, "'TESTDIR'/config", ["nodeA.cfg","nodeB.cfg"]}. - {suites, "'TESTDIR'/suites", all}. - - %% 2a. 
no constants - {config, [testnode@host1, testnode@host2], "../config", ["nodeA.cfg","nodeB.cfg"]}. - {suites, [testnode@host1, testnode@host2], "../suites", [x_SUITE, y_SUITE]}. - - %% 2b. with constants - {define, 'NODE', testnode}. - {define, 'NODES', ['NODE'@host1, 'NODE'@host2]}. - {config, 'NODES', "../config", ["nodeA.cfg","nodeB.cfg"]}. - {suites, 'NODES', "../suites", [x_SUITE, y_SUITE]}.</pre> - - <p>Constants make the test specification term <c>alias</c>, in previous - versions of Common Test, redundant. This term has been deprecated but will - remain supported in upcoming Common Test releases. Replacing <c>alias</c> - terms with <c>define</c> is strongly recommended though! Here's an example - of such a replacement:</p> + <pre> + Bool = true | false + Constant = atom() + Value = term() + InclSpecsOption = join | separate + TestSpecs = string() | [string()] + NodeAlias = atom() + Node = node() + NodeRef = NodeAlias | Node | master + NodeRefs = all_nodes | [NodeRef] | NodeRef + InitOptions = term() + Label = atom() | string() + VerbosityLevels = integer() | [{Category,integer()}] + Category = atom() + CSSFile = string() + ConnTypes = all | [atom()] + N = integer() + CoverSpecFile = string() + IncludeDirs = string() | [string()] + ConfigFiles = string() | [string()] + ConfigDir = string() + ConfigBaseNames = string() | [string()] + CallbackModule = atom() + ConfigStrings = string() | [string()] + LogDir = string() + LogOpts = [term()] + PrivDirOption = auto_per_run | auto_per_tc | manual_per_tc + EventHandlers = atom() | [atom()] + InitArgs = [term()] + CTHModules = [CTHModule | + {CTHModule, CTHInitArgs} | + {CTHModule, CTHInitArgs, CTHPriority}] + CTHModule = atom() + CTHInitArgs = term() + Dir = string() + Suites = atom() | [atom()] | all + Suite = atom() + Groups = GroupPath | [GroupPath] | GroupSpec | [GroupSpec] | all + GroupPath = [GroupName] + GroupSpec = GroupName | {GroupName,Properties} | {GroupName,Properties,GroupSpec} + GroupName = atom() + GroupNames = GroupName | [GroupName] + Cases = atom() | [atom()] | all + Comment = string() | ""</pre> + + <section> + <p>The difference between the <c>config</c> terms above, is that with + <c>ConfigDir</c>, <c>ConfigBaseNames</c> is a list of base names, + i.e. without directory paths. <c>ConfigFiles</c> must be full names, + including paths. E.g, these two terms have the same meaning:</p> + <pre> + {config, ["/home/testuser/tests/config/nodeA.cfg", + "/home/testuser/tests/config/nodeB.cfg"]}. + + {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}.</pre> + + <note><p>Any relative paths specified in the test specification, will be + relative to the directory which contains the test specification file, if + <c>ct_run -spec TestSpecFile ...</c> or + <c>ct:run:test([{spec,TestSpecFile},...])</c> + executes the test. The path will be relative to the top level log directory, if + <c>ct:run:testspec(TestSpec)</c> executes the test.</p></note> + </section> - <pre> - %% using the old alias term - {config, "/home/testuser/tests/config/nodeA.cfg"}. - {alias, suite_dir, "/home/testuser/tests/suites"}. - {groups, suite_dir, x_SUITE, group1}. - - %% replacing with constants - {define, 'TestDir', "/home/testuser/tests"}. - {define, 'CfgDir', "'TestDir'/config"}. - {define, 'SuiteDir', "'TestDir'/suites"}. - {config, 'CfgDir', "nodeA.cfg"}. 
- {groups, 'SuiteDir', x_SUITE, group1}.</pre> - - <p>Actually, constants could well replace the <c>node</c> term too, but - this still has declarative value, mainly when used in combination - with <c>NodeRefs == all_nodes</c> (see types above).</p> - - <p>Here follows a simple test specification example:</p> - <pre> - {define, 'Top', "/home/test"}. - {define, 'T1', "'Top'/t1"}. - {define, 'T2', "'Top'/t2"}. - {define, 'T3', "'Top'/t3"}. - {define, 'CfgFile', "config.cfg"}. + <section> + <title>Constants</title> + + <p>The <c>define</c> term introduces a constant, which is used to + replace the name <c>Constant</c> with <c>Value</c>, wherever it's found in + the test specification. This replacement happens during an initial iteration + through the test specification. Constants may be used anywhere in the test + specification, e.g. in arbitrary lists and tuples, and even in strings + and inside the value part of other constant definitions! A constant can + also be part of a node name, but that is the only place where a constant + can be part of an atom.</p> + + <note><p>For the sake of readability, the name of the constant must always + begin with an upper case letter, or a <c>$</c>, <c>?</c>, or <c>_</c>. + This also means that it must always be single quoted (obviously, since + the constant name is actually an atom, not text).</p></note> + + <p>The main benefit of constants is that they can be used to reduce the size + (and avoid repetition) of long strings, such as file paths. Compare these + terms:</p> + + <pre> + %% 1a. no constant + {config, "/home/testuser/tests/config", ["nodeA.cfg","nodeB.cfg"]}. + {suites, "/home/testuser/tests/suites", all}. + + %% 1b. with constant + {define, 'TESTDIR', "/home/testuser/tests"}. + {config, "'TESTDIR'/config", ["nodeA.cfg","nodeB.cfg"]}. + {suites, "'TESTDIR'/suites", all}. + + %% 2a. no constants + {config, [testnode@host1, testnode@host2], "../config", ["nodeA.cfg","nodeB.cfg"]}. + {suites, [testnode@host1, testnode@host2], "../suites", [x_SUITE, y_SUITE]}. + + %% 2b. with constants + {define, 'NODE', testnode}. + {define, 'NODES', ['NODE'@host1, 'NODE'@host2]}. + {config, 'NODES', "../config", ["nodeA.cfg","nodeB.cfg"]}. + {suites, 'NODES', "../suites", [x_SUITE, y_SUITE]}.</pre> + + <p>Constants make the test specification term <c>alias</c>, in previous + versions of Common Test, redundant. This term has been deprecated but will + remain supported in upcoming Common Test releases. Replacing <c>alias</c> + terms with <c>define</c> is strongly recommended though! Here's an example + of such a replacement:</p> + + <pre> + %% using the old alias term + {config, "/home/testuser/tests/config/nodeA.cfg"}. + {alias, suite_dir, "/home/testuser/tests/suites"}. + {groups, suite_dir, x_SUITE, group1}. + + %% replacing with constants + {define, 'TestDir', "/home/testuser/tests"}. + {define, 'CfgDir', "'TestDir'/config"}. + {define, 'SuiteDir', "'TestDir'/suites"}. + {config, 'CfgDir', "nodeA.cfg"}. + {groups, 'SuiteDir', x_SUITE, group1}.</pre> + + <p>Actually, constants could well replace the <c>node</c> term too, but + this still has declarative value, mainly when used in combination + with <c>NodeRefs == all_nodes</c> (see types above).</p> + </section> - {logdir, "'Top'/logs"}. - - {config, ["'T1'/'CfgFile'", "'T2'/'CfgFile'", "'T3'/'CfgFile'"]}. - - {suites, 'T1', all}. - {skip_suites, 'T1', [t1B_SUITE,t1D_SUITE], "Not implemented"}. - {skip_cases, 'T1', t1A_SUITE, [test3,test4], "Irrelevant"}. - {skip_cases, 'T1', t1C_SUITE, [test1], "Ignore"}. 
- - {suites, 'T2', [t2B_SUITE,t2C_SUITE]}. - {cases, 'T2', t2A_SUITE, [test4,test1,test7]}. - - {skip_suites, 'T3', all, "Not implemented"}.</pre> + <section> + <title>Example</title> + + <p>Here follows a simple test specification example:</p> + <pre> + {define, 'Top', "/home/test"}. + {define, 'T1', "'Top'/t1"}. + {define, 'T2', "'Top'/t2"}. + {define, 'T3', "'Top'/t3"}. + {define, 'CfgFile', "config.cfg"}. + + {logdir, "'Top'/logs"}. + + {config, ["'T1'/'CfgFile'", "'T2'/'CfgFile'", "'T3'/'CfgFile'"]}. + + {suites, 'T1', all}. + {skip_suites, 'T1', [t1B_SUITE,t1D_SUITE], "Not implemented"}. + {skip_cases, 'T1', t1A_SUITE, [test3,test4], "Irrelevant"}. + {skip_cases, 'T1', t1C_SUITE, [test1], "Ignore"}. + + {suites, 'T2', [t2B_SUITE,t2C_SUITE]}. + {cases, 'T2', t2A_SUITE, [test4,test1,test7]}. + + {skip_suites, 'T3', all, "Not implemented"}.</pre> + + <p>The example specifies the following:</p> + <list> + <item>The specified logdir directory will be used for storing + the HTML log files (in subdirectories tagged with node name, + date and time).</item> + <item>The variables in the specified test system config files will be + imported for the test.</item> + <item>The first test to run includes all suites for system t1. Excluded from + the test are however the t1B and t1D suites. Also test cases test3 and + test4 in t1A as well as the test1 case in t1C are excluded from + the test.</item> + <item>Secondly, the test for system t2 should run. The included suites are + t2B and t2C. Included are also test cases test4, test1 and test7 in suite + t2A. Note that the test cases will be executed in the specified order.</item> + <item>Lastly, all suites for systems t3 are to be completely skipped and this + should be explicitly noted in the log files.</item> + </list> + </section> - <p>The example specifies the following:</p> - <list> - <item>The specified logdir directory will be used for storing - the HTML log files (in subdirectories tagged with node name, - date and time).</item> - <item>The variables in the specified test system config files will be - imported for the test.</item> - <item>The first test to run includes all suites for system t1. Excluded from - the test are however the t1B and t1D suites. Also test cases test3 and - test4 in t1A as well as the test1 case in t1C are excluded from - the test.</item> - <item>Secondly, the test for system t2 should run. The included suites are - t2B and t2C. Included are also test cases test4, test1 and test7 in suite - t2A. Note that the test cases will be executed in the specified order.</item> - <item>Lastly, all suites for systems t3 are to be completely skipped and this - should be explicitly noted in the log files.</item> - </list> - <p>With the <c>init</c> term it's possible to specify initialization options - for nodes defined in the test specification. Currently, there are options - to start the node and/or to evaluate any function on the node. - See the <seealso marker="ct_master_chapter#ct_slave">Automatic startup of - the test target nodes</seealso> chapter for details.</p> - <p>It is possible for the user to provide a test specification that - includes (for Common Test) unrecognizable terms. If this is desired, - the <c>-allow_user_terms</c> flag should be used when starting tests with - <c>ct_run</c>. This forces Common Test to ignore unrecognizable terms. - Note that in this mode, Common Test is not able to check the specification - for errors as efficiently as if the scanner runs in default mode. 
- If <c><seealso marker="ct#run_test-1">ct:run_test/1</seealso></c> is used for starting the tests, the relaxed scanner - mode is enabled by means of the tuple: <c>{allow_user_terms,true}</c></p> + <section> + <title>The init term</title> + <p>With the <c>init</c> term it's possible to specify initialization options + for nodes defined in the test specification. Currently, there are options + to start the node and/or to evaluate any function on the node. + See the <seealso marker="ct_master_chapter#ct_slave">Automatic startup of + the test target nodes</seealso> chapter for details.</p> + </section> + <section> + <title>User specific terms</title> + <p>It is possible for the user to provide a test specification that + includes (for Common Test) unrecognizable terms. If this is desired, + the <c>-allow_user_terms</c> flag should be used when starting tests with + <c>ct_run</c>. This forces Common Test to ignore unrecognizable terms. + Note that in this mode, Common Test is not able to check the specification + for errors as efficiently as if the scanner runs in default mode. + If <c><seealso marker="ct#run_test-1">ct:run_test/1</seealso></c> is used + for starting the tests, the relaxed scanner + mode is enabled by means of the tuple: <c>{allow_user_terms,true}</c></p> + </section> </section> <section> diff --git a/lib/common_test/doc/src/write_test_chapter.xml b/lib/common_test/doc/src/write_test_chapter.xml index 248d7de8b6..cc8d913994 100644 --- a/lib/common_test/doc/src/write_test_chapter.xml +++ b/lib/common_test/doc/src/write_test_chapter.xml @@ -4,7 +4,7 @@ <chapter> <header> <copyright> - <year>2003</year><year>2012</year> + <year>2003</year><year>2013</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -982,38 +982,36 @@ <p>Example:</p> <pre> + Some printouts during test case execution: - Some printouts during test case execution: + io:format("1. Standard IO, importance = ~w~n", [?STD_IMPORTANCE]), + ct:log("2. Uncategorized, importance = ~w", [?STD_IMPORTANCE]), + ct:log(info, "3. Categorized info, importance = ~w", [?STD_IMPORTANCE]]), + ct:log(info, ?LOW_IMPORTANCE, "4. Categorized info, importance = ~w", [?LOW_IMPORTANCE]), + ct:log(error, "5. Categorized error, importance = ~w", [?HI_IMPORTANCE]), + ct:log(error, ?HI_IMPORTANCE, "6. Categorized error, importance = ~w", [?MAX_IMPORTANCE]), - io:format("1. Standard IO, importance = ~w~n", [?STD_IMPORTANCE]), - ct:log("2. Uncategorized, importance = ~w", [?STD_IMPORTANCE]), - ct:log(info, "3. Categorized info, importance = ~w", [?STD_IMPORTANCE]]), - ct:log(info, ?LOW_IMPORTANCE, "4. Categorized info, importance = ~w", [?LOW_IMPORTANCE]), - ct:log(error, "5. Categorized error, importance = ~w", [?HI_IMPORTANCE]), - ct:log(error, ?HI_IMPORTANCE, "6. Categorized error, importance = ~w", [?MAX_IMPORTANCE]), + If starting the test without specifying any verbosity levels: - If starting the test without specifying any verbosity levels: + $ ct_run ... - $ ct_run ... + the following gets printed: - the following gets printed: - - 1. Standard IO, importance = 50 - 2. Uncategorized, importance = 50 - 3. Categorized info, importance = 50 - 5. Categorized error, importance = 75 - 6. Categorized error, importance = 99 - - If starting the test with: - - $ ct_run -verbosity 1 and info 75 - - the following gets printed: + 1. Standard IO, importance = 50 + 2. Uncategorized, importance = 50 + 3. Categorized info, importance = 50 + 5. Categorized error, importance = 75 + 6. 
Categorized error, importance = 99 + + If starting the test with: + + $ ct_run -verbosity 1 and info 75 + + the following gets printed: - 3. Categorized info, importance = 50 - 4. Categorized info, importance = 25 - 6. Categorized error, importance = 99 - </pre> + 3. Categorized info, importance = 50 + 4. Categorized info, importance = 25 + 6. Categorized error, importance = 99</pre> <p>How categories can be mapped to CSS tags is documented in the <seealso marker="run_test_chapter#html_stylesheet">Running Tests</seealso> diff --git a/lib/common_test/priv/Makefile.in b/lib/common_test/priv/Makefile.in index 4372ab124e..5a9fabbe45 100644 --- a/lib/common_test/priv/Makefile.in +++ b/lib/common_test/priv/Makefile.in @@ -68,15 +68,15 @@ JS = jquery-latest.js jquery.tablesorter.min.js include ../../test_server/vsn.mk debug opt: - sed -e 's;@CT_VSN@;$(VSN);' \ + $(V_at)sed -e 's;@CT_VSN@;$(VSN);' \ -e 's;@TS_VSN@;$(TEST_SERVER_VSN);' \ ../install.sh.in > install.sh - chmod 775 install.sh + $(V_at)chmod 775 install.sh docs: clean: - rm -f $(SCRIPTS) + $(V_at)rm -f $(SCRIPTS) # ---------------------------------------------------- diff --git a/lib/common_test/src/Makefile b/lib/common_test/src/Makefile index dd2923ece9..4600c0ad78 100644 --- a/lib/common_test/src/Makefile +++ b/lib/common_test/src/Makefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 2003-2012. All Rights Reserved. +# Copyright Ericsson AB 2003-2013. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -97,7 +97,7 @@ DTD_FILES = \ # ---------------------------------------------------- ERL_COMPILE_FLAGS += -pa ../ebin -I../include -I $(ERL_TOP)/lib/snmp/include/ \ -I../../test_server/include -I../../xmerl/inc/ \ - -I $(ERL_TOP)/lib/kernel/include + -I $(ERL_TOP)/lib/kernel/include -Werror # ---------------------------------------------------- # Targets @@ -127,10 +127,10 @@ clean: # Special Build Targets # ---------------------------------------------------- $(APP_TARGET): $(APP_SRC) ../vsn.mk - sed -e 's;%VSN%;$(VSN);' $< > $@ + $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@ $(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk - sed -e 's;%VSN%;$(VSN);' $< > $@ + $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@ # ---------------------------------------------------- # Release Target diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl index ad9bf4e2d6..04a95a53fa 100644 --- a/lib/common_test/src/ct.erl +++ b/lib/common_test/src/ct.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2012. All Rights Reserved. +%% Copyright Ericsson AB 2003-2013. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -144,11 +144,11 @@ run(TestDirs) -> %%% @spec run_test(Opts) -> Result %%% Opts = [OptTuples] %%% OptTuples = {dir,TestDirs} | {suite,Suites} | {group,Groups} | -%%% {testcase,Cases} | {spec,TestSpecs} | {label,Label} | -%%% {config,CfgFiles} | {userconfig, UserConfig} | +%%% {testcase,Cases} | {spec,TestSpecs} | {join_specs,Bool} | +%%% {label,Label} | {config,CfgFiles} | {userconfig, UserConfig} | %%% {allow_user_terms,Bool} | {logdir,LogDir} | %%% {silent_connections,Conns} | {stylesheet,CSSFile} | -%%% {cover,CoverSpecFile} | {step,StepOpts} | +%%% {cover,CoverSpecFile} | {cover_stop,Bool} | {step,StepOpts} | %%% {event_handler,EventHandlers} | {include,InclDirs} | %%% {auto_compile,Bool} | {create_priv_dir,CreatePrivDir} | %%% {multiply_timetraps,M} | {scale_timetraps,Bool} | @@ -735,7 +735,7 @@ fail(Format, Args) -> %%% overwrites the string set by this function.</p> comment(Comment) when is_list(Comment) -> Formatted = - case (catch io_lib:format("~s",[Comment])) of + case (catch io_lib:format("~ts",[Comment])) of {'EXIT',_} -> % it's a list not a string io_lib:format("~p",[Comment]); String -> @@ -988,8 +988,9 @@ get_testdata(Key) -> end. %%%----------------------------------------------------------------- -%%% @spec abort_current_testcase(Reason) -> ok | {error,no_testcase_running} +%%% @spec abort_current_testcase(Reason) -> ok | {error,ErrorReason} %%% Reason = term() +%%% ErrorReason = no_testcase_running | parallel_group %%% %%% @doc <p>When calling this function, the currently executing test case will be aborted. %%% It is the user's responsibility to know for sure which test case is currently diff --git a/lib/common_test/src/ct_config.erl b/lib/common_test/src/ct_config.erl index b1d709bc75..c35cbd3c08 100644 --- a/lib/common_test/src/ct_config.erl +++ b/lib/common_test/src/ct_config.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2012. All Rights Reserved. +%% Copyright Ericsson AB 2010-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -266,7 +266,10 @@ read_config_files_int([{Callback, File}|Files], FunToSave) -> read_config_files_int([], _FunToSave) -> ok. 
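%% Illustrative sketch, not part of the patch: a possible ct:run_test/1 call
%% using the two option tuples added to the @spec earlier in this patch,
%% {join_specs,Bool} and {cover_stop,Bool}. The spec files, cover spec and
%% log directory names below are hypothetical.
RunOpts = [{spec, ["a.spec","b.spec"]},  % hypothetical test specifications
           {join_specs, true},           % merge the specs into one test run
           {cover, "cover.spec"},        % hypothetical cover specification
           {cover_stop, false},          % leave modules cover compiled afterwards
           {logdir, "ct_logs"}],         % hypothetical log directory
ct:run_test(RunOpts).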
-store_config(Config, Callback, File) -> +store_config(Config, Callback, File) when is_tuple(Config) -> + store_config([Config], Callback, File); + +store_config(Config, Callback, File) when is_list(Config) -> [ets:insert(?attr_table, #ct_conf{key=Key, value=Val, @@ -607,7 +610,7 @@ encrypt_config_file(SrcFileName, EncryptFileName, {key,Key}) -> EncBin = crypto:des3_cbc_encrypt(K1, K2, K3, IVec, Bin2), case file:write_file(EncryptFileName, EncBin) of ok -> - io:format("~s --(encrypt)--> ~s~n", + io:format("~ts --(encrypt)--> ~ts~n", [SrcFileName,EncryptFileName]), ok; {error,Reason} -> @@ -649,7 +652,7 @@ decrypt_config_file(EncryptFileName, TargetFileName, {key,Key}) -> _ -> case file:write_file(TargetFileName, SrcBin) of ok -> - io:format("~s --(decrypt)--> ~s~n", + io:format("~ts --(decrypt)--> ~ts~n", [EncryptFileName,TargetFileName]), ok; {error,Reason} -> @@ -700,7 +703,7 @@ get_crypt_key_from_file() -> _ -> case catch string:tokens(binary_to_list(Result), [$\n,$\r]) of [Key] -> - io:format("~nCrypt key file: ~s~n", [FullName]), + io:format("~nCrypt key file: ~ts~n", [FullName]), Key; _ -> {error,{bad_crypt_file,FullName}} diff --git a/lib/common_test/src/ct_config_plain.erl b/lib/common_test/src/ct_config_plain.erl index 237df5c8f3..c6547f0a40 100644 --- a/lib/common_test/src/ct_config_plain.erl +++ b/lib/common_test/src/ct_config_plain.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2011. All Rights Reserved. +%% Copyright Ericsson AB 2010-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -47,7 +47,7 @@ read_config(ConfigFile) -> {error,no_crypt_file} -> {error,{config_file_error, lists:flatten( - io_lib:format("~s",[file:format_error(Reason)]))}}; + io_lib:format("~ts",[file:format_error(Reason)]))}}; {error,CryptError} -> {error,{decrypt_file_error,CryptError}}; _ when is_list(Key) -> diff --git a/lib/common_test/src/ct_conn_log_h.erl b/lib/common_test/src/ct_conn_log_h.erl index d7bd18606b..ac08a3e0ad 100644 --- a/lib/common_test/src/ct_conn_log_h.erl +++ b/lib/common_test/src/ct_conn_log_h.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2012. All Rights Reserved. +%% Copyright Ericsson AB 2012-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -52,7 +52,7 @@ open_files([],State) -> do_open_files([{Tag,File}|Logs],Acc) -> - case file:open(File, [write]) of + case file:open(File, [write,{encoding,utf8}]) of {ok,Fd} -> do_open_files(Logs,[{Tag,Fd}|Acc]); {error,Reason} -> @@ -98,9 +98,9 @@ terminate(_,#state{logs=Logs}) -> %%% Writing reports write_report(Time,#conn_log{module=ConnMod}=Info,Data,State) -> {LogType,Fd} = get_log(Info,State), - io:format(Fd,"~n~s~s~s",[format_head(ConnMod,LogType,Time), - format_title(LogType,Info), - format_data(ConnMod,LogType,Data)]). + io:format(Fd,"~n~ts~ts~ts",[format_head(ConnMod,LogType,Time), + format_title(LogType,Info), + format_data(ConnMod,LogType,Data)]). write_error(Time,#conn_log{module=ConnMod}=Info,Report,State) -> case get_log(Info,State) of @@ -109,9 +109,10 @@ write_error(Time,#conn_log{module=ConnMod}=Info,Report,State) -> %% sasl error handler, so don't write it again. 
ok; {LogType,Fd} -> - io:format(Fd,"~n~s~s~s",[format_head(ConnMod,LogType,Time," ERROR"), - format_title(LogType,Info), - format_error(LogType,Report)]) + io:format(Fd,"~n~ts~ts~ts", + [format_head(ConnMod,LogType,Time," ERROR"), + format_title(LogType,Info), + format_error(LogType,Report)]) end. get_log(Info,State) -> @@ -140,16 +141,16 @@ format_head(ConnMod,LogType,Time) -> format_head(ConnMod,LogType,Time,""). format_head(ConnMod,raw,Time,Text) -> - io_lib:format("~n~p, ~p~s, ",[now_to_time(Time),ConnMod,Text]); + io_lib:format("~n~w, ~w~ts, ",[now_to_time(Time),ConnMod,Text]); format_head(ConnMod,_,Time,Text) -> Head = pad_char_end(?WIDTH,pretty_head(now_to_time(Time),ConnMod,Text),$=), - io_lib:format("~n~s",[Head]). + io_lib:format("~n~ts",[Head]). format_title(raw,#conn_log{client=Client}=Info) -> - io_lib:format("Client ~p ~s ~s",[Client,actionstr(Info),serverstr(Info)]); + io_lib:format("Client ~w ~s ~ts",[Client,actionstr(Info),serverstr(Info)]); format_title(_,Info) -> Title = pad_char_end(?WIDTH,pretty_title(Info),$=), - io_lib:format("~n~s", [Title]). + io_lib:format("~n~ts", [Title]). format_data(_,_,NoData) when NoData == ""; NoData == <<>> -> ""; @@ -162,8 +163,6 @@ format_error(pretty,Report) -> [io_lib:format("~n ~p: ~p",[K,V]) || {K,V} <- Report]. - - %%%----------------------------------------------------------------- %%% Helpers conn_info(LoggingProc, #conn_log{client=undefined} = ConnInfo) -> @@ -187,12 +186,12 @@ now_to_time({_,_,MicroS}=Now) -> pretty_head({{{Y,Mo,D},{H,Mi,S}},MicroS},ConnMod,Text0) -> Text = string:to_upper(atom_to_list(ConnMod) ++ Text0), - io_lib:format("= ~s ==== ~s-~s-~p::~s:~s:~s,~s ", + io_lib:format("= ~s ==== ~s-~s-~w::~s:~s:~s,~s ", [Text,t(D),month(Mo),Y,t(H),t(Mi),t(S), micro2milli(MicroS)]). pretty_title(#conn_log{client=Client}=Info) -> - io_lib:format("= Client ~p ~s Server ~s ", + io_lib:format("= Client ~w ~s Server ~ts ", [Client,actionstr(Info),serverstr(Info)]). actionstr(#conn_log{action=send}) -> "----->"; @@ -204,7 +203,7 @@ actionstr(_) -> "<---->". serverstr(#conn_log{name=undefined,address=Address}) -> io_lib:format("~p",[Address]); serverstr(#conn_log{name=Alias,address=Address}) -> - io_lib:format("~p(~p)",[Alias,Address]). + io_lib:format("~w(~p)",[Alias,Address]). month(1) -> "Jan"; month(2) -> "Feb"; diff --git a/lib/common_test/src/ct_cover.erl b/lib/common_test/src/ct_cover.erl index d39f50ba00..ae671c750a 100644 --- a/lib/common_test/src/ct_cover.erl +++ b/lib/common_test/src/ct_cover.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2009. All Rights Reserved. +%% Copyright Ericsson AB 2006-2012. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -24,7 +24,7 @@ -module(ct_cover). --export([get_spec/1, add_nodes/1, remove_nodes/1]). +-export([get_spec/1, add_nodes/1, remove_nodes/1, cross_cover_analyse/2]). -include("ct_util.hrl"). @@ -100,6 +100,22 @@ remove_nodes(Nodes) -> %%%----------------------------------------------------------------- +%%% @spec cross_cover_analyse(Level,Tests) -> ok +%%% Level = overview | details +%%% Tests = [{Tag,Dir}] +%%% Tag = atom() +%%% Dir = string() +%%% +%%% @doc Accumulate cover results over multiple tests. +%%% See the chapter about <seealso +%%% marker="cover_chapter#cross_cover">cross cover +%%% analysis</seealso> in the users's guide. +%%% +cross_cover_analyse(Level,Tests) -> + test_server_ctrl:cross_cover_analyse(Level,Tests). 
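%% Illustrative sketch, not part of the patch: a possible call to the newly
%% exported ct_cover:cross_cover_analyse/2 above. The tags and log directories
%% are hypothetical; the tags should correspond to the Tag values used in the
%% cover specification, and the directories to the finished test runs' logs.
ct_cover:cross_cover_analyse(details,
                             [{s1, "/ltest/s1_logs"},
                              {s2, "/ltest/s2_logs"}]).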
+ + +%%%----------------------------------------------------------------- %%% @hidden %% Read cover specification file and return the parsed info. @@ -249,9 +265,11 @@ get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms]) -> Mods = App#cover.excl_mods, get_app_info(App#cover{excl_mods=Mods++Mods1},Terms); -get_app_info(App=#cover{app=Name}, [{cross_apps,Name,AppMods1}|Terms]) -> - AppMods = App#cover.cross, - get_app_info(App#cover{cross=AppMods++AppMods1},Terms); +get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms]) -> + get_app_info(App, [{cross,none,Cross}|Terms]); +get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms]) -> + Cross = App#cover.cross, + get_app_info(App#cover{cross=Cross++Cross1},Terms); get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms]) -> get_app_info(App, [{src_dirs,none,Dirs}|Terms]); @@ -354,10 +372,10 @@ remove_excludes_and_dups(CoverData=#cover{excl_mods=Excl,incl_mods=Incl}) -> files2mods(Info=#cover{excl_mods=ExclFs, incl_mods=InclFs, - cross=CrossFs}) -> + cross=Cross}) -> Info#cover{excl_mods=files2mods1(ExclFs), incl_mods=files2mods1(InclFs), - cross=files2mods1(CrossFs)}. + cross=[{Tag,files2mods1(Fs)} || {Tag,Fs} <- Cross]}. files2mods1([M|Fs]) when is_atom(M) -> [M|files2mods1(Fs)]; diff --git a/lib/common_test/src/ct_event.erl b/lib/common_test/src/ct_event.erl index 49e0635d79..c1c1d943b9 100644 --- a/lib/common_test/src/ct_event.erl +++ b/lib/common_test/src/ct_event.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2012. All Rights Reserved. +%% Copyright Ericsson AB 2006-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -150,7 +150,7 @@ init(RecvPids) -> %%-------------------------------------------------------------------- handle_event(Event,State=#state{receivers=RecvPids}) -> print("~n=== ~w ===~n", [?MODULE]), - print("~p: ~p~n", [Event#event.name,Event#event.data]), + print("~w: ~w~n", [Event#event.name,Event#event.data]), lists:foreach(fun(Recv) -> report_event(Recv,Event) end, RecvPids), {ok,State}. diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl index 403eab66cb..5fe4eaf511 100644 --- a/lib/common_test/src/ct_framework.erl +++ b/lib/common_test/src/ct_framework.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2012. All Rights Reserved. +%% Copyright Ericsson AB 2004-2013. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -750,12 +750,12 @@ error_notification(Mod,Func,_Args,{Error,Loc}) -> Descr1 = lists:flatten(io_lib:format("~P",[Descr,10])), if length(Descr1) > 50 -> Descr2 = string:substr(Descr1,1,50), - io_lib:format("{badmatch,~s...}",[Descr2]); + io_lib:format("{badmatch,~ts...}",[Descr2]); true -> - io_lib:format("{badmatch,~s}",[Descr1]) + io_lib:format("{badmatch,~ts}",[Descr1]) end; {test_case_failed,Reason} -> - case (catch io_lib:format("{test_case_failed,~s}", [Reason])) of + case (catch io_lib:format("{test_case_failed,~ts}", [Reason])) of {'EXIT',_} -> io_lib:format("{test_case_failed,~p}", [Reason]); Result -> Result @@ -788,7 +788,7 @@ error_notification(Mod,Func,_Args,{Error,Loc}) -> "<font color=\"green\">" ++ "(" ++ "</font>" ++ Comment ++ "<font color=\"green\">" ++ ")" ++ "</font>", - Str = io_lib:format("~s ~s", [ErrorHtml,CommentHtml]), + Str = io_lib:format("~ts ~ts", [ErrorHtml,CommentHtml]), test_server:comment(Str) end end, @@ -803,24 +803,24 @@ error_notification(Mod,Func,_Args,{Error,Loc}) -> end, case Loc of [{?MODULE,error_in_suite}] -> - PrintErr("Error in suite detected: ~s", [ErrStr]); + PrintErr("Error in suite detected: ~ts", [ErrStr]); R when R == unknown; R == undefined -> - PrintErr("Error detected: ~s", [ErrStr]); + PrintErr("Error detected: ~ts", [ErrStr]); %% if a function specified by all/0 does not exist, we %% pick up undef here [{LastMod,LastFunc}|_] when ErrStr == "undef" -> - PrintErr("~w:~w could not be executed~nReason: ~s", + PrintErr("~w:~w could not be executed~nReason: ~ts", [LastMod,LastFunc,ErrStr]); [{LastMod,LastFunc}|_] -> - PrintErr("~w:~w failed~nReason: ~s", [LastMod,LastFunc,ErrStr]); + PrintErr("~w:~w failed~nReason: ~ts", [LastMod,LastFunc,ErrStr]); [{LastMod,LastFunc,LastLine}|_] -> %% print error to console, we are only %% interested in the last executed expression - PrintErr("~w:~w failed on line ~w~nReason: ~s", + PrintErr("~w:~w failed on line ~w~nReason: ~ts", [LastMod,LastFunc,LastLine,ErrStr]), case ct_util:read_suite_data({seq,Mod,Func}) of @@ -1184,13 +1184,19 @@ report(What,Data) -> ok; {error,Reason} -> ct_logs:log("COVER INFO", - "Importing cover data from: ~s fails! " + "Importing cover data from: ~ts fails! " "Reason: ~p", [Imp,Reason]) end end, Imps) end; tests_done -> ok; + severe_error -> + ct_event:sync_notify(#event{name=What, + node=node(), + data=Data}), + ct_util:set_testdata({What,Data}), + ok; tc_start -> %% Data = {{Suite,Func},LogFileName} ct_event:sync_notify(#event{name=tc_logfile, @@ -1343,4 +1349,7 @@ format_comment(Comment) -> %%%----------------------------------------------------------------- %%% @spec get_html_wrapper(TestName, PrintLabel, Cwd) -> Header get_html_wrapper(TestName, PrintLabel, Cwd, TableCols) -> - ct_logs:get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols). + get_html_wrapper(TestName, PrintLabel, Cwd, TableCols, utf8). + +get_html_wrapper(TestName, PrintLabel, Cwd, TableCols, Encoding) -> + ct_logs:get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols, Encoding). diff --git a/lib/common_test/src/ct_gen_conn.erl b/lib/common_test/src/ct_gen_conn.erl index 1f01d84601..2d4b1d1f52 100644 --- a/lib/common_test/src/ct_gen_conn.erl +++ b/lib/common_test/src/ct_gen_conn.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2012. All Rights Reserved. +%% Copyright Ericsson AB 2003-2013. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -272,7 +272,7 @@ call(Pid, Msg, Timeout) -> after Timeout -> erlang:demonitor(MRef, [flush]), log("ct_gen_conn", - "Connection process ~p not responding. Killing now!", + "Connection process ~w not responding. Killing now!", [Pid]), exit(Pid, kill), {error,{process_down,Pid,forced_termination}} diff --git a/lib/common_test/src/ct_groups.erl b/lib/common_test/src/ct_groups.erl index 74ab5e5439..14a8aab881 100644 --- a/lib/common_test/src/ct_groups.erl +++ b/lib/common_test/src/ct_groups.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2012. All Rights Reserved. +%% Copyright Ericsson AB 2004-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -462,7 +462,7 @@ make_conf(Mod, Name, Props, TestSpec) -> false -> ct_logs:log("TEST INFO", "init_per_group/2 and " "end_per_group/2 missing for group " - "~p in ~p, using default.", + "~w in ~w, using default.", [Name,Mod]), {{ct_framework,init_per_group}, {ct_framework,end_per_group}, diff --git a/lib/common_test/src/ct_hooks.erl b/lib/common_test/src/ct_hooks.erl index 1bcc63738e..3d87a82e24 100644 --- a/lib/common_test/src/ct_hooks.erl +++ b/lib/common_test/src/ct_hooks.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2012. All Rights Reserved. +%% Copyright Ericsson AB 2004-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -362,11 +362,11 @@ catch_apply(M,F,A, Default) -> [{M,F,A,_}|_] when Reason == undef -> Default; Trace -> - ct_logs:log("Suite Hook","Call to CTH failed: ~p:~p", + ct_logs:log("Suite Hook","Call to CTH failed: ~w:~p", [error,{Reason,Trace}]), throw({error_in_cth_call, lists:flatten( - io_lib:format("~p:~p/~p CTH call failed", + io_lib:format("~w:~w/~w CTH call failed", [M,F,length(A)]))}) end end. diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl index 0b7a8bb075..5924930072 100644 --- a/lib/common_test/src/ct_logs.erl +++ b/lib/common_test/src/ct_logs.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2012. All Rights Reserved. +%% Copyright Ericsson AB 2003-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -35,9 +35,10 @@ -export([add_external_logs/1, add_link/3]). -export([make_last_run_index/0]). -export([make_all_suites_index/1,make_all_runs_index/1]). --export([get_ts_html_wrapper/4]). +-export([get_ts_html_wrapper/5]). -export([xhtml/2, locate_priv_file/1, make_relative/1]). -export([insert_javascript/1]). +-export([uri/1]). %% Logging stuff directly from testcase -export([tc_log/3, tc_log/4, tc_log_async/3, tc_print/3, tc_print/4, @@ -307,8 +308,8 @@ end_log() -> %%% calling test suite.</p> add_external_logs(Logs) -> start_log("External Logs"), - [cont_log("<a href=~p>~s</a>\n", - [filename:join("log_private",Log),Log]) || Log <- Logs], + [cont_log("<a href=\"~ts\">~ts</a>\n", + [uri(filename:join("log_private",Log)),Log]) || Log <- Logs], end_log(). 
%%%----------------------------------------------------------------- @@ -320,8 +321,8 @@ add_external_logs(Logs) -> %%% @doc Print a link to a given file stored in the priv_dir of the %%% calling test suite. add_link(Heading,File,Type) -> - log(Heading,"<a href=~p type=~p>~s</a>\n", - [filename:join("log_private",File),Type,File]). + log(Heading,"<a href=\"~ts\" type=~p>~ts</a>\n", + [uri(filename:join("log_private",File)),Type,File]). %%%----------------------------------------------------------------- @@ -469,7 +470,7 @@ ct_log(Category,Format,Args) -> %%%================================================================= %%% Internal functions int_header() -> - "<div class=\"ct_internal\"><b>*** CT ~s *** ~s</b>". + "<div class=\"ct_internal\"><b>*** CT ~s *** ~ts</b>". int_footer() -> "</div>". @@ -692,7 +693,7 @@ logger_loop(State) -> logger_loop(State); {set_stylesheet,TC,SSFile} -> Fd = State#logger_state.ct_log_fd, - io:format(Fd, "~p loading external style sheet: ~s~n", + io:format(Fd, "~p loading external style sheet: ~ts~n", [TC,SSFile]), logger_loop(State#logger_state{stylesheet = SSFile}); {clear_stylesheet,_} when State#logger_state.stylesheet == undefined -> @@ -752,7 +753,7 @@ print_to_log(sync, FromPid, TCGL, List, State) -> IoProc = if FromPid /= TCGL -> TCGL; true -> State#logger_state.ct_log_fd end, - io:format(IoProc, "~s", [lists:foldl(IoFun, [], List)]), + io:format(IoProc, "~ts", [lists:foldl(IoFun, [], List)]), State; print_to_log(async, FromPid, TCGL, List, State) -> @@ -764,7 +765,7 @@ print_to_log(async, FromPid, TCGL, List, State) -> end, Printer = fun() -> test_server:permit_io(IoProc, self()), - io:format(IoProc, "~s", [lists:foldl(IoFun, [], List)]) + io:format(IoProc, "~ts", [lists:foldl(IoFun, [], List)]) end, case State#logger_state.async_print_jobs of [] -> @@ -868,7 +869,7 @@ set_evmgr_gl(GL) -> end. open_ctlog() -> - {ok,Fd} = file:open(?ct_log_name,[write]), + {ok,Fd} = file:open(?ct_log_name,[write,{encoding,utf8}]), io:format(Fd, header("Common Test Framework Log", {[],[1,2],[]}), []), case file:consult(ct_run:variables_file_name("../")) of {ok,Vars} -> @@ -878,7 +879,7 @@ open_ctlog() -> Dir = filename:dirname(Cwd), Variables = ct_run:variables_file_name(Dir), io:format(Fd, - "Can not read the file \'~s\' Reason: ~w\n" + "Can not read the file \'~ts\' Reason: ~w\n" "No configuration found for test!!\n", [Variables,Reason]) end, @@ -904,7 +905,7 @@ print_style(Fd,undefined) -> print_style(Fd,StyleSheet) -> case file:read_file(StyleSheet) of {ok,Bin} -> - Str = binary_to_list(Bin), + Str = b2s(Bin,encoding(StyleSheet)), Pos0 = case string:str(Str,"<style>") of 0 -> string:str(Str,"<STYLE>"); N0 -> N0 @@ -919,9 +920,9 @@ print_style(Fd,StyleSheet) -> print_style_error(Fd,StyleSheet,missing_style_end_tag); Pos0 /= 0 -> Style = string:sub_string(Str,Pos0,Pos1+7), - io:format(Fd,"~s\n",[Style]); + io:format(Fd,"~ts\n",[Style]); Pos0 == 0 -> - io:format(Fd,"<style>~s</style>\n",[Str]) + io:format(Fd,"<style>~ts</style>\n",[Str]) end; {error,Reason} -> print_style_error(Fd,StyleSheet,Reason) @@ -934,7 +935,7 @@ print_style(Fd,StyleSheet) -> %% [StyleSheet]). print_style_error(Fd,StyleSheet,Reason) -> - io:format(Fd,"\n<!-- Failed to load stylesheet ~s: ~p -->\n", + io:format(Fd,"\n<!-- Failed to load stylesheet ~ts: ~p -->\n", [StyleSheet,Reason]), print_style(Fd,undefined). @@ -963,7 +964,7 @@ make_last_run_index(StartTime) -> % io:put_chars("done\n"), ok; Err -> - io:format("Unknown internal error while updating ~s. 
" + io:format("Unknown internal error while updating ~ts. " "Please report.\n(Err: ~p, ID: 1)", [AbsIndexName,Err]), {error, Err} @@ -1001,7 +1002,7 @@ make_last_run_index1(StartTime,IndexName) -> %% write current Totals to file, later to be used in all_runs log write_totals_file(?totals_name,Label,Logs1,Totals), Index = [Index0|index_footer()], - case force_write_file(IndexName, Index) of + case force_write_file(IndexName, unicode:characters_to_binary(Index)) of ok -> ok; {error, Reason} -> @@ -1085,7 +1086,7 @@ make_one_index_entry1(SuiteName, Link, Label, Success, Fail, UserSkip, AutoSkip, CrashDumpName = SuiteName ++ "_erl_crash.dump", case filelib:is_file(CrashDumpName) of true -> - [" <a href=\"", CrashDumpName, + [" <a href=\"", uri(CrashDumpName), "\">(CrashDump)</a>"]; false -> "" @@ -1115,10 +1116,10 @@ make_one_index_entry1(SuiteName, Link, Label, Success, Fail, UserSkip, AutoSkip, [] -> "none"; _ -> "<a href=\""++?all_runs_name++"\">Old Runs</a>" end, - A = xhtml(["<td><font size=\"-1\"><a href=\"",CtLogFile, + A = xhtml(["<td><font size=\"-1\"><a href=\"",uri(CtLogFile), "\">CT Log</a></font></td>\n", "<td><font size=\"-1\">",OldRunsLink,"</font></td>\n"], - ["<td><a href=\"",CtLogFile,"\">CT Log</a></td>\n", + ["<td><a href=\"",uri(CtLogFile),"\">CT Log</a></td>\n", "<td>",OldRunsLink,"</td>\n"]), {L,T,N,A}; false -> @@ -1128,7 +1129,8 @@ make_one_index_entry1(SuiteName, Link, Label, Success, Fail, UserSkip, AutoSkip, if NotBuilt == 0 -> ["<td align=right>",integer_to_list(NotBuilt),"</td>\n"]; true -> - ["<td align=right><a href=\"",filename:join(CtRunDir,?ct_log_name),"\">", + ["<td align=right><a href=\"", + uri(filename:join(CtRunDir,?ct_log_name)),"\">", integer_to_list(NotBuilt),"</a></td>\n"] end, FailStr = @@ -1151,7 +1153,7 @@ make_one_index_entry1(SuiteName, Link, Label, Success, Fail, UserSkip, AutoSkip, [xhtml("<tr valign=top>\n", ["<tr class=\"",odd_or_even(),"\">\n"]), xhtml("<td><font size=\"-1\"><a href=\"", "<td><a href=\""), - LogFile,"\">",SuiteName,"</a>", CrashDumpLink, + uri(LogFile),"\">",SuiteName,"</a>", CrashDumpLink, xhtml("</font></td>\n", "</td>\n"), Lbl, Timestamp, "<td align=right>",integer_to_list(Success),"</td>\n", @@ -1364,8 +1366,9 @@ header1(Title, SubTitle, TableCols) -> "<head>\n", "<title>" ++ Title ++ " " ++ SubTitle ++ "</title>\n", "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n", + "<meta http-equiv=\"content-type\" content=\"text/html; charset=utf-8\">\n", xhtml("", - ["<link rel=\"stylesheet\" href=\"",CSSFile,"\" type=\"text/css\">\n"]), + ["<link rel=\"stylesheet\" href=\"",uri(CSSFile),"\" type=\"text/css\">\n"]), xhtml("", ["<script type=\"text/javascript\" src=\"",JQueryFile, "\"></script>\n"]), @@ -1462,7 +1465,7 @@ count_cases(Dir) -> LogFile = filename:join(Dir, ?suitelog_name), case file:read_file(LogFile) of {ok, Bin} -> - case count_cases1(binary_to_list(Bin), + case count_cases1(b2s(Bin), {undefined,undefined,undefined,undefined}) of {error,not_complete} -> %% The test is not complete - dont write summary @@ -1557,7 +1560,7 @@ config_table1([{Key,Value}|Vars]) -> "<td><pre>",io_lib:format("~p",[Value]),"</pre></td></tr>\n"], ["<tr class=\"", odd_or_even(), "\">\n", "<td>", atom_to_list(Key), "</td>\n", - "<td>", io_lib:format("~p",[Value]), "</td>\n</tr>\n"]) | + "<td>", io_lib:format("~p",[Value]), "</td>\n</tr>\n"]) | config_table1(Vars)]; config_table1([]) -> ["</tbody>\n</table>\n"]. 
@@ -1574,8 +1577,9 @@ make_all_runs_index(When) -> DirsSorted = (catch sort_all_runs(Dirs)), Header = all_runs_header(), Index = [runentry(Dir) || Dir <- DirsSorted], - Result = file:write_file(AbsName,Header++Index++ - all_runs_index_footer()), + Result = file:write_file(AbsName, + unicode:characters_to_binary( + Header++Index++all_runs_index_footer())), if When == start -> ok; true -> io:put_chars("done\n") end, @@ -1602,10 +1606,27 @@ sort_all_runs(Dirs) -> interactive_link() -> [Dir|_] = lists:reverse(filelib:wildcard(logdir_prefix()++"*.*")), CtLog = filename:join(Dir,"ctlog.html"), - Body = ["Log from last interactive run: <a href=\"",CtLog,"\">", - timestamp(Dir),"</a>"], - file:write_file("last_interactive.html",Body), - io:format("~n~nUpdated ~s\n" + Body = + [xhtml( + ["<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n", + "<html>\n"], + ["<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"\n", + "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n", + "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">\n"]), + "<!-- autogenerated by '"++atom_to_list(?MODULE)++"' -->\n", + "<head>\n", + "<title>Last interactive run</title>\n", + "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n", + "<meta http-equiv=\"content-type\" content=\"text/html; charset=utf-8\">\n", + "</head>\n", + "<body>\n", + "Log from last interactive run: <a href=\"",uri(CtLog),"\">", + timestamp(Dir),"</a>", + "</body>\n", + "</html>\n" + ], + file:write_file("last_interactive.html",unicode:characters_to_binary(Body)), + io:format("~n~nUpdated ~ts\n" "Any CT activities will be logged here\n", [?abs("last_interactive.html")]). @@ -1655,7 +1676,7 @@ runentry(Dir) -> TestNames; true -> Trunc = Polish(string:substr(TestNames,1,?testname_width-3)), - lists:flatten(io_lib:format("~s...",[Trunc])) + lists:flatten(io_lib:format("~ts...",[Trunc])) end, Total = TotSucc+TotFail+AllSkip, A = xhtml(["<td align=center><font size=\"-1\">",Node, @@ -1695,7 +1716,7 @@ runentry(Dir) -> "<td align=right>?</td>\n"], A++B++C end, - Index = filename:join(Dir,?index_name), + Index = uri(filename:join(Dir,?index_name)), [xhtml("<tr>\n", ["<tr class=\"",odd_or_even(),"\">\n"]), xhtml(["<td><font size=\"-1\"><a href=\"",Index,"\">",timestamp(Dir),"</a>", TotalsStr,"</font></td>\n"], @@ -1836,7 +1857,7 @@ make_all_suites_index(NewTestData = {_TestName,DirName}) -> ok -> ok; Err -> - io:format("Unknown internal error while updating ~s. " + io:format("Unknown internal error while updating ~ts. " "Please report.\n(Err: ~p, ID: 1)", [AbsIndexName,Err]), {error, Err} @@ -1900,7 +1921,7 @@ make_all_suites_index1(When, AbsIndexName, AllLogDirs) -> ok end; Err -> - io:format("Unknown internal error while updating ~s. " + io:format("Unknown internal error while updating ~ts. 
" "Please report.\n(Err: ~p, ID: 1)", [AbsIndexName,Err]), {error, Err} @@ -1912,7 +1933,7 @@ make_all_suites_index2(IndexName, AllTestLogDirs) -> all_suites_index_header(), 0, 0, 0, 0, 0, [], []), Index = [Index0|index_footer()], - case force_write_file(IndexName, Index) of + case force_write_file(IndexName, unicode:characters_to_binary(Index)) of ok -> {ok,CacheData}; {error, Reason} -> @@ -1970,7 +1991,7 @@ make_all_suites_ix_cached(AbsIndexName, NewTestData, Label, AllTestLogDirs) -> all_suites_index_header(IndexDir), 0, 0, 0, 0, 0), Index = [Index0|index_footer()], - case force_write_file(AbsIndexName, Index) of + case force_write_file(AbsIndexName, unicode:characters_to_binary(Index)) of ok -> ok; {error, Reason} -> @@ -2116,7 +2137,7 @@ simulate_logger_loop() -> receive {log,_,_,_,_,_,List} -> S = [[io_lib:format(Str,Args),io_lib:nl()] || {Str,Args} <- List], - io:format("~s",[S]), + io:format("~ts",[S]), simulate_logger_loop(); stop -> ok @@ -2273,11 +2294,12 @@ make_relative1(DirTs, CwdTs) -> Ups ++ DirTs. %%%----------------------------------------------------------------- -%%% @spec get_ts_html_wrapper(TestName, PrintLabel, Cwd) -> {Mode,Header,Footer} +%%% @spec get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols, Encoding) +%%% -> {Mode,Header,Footer} %%% %%% @doc %%% -get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols) -> +get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols, Encoding) -> TestName1 = if is_list(TestName) -> lists:flatten(TestName); true -> @@ -2319,14 +2341,16 @@ get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols) -> "<html>\n", "<head><title>", TestName1, "</title>\n", "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n", + "<meta http-equiv=\"content-type\" content=\"text/html; charset=", + html_encoding(Encoding),"\">\n", "</head>\n", "<body", Bgr, " bgcolor=\"white\" text=\"black\" ", "link=\"blue\" vlink=\"purple\" alink=\"red\">\n", LabelStr, "\n"], ["<center>\n<br><hr><p>\n", - "<a href=\"", AllRuns, + "<a href=\"", uri(AllRuns), "\">Test run history\n</a> | ", - "<a href=\"", TestIndex, + "<a href=\"", uri(TestIndex), "\">Top level test index\n</a>\n</p>\n", Copyright,"</center>\n</body>\n</html>\n"]}; _ -> @@ -2363,14 +2387,15 @@ get_ts_html_wrapper(TestName, PrintLabel, Cwd, TableCols) -> "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">\n", "<head>\n<title>", TestName1, "</title>\n", "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n", - "<link rel=\"stylesheet\" href=\"", CSSFile, "\" type=\"text/css\">\n", + "<meta http-equiv=\"content-type\" content=\"text/html; charset=utf-8\">\n", + "<link rel=\"stylesheet\" href=\"", uri(CSSFile), "\" type=\"text/css\">\n", "<script type=\"text/javascript\" src=\"", JQueryFile, "\"></script>\n", "<script type=\"text/javascript\" src=\"", TableSorterFile, "\"></script>\n"] ++ TableSorterScript ++ ["</head>\n","<body>\n", LabelStr, "\n"], ["<center>\n<br /><hr /><p>\n", - "<a href=\"", AllRuns, + "<a href=\"", uri(AllRuns), "\">Test run history\n</a> | ", - "<a href=\"", TestIndex, + "<a href=\"", uri(TestIndex), "\">Top level test index\n</a>\n</p>\n", Copyright,"</center>\n</body>\n</html>\n"]} end. @@ -2460,3 +2485,31 @@ insert_javascript({tablesorter,TableName, " $(\"#",TableName,"\").trigger(\"update\");\n", " $(\"#",TableName,"\").trigger(\"appendCache\");\n", "});\n</script>\n"]. + +uri("") -> + ""; +uri(Href) -> + test_server_ctrl:uri_encode(Href). + +%% Read magic comment to get encoding of text file. 
+%% If no magic comment exists, assume default encoding +encoding(File) -> + case epp:read_encoding(File) of + none -> + epp:default_encoding(); + E -> + E + end. + +%% Convert binary to string using default encoding +b2s(Bin) -> + b2s(Bin,epp:default_encoding()). + +%% Convert binary to string using given encoding +b2s(Bin,Encoding) -> + unicode:characters_to_list(Bin,Encoding). + +html_encoding(latin1) -> + "iso-8859-1"; +html_encoding(utf8) -> + "utf-8". diff --git a/lib/common_test/src/ct_make.erl b/lib/common_test/src/ct_make.erl index 8ddb91d355..d4bd81e78d 100644 --- a/lib/common_test/src/ct_make.erl +++ b/lib/common_test/src/ct_make.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2009-2011. All Rights Reserved. +%% Copyright Ericsson AB 2009-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -280,13 +280,13 @@ recompile(File, NoExec, Load, Opts) -> do_recompile(_File, true, _Load, _Opts) -> out_of_date; do_recompile(File, false, noload, Opts) -> - io:format("Recompile: ~s\n",[File]), + io:format("Recompile: ~ts\n",[File]), compile:file(File, [report_errors, report_warnings, error_summary |Opts]); do_recompile(File, false, load, Opts) -> - io:format("Recompile: ~s\n",[File]), + io:format("Recompile: ~ts\n",[File]), c:c(File, Opts); do_recompile(File, false, netload, Opts) -> - io:format("Recompile: ~s\n",[File]), + io:format("Recompile: ~ts\n",[File]), c:nc(File, Opts). exists(File) -> diff --git a/lib/common_test/src/ct_master.erl b/lib/common_test/src/ct_master.erl index 042c5ba267..e516f635d2 100644 --- a/lib/common_test/src/ct_master.erl +++ b/lib/common_test/src/ct_master.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2012. All Rights Reserved. +%% Copyright Ericsson AB 2006-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -51,7 +51,7 @@ %%% {testcase,Cases} | {spec,TestSpecs} | {allow_user_terms,Bool} | %%% {logdir,LogDir} | {event_handler,EventHandlers} | %%% {silent_connections,Conns} | {cover,CoverSpecFile} | -%%% {userconfig, UserCfgFiles} +%%% {cover_stop,Bool} | {userconfig, UserCfgFiles} %%% CfgFiles = string() | [string()] %%% TestDirs = string() | [string()] %%% Suites = atom() | [atom()] @@ -82,39 +82,48 @@ run_test(NodeOptsList) when is_list(NodeOptsList) -> %%% ExclNodes = [atom()] %%% %%% @doc Tests are spawned on the nodes as specified in <code>TestSpecs</code>. -%%% Each specification in TestSpec will be handled separately. It is however possible -%%% to also specify a list of specifications that should be merged into one before -%%% the tests are executed. Any test without a particular node specification will -%%% also be executed on the nodes in <code>InclNodes</code>. Nodes in the -%%% <code>ExclNodes</code> list will be excluded from the test. +%%% Each specification in TestSpec will be handled separately. It is however +%%% possible to also specify a list of specifications that should be merged +%%% into one before the tests are executed. Any test without a particular node +%%% specification will also be executed on the nodes in <code>InclNodes</code>. +%%% Nodes in the <code>ExclNodes</code> list will be excluded from the test. 
run([TS|TestSpecs],AllowUserTerms,InclNodes,ExclNodes) when is_list(TS), is_list(InclNodes), is_list(ExclNodes) -> - TS1 = - case TS of - List=[S|_] when is_list(S) -> List; - Spec -> [Spec] - end, - Result = - case catch ct_testspec:collect_tests_from_file(TS1,InclNodes,AllowUserTerms) of - {error,Reason} -> - {error,Reason}; - TSRec=#testspec{logdir=AllLogDirs, - config=StdCfgFiles, - userconfig=UserCfgFiles, - include=AllIncludes, - init=AllInitOpts, - event_handler=AllEvHs} -> - AllCfgFiles = {StdCfgFiles, UserCfgFiles}, - RunSkipPerNode = ct_testspec:prepare_tests(TSRec), - RunSkipPerNode2 = exclude_nodes(ExclNodes,RunSkipPerNode), - run_all(RunSkipPerNode2,AllLogDirs,AllCfgFiles,AllEvHs, - AllIncludes,[],[],AllInitOpts,TS1) - end, - [{TS,Result} | run(TestSpecs,AllowUserTerms,InclNodes,ExclNodes)]; + %% Note: [Spec] means run one test with Spec + %% [Spec1,Spec2] means run two tests separately + %% [[Spec1,Spec2]] means run one test, with the two specs merged + case catch ct_testspec:collect_tests_from_file([TS],InclNodes, + AllowUserTerms) of + {error,Reason} -> + [{error,Reason} | run(TestSpecs,AllowUserTerms,InclNodes,ExclNodes)]; + Tests -> + RunResult = + lists:map( + fun({Specs,TSRec=#testspec{logdir=AllLogDirs, + config=StdCfgFiles, + userconfig=UserCfgFiles, + include=AllIncludes, + init=AllInitOpts, + event_handler=AllEvHs}}) -> + AllCfgFiles = + {StdCfgFiles,UserCfgFiles}, + RunSkipPerNode = + ct_testspec:prepare_tests(TSRec), + RunSkipPerNode2 = + exclude_nodes(ExclNodes,RunSkipPerNode), + TSList = if is_integer(hd(TS)) -> [TS]; + true -> TS end, + {Specs,run_all(RunSkipPerNode2,AllLogDirs, + AllCfgFiles,AllEvHs, + AllIncludes,[],[],AllInitOpts,TSList)} + end, Tests), + RunResult ++ run(TestSpecs,AllowUserTerms,InclNodes,ExclNodes) + end; run([],_,_,_) -> []; -run(TS,AllowUserTerms,InclNodes,ExclNodes) when is_list(InclNodes), is_list(ExclNodes) -> +run(TS,AllowUserTerms,InclNodes,ExclNodes) when is_list(InclNodes), + is_list(ExclNodes) -> run([TS],AllowUserTerms,InclNodes,ExclNodes). %%%----------------------------------------------------------------- @@ -152,29 +161,32 @@ exclude_nodes([],RunSkipPerNode) -> %%% AllowUserTerms = bool() %%% Node = atom() %%% -%%% @doc Tests are spawned on <code>Node</code> according to <code>TestSpecs</code>. +%%% @doc Tests are spawned on <code>Node</code> according to +%%% <code>TestSpecs</code>. 
run_on_node([TS|TestSpecs],AllowUserTerms,Node) when is_list(TS),is_atom(Node) -> - TS1 = - case TS of - [List|_] when is_list(List) -> List; - Spec -> [Spec] - end, - Result = - case catch ct_testspec:collect_tests_from_file(TS1,[Node],AllowUserTerms) of - {error,Reason} -> - {error,Reason}; - TSRec=#testspec{logdir=AllLogDirs, - config=StdCfgFiles, - init=AllInitOpts, - include=AllIncludes, - userconfig=UserCfgFiles, - event_handler=AllEvHs} -> - AllCfgFiles = {StdCfgFiles, UserCfgFiles}, - {Run,Skip} = ct_testspec:prepare_tests(TSRec,Node), - run_all([{Node,Run,Skip}],AllLogDirs,AllCfgFiles,AllEvHs, - AllIncludes, [],[],AllInitOpts,TS1) - end, - [{TS,Result} | run_on_node(TestSpecs,AllowUserTerms,Node)]; + case catch ct_testspec:collect_tests_from_file([TS],[Node], + AllowUserTerms) of + {error,Reason} -> + [{error,Reason} | run_on_node(TestSpecs,AllowUserTerms,Node)]; + Tests -> + RunResult = + lists:map( + fun({Specs,TSRec=#testspec{logdir=AllLogDirs, + config=StdCfgFiles, + init=AllInitOpts, + include=AllIncludes, + userconfig=UserCfgFiles, + event_handler=AllEvHs}}) -> + AllCfgFiles = {StdCfgFiles,UserCfgFiles}, + {Run,Skip} = ct_testspec:prepare_tests(TSRec,Node), + TSList = if is_integer(hd(TS)) -> [TS]; + true -> TS end, + {Specs,run_all([{Node,Run,Skip}],AllLogDirs, + AllCfgFiles,AllEvHs, + AllIncludes, [],[],AllInitOpts,TSList)} + end, Tests), + RunResult ++ run_on_node(TestSpecs,AllowUserTerms,Node) + end; run_on_node([],_,_) -> []; run_on_node(TS,AllowUserTerms,Node) when is_atom(Node) -> @@ -244,8 +256,9 @@ run_all([],AllLogDirs,_,AllEvHs,_AllIncludes, {value,{_,Dir}} -> Dir; false -> "." end, - log(tty,"Master Logdir","~s",[MasterLogDir]), - start_master(lists:reverse(NodeOpts),Handlers,MasterLogDir,LogDirs,InitOptions,Specs), + log(tty,"Master Logdir","~ts",[MasterLogDir]), + start_master(lists:reverse(NodeOpts),Handlers,MasterLogDir, + LogDirs,InitOptions,Specs), ok. @@ -297,13 +310,15 @@ start_master(NodeOptsList) -> start_master(NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Specs) -> Master = spawn_link(?MODULE,init_master,[self(),NodeOptsList,EvHandlers, - MasterLogDir,LogDirs,InitOptions,Specs]), + MasterLogDir,LogDirs, + InitOptions,Specs]), receive {Master,Result} -> Result end. %%% @hidden -init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Specs) -> +init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs, + InitOptions,Specs) -> case whereis(ct_master) of undefined -> register(ct_master,self()), @@ -325,13 +340,14 @@ init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Spec {MLPid,_} = ct_master_logs:start(MasterLogDir, [N || {N,_} <- NodeOptsList]), log(all,"Master Logger process started","~w",[MLPid]), + case Specs of [] -> ok; _ -> SpecsStr = lists:map(fun(Name) -> Name ++ " " end,Specs), - ct_master_logs:log("Test Specification file(s)","~s", + ct_master_logs:log("Test Specification file(s)","~ts", [lists:flatten(SpecsStr)]) end, @@ -340,7 +356,7 @@ init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Spec ct_master_event:add_handler(), %% add user handlers for master event manager Add = fun({H,Args}) -> - log(all,"Adding Event Handler","~p",[H]), + log(all,"Adding Event Handler","~w",[H]), case gen_event:add_handler(?CT_MEVMGR_REF,H,Args) of ok -> ok; {'EXIT',Why} -> exit(Why); @@ -359,7 +375,8 @@ init_master(Parent,NodeOptsList,EvHandlers,MasterLogDir,LogDirs,InitOptions,Spec init_master1(Parent,NodeOptsList,InitOptions,LogDirs). 
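%% Illustrative sketch, not part of the patch: the list semantics described in
%% the "Note" comment in ct_master:run/4 above, with hypothetical spec files.
ct_master:run("a.spec").              % [Spec]          - one test, one spec
ct_master:run(["a.spec","b.spec"]).   % [Spec1,Spec2]   - two tests, run separately
ct_master:run([["a.spec","b.spec"]]). % [[Spec1,Spec2]] - one test, the two specs merged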
init_master1(Parent,NodeOptsList,InitOptions,LogDirs) -> - {Inaccessible,NodeOptsList1,InitOptions1} = init_nodes(NodeOptsList,InitOptions), + {Inaccessible,NodeOptsList1,InitOptions1} = init_nodes(NodeOptsList, + InitOptions), case Inaccessible of [] -> init_master2(Parent,NodeOptsList,LogDirs); @@ -391,8 +408,9 @@ init_master2(Parent,NodeOptsList,LogDirs) -> SpawnAndMon = fun({Node,Opts}) -> monitor_node(Node,true), - log(all,"Test Info","Starting test(s) on ~p...",[Node]), - {spawn_link(Node,?MODULE,init_node_ctrl,[self(),Cookie,Opts]),Node} + log(all,"Test Info","Starting test(s) on ~w...",[Node]), + {spawn_link(Node,?MODULE,init_node_ctrl,[self(),Cookie,Opts]), + Node} end, NodeCtrlPids = lists:map(SpawnAndMon,NodeOptsList), Result = master_loop(#state{node_ctrl_pids=NodeCtrlPids, @@ -404,7 +422,8 @@ master_loop(#state{node_ctrl_pids=[], results=Finished}) -> Str = lists:map(fun({Node,Result}) -> - io_lib:format("~-40.40.*s~p\n",[$_,atom_to_list(Node),Result]) + io_lib:format("~-40.40.*ts~p\n", + [$_,atom_to_list(Node),Result]) end,lists:reverse(Finished)), log(all,"TEST RESULTS",Str,[]), log(all,"Info","Updating log files",[]), @@ -437,18 +456,20 @@ master_loop(State=#state{node_ctrl_pids=NodeCtrlPids, Bad end, log(all,"Test Info", - "Test on node ~w failed! Reason: ~p",[Node,Error]), + "Test on node ~w failed! Reason: ~p", + [Node,Error]), {Locks1,Blocked1} = update_queue(exit,Node,Locks,Blocked), master_loop(State#state{node_ctrl_pids=NodeCtrlPids1, - results=[{Node,Error}|Results], + results=[{Node, + Error}|Results], locks=Locks1, blocked=Blocked1}) end; undefined -> %% ignore (but report) exit from master_logger etc log(all,"Test Info", - "Warning! Process ~p has terminated. Reason: ~p", + "Warning! Process ~w has terminated. Reason: ~p", [Pid,Reason]), master_loop(State) end; @@ -531,7 +552,7 @@ update_queue(take,Node,From,Lock={Op,Resource},Locks,Blocked) -> %% Blocked: [{{Operation,Resource},Node,WaitingPid},...] case lists:keysearch(Lock,1,Locks) of {value,{_Lock,Owner}} -> % other node has lock - log(html,"Lock Info","Node ~p blocked on ~w by ~w. Resource: ~p", + log(html,"Lock Info","Node ~w blocked on ~w by ~w. Resource: ~p", [Node,Op,Owner,Resource]), Blocked1 = Blocked ++ [{Lock,Node,From}], {Locks,Blocked1}; @@ -546,7 +567,7 @@ update_queue(release,Node,_From,Lock={Op,Resource},Locks,Blocked) -> case lists:keysearch(Lock,1,Blocked) of {value,E={Lock,SomeNode,WaitingPid}} -> Blocked1 = lists:delete(E,Blocked), - log(html,"Lock Info","Node ~p proceeds with ~w. Resource: ~p", + log(html,"Lock Info","Node ~w proceeds with ~w. Resource: ~p", [SomeNode,Op,Resource]), reply(ok,WaitingPid), % waiting process may start {Locks1,Blocked1}; @@ -625,7 +646,8 @@ refresh_logs([D|Dirs],Refreshed) -> refresh_logs([],Refreshed) -> Str = lists:map(fun({D,Result}) -> - io_lib:format("Refreshing logs in ~p... ~p",[D,Result]) + io_lib:format("Refreshing logs in ~p... ~p", + [D,Result]) end,Refreshed), log(all,"Info",Str,[]). @@ -638,7 +660,7 @@ init_node_ctrl(MasterPid,Cookie,Opts) -> process_flag(trap_exit, true), MasterNode = node(MasterPid), group_leader(whereis(user),self()), - io:format("~n********** node_ctrl process ~p started on ~p **********~n", + io:format("~n********** node_ctrl process ~w started on ~w **********~n", [self(),node()]), %% initially this node must have the same cookie as the master node %% but now we set it explicitly for the connection so that test suites @@ -671,7 +693,7 @@ init_node_ctrl(MasterPid,Cookie,Opts) -> pong -> MasterPid ! 
{self(),{result,Result}}; pang -> - io:format("Warning! Connection to master node ~p is lost. " + io:format("Warning! Connection to master node ~w is lost. " "Can't report result!~n~n", [MasterNode]) end. @@ -696,8 +718,9 @@ status(MasterPid,Event) -> log(To,Heading,Str,Args) -> if To == all ; To == tty -> - Str1 = ["=== ",Heading," ===\n",io_lib:format(Str,Args),"\n"], - io:format(Str1,[]); + Chars = ["=== ",Heading," ===\n", + io_lib:format(Str,Args),"\n"], + io:put_chars(Chars); true -> ok end, @@ -751,21 +774,25 @@ start_nodes(InitOptions)-> IsAlive = lists:member(NodeName, nodes()), case {HasNodeStart, IsAlive} of {false, false}-> - io:format("WARNING: Node ~p is not alive but has no node_start option~n", [NodeName]); + io:format("WARNING: Node ~w is not alive but has no " + "node_start option~n", [NodeName]); {false, true}-> - io:format("Node ~p is alive~n", [NodeName]); + io:format("Node ~w is alive~n", [NodeName]); {true, false}-> {node_start, NodeStart} = lists:keyfind(node_start, 1, Options), {value, {callback_module, Callback}, NodeStart2}= lists:keytake(callback_module, 1, NodeStart), case Callback:start(Host, Node, NodeStart2) of {ok, NodeName} -> - io:format("Node ~p started successfully with callback ~p~n", [NodeName,Callback]); + io:format("Node ~w started successfully " + "with callback ~w~n", [NodeName,Callback]); {error, Reason, _NodeName} -> - io:format("Failed to start node ~p with callback ~p! Reason: ~p~n", [NodeName, Callback, Reason]) + io:format("Failed to start node ~w with callback ~w! " + "Reason: ~p~n", [NodeName, Callback, Reason]) end; {true, true}-> - io:format("WARNING: Node ~p is alive but has node_start option~n", [NodeName]) + io:format("WARNING: Node ~w is alive but has node_start " + "option~n", [NodeName]) end end, InitOptions). @@ -778,7 +805,8 @@ eval_on_nodes(InitOptions)-> {false,_}-> ok; {true,false}-> - io:format("WARNING: Node ~p is not alive but has eval option ~n", [NodeName]); + io:format("WARNING: Node ~w is not alive but has eval " + "option~n", [NodeName]); {true,true}-> {eval, MFAs} = lists:keyfind(eval, 1, Options), evaluate(NodeName, MFAs) @@ -789,9 +817,11 @@ eval_on_nodes(InitOptions)-> evaluate(Node, [{M,F,A}|MFAs])-> case rpc:call(Node, M, F, A) of {badrpc,Reason}-> - io:format("WARNING: Failed to call ~p:~p/~p on node ~p due to ~p~n", [M,F,length(A),Node,Reason]); + io:format("WARNING: Failed to call ~w:~w/~w on node ~w " + "due to ~p~n", [M,F,length(A),Node,Reason]); Result-> - io:format("Called ~p:~p/~p on node ~p, result: ~p~n", [M,F,length(A),Node,Result]) + io:format("Called ~w:~w/~w on node ~w, result: ~p~n", + [M,F,length(A),Node,Result]) end, evaluate(Node, MFAs); evaluate(_Node, [])-> diff --git a/lib/common_test/src/ct_master_event.erl b/lib/common_test/src/ct_master_event.erl index a70baefaaf..5877b7c6f2 100644 --- a/lib/common_test/src/ct_master_event.erl +++ b/lib/common_test/src/ct_master_event.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2009. All Rights Reserved. +%% Copyright Ericsson AB 2006-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -114,13 +114,13 @@ init(_) -> %% each installed event handler to handle the event. 
%%-------------------------------------------------------------------- handle_event(#event{name=start_logging,node=Node,data=RunDir},State) -> - ct_master_logs:log("CT Master Event Handler","Got ~s from ~p",[RunDir,Node]), + ct_master_logs:log("CT Master Event Handler","Got ~ts from ~w",[RunDir,Node]), ct_master_logs:nodedir(Node,RunDir), {ok,State}; handle_event(#event{name=Name,node=Node,data=Data},State) -> print("~n=== ~w ===~n", [?MODULE]), - print("~p on ~p: ~p~n", [Name,Node,Data]), + print("~w on ~w: ~p~n", [Name,Node,Data]), {ok,State}. %%-------------------------------------------------------------------- diff --git a/lib/common_test/src/ct_master_logs.erl b/lib/common_test/src/ct_master_logs.erl index 9e61d5b16f..5393097f57 100644 --- a/lib/common_test/src/ct_master_logs.erl +++ b/lib/common_test/src/ct_master_logs.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2012. All Rights Reserved. +%% Copyright Ericsson AB 2006-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -133,8 +133,8 @@ init(Parent,LogDir,Nodes) -> end,Nodes)), io:format(CtLogFd,int_header(),[log_timestamp(now()),"Test Nodes\n"]), - io:format(CtLogFd,"~s\n",[NodeStr]), - io:format(CtLogFd,int_footer()++"\n",[]), + io:format(CtLogFd,"~ts\n",[NodeStr]), + io:put_chars(CtLogFd,[int_footer(),"\n"]), NodeDirIxFd = open_nodedir_index(RunDirAbs,Time), Parent ! {started,self(),{Time,RunDirAbs}}, @@ -201,25 +201,22 @@ loop(State) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%% open_ct_master_log(Dir) -> FullName = filename:join(Dir,?ct_master_log_name), - {ok,Fd} = file:open(FullName,[write]), - io:format(Fd,header("Common Test Master Log", {[],[1,2],[]}),[]), + {ok,Fd} = file:open(FullName,[write,{encoding,utf8}]), + io:put_chars(Fd,header("Common Test Master Log", {[],[1,2],[]})), %% maybe add config info here later - io:format(Fd, config_table([]), []), - io:format(Fd, - "<style>\n" - "div.ct_internal { background:lightgrey; color:black }\n" - "div.default { background:lightgreen; color:black }\n" - "</style>\n", - []), - io:format(Fd, - xhtml("<br><h2>Progress Log</h2>\n<pre>\n", - "<br /><h2>Progress Log</h2>\n<pre>\n"), - []), + io:put_chars(Fd,config_table([])), + io:put_chars(Fd, + "<style>\n" + "div.ct_internal { background:lightgrey; color:black }\n" + "div.default { background:lightgreen; color:black }\n" + "</style>\n"), + io:put_chars(Fd, + xhtml("<br><h2>Progress Log</h2>\n<pre>\n", + "<br /><h2>Progress Log</h2>\n<pre>\n")), Fd. close_ct_master_log(Fd) -> - io:format(Fd,"</pre>",[]), - io:format(Fd,footer(),[]), + io:put_chars(Fd,["</pre>",footer()]), file:close(Fd). config_table(Vars) -> @@ -238,7 +235,7 @@ config_table1([]) -> ["</tbody>\n</table>\n"]. int_header() -> - "<div class=\"ct_internal\"><b>*** CT MASTER ~s *** ~s</b>". + "<div class=\"ct_internal\"><b>*** CT MASTER ~s *** ~ts</b>". int_footer() -> "</div>". @@ -247,21 +244,22 @@ int_footer() -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% open_nodedir_index(Dir,StartTime) -> FullName = filename:join(Dir,?nodedir_index_name), - {ok,Fd} = file:open(FullName,[write]), - io:format(Fd,nodedir_index_header(StartTime),[]), + {ok,Fd} = file:open(FullName,[write,{encoding,utf8}]), + io:put_chars(Fd,nodedir_index_header(StartTime)), Fd. 
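%% Illustrative sketch, not part of the patch: one reason to prefer
%% io:put_chars/2 over io:format/3 with an empty argument list, as done in
%% several rewritten calls above. If the already formatted string happens to
%% contain a tilde, io:format/3 tries to interpret it as the start of a control
%% sequence, while io:put_chars/2 writes it as-is. File name and text are
%% hypothetical.
{ok,Fd} = file:open("example.html", [write,{encoding,utf8}]),
Line = "progress: 75~% done",
%% io:format(Fd, Line, []) would fail, since "~%" is not a valid control
%% sequence; pre-formatted text is written safely with:
ok = io:put_chars(Fd, Line),
ok = file:close(Fd).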
print_nodedir(Node,RunDir,Fd) -> Index = filename:join(RunDir,"index.html"), - io:format(Fd, - ["<tr>\n" - "<td align=center>",atom_to_list(Node),"</td>\n", - "<td align=left><a href=\"",Index,"\">",Index,"</a></td>\n", - "</tr>\n"],[]), + io:put_chars(Fd, + ["<tr>\n" + "<td align=center>",atom_to_list(Node),"</td>\n", + "<td align=left><a href=\"",ct_logs:uri(Index),"\">",Index, + "</a></td>\n", + "</tr>\n"]), ok. close_nodedir_index(Fd) -> - io:format(Fd,index_footer(),[]), + io:put_chars(Fd,index_footer()), file:close(Fd). nodedir_index_header(StartTime) -> @@ -286,7 +284,9 @@ make_all_runs_index(LogDir) -> DirsSorted = (catch sort_all_runs(Dirs)), Header = all_runs_header(), Index = [runentry(Dir) || Dir <- DirsSorted], - Result = file:write_file(FullName,Header++Index++index_footer()), + Result = file:write_file(FullName, + unicode:characters_to_binary( + Header++Index++index_footer())), Result. sort_all_runs(Dirs) -> @@ -326,7 +326,8 @@ runentry(Dir) -> end, Index = filename:join(Dir,?nodedir_index_name), ["<tr>\n" - "<td align=center><a href=\"",Index,"\">",timestamp(Dir),"</a></td>\n", + "<td align=center><a href=\"",ct_logs:uri(Index),"\">", + timestamp(Dir),"</a></td>\n", "<td align=center>",MasterStr,"</td>\n", "<td align=center>",NodesStr,"</td>\n", "</tr>\n"]. @@ -384,8 +385,10 @@ header(Title, TableCols) -> "<head>\n", "<title>" ++ Title ++ "</title>\n", "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n", + "<meta http-equiv=\"content-type\" content=\"text/html; charset=utf-8\">\n", xhtml("", - ["<link rel=\"stylesheet\" href=\"",CSSFile,"\" type=\"text/css\">"]), + ["<link rel=\"stylesheet\" href=\"",ct_logs:uri(CSSFile), + "\" type=\"text/css\">"]), xhtml("", ["<script type=\"text/javascript\" src=\"",JQueryFile, "\"></script>\n"]), diff --git a/lib/common_test/src/ct_master_status.erl b/lib/common_test/src/ct_master_status.erl index 76060fb7bb..f9f511ecca 100644 --- a/lib/common_test/src/ct_master_status.erl +++ b/lib/common_test/src/ct_master_status.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2009. All Rights Reserved. +%% Copyright Ericsson AB 2006-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -70,7 +70,7 @@ init(_) -> %% handle_event(#event{name=Name,node=Node,data=Data},State) -> print("~n=== ~w ===~n", [?MODULE]), - print("~p on ~p: ~p~n", [Name,Node,Data]), + print("~w on ~w: ~p~n", [Name,Node,Data]), {ok,State}. %%-------------------------------------------------------------------- diff --git a/lib/common_test/src/ct_netconfc.erl b/lib/common_test/src/ct_netconfc.erl index 294b82bff6..1339e53780 100644 --- a/lib/common_test/src/ct_netconfc.erl +++ b/lib/common_test/src/ct_netconfc.erl @@ -1,7 +1,7 @@ %%---------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2012. All Rights Reserved. +%% Copyright Ericsson AB 2012-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -307,7 +307,7 @@ -type option() :: {ssh,host()} | {port,inet:port_number()} | {user,string()} | {password,string()} | {user_dir,string()} | {timeout,timeout()}. --type host() :: inet:host_name() | inet:ip_address(). +-type host() :: inet:hostname() | inet:ip_address(). -type notification() :: {notification, xml_attributes(), notification_content()}. 
-type notification_content() :: [event_time()|simple_xml()]. @@ -1073,7 +1073,8 @@ handle_msg({get_event_streams=Op,Streams,Timeout}, From, State) -> SimpleXml = encode_rpc_operation(get,[Filter]), do_send_rpc(Op, SimpleXml, Timeout, From, State). -handle_msg({ssh_cm, _CM, {data, _Ch, _Type, Data}}, State) -> +handle_msg({ssh_cm, CM, {data, Ch, _Type, Data}}, State) -> + ssh_connection:adjust_window(CM,Ch,size(Data)), handle_data(Data, State); handle_msg({ssh_cm, _CM, _SshCloseMsg}, State) -> %% _SshCloseMsg can probably be one of @@ -1280,10 +1281,11 @@ do_send(Connection, SimpleXml) -> to_xml_doc(Simple) -> Prolog = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>", - Xml = list_to_binary(xmerl:export_simple([Simple], - xmerl_xml, - [#xmlAttribute{name=prolog, - value=Prolog}])), + Xml = unicode:characters_to_binary( + xmerl:export_simple([Simple], + xmerl_xml, + [#xmlAttribute{name=prolog, + value=Prolog}])), <<Xml/binary,?END_TAG/binary>>. %%%----------------------------------------------------------------- @@ -1687,18 +1689,27 @@ log(#connection{host=Host,port=Port,name=Name},Action,Data) -> %% Log callback - called from the error handler process -format_data(raw,Data) -> - io_lib:format("~n~s~n",[hide_password(Data)]); -format_data(pretty,Data) -> - io_lib:format("~n~s~n",[indent(Data)]); -format_data(html,Data) -> - io_lib:format("~n~s~n",[html_format(Data)]). +format_data(How,Data) -> + %% Assuming that the data is encoded as UTF-8. If it is not, then + %% the printout might be wrong, but the format function will not + %% crash! + %% FIXME: should probably read encoding from the data and do + %% unicode:characters_to_binary(Data,InEncoding,utf8) when calling + %% log/3 instead of assuming utf8 in as done here! + do_format_data(How,unicode:characters_to_binary(Data)). + +do_format_data(raw,Data) -> + io_lib:format("~n~ts~n",[hide_password(Data)]); +do_format_data(pretty,Data) -> + io_lib:format("~n~ts~n",[indent(Data)]); +do_format_data(html,Data) -> + io_lib:format("~n~ts~n",[html_format(Data)]). %%%----------------------------------------------------------------- %%% Hide password elements from XML data hide_password(Bin) -> re:replace(Bin,<<"(<password[^>]*>)[^<]*(</password>)">>,<<"\\1*****\\2">>, - [global,{return,binary}]). + [global,{return,binary},unicode]). %%%----------------------------------------------------------------- %%% HTML formatting @@ -1716,13 +1727,13 @@ indent(Bin) -> Part -> indent1(lists:reverse(Part)++String,erase(indent)) end, - list_to_binary(IndentedString). + unicode:characters_to_binary(IndentedString). %% Normalizes the XML document by removing all space and newline %% between two XML tags. %% Returns a list, no matter if the input was a list or a binary. -normalize(Str) -> - re:replace(Str,<<">[ \r\n\t]+<">>,<<"><">>,[global,{return,list}]). +normalize(Bin) -> + re:replace(Bin,<<">[ \r\n\t]+<">>,<<"><">>,[global,{return,list},unicode]). indent1("<?"++Rest1,Indent1) -> @@ -1805,7 +1816,8 @@ get_tag([]) -> %%% SSH stuff ssh_receive_data() -> receive - {ssh_cm, _CM, {data, _Ch, _Type, Data}} -> + {ssh_cm, CM, {data, Ch, _Type, Data}} -> + ssh_connection:adjust_window(CM,Ch,size(Data)), {ok, Data}; {ssh_cm, _CM, {Closed, _Ch}} = X when Closed == closed; Closed == eof -> {error,X}; diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl index 96b2934382..c0bdbb2a09 100644 --- a/lib/common_test/src/ct_run.erl +++ b/lib/common_test/src/ct_run.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2012. 
All Rights Reserved. +%% Copyright Ericsson AB 2004-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -58,6 +58,7 @@ vts, shell, cover, + cover_stop, coverspec, step, logdir, @@ -147,7 +148,7 @@ script_start(Args) -> _ -> "" end end, - io:format("~nCommon Test~s starting (cwd is ~s)~n~n", + io:format("~nCommon Test~s starting (cwd is ~ts)~n~n", [CTVsn,Cwd]), Self = self(), Pid = spawn_link(fun() -> script_start1(Self, Args) end), @@ -245,6 +246,7 @@ script_start1(Parent, Args) -> Vts = get_start_opt(vts, true, Args), Shell = get_start_opt(shell, true, Args), Cover = get_start_opt(cover, fun([CoverFile]) -> ?abs(CoverFile) end, Args), + CoverStop = get_start_opt(cover_stop, fun([CS]) -> list_to_atom(CS) end, Args), LogDir = get_start_opt(logdir, fun([LogD]) -> LogD end, Args), LogOpts = get_start_opt(logopts, fun(Os) -> [list_to_atom(O) || O <- Os] end, [], Args), @@ -328,32 +330,33 @@ script_start1(Parent, Args) -> true end, - StartOpts = #opts{label = Label, profile = Profile, - vts = Vts, shell = Shell, cover = Cover, - logdir = LogDir, logopts = LogOpts, - basic_html = BasicHtml, - verbosity = Verbosity, - event_handlers = EvHandlers, - ct_hooks = CTHooks, - enable_builtin_hooks = EnableBuiltinHooks, - auto_compile = AutoCompile, - include = IncludeDirs, - silent_connections = SilentConns, - stylesheet = Stylesheet, - multiply_timetraps = MultTT, - scale_timetraps = ScaleTT, - create_priv_dir = CreatePrivDir, - starter = script}, - + Opts = #opts{label = Label, profile = Profile, + vts = Vts, shell = Shell, + cover = Cover, cover_stop = CoverStop, + logdir = LogDir, logopts = LogOpts, + basic_html = BasicHtml, + verbosity = Verbosity, + event_handlers = EvHandlers, + ct_hooks = CTHooks, + enable_builtin_hooks = EnableBuiltinHooks, + auto_compile = AutoCompile, + include = IncludeDirs, + silent_connections = SilentConns, + stylesheet = Stylesheet, + multiply_timetraps = MultTT, + scale_timetraps = ScaleTT, + create_priv_dir = CreatePrivDir, + starter = script}, + %% check if log files should be refreshed or go on to run tests... - Result = run_or_refresh(StartOpts, Args), + Result = run_or_refresh(Opts, Args), %% send final results to starting process waiting in script_start/0 Parent ! {self(), Result}. -run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) -> +run_or_refresh(Opts = #opts{logdir = LogDir}, Args) -> case proplists:get_value(refresh_logs, Args) of undefined -> - script_start2(StartOpts, Args); + script_start2(Opts, Args); Refresh -> LogDir1 = case Refresh of [] -> which(logdir,LogDir); @@ -375,167 +378,203 @@ run_or_refresh(StartOpts = #opts{logdir = LogDir}, Args) -> {error,{all_suites_index,ASReason}}; _ -> file:set_cwd(Cwd), - io:format("Logs in ~s refreshed!~n~n", [LogDir1]), + io:format("Logs in ~ts refreshed!~n~n", + [LogDir1]), timer:sleep(500), % time to flush io before quitting ok end end end. 
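The cover_stop field added to the opts record is filled in from the -cover_stop command line flag (converted with list_to_atom above) or from the options list of ct:run_test/1, defaults to true further down in do_run/4, and is finally passed on to test_server_ctrl:cover/8. A hedged usage sketch; the spec and cover file names are placeholders:

    %% override the default cover_stop value of true
    ct:run_test([{spec, "my_tests.spec"},
                 {cover, "my_tests.coverspec"},
                 {cover_stop, false}]).

The command line equivalent is the new -cover_stop Bool flag listed in script_usage/0 below.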
-script_start2(StartOpts = #opts{vts = undefined, - shell = undefined}, Args) -> - TestSpec = proplists:get_value(spec, Args), - {Terms,Opts} = - case TestSpec of - Specs when Specs =/= [], Specs =/= undefined -> - %% using testspec as input for test - Relaxed = get_start_opt(allow_user_terms, true, false, Args), - case catch ct_testspec:collect_tests_from_file(Specs, Relaxed) of - {E,Reason} when E == error ; E == 'EXIT' -> - {{error,Reason},StartOpts}; - TS -> - SpecStartOpts = get_data_for_node(TS, node()), - - Label = choose_val(StartOpts#opts.label, - SpecStartOpts#opts.label), - - Profile = choose_val(StartOpts#opts.profile, - SpecStartOpts#opts.profile), - - LogDir = choose_val(StartOpts#opts.logdir, - SpecStartOpts#opts.logdir), - - AllLogOpts = merge_vals([StartOpts#opts.logopts, - SpecStartOpts#opts.logopts]), - AllVerbosity = - merge_keyvals([StartOpts#opts.verbosity, - SpecStartOpts#opts.verbosity]), - AllSilentConns = - merge_vals([StartOpts#opts.silent_connections, - SpecStartOpts#opts.silent_connections]), - Cover = - choose_val(StartOpts#opts.cover, - SpecStartOpts#opts.cover), - MultTT = - choose_val(StartOpts#opts.multiply_timetraps, - SpecStartOpts#opts.multiply_timetraps), - ScaleTT = - choose_val(StartOpts#opts.scale_timetraps, - SpecStartOpts#opts.scale_timetraps), - - CreatePrivDir = - choose_val(StartOpts#opts.create_priv_dir, - SpecStartOpts#opts.create_priv_dir), - - AllEvHs = - merge_vals([StartOpts#opts.event_handlers, - SpecStartOpts#opts.event_handlers]), - - AllCTHooks = merge_vals( - [StartOpts#opts.ct_hooks, - SpecStartOpts#opts.ct_hooks]), - - EnableBuiltinHooks = - choose_val( - StartOpts#opts.enable_builtin_hooks, - SpecStartOpts#opts.enable_builtin_hooks), - - Stylesheet = - choose_val(StartOpts#opts.stylesheet, - SpecStartOpts#opts.stylesheet), - - AllInclude = merge_vals([StartOpts#opts.include, - SpecStartOpts#opts.include]), - application:set_env(common_test, include, AllInclude), - - AutoCompile = - case choose_val(StartOpts#opts.auto_compile, - SpecStartOpts#opts.auto_compile) of - undefined -> - true; - ACBool -> - application:set_env(common_test, - auto_compile, - ACBool), - ACBool - end, - - BasicHtml = - case choose_val(StartOpts#opts.basic_html, - SpecStartOpts#opts.basic_html) of - undefined -> - false; - BHBool -> - application:set_env(common_test, basic_html, - BHBool), - BHBool - end, - - {TS,StartOpts#opts{label = Label, - profile = Profile, - testspecs = Specs, - cover = Cover, - logdir = LogDir, - logopts = AllLogOpts, - basic_html = BasicHtml, - verbosity = AllVerbosity, - silent_connections = AllSilentConns, - config = SpecStartOpts#opts.config, - event_handlers = AllEvHs, - ct_hooks = AllCTHooks, - enable_builtin_hooks = - EnableBuiltinHooks, - stylesheet = Stylesheet, - auto_compile = AutoCompile, - include = AllInclude, - multiply_timetraps = MultTT, - scale_timetraps = ScaleTT, - create_priv_dir = CreatePrivDir}} - end; - _ -> - {undefined,StartOpts} - end, - %% read config/userconfig from start flags - InitConfig = ct_config:prepare_config_list(Args), - TheLogDir = which(logdir, Opts#opts.logdir), - case {TestSpec,Terms} of - {_,{error,_}=Error} -> - Error; - {[],_} -> +script_start2(Opts = #opts{vts = undefined, + shell = undefined}, Args) -> + case proplists:get_value(spec, Args) of + Specs when Specs =/= [], Specs =/= undefined -> + Specs1 = get_start_opt(join_specs, [Specs], Specs, Args), + %% using testspec as input for test + Relaxed = get_start_opt(allow_user_terms, true, false, Args), + case catch 
ct_testspec:collect_tests_from_file(Specs1, Relaxed) of + {E,Reason} when E == error ; E == 'EXIT' -> + {error,Reason}; + TestSpecData -> + execute_all_specs(TestSpecData, Opts, Args, []) + end; + [] -> {error,no_testspec_specified}; - {undefined,_} -> % no testspec used - case check_and_install_configfiles(InitConfig, TheLogDir, Opts) of + _ -> % no testspec used + %% read config/userconfig from start flags + InitConfig = ct_config:prepare_config_list(Args), + TheLogDir = which(logdir, Opts#opts.logdir), + case check_and_install_configfiles(InitConfig, + TheLogDir, + Opts) of ok -> % go on read tests from start flags script_start3(Opts#opts{config=InitConfig, logdir=TheLogDir}, Args); Error -> Error - end; - {_,_} -> % testspec used - %% merge config from start flags with config from testspec - AllConfig = merge_vals([InitConfig, Opts#opts.config]), - case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of - ok -> % read tests from spec - {Run,Skip} = ct_testspec:prepare_tests(Terms, node()), - do_run(Run, Skip, Opts#opts{config=AllConfig, - logdir=TheLogDir}, Args); - Error -> - Error end end; -script_start2(StartOpts, Args) -> +script_start2(Opts, Args) -> %% read config/userconfig from start flags InitConfig = ct_config:prepare_config_list(Args), - LogDir = which(logdir, StartOpts#opts.logdir), - case check_and_install_configfiles(InitConfig, LogDir, StartOpts) of + LogDir = which(logdir, Opts#opts.logdir), + case check_and_install_configfiles(InitConfig, LogDir, Opts) of ok -> % go on read tests from start flags - script_start3(StartOpts#opts{config=InitConfig, - logdir=LogDir}, Args); + script_start3(Opts#opts{config=InitConfig, + logdir=LogDir}, Args); + Error -> + Error + end. + +execute_all_specs([], _, _, Result) -> + Result1 = lists:reverse(Result), + case lists:keysearch('EXIT', 1, Result1) of + {value,{_,_,ExitReason}} -> + exit(ExitReason); + false -> + case lists:keysearch(error, 1, Result1) of + {value,Error} -> + Error; + false -> + lists:foldl(fun({Ok,Fail,{UserSkip,AutoSkip}}, + {Ok1,Fail1,{UserSkip1,AutoSkip1}}) -> + {Ok1+Ok,Fail1+Fail, + {UserSkip1+UserSkip, + AutoSkip1+AutoSkip}} + end, {0,0,{0,0}}, Result1) + end + end; + +execute_all_specs([{Specs,TS} | TSs], Opts, Args, Result) -> + CombinedOpts = combine_test_opts(TS, Specs, Opts), + try execute_one_spec(TS, CombinedOpts, Args) of + ExecResult -> + execute_all_specs(TSs, Opts, Args, [ExecResult|Result]) + catch + _ : ExitReason -> + execute_all_specs(TSs, Opts, Args, + [{'EXIT',self(),ExitReason}|Result]) + end. + +execute_one_spec(TS, Opts, Args) -> + %% read config/userconfig from start flags + InitConfig = ct_config:prepare_config_list(Args), + TheLogDir = which(logdir, Opts#opts.logdir), + %% merge config from start flags with config from testspec + AllConfig = merge_vals([InitConfig, Opts#opts.config]), + case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of + ok -> % read tests from spec + {Run,Skip} = ct_testspec:prepare_tests(TS, node()), + do_run(Run, Skip, Opts#opts{config=AllConfig, + logdir=TheLogDir}, Args); Error -> Error end. 
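With this rewrite every test specification normally becomes its own test run: execute_all_specs/4 walks the collected {Specs,TS} tuples and runs them one by one, while the new join_specs option (picked up with get_start_opt(join_specs, ...) above) wraps the spec list so that all files are merged into a single run. A sketch of the corresponding ct:run_test/1 call; the spec file names are placeholders:

    %% run a.spec and b.spec as one merged run instead of two consecutive runs
    ct:run_test([{spec, ["a.spec", "b.spec"]},
                 {join_specs, true}]).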
+combine_test_opts(TS, Specs, Opts) -> + TSOpts = get_data_for_node(TS, node()), + + Label = choose_val(Opts#opts.label, + TSOpts#opts.label), + + Profile = choose_val(Opts#opts.profile, + TSOpts#opts.profile), + + LogDir = choose_val(Opts#opts.logdir, + TSOpts#opts.logdir), + + AllLogOpts = merge_vals([Opts#opts.logopts, + TSOpts#opts.logopts]), + AllVerbosity = + merge_keyvals([Opts#opts.verbosity, + TSOpts#opts.verbosity]), + AllSilentConns = + merge_vals([Opts#opts.silent_connections, + TSOpts#opts.silent_connections]), + Cover = + choose_val(Opts#opts.cover, + TSOpts#opts.cover), + CoverStop = + choose_val(Opts#opts.cover_stop, + TSOpts#opts.cover_stop), + MultTT = + choose_val(Opts#opts.multiply_timetraps, + TSOpts#opts.multiply_timetraps), + ScaleTT = + choose_val(Opts#opts.scale_timetraps, + TSOpts#opts.scale_timetraps), + + CreatePrivDir = + choose_val(Opts#opts.create_priv_dir, + TSOpts#opts.create_priv_dir), + + AllEvHs = + merge_vals([Opts#opts.event_handlers, + TSOpts#opts.event_handlers]), + + AllCTHooks = merge_vals( + [Opts#opts.ct_hooks, + TSOpts#opts.ct_hooks]), + + EnableBuiltinHooks = + choose_val( + Opts#opts.enable_builtin_hooks, + TSOpts#opts.enable_builtin_hooks), + + Stylesheet = + choose_val(Opts#opts.stylesheet, + TSOpts#opts.stylesheet), + + AllInclude = merge_vals([Opts#opts.include, + TSOpts#opts.include]), + application:set_env(common_test, include, AllInclude), + + AutoCompile = + case choose_val(Opts#opts.auto_compile, + TSOpts#opts.auto_compile) of + undefined -> + true; + ACBool -> + application:set_env(common_test, + auto_compile, + ACBool), + ACBool + end, + + BasicHtml = + case choose_val(Opts#opts.basic_html, + TSOpts#opts.basic_html) of + undefined -> + false; + BHBool -> + application:set_env(common_test, basic_html, + BHBool), + BHBool + end, + + Opts#opts{label = Label, + profile = Profile, + testspecs = Specs, + cover = Cover, + cover_stop = CoverStop, + logdir = which(logdir, LogDir), + logopts = AllLogOpts, + basic_html = BasicHtml, + verbosity = AllVerbosity, + silent_connections = AllSilentConns, + config = TSOpts#opts.config, + event_handlers = AllEvHs, + ct_hooks = AllCTHooks, + enable_builtin_hooks = EnableBuiltinHooks, + stylesheet = Stylesheet, + auto_compile = AutoCompile, + include = AllInclude, + multiply_timetraps = MultTT, + scale_timetraps = ScaleTT, + create_priv_dir = CreatePrivDir}. + check_and_install_configfiles( Configs, LogDir, #opts{ event_handlers = EvHandlers, @@ -555,12 +594,12 @@ check_and_install_configfiles( {error,{cant_load_callback_module,Info}} end. 
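combine_test_opts/3 gives values supplied at start time (command line flags or the ct:run_test/1 options list) precedence over values from the test specification. choose_val/2 itself is not part of this diff; the sketch below only illustrates the precedence rule that is assumed here:

    %% assumed semantics: the start value wins unless it is undefined
    ChooseVal = fun(undefined, SpecVal) -> SpecVal;
                   (StartVal, _SpecVal) -> StartVal
                end,
    "/tmp/logs" = ChooseVal(undefined, "/tmp/logs"),
    "./logs"    = ChooseVal("./logs", "/tmp/logs").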
-script_start3(StartOpts, Args) -> - StartOpts1 = get_start_opt(step, - fun(Step) -> - StartOpts#opts{step = Step, - cover = undefined} - end, StartOpts, Args), +script_start3(Opts, Args) -> + Opts1 = get_start_opt(step, + fun(Step) -> + Opts#opts{step = Step, + cover = undefined} + end, Opts, Args), case {proplists:get_value(dir, Args), proplists:get_value(suite, Args), groups_and_cases(proplists:get_value(group, Args), @@ -574,17 +613,17 @@ script_start3(StartOpts, Args) -> {error,no_dir_specified}; {Dirs,undefined,[]} when is_list(Dirs) -> - script_start4(StartOpts#opts{tests = tests(Dirs)}, Args); + script_start4(Opts#opts{tests = tests(Dirs)}, Args); {undefined,Suites,[]} when is_list(Suites) -> Ts = tests([suite_to_test(S) || S <- Suites]), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); {undefined,Suite,GsAndCs} when is_list(Suite) -> case [suite_to_test(S) || S <- Suite] of DirMods = [_] -> Ts = tests(DirMods, GsAndCs), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); [_,_|_] -> {error,multiple_suites_and_cases}; _ -> @@ -598,10 +637,10 @@ script_start3(StartOpts, Args) -> case [suite_to_test(Dir,S) || S <- Suite] of DirMods when GsAndCs == [] -> Ts = tests(DirMods), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); DirMods = [_] when GsAndCs /= [] -> Ts = tests(DirMods, GsAndCs), - script_start4(StartOpts1#opts{tests = Ts}, Args); + script_start4(Opts1#opts{tests = Ts}, Args); [_,_|_] when GsAndCs /= [] -> {error,multiple_suites_and_cases}; _ -> @@ -612,8 +651,8 @@ script_start3(StartOpts, Args) -> {error,incorrect_start_options}; {undefined,undefined,_} -> - if StartOpts#opts.vts ; StartOpts#opts.shell -> - script_start4(StartOpts#opts{tests = []}, Args); + if Opts#opts.vts ; Opts#opts.shell -> + script_start4(Opts#opts{tests = []}, Args); true -> script_usage(), {error,missing_start_options} @@ -723,6 +762,7 @@ script_usage() -> "\n\t[-silent_connections [ConnType1 ConnType2 .. ConnTypeN]]" "\n\t[-stylesheet CSSFile]" "\n\t[-cover CoverCfgFile]" + "\n\t[-cover_stop Bool]" "\n\t[-event_handler EvHandler1 EvHandler2 .. EvHandlerN]" "\n\t[-ct_hooks CTHook1 CTHook2 .. CTHookN]" "\n\t[-include InclDir1 InclDir2 .. InclDirN]" @@ -742,9 +782,11 @@ script_usage() -> "\n\t[-logopts LogOpt1 LogOpt2 .. LogOptN]" "\n\t[-verbosity GenVLvl | [CategoryVLvl1 .. CategoryVLvlN]]" "\n\t[-allow_user_terms]" + "\n\t[-join_specs]" "\n\t[-silent_connections [ConnType1 ConnType2 .. ConnTypeN]]" "\n\t[-stylesheet CSSFile]" "\n\t[-cover CoverCfgFile]" + "\n\t[-cover_stop Bool]" "\n\t[-event_handler EvHandler1 EvHandler2 .. EvHandlerN]" "\n\t[-ct_hooks CTHook1 CTHook2 .. CTHookN]" "\n\t[-include InclDir1 InclDir2 .. 
InclDirN]" @@ -839,7 +881,7 @@ run_test1(StartOpts) when is_list(StartOpts) -> undefined -> Tracing = start_trace(StartOpts), {ok,Cwd} = file:get_cwd(), - io:format("~nCommon Test starting (cwd is ~s)~n~n", [Cwd]), + io:format("~nCommon Test starting (cwd is ~ts)~n~n", [Cwd]), Res = case ct_repeat:loop_test(func, StartOpts) of false -> @@ -938,6 +980,7 @@ run_test2(StartOpts) -> %% code coverage Cover = get_start_opt(cover, fun(CoverFile) -> ?abs(CoverFile) end, StartOpts), + CoverStop = get_start_opt(cover_stop, value, StartOpts), %% timetrap manipulation MultiplyTT = get_start_opt(multiply_timetraps, value, 1, StartOpts), @@ -1000,7 +1043,8 @@ run_test2(StartOpts) -> Step = get_start_opt(step, value, StartOpts), Opts = #opts{label = Label, profile = Profile, - cover = Cover, step = Step, logdir = LogDir, + cover = Cover, cover_stop = CoverStop, + step = Step, logdir = LogDir, logopts = LogOpts, basic_html = BasicHtml, config = CfgFiles, verbosity = Verbosity, @@ -1033,102 +1077,60 @@ run_test2(StartOpts) -> end. run_spec_file(Relaxed, - Opts = #opts{testspecs = Specs, config = CfgFiles}, + Opts = #opts{testspecs = Specs}, StartOpts) -> Specs1 = case Specs of [X|_] when is_integer(X) -> [Specs]; _ -> Specs end, AbsSpecs = lists:map(fun(SF) -> ?abs(SF) end, Specs1), - log_ts_names(AbsSpecs), - case catch ct_testspec:collect_tests_from_file(AbsSpecs, Relaxed) of + AbsSpecs1 = get_start_opt(join_specs, [AbsSpecs], AbsSpecs, StartOpts), + case catch ct_testspec:collect_tests_from_file(AbsSpecs1, Relaxed) of {Error,CTReason} when Error == error ; Error == 'EXIT' -> exit({error,CTReason}); - TS -> - SpecOpts = get_data_for_node(TS, node()), - Label = choose_val(Opts#opts.label, - SpecOpts#opts.label), - Profile = choose_val(Opts#opts.profile, - SpecOpts#opts.profile), - LogDir = choose_val(Opts#opts.logdir, - SpecOpts#opts.logdir), - AllLogOpts = merge_vals([Opts#opts.logopts, - SpecOpts#opts.logopts]), - Stylesheet = choose_val(Opts#opts.stylesheet, - SpecOpts#opts.stylesheet), - AllVerbosity = merge_keyvals([Opts#opts.verbosity, - SpecOpts#opts.verbosity]), - AllSilentConns = merge_vals([Opts#opts.silent_connections, - SpecOpts#opts.silent_connections]), - AllConfig = merge_vals([CfgFiles, SpecOpts#opts.config]), - Cover = choose_val(Opts#opts.cover, - SpecOpts#opts.cover), - MultTT = choose_val(Opts#opts.multiply_timetraps, - SpecOpts#opts.multiply_timetraps), - ScaleTT = choose_val(Opts#opts.scale_timetraps, - SpecOpts#opts.scale_timetraps), - CreatePrivDir = choose_val(Opts#opts.create_priv_dir, - SpecOpts#opts.create_priv_dir), - AllEvHs = merge_vals([Opts#opts.event_handlers, - SpecOpts#opts.event_handlers]), - AllInclude = merge_vals([Opts#opts.include, - SpecOpts#opts.include]), - AllCTHooks = merge_vals([Opts#opts.ct_hooks, - SpecOpts#opts.ct_hooks]), - EnableBuiltinHooks = choose_val(Opts#opts.enable_builtin_hooks, - SpecOpts#opts.enable_builtin_hooks), - - application:set_env(common_test, include, AllInclude), - - AutoCompile = case choose_val(Opts#opts.auto_compile, - SpecOpts#opts.auto_compile) of - undefined -> - true; - ACBool -> - application:set_env(common_test, auto_compile, - ACBool), - ACBool - end, + TestSpecData -> + run_all_specs(TestSpecData, Opts, StartOpts, []) + end. 
- BasicHtml = case choose_val(Opts#opts.basic_html, - SpecOpts#opts.basic_html) of - undefined -> - false; - BHBool -> - application:set_env(common_test, basic_html, - BHBool), - BHBool - end, - - Opts1 = Opts#opts{label = Label, - profile = Profile, - cover = Cover, - logdir = which(logdir, LogDir), - logopts = AllLogOpts, - stylesheet = Stylesheet, - basic_html = BasicHtml, - verbosity = AllVerbosity, - silent_connections = AllSilentConns, - config = AllConfig, - event_handlers = AllEvHs, - auto_compile = AutoCompile, - include = AllInclude, - testspecs = AbsSpecs, - multiply_timetraps = MultTT, - scale_timetraps = ScaleTT, - create_priv_dir = CreatePrivDir, - ct_hooks = AllCTHooks, - enable_builtin_hooks = EnableBuiltinHooks - }, - - case check_and_install_configfiles(AllConfig,Opts1#opts.logdir, - Opts1) of - ok -> - {Run,Skip} = ct_testspec:prepare_tests(TS, node()), - reformat_result(catch do_run(Run, Skip, Opts1, StartOpts)); - {error,GCFReason} -> - exit({error,GCFReason}) +run_all_specs([], _, _, TotResult) -> + TotResult1 = lists:reverse(TotResult), + case lists:keysearch('EXIT', 1, TotResult1) of + {value,{_,_,ExitReason}} -> + exit(ExitReason); + false -> + case lists:keysearch(error, 1, TotResult1) of + {value,Error} -> + Error; + false -> + lists:foldl(fun({Ok,Fail,{UserSkip,AutoSkip}}, + {Ok1,Fail1,{UserSkip1,AutoSkip1}}) -> + {Ok1+Ok,Fail1+Fail, + {UserSkip1+UserSkip, + AutoSkip1+AutoSkip}} + end, {0,0,{0,0}}, TotResult1) end + end; + +run_all_specs([{Specs,TS} | TSs], Opts, StartOpts, TotResult) -> + log_ts_names(Specs), + Combined = #opts{config = TSConfig} = combine_test_opts(TS, Specs, Opts), + AllConfig = merge_vals([Opts#opts.config, TSConfig]), + try run_one_spec(TS, Combined#opts{config = AllConfig}, StartOpts) of + Result -> + run_all_specs(TSs, Opts, StartOpts, [Result | TotResult]) + catch + _ : Reason -> + run_all_specs(TSs, Opts, StartOpts, [{error,Reason} | TotResult]) + end. + +run_one_spec(TS, CombinedOpts, StartOpts) -> + #opts{logdir = Logdir, config = Config} = CombinedOpts, + case check_and_install_configfiles(Config, Logdir, CombinedOpts) of + ok -> + {Run,Skip} = ct_testspec:prepare_tests(TS, node()), + reformat_result(catch do_run(Run, Skip, CombinedOpts, StartOpts)); + Error -> + Error end. 
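When several specifications are run, both execute_all_specs/4 and run_all_specs/4 fold the individual {Ok,Failed,{UserSkipped,AutoSkipped}} tuples into one total; a run that crashed is recorded and its exit is re-raised only after all specs have been attempted. A worked example of the fold with made-up numbers:

    %% totals for two runs, {5,0,{1,0}} and {7,1,{0,2}}
    {12,1,{1,2}} =
        lists:foldl(fun({Ok,Fail,{US,AS}}, {Ok1,Fail1,{US1,AS1}}) ->
                            {Ok1+Ok, Fail1+Fail, {US1+US, AS1+AS}}
                    end, {0,0,{0,0}}, [{5,0,{1,0}}, {7,1,{0,2}}]).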
run_prepared(Run, Skip, Opts = #opts{logdir = LogDir, @@ -1318,7 +1320,7 @@ run_testspec(TestSpec) -> run_testspec1(TestSpec) -> {ok,Cwd} = file:get_cwd(), - io:format("~nCommon Test starting (cwd is ~s)~n~n", [Cwd]), + io:format("~nCommon Test starting (cwd is ~ts)~n~n", [Cwd]), case catch run_testspec2(TestSpec) of {'EXIT',Reason} -> file:set_cwd(Cwd), @@ -1376,6 +1378,7 @@ get_data_for_node(#testspec{label = Labels, verbosity = VLvls, silent_connections = SilentConnsList, cover = CoverFs, + cover_stop = CoverStops, config = Cfgs, userconfig = UsrCfgs, event_handler = EvHs, @@ -1407,6 +1410,7 @@ get_data_for_node(#testspec{label = Labels, SCs -> SCs end, Cover = proplists:get_value(Node, CoverFs), + CoverStop = proplists:get_value(Node, CoverStops), MT = proplists:get_value(Node, MTs), ST = proplists:get_value(Node, STs), CreatePrivDir = proplists:get_value(Node, PDs), @@ -1425,6 +1429,7 @@ get_data_for_node(#testspec{label = Labels, verbosity = Verbosity, silent_connections = SilentConns, cover = Cover, + cover_stop = CoverStop, config = ConfigFiles, event_handlers = EvHandlers, ct_hooks = FiltCTHooks, @@ -1452,7 +1457,7 @@ refresh_logs(LogDir) -> {error,{all_runs_index,ARReason}}; _ -> file:set_cwd(Cwd), - io:format("Logs in ~s refreshed!~n",[LogDir]), + io:format("Logs in ~ts refreshed!~n",[LogDir]), ok end end @@ -1597,14 +1602,7 @@ do_run(Tests, Misc, LogDir, LogOpts) when is_list(Misc), StepOpts -> #opts{step = StepOpts} end, - Opts1 = - case proplists:get_value(cover, Misc) of - undefined -> - Opts; - CoverFile -> - Opts#opts{cover = CoverFile} - end, - do_run(Tests, [], Opts1#opts{logdir = LogDir}, []); + do_run(Tests, [], Opts#opts{logdir = LogDir}, []); do_run(Tests, Skip, Opts, Args) when is_record(Opts, opts) -> #opts{label = Label, profile = Profile, cover = Cover, @@ -1638,7 +1636,13 @@ do_run(Tests, Skip, Opts, Args) when is_record(Opts, opts) -> {error,Reason} -> exit({error,Reason}); CoverSpec -> - Opts#opts{coverspec = CoverSpec} + CoverStop = + case Opts#opts.cover_stop of + undefined -> true; + Stop -> Stop + end, + Opts#opts{coverspec = CoverSpec, + cover_stop = CoverStop} end end, %% This env variable is used by test_server to determine @@ -1776,7 +1780,7 @@ possibly_spawn(true, Tests, Skip, Opts) -> end, unlink(CTUtilSrv), SupPid = spawn(Supervisor), - io:format(user, "~nTest control handed over to process ~p~n~n", + io:format(user, "~nTest control handed over to process ~w~n~n", [SupPid]), SupPid. @@ -1864,7 +1868,7 @@ verify_suites(TestSuites) -> Suite)), io:format(user, "Suite ~w not found" - "in directory ~s~n", + "in directory ~ts~n", [Suite,TestDir]), {Found,[{DS,[Name]}|NotFound]} end @@ -1879,7 +1883,7 @@ verify_suites(TestSuites) -> ActualDir = filename:dirname(SuiteFile), {[{ActualDir,Suite}|Found],NotFound}; false -> - io:format(user, "Directory ~s is " + io:format(user, "Directory ~ts is " "invalid~n", [Dir]), Name = filename:join(Dir, atom_to_list(Suite)), {Found,[{DS,[Name]}|NotFound]} @@ -2119,7 +2123,7 @@ do_run_test(Tests, Skip, Opts) -> cross = CovCross, src = _CovSrc}} -> ct_logs:log("COVER INFO", - "Using cover specification file: ~s~n" + "Using cover specification file: ~ts~n" "App: ~w~n" "Cross cover: ~w~n" "Including ~w modules~n" @@ -2134,7 +2138,7 @@ do_run_test(Tests, Skip, Opts) -> DelResult = file:delete(CovExport), ct_logs:log("COVER INFO", "Warning! " - "Export file ~s already exists. " + "Export file ~ts already exists. 
" "Deleting with result: ~p", [CovExport,DelResult]); false -> @@ -2144,7 +2148,8 @@ do_run_test(Tests, Skip, Opts) -> %% tell test_server which modules should be cover compiled %% note that actual compilation is done when tests start test_server_ctrl:cover(CovApp, CovFile, CovExcl, CovIncl, - CovCross, CovExport, CovLevel), + CovCross, CovExport, CovLevel, + Opts#opts.cover_stop), %% save cover data (used e.g. to add nodes dynamically) ct_util:set_testdata({cover,CovData}), %% start cover on specified nodes @@ -2216,6 +2221,15 @@ do_run_test(Tests, Skip, Opts) -> end, CleanUp), [code:del_path(Dir) || Dir <- AddedToPath], + %% If a severe error has occurred in the test_server, + %% we will generate an exception here. + case ct_util:get_testdata(severe_error) of + undefined -> ok; + SevereError -> + ct_logs:log("SEVERE ERROR", "~p\n", [SevereError]), + exit(SevereError) + end, + case ct_util:get_testdata(stats) of Stats = {_Ok,_Failed,{_UserSkipped,_AutoSkipped}} -> Stats; @@ -2606,7 +2620,7 @@ log_ts_names(Specs) -> List = lists:map(fun(Name) -> Name ++ " " end, Specs), - ct_logs:log("Test Specification file(s)", "~s", + ct_logs:log("Test Specification file(s)", "~ts", [lists:flatten(List)]). merge_arguments(Args) -> @@ -2618,6 +2632,9 @@ merge_arguments([LogDir={logdir,_}|Args], Merged) -> merge_arguments([CoverFile={cover,_}|Args], Merged) -> merge_arguments(Args, handle_arg(replace, CoverFile, Merged)); +merge_arguments([CoverStop={cover_stop,_}|Args], Merged) -> + merge_arguments(Args, handle_arg(replace, CoverStop, Merged)); + merge_arguments([{'case',TC}|Args], Merged) -> merge_arguments(Args, handle_arg(merge, {testcase,TC}, Merged)); @@ -2714,10 +2731,10 @@ event_handler_args2opts(Default, Args) -> event_handler_init_args2opts(EHs) end. event_handler_init_args2opts([EH, Arg, "and" | EHs]) -> - [{list_to_atom(EH),lists:flatten(io_lib:format("~s",[Arg]))} | + [{list_to_atom(EH),lists:flatten(io_lib:format("~ts",[Arg]))} | event_handler_init_args2opts(EHs)]; event_handler_init_args2opts([EH, Arg]) -> - [{list_to_atom(EH),lists:flatten(io_lib:format("~s",[Arg]))}]; + [{list_to_atom(EH),lists:flatten(io_lib:format("~ts",[Arg]))}]; event_handler_init_args2opts([]) -> []. @@ -2867,6 +2884,10 @@ opts2args(EnvStartOpts) -> [{allow_user_terms,[]}]; ({allow_user_terms,false}) -> []; + ({join_specs,true}) -> + [{join_specs,[]}]; + ({join_specs,false}) -> + []; ({auto_compile,false}) -> [{no_auto_compile,[]}]; ({auto_compile,true}) -> @@ -3035,7 +3056,7 @@ start_trace(Args) -> false end; {_,Error} -> - io:format("Warning! Tracing not started. Reason: ~s~n~n", + io:format("Warning! Tracing not started. Reason: ~ts~n~n", [file:format_error(Error)]), false end; diff --git a/lib/common_test/src/ct_slave.erl b/lib/common_test/src/ct_slave.erl index aa3413fa89..1fd8c04f8b 100644 --- a/lib/common_test/src/ct_slave.erl +++ b/lib/common_test/src/ct_slave.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010. All Rights Reserved. +%% Copyright Ericsson AB 2010-2012. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -37,7 +37,7 @@ -record(options, {username, password, boot_timeout, init_timeout, startup_timeout, startup_functions, monitor_master, - kill_if_fail, erl_flags}). + kill_if_fail, erl_flags, env}). 
%%%----------------------------------------------------------------- %%% @spec start(Node) -> Result @@ -85,7 +85,8 @@ start(Host, Node) -> %%% {startup_functions, StartupFunctions} | %%% {monitor_master, Monitor} | %%% {kill_if_fail, KillIfFail} | -%%% {erl_flags, ErlangFlags} +%%% {erl_flags, ErlangFlags} | +%%% {env, [{EnvVar,Value}]} %%% Username = string() %%% Password = string() %%% BootTimeout = integer() @@ -99,6 +100,8 @@ start(Host, Node) -> %%% Monitor = bool() %%% KillIfFail = bool() %%% ErlangFlags = string() +%%% EnvVar = string() +%%% Value = string() %%% Result = {ok, NodeName} | {error, already_started, NodeName} | %%% {error, started_not_connected, NodeName} | %%% {error, boot_timeout, NodeName} | @@ -152,6 +155,9 @@ start(Host, Node) -> %%% <p>Option <code>erlang_flags</code> specifies, which flags will be added %%% to the parameters of the <code>erl</code> executable.</p> %%% +%%% <p>Option <code>env</code> specifies a list of environment variables +%%% that will extended the environment.</p> +%%% %%% <p>Special return values are: %%% <list> %%% <item><code>{error, already_started, NodeName}</code> - if the node with @@ -233,10 +239,12 @@ fetch_options(Options) -> Monitor = get_option_value(monitor_master, Options, false), KillIfFail = get_option_value(kill_if_fail, Options, true), ErlFlags = get_option_value(erl_flags, Options, []), + EnvVars = get_option_value(env, Options, []), #options{username=UserName, password=Password, boot_timeout=BootTimeout, init_timeout=InitTimeout, startup_timeout=StartupTimeout, startup_functions=StartupFunctions, - monitor_master=Monitor, kill_if_fail=KillIfFail, erl_flags=ErlFlags}. + monitor_master=Monitor, kill_if_fail=KillIfFail, + erl_flags=ErlFlags, env=EnvVars}. % send a message when slave node is started % @hidden @@ -306,11 +314,19 @@ do_start(Host, Node, Options) -> true-> spawn_remote_node(Host, Node, Options) end, + BootTimeout = Options#options.boot_timeout, InitTimeout = Options#options.init_timeout, StartupTimeout = Options#options.startup_timeout, Result = case wait_for_node_alive(ENode, BootTimeout) of pong-> + case test_server:is_cover() of + true -> + MainCoverNode = cover:get_main_node(), + rpc:call(MainCoverNode,cover,start,[ENode]); + false -> + ok + end, call_functions(ENode, Functions2), receive {node_started, ENode}-> @@ -365,9 +381,9 @@ get_cmd(Node, Flags) -> % spawn node locally spawn_local_node(Node, Options) -> - ErlFlags = Options#options.erl_flags, + #options{env=Env,erl_flags=ErlFlags} = Options, Cmd = get_cmd(Node, ErlFlags), - open_port({spawn, Cmd}, [stream]). + open_port({spawn, Cmd}, [stream,{env,Env}]). % start crypto and ssh if not yet started check_for_ssh_running() -> @@ -386,9 +402,10 @@ check_for_ssh_running() -> % spawn node remotely spawn_remote_node(Host, Node, Options) -> - Username = Options#options.username, - Password = Options#options.password, - ErlFlags = Options#options.erl_flags, + #options{username=Username, + password=Password, + erl_flags=ErlFlags, + env=Env} = Options, SSHOptions = case {Username, Password} of {[], []}-> []; @@ -400,8 +417,17 @@ spawn_remote_node(Host, Node, Options) -> check_for_ssh_running(), {ok, SSHConnRef} = ssh:connect(atom_to_list(Host), 22, SSHOptions), {ok, SSHChannelId} = ssh_connection:session_channel(SSHConnRef, infinity), + ssh_setenv(SSHConnRef, SSHChannelId, Env), ssh_connection:exec(SSHConnRef, SSHChannelId, get_cmd(Node, ErlFlags), infinity). 
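The new env option is handed to open_port/2 as {env,Env} for locally spawned nodes and, for remote nodes, applied with ssh_connection:setenv/5 in the ssh_setenv/3 helper that follows. A usage sketch; the host, node name, variable and paths are placeholders:

    {ok, _Node} = ct_slave:start(myhost, slave1,
                                 [{env, [{"MY_APP_CONFIG", "/tmp/app.config"}]},
                                  {erl_flags, "-pa ../ebin"}]).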
+ +ssh_setenv(SSHConnRef, SSHChannelId, [{Var, Value} | Vars]) + when is_list(Var), is_list(Value) -> + success = ssh_connection:setenv(SSHConnRef, SSHChannelId, + Var, Value, infinity), + ssh_setenv(SSHConnRef, SSHChannelId, Vars); +ssh_setenv(_SSHConnRef, _SSHChannelId, []) -> ok. + % call functions on a remote Erlang node call_functions(_Node, []) -> ok; @@ -423,8 +449,29 @@ wait_for_node_alive(Node, N) -> % call init:stop on a remote node do_stop(ENode) -> + {Cover,MainCoverNode} = + case test_server:is_cover() of + true -> + Main = cover:get_main_node(), + rpc:call(Main,cover,flush,[ENode]), + {true,Main}; + false -> + {false,undefined} + end, spawn(ENode, init, stop, []), - wait_for_node_dead(ENode, 5). + case wait_for_node_dead(ENode, 5) of + {ok,ENode} -> + if Cover -> + %% To avoid that cover is started again if a node + %% with the same name is started later. + rpc:call(MainCoverNode,cover,stop,[ENode]); + true -> + ok + end, + {ok,ENode}; + Error -> + Error + end. % wait N seconds until node is disconnected wait_for_node_dead(Node, 0) -> diff --git a/lib/common_test/src/ct_snmp.erl b/lib/common_test/src/ct_snmp.erl index 8fe63e8ed1..71038bd4f4 100644 --- a/lib/common_test/src/ct_snmp.erl +++ b/lib/common_test/src/ct_snmp.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2010. All Rights Reserved. +%% Copyright Ericsson AB 2004-2012. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -39,7 +39,7 @@ %%% %%% Manager config %%% [{start_manager, boolean()} % Optional - default is true %%% {users, [{user_name(), [call_back_module(), user_data()]}]}, %% Optional -%%% {usm_users, [{usm_user_name(), usm_config()}]},%% Optional - snmp v3 only +%%% {usm_users, [{usm_user_name(), [usm_config()]}]},%% Optional - snmp v3 only %%% % managed_agents is optional %%% {managed_agents,[{agent_name(), [user_name(), agent_ip(), agent_port(), [agent_config()]]}]}, %%% {max_msg_size, integer()}, % Optional - default is 484 @@ -130,7 +130,7 @@ %%% @type agent_config() = {Item, Value} %%% @type user_name() = atom() %%% @type usm_user_name() = string() -%%% @type usm_config() = string() +%%% @type usm_config() = {Item, Value} %%% @type call_back_module() = atom() %%% @type user_data() = term() %%% @type oids() = [oid()] @@ -157,8 +157,9 @@ %%% API -export([start/2, start/3, stop/1, get_values/3, get_next_values/3, set_values/4, set_info/1, register_users/2, register_agents/2, register_usm_users/2, - unregister_users/1, unregister_agents/1, update_usm_users/2, - load_mibs/1]). + unregister_users/1, unregister_users/2, unregister_agents/1, + unregister_agents/2, unregister_usm_users/1, unregister_usm_users/2, + load_mibs/1, unload_mibs/1]). %% Manager values -define(CT_SNMP_LOG_FILE, "ct_snmp_set.log"). @@ -250,10 +251,8 @@ stop(Config) -> %%% %%% @doc Issues a synchronous snmp get request. get_values(Agent, Oids, MgrAgentConfName) -> - [Uid, AgentIp, AgentUdpPort | _] = - agent_conf(Agent, MgrAgentConfName), - {ok, SnmpReply, _} = - snmpm:g(Uid, AgentIp, AgentUdpPort, Oids), + [Uid | _] = agent_conf(Agent, MgrAgentConfName), + {ok, SnmpReply, _} = snmpm:sync_get2(Uid, target_name(Agent), Oids), SnmpReply. %%% @spec get_next_values(Agent, Oids, MgrAgentConfName) -> SnmpReply @@ -265,10 +264,8 @@ get_values(Agent, Oids, MgrAgentConfName) -> %%% %%% @doc Issues a synchronous snmp get next request. 
get_next_values(Agent, Oids, MgrAgentConfName) -> - [Uid, AgentIp, AgentUdpPort | _] = - agent_conf(Agent, MgrAgentConfName), - {ok, SnmpReply, _} = - snmpm:gn(Uid, AgentIp, AgentUdpPort, Oids), + [Uid | _] = agent_conf(Agent, MgrAgentConfName), + {ok, SnmpReply, _} = snmpm:sync_get_next2(Uid, target_name(Agent), Oids), SnmpReply. %%% @spec set_values(Agent, VarsAndVals, MgrAgentConfName, Config) -> SnmpReply @@ -282,13 +279,11 @@ get_next_values(Agent, Oids, MgrAgentConfName) -> %%% @doc Issues a synchronous snmp set request. set_values(Agent, VarsAndVals, MgrAgentConfName, Config) -> PrivDir = ?config(priv_dir, Config), - [Uid, AgentIp, AgentUdpPort | _] = - agent_conf(Agent, MgrAgentConfName), + [Uid | _] = agent_conf(Agent, MgrAgentConfName), Oids = lists:map(fun({Oid, _, _}) -> Oid end, VarsAndVals), - {ok, SnmpGetReply, _} = - snmpm:g(Uid, AgentIp, AgentUdpPort, Oids), - {ok, SnmpSetReply, _} = - snmpm:s(Uid, AgentIp, AgentUdpPort, VarsAndVals), + TargetName = target_name(Agent), + {ok, SnmpGetReply, _} = snmpm:sync_get2(Uid, TargetName, Oids), + {ok, SnmpSetReply, _} = snmpm:sync_set2(Uid, TargetName, VarsAndVals), case SnmpSetReply of {noError, 0, _} when PrivDir /= false -> log(PrivDir, Agent, SnmpGetReply, VarsAndVals); @@ -328,12 +323,23 @@ set_info(Config) -> %%% Reason = term() %%% %%% @doc Register the manager entity (=user) responsible for specific agent(s). -%%% Corresponds to making an entry in users.conf +%%% Corresponds to making an entry in users.conf. +%%% +%%% This function will try to register the given users, without +%%% checking if any of them already exist. In order to change an +%%% already registered user, the user must first be unregistered. register_users(MgrAgentConfName, Users) -> - {snmp, SnmpVals} = ct:get_config(MgrAgentConfName), - NewSnmpVals = lists:keyreplace(users, 1, SnmpVals, {users, Users}), - ct_config:update_config(MgrAgentConfName, {snmp, NewSnmpVals}), - setup_users(Users). + case setup_users(Users) of + ok -> + SnmpVals = ct:get_config(MgrAgentConfName), + OldUsers = ct:get_config({MgrAgentConfName,users},[]), + NewSnmpVals = lists:keystore(users, 1, SnmpVals, + {users, Users ++ OldUsers}), + ct_config:update_config(MgrAgentConfName, NewSnmpVals), + ok; + Error -> + Error + end. %%% @spec register_agents(MgrAgentConfName, ManagedAgents) -> ok | {error, Reason} %%% @@ -343,12 +349,24 @@ register_users(MgrAgentConfName, Users) -> %%% %%% @doc Explicitly instruct the manager to handle this agent. %%% Corresponds to making an entry in agents.conf +%%% +%%% This function will try to register the given managed agents, +%%% without checking if any of them already exist. In order to change +%%% an already registered managed agent, the agent must first be +%%% unregistered. register_agents(MgrAgentConfName, ManagedAgents) -> - {snmp, SnmpVals} = ct:get_config(MgrAgentConfName), - NewSnmpVals = lists:keyreplace(managed_agents, 1, SnmpVals, - {managed_agents, ManagedAgents}), - ct_config:update_config(MgrAgentConfName, {snmp, NewSnmpVals}), - setup_managed_agents(ManagedAgents). + case setup_managed_agents(MgrAgentConfName,ManagedAgents) of + ok -> + SnmpVals = ct:get_config(MgrAgentConfName), + OldAgents = ct:get_config({MgrAgentConfName,managed_agents},[]), + NewSnmpVals = lists:keystore(managed_agents, 1, SnmpVals, + {managed_agents, + ManagedAgents ++ OldAgents}), + ct_config:update_config(MgrAgentConfName, NewSnmpVals), + ok; + Error -> + Error + end. 
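register_users/2 and register_agents/2 now append to the stored snmp configuration instead of replacing it, and they register the given entries without checking whether they already exist, so an existing entry must be unregistered before it can be changed. A usage sketch with made-up names; the configuration name, user, callback module and agent address are placeholders:

    ok = ct_snmp:register_users(snmp_mgr_agent,
                                [{user1, [my_snmp_callback, undefined]}]),
    ok = ct_snmp:register_agents(snmp_mgr_agent,
                                 [{agent1, [user1, {192,168,0,1}, 4000, []]}]).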
%%% @spec register_usm_users(MgrAgentConfName, UsmUsers) -> ok | {error, Reason} %%% @@ -358,60 +376,115 @@ register_agents(MgrAgentConfName, ManagedAgents) -> %%% %%% @doc Explicitly instruct the manager to handle this USM user. %%% Corresponds to making an entry in usm.conf +%%% +%%% This function will try to register the given users, without +%%% checking if any of them already exist. In order to change an +%%% already registered user, the user must first be unregistered. register_usm_users(MgrAgentConfName, UsmUsers) -> - {snmp, SnmpVals} = ct:get_config(MgrAgentConfName), - NewSnmpVals = lists:keyreplace(users, 1, SnmpVals, {usm_users, UsmUsers}), - ct_config:update_config(MgrAgentConfName, {snmp, NewSnmpVals}), EngineID = ct:get_config({MgrAgentConfName, engine_id}, ?ENGINE_ID), - setup_usm_users(UsmUsers, EngineID). + case setup_usm_users(UsmUsers, EngineID) of + ok -> + SnmpVals = ct:get_config(MgrAgentConfName), + OldUsmUsers = ct:get_config({MgrAgentConfName,usm_users},[]), + NewSnmpVals = lists:keystore(usm_users, 1, SnmpVals, + {usm_users, UsmUsers ++ OldUsmUsers}), + ct_config:update_config(MgrAgentConfName, NewSnmpVals), + ok; + Error -> + Error + end. -%%% @spec unregister_users(MgrAgentConfName) -> ok | {error, Reason} +%%% @spec unregister_users(MgrAgentConfName) -> ok %%% %%% MgrAgentConfName = atom() %%% Reason = term() %%% -%%% @doc Removes information added when calling register_users/2. +%%% @doc Unregister all users. unregister_users(MgrAgentConfName) -> - Users = lists:map(fun({UserName, _}) -> UserName end, - ct:get_config({MgrAgentConfName, users})), - {snmp, SnmpVals} = ct:get_config(MgrAgentConfName), - NewSnmpVals = lists:keyreplace(users, 1, SnmpVals, {users, []}), - ct_config:update_config(MgrAgentConfName, {snmp, NewSnmpVals}), - takedown_users(Users). + Users = [Id || {Id,_} <- ct:get_config({MgrAgentConfName, users},[])], + unregister_users(MgrAgentConfName,Users). -%%% @spec unregister_agents(MgrAgentConfName) -> ok | {error, Reason} +%%% @spec unregister_users(MgrAgentConfName,Users) -> ok %%% %%% MgrAgentConfName = atom() +%%% Users = [user_name()] %%% Reason = term() %%% -%%% @doc Removes information added when calling register_agents/2. +%%% @doc Unregister the given users. +unregister_users(MgrAgentConfName,Users) -> + takedown_users(Users), + SnmpVals = ct:get_config(MgrAgentConfName), + AllUsers = ct:get_config({MgrAgentConfName, users},[]), + RemainingUsers = lists:filter(fun({Id,_}) -> + not lists:member(Id,Users) + end, + AllUsers), + NewSnmpVals = lists:keyreplace(users, 1, SnmpVals, {users,RemainingUsers}), + ct_config:update_config(MgrAgentConfName, NewSnmpVals), + ok. + +%%% @spec unregister_agents(MgrAgentConfName) -> ok +%%% +%%% MgrAgentConfName = atom() +%%% Reason = term() +%%% +%%% @doc Unregister all managed agents. unregister_agents(MgrAgentConfName) -> - ManagedAgents = lists:map(fun({_, [Uid, AgentIP, AgentPort, _]}) -> - {Uid, AgentIP, AgentPort} - end, - ct:get_config({MgrAgentConfName, managed_agents})), - {snmp, SnmpVals} = ct:get_config(MgrAgentConfName), - NewSnmpVals = lists:keyreplace(managed_agents, 1, SnmpVals, - {managed_agents, []}), - ct_config:update_config(MgrAgentConfName, {snmp, NewSnmpVals}), - takedown_managed_agents(ManagedAgents). + ManagedAgents = [AgentName || + {AgentName, _} <- + ct:get_config({MgrAgentConfName,managed_agents},[])], + unregister_agents(MgrAgentConfName,ManagedAgents). 
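The new two-argument variants added here and below make it possible to unregister a chosen subset of the users, managed agents or usm users instead of all of them. For example (same placeholder names as above):

    ok = ct_snmp:unregister_users(snmp_mgr_agent, [user1]),
    ok = ct_snmp:unregister_agents(snmp_mgr_agent, [agent1]).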
+%%% @spec unregister_agents(MgrAgentConfName,ManagedAgents) -> ok +%%% +%%% MgrAgentConfName = atom() +%%% ManagedAgents = [agent_name()] +%%% Reason = term() +%%% +%%% @doc Unregister the given managed agents. +unregister_agents(MgrAgentConfName,ManagedAgents) -> + takedown_managed_agents(MgrAgentConfName, ManagedAgents), + SnmpVals = ct:get_config(MgrAgentConfName), + AllAgents = ct:get_config({MgrAgentConfName,managed_agents},[]), + RemainingAgents = lists:filter(fun({Name,_}) -> + not lists:member(Name,ManagedAgents) + end, + AllAgents), + NewSnmpVals = lists:keyreplace(managed_agents, 1, SnmpVals, + {managed_agents,RemainingAgents}), + ct_config:update_config(MgrAgentConfName, NewSnmpVals), + ok. -%%% @spec update_usm_users(MgrAgentConfName, UsmUsers) -> ok | {error, Reason} +%%% @spec unregister_usm_users(MgrAgentConfName) -> ok %%% %%% MgrAgentConfName = atom() -%%% UsmUsers = usm_users() %%% Reason = term() %%% -%%% @doc Alters information added when calling register_usm_users/2. -update_usm_users(MgrAgentConfName, UsmUsers) -> - - {snmp, SnmpVals} = ct:get_config(MgrAgentConfName), - NewSnmpVals = lists:keyreplace(usm_users, 1, SnmpVals, - {usm_users, UsmUsers}), - ct_config:update_config(MgrAgentConfName, {snmp, NewSnmpVals}), +%%% @doc Unregister all usm users. +unregister_usm_users(MgrAgentConfName) -> + UsmUsers = [Id || {Id,_} <- ct:get_config({MgrAgentConfName, usm_users},[])], + unregister_usm_users(MgrAgentConfName,UsmUsers). + +%%% @spec unregister_usm_users(MgrAgentConfName,UsmUsers) -> ok +%%% +%%% MgrAgentConfName = atom() +%%% UsmUsers = [usm_user_name()] +%%% Reason = term() +%%% +%%% @doc Unregister the given usm users. +unregister_usm_users(MgrAgentConfName,UsmUsers) -> EngineID = ct:get_config({MgrAgentConfName, engine_id}, ?ENGINE_ID), - do_update_usm_users(UsmUsers, EngineID). + takedown_usm_users(UsmUsers,EngineID), + SnmpVals = ct:get_config(MgrAgentConfName), + AllUsmUsers = ct:get_config({MgrAgentConfName, usm_users},[]), + RemainingUsmUsers = lists:filter(fun({Id,_}) -> + not lists:member(Id,UsmUsers) + end, + AllUsmUsers), + NewSnmpVals = lists:keyreplace(usm_users, 1, SnmpVals, + {usm_users,RemainingUsmUsers}), + ct_config:update_config(MgrAgentConfName, NewSnmpVals), + ok. %%% @spec load_mibs(Mibs) -> ok | {error, Reason} %%% @@ -423,6 +496,15 @@ update_usm_users(MgrAgentConfName, UsmUsers) -> load_mibs(Mibs) -> snmpa:load_mibs(snmp_master_agent, Mibs). +%%% @spec unload_mibs(Mibs) -> ok | {error, Reason} +%%% +%%% Mibs = [MibName] +%%% MibName = string() +%%% Reason = term() +%%% +%%% @doc Unload the mibs from the agent 'snmp_master_agent'. +unload_mibs(Mibs) -> + snmpa:unload_mibs(snmp_master_agent, Mibs). %%%======================================================================== %%% Internal functions @@ -486,9 +568,8 @@ setup_agent(true, AgentConfName, SnmpConfName, file:make_dir(DbDir), snmp_config:write_agent_snmp_files(ConfDir, Vsns, ManagerIP, TrapUdp, AgentIP, AgentUdp, SysName, - atom_to_list(NotifType), - SecType, Passwd, AgentEngineID, - AgentMaxMsgSize), + NotifType, SecType, Passwd, + AgentEngineID, AgentMaxMsgSize), override_default_configuration(Config, AgentConfName), @@ -497,7 +578,8 @@ setup_agent(true, AgentConfName, SnmpConfName, {verbosity, trace}]}, {agent_type, master}, {agent_verbosity, trace}, - {net_if, [{verbosity, trace}]}], + {net_if, [{verbosity, trace}]}, + {versions, Vsns}], ct:get_config({SnmpConfName,agent})), application:set_env(snmp, agent, SnmpEnv). 
%%%--------------------------------------------------------------------------- @@ -535,65 +617,61 @@ manager_register(true, MgrAgentConfName) -> setup_usm_users(UsmUsers, EngineID), setup_users(Users), - setup_managed_agents(Agents). + setup_managed_agents(MgrAgentConfName,Agents). %%%--------------------------------------------------------------------------- setup_users(Users) -> - lists:foreach(fun({Id, [Module, Data]}) -> - snmpm:register_user(Id, Module, Data) - end, Users). + while_ok(fun({Id, [Module, Data]}) -> + snmpm:register_user(Id, Module, Data) + end, Users). %%%--------------------------------------------------------------------------- -setup_managed_agents([]) -> - ok; - -setup_managed_agents([{_, [Uid, AgentIp, AgentUdpPort, AgentConf]} | - Rest]) -> - NewAgentIp = case AgentIp of - IpTuple when is_tuple(IpTuple) -> - IpTuple; - HostName when is_list(HostName) -> - {ok,Hostent} = inet:gethostbyname(HostName), - [IpTuple|_] = Hostent#hostent.h_addr_list, - IpTuple - end, - ok = snmpm:register_agent(Uid, NewAgentIp, AgentUdpPort), - lists:foreach(fun({Item, Val}) -> - snmpm:update_agent_info(Uid, NewAgentIp, - AgentUdpPort, Item, Val) - end, AgentConf), - setup_managed_agents(Rest). +setup_managed_agents(AgentConfName,Agents) -> + Fun = + fun({AgentName, [Uid, AgentIp, AgentUdpPort, AgentConf0]}) -> + NewAgentIp = case AgentIp of + IpTuple when is_tuple(IpTuple) -> + IpTuple; + HostName when is_list(HostName) -> + {ok,Hostent} = inet:gethostbyname(HostName), + [IpTuple|_] = Hostent#hostent.h_addr_list, + IpTuple + end, + AgentConf = + case lists:keymember(engine_id,1,AgentConf0) of + true -> + AgentConf0; + false -> + DefaultEngineID = + ct:get_config({AgentConfName,agent_engine_id}, + ?AGENT_ENGINE_ID), + [{engine_id,DefaultEngineID}|AgentConf0] + end, + snmpm:register_agent(Uid, target_name(AgentName), + [{address,NewAgentIp},{port,AgentUdpPort} | + AgentConf]) + end, + while_ok(Fun,Agents). %%%--------------------------------------------------------------------------- setup_usm_users(UsmUsers, EngineID)-> - lists:foreach(fun({UsmUser, Conf}) -> - snmpm:register_usm_user(EngineID, UsmUser, Conf) - end, UsmUsers). + while_ok(fun({UsmUser, Conf}) -> + snmpm:register_usm_user(EngineID, UsmUser, Conf) + end, UsmUsers). %%%--------------------------------------------------------------------------- takedown_users(Users) -> - lists:foreach(fun({Id}) -> + lists:foreach(fun(Id) -> snmpm:unregister_user(Id) end, Users). %%%--------------------------------------------------------------------------- -takedown_managed_agents([{Uid, AgentIp, AgentUdpPort} | - Rest]) -> - NewAgentIp = case AgentIp of - IpTuple when is_tuple(IpTuple) -> - IpTuple; - HostName when is_list(HostName) -> - {ok,Hostent} = inet:gethostbyname(HostName), - [IpTuple|_] = Hostent#hostent.h_addr_list, - IpTuple - end, - ok = snmpm:unregister_agent(Uid, NewAgentIp, AgentUdpPort), - takedown_managed_agents(Rest); - -takedown_managed_agents([]) -> - ok. +takedown_managed_agents(MgrAgentConfName,ManagedAgents) -> + lists:foreach(fun(AgentName) -> + [Uid | _] = agent_conf(AgentName, MgrAgentConfName), + snmpm:unregister_agent(Uid, target_name(AgentName)) + end, ManagedAgents). %%%--------------------------------------------------------------------------- -do_update_usm_users(UsmUsers, EngineID) -> - lists:foreach(fun({UsmUser, {Item, Val}}) -> - snmpm:update_usm_user_info(EngineID, UsmUser, - Item, Val) - end, UsmUsers). 
+takedown_usm_users(UsmUsers, EngineID) -> + lists:foreach(fun(Id) -> + snmpm:unregister_usm_user(EngineID, Id) + end, UsmUsers). %%%--------------------------------------------------------------------------- log(PrivDir, Agent, {_, _, Varbinds}, NewVarsAndVals) -> @@ -657,7 +735,7 @@ override_contexts(Config, {data_dir_file, File}) -> override_contexts(Config, ContextInfo); override_contexts(Config, Contexts) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"context.conf"), file:delete(File), snmp_config:write_agent_context_config(Dir, "", Contexts). @@ -673,7 +751,7 @@ override_sysinfo(Config, {data_dir_file, File}) -> override_sysinfo(Config, SysInfo); override_sysinfo(Config, SysInfo) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"standard.conf"), file:delete(File), snmp_config:write_agent_standard_config(Dir, "", SysInfo). @@ -688,7 +766,7 @@ override_target_address(Config, {data_dir_file, File}) -> override_target_address(Config, TargetAddressConf); override_target_address(Config, TargetAddressConf) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"target_addr.conf"), file:delete(File), snmp_config:write_agent_target_addr_config(Dir, "", TargetAddressConf). @@ -704,7 +782,7 @@ override_target_params(Config, {data_dir_file, File}) -> override_target_params(Config, TargetParamsConf); override_target_params(Config, TargetParamsConf) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"target_params.conf"), file:delete(File), snmp_config:write_agent_target_params_config(Dir, "", TargetParamsConf). @@ -719,7 +797,7 @@ override_notify(Config, {data_dir_file, File}) -> override_notify(Config, NotifyConf); override_notify(Config, NotifyConf) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"notify.conf"), file:delete(File), snmp_config:write_agent_notify_config(Dir, "", NotifyConf). @@ -734,7 +812,7 @@ override_usm(Config, {data_dir_file, File}) -> override_usm(Config, UsmConf); override_usm(Config, UsmConf) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"usm.conf"), file:delete(File), snmp_config:write_agent_usm_config(Dir, "", UsmConf). @@ -749,7 +827,7 @@ override_community(Config, {data_dir_file, File}) -> override_community(Config, CommunityConf); override_community(Config, CommunityConf) -> - Dir = ?config(priv_dir, Config), + Dir = filename:join(?config(priv_dir, Config),"conf"), File = filename:join(Dir,"community.conf"), file:delete(File), snmp_config:write_agent_community_config(Dir, "", CommunityConf). @@ -765,7 +843,20 @@ override_vacm(Config, {data_dir_file, File}) -> override_vacm(Config, VacmConf); override_vacm(Config, VacmConf) -> - Dir = ?config(priv_dir, Config), - File = filename:join(Dir,"vacm.conf"), + Dir = filename:join(?config(priv_dir, Config),"conf"), + File = filename:join(Dir,"vacm.conf"), file:delete(File), snmp_config:write_agent_vacm_config(Dir, "", VacmConf). + +%%%--------------------------------------------------------------------------- + +target_name(Agent) -> + atom_to_list(Agent). + +while_ok(Fun,[H|T]) -> + case Fun(H) of + ok -> while_ok(Fun,T); + Error -> Error + end; +while_ok(_Fun,[]) -> + ok. 
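The manager side of ct_snmp now addresses agents through snmpm:sync_get2/3, snmpm:sync_get_next2/3 and snmpm:sync_set2/3, using the target name produced by target_name/1 above instead of an explicit IP address and UDP port. A hedged sketch of such a call made directly against snmpm; the user id, agent name and OID are placeholders:

    %% "agent1" is the target name under which the agent was registered
    {ok, {noError, 0, _Varbinds}, _Remaining} =
        snmpm:sync_get2(user1, "agent1", [[1,3,6,1,2,1,1,1,0]]).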
diff --git a/lib/common_test/src/ct_telnet.erl b/lib/common_test/src/ct_telnet.erl index b13c050e32..02186864a5 100644 --- a/lib/common_test/src/ct_telnet.erl +++ b/lib/common_test/src/ct_telnet.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2012. All Rights Reserved. +%% Copyright Ericsson AB 2003-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -201,7 +201,7 @@ open(KeyOrName,ConnType,TargetMod,Extra) -> close(Connection) -> case get_handle(Connection) of {ok,Pid} -> - log("ct_telnet:close","Handle: ~p",[Pid]), + log("ct_telnet:close","Handle: ~w",[Pid]), case ct_gen_conn:stop(Pid) of {error,{process_down,Pid,noproc}} -> {error,already_closed}; @@ -558,7 +558,7 @@ reconnect(Ip,Port,N,State=#state{target_mod=TargetMod, Error when N==0 -> Error; _Error -> - log("Reconnect failed!","Retries left: ~p",[N]), + log("Reconnect failed!","Retries left: ~w",[N]), timer:sleep(ReconnInt), reconnect(Ip,Port,N-1,State) end. @@ -567,7 +567,7 @@ reconnect(Ip,Port,N,State=#state{target_mod=TargetMod, %% @hidden terminate(TelnPid,State) -> log(heading(terminate,State#state.name), - "Closing telnet connection.\nId: ~p", + "Closing telnet connection.\nId: ~w", [TelnPid]), ct_telnet_client:close(TelnPid). @@ -899,7 +899,7 @@ one_expect(Data,Pattern,EO) -> [Prompt] when Prompt==prompt; Prompt=={prompt,PromptType} -> %% Only searching for prompt log_lines(UptoPrompt), - try_cont_log("<b>PROMPT:</b> ~s", [PromptType]), + try_cont_log("<b>PROMPT:</b> ~ts", [PromptType]), {match,{prompt,PromptType},Rest}; [{prompt,_OtherPromptType}] -> %% Only searching for one specific prompt, not thisone @@ -969,7 +969,7 @@ seq_expect1(Data,[prompt|Patterns],Acc,Rest,EO) -> {continue,[prompt|Patterns],Acc,LastLine}; PromptType -> log_lines(Data), - try_cont_log("<b>PROMPT:</b> ~s", [PromptType]), + try_cont_log("<b>PROMPT:</b> ~ts", [PromptType]), seq_expect(Rest,Patterns,[{prompt,PromptType}|Acc],EO) end; seq_expect1(Data,[{prompt,PromptType}|Patterns],Acc,Rest,EO) -> @@ -980,7 +980,7 @@ seq_expect1(Data,[{prompt,PromptType}|Patterns],Acc,Rest,EO) -> {continue,[{prompt,PromptType}|Patterns],Acc,LastLine}; PromptType -> log_lines(Data), - try_cont_log("<b>PROMPT:</b> ~s", [PromptType]), + try_cont_log("<b>PROMPT:</b> ~ts", [PromptType]), seq_expect(Rest,Patterns,[{prompt,PromptType}|Acc],EO); _OtherPromptType -> log_lines(Data), @@ -1032,13 +1032,13 @@ match_line(Line,Patterns,FoundPrompt,EO) -> match_line(Line,[prompt|Patterns],false,EO,RetTag) -> match_line(Line,Patterns,false,EO,RetTag); match_line(Line,[prompt|_Patterns],FoundPrompt,_EO,RetTag) -> - try_cont_log(" ~s", [Line]), - try_cont_log("<b>PROMPT:</b> ~s", [FoundPrompt]), + try_cont_log(" ~ts", [Line]), + try_cont_log("<b>PROMPT:</b> ~ts", [FoundPrompt]), {RetTag,{prompt,FoundPrompt}}; match_line(Line,[{prompt,PromptType}|_Patterns],FoundPrompt,_EO,RetTag) when PromptType==FoundPrompt -> - try_cont_log(" ~s", [Line]), - try_cont_log("<b>PROMPT:</b> ~s", [FoundPrompt]), + try_cont_log(" ~ts", [Line]), + try_cont_log("<b>PROMPT:</b> ~ts", [FoundPrompt]), {RetTag,{prompt,FoundPrompt}}; match_line(Line,[{prompt,PromptType}|Patterns],FoundPrompt,EO,RetTag) when PromptType=/=FoundPrompt -> @@ -1048,7 +1048,7 @@ match_line(Line,[{Tag,Pattern}|Patterns],FoundPrompt,EO,RetTag) -> nomatch -> match_line(Line,Patterns,FoundPrompt,EO,RetTag); {match,Match} -> - try_cont_log("<b>MATCH:</b> ~s", [Line]), + 
try_cont_log("<b>MATCH:</b> ~ts", [Line]), {RetTag,{Tag,Match}} end; match_line(Line,[Pattern|Patterns],FoundPrompt,EO,RetTag) -> @@ -1056,13 +1056,13 @@ match_line(Line,[Pattern|Patterns],FoundPrompt,EO,RetTag) -> nomatch -> match_line(Line,Patterns,FoundPrompt,EO,RetTag); {match,Match} -> - try_cont_log("<b>MATCH:</b> ~s", [Line]), + try_cont_log("<b>MATCH:</b> ~ts", [Line]), {RetTag,Match} end; match_line(Line,[],FoundPrompt,EO,match) -> match_line(Line,EO#eo.haltpatterns,FoundPrompt,EO,halt); match_line(Line,[],_FoundPrompt,_EO,halt) -> - try_cont_log(" ~s", [Line]), + try_cont_log(" ~ts", [Line]), nomatch. one_line([$\n|Rest],Line) -> @@ -1086,7 +1086,7 @@ log_lines(String) -> [] -> ok; LastLine -> - try_cont_log(" ~s", [LastLine]) + try_cont_log(" ~ts", [LastLine]) end. log_lines_not_last(String) -> @@ -1094,7 +1094,7 @@ log_lines_not_last(String) -> {[],LastLine} -> LastLine; {String1,LastLine} -> - try_cont_log("~s",[String1]), + try_cont_log("~ts",[String1]), LastLine end. diff --git a/lib/common_test/src/ct_telnet_client.erl b/lib/common_test/src/ct_telnet_client.erl index d703b39ac5..7329498ed6 100644 --- a/lib/common_test/src/ct_telnet_client.erl +++ b/lib/common_test/src/ct_telnet_client.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2010. All Rights Reserved. +%% Copyright Ericsson AB 2003-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -308,7 +308,7 @@ cmd_dbg(_Cmd) -> [Opt] -> Opt; _ -> Opts end, - io:format("~s(~w): ~w\n", [CtrlStr,Ctrl,Opts1]); + io:format("~ts(~w): ~w\n", [CtrlStr,Ctrl,Opts1]); Any -> io:format("Unexpected in cmd_dbg:~n~w~n",[Any]) end. diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl index 5ce095e38e..e341391a91 100644 --- a/lib/common_test/src/ct_testspec.erl +++ b/lib/common_test/src/ct_testspec.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2012. All Rights Reserved. +%% Copyright Ericsson AB 2006-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -28,7 +28,6 @@ collect_tests_from_file/2, collect_tests_from_file/3]). -include("ct_util.hrl"). - -define(testspec_fields, record_info(fields, testspec)). %%%------------------------------------------------------------------ @@ -93,7 +92,7 @@ prepare_tests(TestSpec) when is_record(TestSpec,testspec) -> %% run_per_node/2 takes the Run list as input and returns a list %% of {Node,RunPerNode,[]} tuples where the tests have been sorted %% on a per node basis. -run_per_node([{{Node,Dir},Test}|Ts],Result, MergeTests) -> +run_per_node([{{Node,Dir},Test}|Ts],Result,MergeTests) -> {value,{Node,{Run,Skip}}} = lists:keysearch(Node,1,Result), Run1 = case MergeTests of false -> @@ -190,7 +189,7 @@ prepare_suites(_Node,_Dir,[],Run,Skip) -> prepare_cases(Node,Dir,Suite,Cases) -> case get_skipped_cases(Node,Dir,Suite,Cases) of - SkipAll=[{{Node,Dir},{Suite,_Cmt}}] -> % all cases to be skipped + SkipAll=[{{Node,Dir},{Suite,_Cmt}}] -> % all cases to be skipped %% note: this adds an 'all' test even if only skip is specified {[{{Node,Dir},{Suite,all}}],SkipAll}; Skipped -> @@ -241,34 +240,147 @@ get_skipped_cases1(_,_,_,[]) -> %%% collect_tests_from_file reads a testspec file and returns a record %%% containing the data found. 
-collect_tests_from_file(Specs, Relaxed) -> +collect_tests_from_file(Specs,Relaxed) -> collect_tests_from_file(Specs,[node()],Relaxed). collect_tests_from_file(Specs,Nodes,Relaxed) when is_list(Nodes) -> NodeRefs = lists:map(fun(N) -> {undefined,N} end, Nodes), - catch collect_tests_from_file1(Specs,#testspec{nodes=NodeRefs},Relaxed). + %% [Spec1,Spec2,...] means create one testpec record per Spec file + %% [[Spec1,Spec2,...]] means merge all specs into one testspec record + {Join,Specs1} = if is_list(hd(hd(Specs))) -> {true,hd(Specs)}; + true -> {false,Specs} + end, + Specs2 = [filename:absname(S) || S <- Specs1], + TS0 = #testspec{nodes=NodeRefs}, + + try create_specs(Specs2,TS0,Relaxed,Join) of + {{[],_},SeparateTestSpecs} -> + filter_and_convert(SeparateTestSpecs); + {{_,#testspec{tests=[]}},SeparateTestSpecs} -> + filter_and_convert(SeparateTestSpecs); + {Joined,SeparateTestSpecs} -> + [filter_and_convert(Joined) | + filter_and_convert(SeparateTestSpecs)] + catch + _:Error -> + Error + end. + +filter_and_convert(Joined) when is_tuple(Joined) -> + hd(filter_and_convert([Joined])); +filter_and_convert([{_,#testspec{tests=[]}}|TSs]) -> + filter_and_convert(TSs); +filter_and_convert([{[{SpecFile,MergeTests}|SMs],TestSpec}|TSs]) -> + #testspec{config = CfgFiles} = TestSpec, + TestSpec1 = TestSpec#testspec{config = delete_dups(CfgFiles), + merge_tests = MergeTests}, + %% set the merge_tests value for the testspec to the value + %% of the first test spec in the set + [{[SpecFile | [SF || {SF,_} <- SMs]], TestSpec1} | filter_and_convert(TSs)]; +filter_and_convert([]) -> + []. + +delete_dups(Elems) -> + delete_dups1(lists:reverse(Elems),[]). + +delete_dups1([E|Es],Keep) -> + case lists:member(E,Es) of + true -> + delete_dups1(Es,Keep); + false -> + delete_dups1(Es,[E|Keep]) + end; +delete_dups1([],Keep) -> + Keep. + +create_specs(Specs,TestSpec,Relaxed,Join) -> + SpecsTree = create_spec_tree(Specs,TestSpec,Join,[]), + create_specs(SpecsTree,TestSpec,Relaxed). -collect_tests_from_file1([Spec|Specs],TestSpec,Relaxed) -> +create_spec_tree([Spec|Specs],TS,JoinWithNext,Known) -> SpecDir = filename:dirname(filename:absname(Spec)), - case file:consult(Spec) of - {ok,Terms} -> - case collect_tests(Terms, - TestSpec#testspec{spec_dir=SpecDir}, - Relaxed) of - TS = #testspec{tests=Tests, logdir=LogDirs} when Specs == [] -> - LogDirs1 = lists:delete(".",LogDirs) ++ ["."], - TS#testspec{tests=lists:flatten(Tests), logdir=LogDirs1}; - TS = #testspec{alias = As, nodes = Ns} -> - TS1 = TS#testspec{alias = lists:reverse(As), - nodes = lists:reverse(Ns)}, - collect_tests_from_file1(Specs,TS1,Relaxed) - end; - {error,Reason} -> - ReasonStr = - lists:flatten(io_lib:format("~s", - [file:format_error(Reason)])), - throw({error,{Spec,ReasonStr}}) - end. + TS1 = TS#testspec{spec_dir=SpecDir}, + SpecAbsName = get_absfile(Spec,TS1), + case lists:member(SpecAbsName,Known) of + true -> + throw({error,{cyclic_reference,SpecAbsName}}); + false -> + case file:consult(SpecAbsName) of + {ok,Terms} -> + Terms1 = replace_names(Terms), + {InclJoin,InclSep} = get_included_specs(Terms1,TS1), + {SpecAbsName,Terms1, + create_spec_tree(InclJoin,TS,true,[SpecAbsName|Known]), + create_spec_tree(InclSep,TS,false,[SpecAbsName|Known]), + JoinWithNext, + create_spec_tree(Specs,TS,JoinWithNext,Known)}; + {error,Reason} -> + ReasonStr = + lists:flatten(io_lib:format("~s", + [file:format_error(Reason)])), + throw({error,{SpecAbsName,ReasonStr}}) + end + end; +create_spec_tree([],_TS,_JoinWithNext,_Known) -> + []. 
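As the comment at the top of collect_tests_from_file/3 states, a flat list of specification files yields one testspec record (and thus one test run) per file, while a nested list merges them into a single record. A sketch of how this surfaces in ct:run_test/1, with hypothetical file names:

    %% one test run per specification file
    ct:run_test([{spec, ["a.spec", "b.spec"]}]),
    %% a single merged test run covering both files
    ct:run_test([{spec, [["a.spec", "b.spec"]]}]).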
+ +create_specs({Spec,Terms,InclJoin,InclSep,JoinWithNext,NextSpec}, + TestSpec,Relaxed) -> + SpecDir = filename:dirname(filename:absname(Spec)), + TestSpec1 = create_spec(Terms,TestSpec#testspec{spec_dir=SpecDir}, + JoinWithNext,Relaxed), + + {{JoinSpecs1,JoinTS1},Separate1} = create_specs(InclJoin,TestSpec1,Relaxed), + + {{JoinSpecs2,JoinTS2},Separate2} = + case JoinWithNext of + true -> + create_specs(NextSpec,JoinTS1,Relaxed); + false -> + {{[],JoinTS1},[]} + end, + {SepJoinSpecs,Separate3} = create_specs(InclSep,TestSpec,Relaxed), + {SepJoinSpecs1,Separate4} = + case JoinWithNext of + true -> + {{[],TestSpec},[]}; + false -> + create_specs(NextSpec,TestSpec,Relaxed) + end, + + SpecInfo = {Spec,TestSpec1#testspec.merge_tests}, + AllSeparate = + [TSData || TSData = {Ss,_TS} <- Separate3++Separate1++ + [SepJoinSpecs]++Separate2++ + [SepJoinSpecs1]++Separate4, + Ss /= []], + + case {JoinWithNext,JoinSpecs1} of + {true,_} -> + {{[SpecInfo|(JoinSpecs1++JoinSpecs2)],JoinTS2}, + AllSeparate}; + {false,[]} -> + {{[],TestSpec}, + [{[SpecInfo],TestSpec1}|AllSeparate]}; + {false,_} -> + {{[SpecInfo|(JoinSpecs1++JoinSpecs2)],JoinTS2}, + AllSeparate} + end; +create_specs([],TestSpec,_Relaxed) -> + {{[],TestSpec},[]}. + +create_spec(Terms,TestSpec,JoinedByPrev,Relaxed) -> + %% it's the "includer" that decides the value of merge_tests + Terms1 = if not JoinedByPrev -> + [{set_merge_tests,true}|Terms]; + true -> + [{set_merge_tests,false}|Terms] + end, + TS = #testspec{tests=Tests, logdir=LogDirs} = + collect_tests({false,Terms1},TestSpec,Relaxed), + LogDirs1 = lists:delete(".",LogDirs) ++ ["."], + TS#testspec{tests=lists:flatten(Tests), + logdir=LogDirs1}. collect_tests_from_list(Terms,Relaxed) -> collect_tests_from_list(Terms,[node()],Relaxed). @@ -276,8 +388,8 @@ collect_tests_from_list(Terms,Relaxed) -> collect_tests_from_list(Terms,Nodes,Relaxed) when is_list(Nodes) -> {ok,Cwd} = file:get_cwd(), NodeRefs = lists:map(fun(N) -> {undefined,N} end, Nodes), - case catch collect_tests(Terms,#testspec{nodes=NodeRefs, - spec_dir=Cwd}, + case catch collect_tests({true,Terms},#testspec{nodes=NodeRefs, + spec_dir=Cwd}, Relaxed) of E = {error,_} -> E; @@ -287,13 +399,28 @@ collect_tests_from_list(Terms,Nodes,Relaxed) when is_list(Nodes) -> TS#testspec{tests=lists:flatten(Tests), logdir=LogDirs1} end. -collect_tests(Terms,TestSpec,Relaxed) -> +collect_tests({Replace,Terms},TestSpec=#testspec{alias=As,nodes=Ns},Relaxed) -> put(relaxed,Relaxed), - Terms1 = replace_names(Terms), - TestSpec1 = get_global(Terms1,TestSpec), - TestSpec2 = get_all_nodes(Terms1,TestSpec1), - {Terms2, TestSpec3} = filter_init_terms(Terms1, [], TestSpec2), - add_tests(Terms2,TestSpec3). + Terms1 = if Replace -> replace_names(Terms); + true -> Terms + end, + {MergeTestsDef,Terms2} = + case proplists:get_value(set_merge_tests,Terms1,true) of + false -> + %% disable merge_tests + {TestSpec#testspec.merge_tests, + proplists:delete(merge_tests,Terms1)}; + true -> + {true,Terms1} + end, + %% reverse nodes and aliases initially to get the order of them right + %% in case this spec is being joined with a previous one + TestSpec1 = get_global(Terms2,TestSpec#testspec{alias = lists:reverse(As), + nodes = lists:reverse(Ns), + merge_tests = MergeTestsDef}), + TestSpec2 = get_all_nodes(Terms2,TestSpec1), + {Terms3, TestSpec3} = filter_init_terms(Terms2, [], TestSpec2), + add_tests(Terms3,TestSpec3). 
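create_spec_tree/4 and create_specs/3 above pick up other specification files referenced from the current one and either join them with it or schedule them as separate runs. A sketch of the corresponding terms inside a specification file follows; the file names are hypothetical, and any value other than join is handled as a separate run:

    %% top.spec (sketch)
    {specs, join, ["common.spec", "nodes.spec"]}.   %% merged into this run
    {specs, separate, ["standalone.spec"]}.         %% executed as its own run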
%% replace names (atoms) in the testspec matching those in 'define' terms by %% searching recursively through tuples and lists @@ -420,9 +547,30 @@ replace_names_in_node1(NodeStr,Defs=[{Name,Replacement}|Ds]) -> replace_names_in_node1(NodeStr,[]) -> NodeStr. +%% look for other specification files, either to join with the +%% current spec, or execute as separate test runs +get_included_specs(Terms,TestSpec) -> + get_included_specs(Terms,TestSpec,[],[]). + +get_included_specs([{specs,How,SpecOrSpecs}|Ts],TestSpec,Join,Sep) -> + Specs = case SpecOrSpecs of + [File|_] when is_list(File) -> + [get_absfile(Spec,TestSpec) || Spec <- SpecOrSpecs]; + [Ch|_] when is_integer(Ch) -> + [get_absfile(SpecOrSpecs,TestSpec)] + end, + if How == join -> + get_included_specs(Ts,TestSpec,Join++Specs,Sep); + true -> + get_included_specs(Ts,TestSpec,Join,Sep++Specs) + end; +get_included_specs([_|Ts],TestSpec,Join,Sep) -> + get_included_specs(Ts,TestSpec,Join,Sep); +get_included_specs([],_,Join,Sep) -> + {Join,Sep}. %% global terms that will be used for analysing all other terms in the spec -get_global([{merge_tests,Bool} | Ts], Spec) -> +get_global([{merge_tests,Bool}|Ts],Spec) -> get_global(Ts,Spec#testspec{merge_tests=Bool}); %% the 'define' term replaces the 'alias' and 'node' terms, but we need to keep @@ -588,7 +736,7 @@ add_option({Key,Value},Node,List,WarnIfExists) when is_list(Value) -> NewOption = case lists:keyfind(Key,1,OldOptions) of {Key,OldOption} when WarnIfExists,OldOption/=[]-> io:format("There is an option ~w=~w already " - "defined for node ~p, skipping new ~w~n", + "defined for node ~w, skipping new ~w~n", [Key,OldOption,Node,Value]), OldOption; {Key,OldOption}-> @@ -637,7 +785,7 @@ add_tests([{suites,all_nodes,Dir,Ss}|Ts],Spec) -> add_tests([{suites,Dir,Ss}|Ts],Spec) -> add_tests([{suites,all_nodes,Dir,Ss}|Ts],Spec); add_tests([{suites,Nodes,Dir,Ss}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,suites,[Dir,Ss],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,suites,[Dir,Ss],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{suites,Node,Dir,Ss}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -660,11 +808,11 @@ add_tests([{groups,Dir,Suite,Gs}|Ts],Spec) -> add_tests([{groups,Dir,Suite,Gs,{cases,TCs}}|Ts],Spec) -> add_tests([{groups,all_nodes,Dir,Suite,Gs,{cases,TCs}}|Ts],Spec); add_tests([{groups,Nodes,Dir,Suite,Gs}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,groups,[Dir,Suite,Gs],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,groups,[Dir,Suite,Gs],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{groups,Nodes,Dir,Suite,Gs,{cases,TCs}}|Ts], Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,groups,[Dir,Suite,Gs,{cases,TCs}],Ts, + Ts1 = per_node(Nodes,groups,[Dir,Suite,Gs,{cases,TCs}],Ts, Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{groups,Node,Dir,Suite,Gs}|Ts],Spec) -> @@ -688,7 +836,7 @@ add_tests([{cases,all_nodes,Dir,Suite,Cs}|Ts],Spec) -> add_tests([{cases,Dir,Suite,Cs}|Ts],Spec) -> add_tests([{cases,all_nodes,Dir,Suite,Cs}|Ts],Spec); add_tests([{cases,Nodes,Dir,Suite,Cs}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,cases,[Dir,Suite,Cs],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,cases,[Dir,Suite,Cs],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{cases,Node,Dir,Suite,Cs}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -703,7 +851,7 @@ add_tests([{skip_suites,all_nodes,Dir,Ss,Cmt}|Ts],Spec) -> add_tests([{skip_suites,Dir,Ss,Cmt}|Ts],Spec) -> add_tests([{skip_suites,all_nodes,Dir,Ss,Cmt}|Ts],Spec); 
add_tests([{skip_suites,Nodes,Dir,Ss,Cmt}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_suites,[Dir,Ss,Cmt],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,skip_suites,[Dir,Ss,Cmt],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_suites,Node,Dir,Ss,Cmt}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -724,11 +872,11 @@ add_tests([{skip_groups,Dir,Suite,Gs,Cmt}|Ts],Spec) -> add_tests([{skip_groups,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts],Spec) -> add_tests([{skip_groups,all_nodes,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts],Spec); add_tests([{skip_groups,Nodes,Dir,Suite,Gs,Cmt}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_groups,[Dir,Suite,Gs,Cmt],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,skip_groups,[Dir,Suite,Gs,Cmt],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_groups,Nodes,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts], Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_groups,[Dir,Suite,Gs,{cases,TCs},Cmt],Ts, + Ts1 = per_node(Nodes,skip_groups,[Dir,Suite,Gs,{cases,TCs},Cmt],Ts, Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_groups,Node,Dir,Suite,Gs,Cmt}|Ts],Spec) -> @@ -752,7 +900,7 @@ add_tests([{skip_cases,all_nodes,Dir,Suite,Cs,Cmt}|Ts],Spec) -> add_tests([{skip_cases,Dir,Suite,Cs,Cmt}|Ts],Spec) -> add_tests([{skip_cases,all_nodes,Dir,Suite,Cs,Cmt}|Ts],Spec); add_tests([{skip_cases,Nodes,Dir,Suite,Cs,Cmt}|Ts],Spec) when is_list(Nodes) -> - Ts1 = separate(Nodes,skip_cases,[Dir,Suite,Cs,Cmt],Ts,Spec#testspec.nodes), + Ts1 = per_node(Nodes,skip_cases,[Dir,Suite,Cs,Cmt],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); add_tests([{skip_cases,Node,Dir,Suite,Cs,Cmt}|Ts],Spec) -> Tests = Spec#testspec.tests, @@ -783,6 +931,9 @@ add_tests([{release_shell,Bool}|Ts],Spec) -> add_tests(Ts, Spec#testspec{release_shell = Bool}); %% --- handled/errors --- +add_tests([{set_merge_tests,_}|Ts],Spec) -> % internal + add_tests(Ts,Spec); + add_tests([{define,_,_}|Ts],Spec) -> % handled add_tests(Ts,Spec); @@ -792,7 +943,10 @@ add_tests([{alias,_,_}|Ts],Spec) -> % handled add_tests([{node,_,_}|Ts],Spec) -> % handled add_tests(Ts,Spec); -add_tests([{merge_tests, _} | Ts], Spec) -> % handled +add_tests([{merge_tests,_} | Ts], Spec) -> % handled + add_tests(Ts,Spec); + +add_tests([{specs,_,_} | Ts], Spec) -> % handled add_tests(Ts,Spec); %% -------------------------------------------------- @@ -821,7 +975,7 @@ add_tests([{Tag,NodesOrOther,Data}|Ts],Spec) when is_list(NodesOrOther) -> case lists:all(fun(Test) -> is_node(Test,Spec#testspec.nodes) end, NodesOrOther) of true -> - Ts1 = separate(NodesOrOther,Tag,[Data],Ts,Spec#testspec.nodes), + Ts1 = per_node(NodesOrOther,Tag,[Data],Ts,Spec#testspec.nodes), add_tests(Ts1,Spec); false -> add_tests([{Tag,all_nodes,{NodesOrOther,Data}}|Ts],Spec) @@ -866,7 +1020,18 @@ add_tests([],Spec) -> % done %% check if it's a CT term that has bad format or if the user seems to %% have added something of his/her own, which we'll let pass if relaxed %% mode is enabled. -check_term(Term) -> +check_term(Atom) when is_atom(Atom) -> + Valid = valid_terms(), + case lists:member(Atom,Valid) of + true -> + valid; + false -> % ignore + case get(relaxed) of + true -> invalid; + false -> throw({error,{undefined_term_in_spec,Atom}}) + end + end; +check_term(Term) when is_tuple(Term) -> Size = size(Term), [Name|_] = tuple_to_list(Term), Valid = valid_terms(), @@ -894,7 +1059,9 @@ check_term(Term) -> throw({error,{undefined_term_in_spec,Term}}) end end - end. + end; +check_term(Other) -> + throw({error,{undefined_term_in_spec,Other}}). 
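per_node/5 (the renamed separate/5, defined further down) is what lets the add_tests/2 clauses above accept a list of nodes: the term is expanded into one copy per node before it is stored. Roughly, in a specification (node references are examples):

    {node, n1, 'ct1@host'}.
    {node, n2, 'ct2@host'}.
    {suites, [n1, n2], "../tests", all}.
    %% is handled as if it had been written:
    %% {suites, 'ct1@host', "../tests", all}.
    %% {suites, 'ct2@host', "../tests", all}.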
%% specific data handling before saving in testspec record, e.g. %% converting relative paths to absolute for directories and files @@ -903,6 +1070,8 @@ handle_data(logdir,Node,Dir,Spec) -> [{Node,ref2dir(Dir,Spec)}]; handle_data(cover,Node,File,Spec) -> [{Node,get_absfile(File,Spec)}]; +handle_data(cover_stop,Node,Stop,_Spec) -> + [{Node,Stop}]; handle_data(include,Node,Dirs=[D|_],Spec) when is_list(D) -> [{Node,ref2dir(Dir,Spec)} || Dir <- Dirs]; handle_data(include,Node,Dir=[Ch|_],Spec) when is_integer(Ch) -> @@ -975,12 +1144,12 @@ update_recorded(Tag,Node,Spec) -> end. %% create one test term per node -separate(Nodes,Tag,Data,Tests,Refs) -> - Separated = separate(Nodes,Tag,Data,Refs), +per_node(Nodes,Tag,Data,Tests,Refs) -> + Separated = per_node(Nodes,Tag,Data,Refs), Separated ++ Tests. -separate([N|Ns],Tag,Data,Refs) -> - [list_to_tuple([Tag,ref2node(N,Refs)|Data])|separate(Ns,Tag,Data,Refs)]; -separate([],_,_,_) -> +per_node([N|Ns],Tag,Data,Refs) -> + [list_to_tuple([Tag,ref2node(N,Refs)|Data])|per_node(Ns,Tag,Data,Refs)]; +per_node([],_,_,_) -> []. %% read the value for FieldName in record Rec#testspec @@ -1037,14 +1206,21 @@ insert_groups(Node,Dir,Suite,Groups,Cases,Tests,true) when {[Gr],Cases}; true -> {Gr,Cases} end || Gr <- Groups], - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},[{all,_}]}} -> - Tests; - {value,{{Node,Dir},Suites0}} -> - Suites1 = insert_groups1(Suite,Groups1,Suites0), - insert_in_order({{Node,Dir},Suites1},Tests); - false -> - insert_in_order({{Node,Dir},[{Suite,Groups1}]},Tests) + {Tests1,Done} = + lists:foldr(fun(All={{N,D},[{all,_}]},{Replaced,_}) when N == Node, + D == Dir -> + {[All|Replaced],true}; + ({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = insert_groups1(Suite,Groups1,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},[{Suite,Groups1}]}]; + true -> + Tests1 end; insert_groups(Node,Dir,Suite,Groups,Case,Tests, MergeTests) when is_atom(Case) -> @@ -1082,14 +1258,21 @@ insert_groups2([],GrAndCases) -> insert_cases(Node,Dir,Suite,Cases,Tests,false) when is_list(Cases) -> append({{Node,Dir},[{Suite,Cases}]},Tests); insert_cases(Node,Dir,Suite,Cases,Tests,true) when is_list(Cases) -> - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},[{all,_}]}} -> - Tests; - {value,{{Node,Dir},Suites0}} -> - Suites1 = insert_cases1(Suite,Cases,Suites0), - insert_in_order({{Node,Dir},Suites1},Tests); - false -> - insert_in_order({{Node,Dir},[{Suite,Cases}]},Tests) + {Tests1,Done} = + lists:foldr(fun(All={{N,D},[{all,_}]},{Replaced,_}) when N == Node, + D == Dir -> + {[All|Replaced],true}; + ({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = insert_cases1(Suite,Cases,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},[{Suite,Cases}]}]; + true -> + Tests1 end; insert_cases(Node,Dir,Suite,Case,Tests,MergeTests) when is_atom(Case) -> insert_cases(Node,Dir,Suite,[Case],Tests,MergeTests). 
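The new handle_data(cover_stop, ...) clause above, together with the cover_stop entries added to valid_terms/0 further down, makes the option usable from a test specification with or without a node qualifier. A sketch, where the file and node names are examples:

    {cover, "my_app.cover"}.
    {cover_stop, false}.                  %% leave modules cover compiled after the run
    %% or restricted to one node:
    {cover_stop, 'ct_node@host', false}.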
@@ -1130,15 +1313,23 @@ skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,false) when append({{Node,Dir},Suites1},Tests); skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,true) when ((Cases == all) or is_list(Cases)) and is_list(Groups) -> - Suites = - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},Suites0}} -> - Suites0; - false -> - [] - end, - Suites1 = skip_groups1(Suite,[{Gr,Cases} || Gr <- Groups],Cmt,Suites), - insert_in_order({{Node,Dir},Suites1},Tests); + {Tests1,Done} = + lists:foldr(fun({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = skip_groups1(Suite, + [{Gr,Cases} || Gr <- Groups], + Cmt,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},skip_groups1(Suite, + [{Gr,Cases} || Gr <- Groups], + Cmt,[])}]; + true -> + Tests1 + end; skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests,MergeTests) when is_atom(Case) -> Cases = if Case == all -> all; true -> [Case] end, @@ -1160,15 +1351,19 @@ skip_cases(Node,Dir,Suite,Cases,Cmt,Tests,false) when is_list(Cases) -> Suites1 = skip_cases1(Suite,Cases,Cmt,[]), append({{Node,Dir},Suites1},Tests); skip_cases(Node,Dir,Suite,Cases,Cmt,Tests,true) when is_list(Cases) -> - Suites = - case lists:keysearch({Node,Dir},1,Tests) of - {value,{{Node,Dir},Suites0}} -> - Suites0; - false -> - [] - end, - Suites1 = skip_cases1(Suite,Cases,Cmt,Suites), - insert_in_order({{Node,Dir},Suites1},Tests); + {Tests1,Done} = + lists:foldr(fun({{N,D},Suites0},{Replaced,_}) when N == Node, + D == Dir -> + Suites1 = skip_cases1(Suite,Cases,Cmt,Suites0), + {[{{N,D},Suites1}|Replaced],true}; + (T,{Replaced,Match}) -> + {[T|Replaced],Match} + end, {[],false}, Tests), + if not Done -> + Tests ++ [{{Node,Dir},skip_cases1(Suite,Cases,Cmt,[])}]; + true -> + Tests1 + end; skip_cases(Node,Dir,Suite,Case,Cmt,Tests,MergeTests) when is_atom(Case) -> skip_cases(Node,Dir,Suite,[Case],Cmt,Tests,MergeTests). @@ -1258,10 +1453,14 @@ is_node([],_) -> valid_terms() -> [ + {set_merge_tests,2}, {define,3}, + {specs,3}, {node,3}, {cover,2}, {cover,3}, + {cover_stop,2}, + {cover_stop,3}, {config,2}, {config,3}, {config,4}, diff --git a/lib/common_test/src/ct_util.erl b/lib/common_test/src/ct_util.erl index cf891ed043..0f2b2081d9 100644 --- a/lib/common_test/src/ct_util.erl +++ b/lib/common_test/src/ct_util.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2012. All Rights Reserved. +%% Copyright Ericsson AB 2003-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -396,14 +396,14 @@ loop(Mode,TestData,StartDir) -> %% A connection crashed - remove the connection but don't die ct_logs:tc_log_async(ct_error_notify, "Connection process died: " - "Pid: ~p, Address: ~p, Callback: ~p\n" + "Pid: ~w, Address: ~p, Callback: ~w\n" "Reason: ~p\n\n", [Pid,A,CB,Reason]), catch CB:close(Pid), loop(Mode,TestData,StartDir); _ -> %% Let process crash in case of error, this shouldn't happen! 
- io:format("\n\nct_util_server got EXIT from ~p: ~p\n\n", + io:format("\n\nct_util_server got EXIT from ~w: ~p\n\n", [Pid,Reason]), file:set_cwd(StartDir), exit(Reason) @@ -956,7 +956,7 @@ open_url(iexplore, Args, URL) -> Path = proplists:get_value(default, Paths), [Cmd | _] = string:tokens(Path, "%"), Cmd1 = Cmd ++ " " ++ Args ++ " " ++ URL, - io:format(user, "~nOpening ~s with command:~n ~s~n", [URL,Cmd1]), + io:format(user, "~nOpening ~ts with command:~n ~ts~n", [URL,Cmd1]), open_port({spawn,Cmd1}, []); _ -> io:format("~nNo path to iexplore.exe~n",[]) @@ -969,6 +969,6 @@ open_url(Prog, Args, URL) -> is_list(Prog) -> Prog end, Cmd = ProgStr ++ " " ++ Args ++ " " ++ URL, - io:format(user, "~nOpening ~s with command:~n ~s~n", [URL,Cmd]), + io:format(user, "~nOpening ~ts with command:~n ~ts~n", [URL,Cmd]), open_port({spawn,Cmd},[]), ok. diff --git a/lib/common_test/src/ct_util.hrl b/lib/common_test/src/ct_util.hrl index 196b5e46d0..c9c6514fa4 100644 --- a/lib/common_test/src/ct_util.hrl +++ b/lib/common_test/src/ct_util.hrl @@ -38,6 +38,7 @@ verbosity=[], silent_connections=[], cover=[], + cover_stop=[], config=[], userconfig=[], event_handler=[], diff --git a/lib/common_test/src/cth_conn_log.erl b/lib/common_test/src/cth_conn_log.erl index 3af89db3a5..644594e34d 100644 --- a/lib/common_test/src/cth_conn_log.erl +++ b/lib/common_test/src/cth_conn_log.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2012. All Rights Reserved. +%% Copyright Ericsson AB 2012-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -58,7 +58,7 @@ -spec init(Id, HookOpts) -> Result when Id :: term(), - HookOpts :: ct:hook_options(), + HookOpts :: ct_netconfc:hook_options(), Result :: {ok,[{ct_netconfc:conn_mod(), {ct_netconfc:log_type(),[ct_netconfc:key_or_name()]}}]}. init(_Id, HookOpts) -> @@ -105,8 +105,9 @@ pre_init_per_testcase(TestCase,Config,CthState) -> "<table borders=1>" "<b>" ++ ConnModStr ++ " logs:</b>\n" ++ [io_lib:format( - "<tr><td>~p</td><td><a href=~p>~s</a></td></tr>", - [S,L,filename:basename(L)]) + "<tr><td>~p</td><td><a href=\"~ts\">~ts</a>" + "</td></tr>", + [S,ct_logs:uri(L),filename:basename(L)]) || {S,L} <- Ls] ++ "</table>", io:format(Str,[]), diff --git a/lib/common_test/src/cth_log_redirect.erl b/lib/common_test/src/cth_log_redirect.erl index 77f57c6195..78ae70f37e 100644 --- a/lib/common_test/src/cth_log_redirect.erl +++ b/lib/common_test/src/cth_log_redirect.erl @@ -54,7 +54,7 @@ post_init_per_group(_Group, _Config, Result, State) -> post_end_per_testcase(_TC, _Config, Result, State) -> %% Make sure that the event queue is flushed %% before ending this test case. - gen_event:call(error_logger, ?MODULE, flush), + gen_event:call(error_logger, ?MODULE, flush, 300000), {Result, State}. pre_end_per_group(Group, Config, {ct_log, Group}) -> diff --git a/lib/common_test/src/cth_surefire.erl b/lib/common_test/src/cth_surefire.erl index 76b0f0b5ea..1a38b6584b 100644 --- a/lib/common_test/src/cth_surefire.erl +++ b/lib/common_test/src/cth_surefire.erl @@ -1,3 +1,22 @@ +%%-------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012-2013. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. 
You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%%-------------------------------------------------------------------- + %%% @doc Common Test Framework functions handling test specifications. %%% %%% <p>This module creates a junit report of the test run if plugged in @@ -27,18 +46,28 @@ -export([terminate/1]). -record(state, { filepath, axis, properties, package, hostname, - curr_suite, curr_suite_ts, curr_group = [], curr_tc, - curr_log_dir, timer, tc_log, + curr_suite, curr_suite_ts, curr_group = [], + curr_log_dir, timer, tc_log, url_base, test_cases = [], test_suites = [] }). --record(testcase, { log, group, classname, name, time, failure, timestamp }). --record(testsuite, { errors, failures, hostname, name, tests, +-record(testcase, { log, url, group, classname, name, time, result, timestamp }). +-record(testsuite, { errors, failures, skipped, hostname, name, tests, time, timestamp, id, package, - properties, testcases }). + properties, testcases, log, url }). + +-define(default_report,"junit_report.xml"). +-define(suite_log,"suite.log.html"). + +%% Number of dirs from log root to testcase log file. +%% ct_run.<node>.<timestamp>/<test_name>/run.<timestamp>/<tc_log>.html +-define(log_depth,3). id(Opts) -> - filename:absname(proplists:get_value(path, Opts, "junit_report.xml")). + case proplists:get_value(path, Opts) of + undefined -> ?default_report; + Path -> filename:absname(Path) + end. init(Path, Opts) -> {ok, Host} = inet:gethostname(), @@ -47,10 +76,24 @@ init(Path, Opts) -> package = proplists:get_value(package,Opts), axis = proplists:get_value(axis,Opts,[]), properties = proplists:get_value(properties,Opts,[]), + url_base = proplists:get_value(url_base,Opts), timer = now() }. pre_init_per_suite(Suite,Config,#state{ test_cases = [] } = State) -> - {Config, init_tc(State#state{ curr_suite = Suite, curr_suite_ts = now() }, + TcLog = proplists:get_value(tc_logfile,Config), + CurrLogDir = filename:dirname(TcLog), + Path = + case State#state.filepath of + ?default_report -> + RootDir = get_test_root(TcLog), + filename:join(RootDir,?default_report); + P -> + P + end, + {Config, init_tc(State#state{ filepath = Path, + curr_suite = Suite, + curr_suite_ts = now(), + curr_log_dir = CurrLogDir}, Config) }; pre_init_per_suite(Suite,Config,State) -> %% Have to close the previous suite @@ -59,7 +102,8 @@ pre_init_per_suite(Suite,Config,State) -> post_init_per_suite(_Suite,Config, Result, State) -> {Result, end_tc(init_per_suite,Config,Result,State)}. -pre_end_per_suite(_Suite,Config,State) -> {Config, init_tc(State, Config)}. +pre_end_per_suite(_Suite,Config,State) -> + {Config, init_tc(State, Config)}. post_end_per_suite(_Suite,Config,Result,State) -> {Result, end_tc(end_per_suite,Config,Result,State)}. @@ -71,13 +115,15 @@ pre_init_per_group(Group,Config,State) -> post_init_per_group(_Group,Config,Result,State) -> {Result, end_tc(init_per_group,Config,Result,State)}. -pre_end_per_group(_Group,Config,State) -> {Config, init_tc(State, Config)}. +pre_end_per_group(_Group,Config,State) -> + {Config, init_tc(State, Config)}. 
post_end_per_group(_Group,Config,Result,State) -> NewState = end_tc(end_per_group, Config, Result, State), {Result, NewState#state{ curr_group = tl(NewState#state.curr_group)}}. -pre_init_per_testcase(_TC,Config,State) -> {Config, init_tc(State, Config)}. +pre_init_per_testcase(_TC,Config,State) -> + {Config, init_tc(State, Config)}. post_end_per_testcase(TC,Config,Result,State) -> {Result, end_tc(TC,Config, Result,State)}. @@ -88,11 +134,19 @@ on_tc_fail(_TC, Res, State) -> TCs = State#state.test_cases, TC = hd(TCs), NewTC = TC#testcase{ - failure = + result = {fail,lists:flatten(io_lib:format("~p",[Res]))} }, State#state{ test_cases = [NewTC | tl(TCs)]}. -on_tc_skip(Tc,{Type,_Reason} = Res, State) when Type == tc_auto_skip -> +on_tc_skip(Tc,{Type,_Reason} = Res, State0) when Type == tc_auto_skip -> + TcStr = atom_to_list(Tc), + State = + case State0#state.test_cases of + [#testcase{name=TcStr}|TCs] -> + State0#state{test_cases=TCs}; + _ -> + State0 + end, do_tc_skip(Res, end_tc(Tc,[],Res,init_tc(State,[]))); on_tc_skip(_Tc, _Res, State = #state{test_cases = []}) -> State; @@ -103,7 +157,7 @@ do_tc_skip(Res, State) -> TCs = State#state.test_cases, TC = hd(TCs), NewTC = TC#testcase{ - failure = + result = {skipped,lists:flatten(io_lib:format("~p",[Res]))} }, State#state{ test_cases = [NewTC | tl(TCs)]}. @@ -117,33 +171,52 @@ end_tc(Func, Config, Res, State) when is_atom(Func) -> end_tc(atom_to_list(Func), Config, Res, State); end_tc(Name, _Config, _Res, State = #state{ curr_suite = Suite, curr_group = Groups, - timer = TS, tc_log = Log } ) -> + curr_log_dir = CurrLogDir, + timer = TS, + tc_log = Log0, + url_base = UrlBase } ) -> + Log = + case Log0 of + "" -> + LowerSuiteName = string:to_lower(atom_to_list(Suite)), + filename:join(CurrLogDir,LowerSuiteName++"."++Name++".html"); + _ -> + Log0 + end, + Url = make_url(UrlBase,Log), ClassName = atom_to_list(Suite), PGroup = string:join([ atom_to_list(Group)|| Group <- lists:reverse(Groups)],"."), TimeTakes = io_lib:format("~f",[timer:now_diff(now(),TS) / 1000000]), State#state{ test_cases = [#testcase{ log = Log, + url = Url, timestamp = now_to_string(TS), classname = ClassName, group = PGroup, name = Name, time = TimeTakes, - failure = passed }| State#state.test_cases]}. + result = passed }| + State#state.test_cases], + tc_log = ""}. % so old tc_log is not set if next is on_tc_skip close_suite(#state{ test_cases = [] } = State) -> State; -close_suite(#state{ test_cases = TCs } = State) -> - Total = length(TCs), - Succ = length(lists:filter(fun(#testcase{ failure = F }) -> - F == passed - end,TCs)), - Fail = Total - Succ, +close_suite(#state{ test_cases = TCs, url_base = UrlBase } = State) -> + {Total,Fail,Skip} = count_tcs(TCs,0,0,0), TimeTaken = timer:now_diff(now(),State#state.curr_suite_ts) / 1000000, + SuiteLog = filename:join(State#state.curr_log_dir,?suite_log), + SuiteUrl = make_url(UrlBase,SuiteLog), Suite = #testsuite{ name = atom_to_list(State#state.curr_suite), package = State#state.package, + hostname = State#state.hostname, time = io_lib:format("~f",[TimeTaken]), timestamp = now_to_string(State#state.curr_suite_ts), - errors = Fail, tests = Total, - testcases = lists:reverse(TCs) }, + errors = 0, + failures = Fail, + skipped = Skip, + tests = Total, + testcases = lists:reverse(TCs), + log = SuiteLog, + url = SuiteUrl}, State#state{ test_cases = [], test_suites = [Suite | State#state.test_suites]}. 
@@ -159,14 +232,15 @@ terminate(State) -> -to_xml(#testcase{ group = Group, classname = CL, log = L, name = N, time = T, timestamp = TS, failure = F}) -> +to_xml(#testcase{ group = Group, classname = CL, log = L, url = U, name = N, time = T, timestamp = TS, result = R}) -> ["<testcase ", - [["group=\"",Group,"\""]||Group /= ""]," " + [["group=\"",Group,"\" "]||Group /= ""], "name=\"",N,"\" " "time=\"",T,"\" " - "timestamp=\"",TS,"\" " + "timestamp=\"",TS,"\" ", + [["url=\"",U,"\" "]||U /= undefined], "log=\"",L,"\">", - case F of + case R of passed -> []; {skipped,Reason} -> @@ -176,22 +250,29 @@ to_xml(#testcase{ group = Group, classname = CL, log = L, name = N, time = T, ti ["<failure message=\"Test ",N," in ",CL," failed!\" type=\"crash\">", sanitize(Reason),"</failure>"] end,"</testcase>"]; -to_xml(#testsuite{ package = P, hostname = H, errors = E, time = Time, - timestamp = TS, tests = T, name = N, testcases = Cases }) -> +to_xml(#testsuite{ package = P, hostname = H, errors = E, failures = F, + skipped = S, time = Time, timestamp = TS, tests = T, name = N, + testcases = Cases, log = Log, url = Url }) -> ["<testsuite ", [["package=\"",P,"\" "]||P /= undefined], - [["hostname=\"",P,"\" "]||H /= undefined], - [["name=\"",N,"\" "]||N /= undefined], - [["time=\"",Time,"\" "]||Time /= undefined], - [["timestamp=\"",TS,"\" "]||TS /= undefined], + "hostname=\"",H,"\" " + "name=\"",N,"\" " + "time=\"",Time,"\" " + "timestamp=\"",TS,"\" " "errors=\"",integer_to_list(E),"\" " - "tests=\"",integer_to_list(T),"\">", + "failures=\"",integer_to_list(F),"\" " + "skipped=\"",integer_to_list(S),"\" " + "tests=\"",integer_to_list(T),"\" ", + [["url=\"",Url,"\" "]||Url /= undefined], + "log=\"",Log,"\">", [to_xml(Case) || Case <- Cases], "</testsuite>"]; to_xml(#state{ test_suites = TestSuites, axis = Axis, properties = Props }) -> ["<testsuites>",properties_to_xml(Axis,Props), [to_xml(TestSuite) || TestSuite <- TestSuites],"</testsuites>"]. +properties_to_xml([],[]) -> + []; properties_to_xml(Axis,Props) -> ["<properties>", [["<property name=\"",Name,"\" axis=\"yes\" value=\"",Value,"\" />"] || {Name,Value} <- Axis], @@ -216,4 +297,38 @@ sanitize([]) -> now_to_string(Now) -> {{YY,MM,DD},{HH,Mi,SS}} = calendar:now_to_local_time(Now), - io_lib:format("~p-~2..0B-~2..0BT~2..0B:~2..0B:~2..0B",[YY,MM,DD,HH,Mi,SS]). + io_lib:format("~w-~2..0B-~2..0BT~2..0B:~2..0B:~2..0B",[YY,MM,DD,HH,Mi,SS]). + +make_url(undefined,_) -> + undefined; +make_url(_,[]) -> + undefined; +make_url(UrlBase0,Log) -> + UrlBase = string:strip(UrlBase0,right,$/), + RelativeLog = get_relative_log_url(Log), + string:join([UrlBase,RelativeLog],"/"). + +get_test_root(Log) -> + LogParts = filename:split(Log), + filename:join(lists:sublist(LogParts,1,length(LogParts)-?log_depth)). + +get_relative_log_url(Log) -> + LogParts = filename:split(Log), + Start = length(LogParts)-?log_depth, + Length = ?log_depth+1, + string:join(lists:sublist(LogParts,Start,Length),"/"). + +count_tcs([#testcase{name=ConfCase}|TCs],Ok,F,S) + when ConfCase=="init_per_suite"; + ConfCase=="end_per_suite"; + ConfCase=="init_per_group"; + ConfCase=="end_per_group" -> + count_tcs(TCs,Ok,F,S); +count_tcs([#testcase{result=passed}|TCs],Ok,F,S) -> + count_tcs(TCs,Ok+1,F,S); +count_tcs([#testcase{result={fail,_}}|TCs],Ok,F,S) -> + count_tcs(TCs,Ok,F+1,S); +count_tcs([#testcase{result={skipped,_}}|TCs],Ok,F,S) -> + count_tcs(TCs,Ok,F,S+1); +count_tcs([],Ok,F,S) -> + {Ok+F+S,F,S}. 
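With id/1 and init/2 as defined above, the hook reads the path, package, axis, properties and new url_base options. A sketch of enabling it from a test suite, where the report name and URL are examples:

    suite() ->
        [{ct_hooks, [{cth_surefire, [{path, "junit_report.xml"},
                                     {url_base, "http://buildhost/ct_logs/"}]}]}].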
diff --git a/lib/common_test/src/unix_telnet.erl b/lib/common_test/src/unix_telnet.erl index 25b9d4d5d2..99ce92e9f1 100644 --- a/lib/common_test/src/unix_telnet.erl +++ b/lib/common_test/src/unix_telnet.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2010. All Rights Reserved. +%% Copyright Ericsson AB 2004-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -110,7 +110,7 @@ connect1(Ip,Port,Timeout,KeepAlive,Username,Password) -> case ct_telnet:silent_teln_expect(Pid,[],[prompt],?prx,[]) of {ok,{prompt,?username},_} -> ok = ct_telnet_client:send_data(Pid,Username), - cont_log("Username: ~s",[Username]), + cont_log("Username: ~ts",[Username]), case ct_telnet:silent_teln_expect(Pid,[],prompt,?prx,[]) of {ok,{prompt,?password},_} -> ok = ct_telnet_client:send_data(Pid,Password), diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile index 374fd8a824..760cc20410 100644 --- a/lib/common_test/test/Makefile +++ b/lib/common_test/test/Makefile @@ -40,6 +40,7 @@ MODULES= \ ct_repeat_1_SUITE \ ct_testspec_1_SUITE \ ct_testspec_2_SUITE \ + ct_testspec_3_SUITE \ ct_skip_SUITE \ ct_error_SUITE \ ct_test_server_if_1_SUITE \ @@ -52,7 +53,12 @@ MODULES= \ ct_auto_compile_SUITE \ ct_verbosity_SUITE \ ct_shell_SUITE \ - ct_groups_search_SUITE + ct_system_error_SUITE \ + ct_snmp_SUITE \ + ct_group_leader_SUITE \ + ct_cover_SUITE \ + ct_groups_search_SUITE \ + ct_surefire_SUITE ERL_FILES= $(MODULES:%=%.erl) @@ -106,7 +112,7 @@ release_spec: opt release_tests_spec: $(INSTALL_DIR) "$(RELSYSDIR)" $(INSTALL_DATA) $(ERL_FILES) $(COVERFILE) "$(RELSYSDIR)" - $(INSTALL_DATA) common_test.spec "$(RELSYSDIR)" + $(INSTALL_DATA) common_test.spec common_test.cover "$(RELSYSDIR)" chmod -R u+w "$(RELSYSDIR)" @tar cf - *_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) diff --git a/lib/common_test/test/common_test.cover b/lib/common_test/test/common_test.cover new file mode 100644 index 0000000000..3aa49623e7 --- /dev/null +++ b/lib/common_test/test/common_test.cover @@ -0,0 +1,10 @@ +%% -*- erlang -*- +{incl_app,common_test,details}. +{cross,common_test,[{test_server,[erl2html2, + test_server, + test_server_ctrl, + test_server_gl, + test_server_h, + test_server_io, + test_server_node, + test_server_sup]}]}. diff --git a/lib/common_test/test/ct_config_info_SUITE.erl b/lib/common_test/test/ct_config_info_SUITE.erl index 40da377ee5..10fe8286dd 100644 --- a/lib/common_test/test/ct_config_info_SUITE.erl +++ b/lib/common_test/test/ct_config_info_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2009-2011. All Rights Reserved. +%% Copyright Ericsson AB 2009-2012. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -123,8 +123,7 @@ test_events(config_info) -> {?eh,tc_done,{config_info_1_SUITE,init_per_suite,ok}}, [{?eh,tc_start,{config_info_1_SUITE,{init_per_group,g1,[]}}}, - {?eh,tc_done,{config_info_1_SUITE, - {init_per_group,unknown,[]}, + {?eh,tc_done,{config_info_1_SUITE,{init_per_group,g1,[]}, {failed,{timetrap_timeout,350}}}}, {?eh,tc_auto_skip,{config_info_1_SUITE,t11, {failed,{config_info_1_SUITE,init_per_group,{timetrap_timeout,350}}}}}, @@ -136,14 +135,12 @@ test_events(config_info) -> {?eh,tc_done,{config_info_1_SUITE,{init_per_group,g2,[]},ok}}, {?eh,tc_done,{config_info_1_SUITE,t21,ok}}, {?eh,tc_start,{config_info_1_SUITE,{end_per_group,g2,[]}}}, - {?eh,tc_done,{config_info_1_SUITE, - {end_per_group,unknown,[]}, + {?eh,tc_done,{config_info_1_SUITE,{end_per_group,g2,[]}, {failed,{timetrap_timeout,450}}}}], [{?eh,tc_start,{config_info_1_SUITE,{init_per_group,g3,[]}}}, {?eh,tc_done,{config_info_1_SUITE,{init_per_group,g3,[]},ok}}, [{?eh,tc_start,{config_info_1_SUITE,{init_per_group,g4,[]}}}, - {?eh,tc_done,{config_info_1_SUITE, - {init_per_group,unknown,[]}, + {?eh,tc_done,{config_info_1_SUITE,{init_per_group,g4,[]}, {failed,{timetrap_timeout,400}}}}, {?eh,tc_auto_skip,{config_info_1_SUITE,t41, {failed,{config_info_1_SUITE,init_per_group, @@ -164,8 +161,7 @@ test_events(config_info) -> {?eh,tc_done,{config_info_1_SUITE,{init_per_group,g5,[]},ok}}, {?eh,tc_done,{config_info_1_SUITE,t51,ok}}, {?eh,tc_start,{config_info_1_SUITE,{end_per_group,g5,[]}}}, - {?eh,tc_done,{config_info_1_SUITE, - {end_per_group,unknown,[]}, + {?eh,tc_done,{config_info_1_SUITE,{end_per_group,g5,[]}, {failed,{timetrap_timeout,400}}}}], {?eh,tc_start,{config_info_1_SUITE,{end_per_group,g3,[]}}}, {?eh,tc_done,{config_info_1_SUITE,{end_per_group,g3,[]},ok}}], diff --git a/lib/common_test/test/ct_cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE.erl new file mode 100644 index 0000000000..cb49dc423f --- /dev/null +++ b/lib/common_test/test/ct_cover_SUITE.erl @@ -0,0 +1,312 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File: ct_cover_SUITE +%%% +%%% Description: +%%% Test code cover analysis support +%%% +%%%------------------------------------------------------------------- +-module(ct_cover_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + +-define(eh, ct_test_support_eh). +-define(suite, cover_SUITE). +-define(mod, cover_test_mod). 
+ +%%-------------------------------------------------------------------- +%% TEST SERVER CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- + +%%-------------------------------------------------------------------- +%% Description: Since Common Test starts another Test Server +%% instance, the tests need to be performed on a separate node (or +%% there will be clashes with logging processes etc). +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + case test_server:is_cover() of + true -> + {skip,"Test server is running cover already - skipping"}; + false -> + ct_test_support:init_per_suite(Config) + end. + +end_per_suite(Config) -> + ct_test_support:end_per_suite(Config). + +init_per_testcase(TestCase, Config) -> + ct_test_support:init_per_testcase(TestCase, Config). + +end_per_testcase(TestCase, Config) -> + Node = fullname(existing_node), + case lists:member(Node,nodes()) of + true -> rpc:call(Node,erlang,halt,[]); + false -> ok + end, + ct_test_support:end_per_testcase(TestCase, Config). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [ + default, + cover_stop_true, + cover_stop_false, + slave, + slave_start_slave, + cover_node_option, + ct_cover_add_remove_nodes, + otp_9956, + cross + ]. + +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%% Check that cover is collected from test node +%% Also check that cover is by default stopped after test is completed +default(Config) -> + {ok,Events} = run_test(default,Config), + false = check_cover(Config), + check_calls(Events,1), + ok. + +%% Check that cover is stopped when cover_stop option is set to true +cover_stop_true(Config) -> + {ok,_Events} = run_test(cover_stop_true,[{cover_stop,true}],Config), + false = check_cover(Config). + +%% Check that cover is not stopped when cover_stop option is set to false +cover_stop_false(Config) -> + {ok,_Events} = run_test(cover_stop_false,[{cover_stop,false}],Config), + {true,[],[?mod]} = check_cover(Config), + CTNode = proplists:get_value(ct_node, Config), + ok = rpc:call(CTNode,cover,stop,[]), + false = check_cover(Config), + ok. + +%% Let test node start a slave node - check that cover is collected +%% from both nodes +slave(Config) -> + {ok,Events} = run_test(slave,slave,[],Config), + check_calls(Events,2), + ok. + +%% Let test node start a slave node which in turn starts another slave +%% node - check that cover is collected from all three nodes +slave_start_slave(Config) -> + {ok,Events} = run_test(slave_start_slave,slave_start_slave,[],Config), + check_calls(Events,3), + ok. + +%% Start a slave node before test starts - the node is listed in cover +%% spec file. +%% Check that cover is collected from test node and slave node. +cover_node_option(Config) -> + {ok, HostStr}=inet:gethostname(), + Host = list_to_atom(HostStr), + DataDir = ?config(data_dir,Config), + {ok,Node} = ct_slave:start(Host,existing_node, + [{erl_flags,"-pa " ++ DataDir}]), + false = check_cover(Node), + CoverSpec = default_cover_file_content() ++ [{nodes,[Node]}], + CoverFile = create_cover_file(cover_node_option,CoverSpec,Config), + {ok,Events} = run_test(cover_node_option,cover_node_option, + [{cover,CoverFile}],Config), + check_calls(Events,2), + {ok,Node} = ct_slave:stop(existing_node), + ok. 
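cover_node_option/1 above builds its cover specification by appending a nodes term to the default content; written out as a file it would look roughly like this (the node name is an example):

    %% cover_node_option.cover (sketch)
    {incl_mods, [cover_test_mod]}.
    {nodes, ['existing_node@myhost']}.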
+ +%% Test ct_cover:add_nodes/1 and ct_cover:remove_nodes/1 +%% Check that cover is collected from added node +ct_cover_add_remove_nodes(Config) -> + {ok, HostStr}=inet:gethostname(), + Host = list_to_atom(HostStr), + DataDir = ?config(data_dir,Config), + {ok,Node} = ct_slave:start(Host,existing_node, + [{erl_flags,"-pa " ++ DataDir}]), + false = check_cover(Node), + {ok,Events} = run_test(ct_cover_add_remove_nodes,ct_cover_add_remove_nodes, + [],Config), + check_calls(Events,2), + {ok,Node} = ct_slave:stop(existing_node), + ok. + +%% Test that the test suite itself can be cover compiled and that +%% data_dir is set correctly (OTP-9956) +otp_9956(Config) -> + CoverFile = create_cover_file(otp_9956,[{incl_mods,[?suite]}],Config), + {ok,Events} = run_test(otp_9956,otp_9956,[{cover,CoverFile}],Config), + check_calls(Events,{?suite,otp_9956,1},1), + ok. + +%% Test cross cover mechanism +cross(Config) -> + {ok,Events1} = run_test(cross1,Config), + check_calls(Events1,1), + + CoverFile2 = create_cover_file(cross1,[{cross,[{cross1,[?mod]}]}],Config), + {ok,Events2} = run_test(cross2,[{cover,CoverFile2}],Config), + check_calls(Events2,1), + + %% Get the log dirs for each test and run cross cover analyse + [D11,D12] = lists:sort(get_run_dirs(Events1)), + [D21,D22] = lists:sort(get_run_dirs(Events2)), + + ct_cover:cross_cover_analyse(details,[{cross1,D11},{cross2,D21}]), + ct_cover:cross_cover_analyse(details,[{cross1,D12},{cross2,D22}]), + + %% Get the cross cover logs and read for each test + [C11,C12,C21,C22] = + [filename:join(D,"cross_cover.html") || D <- [D11,D12,D21,D22]], + + {ok,CrossData} = file:read_file(C11), + {ok,CrossData} = file:read_file(C12), + + {ok,Def} = file:read_file(C21), + {ok,Def} = file:read_file(C22), + + %% A simple test: just check that the test module exists in the + %% log from cross1 test, and that it does not exist in the log + %% from cross2 test. + TestMod = list_to_binary(atom_to_list(?mod)), + {_,_} = binary:match(CrossData,TestMod), + nomatch = binary:match(Def,TestMod), + {_,_} = binary:match(Def, + <<"No cross cover modules exist for this application">>), + + ok. + + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- +run_test(Label,Config) -> + run_test(Label,[],Config). +run_test(Label,ExtraOpts,Config) -> + run_test(Label,default,ExtraOpts,Config). +run_test(Label,Testcase,ExtraOpts,Config) -> + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, ?suite), + CoverFile = + case proplists:get_value(cover,ExtraOpts) of + undefined -> + create_default_cover_file(Label,Config); + CF -> + CF + end, + RestOpts = lists:keydelete(cover,1,ExtraOpts), + {Opts,ERPid} = setup([{suite,Suite},{testcase,Testcase}, + {cover,CoverFile},{label,Label}] ++ RestOpts, Config), + execute(Label, Testcase, Opts, ERPid, Config). + +setup(Test, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. 
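The cross/1 case exercises accumulated (cross) cover analysis: each test run gets its own cover specification, and the combined result is produced afterwards with ct_cover:cross_cover_analyse/2. Condensed from the code above, where LogDir1 and LogDir2 stand for the run.<timestamp> directories of the two runs:

    %% cross1.cover (used for the first test run)
    {incl_mods, [cover_test_mod]}.

    %% cross2.cover (used for the second test run)
    {cross, [{cross1, [cover_test_mod]}]}.

    %% after both runs, accumulate the result per tag:
    ct_cover:cross_cover_analyse(details, [{cross1, LogDir1}, {cross2, LogDir2}]).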
+ +execute(Name, Testcase, Opts, ERPid, Config) -> + ok = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(Name, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + TestEvents = events_to_check(Testcase), + R = ct_test_support:verify_events(TestEvents, Events, Config), + {R,Events}. + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). + +events_to_check(Testcase) -> + OneTest = + [{?eh,start_logging,{'DEF','RUNDIR'}}] ++ + [{?eh,tc_done,{?suite,Testcase,ok}}] ++ + [{?eh,stop_logging,[]}], + + %% 2 tests (ct:run_test + script_start) is default + OneTest ++ OneTest. + +check_cover(Config) when is_list(Config) -> + CTNode = proplists:get_value(ct_node, Config), + check_cover(CTNode); +check_cover(Node) when is_atom(Node) -> + case rpc:call(Node,test_server,is_cover,[]) of + true -> + {true, + rpc:call(Node,cover,which_nodes,[]), + rpc:call(Node,cover,modules,[])}; + false -> + false + end. + +%% Get the log dir "run.<timestamp>" for all (both!) tests +get_run_dirs(Events) -> + [filename:dirname(TCLog) || + {ct_test_support_eh, + {event,tc_logfile,_Node, + {{?suite,init_per_suite},TCLog}}} <- Events]. + +%% Check that each coverlog includes N calls to ?mod:foo/0 +check_calls(Events,N) -> + check_calls(Events,{?mod,foo,0},N). +check_calls(Events,MFA,N) -> + CoverLogs = [filename:join(D,"all.coverdata") || D <- get_run_dirs(Events)], + do_check_logs(CoverLogs,MFA,N). + +do_check_logs([CoverLog|CoverLogs],{Mod,_,_} = MFA,N) -> + {ok,_} = cover:start(), + ok = cover:import(CoverLog), + {ok,Calls} = cover:analyse(Mod,calls,function), + ok = cover:stop(), + {MFA,N} = lists:keyfind(MFA,1,Calls), + do_check_logs(CoverLogs,MFA,N); +do_check_logs([],_,_) -> + ok. + +fullname(Name) -> + {ok,Host} = inet:gethostname(), + list_to_atom(atom_to_list(Name) ++ "@" ++ Host). + +default_cover_file_content() -> + [{incl_mods,[?mod]}]. + +create_default_cover_file(Filename,Config) -> + create_cover_file(Filename,default_cover_file_content(),Config). + +create_cover_file(Filename,Terms,Config) -> + PrivDir = ?config(priv_dir,Config), + File = filename:join(PrivDir,Filename) ++ ".cover", + {ok,Fd} = file:open(File,[write]), + lists:foreach(fun(Term) -> + file:write(Fd,io_lib:format("~p.~n",[Term])) + end,Terms), + ok = file:close(Fd), + File. diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl new file mode 100644 index 0000000000..fdc3323f0a --- /dev/null +++ b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl @@ -0,0 +1,156 @@ +%%-------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. 
+%% +%% %CopyrightEnd% +%% +%%---------------------------------------------------------------------- +%% File: cover_SUITE.erl +%% +%% Description: +%% This file contains the test cases for the code coverage support +%% +%% @author Support +%% @doc Test of code coverage support in common_test +%% @end +%%---------------------------------------------------------------------- +%%---------------------------------------------------------------------- +-module(cover_SUITE). +-include_lib("common_test/include/ct.hrl"). + +-compile(export_all). + +%% Default timetrap timeout (set in init_per_testcase). +-define(default_timeout, ?t:minutes(1)). + +suite() -> + []. + +all() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + Config. + +init_per_testcase(_Case, Config) -> + Dog = test_server:timetrap(?default_timeout), + [{watchdog, Dog}|Config]. + +end_per_testcase(Case, Config) -> + %% try apply(?MODULE,Case,[cleanup,Config]) + %% catch error:undef -> ok + %% end, + + kill_slaves(Case,nodes()), + Dog=?config(watchdog, Config), + test_server:timetrap_cancel(Dog), + ok. + +%%%----------------------------------------------------------------- +%%% Test cases +break(_Config) -> + test_server:break(""), + ok. + +default(Config) -> + cover_compiled = code:which(cover_test_mod), + cover_test_mod:foo(), + ok. + +slave(Config) -> + cover_compiled = code:which(cover_test_mod), + cover_test_mod:foo(), + N1 = nodename(slave,1), + {ok,Node} = ct_slave:start(N1), + cover_compiled = rpc:call(Node,code,which,[cover_test_mod]), + rpc:call(Node,cover_test_mod,foo,[]), + {ok,Node} = ct_slave:stop(N1), + ok. + +slave_start_slave(Config) -> + cover_compiled = code:which(cover_test_mod), + cover_test_mod:foo(), + N1 = nodename(slave_start_slave,1), + N2 = nodename(slave_start_slave,2), + {ok,Node} = ct_slave:start(N1), + cover_compiled = rpc:call(Node,code,which,[cover_test_mod]), + rpc:call(Node,cover_test_mod,foo,[]), + {ok,Node2} = rpc:call(Node,ct_slave,start,[N2]), + rpc:call(Node2,cover_test_mod,foo,[]), + {ok,Node2} = rpc:call(Node,ct_slave,stop,[N2]), + {ok,Node} = ct_slave:stop(N1), + ok. + +cover_node_option(Config) -> + cover_compiled = code:which(cover_test_mod), + cover_test_mod:foo(), + Node = fullname(existing_node), + cover_compiled = rpc:call(Node,code,which,[cover_test_mod]), + rpc:call(Node,cover_test_mod,foo,[]), + ok. + +ct_cover_add_remove_nodes(Config) -> + cover_compiled = code:which(cover_test_mod), + cover_test_mod:foo(), + Node = fullname(existing_node), + Beam = rpc:call(Node,code,which,[cover_test_mod]), + false = (Beam == cover_compiled), + + rpc:call(Node,cover_test_mod,foo,[]), % should not be collected + {ok,[Node]} = ct_cover:add_nodes([Node]), + cover_compiled = rpc:call(Node,code,which,[cover_test_mod]), + rpc:call(Node,cover_test_mod,foo,[]), % should be collected + ok = ct_cover:remove_nodes([Node]), + rpc:call(Node,cover_test_mod,foo,[]), % should not be collected + + Beam = rpc:call(Node,code,which,[cover_test_mod]), + + ok. + +otp_9956(Config) -> + cover_compiled = code:which(?MODULE), + DataDir = ?config(data_dir,Config), + absolute = filename:pathtype(DataDir), + true = filelib:is_dir(DataDir), + ok. + + +%%%----------------------------------------------------------------- +%%% Internal +nodename(Case,N) -> + list_to_atom(nodeprefix(Case) ++ integer_to_list(N)). + +nodeprefix(Case) -> + atom_to_list(?MODULE) ++ "_" ++ atom_to_list(Case) ++ "_node". 
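The ct_cover_add_remove_nodes case in cover_SUITE above exercises the ct_cover node API from inside a running test. A hedged sketch of that pattern as it would look in a test case; the node alias extra_node and the module some_covered_mod are placeholders:

add_remove_nodes_sketch() ->
    {ok,Node} = ct_slave:start(extra_node),
    {ok,[Node]} = ct_cover:add_nodes([Node]),   % cover data collected from here on
    _ = rpc:call(Node, some_covered_mod, foo, []),
    ok = ct_cover:remove_nodes([Node]),         % collection from Node stops here
    {ok,Node} = ct_slave:stop(extra_node),
    ok.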
+ + +fullname(Name) -> + {ok,Host} = inet:gethostname(), + list_to_atom(atom_to_list(Name) ++ "@" ++ Host). + +kill_slaves(Case, [Node|Nodes]) -> + Prefix = nodeprefix(Case), + case lists:prefix(Prefix,atom_to_list(Node)) of + true -> + rpc:call(Node,erlang,halt,[]); + _ -> + ok + end, + kill_slaves(Case,Nodes); +kill_slaves(_,[]) -> + ok. diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE_data/.gitignore b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE_data/.gitignore new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE_data/.gitignore diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_test_mod.erl b/lib/common_test/test/ct_cover_SUITE_data/cover_test_mod.erl new file mode 100644 index 0000000000..d4f69452c3 --- /dev/null +++ b/lib/common_test/test/ct_cover_SUITE_data/cover_test_mod.erl @@ -0,0 +1,4 @@ +-module(cover_test_mod). +-compile(export_all). +foo() -> + ok. diff --git a/lib/common_test/test/ct_error_SUITE.erl b/lib/common_test/test/ct_error_SUITE.erl index 338e76264e..86ec71c98e 100644 --- a/lib/common_test/test/ct_error_SUITE.erl +++ b/lib/common_test/test/ct_error_SUITE.erl @@ -44,8 +44,12 @@ %% there will be clashes with logging processes etc). %%-------------------------------------------------------------------- init_per_suite(Config) -> - Config1 = ct_test_support:init_per_suite(Config), - Config1. + DataDir = ?config(data_dir, Config), + TestDir = filename:join(DataDir, "error/test/"), + CTH = filename:join(TestDir, "verify_config.erl"), + ct:pal("Compiling ~p: ~p", + [CTH,compile:file(CTH,[{outdir,TestDir},debug_info])]), + ct_test_support:init_per_suite([{path_dirs,[TestDir]} | Config]). end_per_suite(Config) -> ct_test_support:end_per_suite(Config). @@ -61,7 +65,8 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> [cfg_error, lib_error, no_compile, timetrap_end_conf, timetrap_normal, timetrap_extended, timetrap_parallel, - timetrap_fun, misc_errors]. + timetrap_fun, timetrap_fun_group, misc_errors, + config_restored]. groups() -> []. @@ -251,6 +256,24 @@ timetrap_fun(Config) when is_list(Config) -> %%%----------------------------------------------------------------- %%% +timetrap_fun_group(Config) when is_list(Config) -> + DataDir = ?config(data_dir, Config), + Join = fun(D, S) -> filename:join(D, "error/test/"++S) end, + Suites = [Join(DataDir, "timetrap_8_SUITE")], + {Opts,ERPid} = setup([{suite,Suites}], Config), + ok = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(timetrap_fun_group, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(timetrap_fun_group), + ok = ct_test_support:verify_events(TestEvents, Events, Config). + +%%%----------------------------------------------------------------- +%%% misc_errors(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), Join = fun(D, S) -> filename:join(D, "error/test/"++S) end, @@ -267,6 +290,24 @@ misc_errors(Config) when is_list(Config) -> TestEvents = events_to_check(misc_errors), ok = ct_test_support:verify_events(TestEvents, Events, Config). 
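The init_per_suite change to ct_error_SUITE above compiles the verify_config hook from the data directory and hands the output directory to the test node via path_dirs. A minimal sketch of that pattern for a suite's init_per_suite; the module name my_hook and the subdirectory are placeholders:

compile_helper_sketch(Config) ->
    DataDir = ?config(data_dir, Config),
    OutDir  = filename:join(DataDir, "some_dir/test/"),
    Src     = filename:join(OutDir, "my_hook.erl"),
    {ok,my_hook} = compile:file(Src, [{outdir,OutDir},debug_info]),
    [{path_dirs,[OutDir]} | Config].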
+%%%----------------------------------------------------------------- +%%% +config_restored(Config) when is_list(Config) -> + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, "error/test/config_restored_SUITE"), + {Opts,ERPid} = setup([{suite,Suite}, + {ct_hooks,[verify_config]}], + Config), + ok = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(config_restored, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(config_restored), + ok = ct_test_support:verify_events(TestEvents, Events, Config). %%%----------------------------------------------------------------- %%% HELP FUNCTIONS @@ -429,8 +470,7 @@ test_events(cfg_error) -> {'EXIT',{init_per_group_fails,g1}}}}}}], [{?eh,tc_start,{cfg_error_8_SUITE,{init_per_group,g2,[]}}}, - {?eh,tc_done,{cfg_error_8_SUITE, - {init_per_group,unknown,[]}, + {?eh,tc_done,{cfg_error_8_SUITE,{init_per_group,g2,[]}, {failed,{timetrap_timeout,2000}}}}, {?eh,tc_auto_skip,{cfg_error_8_SUITE,tc1, {failed,{cfg_error_8_SUITE,init_per_group, @@ -500,7 +540,7 @@ test_events(cfg_error) -> {?eh,tc_done,{cfg_error_8_SUITE,tc1,ok}}, {?eh,test_stats,{9,0,{0,14}}}, {?eh,tc_start,{cfg_error_8_SUITE,{end_per_group,g12,[]}}}, - {?eh,tc_done,{cfg_error_8_SUITE,{end_per_group,unknown,[]}, + {?eh,tc_done,{cfg_error_8_SUITE,{end_per_group,g12,[]}, {failed,{timetrap_timeout,2000}}}}], {?eh,tc_start,{cfg_error_8_SUITE,end_per_suite}}, @@ -971,11 +1011,423 @@ test_events(timetrap_fun) -> {?eh,stop_logging,[]} ]; +test_events(timetrap_fun_group) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{1,1,58}}, + {?eh,tc_start,{timetrap_8_SUITE,init_per_suite}}, + {?eh,tc_done,{timetrap_8_SUITE,init_per_suite,ok}}, + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g0,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g0,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{0,1,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{0,2,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g0,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g0,[]},ok}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g1,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g1,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{0,3,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{0,4,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g1,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g1,[]},ok}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g2,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g2,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc1}}, + {?eh,tc_done,{timetrap_8_SUITE,tc1, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{0,5,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{0,6,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g2,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g2,[]},ok}}], + + 
[{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g3,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g3,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc4}}, + {?eh,tc_done,{timetrap_8_SUITE,tc4, + {failed,{timetrap_timeout,{'$approx',2000}}}}}, + {?eh,test_stats,{0,7,{0,0}}}, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g1,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g1,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{0,8,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{0,9,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g1,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g1,[]},ok}}], + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g2,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g2,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc1}}, + {?eh,tc_done,{timetrap_8_SUITE,tc1, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{0,10,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{0,11,{0,0}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g2,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g2,[]},ok}}], + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g3,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g3,[]},ok}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g4,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g4,[]}, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc0, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{0,11,{0,1}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{0,11,{0,2}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,end_per_group, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g5,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g5,[]}, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc0, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{0,11,{0,3}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{0,11,{0,4}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,end_per_group, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g6,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g6,[]}, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc0, + {failed,{timetrap_8_SUITE,init_per_group, + {timetrap_timeout,'_'}}}}}, + {?eh,test_stats,{0,11,{0,5}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,init_per_group, + {timetrap_timeout,'_'}}}}}, + {?eh,test_stats,{0,11,{0,6}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,end_per_group, + {failed,{timetrap_8_SUITE,init_per_group, + {timetrap_timeout,'_'}}}}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g7,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g7,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + 
{?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{1,11,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g7,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g7,[]}, + {user_timetrap_error,{kaboom,'_'}}}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g8,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g8,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{2,11,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g8,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g8,[]}, + {failed,{timetrap_timeout,{'$approx',500}}}}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g9,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g9,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{3,11,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,test_stats,{3,12,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g9,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g9,[]},ok}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g10,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g10,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,test_stats,{3,13,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{4,13,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g10,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g10,[]},ok}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,g11,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,g11,[]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc3}}, + {?eh,tc_done,{timetrap_8_SUITE,tc3, + {failed,{timetrap_timeout,{'$approx',4000}}}}}, + {?eh,test_stats,{4,14,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{4,15,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,g11,[]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,g11,[]},ok}}], + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg0,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg0,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{4,16,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{4,17,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg0,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg0,[parallel]},ok}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg1,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg1,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{4,18,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{4,19,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg1,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg1,[parallel]},ok}}]}, + + {parallel, + 
[{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg2,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg2,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc1}}, + {?eh,tc_done,{timetrap_8_SUITE,tc1, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{4,20,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{4,21,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg2,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg2,[parallel]},ok}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg3,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg3,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc4}}, + {?eh,tc_done,{timetrap_8_SUITE,tc4, + {failed,{timetrap_timeout,{'$approx',2000}}}}}, + {?eh,test_stats,{4,22,{0,6}}}, + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg1,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg1,[parallel]}, + ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{4,23,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{4,24,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg1,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg1,[parallel]}, + ok}}]}, + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg2,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg2,[parallel]}, + ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc1}}, + {?eh,tc_done,{timetrap_8_SUITE,tc1, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{4,25,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{4,26,{0,6}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg2,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg2,[parallel]}, + ok}}]}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg3,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg3,[parallel]},ok}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg4,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg4,[parallel]}, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc0, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{4,26,{0,7}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{4,26,{0,8}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,end_per_group, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg5,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg5,[parallel]}, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc0, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{4,26,{0,9}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}, + {?eh,test_stats,{4,26,{0,10}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,end_per_group, + 
{failed,{timetrap_8_SUITE,init_per_group, + {user_timetrap_error,{kaboom,'_'}}}}}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg6,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg6,[parallel]}, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc0, + {failed,{timetrap_8_SUITE,init_per_group, + {timetrap_timeout,'_'}}}}}, + {?eh,test_stats,{4,26,{0,11}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,init_per_group, + {timetrap_timeout,'_'}}}}}, + {?eh,test_stats,{4,26,{0,12}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,end_per_group, + {failed,{timetrap_8_SUITE,init_per_group, + {timetrap_timeout,'_'}}}}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg7,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg7,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{5,26,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg7,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg7,[parallel]}, + {user_timetrap_error,{kaboom,'_'}}}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg8,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg8,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{6,26,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg8,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg8,[parallel]}, + {failed,{timetrap_timeout,{'$approx',500}}}}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg9,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg9,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {user_timetrap_error,{kaboom,'_'}}}}, + %% Due to parallelism only checking final test stat in group + {?eh,test_stats,{7,27,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg9,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg9,[parallel]},ok}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg10,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg10,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + %% Due to parallelism only checking final test stat in group + {?eh,test_stats,{8,28,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg10,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg10,[parallel]},ok}}]}, + + {parallel, + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,pg11,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,pg11,[parallel]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc3}}, + {?eh,tc_done,{timetrap_8_SUITE,tc3, + {failed,{timetrap_timeout,{'$approx',4000}}}}}, + {?eh,test_stats,{8,29,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc2}}, + {?eh,tc_done,{timetrap_8_SUITE,tc2, + {failed,{timetrap_timeout,{'$approx',500}}}}}, + {?eh,test_stats,{8,30,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,pg11,[parallel]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,pg11,[parallel]},ok}}]}, + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,sg1,[sequence]}}}, + 
{?eh,tc_done,{timetrap_8_SUITE,{init_per_group,sg1,[sequence]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{9,30,{0,12}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {user_timetrap_error,{kaboom,'_'}}}}, + {?eh,test_stats,{9,31,{0,12}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc1, + {failed,{timetrap_8_SUITE,tc0}}}}, + {?eh,test_stats,{9,31,{0,13}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,tc0}}}}, + {?eh,test_stats,{9,31,{0,14}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,sg1,[sequence]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,sg1,[sequence]},ok}}], + + [{?eh,tc_start,{timetrap_8_SUITE,{init_per_group,sg2,[sequence]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{init_per_group,sg2,[sequence]},ok}}, + {?eh,tc_start,{timetrap_8_SUITE,tc5}}, + {?eh,tc_done,{timetrap_8_SUITE,tc5,ok}}, + {?eh,test_stats,{10,31,{0,14}}}, + {?eh,tc_start,{timetrap_8_SUITE,tc0}}, + {?eh,tc_done,{timetrap_8_SUITE,tc0, + {failed,{timetrap_timeout,{'$approx',1000}}}}}, + {?eh,test_stats,{10,32,{0,14}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc1, + {failed,{timetrap_8_SUITE,tc0}}}}, + {?eh,test_stats,{10,32,{0,15}}}, + {?eh,tc_auto_skip,{timetrap_8_SUITE,tc2, + {failed,{timetrap_8_SUITE,tc0}}}}, + {?eh,test_stats,{10,32,{0,16}}}, + {?eh,tc_start,{timetrap_8_SUITE,{end_per_group,sg2,[sequence]}}}, + {?eh,tc_done,{timetrap_8_SUITE,{end_per_group,sg2,[sequence]},ok}}], + + {?eh,tc_start,{timetrap_8_SUITE,end_per_suite}}, + {?eh,tc_done,{timetrap_8_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]} + ]; + test_events(misc_errors) -> [ {?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, - {?eh,start_info,{1,1,7}}, + {?eh,start_info,{1,1,9}}, {?eh,tc_start,{misc_error_1_SUITE,ct_fail_1}}, {?eh,tc_done,{misc_error_1_SUITE,ct_fail_1, {failed,{error,{test_case_failed,{error,this_is_expected}}}}}}, @@ -1002,7 +1454,48 @@ test_events(misc_errors) -> {?eh,tc_start,{misc_error_1_SUITE,killed_by_signal_2}}, {?eh,tc_done,{misc_error_1_SUITE,killed_by_signal_2, {failed,testcase_aborted_or_killed}}}, - {?eh,test_stats,{0,7,{0,0}}}, + {parallel, + [{?eh,tc_start,{misc_error_1_SUITE,p1}}, + {?eh,tc_done,{misc_error_1_SUITE,p1,ok}}, + {?eh,tc_start,{misc_error_1_SUITE,p2}}, + {?eh,tc_done,{misc_error_1_SUITE,p2,ok}}]}, + {?eh,test_stats,{2,7,{0,0}}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]} + ]; + +test_events(config_restored) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{1,1,4}}, + {?eh,tc_start,{config_restored_SUITE,init_per_suite}}, + {?eh,tc_done,{config_restored_SUITE,init_per_suite,ok}}, + {?eh,tc_start,{config_restored_SUITE,to_tc}}, + {?eh,cth,{verify_config,post_end_per_testcase,{to_tc,diff_ok}}}, + {?eh,tc_done, + {config_restored_SUITE,to_tc,{failed,{timetrap_timeout,1000}}}}, + {?eh,test_stats,{0,1,{0,0}}}, + {?eh,tc_start,{config_restored_SUITE,exit_tc}}, + {?eh,cth,{verify_config,post_end_per_testcase,{exit_tc,diff_ok}}}, + {?eh,tc_done,{config_restored_SUITE,exit_tc, + {failed,{error,{test_case_failed,"Goodbye!"}}}}}, + {?eh,test_stats,{0,2,{0,0}}}, + [{?eh,tc_start,{config_restored_SUITE,{init_per_group,g1,[]}}}, + {?eh,tc_start,{config_restored_SUITE,to_tc}}, + {?eh,cth,{verify_config,post_end_per_testcase,{to_tc,diff_ok}}}, + {?eh,tc_done, + {config_restored_SUITE,to_tc,{failed,{timetrap_timeout,1000}}}}, + 
{?eh,test_stats,{0,3,{0,0}}}, + {?eh,tc_start,{config_restored_SUITE,exit_tc}}, + {?eh,cth,{verify_config,post_end_per_testcase,{exit_tc,diff_ok}}}, + {?eh,tc_done,{config_restored_SUITE,exit_tc, + {failed,{error,{test_case_failed,"Goodbye!"}}}}}, + {?eh,test_stats,{0,4,{0,0}}}, + {?eh,tc_start,{config_restored_SUITE,{end_per_group,g1,[]}}}, + {?eh,tc_done,{config_restored_SUITE,{end_per_group,g1,[]},ok}}], + {?eh,tc_start,{config_restored_SUITE,end_per_suite}}, + {?eh,tc_done,{config_restored_SUITE,end_per_suite,ok}}, {?eh,test_done,{'DEF','STOP_TIME'}}, {?eh,stop_logging,[]} ]. diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/config_restored_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/config_restored_SUITE.erl new file mode 100644 index 0000000000..fb47f2bcb9 --- /dev/null +++ b/lib/common_test/test/ct_error_SUITE_data/error/test/config_restored_SUITE.erl @@ -0,0 +1,175 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(config_restored_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% Function: suite() -> Info +%% Info = [tuple()] +%%-------------------------------------------------------------------- +suite() -> + [{timetrap,1000}]. + +%%-------------------------------------------------------------------- +%% Function: init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config1 = [{init_per_suite,?MODULE} | Config], + TabPid = spawn(fun() -> + ets:new(?MODULE, [named_table, set, public]), + receive _ -> ok end + end), + [{tab,TabPid} | Config1]. + +%%-------------------------------------------------------------------- +%% Function: end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%%-------------------------------------------------------------------- +end_per_suite(Config) -> + ets:delete(?MODULE), + exit(?config(tab, Config), kill), + ok. + +%%-------------------------------------------------------------------- +%% Function: init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%%-------------------------------------------------------------------- +init_per_group(GroupName, Config) -> + [{init_per_group,GroupName} | Config]. 
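A note on why init_per_suite in config_restored_SUITE above creates the ETS table from a spawned process rather than directly: an ETS table is deleted when its owning process terminates, and the process running init_per_suite does not live for the rest of the suite. A small free-standing illustration of the ownership rule; the table name and messages are made up:

ets_owner_sketch() ->
    Owner = spawn(fun() ->
                          ets:new(sketch_tab, [named_table, public, set]),
                          receive stop -> ok end   % keep the table alive
                  end),
    timer:sleep(100),                              % give the owner time to create it
    true = ets:insert(sketch_tab, {key, 1}),
    [{key,1}] = ets:lookup(sketch_tab, key),
    Owner ! stop,                                  % the table disappears with its owner
    ok.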
+ +%%-------------------------------------------------------------------- +%% Function: end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% Function: init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%%-------------------------------------------------------------------- +init_per_testcase(TC, Config) -> + Config1 = [{init_per_testcase,TC} | Config], + ets:insert(?MODULE, {config,Config}), + %% ct:pal("Config after init_per_testcase(~w) = ~p", [TC,Config1]), + Config1. + +%%-------------------------------------------------------------------- +%% Function: end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%%-------------------------------------------------------------------- +end_per_testcase(TC, Config) -> + ct:pal("Config in end_per_testcase(~w) = ~p", [TC,Config]), + [{_,MemConfig}] = ets:lookup(?MODULE, config), + diff_config(Config, MemConfig, [tc_status]), + ?MODULE = proplists:get_value(init_per_suite, Config), + TC = proplists:get_value(init_per_testcase, Config), + case ?config(tc_group_properties, Config) of + undefined -> + ok; + Props -> + GName = proplists:get_value(name, Props), + GName = proplists:get_value(init_per_group, Config) + end, + ok. + +%%-------------------------------------------------------------------- +%% Function: groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%%-------------------------------------------------------------------- +groups() -> + [{g1,[],[to_tc, exit_tc]}]. + +%%-------------------------------------------------------------------- +%% Function: all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%%-------------------------------------------------------------------- +all() -> + [to_tc, exit_tc, + {group,g1}]. + + +to_tc(Config) -> + %% ct:pal("Config for to_tc = ~p", [Config]), + [{_,MemConfig}] = ets:lookup(?MODULE, config), + diff_config(Config, MemConfig, []), + ct:sleep(2000). + +exit_tc(Config) -> + %% ct:pal("Config for exit_tc = ~p", [Config]), + [{_,MemConfig}] = ets:lookup(?MODULE, config), + diff_config(Config, MemConfig, []), + ct:fail("Goodbye!"). + + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS + +diff_config(Cfg1, Cfg2, DiffKeys) -> + diff_config(Cfg1, Cfg2, DiffKeys, []). 
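diff_config/3 above only seeds the four-argument worker whose clauses follow. Its intended contract, sketched with placeholder values: the first argument is the Config seen after the test case, the second is the copy saved by init_per_testcase, and the third lists keys (such as tc_status, which the framework adds once the case has run) that are allowed to be absent from the saved copy:

diff_config_usage_sketch() ->
    Saved = [{data_dir,"/tmp/data"},{priv_dir,"/tmp/priv"}],
    End   = [{tc_status,ok} | Saved],   % Common Test adds tc_status afterwards
    ok = diff_config(End, Saved, [tc_status]).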
+ +diff_config([{K,V} | Cfg1], Cfg2, DiffKeys, RemKeys) -> + case proplists:get_value(K, Cfg2) of + undefined -> + diff_config(Cfg1, Cfg2, proplists:delete(K, DiffKeys), RemKeys); + V -> + diff_config(Cfg1, proplists:delete(K, Cfg2), DiffKeys, RemKeys); + _Other -> + case proplists:is_defined(K, DiffKeys) of + true -> + diff_config(Cfg1, Cfg2, proplists:delete(K, DiffKeys), RemKeys); + false -> + diff_config(Cfg1, Cfg2, DiffKeys, [K | RemKeys]) + end + end; +diff_config([], [], [], []) -> + ct:pal("Diff ok!", []), + ok; +diff_config([], Cfg2, DiffKeys, RemKeys) -> + Result = {diff_failed, {cfg2,Cfg2}, {diffkeys,DiffKeys}, {remkeys,RemKeys}}, + ct:pal("Diff failed! Result = ~p", [Result]), + exit(Result). + diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/misc_error_1_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/misc_error_1_SUITE.erl index 99c3ed05ec..61f3fa7e59 100644 --- a/lib/common_test/test/ct_error_SUITE_data/error/test/misc_error_1_SUITE.erl +++ b/lib/common_test/test/ct_error_SUITE_data/error/test/misc_error_1_SUITE.erl @@ -96,7 +96,7 @@ end_per_testcase(_TestCase, _Config) -> %% N = integer() | forever %%-------------------------------------------------------------------- groups() -> - []. + [{p,[parallel],[p1,p2]}]. %%-------------------------------------------------------------------- %% Function: all() -> GroupsAndTestCases | {skip,Reason} @@ -107,7 +107,8 @@ groups() -> %%-------------------------------------------------------------------- all() -> [ct_fail_1, ct_fail_2, ct_fail_3, ts_fail_1, ts_fail_2, - killed_by_signal_1, killed_by_signal_2]. + killed_by_signal_1, killed_by_signal_2, + {group,p}]. ct_fail_1(_) -> ct:fail({error,this_is_expected}), @@ -152,3 +153,10 @@ killed_by_signal_2(_) -> end), ct:sleep(1000), exit(this_should_not_be_seen). + +p1(_) -> + {error,parallel_group} = ct:abort_current_testcase(aborted), + ok. + +p2(_) -> + receive after 1000 -> ok end. diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_8_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_8_SUITE.erl new file mode 100644 index 0000000000..ff138f38b5 --- /dev/null +++ b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_8_SUITE.erl @@ -0,0 +1,258 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(timetrap_8_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +-define(TO, 4). + +%%-------------------------------------------------------------------- +%% Function: suite() -> Info +%% Info = [tuple()] +%%-------------------------------------------------------------------- +suite() -> + [{timetrap,{timetrap_utils,timetrap_val,[{seconds,?TO}]}}]. 
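timetrap_8_SUITE above exercises all three ways a timetrap can be given in an info list: a plain time value, an {M,F,A} that computes one, and a fun. A compact sketch of the three forms, reusing the timetrap_utils helper module referenced by the suite; any of these lists could be returned from suite/0, group/1 or a test case info function:

timetrap_forms_sketch() ->
    [
     [{timetrap,{seconds,4}}],                                    % fixed value
     [{timetrap,{timetrap_utils,timetrap_val,[{seconds,4}]}}],    % {M,F,A}
     [{timetrap,fun() -> timetrap_utils:timetrap_val(4000) end}]  % fun
    ].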
+ +%%-------------------------------------------------------------------- +%% Function: init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config. + +%%-------------------------------------------------------------------- +%% Function: end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% Function: init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%%-------------------------------------------------------------------- +init_per_group(G6, Config) when G6==g6; G6==pg6 -> + ct:sleep({seconds,1}), + Config; +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% Function: end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%%-------------------------------------------------------------------- +end_per_group(G7or8, _Config) when G7or8==g7; G7or8==pg7; G7or8==g8; G7or8==pg8 -> + ct:sleep({seconds,5}), + ok; +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% Function: init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%%-------------------------------------------------------------------- +init_per_testcase(_, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% Function: end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%%-------------------------------------------------------------------- +end_per_testcase(_, _Config) -> + ok. 
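The g4-g6 and pg4-pg6 groups defined below separate two failure modes that look similar in the suite but are reported differently: a timetrap fun or MFA that crashes yields a user_timetrap_error for the configuration function, while a configuration function that merely overruns its timetrap yields a timetrap_timeout; in both cases the group's test cases are auto-skipped. A sketch of the two group specifications, with placeholder group names:

group(crashing_timetrap_group) ->
    %% The timetrap fun itself exits: init_per_group is reported with
    %% {user_timetrap_error,{kaboom,Stack}}.
    [{timetrap,fun() -> exit(kaboom) end}];
group(overrunning_init_group) ->
    %% init_per_group sleeps past this limit: reported as
    %% {failed,{timetrap_timeout,_}}.
    [{timetrap,500}].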
+ +%%-------------------------------------------------------------------- +%% Function: groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%%-------------------------------------------------------------------- +groups() -> + [ + {g0,[],[tc0,tc2]}, % group override suite and tc overrides group + {g1,[],[tc0,tc2]}, % group override suite and tc overrides group + {g2,[],[tc1,tc2]}, % tc override group + {g3,[],[tc4,{group,g1},{group,g2}]}, % subgroup override group + {g4,[],[tc0,tc2]}, % exit during init_per_group + {g5,[],[tc0,tc2]}, % exit during init_per_group + {g6,[],[tc0,tc2]}, % timeout during init_per_group + {g7,[],[tc5]}, % exit during end_per_group + {g8,[],[tc5]}, % timeout during end_per_group + {g9,[],[tc5,tc0]}, % exit during testcase + {g10,[],[tc0,tc5]}, % exit during testcase + {g11,[],[tc3,tc2]}, % suite is valid if nothing else is specified + {pg0,[parallel],[tc0,tc2]}, % group override suite and tc overrides group + {pg1,[parallel],[tc0,tc2]}, % group override suite and tc overrides group + {pg2,[parallel],[tc1,tc2]}, % tc override group + {pg3,[parallel],[tc4,{group,pg1},{group,pg2}]}, % subgroup override group + {pg4,[parallel],[tc0,tc2]}, % exit during init_per_group + {pg5,[parallel],[tc0,tc2]}, % exit during init_per_group + {pg6,[parallel],[tc0,tc2]}, % timeout during init_per_group + {pg7,[parallel],[tc5]}, % exit during end_per_group + {pg8,[parallel],[tc5]}, % timeout during end_per_group + {pg9,[parallel],[tc5,tc0]}, % exit during testcase + {pg10,[parallel],[tc0,tc5]},% exit during testcase + {pg11,[parallel],[tc3,tc2]},% suite is valid if nothing else is specified + {sg1,[sequence],[tc5,tc0,tc1,tc2]}, % exit during sequencial testcase + {sg2,[sequence],[tc5,tc0,tc1,tc2]}].% timeout during sequencial testcase + +group(g0) -> + [{timetrap,{timetrap_utils,timetrap_val,[{seconds,1}]}}]; +group(g1) -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(1000) end}]; +group(g2) -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(3000) end}]; +group(g3) -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(2000) end}]; +group(g4) -> + [{timetrap,{timetrap_utils,timetrap_exit,[kaboom]}}]; +group(g5) -> + [{timetrap,fun() -> exit(kaboom) end}]; +group(g6) -> + [{timetrap,{timetrap_utils,timetrap_val,[500]}}]; +group(g7) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) end}]; +group(g8) -> + [{timetrap,{timetrap_utils,timetrap_val,[500]}}]; +group(g9) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) end}]; +group(g10) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) end}]; +group(g11) -> + []; +group(pg0) -> + [{timetrap,{timetrap_utils,timetrap_val,[{seconds,1}]}}]; +group(pg1) -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(1000) end}]; +group(pg2) -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(3000) end}]; +group(pg3) -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(2000) end}]; +group(pg4) -> + [{timetrap,{timetrap_utils,timetrap_exit,[kaboom]}}]; +group(pg5) -> + [{timetrap,fun() -> exit(kaboom) end}]; +group(pg6) -> + [{timetrap,{timetrap_utils,timetrap_val,[500]}}]; +group(pg7) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) 
end}]; +group(pg8) -> + [{timetrap,{timetrap_utils,timetrap_val,[500]}}]; +group(pg9) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) end}]; +group(pg10) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) end}]; +group(pg11) -> + []; +group(sg1) -> + [{timetrap,fun() -> ct:sleep(1000),exit(kaboom) end}]; +group(sg2) -> + [{timetrap,{timetrap_utils,timetrap_val,[{seconds,1}]}}]. + + +%%-------------------------------------------------------------------- +%% Function: all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%%-------------------------------------------------------------------- +all() -> + [ + {group,g0}, + {group,g1}, + {group,g2}, + {group,g3}, + {group,g4}, + {group,g5}, + {group,g6}, + {group,g7}, + {group,g8}, + {group,g9}, + {group,g10}, + {group,g11}, + {group,pg0}, + {group,pg1}, + {group,pg2}, + {group,pg3}, + {group,pg4}, + {group,pg5}, + {group,pg6}, + {group,pg7}, + {group,pg8}, + {group,pg9}, + {group,pg10}, + {group,pg11}, + {group,sg1}, + {group,sg2}]. + + + +tc0(_) -> + ct:comment("TO set by group"), + ct:sleep({seconds,5}), + ok. + +tc1() -> + [{timetrap,{timetrap_utils,timetrap_val,[1000]}}]. +tc1(_) -> + ct:comment("TO after 1 sec"), + ct:sleep({seconds,2}), + ok. + +tc2() -> + [{timetrap,fun() -> timetrap_utils:timetrap_val(500) end}]. +tc2(_) -> + ct:comment("TO after 0.5 sec"), + ct:sleep({seconds,2}), + ok. + +tc3(_) -> + ct:comment(io_lib:format("TO after ~w sec", [?TO])), + ct:sleep({seconds,5}), + ok. + +tc4(_) -> + ct:comment("TO set by group"), + ct:sleep({seconds,5}), + ok. + +tc5(_) -> + ct:comment("No TO in this testcase, maybe later"), + ok. diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl new file mode 100644 index 0000000000..40a54b9f99 --- /dev/null +++ b/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl @@ -0,0 +1,239 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%% @doc Common Test Example Suite Callback module. +%%% +%%% <p>This module gives an example of a common test CTH (Common Test Hook). +%%% There are many ways to add a CTH to a test run, you can do it either in +%%% the command line using -ct_hook, in a test spec using +%%% {ct_hook,M} or in the suite it self by returning ct_hook +%%% from either suite/0, init_per_suite/1, init_per_group/2 and +%%% init_per_testcase/2. The scope of the CTH is determined by where is it +%%% started. If it is started in the command line or test spec then it will +%%% be stopped at the end of all tests. If it is started in init_per_suite, +%%% it will be stopped after end_per_suite and so on. See terminate +%%% documentation for a table describing the scoping machanics. 
+%%%
+%%% All callbacks except init/1 in a CTH are optional.</p>
+
+-module(verify_config).
+
+%% CT Hooks
+-export([id/1]).
+-export([init/2]).
+
+-export([pre_init_per_suite/3]).
+-export([post_init_per_suite/4]).
+-export([pre_end_per_suite/3]).
+-export([post_end_per_suite/4]).
+
+-export([pre_init_per_group/3]).
+-export([post_init_per_group/4]).
+-export([pre_end_per_group/3]).
+-export([post_end_per_group/4]).
+
+-export([pre_init_per_testcase/3]).
+-export([post_end_per_testcase/4]).
+
+-export([on_tc_fail/3]).
+-export([on_tc_skip/3]).
+
+-export([terminate/1]).
+
+-include_lib("common_test/src/ct_util.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+-type config() :: proplists:proplist().
+-type reason() :: term().
+-type skip_or_fail() :: {skip, reason()} |
+                        {auto_skip, reason()} |
+                        {fail, reason()} |
+                        {'EXIT',reason()}.
+
+-record(state, { id = ?MODULE :: term()}).
+
+%% @doc Always called before any other callback function. Use this to initiate
+%% any common state. It should return a state for this CTH.
+-spec init(Id :: term(), Opts :: proplists:proplist()) ->
+    {ok, State :: #state{}}.
+init(Id, Opts) ->
+    {ok,Opts}.
+
+%% @doc The ID is used to uniquely identify a CTH instance. If two CTHs
+%% return the same ID, the second CTH is ignored. This function should NOT
+%% have any side effects as it might be called multiple times by common test.
+-spec id(Opts :: proplists:proplist()) ->
+    Id :: term().
+id(Opts) ->
+    now().
+
+%% @doc Called before init_per_suite is called. Note that this callback is
+%% only called if the CTH is added before init_per_suite is run (e.g. in a test
+%% specification, suite/0 function etc).
+%% You can change the config in this function.
+-spec pre_init_per_suite(Suite :: atom(),
+                         Config :: config(),
+                         State :: #state{}) ->
+    {config() | skip_or_fail(), NewState :: #state{}}.
+pre_init_per_suite(Suite,Config,State) ->
+    {Config, State}.
+
+%% @doc Called after init_per_suite.
+%% You can change the return value in this function.
+-spec post_init_per_suite(Suite :: atom(),
+                          Config :: config(),
+                          Return :: config() | skip_or_fail(),
+                          State :: #state{}) ->
+    {config() | skip_or_fail(), NewState :: #state{}}.
+post_init_per_suite(Suite,Config,Return,State) ->
+    {Return, State}.
+
+%% @doc Called before end_per_suite. The config/state can be changed here,
+%% though it will only affect the *end_per_suite function.
+-spec pre_end_per_suite(Suite :: atom(),
+                        Config :: config() | skip_or_fail(),
+                        State :: #state{}) ->
+    {ok | skip_or_fail(), NewState :: #state{}}.
+pre_end_per_suite(Suite,Config,State) ->
+    {Config, State}.
+
+%% @doc Called after end_per_suite. Note that the config cannot be
+%% changed here, only the status of the suite.
+-spec post_end_per_suite(Suite :: atom(),
+                         Config :: config(),
+                         Return :: term(),
+                         State :: #state{}) ->
+    {ok | skip_or_fail(), NewState :: #state{}}.
+post_end_per_suite(Suite,Config,Return,State) ->
+    {Return, State}.
+
+%% @doc Called before each init_per_group.
+%% You can change the config in this function.
+-spec pre_init_per_group(Group :: atom(),
+                         Config :: config(),
+                         State :: #state{}) ->
+    {config() | skip_or_fail(), NewState :: #state{}}.
+pre_init_per_group(Group,Config,State) ->
+    {Config, State}.
+
+%% @doc Called after each init_per_group.
+%% You can change the return value in this function.
+-spec post_init_per_group(Group :: atom(), + Config :: config(), + Return :: config() | skip_or_fail(), + State :: #state{}) -> + {config() | skip_or_fail(), NewState :: #state{}}. +post_init_per_group(Group,Config,Return,State) -> + {Return, State}. + +%% @doc Called after each end_per_group. The config/state can be changed here, +%% though it will only affect the *end_per_group functions. +-spec pre_end_per_group(Group :: atom(), + Config :: config() | skip_or_fail(), + State :: #state{}) -> + {ok | skip_or_fail(), NewState :: #state{}}. +pre_end_per_group(Group,Config,State) -> + {Config, State}. + +%% @doc Called after each end_per_group. Note that the config cannot be +%% changed here, only the status of the group. +-spec post_end_per_group(Group :: atom(), + Config :: config(), + Return :: term(), + State :: #state{}) -> + {ok | skip_or_fail(), NewState :: #state{}}. +post_end_per_group(Group,Config,Return,State) -> + {Return, State}. + +%% @doc Called before each test case. +%% You can change the config in this function. +-spec pre_init_per_testcase(TC :: atom(), + Config :: config(), + State :: #state{}) -> + {config() | skip_or_fail(), NewState :: #state{}}. +pre_init_per_testcase(TC,Config,State) -> + {Config, State}. + +%% @doc Called after each test case. Note that the config cannot be +%% changed here, only the status of the test case. +-spec post_end_per_testcase(TC :: atom(), + Config :: config(), + Return :: term(), + State :: #state{}) -> + {ok | skip_or_fail(), NewState :: #state{}}. +post_end_per_testcase(TC,Config,Return,State) -> + %% check that config has been restored + ct:pal("Config in verify_config:post_end_per_testcase(~w) = ~p", + [TC,Config]), + [{_,MemConfig}] = ets:lookup(config_restored_SUITE, config), + try config_restored_SUITE:diff_config(Config, MemConfig, [tc_status]) of + ok -> + gen_event:notify( + ?CT_EVMGR_REF, #event{ name = cth, node = node(), + data = {?MODULE, post_end_per_testcase, + {TC,diff_ok}}}) + catch + _:_ -> + gen_event:notify( + ?CT_EVMGR_REF, #event{ name = cth, node = node(), + data = {?MODULE, post_end_per_testcase, + {TC,diff_failed}}}) + end, + {Return, State}. + +%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group, +%% post_end_per_group and post_end_per_tc if the suite, group or test case failed. +%% This function should be used for extra cleanup which might be needed. +%% It is not possible to modify the config or the status of the test run. +-spec on_tc_fail(TC :: init_per_suite | end_per_suite | + init_per_group | end_per_group | atom(), + Reason :: term(), State :: #state{}) -> + NewState :: #state{}. +on_tc_fail(TC, Reason, State) -> + State. + +%% @doc Called when a test case is skipped by either user action +%% or due to an init function failing. Test case can be +%% end_per_suite, init_per_group, end_per_group and the actual test cases. +-spec on_tc_skip(TC :: end_per_suite | + init_per_group | end_per_group | atom(), + {tc_auto_skip, {failed, {Mod :: atom(), Function :: atom(), Reason :: term()}}} | + {tc_user_skip, {skipped, Reason :: term()}}, + State :: #state{}) -> + NewState :: #state{}. +on_tc_skip(TC, Reason, State) -> + State. + +%% @doc Called when the scope of the CTH is done, this depends on +%% when the CTH was specified. This translation table describes when this +%% function is called. 
+%%
+%% | Started in          | terminate called        |
+%% |---------------------|-------------------------|
+%% | command_line        | after all tests are run |
+%% | test spec           | after all tests are run |
+%% | suite/0             | after SUITE is done     |
+%% | init_per_suite/1    | after SUITE is done     |
+%% | init_per_group/2    | after group is done     |
+%% |-----------------------------------------------|
+%%
+-spec terminate(State :: #state{}) ->
+    term().
+terminate(State) ->
+    ok.
diff --git a/lib/common_test/test/ct_group_leader_SUITE.erl b/lib/common_test/test/ct_group_leader_SUITE.erl
new file mode 100644
index 0000000000..cde3061d6a
--- /dev/null
+++ b/lib/common_test/test/ct_group_leader_SUITE.erl
@@ -0,0 +1,181 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2012. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File: ct_group_leader_SUITE
+%%%
+%%% Description:
+%%%
+%%% Test the group leader functionality in the test_server application.
+%%%-------------------------------------------------------------------
+-module(ct_group_leader_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+-define(eh, ct_test_support_eh).
+
+%%--------------------------------------------------------------------
+%% TEST SERVER CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
+
+%%--------------------------------------------------------------------
+%% Description: Since Common Test starts another Test Server
+%% instance, the tests need to be performed on a separate node (or
+%% there will be clashes with logging processes etc).
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+    Config1 = ct_test_support:init_per_suite(Config),
+    Config1.
+
+end_per_suite(Config) ->
+    ct_test_support:end_per_suite(Config).
+
+init_per_testcase(TestCase, Config) ->
+    ct_test_support:init_per_testcase(TestCase, Config).
+
+end_per_testcase(TestCase, Config) ->
+    ct_test_support:end_per_testcase(TestCase, Config).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+    [
+     basic
+    ].
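Per the scope table at the end of verify_config above, the place where a hook is installed decides when its terminate/1 runs. Two common install points, sketched with the hook from this commit; the ct_run line is illustrative only:

%% From the command line, the hook lives for the whole test run:
%%   ct_run -suite config_restored_SUITE -ct_hooks verify_config
%%
%% From suite/0, the hook is scoped to that suite and terminate/1 runs
%% once the suite is done:
suite() ->
    [{ct_hooks,[verify_config]}].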
+ +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%%%----------------------------------------------------------------- +%%% +basic(Config) -> + TC = basic, + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, "group_leader_SUITE"), + {Opts,ERPid} = setup([{suite,Suite},{label,TC}], Config), + SuiteLog = execute(TC, Opts, ERPid, Config), + {ok,Data} = file:read_file(SuiteLog), + Lines = binary:split(Data, <<"\n">>, [global]), + {ok,RE} = re:compile("(\\S+):(\\S+)$"), + Cases0 = [begin + {match,[M,F]} = re:run(Case, RE, [{capture,all_but_first,list}]), + {list_to_atom(M),list_to_atom(F)} + end || <<"=case ",Case/binary>> <- Lines], + Cases = [MF || {_,F}=MF <- Cases0, + F =/= init_per_suite, + F =/= end_per_suite, + F =/= init_per_group, + F =/= end_per_group], + io:format("~p\n", [Cases]), + [] = verify_cases(events_to_check(TC), Cases, false), + ok. + +verify_cases([{parallel,P}|Ts], Cases0, Par) -> + Cases = verify_cases(P, Cases0, true), + verify_cases(Ts, Cases, Par); +verify_cases([{?eh,tc_done,{M,F,_}}|Ts], Cases0, false) -> + [{M,F}|Cases] = Cases0, + verify_cases(Ts, Cases, false); +verify_cases([{?eh,tc_done,{M,F,_}}|Ts], Cases0, true) -> + case lists:member({M,F}, Cases0) of + true -> + Cases = Cases0 -- [{M,F}], + verify_cases(Ts, Cases, true); + false -> + io:format("~p not found\n", [{M,F}]), + ?t:fail() + end; +verify_cases([{?eh,_,_}|Ts], Cases, Par) -> + verify_cases(Ts, Cases, Par); +verify_cases([], Cases, _) -> + Cases; +verify_cases([List|Ts], Cases0, Par) when is_list(List) -> + Cases = verify_cases(List, Cases0, false), + verify_cases(Ts, Cases, Par). + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- + +setup(Test, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. + +execute(Name, Opts, ERPid, Config) -> + ok = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(Name, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(Name), + ok = ct_test_support:verify_events(TestEvents, Events, Config), + {event,tc_logfile,_,{_,File}} = + lists:keyfind(tc_logfile, 2, [Ev || {?eh,Ev} <- Events]), + LogDir = filename:dirname(File), + filename:join(LogDir, "suite.log"). + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). 
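basic/1 above recovers the executed test cases from suite.log by taking the lines that start with "=case" and splitting on the final colon. A small self-contained check of that parsing step; the example line is made up but follows the format the code expects:

parse_case_line_sketch() ->
    Line = <<"=case group_leader_SUITE:p1">>,
    <<"=case ",Case/binary>> = Line,
    {ok,RE} = re:compile("(\\S+):(\\S+)$"),
    {match,[M,F]} = re:run(Case, RE, [{capture,all_but_first,list}]),
    {group_leader_SUITE,p1} = {list_to_atom(M),list_to_atom(F)},
    ok.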
+ +%%%----------------------------------------------------------------- +%%% TEST EVENTS +%%%----------------------------------------------------------------- + +events_to_check(_Test) -> + [{?eh,tc_done,{group_leader_SUITE,tc1,ok}}, + {parallel,[{?eh,tc_start,{group_leader_SUITE,p1}}, + {?eh,tc_done,{group_leader_SUITE,p1,ok}}, + {?eh,tc_start,{group_leader_SUITE,p2}}, + {?eh,tc_done,{group_leader_SUITE,p2,ok}}]}, + {?eh,tc_done,{group_leader_SUITE,p_restart_my_io_server,ok}}, + {?eh,tc_done,{group_leader_SUITE,p3,ok}}, + {parallel,[ + {?eh,tc_start,{group_leader_SUITE,p10}}, + {?eh,tc_start,{group_leader_SUITE,p11}}, + {?eh,tc_done,{group_leader_SUITE,p10,ok}}, + {?eh,tc_done,{group_leader_SUITE,p11,ok}}, + [{?eh,tc_done,{group_leader_SUITE,s1,ok}}, + {?eh,tc_done,{group_leader_SUITE,s2,ok}}, + {?eh,tc_done,{group_leader_SUITE,s3,ok}}], + {?eh,tc_start,{group_leader_SUITE,p12}}, + {?eh,tc_done,{group_leader_SUITE,p12,ok}}, + [{?eh,tc_done,{group_leader_SUITE,s4,ok}}, + {?eh,tc_done,{group_leader_SUITE,s5,ok}}], + {?eh,tc_start,{group_leader_SUITE,p13}}, + {?eh,tc_done,{group_leader_SUITE,p13,ok}} ]}, + {?eh,tc_done,{group_leader_SUITE,cap1,ok}}, + {?eh,tc_done,{group_leader_SUITE,cap2,ok}}, + {parallel,[{?eh,tc_start,{group_leader_SUITE,cap1}}, + {?eh,tc_done,{group_leader_SUITE,cap1,ok}}, + {?eh,tc_start,{group_leader_SUITE,cap2}}, + {?eh,tc_done,{group_leader_SUITE,cap2,ok}}]}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]} + ]. diff --git a/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl b/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl new file mode 100644 index 0000000000..3f1844b4ae --- /dev/null +++ b/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl @@ -0,0 +1,252 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(group_leader_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{timetrap,{seconds,10}}]. + +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + start_my_io_server(), + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + my_io_server ! die, + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + [{p,[parallel],[p1,p2]}, + {p_restart,[parallel],[p_restart_my_io_server]}, + {seq,[],[s1,s2,s3]}, + {seq2,[],[s4,s5]}, + {seq_in_par,[parallel],[p10,p11,{group,seq},p12,{group,seq2},p13]}, + {capture_io,[parallel],[cap1,cap2]}]. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [tc1,{group,p},{group,p_restart},p3, + {group,seq_in_par}, + cap1,cap2, + {group,capture_io}]. + +tc1(_C) -> + ok. + +p1(_) -> + %% OTP-10101: + %% + %% External apps/processes started by init_per_suite (common operation), + %% will inherit the group leader of the init_per_suite process, i.e. the + %% test_server test case control process (executing run_test_case_msgloop/7). + %% If, later, a parallel test case triggers the external app to print with + %% e.g. io:format() (also common operation), the calling process will hang! + %% The reason for this is that a parallel test case has a dedicated IO + %% server process, other than the central test case control process. The + %% latter process is not executing run_test_case_msgloop/7 and will not + %% respond to IO messages. 
The process is still group leader for the
+    %% external app, however, which is wrong. It's the IO process for the
+    %% parallel test case that should be group leader - but only for the
+    %% particular invocation, since other parallel test cases could be
+    %% invoking the external app too.
+    print("hej\n").
+
+p2(_) ->
+    print("hopp\n").
+
+p_restart_my_io_server(_) ->
+    %% Restart the IO server and change its group leader. This used
+    %% to set the group leader to a process that would soon die.
+    Ref = erlang:monitor(process, my_io_server),
+    my_io_server ! die,
+    receive
+        {'DOWN',Ref,_,_,_} ->
+            start_my_io_server()
+    end.
+
+p3(_) ->
+    %% OTP-10125. This would crash since the group leader process
+    %% for the my_io_server had died.
+    print("hoppsan\n").
+
+print(String) ->
+    my_io_server ! {print,self(),String},
+    receive
+        {printed,String} ->
+            ok
+    end.
+
+start_my_io_server() ->
+    Parent = self(),
+    Pid = spawn(fun() -> my_io_server(Parent) end),
+    receive
+        {Pid,started} ->
+            io:format("~p\n", [process_info(Pid)]),
+            ok
+    end.
+
+my_io_server(Parent) ->
+    register(my_io_server, self()),
+    Parent ! {self(),started},
+    my_io_server_loop().
+
+my_io_server_loop() ->
+    receive
+        {print,From,String} ->
+            io:put_chars(String),
+            From ! {printed,String},
+            my_io_server_loop();
+        die ->
+            ok
+    end.
+
+p10(_) ->
+    receive after 1 -> ok end.
+
+p11(_) ->
+    ok.
+
+p12(_) ->
+    ok.
+
+p13(_) ->
+    ok.
+
+s1(_) ->
+    ok.
+
+s2(_) ->
+    ok.
+
+s3(_) ->
+    ok.
+
+s4(_) ->
+    ok.
+
+s5(_) ->
+    ok.
+
+cap1(_) ->
+    ct:capture_start(),
+    IO = gen_io(cap1, 10, []),
+    ct:capture_stop(),
+    IO = ct:capture_get(),
+    ok.
+
+cap2(_) ->
+    ct:capture_start(),
+    {Pid,Ref} = spawn_monitor(fun() ->
+                                      exit(gen_io(cap2, 42, []))
+                              end),
+    receive
+        {'DOWN',Ref,process,Pid,IO} ->
+            ct:capture_stop(),
+            IO = ct:capture_get(),
+            ok
+    end.
+
+gen_io(_, 0, Acc) ->
+    lists:reverse(Acc);
+gen_io(Label, N, Acc) ->
+    S = lists:flatten(io_lib:format("~s: ~p\n", [Label,N])),
+    io:put_chars(S),
+    gen_io(Label, N-1, [S|Acc]).
diff --git a/lib/common_test/test/ct_hooks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE.erl
index 405df1e978..796a0832d7 100644
--- a/lib/common_test/test/ct_hooks_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE.erl
@@ -64,7 +64,7 @@ end_per_testcase(TestCase, Config) ->
 
 suite() ->
-    [{timetrap,{seconds,20}}].
+    [{timetrap,{minutes,1}}].
 
 all() ->
     all(suite).
diff --git a/lib/common_test/test/ct_master_SUITE.erl b/lib/common_test/test/ct_master_SUITE.erl
index 27243a0067..0f336d2d79 100644
--- a/lib/common_test/test/ct_master_SUITE.erl
+++ b/lib/common_test/test/ct_master_SUITE.erl
@@ -109,7 +109,7 @@ ct_master_test(Config) when is_list(Config) ->
 
     ERPid = ct_test_support:start_event_receiver(Config),
 
-    [{TSFile,ok}] = run_test(ct_master_test, FileName, Config),
+    [{[TSFile],ok}] = run_test(ct_master_test, FileName, Config),
 
     Events = ct_test_support:get_events(ERPid, Config),
 
@@ -117,14 +117,8 @@ ct_master_test(Config) when is_list(Config) ->
                                reformat(Events, ?eh),
                                PrivDir, []),
 
-    find_events(NodeNames, [{tc_start,{master_SUITE,init_per_suite}},
-                            {tc_start,{master_SUITE,first_testcase}},
-                            {tc_start,{master_SUITE,second_testcase}},
-                            {tc_start,{master_SUITE,third_testcase}},
-                            {tc_start,{master_SUITE,end_per_suite}}],
-                Events),
-
-    ok.
+    TestEvents = events_to_check(ct_master_test),
+    ok = find_events(NodeNames, TestEvents, Events, Config).
%%%----------------------------------------------------------------- %%% HELP FUNCTIONS @@ -153,13 +147,18 @@ make_spec(DataDir, FileName, NodeNames, Suites, Config) -> CM = [{config,master,filename:join(DataDir,"master/config.txt")}], + Env = [{"THIS_MUST_BE_SET","yes"}, + {"SO_MUST_THIS","value"}], NS = lists:map( fun(NodeName) -> {init,NodeName,[ {node_start,[{startup_functions,[]}, - {monitor_master,true}]}, - {eval,{erlang,nodes,[]}} - ] + {monitor_master,true}, + {boot_timeout,10}, + {init_timeout,10}, + {startup_timeout,10}, + {env,Env}]}, + {eval,{erlang,nodes,[]}}] } end, NodeNames), @@ -196,13 +195,12 @@ get_log_dir(_,PrivDir,NodeName) -> run_test(_Name, FileName, Config) -> %% run the test twice, using different html versions - [{FileName,ok}] = ct_test_support:run({ct_master,run,[FileName]}, - [{ct_master,basic_html,[true]}], - Config), - timer:sleep(5000), - [{FileName,ok}] = ct_test_support:run({ct_master,run,[FileName]}, - [{ct_master,basic_html,[false]}], - Config). + [{[FileName],ok}] = ct_test_support:run({ct_master,run,[FileName]}, + [{ct_master,basic_html,[true]}], + Config), + [{[FileName],ok}] = ct_test_support:run({ct_master,run,[FileName]}, + [{ct_master,basic_html,[false]}], + Config). reformat(Events, EH) -> ct_test_support:reformat(Events, EH). @@ -210,28 +208,26 @@ reformat(Events, EH) -> %%%----------------------------------------------------------------- %%% TEST EVENTS %%%----------------------------------------------------------------- -find_events([], _CheckEvents, _) -> - ok; -find_events([NodeName|NodeNames],CheckEvents,AllEvents) -> - find_events(NodeNames, CheckEvents, - remove_events(add_host(NodeName),CheckEvents, AllEvents, [])). - -remove_events(Node,[{Name,Data} | RestChecks], - [{?eh,#event{ name = Name, node = Node, data = Data }}|RestEvs], - Acc) -> - remove_events(Node, RestChecks, RestEvs, Acc); -remove_events(Node, Checks, [Event|RestEvs], Acc) -> - remove_events(Node, Checks, RestEvs, [Event | Acc]); -remove_events(_Node, [], [], Acc) -> - lists:reverse(Acc); -remove_events(Node, Events, [], Acc) -> - test_server:format("Could not find events: ~p in ~p for node ~p", - [Events, lists:reverse(Acc), Node]), - exit(event_not_found). + +find_events(NodeNames, TestEvents, Events, Config) -> + [begin + Node = add_host(Node0), + io:format("Searching for events for node: ~s", [Node]), + ok = ct_test_support:verify_events(TestEvents, Events, Node, Config), + io:nl() + end || Node0 <- NodeNames], + ok. add_host(NodeName) -> {ok, HostName} = inet:gethostname(), list_to_atom(atom_to_list(NodeName)++"@"++HostName). -expected_events(_) -> - []. +events_to_check(_) -> + [{?eh,tc_start,{master_SUITE,first_testcase}}, + {?eh,tc_done,{master_SUITE,first_testcase,ok}}, + {?eh,tc_start,{master_SUITE,second_testcase}}, + {?eh,tc_done,{master_SUITE,second_testcase,ok}}, + {?eh,tc_start,{master_SUITE,third_testcase}}, + {?eh,tc_done,{master_SUITE,third_testcase,ok}}, + {?eh,tc_start,{master_SUITE,env_vars}}, + {?eh,tc_done,{master_SUITE,env_vars,ok}}]. diff --git a/lib/common_test/test/ct_master_SUITE_data/master/master_SUITE.erl b/lib/common_test/test/ct_master_SUITE_data/master/master_SUITE.erl index 032d69ad9f..df54c4419c 100644 --- a/lib/common_test/test/ct_master_SUITE_data/master/master_SUITE.erl +++ b/lib/common_test/test/ct_master_SUITE_data/master/master_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010. All Rights Reserved. +%% Copyright Ericsson AB 2010-2013. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -39,7 +39,8 @@ init_per_suite(Config) -> end_per_suite(_) -> ok. -all() -> [first_testcase, second_testcase, third_testcase]. +all() -> [first_testcase, second_testcase, third_testcase, + env_vars]. init_per_testcase(_, Config) -> Config. @@ -56,3 +57,9 @@ second_testcase(_)-> third_testcase(_)-> A = 4, A = 2*2. + +env_vars(_) -> + io:format("~p\n", [os:getenv()]), + "yes" = os:getenv("THIS_MUST_BE_SET"), + "value" = os:getenv("SO_MUST_THIS"), + ok. diff --git a/lib/common_test/test/ct_netconfc_SUITE.erl b/lib/common_test/test/ct_netconfc_SUITE.erl index 3042a924fe..c89a4cdabe 100644 --- a/lib/common_test/test/ct_netconfc_SUITE.erl +++ b/lib/common_test/test/ct_netconfc_SUITE.erl @@ -43,12 +43,11 @@ %% there will be clashes with logging processes etc). %%-------------------------------------------------------------------- init_per_suite(Config) -> - Config1 = ct_test_support:init_per_suite(Config), case application:load(crypto) of - {error,Reason} -> + {error,Reason} when Reason=/={already_loaded,crypto} -> {skip, Reason}; _ -> - Config1 + ct_test_support:init_per_suite(Config) end. end_per_suite(Config) -> diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl index d337158bce..54526e8e83 100644 --- a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl +++ b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl @@ -1044,12 +1044,9 @@ gen_dsa(LSize,NSize) when is_integer(LSize), is_integer(NSize) -> Key = gen_dsa2(LSize, NSize), {Key, encode_key(Key)}. -encode_key(Key = #'RSAPrivateKey'{}) -> - {ok, Der} = 'OTP-PUB-KEY':encode('RSAPrivateKey', Key), - {'RSAPrivateKey', list_to_binary(Der), not_encrypted}; encode_key(Key = #'DSAPrivateKey'{}) -> - {ok, Der} = 'OTP-PUB-KEY':encode('DSAPrivateKey', Key), - {'DSAPrivateKey', list_to_binary(Der), not_encrypted}. + Der = public_key:der_encode('DSAPrivateKey', Key), + {'DSAPrivateKey', Der, not_encrypted}. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl b/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl index 2427f37f52..09217f60a3 100644 --- a/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl +++ b/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2012. All Rights Reserved. +%% Copyright Ericsson AB 2012-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -426,7 +426,7 @@ reply(ConnRef,Reply) -> send(ConnRef, make_msg(Reply)). from_simple(Simple) -> - list_to_binary(xmerl:export_simple_element(Simple,xmerl_xml)). + unicode_c2b(xmerl:export_simple_element(Simple,xmerl_xml)). xml(Content) -> <<"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n", @@ -435,30 +435,30 @@ xml(Content) -> rpc_reply(Content) when is_binary(Content) -> MsgId = case erase(msg_id) of undefined -> <<>>; - Id -> list_to_binary([" message-id=\"",Id,"\""]) + Id -> unicode_c2b([" message-id=\"",Id,"\""]) end, <<"<rpc-reply xmlns=\"",?NETCONF_NAMESPACE,"\"",MsgId/binary,">\n", Content/binary,"\n</rpc-reply>">>; rpc_reply(Content) -> - rpc_reply(list_to_binary(Content)). + rpc_reply(unicode_c2b(Content)). 
session_id(no_session_id) -> <<>>; session_id(SessionId0) -> - SessionId = list_to_binary(integer_to_list(SessionId0)), + SessionId = unicode_c2b(integer_to_list(SessionId0)), <<"<session-id>",SessionId/binary,"</session-id>\n">>. capabilities(undefined) -> - CapsXml = list_to_binary([["<capability>",C,"</capability>\n"] + CapsXml = unicode_c2b([["<capability>",C,"</capability>\n"] || C <- ?CAPABILITIES]), <<"<capabilities>\n",CapsXml/binary,"</capabilities>\n">>; capabilities({base,Vsn}) -> - CapsXml = list_to_binary([["<capability>",C,"</capability>\n"] + CapsXml = unicode_c2b([["<capability>",C,"</capability>\n"] || C <- ?CAPABILITIES_VSN(Vsn)]), <<"<capabilities>\n",CapsXml/binary,"</capabilities>\n">>; capabilities(no_base) -> [_|Caps] = ?CAPABILITIES, - CapsXml = list_to_binary([["<capability>",C,"</capability>\n"] || C <- Caps]), + CapsXml = unicode_c2b([["<capability>",C,"</capability>\n"] || C <- Caps]), <<"<capabilities>\n",CapsXml/binary,"</capabilities>\n">>; capabilities(no_caps) -> <<>>. @@ -553,3 +553,8 @@ make_msg(Xml) when is_binary(Xml) -> xml(Xml); make_msg(Simple) when is_tuple(Simple) -> xml(from_simple(Simple)). + +%%%----------------------------------------------------------------- +%%% Convert to unicode binary, since we use UTF-8 encoding in XML +unicode_c2b(Characters) -> + unicode:characters_to_binary(Characters). diff --git a/lib/common_test/test/ct_snmp_SUITE.erl b/lib/common_test/test/ct_snmp_SUITE.erl new file mode 100644 index 0000000000..f8b4543770 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE.erl @@ -0,0 +1,141 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File: ct_snmp_SUITE +%%% +%%% Description: +%%% Test ct_snmp module +%%% +%%%------------------------------------------------------------------- +-module(ct_snmp_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + +-define(eh, ct_test_support_eh). + +%%-------------------------------------------------------------------- +%% TEST SERVER CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- + +%%-------------------------------------------------------------------- +%% Description: Since Common Test starts another Test Server +%% instance, the tests need to be performed on a separate node (or +%% there will be clashes with logging processes etc). +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config1 = ct_test_support:init_per_suite(Config), + Config1. + +end_per_suite(Config) -> + ct_test_support:end_per_suite(Config). + +init_per_testcase(TestCase, Config) -> + ct_test_support:init_per_testcase(TestCase, Config). + +end_per_testcase(TestCase, Config) -> + ct_test_support:end_per_testcase(TestCase, Config). 
+ +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [ + default + ]. + +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%%%----------------------------------------------------------------- +%%% +default(Config) when is_list(Config) -> + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, "snmp_SUITE"), + CfgFile = filename:join(DataDir, "snmp.cfg"), + {Opts,ERPid} = setup([{suite,Suite},{config,CfgFile}, + {label,default}], Config), + + ok = execute(default, Opts, ERPid, Config). + + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- + +setup(Test, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. + +execute(Name, Opts, ERPid, Config) -> + ok = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(Name, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(Name,Config), + ct_test_support:verify_events(TestEvents, Events, Config). + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). + +%%%----------------------------------------------------------------- +%%% TEST EVENTS +%%%----------------------------------------------------------------- +events_to_check(_TestName,Config) -> + {module,_} = code:load_abs(filename:join(?config(data_dir,Config), + snmp_SUITE)), + TCs = get_tcs(), + code:purge(snmp_SUITE), + code:delete(snmp_SUITE), + + OneTest = + [{?eh,start_logging,{'DEF','RUNDIR'}}] ++ + [{?eh,tc_done,{snmp_SUITE,TC,ok}} || TC <- TCs] ++ + [{?eh,stop_logging,[]}], + + %% 2 tests (ct:run_test + script_start) is default + OneTest ++ OneTest. + + +get_tcs() -> + All = snmp_SUITE:all(), + Groups = + try snmp_SUITE:groups() + catch error:undef -> [] + end, + flatten_tcs(All,Groups). + +flatten_tcs([H|T],Groups) when is_atom(H) -> + [H|flatten_tcs(T,Groups)]; +flatten_tcs([{group,Group}|T],Groups) -> + TCs = proplists:get_value(Group,Groups), + flatten_tcs(TCs ++ T,Groups); +flatten_tcs([],_) -> + []. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp.cfg b/lib/common_test/test/ct_snmp_SUITE_data/snmp.cfg new file mode 100644 index 0000000000..895e097de6 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp.cfg @@ -0,0 +1,44 @@ +%% -*- erlang -*- +{snmp1, [{start_agent,true}, + {users,[{user_name,[snmpm_user_default,[]]}]}, + {managed_agents,[{agent_name, [user_name, {127,0,0,1}, 4000, + [{engine_id,"ct_snmp-test-engine"}, + {version,v2}]]}]}, + {engine_id,"ct_snmp-test-engine"}, + {agent_vsns,[v2]} + ]}. +{snmp2, [{start_agent,true}, + {engine_id,"ct_snmp-test-engine"} + ]}. +{snmp3, [{start_agent,true}, + {engine_id,"ct_snmp-test-engine"}, + {agent_vsns,[v1,v2,v3]}, + {agent_contexts,{data_dir_file,"context.conf"}}, + {agent_usm,{data_dir_file,"usm.conf"}}, + {agent_community,{data_dir_file,"community.conf"}}, + {agent_notify_def,{data_dir_file,"notify.conf"}}, + {agent_sysinfo,{data_dir_file,"standard.conf"}}, + {agent_target_address_def,{data_dir_file,"target_addr.conf"}}, + {agent_target_param_def,{data_dir_file,"target_params.conf"}}, + {agent_vacm,{data_dir_file,"vacm.conf"}}]}. 
+{snmp_app1,[{manager, [{config, [{verbosity, silence}]}, + {server,[{verbosity,silence}]}, + {net_if,[{verbosity,silence}]}, + {versions,[v2]} + ]}, + {agent, [{config, [{verbosity, silence}]}, + {net_if,[{verbosity,silence}]}, + {mib_server,[{verbosity,silence}]}, + {local_db,[{verbosity,silence}]}, + {agent_verbosity,silence} + ]}]}. +{snmp_app2,[{manager, [{config, [{verbosity, silence}]}, + {server,[{verbosity,silence}]}, + {net_if,[{verbosity,silence}]} + ]}, + {agent, [{config, [{verbosity, silence}]}, + {net_if,[{verbosity,silence}]}, + {mib_server,[{verbosity,silence}]}, + {local_db,[{verbosity,silence}]}, + {agent_verbosity,silence} + ]}]}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl new file mode 100644 index 0000000000..16b2b5690c --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl @@ -0,0 +1,395 @@ +%%-------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%%---------------------------------------------------------------------- +%% File: ct_snmp_SUITE.erl +%% +%% Description: +%% This file contains the test cases for the ct_snmp API. +%% +%% @author Support +%% @doc Test of SNMP support in common_test +%% @end +%%---------------------------------------------------------------------- +%%---------------------------------------------------------------------- +-module(snmp_SUITE). +-include_lib("common_test/include/ct.hrl"). +-include_lib("snmp/include/STANDARD-MIB.hrl"). +-include_lib("snmp/include/SNMP-USER-BASED-SM-MIB.hrl"). +-include_lib("snmp/include/snmp_types.hrl"). + +-compile(export_all). + +%% Default timetrap timeout (set in init_per_testcase). +-define(default_timeout, ?t:minutes(1)). + +-define(AGENT_UDP, 4000). + +suite() -> + [ + {require, snmp1, snmp1}, + {require, snmp_app1, snmp_app1}, + {require, snmp2, snmp2}, + {require, snmp_app2, snmp_app2}, + {require, snmp3, snmp3} + ]. + +all() -> + [start_stop, + {group,get_set}, + {group,register}, + {group,override}, + set_info]. + + +groups() -> + [{get_set,[get_values, + get_next_values, + set_values, + load_mibs]}, + {register,[register_users, + register_users_fail, + register_agents, + register_agents_fail, + register_usm_users, + register_usm_users_fail]}, + {override,[override_usm, + override_standard, + override_context, + override_community, + override_notify, + override_target_addr, + override_target_params, + override_vacm]}]. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + Config. + +init_per_group(get_set, Config) -> + ok = ct_snmp:start(Config,snmp1,snmp_app1), + Config; +init_per_group(register, Config) -> + ok = ct_snmp:start(Config,snmp2,snmp_app2), + Config; +init_per_group(_, Config) -> + ok = ct_snmp:start(Config,snmp3,snmp_app2), + Config. 
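+
+%% Each group is started against its own configuration name (snmp1 for
+%% get_set, snmp2 for register, snmp3 for the remaining groups) and is
+%% stopped again in end_per_group below. end_per_testcase also calls an
+%% optional Case(cleanup, Config) clause, if the test case defines one,
+%% so that a case can undo its own registrations; a missing cleanup
+%% clause (error:undef) is simply ignored.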
+ +end_per_group(_Group, Config) -> + catch ct_snmp:stop(Config), + Config. + +init_per_testcase(_Case, Config) -> + Dog = test_server:timetrap(?default_timeout), + [{watchdog, Dog}|Config]. + +end_per_testcase(Case, Config) -> + try apply(?MODULE,Case,[cleanup,Config]) + catch error:undef -> ok + end, + Dog=?config(watchdog, Config), + test_server:timetrap_cancel(Dog), + ok. + +%%%----------------------------------------------------------------- +%%% Test cases +break(_Config) -> + test_server:break(""), + ok. + +start_stop(Config) -> + ok = ct_snmp:start(Config,snmp1,snmp_app1), + timer:sleep(1000), + {snmp,_,_} = lists:keyfind(snmp,1,application:which_applications()), + [_|_] = filelib:wildcard("*/*.conf",?config(priv_dir,Config)), + + ok = ct_snmp:stop(Config), + timer:sleep(1000), + false = lists:keyfind(snmp,1,application:which_applications()), + [] = filelib:wildcard("*/*.conf",?config(priv_dir,Config)), + ok. + +get_values(_Config) -> + Oids1 = [?sysDescr_instance, ?sysName_instance], + {noError,_,V1} = ct_snmp:get_values(agent_name,Oids1,snmp1), + [#varbind{oid=?sysDescr_instance,value="Erlang SNMP agent"}, + #varbind{oid=?sysName_instance,value="ct_test"}] = V1, + ok. + +get_next_values(_Config) -> + Oids2 = [?system], + {noError,_,V2} = ct_snmp:get_next_values(agent_name,Oids2,snmp1), + [#varbind{oid=?sysDescr_instance,value="Erlang SNMP agent"}] = V2, + ok. + +set_values(Config) -> + Oid3 = ?sysName_instance, + NewName = "ct_test changed by " ++ atom_to_list(?MODULE), + VarsAndVals = [{Oid3,s,NewName}], + {noError,_,_} = + ct_snmp:set_values(agent_name,VarsAndVals,snmp1,Config), + + Oids4 = [?sysName_instance], + {noError,_,V4} = ct_snmp:get_values(agent_name,Oids4,snmp1), + [#varbind{oid=?sysName_instance,value=NewName}] = V4, + + ok. + +load_mibs(_Config) -> + [{'SNMPv2-MIB',_}=SnmpV2Mib] = snmpa:which_mibs(), + Mib = filename:join([code:priv_dir(snmp),"mibs","SNMP-USER-BASED-SM-MIB"]), + ok = ct_snmp:load_mibs([Mib]), + TwoMibs = [_,_] = snmpa:which_mibs(), + [{'SNMP-USER-BASED-SM-MIB',_}] = lists:delete(SnmpV2Mib,TwoMibs), + ok = ct_snmp:unload_mibs([Mib]), + [{'SNMPv2-MIB',_}] = snmpa:which_mibs(), + ok. + + +register_users(_Config) -> + [] = snmpm:which_users(), + ok = ct_snmp:register_users(snmp2,[{reg_user1,[snmpm_user_default,[]]}]), + [_] = snmpm:which_users(), + [_] = ct:get_config({snmp2,users}), + ok = ct_snmp:register_users(snmp2,[{reg_user2,[snmpm_user_default,[]]}]), + [_,_] = snmpm:which_users(), + [_,_] = ct:get_config({snmp2,users}), + ok = ct_snmp:register_users(snmp2,[{reg_user3,[snmpm_user_default,[]]}]), + [_,_,_] = snmpm:which_users(), + [_,_,_] = ct:get_config({snmp2,users}), + ok = ct_snmp:unregister_users(snmp2,[reg_user3]), + [_,_] = snmpm:which_users(), + [_,_] = ct:get_config({snmp2,users}), + ok = ct_snmp:unregister_users(snmp2), + [] = snmpm:which_users(), + [] = ct:get_config({snmp2,users}), + ok. +register_users(cleanup,_Config) -> + ct_snmp:unregister_users(snmp2). + +register_users_fail(_Config) -> + [] = snmpm:which_users(), + {error,_} = ct_snmp:register_users(snmp2,[{reg_user3,[unknown_module,[]]}]), + [] = snmpm:which_users(), + ok. +register_users_fail(cleanup,_Config) -> + ct_snmp:unregister_users(snmp2). 
+ +register_agents(_Config) -> + {ok, HostName} = inet:gethostname(), + {ok, Addr} = inet:getaddr(HostName, inet), + + [] = snmpm:which_agents(), + ok = ct_snmp:register_users(snmp2,[{reg_user1,[snmpm_user_default,[]]}]), + ok = ct_snmp:register_agents(snmp2,[{reg_agent1, + [reg_user1,Addr,?AGENT_UDP,[]]}]), + [_] = snmpm:which_agents(), + [_] = ct:get_config({snmp2,managed_agents}), + ok = ct_snmp:register_agents(snmp2,[{reg_agent2, + [reg_user1,Addr,?AGENT_UDP,[]]}]), + [_,_] = snmpm:which_agents(), + [_,_] = ct:get_config({snmp2,managed_agents}), + + ok = ct_snmp:register_agents(snmp2,[{reg_agent3, + [reg_user1,Addr,?AGENT_UDP,[]]}]), + [_,_,_] = snmpm:which_agents(), + [_,_,_] = ct:get_config({snmp2,managed_agents}), + + ok = ct_snmp:unregister_agents(snmp2,[reg_agent3]), + [_,_] = snmpm:which_agents(), + [_,_] = ct:get_config({snmp2,managed_agents}), + + ok = ct_snmp:unregister_agents(snmp2), + ok = ct_snmp:unregister_users(snmp2), + [] = snmpm:which_agents(), + [] = ct:get_config({snmp2,managed_agents}), + ok. +register_agents(cleanup,_Config) -> + ct_snmp:unregister_agents(snmp2), + ct_snmp:unregister_users(snmp2). + +register_agents_fail(_Config) -> + {ok, HostName} = inet:gethostname(), + {ok, Addr} = inet:getaddr(HostName, inet), + + [] = snmpm:which_agents(), + {error,_} + = ct_snmp:register_agents(snmp2,[{reg_agent3, + [unknown_user,Addr,?AGENT_UDP,[]]}]), + [] = snmpm:which_agents(), + ok. +register_agents_fail(cleanup,_Config) -> + ct_snmp:unregister_agents(snmp2). + +register_usm_users(_Config) -> + [] = snmpm:which_usm_users(), + ok = ct_snmp:register_usm_users(snmp2,[{"reg_usm_user1",[]}]), + [_] = snmpm:which_usm_users(), + [_] = ct:get_config({snmp2,usm_users}), + ok = ct_snmp:register_usm_users(snmp2,[{"reg_usm_user2",[]}]), + [_,_] = snmpm:which_usm_users(), + [_,_] = ct:get_config({snmp2,usm_users}), + ok = ct_snmp:register_usm_users(snmp2,[{"reg_usm_user3",[]}]), + [_,_,_] = snmpm:which_usm_users(), + [_,_,_] = ct:get_config({snmp2,usm_users}), + ok = ct_snmp:unregister_usm_users(snmp2,["reg_usm_user3"]), + [_,_] = snmpm:which_usm_users(), + [_,_] = ct:get_config({snmp2,usm_users}), + ok = ct_snmp:unregister_usm_users(snmp2), + [] = snmpm:which_usm_users(), + [] = ct:get_config({snmp2,usm_users}), + ok. +register_usm_users(cleanup,_Config) -> + ct_snmp:unregister_usm_users(snmp2). + +register_usm_users_fail(_Config) -> + [] = snmpm:which_usm_users(), + {error,_} + = ct_snmp:register_usm_users(snmp2,[{"reg_usm_user3", + [{sec_name,invalid_data_type}]}]), + [] = snmpm:which_usm_users(), + ok. +register_usm_users_fail(cleanup,_Config) -> + ct_snmp:unregister_usm_users(snmp2). + +%% Test that functionality for overriding default configuration file +%% works - i.e. that the files are written and that the configuration +%% is actually used. +%% +%% Note that the config files used in this test case do not +%% necessarily make up a reasonable configuration for the snmp +%% application... +override_usm(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + Mib = filename:join([code:priv_dir(snmp),"mibs","SNMP-USER-BASED-SM-MIB"]), + ok = ct_snmp:load_mibs([Mib]), + + %% Check that usm.conf is overwritten + {ok,MyUsm} = snmpa_conf:read_usm_config(DataDir), + {ok,UsedUsm} = snmpa_conf:read_usm_config(ConfDir), + true = (MyUsm == UsedUsm), + + %% Check that the usm user is actually configured... 
+ [{Index,"secname"}] = + snmp_user_based_sm_mib:usmUserTable(get_next,?usmUserEntry,[3]), + true = lists:suffix("usm_user_name",Index), + ok. + +override_standard(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that standard.conf is overwritten + {ok,MyStandard} = snmpa_conf:read_standard_config(DataDir), + {ok,UsedStandard} = snmpa_conf:read_standard_config(ConfDir), + true = (MyStandard == UsedStandard), + + %% Check that the values from standard.conf is actually configured... + {value,"name for override test"} = snmp_standard_mib:sysName(get), + {value,"agent for ct_snmp override test"} = snmp_standard_mib:sysDescr(get), + ok. + +override_context(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that context.conf is overwritten + {ok,MyContext} = snmpa_conf:read_context_config(DataDir), + {ok,UsedContext} = snmpa_conf:read_context_config(ConfDir), + true = (MyContext == UsedContext), + ok. + +override_community(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that community.conf is overwritten + {ok,MyCommunity} = snmpa_conf:read_community_config(DataDir), + {ok,UsedCommunity} = snmpa_conf:read_community_config(ConfDir), + true = (MyCommunity == UsedCommunity), + ok. + +override_notify(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that notify.conf is overwritten + {ok,MyNotify} = snmpa_conf:read_notify_config(DataDir), + {ok,UsedNotify} = snmpa_conf:read_notify_config(ConfDir), + true = (MyNotify == UsedNotify), + ok. + +override_target_addr(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that target_addr.conf is overwritten + {ok,MyTargetAddr} = snmpa_conf:read_target_addr_config(DataDir), + {ok,UsedTargetAddr} = snmpa_conf:read_target_addr_config(ConfDir), + true = (MyTargetAddr == UsedTargetAddr), + ok. + +override_target_params(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that target_params.conf is overwritten + {ok,MyTargetParams} = snmpa_conf:read_target_params_config(DataDir), + {ok,UsedTargetParams} = snmpa_conf:read_target_params_config(ConfDir), + true = (MyTargetParams == UsedTargetParams), + ok. + +override_vacm(Config) -> + DataDir = ?config(data_dir,Config), + PrivDir = ?config(priv_dir,Config), + ConfDir = filename:join(PrivDir,"conf"), + + %% Check that vacm.conf is overwritten + {ok,MyVacm} = snmpa_conf:read_vacm_config(DataDir), + {ok,UsedVacm} = snmpa_conf:read_vacm_config(ConfDir), + true = (MyVacm == UsedVacm), + ok. + + + + +%% NOTE!! This test must always be executed last in the suite, and +%% should match all set requests performed in the suite. I.e. if you +%% add a set request, you must add an entry in the return value of +%% ct_snmp:set_info/1 below. +set_info(Config) -> + %% From test case set_values/1: + Oid1 = ?sysName_instance, + NewValue1 = "ct_test changed by " ++ atom_to_list(?MODULE), + + %% The test... + [{_AgentName,_,[{Oid1,_,NewValue1}]}] + = ct_snmp:set_info(Config), + ok. 
diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/community.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/community.conf new file mode 100644 index 0000000000..5a64df6605 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/community.conf @@ -0,0 +1 @@ +{"public", "public", "initial", "", ""}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/context.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/context.conf new file mode 100644 index 0000000000..feed5e1d11 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/context.conf @@ -0,0 +1 @@ +"testcontext". diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/notify.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/notify.conf new file mode 100644 index 0000000000..367ba3aa4b --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/notify.conf @@ -0,0 +1 @@ +{"standard inform", "std_inform", inform}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/standard.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/standard.conf new file mode 100644 index 0000000000..79908fb355 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/standard.conf @@ -0,0 +1,7 @@ +{sysDescr, "agent for ct_snmp override test"}. +{sysObjectID, [1,2,3]}. +{sysContact, "[email protected]"}. +{sysLocation, "erlang"}. +{sysServices, 72}. +{snmpEnableAuthenTraps, enabled}. +{sysName, "name for override test"}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/target_addr.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/target_addr.conf new file mode 100644 index 0000000000..d02672a074 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/target_addr.conf @@ -0,0 +1,2 @@ +{"target1", snmpUDPDomain, [147,214,122,73], 5000, 1500, 3, "std_trap", "target_v3", "", [], 2048}. +{"target2", snmpUDPDomain, [147,214,122,73], 5000, 1500, 3, "std_inform", "target_v3", "", [], 2048}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/target_params.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/target_params.conf new file mode 100644 index 0000000000..5a9a619422 --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/target_params.conf @@ -0,0 +1 @@ +{"target_v3", v3, usm, "initial", noAuthNoPriv}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/usm.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/usm.conf new file mode 100644 index 0000000000..d6e245914e --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/usm.conf @@ -0,0 +1 @@ +{"ct_snmp-test-engine","usm_user_name","secname",zeroDotZero,usmNoAuthProtocol,"","",usmNoPrivProtocol,"","","","",""}. diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/vacm.conf b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/vacm.conf new file mode 100644 index 0000000000..158fe02e3b --- /dev/null +++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE_data/vacm.conf @@ -0,0 +1,6 @@ +{vacmSecurityToGroup, usm, "initial", "initial"}. +{vacmAccess, "initial", "", any, noAuthNoPriv, exact, "restricted", "", "restricted"}. +{vacmAccess, "initial", "", usm, authNoPriv, exact, "internet", "internet", "internet"}. +{vacmAccess, "initial", "", usm, authPriv, exact, "internet", "internet", "internet"}. +{vacmViewTreeFamily, "restricted", [1,3,6,1], included, null}. 
+{vacmViewTreeFamily, "internet", [1,3,6,1], included, null}. diff --git a/lib/common_test/test/ct_surefire_SUITE.erl b/lib/common_test/test/ct_surefire_SUITE.erl new file mode 100644 index 0000000000..b86b47f0a2 --- /dev/null +++ b/lib/common_test/test/ct_surefire_SUITE.erl @@ -0,0 +1,374 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012-2013. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File: ct_surefire_SUITE +%%% +%%% Description: +%%% Test cth_surefire hook +%%% +%%%------------------------------------------------------------------- +-module(ct_surefire_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + +-include_lib("xmerl/include/xmerl.hrl"). +-include_lib("kernel/include/file.hrl"). + +-define(eh, ct_test_support_eh). + +-define(url_base,"http://my.host.com/"). + +%%-------------------------------------------------------------------- +%% TEST SERVER CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- + +%%-------------------------------------------------------------------- +%% Description: Since Common Test starts another Test Server +%% instance, the tests need to be performed on a separate node (or +%% there will be clashes with logging processes etc). +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config1 = ct_test_support:init_per_suite(Config), + Config1. + +end_per_suite(Config) -> + ct_test_support:end_per_suite(Config). + +init_per_testcase(TestCase, Config) -> + ct_test_support:init_per_testcase(TestCase, Config). + +end_per_testcase(TestCase, Config) -> + ct_test_support:end_per_testcase(TestCase, Config). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [ + default, + absolute_path, + relative_path, + url, + logdir + ]. + +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%%%----------------------------------------------------------------- +%%% +default(Config) when is_list(Config) -> + run(default,[cth_surefire],"junit_report.xml",Config). + +absolute_path(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir,Config), + Path = filename:join(PrivDir,"abspath.xml"), + run(absolute_path,[{cth_surefire,[{path,Path}]}],Path,Config). + +relative_path(Config) when is_list(Config) -> + Path = "relpath.xml", + run(relative_path,[{cth_surefire,[{path,Path}]}],Path,Config). + +url(Config) when is_list(Config) -> + Path = "url.xml", + run(url,[{cth_surefire,[{url_base,?url_base},{path,Path}]}], + Path,Config). 
+ +logdir(Config) when is_list(Config) -> + Opts = ct_test_support:get_opts(Config), + LogDir = + case lists:keyfind(logdir,1,Opts) of + {logdir,LD} -> LD; + false -> ?config(priv_dir,Config) + end, + MyLogDir = filename:join(LogDir,"specific_logdir"), + ensure_exists_empty(MyLogDir), + Path = "logdir.xml", + run(logdir,[{cth_surefire,[{path,Path}]}],Path,Config,[{logdir,MyLogDir}]). + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- +run(Case,CTHs,Report,Config) -> + run(Case,CTHs,Report,Config,[]). +run(Case,CTHs,Report,Config,ExtraOpts) -> + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, "surefire_SUITE"), + {Opts,ERPid} = setup([{suite,Suite},{ct_hooks,CTHs},{label,Case}|ExtraOpts], + Config), + ok = execute(Case, Opts, ERPid, Config), + LogDir = + case lists:keyfind(logdir,1,Opts) of + {logdir,LD} -> LD; + false -> ?config(priv_dir,Config) + end, + Re = filename:join([LogDir,"*",Report]), + check_xml(Case,Re). + +setup(Test, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Opts1 = + case lists:keymember(logdir,1,Test) of + true -> lists:keydelete(logdir,1,Opts0); + false -> Opts0 + end, + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts1 ++ [{event_handler,{?eh,EvHArgs}}|Test], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. + +execute(Name, Opts, ERPid, Config) -> + ok = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(Name, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(Name), + ct_test_support:verify_events(TestEvents, Events, Config). + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). + +%%%----------------------------------------------------------------- +%%% TEST EVENTS +%%%----------------------------------------------------------------- +events_to_check(Test) -> + %% 2 tests (ct:run_test + script_start) is default + events_to_check(Test, 2). + +events_to_check(_, 0) -> + []; +events_to_check(Test, N) -> + test_events(Test) ++ events_to_check(Test, N-1). 
+ +test_events(_) -> + [{?eh,start_logging,'_'}, + {?eh,start_info,{1,1,9}}, + {?eh,tc_start,{surefire_SUITE,init_per_suite}}, + {?eh,tc_done,{surefire_SUITE,init_per_suite,ok}}, + {?eh,tc_start,{surefire_SUITE,tc_ok}}, + {?eh,tc_done,{surefire_SUITE,tc_ok,ok}}, + {?eh,test_stats,{1,0,{0,0}}}, + {?eh,tc_start,{surefire_SUITE,tc_fail}}, + {?eh,tc_done,{surefire_SUITE,tc_fail, + {failed,{error,{test_case_failed,"this test should fail"}}}}}, + {?eh,test_stats,{1,1,{0,0}}}, + {?eh,tc_start,{surefire_SUITE,tc_skip}}, + {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}}, + {?eh,test_stats,{1,1,{1,0}}}, + {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}}, + {?eh,tc_done,{surefire_SUITE,tc_autoskip_require, + {skipped,{require_failed,'_'}}}}, + {?eh,test_stats,{1,1,{1,1}}}, + [{?eh,tc_start,{surefire_SUITE,{init_per_group,g,[]}}}, + {?eh,tc_done,{surefire_SUITE,{init_per_group,g,[]},ok}}, + {?eh,tc_start,{surefire_SUITE,tc_ok}}, + {?eh,tc_done,{surefire_SUITE,tc_ok,ok}}, + {?eh,test_stats,{2,1,{1,1}}}, + {?eh,tc_start,{surefire_SUITE,tc_fail}}, + {?eh,tc_done,{surefire_SUITE,tc_fail, + {failed,{error,{test_case_failed,"this test should fail"}}}}}, + {?eh,test_stats,{2,2,{1,1}}}, + {?eh,tc_start,{surefire_SUITE,tc_skip}}, + {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}}, + {?eh,test_stats,{2,2,{2,1}}}, + {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}}, + {?eh,tc_done,{surefire_SUITE,tc_autoskip_require, + {skipped,{require_failed,'_'}}}}, + {?eh,test_stats,{2,2,{2,2}}}, + {?eh,tc_start,{surefire_SUITE,{end_per_group,g,[]}}}, + {?eh,tc_done,{surefire_SUITE,{end_per_group,g,[]},ok}}], + [{?eh,tc_start,{surefire_SUITE,{init_per_group,g_fail,[]}}}, + {?eh,tc_done,{surefire_SUITE,{init_per_group,g_fail,[]}, + {failed,{error,all_cases_should_be_skipped}}}}, + {?eh,tc_auto_skip,{surefire_SUITE,tc_ok, + {failed, + {surefire_SUITE,init_per_group, + {'EXIT',all_cases_should_be_skipped}}}}}, + {?eh,test_stats,{2,2,{2,3}}}, + {?eh,tc_auto_skip,{surefire_SUITE,end_per_group, + {failed, + {surefire_SUITE,init_per_group, + {'EXIT',all_cases_should_be_skipped}}}}}], + {?eh,tc_start,{surefire_SUITE,end_per_suite}}, + {?eh,tc_done,{surefire_SUITE,end_per_suite,ok}}, + {?eh,stop_logging,[]}]. + + +%%%----------------------------------------------------------------- +%%% Check generated xml log files +check_xml(Case,XmlRe) -> + case filelib:wildcard(XmlRe) of + [] -> + ct:fail("No xml files found with regexp ~p~n", [XmlRe]); + [_] = Xmls when Case==absolute_path -> + do_check_xml(Case,Xmls); + [_,_] = Xmls -> + do_check_xml(Case,Xmls) + end. + +%% Allowed structure: +%% <testsuites> +%% <testsuite> +%% <properties> +%% <property/> +%% ... +%% </properties> +%% <testcase> +%% [<failure/> | <error/> | <skipped/> ] +%% </testcase> +%% ... +%% </testsuite> +%% ... +%% </testsuites> +do_check_xml(Case,[Xml|Xmls]) -> + ct:log("Checking <a href=~p>~s</a>~n",[Xml,Xml]), + {E,_} = xmerl_scan:file(Xml), + Expected = events_to_result(lists:flatten(test_events(Case))), + ParseResult = testsuites(Case,E), + ct:log("Expecting: ~p~n",[[Expected]]), + ct:log("Actual : ~p~n",[ParseResult]), + [Expected] = ParseResult, + do_check_xml(Case,Xmls); +do_check_xml(_,[]) -> + ok. + +%% Scanning the XML to get the same type of result as events_to_result/1 +testsuites(Case,#xmlElement{name=testsuites,content=TS}) -> + %% OTP-10589 - move properties element to <testsuite> + false = lists:keytake(properties,#xmlElement.name,TS), + testsuite(Case,TS). 
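+
+%% A concrete sketch of the XML structure scanned below (attribute values
+%% other than the counters are elided); the counters correspond to the
+%% last test_stats element in the expected events above:
+%%
+%%   <testsuites>
+%%     <testsuite tests="9" errors="0" failures="2" skipped="5" ...>
+%%       <testcase .../>                            (e.g. tc_ok)
+%%       <testcase ...><failure .../></testcase>    (e.g. tc_fail)
+%%       <testcase ...><skipped .../></testcase>    (e.g. tc_skip)
+%%       ...
+%%     </testsuite>
+%%   </testsuites>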
+
+testsuite(Case,[#xmlElement{name=testsuite,content=TC,attributes=A}|TS]) ->
+    {ET,EF,ES} = events_to_numbers(lists:flatten(test_events(Case))),
+    {T,E,F,S} = get_numbers_from_attrs(A,false,false,false,false),
+    ct:log("Expecting total:~p, error:~p, failure:~p, skipped:~p~n",[ET,0,EF,ES]),
+    ct:log("Actual total:~p, error:~p, failure:~p, skipped:~p~n",[T,E,F,S]),
+    {ET,0,EF,ES} = {T,E,F,S},
+
+    %% properties should only be there if options are given to the hook
+    false = lists:keytake(properties,#xmlElement.name,TC),
+    %% system-out and system-err are not used by common_test
+    false = lists:keytake('system-out',#xmlElement.name,TC),
+    false = lists:keytake('system-err',#xmlElement.name,TC),
+    R=testcase(Case,TC),
+    [R|testsuite(Case,TS)];
+testsuite(_Case,[]) ->
+    [].
+
+testcase(url=Case,[#xmlElement{name=testcase,attributes=A,content=C}|TC]) ->
+    R = failed_or_skipped(C),
+    case R of
+        [s] ->
+            case lists:keyfind(url,#xmlAttribute.name,A) of
+                false -> ok;
+                #xmlAttribute{value=UrlAttr} ->
+                    lists:keyfind(url,#xmlAttribute.name,A),
+                    true = lists:prefix(?url_base,UrlAttr)
+            end;
+        _ ->
+            #xmlAttribute{value=UrlAttr} =
+                lists:keyfind(url,#xmlAttribute.name,A),
+            true = lists:prefix(?url_base,UrlAttr)
+    end,
+    [R|testcase(Case,TC)];
+testcase(Case,[#xmlElement{name=testcase,attributes=A,content=C}|TC]) ->
+    false = lists:keyfind(url,#xmlAttribute.name,A),
+    R = failed_or_skipped(C),
+    [R|testcase(Case,TC)];
+testcase(_Case,[]) ->
+    [].
+
+failed_or_skipped([#xmlElement{name=failure}|E]) ->
+    [f|failed_or_skipped(E)];
+failed_or_skipped([#xmlElement{name=error}|E]) ->
+    [e|failed_or_skipped(E)];
+failed_or_skipped([#xmlElement{name=skipped}|E]) ->
+    [s|failed_or_skipped(E)];
+failed_or_skipped([]) ->
+    [].
+
+%% Using the expected events to produce the expected result of the XML scanning.
+%% The result is a list of test suites:
+%% Testsuites = [Testsuite]
+%% Testsuite = [Testcase]
+%% Testcase = [] | [f] | [s], indicating ok, failed and skipped respectively
+events_to_result([{?eh,tc_done,{_Suite,_Case,R}}|E]) ->
+    [result(R)|events_to_result(E)];
+events_to_result([{?eh,tc_auto_skip,_}|E]) ->
+    [[s]|events_to_result(E)];
+events_to_result([_|E]) ->
+    events_to_result(E);
+events_to_result([]) ->
+    [].
+
+result(ok) ->[];
+result({skipped,_}) -> [s];
+result({failed,_}) -> [f].
+
+%% Using the expected events' last test_stats element to produce the
+%% expected number of total, errors, failed and skipped testcases.
+events_to_numbers(E) ->
+    RevE = lists:reverse(E),
+    {?eh,test_stats,{Ok,F,{US,AS}}} = lists:keyfind(test_stats,2,RevE),
+    {Ok+F+US+AS,F,US+AS}.
+
+get_numbers_from_attrs([#xmlAttribute{name=tests,value=X}|A],false,E,F,S) ->
+    get_numbers_from_attrs(A,list_to_integer(X),E,F,S);
+get_numbers_from_attrs([#xmlAttribute{name=errors,value=X}|A],T,false,F,S) ->
+    get_numbers_from_attrs(A,T,list_to_integer(X),F,S);
+get_numbers_from_attrs([#xmlAttribute{name=failures,value=X}|A],T,E,false,S) ->
+    get_numbers_from_attrs(A,T,E,list_to_integer(X),S);
+get_numbers_from_attrs([#xmlAttribute{name=skipped,value=X}|A],T,E,F,false) ->
+    get_numbers_from_attrs(A,T,E,F,list_to_integer(X));
+get_numbers_from_attrs([_|A],T,E,F,S) ->
+    get_numbers_from_attrs(A,T,E,F,S);
+get_numbers_from_attrs([],T,E,F,S) ->
+    {T,E,F,S}.
+
+ensure_exists_empty(Dir) ->
+    case file:list_dir(Dir) of
+        {error,enoent} ->
+            file:make_dir(Dir);
+        {ok,Files} ->
+            del_files(Dir,Files)
+    end.
+ +del_files(Dir,[F0|Fs] ) -> + F = filename:join(Dir,F0), + case file:read_file_info(F) of + {ok,#file_info{type=directory}} -> + {ok,Files} = file:list_dir(F), + del_files(F,Files), + file:del_dir(F), + del_files(Dir,Fs); + _ -> + file:delete(F), + del_files(Dir,Fs) + end; +del_files(_,[]) -> + ok. diff --git a/lib/common_test/test/ct_surefire_SUITE_data/surefire_SUITE.erl b/lib/common_test/test/ct_surefire_SUITE_data/surefire_SUITE.erl new file mode 100644 index 0000000000..677aee46c5 --- /dev/null +++ b/lib/common_test/test/ct_surefire_SUITE_data/surefire_SUITE.erl @@ -0,0 +1,92 @@ +%%-------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%%---------------------------------------------------------------------- +%% File: surefire_SUITE.erl +%% +%% Description: +%% This file contains the test cases for cth_surefire. +%% +%% @author Support +%% @doc Test of surefire support in common_test +%% @end +%%---------------------------------------------------------------------- +%%---------------------------------------------------------------------- +-module(surefire_SUITE). +-include_lib("common_test/include/ct.hrl"). + +-compile(export_all). + +%% Default timetrap timeout (set in init_per_testcase). +-define(default_timeout, ?t:minutes(1)). + +all() -> + testcases() ++ [{group,g},{group,g_fail}]. + +groups() -> + [{g,testcases()}, + {g_fail,[tc_ok]}]. + +testcases() -> + [tc_ok, + tc_fail, + tc_skip, + tc_autoskip_require]. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + Config. + +init_per_group(g_fail, _Config) -> + exit(all_cases_should_be_skipped); +init_per_group(_, Config) -> + Config. + +end_per_group(_Group, Config) -> + Config. + +init_per_testcase(_Case, Config) -> + Dog = test_server:timetrap(?default_timeout), + [{watchdog, Dog}|Config]. + +end_per_testcase(_Case, Config) -> + Dog=?config(watchdog, Config), + test_server:timetrap_cancel(Dog), + ok. + +%%%----------------------------------------------------------------- +%%% Test cases +break(_Config) -> + test_server:break(""), + ok. + +tc_ok(_Config) -> + ok. + +tc_fail(_Config) -> + ct:fail("this test should fail"). + +tc_skip(_Config) -> + {skip,"this test is skipped"}. + +tc_autoskip_require() -> + [{require,whatever}]. +tc_autoskip_require(Config) -> + ct:fail("this test should never be executed - it should be autoskipped"). diff --git a/lib/common_test/test/ct_system_error_SUITE.erl b/lib/common_test/test/ct_system_error_SUITE.erl new file mode 100644 index 0000000000..f2d6ef4b1b --- /dev/null +++ b/lib/common_test/test/ct_system_error_SUITE.erl @@ -0,0 +1,132 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. 
+%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File: ct_system_error_SUITE +%%% +%%% Description: +%%% +%%% Test that severe system errors (such as failure to write logs) are +%%% noticed and handled. +%%%------------------------------------------------------------------- +-module(ct_system_error_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + +-define(eh, ct_test_support_eh). + +%%-------------------------------------------------------------------- +%% TEST SERVER CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- + +%%-------------------------------------------------------------------- +%% Description: Since Common Test starts another Test Server +%% instance, the tests need to be performed on a separate node (or +%% there will be clashes with logging processes etc). +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config1 = ct_test_support:init_per_suite(Config), + Config1. + +end_per_suite(Config) -> + ct_test_support:end_per_suite(Config). + +init_per_testcase(TestCase, Config) -> + ct_test_support:init_per_testcase(TestCase, Config). + +end_per_testcase(TestCase, Config) -> + ct_test_support:end_per_testcase(TestCase, Config). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [ + test_server_failing_logs + ]. + +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%%%----------------------------------------------------------------- +%%% +test_server_failing_logs(Config) -> + TC = test_server_failing_logs, + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, "a_SUITE"), + {Opts,ERPid} = setup([{suite,Suite},{label,TC}], Config), + crash_test_server(Config), + {error,{cannot_create_log_dir,__}} = ct_test_support:run(Opts, Config), + Events = ct_test_support:get_events(ERPid, Config), + ct_test_support:log_events(TC, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(TC), + ok = ct_test_support:verify_events(TestEvents, Events, Config). + +crash_test_server(Config) -> + DataDir = ?config(data_dir, Config), + Root = proplists:get_value(logdir, ct_test_support:get_opts(Config)), + [$@|Host] = lists:dropwhile(fun(C) -> + C =/= $@ + end, atom_to_list(node())), + Format = filename:join(Root, + "ct_run.ct@" ++ Host ++ + ".~4..0w-~2..0w-~2..0w_" + "~2..0w.~2..0w.~2..0w"), + [C2,C1|_] = lists:reverse(filename:split(DataDir)), + LogDir = C1 ++ "." 
++ C2 ++ ".a_SUITE.logs", + T = calendar:datetime_to_gregorian_seconds(calendar:local_time()), + [begin + {{Y,Mon,D},{H,Min,S}} = + calendar:gregorian_seconds_to_datetime(T+Offset), + Dir0 = io_lib:format(Format, [Y,Mon,D,H,Min,S]), + Dir = lists:flatten(Dir0), + file:make_dir(Dir), + File = filename:join(Dir, LogDir), + file:write_file(File, "anything goes\n") + end || Offset <- lists:seq(0, 20)], + ok. + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- + +setup(Test, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts0 ++ [{event_handler,{?eh,EvHArgs}}|Test], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). + +%%%----------------------------------------------------------------- +%%% TEST EVENTS +%%%----------------------------------------------------------------- + +events_to_check(_Test) -> + [{?eh,severe_error,{cannot_create_log_dir,{'_','_'}}}]. diff --git a/lib/common_test/test/ct_system_error_SUITE_data/a_SUITE.erl b/lib/common_test/test/ct_system_error_SUITE_data/a_SUITE.erl new file mode 100644 index 0000000000..c6e3ddfd5d --- /dev/null +++ b/lib/common_test/test/ct_system_error_SUITE_data/a_SUITE.erl @@ -0,0 +1,122 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(a_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{timetrap,{seconds,10}}]. + +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. 
+ +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [tc1]. + +tc1(_C) -> + ok. diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl index 80cca4a1cc..7c33fd404d 100644 --- a/lib/common_test/test/ct_test_support.erl +++ b/lib/common_test/test/ct_test_support.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2012. All Rights Reserved. +%% Copyright Ericsson AB 2008-2013. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -32,7 +32,7 @@ run/2, run/3, run/4, get_opts/1, wait_for_ct_stop/1]). -export([handle_event/2, start_event_receiver/1, get_events/2, - verify_events/3, reformat/2, log_events/4, + verify_events/3, verify_events/4, reformat/2, log_events/4, join_abs_dirs/2]). -export([ct_test_halt/1]). @@ -117,8 +117,7 @@ end_per_suite(Config) -> CTNode = proplists:get_value(ct_node, Config), PrivDir = proplists:get_value(priv_dir, Config), true = rpc:call(CTNode, code, del_path, [filename:join(PrivDir,"")]), - cover:stop(CTNode), - slave:stop(CTNode), + slave_stop(CTNode), ok. %%%----------------------------------------------------------------- @@ -149,8 +148,7 @@ end_per_testcase(_TestCase, Config) -> case wait_for_ct_stop(CTNode) of %% Common test was not stopped to we restart node. 
false -> - cover:stop(CTNode), - slave:stop(CTNode), + slave_stop(CTNode), start_slave(Config,proplists:get_value(trace_level,Config)), {fail, "Could not stop common_test"}; true -> @@ -364,6 +362,14 @@ verify_events(TEvs, Evs, Config) -> ok end. +verify_events(TEvs, Evs, Node, Config) -> + case catch verify_events1(TEvs, Evs, Node, Config) of + {'EXIT',Reason} -> + Reason; + _ -> + ok + end. + verify_events1([TestEv|_], [{TEH,#event{name=stop_logging,node=Node,data=_}}|_], Node, _) when element(1,TestEv) == TEH, element(2,TestEv) =/= stop_logging -> test_server:format("Failed to find ~p in the list of events!~n", [TestEv]), @@ -612,8 +618,11 @@ locate({parallel,TEvs}, Node, Evs, Config) -> fun({EH,#event{name=tc_auto_skip, node=EvNode, data={Mod,end_per_group,Reason}}}) when - EH == TEH, EvNode == Node, Mod == M, Reason == R -> - false; + EH == TEH, EvNode == Node, Mod == M -> + case match_data(R, Reason) of + match -> false; + _ -> true + end; ({EH,#event{name=stop_logging, node=EvNode,data=_}}) when EH == TEH, EvNode == Node -> @@ -627,23 +636,12 @@ locate({parallel,TEvs}, Node, Evs, Config) -> [_AutoSkip | RemEvs2] -> {Done,RemEvs2,length(RemEvs2)} end; - %% match other event than test case - (TEv={TEH,N,D}, Acc) when D == '_' -> - case [E || E={EH,#event{name=Name, - node=EvNode, - data=_}} <- Evs1, - EH == TEH, EvNode == Node, Name == N] of - [] -> - exit({unmatched,TEv}); - _ -> - test_server:format("Found ~p!", [TEv]), - Acc - end; (TEv={TEH,N,D}, Acc) -> case [E || E={EH,#event{name=Name, node=EvNode, data=Data}} <- Evs1, - EH == TEH, EvNode == Node, Name == N, Data == D] of + EH == TEH, EvNode == Node, Name == N, + match == match_data(D,Data)] of [] -> exit({unmatched,TEv}); _ -> @@ -1002,33 +1000,39 @@ locate({TEH,Name,Data}, Node, [{TEH,#event{name=Name, data = EvData, node = Node}}|Evs], Config) -> - try match_data(Data, EvData) of + case match_data(Data, EvData) of match -> - {Config,Evs} - catch _:_ -> + {Config,Evs}; + _ -> nomatch end; locate({_TEH,_Name,_Data}, _Node, [_|_Evs], _Config) -> nomatch. -match_data(D,D) -> +match_data(Data, EvData) -> + try do_match_data(Data, EvData) + catch _:_ -> + nomatch + end. + +do_match_data(D,D) -> match; -match_data('_',_) -> +do_match_data('_',_) -> match; -match_data(Fun,Data) when is_function(Fun) -> +do_match_data(Fun,Data) when is_function(Fun) -> Fun(Data); -match_data('$proplist',Proplist) -> - match_data( +do_match_data('$proplist',Proplist) -> + do_match_data( fun(List) -> lists:foreach(fun({_,_}) -> ok end,List) end,Proplist); -match_data([H1|MatchT],[H2|ValT]) -> - match_data(H1,H2), - match_data(MatchT,ValT); -match_data(Tuple1,Tuple2) when is_tuple(Tuple1),is_tuple(Tuple2) -> - match_data(tuple_to_list(Tuple1),tuple_to_list(Tuple2)); -match_data([],[]) -> +do_match_data([H1|MatchT],[H2|ValT]) -> + do_match_data(H1,H2), + do_match_data(MatchT,ValT); +do_match_data(Tuple1,Tuple2) when is_tuple(Tuple1),is_tuple(Tuple2) -> + do_match_data(tuple_to_list(Tuple1),tuple_to_list(Tuple2)); +do_match_data([],[]) -> match. 
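%% A minimal sketch of the term patterns match_data/2 accepts after this
%% change ('_' wildcards, fun matchers, '$proplist', and structural
%% tuples/lists); the event-like terms below are made-up examples, not taken
%% from any suite:
match_data_sketch() ->
    match   = match_data('_', anything),
    match   = match_data({tc_done,'_',{failed,'_'}},
                         {tc_done,some_SUITE,{failed,{error,reason}}}),
    match   = match_data(fun(L) when length(L) =:= 2 -> match end, [a,b]),
    nomatch = match_data({tc_done,'_'}, {tc_start,some_SUITE}),
    ok.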
result_match({SkipOrFail,{ErrorInd,{Why,'_'}}}, @@ -1039,10 +1043,13 @@ result_match({SkipOrFail,{ErrorInd,{EMod,EFunc,{Why,'_'}}}}, true; result_match({failed,{timetrap_timeout,{'$approx',Num}}}, {failed,{timetrap_timeout,Value}}) -> - if Value >= trunc(Num-0.02*Num), - Value =< trunc(Num+0.02*Num) -> true; + if Value >= trunc(Num-0.05*Num), + Value =< trunc(Num+0.05*Num) -> true; true -> false end; +result_match({user_timetrap_error,{Why,'_'}}, + {user_timetrap_error,{Why,_Stack}}) -> + true; result_match(Result, Result) -> true; result_match(_, _) -> @@ -1125,6 +1132,8 @@ reformat([{_EH,#event{name=test_start,data=_}} | Events], EH) -> [{EH,test_start,{'DEF',{'START_TIME','LOGDIR'}}} | reformat(Events, EH)]; reformat([{_EH,#event{name=test_done,data=_}} | Events], EH) -> [{EH,test_done,{'DEF','STOP_TIME'}} | reformat(Events, EH)]; +reformat([{_EH,#event{name=tc_logfile,data=_}} | Events], EH) -> + reformat(Events, EH); reformat([{_EH,#event{name=test_stats,data=Data}} | Events], EH) -> [{EH,test_stats,Data} | reformat(Events, EH)]; %% use this to only print the last test_stats event: @@ -1259,3 +1268,22 @@ rm_files([F | Fs]) -> rm_files([]) -> ok. +%%%----------------------------------------------------------------- +%%% +slave_stop(Node) -> + Cover = test_server:is_cover(), + if Cover-> cover:flush(Node); + true -> ok + end, + erlang:monitor_node(Node, true), + slave:stop(Node), + receive + {nodedown, Node} -> + if Cover -> cover:stop(Node); + true -> ok + end + after 5000 -> + erlang:monitor_node(Node, false), + receive {nodedown, Node} -> ok after 0 -> ok end %flush + end, + ok. diff --git a/lib/common_test/test/ct_testspec_1_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE.erl index b7e19f25dd..6a4a4acd80 100644 --- a/lib/common_test/test/ct_testspec_1_SUITE.erl +++ b/lib/common_test/test/ct_testspec_1_SUITE.erl @@ -58,7 +58,7 @@ end_per_testcase(TestCase, Config) -> suite() -> [{ct_hooks,[ts_install_cth]}]. -all() -> +all() -> [all_suites, skip_all_suites, suite, skip_suite, all_testcases, skip_all_testcases, testcase, skip_testcase, all_groups, skip_all_groups, group, @@ -67,23 +67,23 @@ all() -> skip_group_testcase, topgroup, subgroup, skip_subgroup, subgroup_all_testcases, skip_subgroup_all_testcases, subgroup_testcase, skip_subgroup_testcase, - sub_skipped_by_top, testcase_in_multiple_groups, - order_of_tests_in_multiple_dirs_no_merge_tests, - order_of_tests_in_multiple_suites_no_merge_tests, - order_of_suites_in_multiple_dirs_no_merge_tests, - order_of_groups_in_multiple_dirs_no_merge_tests, - order_of_groups_in_multiple_suites_no_merge_tests, - order_of_tests_in_multiple_dirs, - order_of_tests_in_multiple_suites, - order_of_suites_in_multiple_dirs, - order_of_groups_in_multiple_dirs, - order_of_groups_in_multiple_suites, - order_of_tests_in_multiple_suites_with_skip_no_merge_tests, - order_of_tests_in_multiple_suites_with_skip, + sub_skipped_by_top, testcase_many_groups, + order_of_tests_many_dirs_no_merge_tests, + order_of_tests_many_suites_no_merge_tests, + order_of_suites_many_dirs_no_merge_tests, + order_of_groups_many_dirs_no_merge_tests, + order_of_groups_many_suites_no_merge_tests, + order_of_tests_many_dirs, + order_of_tests_many_suites, + order_of_suites_many_dirs, + order_of_groups_many_dirs, + order_of_groups_many_suites, + order_of_tests_many_suites_with_skip_no_merge_tests, + order_of_tests_many_suites_with_skip, all_plus_one_tc_no_merge_tests, all_plus_one_tc]. -groups() -> +groups() -> []. 
init_per_group(_GroupName, Config) -> @@ -373,19 +373,19 @@ sub_skipped_by_top(Config) when is_list(Config) -> %%%----------------------------------------------------------------- %%% -testcase_in_multiple_groups(Config) when is_list(Config) -> +testcase_many_groups(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir = filename:join(DataDir, "groups_1"), TestSpec = [{cases,TestDir,groups_12_SUITE,[testcase_1a,testcase_1b]}, {skip_cases,TestDir,groups_12_SUITE,[testcase_1b],"SKIPPED!"}], - setup_and_execute(testcase_in_multiple_groups, TestSpec, Config). + setup_and_execute(testcase_many_groups, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_tests_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) -> +order_of_tests_many_dirs_no_merge_tests(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -395,13 +395,13 @@ order_of_tests_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) -> {cases,TestDir2,groups_22_SUITE,[testcase_1]}, {cases,TestDir1,groups_12_SUITE,[testcase_1b]}], - setup_and_execute(order_of_tests_in_multiple_dirs_no_merge_tests, + setup_and_execute(order_of_tests_many_dirs_no_merge_tests, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_tests_in_multiple_suites_no_merge_tests(Config) when is_list(Config) -> +order_of_tests_many_suites_no_merge_tests(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -410,13 +410,13 @@ order_of_tests_in_multiple_suites_no_merge_tests(Config) when is_list(Config) -> {cases,TestDir1,groups_11_SUITE,[testcase_1]}, {cases,TestDir1,groups_12_SUITE,[testcase_1b]}], - setup_and_execute(order_of_tests_in_multiple_suites_no_merge_tests, + setup_and_execute(order_of_tests_many_suites_no_merge_tests, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_suites_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) -> +order_of_suites_many_dirs_no_merge_tests(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -426,13 +426,13 @@ order_of_suites_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) -> {suites,TestDir2,groups_22_SUITE}, {suites,TestDir1,groups_11_SUITE}], - setup_and_execute(order_of_suites_in_multiple_dirs_no_merge_tests, + setup_and_execute(order_of_suites_many_dirs_no_merge_tests, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_groups_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) -> +order_of_groups_many_dirs_no_merge_tests(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -442,13 +442,13 @@ order_of_groups_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) -> {groups,TestDir2,groups_22_SUITE,test_group_1a}, {groups,TestDir1,groups_12_SUITE,test_group_1b}], - setup_and_execute(order_of_groups_in_multiple_dirs_no_merge_tests, + setup_and_execute(order_of_groups_many_dirs_no_merge_tests, TestSpec, Config). 
%%%----------------------------------------------------------------- %%% -order_of_groups_in_multiple_suites_no_merge_tests(Config) +order_of_groups_many_suites_no_merge_tests(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), @@ -458,13 +458,13 @@ order_of_groups_in_multiple_suites_no_merge_tests(Config) {groups,TestDir1,groups_11_SUITE,test_group_1a}, {groups,TestDir1,groups_12_SUITE,test_group_1b}], - setup_and_execute(order_of_groups_in_multiple_suites_no_merge_tests, + setup_and_execute(order_of_groups_many_suites_no_merge_tests, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_tests_in_multiple_suites_with_skip_no_merge_tests(Config) +order_of_tests_many_suites_with_skip_no_merge_tests(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), @@ -477,14 +477,14 @@ order_of_tests_in_multiple_suites_with_skip_no_merge_tests(Config) {skip_cases,TestDir1,groups_12_SUITE,[testcase_1b],"Skip it"}], setup_and_execute( - order_of_tests_in_multiple_suites_with_skip_no_merge_tests, + order_of_tests_many_suites_with_skip_no_merge_tests, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_tests_in_multiple_dirs(Config) when is_list(Config) -> +order_of_tests_many_dirs(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -493,13 +493,13 @@ order_of_tests_in_multiple_dirs(Config) when is_list(Config) -> {cases,TestDir2,groups_22_SUITE,[testcase_1]}, {cases,TestDir1,groups_12_SUITE,[testcase_1b]}], - setup_and_execute(order_of_tests_in_multiple_dirs, + setup_and_execute(order_of_tests_many_dirs, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_tests_in_multiple_suites(Config) when is_list(Config) -> +order_of_tests_many_suites(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -507,13 +507,13 @@ order_of_tests_in_multiple_suites(Config) when is_list(Config) -> {cases,TestDir1,groups_11_SUITE,[testcase_1]}, {cases,TestDir1,groups_12_SUITE,[testcase_1b]}], - setup_and_execute(order_of_tests_in_multiple_suites, + setup_and_execute(order_of_tests_many_suites, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_suites_in_multiple_dirs(Config) when is_list(Config) -> +order_of_suites_many_dirs(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -522,13 +522,13 @@ order_of_suites_in_multiple_dirs(Config) when is_list(Config) -> {suites,TestDir2,groups_22_SUITE}, {suites,TestDir1,groups_11_SUITE}], - setup_and_execute(order_of_suites_in_multiple_dirs, + setup_and_execute(order_of_suites_many_dirs, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_groups_in_multiple_dirs(Config) when is_list(Config) -> +order_of_groups_many_dirs(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -537,13 +537,13 @@ order_of_groups_in_multiple_dirs(Config) when is_list(Config) -> {groups,TestDir2,groups_22_SUITE,test_group_1a}, {groups,TestDir1,groups_12_SUITE,test_group_1b}], - setup_and_execute(order_of_groups_in_multiple_dirs, + setup_and_execute(order_of_groups_many_dirs, TestSpec, Config). 
%%%----------------------------------------------------------------- %%% -order_of_groups_in_multiple_suites(Config) when is_list(Config) -> +order_of_groups_many_suites(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -551,13 +551,13 @@ order_of_groups_in_multiple_suites(Config) when is_list(Config) -> {groups,TestDir1,groups_11_SUITE,test_group_1a}, {groups,TestDir1,groups_12_SUITE,test_group_1b}], - setup_and_execute(order_of_groups_in_multiple_suites, + setup_and_execute(order_of_groups_many_suites, TestSpec, Config). %%%----------------------------------------------------------------- %%% -order_of_tests_in_multiple_suites_with_skip(Config) when is_list(Config) -> +order_of_tests_many_suites_with_skip(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), TestDir1 = filename:join(DataDir, "groups_1"), @@ -567,7 +567,7 @@ order_of_tests_in_multiple_suites_with_skip(Config) when is_list(Config) -> {cases,TestDir1,groups_11_SUITE,[testcase_2]}, {skip_cases,TestDir1,groups_12_SUITE,[testcase_1b],"Skip it!"}], - setup_and_execute(order_of_tests_in_multiple_suites_with_skip, + setup_and_execute(order_of_tests_many_suites_with_skip, TestSpec, Config). %%%----------------------------------------------------------------- @@ -1204,10 +1204,10 @@ test_events(sub_skipped_by_top) -> {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}} ]; -test_events(testcase_in_multiple_groups) -> +test_events(testcase_many_groups) -> []; -test_events(order_of_tests_in_multiple_dirs_no_merge_tests) -> +test_events(order_of_tests_many_dirs_no_merge_tests) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,testcase_1a}}, {?eh,tc_done, {groups_12_SUITE,testcase_1a, @@ -1219,7 +1219,7 @@ test_events(order_of_tests_in_multiple_dirs_no_merge_tests) -> {failed,{error,{test_case_failed,no_group_data}}}}}, {?eh,stop_logging,[]} ]; -test_events(order_of_tests_in_multiple_suites_no_merge_tests) -> +test_events(order_of_tests_many_suites_no_merge_tests) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,testcase_1a}}, {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}}, @@ -1229,7 +1229,7 @@ test_events(order_of_tests_in_multiple_suites_no_merge_tests) -> {?eh,tc_done,{groups_12_SUITE,testcase_1b,'_'}}, {?eh,stop_logging,[]} ]; -test_events(order_of_suites_in_multiple_dirs_no_merge_tests) -> +test_events(order_of_suites_many_dirs_no_merge_tests) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,init_per_suite}}, {?eh,tc_done,{groups_12_SUITE,init_per_suite,'_'}}, @@ -1244,7 +1244,7 @@ test_events(order_of_suites_in_multiple_dirs_no_merge_tests) -> {?eh,tc_start,{groups_11_SUITE,end_per_suite}}, {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}}, {?eh,stop_logging,[]}]; -test_events(order_of_groups_in_multiple_dirs_no_merge_tests) -> +test_events(order_of_groups_many_dirs_no_merge_tests) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}}, @@ -1257,7 +1257,7 @@ test_events(order_of_groups_in_multiple_dirs_no_merge_tests) -> {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1b,'_'},'_'}}, {?eh,stop_logging,[]}]; -test_events(order_of_groups_in_multiple_suites_no_merge_tests) -> +test_events(order_of_groups_many_suites_no_merge_tests) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}}, @@ -1270,7 +1270,7 @@ 
test_events(order_of_groups_in_multiple_suites_no_merge_tests) -> {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1b,'_'},'_'}}, {?eh,stop_logging,[]}]; -test_events(order_of_tests_in_multiple_suites_with_skip_no_merge_tests) -> +test_events(order_of_tests_many_suites_with_skip_no_merge_tests) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,testcase_1a}}, {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}}, @@ -1282,7 +1282,7 @@ test_events(order_of_tests_in_multiple_suites_with_skip_no_merge_tests) -> {?eh,stop_logging,[]} ]; -test_events(order_of_tests_in_multiple_dirs) -> +test_events(order_of_tests_many_dirs) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,testcase_1a}}, {?eh,tc_done, @@ -1296,7 +1296,7 @@ test_events(order_of_tests_in_multiple_dirs) -> {?eh,tc_done,{groups_22_SUITE,testcase_1,ok}}, {?eh,stop_logging,[]} ]; -test_events(order_of_tests_in_multiple_suites) -> +test_events(order_of_tests_many_suites) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,testcase_1a}}, {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}}, @@ -1308,7 +1308,7 @@ test_events(order_of_tests_in_multiple_suites) -> {?eh,tc_done,{groups_11_SUITE,testcase_1,ok}}, {?eh,stop_logging,[]} ]; -test_events(order_of_suites_in_multiple_dirs) -> +test_events(order_of_suites_many_dirs) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,init_per_suite}}, {?eh,tc_done,{groups_12_SUITE,init_per_suite,'_'}}, @@ -1325,7 +1325,7 @@ test_events(order_of_suites_in_multiple_dirs) -> {?eh,tc_start,{groups_22_SUITE,end_per_suite}}, {?eh,tc_done,{groups_22_SUITE,end_per_suite,'_'}}, {?eh,stop_logging,[]}]; -test_events(order_of_groups_in_multiple_dirs) -> +test_events(order_of_groups_many_dirs) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}}, @@ -1338,7 +1338,7 @@ test_events(order_of_groups_in_multiple_dirs) -> {?eh,tc_done, {groups_22_SUITE,{end_per_group,test_group_1a,'_'},'_'}}, {?eh,stop_logging,[]}]; -test_events(order_of_groups_in_multiple_suites) -> +test_events(order_of_groups_many_suites) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}}, @@ -1352,7 +1352,7 @@ test_events(order_of_groups_in_multiple_suites) -> {?eh,stop_logging,[]}]; -test_events(order_of_tests_in_multiple_suites_with_skip) -> +test_events(order_of_tests_many_suites_with_skip) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,tc_start,{groups_12_SUITE,testcase_1a}}, {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}}, diff --git a/lib/common_test/test/ct_testspec_2_SUITE.erl b/lib/common_test/test/ct_testspec_2_SUITE.erl index 9d2dc84ad3..beaba0eaac 100644 --- a/lib/common_test/test/ct_testspec_2_SUITE.erl +++ b/lib/common_test/test/ct_testspec_2_SUITE.erl @@ -479,7 +479,7 @@ multiple_specs(_Config) -> "multiple_specs.1.spec"), SpecFile2 = ct_test_support:write_testspec(Spec2,SpecDir, "multiple_specs.2.spec"), - FileResult = ct_testspec:collect_tests_from_file([SpecFile1,SpecFile2], + FileResult = ct_testspec:collect_tests_from_file([[SpecFile1,SpecFile2]], false), ct:pal("TESTSPEC RECORD FROM FILE:~n~p~n", [rec2proplist(FileResult)]), @@ -490,7 +490,7 @@ multiple_specs(_Config) -> [{Node2,get_absdir(filename:join(SpecDir,CfgDir))} || CfgDir <- CfgDir2]], LogDirV = get_absdir(filename:join(SpecDir,"../logs")), - Verify = #testspec{merge_tests = false, + Verify = #testspec{merge_tests = true, spec_dir = SpecDir, nodes = 
[{undefined,Node},{n1,Node1},{n2,Node2}], alias = [{to1,TO1V},{to2,TO2V}], @@ -500,22 +500,13 @@ multiple_specs(_Config) -> logdir = [{Node,LogDirV},{Node1,LogDirV},{Node2,LogDirV},"."], config = CFGs, tests = [{{Node1,TO1V}, - [{x_SUITE,[all]}]}, - {{Node1,TO1V}, - [{y_SUITE,[{g1,all},{g2,all}]}]}, - {{Node1,TO1V}, - [{y_SUITE,[tc1,tc2]}]}, - {{Node1,TO1V}, - [{z_SUITE,[{all,{skip,"skipped"}}]}]}, - {{Node2,TO2V}, - [{x_SUITE,[all]}]}, - {{Node2,TO2V}, - [{y_SUITE,[all]}]}, - {{Node2,TO2V}, - [{x_SUITE,[{{g1,all},{skip,"skipped"}}, - {{g2,all},{skip,"skipped"}}]}]}, + [{x_SUITE,[all]}, + {y_SUITE,[{g1,all},{g2,all},tc1,tc2]}, + {z_SUITE,[{all,{skip,"skipped"}}]}]}, {{Node2,TO2V}, - [{y_SUITE,[{tc1,{skip,"skipped"}}, + [{x_SUITE,[all,{{g1,all},{skip,"skipped"}}, + {{g2,all},{skip,"skipped"}}]}, + {y_SUITE,[all,{tc1,{skip,"skipped"}}, {tc2,{skip,"skipped"}}]}]}]}, verify_result(Verify,FileResult,FileResult). @@ -524,7 +515,7 @@ multiple_specs(_Config) -> %%% misc_config_terms(_Config) -> CfgDir = "../cfgs/to1", - + TODir = "../tests/to1", Spec = [{node,x,n1@h1},{node,y,n2@h2}, @@ -554,7 +545,9 @@ misc_config_terms(_Config) -> {create_priv_dir,[auto_per_tc]}, {create_priv_dir,n1@h1,[manual_per_tc]}, - {create_priv_dir,n2@h2,[auto_per_run]} + {create_priv_dir,n2@h2,[auto_per_run]}, + + {suites,n1@h1,TODir,[x_SUITE]} ], {ok,SpecDir} = file:get_cwd(), @@ -599,7 +592,9 @@ misc_config_terms(_Config) -> {n2@h2,CSS2}], create_priv_dir = [{Node,[auto_per_tc]}, {n1@h1,[manual_per_tc]}, - {n2@h2,[auto_per_run]}] + {n2@h2,[auto_per_run]}], + tests = [{{n1@h1,get_absdir(filename:join(SpecDir,TODir))}, + [{x_SUITE,[all]}]}] }, verify_result(Verify,ListResult,FileResult). @@ -688,10 +683,10 @@ define_names_1(_Config) -> %%% HELP FUNCTIONS %%%----------------------------------------------------------------- -verify_result(Verify,ListResult,FileResult) -> +verify_result(VerificationRec,ListResult,FileResult) -> {_,TSLTuples} = rec2proplist(ListResult), {_,TSFTuples} = rec2proplist(FileResult), - {_,VTuples} = rec2proplist(Verify), + {_,VTuples} = rec2proplist(VerificationRec), VResult = (catch lists:foldl(fun({Tag,Val},{[{Tag,Val}|TSL],[{Tag,Val}|TSF]}) -> {TSL,TSF}; @@ -720,6 +715,8 @@ read_config(S) -> rec2proplist(E={error,_What}) -> exit({invalid_testspec_record,E}); +rec2proplist([{Specs,Rec}]) when is_list(Specs) -> + rec2proplist(Rec); rec2proplist(Rec) -> [RecName|RecList] = tuple_to_list(Rec), FieldNames = diff --git a/lib/common_test/test/ct_testspec_3_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE.erl new file mode 100644 index 0000000000..e2f57c2c64 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE.erl @@ -0,0 +1,745 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2009-2012. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. 
+%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File: ct_testspec_1_SUITE +%%% +%%% Description: +%%% Test test specifications +%%% +%%% The suites used for the test are located in the data directory. +%%%------------------------------------------------------------------- +-module(ct_testspec_3_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + +-define(eh, ct_test_support_eh). + +%%-------------------------------------------------------------------- +%% TEST SERVER CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- + +%%-------------------------------------------------------------------- +%% Description: Since Common Test starts another Test Server +%% instance, the tests need to be performed on a separate node (or +%% there will be clashes with logging processes etc). +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + DataDir = ?config(data_dir, Config), + Config1 = ct_test_support:init_per_suite(Config), + SpecsDir1 = filename:join(DataDir, "specs1"), + SpecsDir2 = filename:join(DataDir, "specs2"), + [{specs_dir1,SpecsDir1},{specs_dir2,SpecsDir2} | Config1]. + +end_per_suite(Config) -> + ct_test_support:end_per_suite(Config). + +init_per_testcase(TestCase, Config) -> + ct_test_support:init_per_testcase(TestCase, Config). + +end_per_testcase(TestCase, Config) -> + ct_test_support:end_per_testcase(TestCase, Config). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [start_separate, + start_join, + incl_separate1, + incl_separate2, + incl_join1, + incl_join2, + incl_both1, + incl_both2, + incl_both_and_join1, + incl_both_and_join2, + rec_incl_separate1, + rec_incl_separate2, + rec_incl_join1, + rec_incl_join2, + rec_incl_separate_join1, + rec_incl_separate_join2, + rec_incl_join_separate1, + rec_incl_join_separate2 + ]. + +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +%%%----------------------------------------------------------------- +%%% + +start_separate(Config) -> + Specs = [fname(specs_dir1, "flat_spec1", Config), + fname(specs_dir2, "flat_spec2", Config)], + setup_and_execute(start_separate, Specs, [], Config). + +%%%----------------------------------------------------------------- +%%% + +start_join(Config) -> + Specs = [fname(specs_dir1, "flat_spec1", Config), + fname(specs_dir2, "flat_spec2", Config)], + setup_and_execute(start_join, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_separate1(Config) -> + Specs = [fname(specs_dir1, "spec_sep1", Config), + fname(specs_dir2, "spec_sep2", Config)], + setup_and_execute(incl_separate1, Specs, [], Config). + +incl_separate2(Config) -> + Specs = [fname(specs_dir1, "spec_sep1", Config), + fname(specs_dir2, "spec_sep2", Config)], + setup_and_execute(incl_separate2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_join1(Config) -> + Specs = [fname(specs_dir1, "spec_join1", Config), + fname(specs_dir2, "spec_join2", Config)], + setup_and_execute(incl_join1, Specs, [], Config). 
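%% The {join_specs,true} option passed by several of these test cases is,
%% as the cases appear to assume, the Opts-list counterpart of the ct_run
%% -join_specs flag: it presumably makes Common Test merge the given
%% specifications into a single test run instead of executing each one
%% separately. A minimal sketch of passing it directly to ct:run_test/1
%% (the spec file names are made up):
join_specs_sketch() ->
    ct:run_test([{spec,["a.spec","b.spec"]},{join_specs,true}]).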
+ +incl_join2(Config) -> + Specs = [fname(specs_dir1, "spec_join1", Config), + fname(specs_dir2, "spec_join2", Config)], + setup_and_execute(incl_join2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_both1(Config) -> + Specs = [fname(specs_dir1, "spec_both1", Config), + fname(specs_dir2, "spec_both2", Config)], + setup_and_execute(incl_both1, Specs, [], Config). + +incl_both2(Config) -> + Specs = [fname(specs_dir1, "spec_both1", Config), + fname(specs_dir2, "spec_both2", Config)], + setup_and_execute(incl_both2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +incl_both_and_join1(Config) -> + Specs = [fname(specs_dir1, "spec_both_join1", Config), + fname(specs_dir2, "spec_both_join2", Config)], + setup_and_execute(incl_both_and_join1, Specs, [], Config). + +incl_both_and_join2(Config) -> + Specs = [fname(specs_dir1, "spec_both_join1", Config), + fname(specs_dir2, "spec_both_join2", Config)], + setup_and_execute(incl_both_and_join2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +rec_incl_separate1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep1", Config), + fname(specs_dir2, "rec_spec_sep2", Config)], + setup_and_execute(rec_incl_separate1, Specs, [], Config). + +rec_incl_separate2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep1", Config), + fname(specs_dir2, "rec_spec_sep2", Config)], + setup_and_execute(rec_incl_separate2, Specs, [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +rec_incl_join1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join1", Config), + fname(specs_dir2, "rec_spec_join2", Config)], + setup_and_execute(rec_incl_join1, Specs, [], Config). + +rec_incl_join2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join1", Config), + fname(specs_dir2, "rec_spec_join2", Config)], + setup_and_execute(rec_incl_join2, Specs, [{join_specs,true}], Config). + + +%%%----------------------------------------------------------------- +%%% + +rec_incl_separate_join1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep_join1", Config), + fname(specs_dir2, "rec_spec_sep_join2", Config)], + setup_and_execute(rec_incl_separate_join1, Specs, [], Config). + +rec_incl_separate_join2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_sep_join1", Config), + fname(specs_dir2, "rec_spec_sep_join2", Config)], + setup_and_execute(rec_incl_separate_join2, Specs, + [{join_specs,true}], Config). + +%%%----------------------------------------------------------------- +%%% + +rec_incl_join_separate1(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join_sep1", Config), + fname(specs_dir2, "rec_spec_join_sep2", Config)], + setup_and_execute(rec_incl_join_separate1, Specs, [], Config). + +rec_incl_join_separate2(Config) -> + Specs = [fname(specs_dir1, "rec_spec_join_sep1", Config), + fname(specs_dir2, "rec_spec_join_sep2", Config)], + setup_and_execute(rec_incl_join_separate2, Specs, + [{join_specs,true}], Config). + + +%%%----------------------------------------------------------------- +%%% HELP FUNCTIONS +%%%----------------------------------------------------------------- + +fname(Tag, File, Config) -> + filename:join(?config(Tag, Config), File). + +check_parameter(TCID) -> + {ok,{config,TCID}}. + +read_config(TCID) -> + {ok,[{tcname,list_to_atom(TCID)}]}. 
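%% check_parameter/1 and read_config/1 above let this suite double as a user
%% configuration callback module: setup_and_execute/4 below passes
%% {userconfig,{?MODULE,atom_to_list(TCName)}}, so Common Test calls
%% check_parameter/1 to validate the string and read_config/1 to turn it into
%% config variables. The practical effect (an assumption about the suites
%% under test, which are not shown here) is that ct:get_config(tcname) inside
%% such a suite returns the current test case name as an atom,
%% e.g. start_separate.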
+ +setup_and_execute(TCName, Specs, TestOpts, Config) -> + + TestID = {userconfig,{?MODULE,atom_to_list(TCName)}}, + TestTerms = [TestID,{spec,Specs},{label,TCName}] ++ TestOpts, + + {Opts,ERPid} = setup(TestTerms, Config), + + case ct_test_support:run(Opts, Config) of + ok -> + ok; + Error -> + ct:pal("Error executing with opts: ~p", [Opts]), + exit(Error) + end, + + Events = ct_test_support:get_events(ERPid, Config), + + ct_test_support:log_events(TCName, + reformat(Events, ?eh), + ?config(priv_dir, Config), + Opts), + + TestEvents = events_to_check(TCName), + ok = ct_test_support:verify_events(TestEvents, Events, Config). + +setup(Test, Config) when is_tuple(Test) -> + setup([Test], Config); +setup(Tests, Config) -> + Opts0 = ct_test_support:get_opts(Config), + Level = ?config(trace_level, Config), + EvHArgs = [{cbm,ct_test_support},{trace_level,Level}], + Opts = Opts0 ++ Tests ++ [{event_handler,{?eh,EvHArgs}}], + ERPid = ct_test_support:start_event_receiver(Config), + {Opts,ERPid}. + +reformat(Events, EH) -> + ct_test_support:reformat(Events, EH). +%reformat(Events, _EH) -> +% Events. + +%%%----------------------------------------------------------------- +%%% TEST EVENTS +%%%----------------------------------------------------------------- +events_to_check(Test) -> + %% 2 tests (ct:run_test + script_start) is default + events_to_check(Test, 2). + +events_to_check(_, 0) -> + []; +events_to_check(Test, N) -> + test_events(Test) ++ events_to_check(Test, N-1). + +test_events(start_separate) -> + [{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(start_join) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + 
{?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_separate1) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{1,1,5}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + 
{?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_separate2) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{1,1,5}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + 
{?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_join1) -> + [{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_join2) -> + [{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_both1) -> + [{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + 
{?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_both2) -> + [{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{4,4,20}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,ok_tc}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{4,8,{4,4}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + 
{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{3,2,15}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t12_SUITE,init_per_suite}}, + {?eh,tc_done,{t12_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t12_SUITE,end_per_suite}}, + {?eh,tc_done,{t12_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t22_SUITE,init_per_suite}}, + {?eh,tc_done,{t22_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{3,6,{3,3}}}, + {?eh,tc_start,{t22_SUITE,end_per_suite}}, + {?eh,tc_done,{t22_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}, + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,start_info,{2,2,10}}, + {?eh,tc_start,{t11_SUITE,init_per_suite}}, + {?eh,tc_done,{t11_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{1,2,{1,1}}}, + {?eh,tc_start,{t11_SUITE,end_per_suite}}, + {?eh,tc_done,{t11_SUITE,end_per_suite,ok}}, + {?eh,tc_start,{t21_SUITE,init_per_suite}}, + {?eh,tc_done,{t21_SUITE,init_per_suite,ok}}, + {?eh,test_stats,{2,4,{2,2}}}, + {?eh,tc_start,{t21_SUITE,end_per_suite}}, + {?eh,tc_done,{t21_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,stop_logging,[]}]; + +test_events(incl_both_and_join1) -> []; +test_events(incl_both_and_join2) -> []; +test_events(rec_incl_separate1) -> []; +test_events(rec_incl_separate2) -> []; +test_events(rec_incl_join1) -> []; +test_events(rec_incl_join2) -> []; +test_events(rec_incl_separate_join1) -> []; +test_events(rec_incl_separate_join2) -> []; +test_events(rec_incl_join_separate1) -> []; +test_events(rec_incl_join_separate2) -> []; + +test_events(_) -> + []. + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 new file mode 100644 index 0000000000..bc672568da --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg11 @@ -0,0 +1 @@ +{file, cfg11}.
\ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 new file mode 100644 index 0000000000..30f2cf6857 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg12 @@ -0,0 +1 @@ +{file, cfg12}.
\ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 new file mode 100644 index 0000000000..1860ec78e5 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config1/cfg13 @@ -0,0 +1 @@ +{file, cfg13}.
\ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 b/lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 new file mode 100644 index 0000000000..b18d35443e --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/config2/cfg21 @@ -0,0 +1 @@ +{file, cfg21}.
\ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 new file mode 100644 index 0000000000..eff87222ea --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/flat_spec1 @@ -0,0 +1,4 @@ +{config, "../config1/cfg11"}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests2", t21_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 new file mode 100644 index 0000000000..a3387f48a3 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join1 @@ -0,0 +1,2 @@ +{specs,join,"spec_join1"}. +{specs,join,"../specs2/spec_join2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 new file mode 100644 index 0000000000..fe127eb4b9 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_join_sep1 @@ -0,0 +1,5 @@ +{specs,join,"spec_sep1"}. +{specs,join,"../specs2/spec_sep2"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 new file mode 100644 index 0000000000..c778aa68a6 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep1 @@ -0,0 +1,2 @@ +{specs,separate,"spec_sep1"}. +{specs,separate,"../specs2/spec_sep2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 new file mode 100644 index 0000000000..7cb5a05fff --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/rec_spec_sep_join1 @@ -0,0 +1,2 @@ +{specs,separate,"spec_join1"}. +{specs,separate,"../specs2/spec_join2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 new file mode 100644 index 0000000000..46111614dc --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both1 @@ -0,0 +1,2 @@ +{specs, join, "../specs1/flat_spec1"}. +{specs, separate, "../specs2/flat_spec2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 new file mode 100644 index 0000000000..f52b3ed030 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_both_join1 @@ -0,0 +1,6 @@ +{specs, join, "../specs1/flat_spec1"}. +{specs, separate, "../specs2/flat_spec2"}. +{merge_tests,false}. +{config, "../config1/cfg12"}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests2", t21_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 new file mode 100644 index 0000000000..baaaf35be4 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_join1 @@ -0,0 +1 @@ +{specs, join, ["../specs2/flat_spec2", "flat_spec1"]}.
\ No newline at end of file diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 new file mode 100644 index 0000000000..89456c35e0 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs1/spec_sep1 @@ -0,0 +1,3 @@ +{specs, separate, "../specs2/flat_spec2"}. +{specs, separate, "flat_spec1"}. + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 new file mode 100644 index 0000000000..758d1e2514 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/flat_spec2 @@ -0,0 +1,5 @@ +{merge_tests, false}. +{config, "../config2/cfg21"}. +{suites, "../tests1", t12_SUITE}. +{suites, "../tests1", t12_SUITE}. +{suites, "../tests2", t22_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 new file mode 100644 index 0000000000..19d3a3d8e2 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join2 @@ -0,0 +1,5 @@ +{specs,join,"spec_join2"}. +{specs,join,"../specs1/spec_join1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 new file mode 100644 index 0000000000..930e68c847 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_join_sep2 @@ -0,0 +1,5 @@ +{specs,join,"spec_sep2"}. +{specs,join,"../specs1/spec_sep1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 new file mode 100644 index 0000000000..5026f329a7 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep2 @@ -0,0 +1,5 @@ +{specs,separate,"spec_sep2"}. +{specs,separate,"../specs1/spec_sep1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 new file mode 100644 index 0000000000..17057088b4 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/rec_spec_sep_join2 @@ -0,0 +1,5 @@ +{specs,separate,"spec_join2"}. +{specs,separate,"../specs1/spec_join1"}. + +{config, "../config1/cfg13"}. +{suites, "../tests2", t23_SUITE}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 new file mode 100644 index 0000000000..4c83115d23 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both2 @@ -0,0 +1,4 @@ +{specs, separate, "../specs1/flat_spec1"}. +{specs, join, "../specs2/flat_spec2"}. +{config, "../config1/cfg12"}. + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 new file mode 100644 index 0000000000..ad81bfb4cc --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_both_join2 @@ -0,0 +1,9 @@ +{merge_tests,true}. +{config, "../config1/cfg12"}. +{suites, "../tests1", t11_SUITE}. +{suites, "../tests2", t21_SUITE}. +{suites, "../tests1", t12_SUITE}. +{suites, "../tests2", t22_SUITE}. 
+ +{specs, separate, "../specs1/flat_spec1"}. +{specs, join, "../specs2/flat_spec2"}. diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 new file mode 100644 index 0000000000..d652dbd78f --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_join2 @@ -0,0 +1,5 @@ +{specs, join, ["../specs1/flat_spec1"]}. +{specs, join, ["flat_spec2"]}. +{config, "../config1/cfg12"}. +{suites, "../tests2", t22_SUITE}. + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 new file mode 100644 index 0000000000..8d37f508b8 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/specs2/spec_sep2 @@ -0,0 +1,5 @@ +{specs, separate, "../specs1/flat_spec1"}. +{specs, separate, "flat_spec2"}. +{config, "../config1/cfg12"}. +{suites, "../tests2", t22_SUITE}. + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl new file mode 100644 index 0000000000..bbe79ed3fe --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl @@ -0,0 +1,175 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t11_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,1}}]. + +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg11]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg11]} -> ok; + {incl_separate2,[cfg11]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg11]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg11]} -> ok; + _ -> ok + + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. + +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. 
+ + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl new file mode 100644 index 0000000000..810298d348 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl @@ -0,0 +1,175 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t12_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,30}}]. + +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg21]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg21]} -> ok; + {incl_separate2,[cfg21]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg21]} -> ok; + {incl_both1,[cfg12,cfg21]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg21]} -> ok; + _ -> ok + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. + +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl new file mode 100644 index 0000000000..9348cd8caf --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl @@ -0,0 +1,174 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t21_SUITE). + +-compile(export_all). 
+ +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,1}}]. + +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg11]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg11]} -> ok; + {incl_separate2,[cfg11]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg11]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg11]} -> ok; + _ -> ok + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. 
+ +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. + +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl new file mode 100644 index 0000000000..a92018ec70 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t22_SUITE.erl @@ -0,0 +1,177 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t22_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{require,file}, + {require,tcname}, + {timetrap,{seconds,30}}]. 
+ +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + %% verify that expected config file can be read + case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of + {start_separate,[cfg21]} -> ok; + {start_join,[cfg11,cfg21]} -> ok; + {incl_separate1,[cfg12]} -> ok; + {incl_separate1,[cfg21]} -> ok; + {incl_separate2,[cfg12]} -> ok; + {incl_separate2,[cfg21]} -> ok; + {incl_join1,[cfg21,cfg11]} -> ok; + {incl_join1,[cfg12,cfg11,cfg21]} -> ok; + {incl_join2,[cfg21,cfg11,cfg12]} -> ok; + {incl_both1,[cfg21]} -> ok; + {incl_both1,[cfg12,cfg21]} -> ok; + {incl_both2,[cfg11,cfg12,cfg21]} -> ok; + {incl_both2,[cfg21]} -> ok; + _ -> ok + end, + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. 
+ +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. + +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl new file mode 100644 index 0000000000..d01fac3144 --- /dev/null +++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t23_SUITE.erl @@ -0,0 +1,158 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(t23_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +%%-------------------------------------------------------------------- +%% @spec suite() -> Info +%% Info = [tuple()] +%% @end +%%-------------------------------------------------------------------- +suite() -> + [{timetrap,{seconds,30}}]. + +%%-------------------------------------------------------------------- +%% @spec init_per_suite(Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_suite(Config0) -> void() | {save_config,Config1} +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_suite(_Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_group(GroupName, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_group(_GroupName, Config) -> + Config. 
+ +%%-------------------------------------------------------------------- +%% @spec end_per_group(GroupName, Config0) -> +%% void() | {save_config,Config1} +%% GroupName = atom() +%% Config0 = Config1 = [tuple()] +%% @end +%%-------------------------------------------------------------------- +end_per_group(_GroupName, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec init_per_testcase(TestCase, Config0) -> +%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +init_per_testcase(autoskip_tc, Config) -> + exit(kaboom), + Config; + +init_per_testcase(userskip_tc, Config) -> + {skip,"user skipped"}; + +init_per_testcase(_TestCase, Config) -> + Config. + +%%-------------------------------------------------------------------- +%% @spec end_per_testcase(TestCase, Config0) -> +%% void() | {save_config,Config1} | {fail,Reason} +%% TestCase = atom() +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +end_per_testcase(_TestCase, _Config) -> + ok. + +%%-------------------------------------------------------------------- +%% @spec groups() -> [Group] +%% Group = {GroupName,Properties,GroupsAndTestCases} +%% GroupName = atom() +%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}] +%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase] +%% TestCase = atom() +%% Shuffle = shuffle | {shuffle,{integer(),integer(),integer()}} +%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail | +%% repeat_until_any_ok | repeat_until_any_fail +%% N = integer() | forever +%% @end +%%-------------------------------------------------------------------- +groups() -> + []. + +%%-------------------------------------------------------------------- +%% @spec all() -> GroupsAndTestCases | {skip,Reason} +%% GroupsAndTestCases = [{group,GroupName} | TestCase] +%% GroupName = atom() +%% TestCase = atom() +%% Reason = term() +%% @end +%%-------------------------------------------------------------------- +all() -> + [ok_tc, exit_tc, to_tc, autoskip_tc, userskip_tc]. + +%%-------------------------------------------------------------------- +%% @spec TestCase(Config0) -> +%% ok | exit() | {skip,Reason} | {comment,Comment} | +%% {save_config,Config1} | {skip_and_save,Reason,Config1} +%% Config0 = Config1 = [tuple()] +%% Reason = term() +%% Comment = term() +%% @end +%%-------------------------------------------------------------------- +ok_tc(_) -> + ok. + +exit_tc(_) -> + exit(kaboom), + ok. + +to_tc(_) -> + ct:timetrap(1), + ct:sleep(100), + ok. + +autoskip_tc(_) -> + ok. + +userskip_tc(_) -> + ok. + + + + diff --git a/lib/common_test/vsn.mk b/lib/common_test/vsn.mk index f9bb22867e..c92fb2ca37 100644 --- a/lib/common_test/vsn.mk +++ b/lib/common_test/vsn.mk @@ -1 +1 @@ -COMMON_TEST_VSN = 1.6.3 +COMMON_TEST_VSN = 1.7 |