aboutsummaryrefslogtreecommitdiffstats
path: root/lib/test_server
diff options
context:
space:
mode:
authorErlang/OTP <[email protected]>2009-11-20 14:54:40 +0000
committerErlang/OTP <[email protected]>2009-11-20 14:54:40 +0000
commit84adefa331c4159d432d22840663c38f155cd4c1 (patch)
treebff9a9c66adda4df2106dfd0e5c053ab182a12bd /lib/test_server
downloadotp-84adefa331c4159d432d22840663c38f155cd4c1.tar.gz
otp-84adefa331c4159d432d22840663c38f155cd4c1.tar.bz2
otp-84adefa331c4159d432d22840663c38f155cd4c1.zip
The R13B03 release.OTP_R13B03
Diffstat (limited to 'lib/test_server')
-rw-r--r--lib/test_server/AUTHORS12
-rw-r--r--lib/test_server/Makefile38
-rw-r--r--lib/test_server/README113
-rw-r--r--lib/test_server/doc/html/.gitignore0
-rw-r--r--lib/test_server/doc/man3/.gitignore0
-rw-r--r--lib/test_server/doc/man6/.gitignore0
-rw-r--r--lib/test_server/doc/pdf/.gitignore0
-rw-r--r--lib/test_server/doc/src/Makefile138
-rw-r--r--lib/test_server/doc/src/basics_chapter.xml216
-rw-r--r--lib/test_server/doc/src/book.xml49
-rw-r--r--lib/test_server/doc/src/example_chapter.xml150
-rw-r--r--lib/test_server/doc/src/fascicules.xml18
-rw-r--r--lib/test_server/doc/src/make.dep24
-rw-r--r--lib/test_server/doc/src/notes.xml346
-rw-r--r--lib/test_server/doc/src/notes_history.xml112
-rw-r--r--lib/test_server/doc/src/part.xml45
-rw-r--r--lib/test_server/doc/src/part_notes.xml40
-rw-r--r--lib/test_server/doc/src/part_notes_history.xml38
-rw-r--r--lib/test_server/doc/src/ref_man.xml43
-rw-r--r--lib/test_server/doc/src/run_test_chapter.xml49
-rw-r--r--lib/test_server/doc/src/test_server.xml840
-rw-r--r--lib/test_server/doc/src/test_server_app.xml75
-rw-r--r--lib/test_server/doc/src/test_server_ctrl.xml771
-rw-r--r--lib/test_server/doc/src/test_spec_chapter.xml375
-rw-r--r--lib/test_server/doc/src/ts.xml592
-rw-r--r--lib/test_server/doc/src/why_test_chapter.xml140
-rw-r--r--lib/test_server/doc/src/write_framework_chapter.xml166
-rw-r--r--lib/test_server/doc/src/write_test_chapter.xml228
-rw-r--r--lib/test_server/ebin/.gitignore0
-rw-r--r--lib/test_server/include/test_server.hrl32
-rw-r--r--lib/test_server/include/test_server_line.hrl20
-rw-r--r--lib/test_server/info2
-rw-r--r--lib/test_server/prebuild.skip1
-rw-r--r--lib/test_server/src/Makefile145
-rw-r--r--lib/test_server/src/conf_vars.in25
l---------lib/test_server/src/config.guess1
l---------lib/test_server/src/config.sub1
-rw-r--r--lib/test_server/src/configure.in423
-rw-r--r--lib/test_server/src/cross.cover20
-rw-r--r--lib/test_server/src/erl2html2.erl182
l---------lib/test_server/src/install-sh1
-rw-r--r--lib/test_server/src/test_server.app.src36
-rw-r--r--lib/test_server/src/test_server.appup.src1
-rw-r--r--lib/test_server/src/test_server.erl2203
-rw-r--r--lib/test_server/src/test_server_ctrl.erl5253
-rw-r--r--lib/test_server/src/test_server_h.erl129
-rw-r--r--lib/test_server/src/test_server_internal.hrl59
-rw-r--r--lib/test_server/src/test_server_line.erl380
-rw-r--r--lib/test_server/src/test_server_node.erl1013
-rw-r--r--lib/test_server/src/test_server_sup.erl616
-rw-r--r--lib/test_server/src/things/distr_startup_SUITE.erl238
-rw-r--r--lib/test_server/src/things/mnesia_power_SUITE.erl125
-rw-r--r--lib/test_server/src/things/random_kill_SUITE.erl81
-rw-r--r--lib/test_server/src/things/soft.gs.txt16
-rw-r--r--lib/test_server/src/things/verify.erl199
-rw-r--r--lib/test_server/src/ts.config45
-rw-r--r--lib/test_server/src/ts.erl695
-rw-r--r--lib/test_server/src/ts.hrl36
-rw-r--r--lib/test_server/src/ts.unix.config4
-rw-r--r--lib/test_server/src/ts.vxworks.config19
-rw-r--r--lib/test_server/src/ts.win32.config15
-rw-r--r--lib/test_server/src/ts_autoconf_vxworks.erl191
-rw-r--r--lib/test_server/src/ts_autoconf_win32.erl254
-rw-r--r--lib/test_server/src/ts_erl_config.erl398
-rw-r--r--lib/test_server/src/ts_install.erl348
-rw-r--r--lib/test_server/src/ts_lib.erl335
-rw-r--r--lib/test_server/src/ts_make.erl103
-rw-r--r--lib/test_server/src/ts_reports.erl543
-rw-r--r--lib/test_server/src/ts_run.erl746
-rw-r--r--lib/test_server/src/ts_selftest.erl120
-rw-r--r--lib/test_server/src/vxworks_client.erl243
-rw-r--r--lib/test_server/vsn.mk2
72 files changed, 19917 insertions, 0 deletions
diff --git a/lib/test_server/AUTHORS b/lib/test_server/AUTHORS
new file mode 100644
index 0000000000..3212999174
--- /dev/null
+++ b/lib/test_server/AUTHORS
@@ -0,0 +1,12 @@
+Original Authors and Contributors:
+
+Mattias Nilsson
+Björn Gustavsson
+Janne Lindblad
+Patrik Winroth
+Claes Wikström
+Siri Hansen
+Peter Andersson
+
+...and others.
+
diff --git a/lib/test_server/Makefile b/lib/test_server/Makefile
new file mode 100644
index 0000000000..98e51a0ee6
--- /dev/null
+++ b/lib/test_server/Makefile
@@ -0,0 +1,38 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1996-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+#
+# Macros
+#
+
+SUB_DIRECTORIES = src doc/src
+
+include vsn.mk
+VSN = $(TEST_SERVER_VSN)
+
+SPECIAL_TARGETS =
+
+#
+# Default Subdir Targets
+#
+include $(ERL_TOP)/make/otp_subdir.mk
+
diff --git a/lib/test_server/README b/lib/test_server/README
new file mode 100644
index 0000000000..fc71c90ca8
--- /dev/null
+++ b/lib/test_server/README
@@ -0,0 +1,113 @@
+===========================================================================
+ OTP Test Server
+===========================================================================
+
+To compile the 'test_server' application you need to build and install
+a Erlang/OTP system from source. Get your open source Erlang/OTP from
+http://www.erlang.org/. The resulting "erlc" command must be in the
+search path for commands.
+
+The Erlang test_server application and the example tests are to be
+inserted into an existing source tree for Erlang/OTP.
+
+You don't run the Test Server or the tests from the source tree.
+Instead a test installation area $TESTROOT is used with the resulting
+directory structure
+
+ $TESTROOT/test_server
+ $TESTROOT/<app1>_test
+ $TESTROOT/<app2>_test
+ .
+ .
+
+For more details see the test_server documentation can be found in the
+"$ERL_TOP/lib/test_server/doc/html" directory.
+
+
+Unpacking the sources
+---------------------
+
+Enter your Erlang/OTP source tree and unpack the OTP Test Server and
+optionally the test examples
+
+ % cd otp_src_RXX
+ % gunzip -c test_server-<VSN>.tar.gz | tar xf -
+ % gunzip -c emulator-YYYY-MM-DD.tar.gz | tar xf -
+ % gunzip -c stdlib-YYYY-MM-DD.tar.gz | tar xf -
+
+
+How to build and install the OTP Test Server
+--------------------------------------------
+
+Set the ERL_TOP variable to the top directory of the source tree
+
+ % cd otp_src_RXX
+
+ % setenv ERL_TOP `pwd`
+ or
+ % export ERL_TOP=`pwd`
+
+If not done before you need to run the configure script
+
+ % ./configure
+
+Then build and install from the the base directory of the test_server
+application
+
+ % cd lib/test_server
+ % gmake release_tests TESTROOT=<some dir>
+
+
+How to build and install the example test suites
+------------------------------------------------
+
+If you want to build and install the example test suites
+you build and install from the the test directories
+
+ % cd $ERL_TOP/lib/stdlib/test
+ % gmake release_tests TESTROOT=<some dir>
+
+ % cd $ERL_TOP/erts/emulator/test
+ % gmake release_tests TESTROOT=<some dir>
+
+
+How to run OTP test suites
+--------------------------
+
+First cd into $TESTROOT/test_server
+
+ % cd $TESTROOT/test_server
+
+Install the OTP Test Server framework
+
+ % erl
+ 1> ts:install().
+
+Check which tests are available
+
+ 2> ts:tests().
+ [...]
+
+Run the collections of test suites one at the time
+
+ 3> ts:run(emulator). (starts a xterm with an Erlang shell)
+ 4> ts:run(stdlib). (starts a xterm with an Erlang shell)
+
+or all at once
+
+ 5> ts:run(). (the node running the tests will be in the background)
+
+Note that it is normal to see lots of error messages in the Erlang
+shell. The tests will stress the system with lots of invalid input to
+find problems in the error handling.
+
+Also note that a failing test case does not always indicate a bug in
+Erlang/OTP. Differences in the network setup, machine configuration
+etc may cause a test case to fail or time out.
+
+The result of the tests are recorded in the file named "index.html" in
+the "$TESTROOT/test_server" directory. You can follow the progress of
+tests suites not yet completed from "last_test.html".
+
+For more details see the test_server documentation can be found in the
+"$ERL_TOP/lib/test_server/doc/html" directory.
diff --git a/lib/test_server/doc/html/.gitignore b/lib/test_server/doc/html/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/test_server/doc/html/.gitignore
diff --git a/lib/test_server/doc/man3/.gitignore b/lib/test_server/doc/man3/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/test_server/doc/man3/.gitignore
diff --git a/lib/test_server/doc/man6/.gitignore b/lib/test_server/doc/man6/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/test_server/doc/man6/.gitignore
diff --git a/lib/test_server/doc/pdf/.gitignore b/lib/test_server/doc/pdf/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/test_server/doc/pdf/.gitignore
diff --git a/lib/test_server/doc/src/Makefile b/lib/test_server/doc/src/Makefile
new file mode 100644
index 0000000000..e3c1b8ce92
--- /dev/null
+++ b/lib/test_server/doc/src/Makefile
@@ -0,0 +1,138 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2002-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+VSN=$(TEST_SERVER_VSN)
+APPLICATION=test_server
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+XML_APPLICATION_FILES = ref_man.xml
+XML_REF3_FILES = \
+ test_server_ctrl.xml \
+ test_server.xml
+XML_REF3_INTERNAL = \
+ ts.xml
+XML_REF6_FILES = test_server_app.xml
+
+XML_PART_FILES = \
+ part.xml \
+ part_notes.xml \
+ part_notes_history.xml
+
+XML_CHAPTER_FILES = \
+ basics_chapter.xml \
+ run_test_chapter.xml \
+ write_test_chapter.xml \
+ test_spec_chapter.xml \
+ example_chapter.xml \
+ write_framework_chapter.xml \
+ notes.xml \
+ notes_history.xml
+
+BOOK_FILES = book.xml
+
+GIF_FILES =
+
+# ----------------------------------------------------
+
+HTML_FILES = $(XML_APPLICATION_FILES:%.xml=$(HTMLDIR)/%.html) \
+ $(XML_PART_FILES:%.xml=$(HTMLDIR)/%.html)
+
+HTML_INTERNAL = $(XML_REF3_INTERNAL:%.xml=$(HTMLDIR)/%.html)
+
+INFO_FILE = ../../info
+
+MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3)
+MAN3_INTERNAL = $(XML_REF3_INTERNAL:%.xml=$(MAN3DIR)/%.3)
+MAN6_FILES = $(XML_REF6_FILES:%_app.xml=$(MAN6DIR)/%.6)
+
+HTML_REF_MAN_FILE = $(HTMLDIR)/index.html
+
+TOP_PDF_FILE = $(PDFDIR)/test_server-$(VSN).pdf
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+XML_FLAGS +=
+DVIPS_FLAGS +=
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+$(HTMLDIR)/%.gif: %.gif
+ $(INSTALL_DATA) $< $@
+
+docs: pdf html man
+
+pdf: $(TOP_PDF_FILE)
+
+html: gifs $(HTML_REF_MAN_FILE)
+
+man: $(MAN3_FILES) $(MAN3_INTERNAL) $(MAN6_FILES)
+
+gifs: $(GIF_FILES:%=$(HTMLDIR)/%)
+
+debug opt:
+
+clean clean_docs:
+ rm -rf $(HTMLDIR)/*
+ rm -f $(MAN3DIR)/*
+ rm -f $(MAN6DIR)/*
+ rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo)
+ rm -f errs core *~
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_docs_spec: docs
+ $(INSTALL_DIR) $(RELSYSDIR)/doc/pdf
+ $(INSTALL_DATA) $(TOP_PDF_FILE) $(RELSYSDIR)/doc/pdf
+ $(INSTALL_DIR) $(RELSYSDIR)/doc/html
+ $(INSTALL_DATA) $(HTMLDIR)/* \
+ $(RELSYSDIR)/doc/html
+ $(INSTALL_DATA) $(INFO_FILE) $(RELSYSDIR)
+ $(INSTALL_DIR) $(RELEASE_PATH)/man/man3
+ $(INSTALL_DATA) $(MAN3_FILES) $(RELEASE_PATH)/man/man3
+ $(INSTALL_DIR) $(RELEASE_PATH)/man/man6
+ $(INSTALL_DATA) $(MAN6_FILES) $(RELEASE_PATH)/man/man6
+
+release_spec:
+
+release_tests_spec:
+
+# ----------------------------------------------------
+# Include dependency
+# ----------------------------------------------------
+
+include make.dep
diff --git a/lib/test_server/doc/src/basics_chapter.xml b/lib/test_server/doc/src/basics_chapter.xml
new file mode 100644
index 0000000000..a96cc88075
--- /dev/null
+++ b/lib/test_server/doc/src/basics_chapter.xml
@@ -0,0 +1,216 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server Basics</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>basics_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Introduction</title>
+ <p><em>Test Server</em> is a portable test tool for automated
+ testing of Erlang programs and OTP applications. It provides an
+ interface for running test programs directly with Test Server
+ as well as an interface for integrating Test Server
+ with a framework application. The latter makes it possible to use
+ Test Server as the engine of a higher level test tool
+ application.</p>
+
+ <p>It is strongly recommended that Test Server be used from inside
+ a framework application, rather than interfaced directly for
+ running test programs. Test Server can be pretty difficult to use
+ since it's a very general and quite extensive and complex
+ application. Furthermore, the <c>test_server_ctrl</c> functions
+ are not meant to be used from within the actual test programs. The
+ framework should handle communication with Test Server and deal
+ with the more complex aspects of this interaction automatically so
+ that a higher level interface may be provided for the tester. For
+ test tool usage to be productive, a simpler, more intuitive and
+ (if required) more specific interface is required than what Test Server
+ can provide.</p>
+
+ <p>OTP delivers a general purpose framework for Test Server, called
+ <em>Common Test</em>. This application is a tool well suited for
+ automated black box testing of target systems of <em>any kind</em>
+ (not necessarily implemented in Erlang). Common Test is also a very
+ useful tool for white box testing of Erlang programs and OTP
+ applications. Unless a more specific functionality and/or user
+ interface is required (in which case you might need to implement
+ your own framework), Common Test should do the job for
+ you. Please read the Common Test User's Guide and reference manual
+ for more information.</p>
+
+ <p>Under normal circumstances, knowledge about the Test Server
+ application is not required for using the Common Test framework.
+ However, if you want to use Test Server without a framework,
+ or learn how to integrate it with your own framework, please read on...
+ </p>
+ </section>
+ <section>
+ <title>Getting started</title>
+ <p>Testing when using Test Server is done by running test
+ suites. A test suite is a number of test cases, where each test
+ case tests one or more things. The test case is the smallest unit
+ that the test server deals with. One or more test cases are
+ grouped together into one ordinary Erlang module, which is called
+ a test suite. Several test suite modules can be grouped together
+ in special test specification files representing whole application
+ and/or system test "jobs".
+ </p>
+ <p>The test suite Erlang module must follow a certain interface,
+ which is specified by Test Server. See the section on writing
+ test suites for details about this.
+ </p>
+ <p>Each test case is considered a success if it returns to the
+ caller, no matter what the returned value is. An exception to this
+ is the return value <c>{skip, Reason}</c> which indicates that the
+ test case is skipped. A failure is specified as a crash, no matter
+ what the crash reason is.
+ </p>
+ <p>As a test suite runs, all information (including output to
+ stdout) is recorded in several different log files. A minimum of
+ information is displayed to the user console. This only include
+ start and stop information, plus a note for each failed test case.
+ </p>
+ <p>The result from each test case is recorded in an HTML log file
+ which is created for each test run. Every test case gets one row
+ in a table presenting total time, whether the case was successful
+ or not, if it was skipped, and possibly also a comment. The HTML
+ file has links to each test case's logfile, which may be viewed
+ from e.g. Netscape or any other HTML capable browser.
+ </p>
+ <p>The Test Server consists of three parts:
+ </p>
+ <list type="bulleted">
+ <item>The part that executes the test suites on target and
+ provides support for the test suite author is called
+ <c>test_server</c>. This is described in the chapter about
+ writing test cases in this user's guide, and in the reference
+ manual for the <c>test_server</c> module.</item>
+ <item>The controlling part, which provides the low level
+ operator interface, starts and stops the target node (if remote
+ target) and slave nodes and writes log files, is called
+ <c>test_server_ctrl</c>. The Test Server Controller should not
+ be used directly when running tests. Instead a framework built
+ on top of it should be used. More information
+ about how to write your own framework can be found
+ in this user's guide and in the reference manual for the
+ <c>test_server_ctrl</c> module.</item>
+ </list>
+ </section>
+
+ <section>
+ <title>Definition of terms</title>
+ <taglist>
+ <tag><em>conf(iguration) case</em></tag>
+ <item>This is a group of test cases which need some specific
+ configuration. A conf case contains an initiation function which
+ sets up a specific configuration, one or more test cases using
+ this configuration, and a cleanup function which restores the
+ configuration. A conf case is specified in a test specification
+ either like this:<c>{conf,InitFunc,ListOfCases,CleanupFunc}</c>,
+ or this: <c>{conf,Properties,InitFunc,ListOfCases,CleanupFunc}</c>
+ </item>
+ <tag><em>datadir</em></tag>
+ <item>Data directory for a test suite. This directory contains
+ any files used by the test suite, e.g. additional erlang
+ modules, c code or data files. If the data directory contains
+ code which must be compiled before the test suite is run, it
+ should also contain a makefile source called Makefile.src
+ defining how to compile.
+ </item>
+ <tag><em>documentation clause</em></tag>
+ <item>One of the function clauses in a test case. This clause
+ shall return a list of strings describing what the test case
+ tests.
+ </item>
+ <tag><em>execution clause</em></tag>
+ <item>One of the function clauses in a test case. This clause
+ implements the actual test case, i.e. calls the functions that
+ shall be tested and checks results. The clause shall crash if it
+ fails.
+ </item>
+ <tag><em>major log file</em></tag>
+ <item>This is the test suites log file.
+ </item>
+ <tag><em>Makefile.src</em></tag>
+ <item>This file is used by the test server framework to generate
+ a makefile for a datadir. It contains some special characters
+ which are replaced according to the platform currently tested.
+ </item>
+ <tag><em>minor log file</em></tag>
+ <item>This is a separate log file for each test case.
+ </item>
+ <tag><em>privdir</em></tag>
+ <item>Private directory for a test suite. This directory should
+ be used when the test suite needs to write to files.
+ </item>
+ <tag><em>skip case</em></tag>
+ <item>A test case which shall be skipped.
+ </item>
+ <tag><em>specification clause</em></tag>
+ <item>One of the function clauses in a test case. This clause
+ shall return an empty list, a test specification or
+ <c>{skip,Reason}</c>. If an empty list is returned, it means
+ that the test case shall be executed, and so it must also have
+ an execution clause. Note that the specification clause is
+ always executed on the controller node, i.e. not on the target
+ node.
+ </item>
+ <tag><em>test case</em></tag>
+ <item>A single test included in a test suite. Typically it tests
+ one function in a module or application. A test case is
+ implemented as a function in a test suite module. The function
+ can have three clauses, the documentation-, specification- and
+ execution clause.
+ </item>
+ <tag><em>test specification</em></tag>
+ <item>A specification of which test suites and test cases to
+ run. There can be test specifications on three different levels
+ in a test. The top level is a test specification file which
+ roughly specifies what to test for a whole application. Then
+ there is a test specification for each test suite returned from
+ the <c>all(suite)</c> function in the suite. And there can also
+ be a test specification returned from the specification clause
+ of a test case.
+ </item>
+ <tag><em>test specification file</em></tag>
+ <item>This is a text file containing the test specification for
+ an application. The file has the extension ".spec" or
+ ".spec.Platform", where Platform is e.g. "vxworks".
+ </item>
+ <tag><em>test suite</em></tag>
+ <item>An erlang module containing a collection of test cases for
+ a specific application or module.
+ </item>
+ <tag><em>topcase</em></tag>
+ <item>The first "command" in a test specification file. This
+ command contains the test specification, like this:
+ <c>{topcase,TestSpecification}</c></item>
+ </taglist>
+ </section>
+</chapter>
+
diff --git a/lib/test_server/doc/src/book.xml b/lib/test_server/doc/src/book.xml
new file mode 100644
index 0000000000..960ce48cf7
--- /dev/null
+++ b/lib/test_server/doc/src/book.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE book SYSTEM "book.dtd">
+
+<book xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header titlestyle="normal">
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date>2002-07-11</date>
+ <rev></rev>
+ <file>book.xml</file>
+ </header>
+ <insidecover>
+ </insidecover>
+ <pagetext>Test Server</pagetext>
+ <preamble>
+ <contents level="2"></contents>
+ </preamble>
+ <parts lift="no">
+ <xi:include href="part.xml"/>
+ </parts>
+ <applications>
+ <xi:include href="ref_man.xml"/>
+ </applications>
+ <releasenotes>
+ <xi:include href="notes.xml"/>
+ </releasenotes>
+ <listofterms></listofterms>
+ <index></index>
+</book>
+
diff --git a/lib/test_server/doc/src/example_chapter.xml b/lib/test_server/doc/src/example_chapter.xml
new file mode 100644
index 0000000000..8a06526528
--- /dev/null
+++ b/lib/test_server/doc/src/example_chapter.xml
@@ -0,0 +1,150 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Examples</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>example_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Test suite</title>
+ <code type="none">
+-module(my_SUITE).
+
+-export([all/1,
+ not_started/1, not_started_func1/1, not_started_func2/1,
+ start/1, stop/1,
+ func1/1, func2/1
+ ]).
+
+-export([init_per_testcase/2, end_per_testcase/2]).
+
+-include("test_server.hrl").
+
+-define(default_timeout, ?t:minutes(1)).
+
+init_per_testcase(_Case, Config) ->
+ ?line Dog=?t:timetrap(?default_timeout),
+ [{watchdog, Dog}|Config].
+end_per_testcase(_Case, Config) ->
+ Dog=?config(watchdog, Config),
+ ?t:timetrap_cancel(Dog),
+ ok.
+
+all(suite) ->
+ %% Test specification on test suite level
+ [not_started,
+ {conf, start, [func1, func2], stop}].
+
+not_started(suite) ->
+ %% Test specification on test case level
+ [not_started_func1, not_started_func2];
+not_started(doc) ->
+ ["Testing all functions when application is not started"].
+%% No execution clause unless the specification clause returns [].
+
+
+not_started_func1(suite) ->
+ [];
+not_started_func1(doc) ->
+ ["Testing function 1 when application is not started"].
+not_started_func1(Config) when list(Config) ->
+ ?line {error, not_started} = myapp:func1(dummy_ref,1),
+ ?line {error, not_started} = myapp:func1(dummy_ref,2),
+ ok.
+
+not_started_func2(suite) ->
+ [];
+not_started_func2(doc) ->
+ ["Testing function 2 when application is not started"].
+not_started_func2(Config) when list(Config) ->
+ ?line {error, not_started} = myapp:func2(dummy_ref,1),
+ ?line {error, not_started} = myapp:func2(dummy_ref,2),
+ ok.
+
+
+%% No specification clause needed for an init function in a conf case!!!
+start(doc) ->
+ ["Testing start of my application."];
+start(Config) when list(Config) ->
+ ?line Ref = myapp:start(),
+ case erlang:whereis(my_main_process) of
+ Pid when pid(Pid) ->
+ [{myapp_ref,Ref}|Config];
+ undefined ->
+ %% Since this is the init function in a conf case, the rest of the
+ %% cases in the conf case will be skipped if this case fails.
+ ?t:fail("my_main_process did not start")
+ end.
+
+func1(suite) ->
+ [];
+func1(doc) ->
+ ["Test that func1 returns ok when argument is 1 and error if argument is 2"];
+func1(Config) when list(Config) ->
+ ?line Ref = ?config(myapp_ref,Config),
+ ?line ok = myapp:func1(Ref,1),
+ ?line error = myapp:func1(Ref,2),
+ ok.
+
+func2(suite) ->
+ [];
+func2(doc) ->
+ ["Test that func1 returns ok when argument is 3 and error if argument is 4"];
+func2(Config) when list(Config) ->
+ ?line Ref = ?config(myapp_ref,Config),
+ ?line ok = myapp:func2(Ref,3),
+ ?line error = myapp:func2(Ref,4),
+ ok.
+
+%% No specification clause needed for a cleanup function in a conf case!!!
+stop(doc) ->
+ ["Testing termination of my application"];
+stop(Config) when list(Config) ->
+ ?line Ref = ?config(myapp_ref,Config),
+ ?line ok = myapp:stop(Ref),
+ case erlang:whereis(my_main_process) of
+ undefined ->
+ lists:keydelete(myapp_ref,1,Config);
+ Pid when pid(Pid) ->
+ ?t:fail("my_main_process did not stop")
+ end.
+ </code>
+ </section>
+
+ <section>
+ <title>Test specification file</title>
+ <p><em><c>myapp.spec:</c></em></p>
+ <code type="none">
+{topcase, {dir, "../myapp_test"}}. % Test specification on top level </code>
+ <p><em><c>myapp.spec.vxworks:</c></em></p>
+ <code type="none">
+{topcase, {dir, "../myapp_test"}}. % Test specification on top level
+{skip,{my_SUITE,func2,"Not applicable on VxWorks"}}. </code>
+ </section>
+</chapter>
+
+
diff --git a/lib/test_server/doc/src/fascicules.xml b/lib/test_server/doc/src/fascicules.xml
new file mode 100644
index 0000000000..0678195e07
--- /dev/null
+++ b/lib/test_server/doc/src/fascicules.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE fascicules SYSTEM "fascicules.dtd">
+
+<fascicules>
+ <fascicule file="part" href="part_frame.html" entry="no">
+ User's Guide
+ </fascicule>
+ <fascicule file="ref_man" href="ref_man_frame.html" entry="yes">
+ Reference Manual
+ </fascicule>
+ <fascicule file="part_notes" href="part_notes_frame.html" entry="no">
+ Release Notes
+ </fascicule>
+ <fascicule file="" href="../../../../doc/print.html" entry="no">
+ Off-Print
+ </fascicule>
+</fascicules>
+
diff --git a/lib/test_server/doc/src/make.dep b/lib/test_server/doc/src/make.dep
new file mode 100644
index 0000000000..ee9100bd08
--- /dev/null
+++ b/lib/test_server/doc/src/make.dep
@@ -0,0 +1,24 @@
+# ----------------------------------------------------
+# >>>> Do not edit this file <<<<
+# This file was automaticly generated by
+# /home/otp/bin/docdepend
+# ----------------------------------------------------
+
+
+# ----------------------------------------------------
+# TeX files that the DVI file depend on
+# ----------------------------------------------------
+
+book.dvi: basics_chapter.tex book.tex example_chapter.tex \
+ part.tex ref_man.tex run_test_chapter.tex \
+ test_server_app.tex test_server_ctrl.tex \
+ test_server.tex test_spec_chapter.tex \
+ write_framework_chapter.tex \
+ write_test_chapter.tex
+
+# ----------------------------------------------------
+# Source inlined when transforming from source to LaTeX
+# ----------------------------------------------------
+
+book.tex: ref_man.xml
+
diff --git a/lib/test_server/doc/src/notes.xml b/lib/test_server/doc/src/notes.xml
new file mode 100644
index 0000000000..a71c18b5b7
--- /dev/null
+++ b/lib/test_server/doc/src/notes.xml
@@ -0,0 +1,346 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2004</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>APPLICATION Release Notes</title>
+ <prepared>Peter Andersson</prepared>
+ <responsible>Peter Andersson</responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date>2007-11-30</date>
+ <rev>A</rev>
+ <file>notes.xml</file>
+ </header>
+
+<section><title>Test_Server 3.3.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ If the init_per_testcase/2 function fails, the test case
+ now gets marked and counted as auto skipped, not user
+ skipped (which would previously happen).</p>
+ <p>
+ Own Id: OTP-8289</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ The documentation is now built with open source tools
+ (xsltproc and fop) that exists on most platforms. One
+ visible change is that the frames are removed.</p>
+ <p>
+ Own Id: OTP-8201</p>
+ </item>
+ <item>
+ <p>
+ It is now possible to fail a test case from the
+ end_per_testcase/2 function, by returning {fail,Reason}.</p>
+ <p>
+ Own Id: OTP-8284</p>
+ </item>
+ <item>
+ <p>
+ It is now possible to fail a test case by having the
+ end_tc/3 framework function return {fail,Reason} for the
+ test case.</p>
+ <p>
+ Own Id: OTP-8285</p>
+ </item>
+ <item>
+ <p>
+ The test_server framework API (e.g. the end_tc/3
+ function) has been modified. See the test_server_ctrl
+ documentation for details.</p>
+ <p>
+ Own Id: OTP-8286 Aux Id: OTP-8285, OTP-8287 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.3.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ When running a suite starting with a test case group,
+ Test Server crashed if init_per_suite/1 exited or
+ returned skip. This has been fixed.</p>
+ <p>
+ Own Id: OTP-8105 Aux Id: OTP-8089 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Various updates and fixes in Common Test and Test Server.</p>
+ <p>
+ Own Id: OTP-8045 Aux Id: OTP-8089,OTP-8105,OTP-8163 </p>
+ </item>
+ <item>
+ <p>
+ Errors in coverage data collection and analysis were
+ difficult to detect. The logging has been improved so
+ that more information about e.g. imported and missing
+ modules is printed to the html log files.</p>
+ <p>
+ Own Id: OTP-8163 Aux Id: seq11374 </p>
+ </item>
+ <item>
+ <p>
+ The Common Test HTML overview pages have been improved.
+ It is now possible to see if a test case has been skipped
+ explicitly or because a configuration function has
+ failed. Also, the history page (all_runs.html) now has
+ scrolling text displaying the test names. The old format
+ (showing names as a truncated string) can still be
+ generated by means of the flag/option 'basic_html'.</p>
+ <p>
+ Own Id: OTP-8177</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.3.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Various corrections and improvements of Common Test and
+ Test Server.</p>
+ <p>
+ Own Id: OTP-7981</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.3.1</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Minor updates and corrections.</p>
+ <p>
+ Own Id: OTP-7897</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.3</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ The conf case in Test Server has been extended with
+ properties that make it possible to execute test cases in
+ parallel, in sequence and in shuffled order. It is now
+ also possible to repeat test cases according to different
+ criteria. The properties can be combined, making it
+ possible to e.g. repeat a conf case a certain number of
+ times and execute the test cases in different (random)
+ order every time. The properties are specified in a list
+ in the conf case definition: {conf, Properties, InitCase,
+ TestCases, EndCase}. The available properties are:
+ parallel, sequence, shuffle, repeat, repeat_until_all_ok,
+ repeat_until_any_ok, repeat_until_any_fail,
+ repeat_until_all_fail.</p>
+ <p>
+ Own Id: OTP-7511 Aux Id: OTP-7839 </p>
+ </item>
+ <item>
+ <p>The test server starts Cover on nodes of the same
+ version as the test server itself only.</p>
+ <p>
+ Own Id: OTP-7699</p>
+ </item>
+ <item>
+ <p>
+ The Erlang mode for Emacs has been updated with new and
+ modified skeletons for Common Test and TS. Syntax for
+ test case groups in Common Test (and conf cases with
+ properties in TS) has been added and a new minimal Common
+ Test suite skeleton has been introduced.</p>
+ <p>
+ Own Id: OTP-7856</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+<section><title>Test_Server 3.2.4.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ The step functionality in Common Test (based on
+ interaction with Debugger) was broken. This has been
+ fixed, and some new step features have also been added.
+ Please see the Common Test User's Guide for details.</p>
+ <p>
+ Own Id: OTP-7800 Aux Id: seq11106 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.2.4</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Miscellaneous updates.</p>
+ <p>
+ Own Id: OTP-7527</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.2.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ When a testcase terminated due to a timetrap, io sent to
+ the group leader from framework:end_tc/3 (using
+ ct:pal/2/3 or ct:log/2/3) would cause deadlock. This has
+ been fixed.</p>
+ <p>
+ Own Id: OTP-7447 Aux Id: seq11010 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Various updates and improvements, plus some minor bug
+ fixes, have been implemented in Common Test and Test
+ Server.</p>
+ <p>
+ Own Id: OTP-7112</p>
+ </item>
+ <item>
+ <p>
+ It is now possible, by means of the new function
+ ct:abort_current_testcase/1 or
+ test_server_ctrl:abort_current_testcase/1, to abort the
+ currently executing test case.</p>
+ <p>
+ Own Id: OTP-7518 Aux Id: OTP-7112 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.2.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p><c>erlang:system_info/1</c> now accepts the
+ <c>logical_processors</c>, and <c>debug_compiled</c>
+ arguments. For more info see the <c>erlang(3)</c>
+ documentation.</p> <p>The scale factor returned by
+ <c>test_server:timetrap_scale_factor/0</c> is now also
+ affected if the emulator uses a larger amount of
+ scheduler threads than the amount of logical processors
+ on the system. </p>
+ <p>
+ Own Id: OTP-7175</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.2.1</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ When init_per_suite or end_per_suite terminated due to
+ runtime failure, test_server failed to format the line
+ number information properly and crashed. This error has
+ now been fixed.</p>
+ <p>
+ Own Id: OTP-7091</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.2.0</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Test Server is a portable test server for automated
+ application testing. The server can run test suites on
+ local or remote targets and log progress and results to
+ HTML pages. The main purpose of Test Server is to act as
+ engine inside customized test tools. A callback interface
+ for such framework applications is provided.</p>
+ <p>
+ Own Id: OTP-6989</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+</chapter>
+
diff --git a/lib/test_server/doc/src/notes_history.xml b/lib/test_server/doc/src/notes_history.xml
new file mode 100644
index 0000000000..0392bd74a2
--- /dev/null
+++ b/lib/test_server/doc/src/notes_history.xml
@@ -0,0 +1,112 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server Release Notes History</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ </header>
+
+ <section>
+ <title>Test Server 3.1.1</title>
+
+ <section>
+ <title>Improvements and new features</title>
+ <list type="bulleted">
+ <item>
+ <p>Added functions <c>test_server:break/1</c> and
+ <c>test_server:continue/0</c> for semiautomatic testing.</p>
+ <p><c>test_server:timetrap/1</c> can now also take
+ <c>{hours,H} | {minutes,M} | {seconds,S}</c>.</p>
+ <p>Added function
+ <c>test_server_ctrl:multiply_timetraps/1</c>,
+ <c>test_server_ctrl:add_case/3</c>,
+ <c>test_server_ctrl:add_cases/2/3</c>.</p>
+ <p>Added test suite functions <c>init_per_suite/1</c> and
+ <c>end_per_suite/1</c>.</p>
+ <p><c>fin_per_testcase/2</c> is changed to
+ <c>end_per_testcase/2</c>. <c>fin_per_testcase</c> is kept
+ for backwards compatibility.</p>
+ <p>Added support for writing own test server frameworks.
+ Callback functions <c>init_tc/1</c>, <c>end_tc/3</c>,
+ <c>get_suite/2</c>, <c>report/2</c>, <c>warn/1</c>.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Test Server 3.1</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Added the options <c>cover</c> and <c>cover_details</c>
+ to <c>ts:run</c>. When one of these options is used,
+ the tested application will be cover compiled
+ before the test is run. The cover compiled code will also
+ be loaded on all slave or peer nodes started with
+ <c>test_server:start_node</c>. When the test is completed
+ coverage data from all nodes is collected and merged, and
+ presented in the coverage log to which there will be a link
+ from the test suite result page (i.e. the one with the
+ heading "Test suite ... results").</p>
+ <p>The <c>cover_details</c> option will do
+ <c>cover:analyse_to_file</c> for each cover compiled module,
+ while the <c>cover</c> option only will produce a list of
+ modules and the number of covered/uncovered lines in each
+ module.</p>
+ <p>To make it possible to run all test from a script (like in
+ the OTP daily builds), the following is added:
+ <c>ts:run([all_tests | Options])</c>.</p>
+ <p>This means that e.g. the following is possible:
+ <c>erl -s ts run all_tests batch cover</c>.</p>
+ <p>Note that it is also possible to run tests with cover even
+ if you don't use <c>ts</c>.
+ See <c>test_server_ctrl:cover/2/3</c>.</p>
+ <p>Own Id: OTP-4703</p>
+ </item>
+ <item>
+ <p>Removed module <c>ts_save.erl</c> and function
+ <c>ts:save/0/1</c><em>(incompatible)</em>.</p>
+ <p>Added config variable <c>ipv6_hosts</c> to
+ <c>ts:install/1</c> and test spec file.</p>
+ <p>No longer removing duplicates of test cases from test spec
+ <em>(incompatible)</em>.</p>
+ <p>Added function <c>test_server:run_on_shielded_node/2</c>.</p>
+ <p>Creation of html files for test suite source does no longer
+ crash if suite contains more than 9999 lines of code.</p>
+ <p>Added functionality for cross cover compilation,
+ i.e. collection of cover data from all tests.</p>
+ <p>Multiplying timetrap times with 10 when running with cover.</p>
+ <p>Added <c>ts:r/3</c> for running tests with cover.</p>
+ <p>*** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>Own Id: OTP-5040</p>
+ </item>
+ </list>
+ </section>
+ </section>
+</chapter>
+
diff --git a/lib/test_server/doc/src/part.xml b/lib/test_server/doc/src/part.xml
new file mode 100644
index 0000000000..fdcd3d274e
--- /dev/null
+++ b/lib/test_server/doc/src/part.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE part SYSTEM "part.dtd">
+
+<part xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server User's Guide</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date>2002-07-11</date>
+ <rev></rev>
+ </header>
+ <description>
+ <p><em>Test Server</em> is a portable test server for
+ automated application testing. The server can run test suites
+ on local or remote targets and log progress and results to HTML
+ pages. The main purpose of Test Server is to act as engine
+ inside customized test tools. A callback interface for
+ such framework applications is provided.</p>
+ </description>
+ <xi:include href="basics_chapter.xml"/>
+ <xi:include href="test_spec_chapter.xml"/>
+ <xi:include href="write_test_chapter.xml"/>
+ <xi:include href="run_test_chapter.xml"/>
+ <xi:include href="write_framework_chapter.xml"/>
+ <xi:include href="example_chapter.xml"/>
+</part>
+
diff --git a/lib/test_server/doc/src/part_notes.xml b/lib/test_server/doc/src/part_notes.xml
new file mode 100644
index 0000000000..2347f64ca1
--- /dev/null
+++ b/lib/test_server/doc/src/part_notes.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE part SYSTEM "part.dtd">
+
+<part xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header>
+ <copyright>
+ <year>2004</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server Release Notes</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ </header>
+ <description>
+ <p>The <em>Test Server</em> is a portable test server for
+ application testing. The test server can run automatic test suites
+ on local or remote target and log progress and results to HTML
+ pages. It also provides some support for test suite authors.</p>
+ <p>For information about older versions, see
+ <url href="part_notes_history_frame.html">Release Notes History</url>.</p>
+ </description>
+ <xi:include href="notes.xml"/>
+</part>
+
diff --git a/lib/test_server/doc/src/part_notes_history.xml b/lib/test_server/doc/src/part_notes_history.xml
new file mode 100644
index 0000000000..556d172755
--- /dev/null
+++ b/lib/test_server/doc/src/part_notes_history.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE part SYSTEM "part.dtd">
+
+<part>
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server Release Notes History</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ </header>
+ <description>
+ <p>The <em>Test Server</em> is a portable test server for
+ application testing. The test server can run automatic test suites
+ on local or remote target and log progress and results to HTML
+ pages. It also provides some support for test suite authors.</p>
+ </description>
+ <include file="notes_history"></include>
+</part>
+
diff --git a/lib/test_server/doc/src/ref_man.xml b/lib/test_server/doc/src/ref_man.xml
new file mode 100644
index 0000000000..17d6093dc0
--- /dev/null
+++ b/lib/test_server/doc/src/ref_man.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE application SYSTEM "application.dtd">
+
+<application xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server Reference Manual</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>ref_man.xml</file>
+ </header>
+ <description>
+ <p><em>Test Server</em> is a portable test server for
+ automated application testing. The server can run test suites
+ on local or remote targets and log progress and results to HTML
+ pages. The main purpose of Test Server is to act as engine
+ inside customized test tools. A callback interface for
+ such framework applications is provided.</p>
+ </description>
+ <xi:include href="test_server_app.xml"/>
+ <xi:include href="test_server_ctrl.xml"/>
+ <xi:include href="test_server.xml"/>
+</application>
+
diff --git a/lib/test_server/doc/src/run_test_chapter.xml b/lib/test_server/doc/src/run_test_chapter.xml
new file mode 100644
index 0000000000..36bd41da1f
--- /dev/null
+++ b/lib/test_server/doc/src/run_test_chapter.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Running Test Suites</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>run_test_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Using the test server controller</title>
+ <p>The test server controller provides a low level interface to
+ all the Test Server functionality. It is possible to use this
+ interface directly, but it is recommended to use a framework
+ such as <em>Common Test</em> instead. If no existing framework
+ suits your needs, you could of course build your own
+ on top of the test server controller. Some information about how
+ to do this can be found in the section named "Writing your own
+ test server framework" in the Test Server User's Guide.
+ </p>
+ <p>For information about using the controller directly, please see
+ all available functions in the reference manual for
+ <c>test_server_ctrl</c>.
+ </p>
+ </section>
+</chapter>
+
diff --git a/lib/test_server/doc/src/test_server.xml b/lib/test_server/doc/src/test_server.xml
new file mode 100644
index 0000000000..6e75425862
--- /dev/null
+++ b/lib/test_server/doc/src/test_server.xml
@@ -0,0 +1,840 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2007</year>
+ <year>2008</year>
+ <holder>Ericsson AB, All Rights Reserved</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson AB.
+ </legalnotice>
+
+ <title>test_server</title>
+ <prepared>Siri Hansen</prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
+ <file>test_server_ref.sgml</file>
+ </header>
+ <module>test_server</module>
+ <modulesummary>This module provides support for test suite authors.</modulesummary>
+ <description>
+ <p>The <c>test_server</c> module aids the test suite author by providing
+ various support functions. The supported functionality includes:
+ </p>
+ <list type="bulleted">
+ <item>Logging and timestamping
+ </item>
+ <item>Capturing output to stdout
+ </item>
+ <item>Retrieving and flushing the message queue of a process
+ </item>
+ <item>Watchdog timers, process sleep, time measurement and unit
+ conversion
+ </item>
+ <item>Private scratch directory for all test suites
+ </item>
+ <item>Start and stop of slave- or peer nodes</item>
+ </list>
+ <p>For more information on how to write test cases and for
+ examples, please see the Test Server User's Guide.
+ </p>
+ </description>
+
+ <section>
+ <title>TEST SUITE SUPPORT FUNCTIONS</title>
+ <p>The following functions are supposed to be used inside a test
+ suite.
+ </p>
+ </section>
+ <funcs>
+ <func>
+ <name>os_type() -> OSType</name>
+ <fsummary>Returns the OS type of the target node</fsummary>
+ <type>
+ <v>OSType = term()</v>
+ <d>This is the same as returned from <c>os:type/0</c></d>
+ </type>
+ <desc>
+ <p>This function can be called on controller or target node, and
+ it will always return the OS type of the target node.</p>
+ </desc>
+ </func>
+ <func>
+ <name>fail()</name>
+ <name>fail(Reason)</name>
+ <fsummary>Makes the test case fail.</fsummary>
+ <type>
+ <v>Reason = term()</v>
+ <d>The reason why the test case failed.</d>
+ </type>
+ <desc>
+ <p>This will make the test suite fail with a given reason, or
+ with <c>suite_failed</c> if no reason was given. Use this
+ function if you want to terminate a test case, as this will
+ make it easier to read the log- and HTML files. <c>Reason</c>
+ will appear in the comment field in the HTML log.</p>
+ </desc>
+ </func>
+ <func>
+ <name>timetrap(Timeout) -> Handle</name>
+ <fsummary></fsummary>
+ <type>
+ <v>Timeout = integer() | {hours,H} | {minutes,M} | {seconds,S}</v>
+ <v>H = M = S = integer()</v>
+ <v>Pid = pid()</v>
+ <d>The process that is to be timetrapped (<c>self()</c> by default)</d>
+ </type>
+ <desc>
+ <p>Sets up a time trap for the current process. An expired
+ timetrap kills the process with reason
+ <c>timetrap_timeout</c>. The returned handle is to be given
+ as argument to <c>timetrap_cancel</c> before the timetrap
+ expires. If <c>Timeout</c> is an integer, it is expected to
+ be milliseconds.</p>
+ <note>
+ <p>If the current process is trapping exits, it will not be killed
+ by the exit signal with reason <c>timetrap_timeout</c>.
+ If this happens, the process will be sent an exit signal
+ with reason <c>kill</c> 10 seconds later which will kill the
+ process. Information about the timetrap timeout will in
+ this case not be found in the test logs. However, the
+ error_logger will be sent a warning.</p>
+ </note>
+ </desc>
+ </func>
+ <func>
+ <name>timetrap_cancel(Handle) -> ok</name>
+ <fsummary>Cancels a timetrap.</fsummary>
+ <type>
+ <v>Handle = term()</v>
+ <d>Handle returned from <c>timetrap</c></d>
+ </type>
+ <desc>
+ <p>This function cancels a timetrap. This must be done before
+ the timetrap expires.</p>
+ </desc>
+ </func>
+ <func>
+ <name>timetrap_scale_factor() -> ScaleFactor</name>
+ <fsummary>Returns the scale factor for timeouts.</fsummary>
+ <type>
+ <v>ScaleFactor = integer()</v>
+ </type>
+ <desc>
+ <p>This function returns the scale factor by which all timetraps
+ are scaled. It is normally 1, but can be greater than 1 if
+ the test_server is running <c>cover</c>, using a larger amount of
+ scheduler threads than the amount of logical processors on the
+ system, running under purify, valgrind or in a debug-compiled
+ emulator. The scale factor can be used if you need to scale your
+ own timeouts in test cases with the same factor as the test_server
+ uses.</p>
+ </desc>
+ </func>
+ <func>
+ <name>sleep(MSecs) -> ok</name>
+ <fsummary>Suspends the calling process for a specified time.</fsummary>
+ <type>
+ <v>MSecs = integer() | float() | infinity</v>
+ <d>The number of milliseconds to sleep</d>
+ </type>
+ <desc>
+ <p>This function suspends the calling process for at least the
+ supplied number of milliseconds. There are two major reasons
+ why you should use this function instead of
+ <c>timer:sleep</c>, the first being that the module
+ <c>timer</c> may be unavailable at the time the test suite is
+ run, and the second that it also accepts floating point
+ numbers.</p>
+ </desc>
+ </func>
+ <func>
+ <name>hours(N) -> MSecs</name>
+ <name>minutes(N) -> MSecs</name>
+ <name>seconds(N) -> MSecs</name>
+ <fsummary></fsummary>
+ <type>
+ <v>N = integer()</v>
+ <d>Value to convert to milliseconds.</d>
+ </type>
+ <desc>
+ <p>These functions convert <c>N</c> number of hours, minutes
+ or seconds into milliseconds.
+ </p>
+ <p>Use this function when you want to
+ <c>test_server:sleep/1</c> for a number of seconds, minutes or
+ hours(!).</p>
+ </desc>
+ </func>
+ <func>
+ <name>format(Format) -> ok</name>
+ <name>format(Format, Args)</name>
+ <name>format(Pri,Format)</name>
+ <name>format(Pri, Format, Args)</name>
+ <fsummary></fsummary>
+ <type>
+ <v>Format = string()</v>
+ <d>Format as described for <c>io:format</c>.</d>
+ <v>Args = list()</v>
+ <d>List of arguments to format.</d>
+ </type>
+ <desc>
+ <p>Formats output just like <c>io:format</c> but sends the
+ formatted string to a logfile. If the urgency value,
+ <c>Pri</c>, is lower than some threshold value, it will also
+ be written to the test person's console. Default urgency is
+ 50, default threshold for display on the console is 1.
+ </p>
+ <p>Typically, the test person doesn't want to see everything a
+ test suite outputs, but is merely interested in if the test
+ cases succeeded or not, which the test server tells him. If he
+ would like to see more, he could manually change the threshold
+ values by using the <c>test_server_ctrl:set_levels/3</c>
+ function.</p>
+ </desc>
+ </func>
+ <func>
+ <name>capture_start() -> ok</name>
+ <name>capture_stop() -> ok</name>
+ <name>capture_get() -> list()</name>
+ <fsummary>Captures all output to stdout for a process.</fsummary>
+ <desc>
+ <p>These functions make it possible to capture all output to
+ stdout from a process started by the test suite. The list of
+ characters captured can be purged by using <c>capture_get</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>messages_get() -> list()</name>
+ <fsummary>Empty the message queue.</fsummary>
+ <desc>
+ <p>This function will empty and return all the messages
+ currently in the calling process' message queue.</p>
+ </desc>
+ </func>
+ <func>
+ <name>timecall(M, F, A) -> {Time, Value}</name>
+ <fsummary>Measures the time needed to call a function.</fsummary>
+ <type>
+ <v>M = atom()</v>
+ <d>The name of the module where the function resides.</d>
+ <v>F = atom()</v>
+ <d>The name of the function to call in the module.</d>
+ <v>A = list()</v>
+ <d>The arguments to supply the called function.</d>
+ <v>Time = integer()</v>
+ <d>The number of seconds it took to call the function.</d>
+ <v>Value = term()</v>
+ <d>Value returned from the called function.</d>
+ </type>
+ <desc>
+ <p>This function measures the time (in seconds) it takes to
+ call a certain function. The function call is <em>not</em>
+ caught within a catch.</p>
+ </desc>
+ </func>
+ <func>
+ <name>do_times(N, M, F, A) -> ok</name>
+ <name>do_times(N, Fun)</name>
+ <fsummary>Calls MFA or Fun N times.</fsummary>
+ <type>
+ <v>N = integer()</v>
+ <d>Number of times to call MFA.</d>
+ <v>M = atom()</v>
+ <d>Module name where the function resides.</d>
+ <v>F = atom()</v>
+ <d>Function name to call.</d>
+ <v>A = list()</v>
+ <d>Arguments to M:F.</d>
+ </type>
+ <desc>
+ <p>Calls MFA or Fun N times. Useful for extensive testing of a
+ sensitive function.</p>
+ </desc>
+ </func>
+ <func>
+ <name>m_out_of_n(M, N, Fun) -> ok | exit({m_out_of_n_failed, {R,left_to_do}}</name>
+ <fsummary>Fault tolerant <c>do_times</c>.</fsummary>
+ <type>
+ <v>N = integer()</v>
+ <d>Number of times to call the Fun.</d>
+ <v>M = integer()</v>
+ <d>Number of times to require a successful return.</d>
+ </type>
+ <desc>
+ <p>Repeatedly evaluates the given function until it succeeds
+ (doesn't crash) M times. If, after N times, M successful
+ attempts have not been accomplished, the process crashes with
+ reason {m_out_of_n_failed, {R,left_to_do}}, where R indicates
+ how many cases were still to be successfully completed.
+ </p>
+ <p>For example:
+ </p>
+ <p><c>m_out_of_n(1,4,fun() -> tricky_test_case() end)</c> <br></br>
+Tries to run tricky_test_case() up to 4 times, and is
+ happy if it succeeds once.
+ </p>
+ <p><c>m_out_of_n(7,8,fun() -> clock_sanity_check() end)</c> <br></br>
+Tries running clock_sanity_check() up to 8 times, and
+ allows the function to fail once. This might be useful if
+ clock_sanity_check/0 is known to fail if the clock crosses an
+ hour boundary during the test (and the up to 8 test runs could
+ never cross 2 boundaries)</p>
+ </desc>
+ </func>
+ <func>
+ <name>call_crash(M, F, A) -> Result</name>
+ <name>call_crash(Time, M, F, A) -> Result</name>
+ <name>call_crash(Time, Crash, M, F, A) -> Result</name>
+ <fsummary>Calls MFA and succeeds if it crashes.</fsummary>
+ <type>
+ <v>Result = ok | exit(call_crash_timeout) | exit({wrong_crash_reason, Reason})</v>
+ <v>Crash = term()</v>
+ <d>Crash return from the function.</d>
+ <v>Time = integer()</v>
+ <d>Timeout in milliseconds.</d>
+ <v>M = atom()</v>
+ <d>Module name where the function resides.</d>
+ <v>F = atom()</v>
+ <d>Function name to call.</d>
+ <v>A = list()</v>
+ <d>Arguments to M:F.</d>
+ </type>
+ <desc>
+ <p>Spawns a new process that calls MFA. The call is considered
+        successful if the call crashes with the given reason
+ (<c>Crash</c>) or any reason if not specified. The call must
+ terminate within the given time (default <c>infinity</c>), or
+ it is considered a failure.</p>
+ </desc>
+ </func>
+ <func>
+ <name>temp_name(Stem) -> Name</name>
+ <fsummary>Returns a unique filename.</fsummary>
+ <type>
+ <v>Stem = string()</v>
+ </type>
+ <desc>
+ <p>Returns a unique filename starting with <c>Stem</c> with
+ enough extra characters appended to make up a unique
+ filename. The filename returned is guaranteed not to exist in
+ the filesystem at the time of the call.</p>
+ </desc>
+ </func>
+ <func>
+ <name>break(Comment) -> ok</name>
+ <fsummary>Cancel all timetraps and wait for call to continue/0.</fsummary>
+ <type>
+ <v>Comment = string()</v>
+ </type>
+ <desc>
+ <p><c>Comment</c> is a string which will be written in
+ the shell, e.g. explaining what to do.</p>
+ <p>This function will cancel all timetraps and pause the
+ execution of the test case until the user executes the
+ <c>continue/0</c> function. It gives the user the opportunity
+ to interact with the erlang node running the tests, e.g. for
+ debugging purposes or for manually executing a part of the
+ test case.</p>
+ <p>When the <c>break/1</c> function is called, the shell will
+ look something like this:</p>
+ <code type="none"><![CDATA[
+ --- SEMIAUTOMATIC TESTING ---
+ The test case executes on process <0.51.0>
+
+
+ "Here is a comment, it could e.g. instruct to pull out a card"
+
+
+ -----------------------------
+
+ Continue with --> test_server:continue(). ]]></code>
+ <p>The user can now interact with the erlang node, and when
+ ready call <c>test_server:continue().</c></p>
+ <p>Note that this function can not be used if the test is
+ executed with <c>ts:run/0/1/2/3/4</c> in <c>batch</c> mode.</p>
+ </desc>
+ </func>
+ <func>
+ <name>continue() -> ok</name>
+ <fsummary>Continue after break/1.</fsummary>
+ <desc>
+ <p>This function must be called in order to continue after a
+ test case has called <c>break/1</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>run_on_shielded_node(Fun, CArgs) -> term()</name>
+      <fsummary>Execute a function on a shielded node.</fsummary>
+ <type>
+ <v>Fun = function() (arity 0)</v>
+ <d>Function to execute on the shielded node.</d>
+        <v>CArgs = [string()]</v>
+ <d>Extra command line arguments to use when starting the shielded node.</d>
+ </type>
+ <desc>
+ <p><c>Fun</c> is executed in a process on a temporarily created
+ hidden node with a proxy for communication with the test server
+ node. The node is called a shielded node (should have been called
+ a shield node). If <c>Fun</c> is successfully executed, the result
+ is returned. A peer node (see <c>start_node/3</c>) started from
+ the shielded node will be shielded from test server node, i.e.
+ they will not be aware of each other. This is useful when you want
+ to start nodes from earlier OTP releases than the OTP release of
+ the test server node.</p>
+ <p>Nodes from an earlier OTP release can normally not be started
+ if the test server hasn't been started in compatibility mode
+ (see the <c>+R</c> flag in the <c>erl(1)</c> documentation) of
+ an earlier release. If a shielded node is started in compatibility
+ mode of an earlier OTP release than the OTP release of the test
+ server node, the shielded node can start nodes of an earlier OTP
+ release.</p>
+ <note>
+ <p>You <em>must</em> make sure that nodes started by the shielded
+ node never communicate directly with the test server node.</p>
+ </note>
+ <note>
+ <p>Slave nodes always communicate with the test server node;
+ therefore, <em>never</em> start <em>slave nodes</em> from the
+ shielded node, <em>always</em> start <em>peer nodes</em>.</p>
+ </note>
+ </desc>
+ </func>
+ <func>
+ <name>start_node(Name, Type, Options) -> {ok, Node} | {error, Reason}</name>
+ <fsummary>Start a node.</fsummary>
+ <type>
+ <v>Name = atom() | string()</v>
+        <d>Name of the slave node to start (as given to -sname or -name)</d>
+ <v>Type = slave | peer</v>
+ <d>The type of node to start.</d>
+        <v>Options = [{atom(), term()}]</v>
+ <d>Tuplelist of options</d>
+ </type>
+ <desc>
+        <p>This function starts a node, possibly on a remote machine,
+ and guarantees cross architecture transparency. Type is set to
+ either <c>slave</c> or <c>peer</c>.
+ </p>
+ <p><c>slave</c> means that the new node will have a master,
+ i.e. the slave node will terminate if the master terminates,
+ TTY output produced on the slave will be sent back to the
+ master node and file I/O is done via the master. The master is
+ normally the target node unless the target is itself a slave.
+ </p>
+ <p><c>peer</c> means that the new node is an independent node
+ with no master.
+ </p>
+ <p><c>Options</c> is a tuplelist which can contain one or more
+ of
+ </p>
+ <taglist>
+ <tag><c>{remote, true}</c></tag>
+ <item>Start the node on a remote host. If not specified, the
+ node will be started on the local host (with some
+ exceptions, as for the case of VxWorks, where
+ all nodes are started on a remote host). Test cases that
+ require a remote host will fail with a reasonable comment if
+ no remote hosts are available at the time they are run.
+ </item>
+ <tag><c>{args, Arguments}</c></tag>
+ <item>Arguments passed directly to the node. This is
+ typically a string appended to the command line.
+ </item>
+ <tag><c>{wait, false}</c></tag>
+ <item>Don't wait until the node is up. By default, this
+ function does not return until the node is up and running,
+ but this option makes it return as soon as the node start
+          command is given.
+ <br></br>
+Only valid for peer nodes
+ </item>
+ <tag><c>{fail_on_error, false}</c></tag>
+ <item>Returns <c>{error, Reason}</c> rather than failing the
+ test case.
+ <br></br>
+Only valid for peer nodes. Note that slave nodes always
+ act as if they had <c>fail_on_error=false</c></item>
+ <tag><c>{erl, ReleaseList}</c></tag>
+ <item>Use an Erlang emulator determined by ReleaseList when
+ starting nodes, instead of the same emulator as the test
+ server is running. ReleaseList is a list of specifiers,
+ where a specifier is either {release, Rel}, {prog, Prog}, or
+ 'this'. Rel is either the name of a release, e.g., "r12b_patched"
+ or 'latest'. 'this' means using the same emulator as the test
+ server. Prog is the name of an emulator executable. If the
+ list has more than one element, one of them is picked
+ randomly. (Only works on Solaris and Linux, and the test server
+ gives warnings when it notices that nodes are not of the same
+ version as itself.)
+ <br></br>
+ <br></br>
+
+ When specifying this option to run a previous release, use
+ <c>is_release_available/1</c> function to test if the given
+ release is available and skip the test case if not.
+ <br></br>
+ <br></br>
+
+ In order to avoid compatibility problems (may not appear right
+ away), use a shielded node (see <c>run_on_shielded_node/2</c>)
+ when starting nodes from different OTP releases than the test
+ server.
+ </item>
+ <tag><c>{cleanup, false}</c></tag>
+ <item>Tells the test server not to kill this node if it is
+ still alive after the test case is completed. This is useful
+ if the same node is to be used by a group of test cases.
+ </item>
+ <tag><c>{env, Env}</c></tag>
+ <item><c>Env</c> should be a list of tuples <c>{Name, Val}</c>,
+ where <c>Name</c> is the name of an environment variable, and
+ <c>Val</c> is the value it is to have in the started node.
+ Both <c>Name</c> and <c>Val</c> must be strings. The one
+ exception is <c>Val</c> being the atom <c>false</c> (in
+ analogy with <c>os:getenv/1</c>), which removes the
+ environment variable. Only valid for peer nodes. Not
+ available on VxWorks.</item>
+ </taglist>
+ </desc>
+ </func>
+ <func>
+ <name>stop_node(NodeName) -> bool()</name>
+ <fsummary>Stops a node</fsummary>
+ <type>
+ <v>NodeName = term()</v>
+ <d>Name of the node to stop</d>
+ </type>
+ <desc>
+        <p>This function stops a node previously started with
+ <c>start_node/3</c>. Use this function to stop any node you
+ start, or the test server will produce a warning message in
+ the test logs, and kill the nodes automatically unless it was
+ started with the <c>{cleanup, false}</c> option.</p>
+ </desc>
+ </func>
+ <func>
+ <name>is_commercial() -> bool()</name>
+ <fsummary>Tests whether the emulator is commercially supported</fsummary>
+ <desc>
+        <p>This function tests whether the emulator is a commercially
+        supported emulator. The tests for a commercially supported emulator could be more
+ stringent (for instance, a commercial release should always contain
+ documentation for all applications).</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>is_release_available(Release) -> bool()</name>
+ <fsummary>Tests whether a release is available</fsummary>
+ <type>
+ <v>Release = string() | atom()</v>
+ <d>Release to test for</d>
+ </type>
+ <desc>
+        <p>This function tests whether the release given by
+ <c>Release</c> (for instance, "r12b_patched") is available
+ on the computer that the test_server controller is running on.
+ Typically, you should skip the test case if not.</p>
+ <p>Caution: This function may not be called from the <c>suite</c>
+ clause of a test case, as the test_server will deadlock.</p>
+ </desc>
+ </func>
+ <func>
+ <name>is_native(Mod) -> bool()</name>
+ <fsummary>Checks whether the module is natively compiled or not</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <d>A module name</d>
+ </type>
+ <desc>
+ <p>Checks whether the module is natively compiled or not</p>
+ </desc>
+ </func>
+ <func>
+ <name>app_test(App) -> ok | test_server:fail()</name>
+ <name>app_test(App,Mode)</name>
+      <fsummary>Checks an application's .app file for obvious errors</fsummary>
+ <type>
+ <v>App = term()</v>
+ <d>The name of the application to test</d>
+ <v>Mode = pedantic | tolerant</v>
+ <d>Default is pedantic</d>
+ </type>
+ <desc>
+        <p>Checks an application's .app file for obvious errors.
+ The following is checked:
+ </p>
+ <list type="bulleted">
+ <item>required fields
+ </item>
+          <item>that all modules specified actually exist
+ </item>
+          <item>that all required applications exist
+ </item>
+ <item>that no module included in the application has export_all
+ </item>
+          <item>that all modules in the ebin/ dir are included (If
+          <c>Mode==tolerant</c> this only produces a warning, as all
+          modules do not have to be included)</item>
+ </list>
+ </desc>
+ </func>
+ <func>
+ <name>comment(Comment) -> ok</name>
+ <fsummary>Print a comment on the HTML result page</fsummary>
+ <type>
+ <v>Comment = string()</v>
+ </type>
+ <desc>
+ <p>The given String will occur in the comment field of the
+ table on the HTML result page. If called several times, only
+ the last comment is printed. comment/1 is also overwritten by
+ the return value {comment,Comment} from a test case or by
+ fail/1 (which prints Reason as a comment).</p>
+ </desc>
+ </func>
+ </funcs>
+
+ <section>
+ <title>TEST SUITE EXPORTS</title>
+ <p>The following functions must be exported from a test suite
+ module.
+ </p>
+ </section>
+ <funcs>
+ <func>
+ <name>all(suite) -> TestSpec | {skip, Comment}</name>
+ <fsummary>Returns the module's test specification</fsummary>
+ <type>
+ <v>TestSpec = list()</v>
+ <v>Comment = string()</v>
+ <d>This comment will be printed on the HTML result page</d>
+ </type>
+ <desc>
+ <p>This function must return the test specification for the
+ test suite module. The syntax of a test specification is
+ described in the Test Server User's Guide.</p>
+ </desc>
+ </func>
+ <func>
+ <name>init_per_suite(Config0) -> Config1 | {skip, Comment}</name>
+ <fsummary>Test suite initiation</fsummary>
+ <type>
+ <v>Config0 = Config1 = [tuple()]</v>
+ <v>Comment = string()</v>
+ <d>Describes why the suite is skipped</d>
+ </type>
+ <desc>
+ <p>This function is called before all other test cases in the
+ suite. <c>Config</c> is the configuration which can be modified
+ here. Whatever is returned from this function is given as
+ <c>Config</c> to the test cases.
+ </p>
+ <p>If this function fails, all test cases in the suite will be
+ skipped.</p>
+ </desc>
+ </func>
+ <func>
+ <name>end_per_suite(Config) -> void()</name>
+ <fsummary>Test suite finalization</fsummary>
+ <type>
+ <v>Config = [tuple()]</v>
+ </type>
+ <desc>
+ <p>This function is called after the last test case in the
+ suite, and can be used to clean up whatever the test cases
+ have done. The return value is ignored.</p>
+ </desc>
+ </func>
+ <func>
+ <name>init_per_testcase(Case, Config0) -> Config1 | {skip, Comment}</name>
+ <fsummary>Test case initiation</fsummary>
+ <type>
+ <v>Case = atom()</v>
+ <v>Config0 = Config1 = [tuple()]</v>
+ <v>Comment = string()</v>
+ <d>Describes why the test case is skipped</d>
+ </type>
+ <desc>
+ <p>This function is called before each test case. The
+ <c>Case</c> argument is the name of the test case, and
+ <c>Config</c> is the configuration which can be modified
+ here. Whatever is returned from this function is given as
+ <c>Config</c> to the test case.</p>
+ </desc>
+ </func>
+ <func>
+ <name>end_per_testcase(Case, Config) -> void()</name>
+ <fsummary>Test case finalization</fsummary>
+ <type>
+ <v>Case = atom()</v>
+ <v>Config = [tuple()]</v>
+ </type>
+ <desc>
+ <p>This function is called after each test case, and can be
+ used to clean up whatever the test case has done. The return
+ value is ignored.</p>
+ </desc>
+ </func>
+ <func>
+      <name>Case(doc) -> [Description]</name>
+ <name>Case(suite) -> [] | TestSpec | {skip, Comment}</name>
+ <name>Case(Config) -> {skip, Comment} | {comment, Comment} | Ok</name>
+ <fsummary>A test case</fsummary>
+ <type>
+ <v>Description = string()</v>
+ <d>Short description of the test case</d>
+ <v>TestSpec = list()</v>
+ <v>Comment = string()</v>
+ <d>This comment will be printed on the HTML result page</d>
+ <v>Ok = term()</v>
+ <v>Config = [tuple()]</v>
+ <d>Elements from the Config parameter can be read with the ?config macro, see section about test suite support macros</d>
+ </type>
+ <desc>
+ <p>The <em>documentation clause</em> (argument <c>doc</c>) can
+ be used for automatic generation of test documentation or test
+ descriptions.
+ </p>
+        <p>The <em>specification clause</em> (argument <c>suite</c>)
+ shall return an empty list, the test specification for the
+ test case or <c>{skip,Comment}</c>. The syntax of a test
+ specification is described in the Test Server User's Guide.
+ </p>
+ <p><em>Note that the specification clause always is executed on the controller host.</em></p>
+ <p>The <em>execution clause</em> (argument <c>Config</c>) is
+ only called if the specification clause returns an empty list.
+ The execution clause is the real test case. Here you must call
+ the functions you want to test, and do whatever you need to
+ check the result. If something fails, make sure the process
+ crashes or call <c>test_server:fail/0/1</c> (which also will
+ cause the process to crash).
+ </p>
+ <p>You can return <c>{skip,Comment}</c> if you decide not to
+ run the test case after all, e.g. if it is not applicable on
+ this platform.
+ </p>
+ <p>You can return <c>{comment,Comment}</c> if you wish to
+ print some information in the 'Comment' field on the HTML
+ result page.
+ </p>
+ <p>If the execution clause returns anything else, it is
+ considered a success, unless it is <c>{'EXIT',Reason}</c> or
+ <c>{'EXIT',Pid,Reason}</c> which can't be distinguished from a
+ crash, and thus will be considered a failure.
+ </p>
+ <p>A <em>conf test case</em> is a group of test cases with an
+ init and a cleanup function. The init and cleanup functions
+ are also test cases, but they have special rules:</p>
+ <list type="bulleted">
+ <item>They do not need a specification clause.</item>
+ <item>They must always have the execution clause.</item>
+ <item>They must return the <c>Config</c> parameter, a modified
+ version of it or <c>{skip,Comment}</c> from the execution clause.</item>
+ <item>The cleanup function may also return a tuple
+ <c>{return_group_result,Status}</c>, which is used to return the
+ status of the conf case to Test Server and/or to a conf case on a
+ higher level. (<c>Status = ok | skipped | failed</c>).</item>
+ <item><c>init_per_testcase</c> and <c>end_per_testcase</c> are
+ not called before and after these functions.</item>
+ </list>
+ </desc>
+ </func>
+ </funcs>
+
+ <section>
+ <title>TEST SUITE LINE NUMBERS</title>
+ <p>If a test case fails, the test server can report the exact line
+ number at which it failed. There are two ways of doing this,
+ either by using the <c>line</c> macro or by using the
+ <c>test_server_line</c> parse transform.
+ </p>
+ <p>The <c>line</c> macro is described under TEST SUITE SUPPORT
+ MACROS below. The <c>line</c> macro will only report the last line
+ executed when a test case failed.
+ </p>
+ <p>The <c>test_server_line</c> parse transform is activated by
+ including the headerfile <c>test_server_line.hrl</c> in the test
+ suite. When doing this, it is important that the
+ <c>test_server_line</c> module is in the code path of the erlang
+ node compiling the test suite. The parse transform will report a
+ history of a maximum of 10 lines when a test case
+ fails. Consecutive lines in the same function are not shown.
+ </p>
+ <p>The attribute <c>-no_lines(FuncList).</c> can be used in the
+ test suite to exclude specific functions from the parse
+ transform. This is necessary e.g. for functions that are executed
+ on old (i.e. &lt;R10B) OTP releases. <c>FuncList = [{Func,Arity}]</c>.
+ </p>
+    <p>If both the <c>line</c> macro and the parse transform are used in
+ the same module, the parse transform will overrule the macro.
+ </p>
+ </section>
+
+ <section>
+ <title>TEST SUITE SUPPORT MACROS</title>
+ <p>There are some macros defined in the <c>test_server.hrl</c>
+ that are quite useful for test suite programmers:
+ </p>
+ <p>The <em>line</em> macro, is quite
+ essential when writing test cases. It tells the test server
+ exactly what line of code that is being executed, so that it can
+ report this line back if the test case fails. Use this macro at
+ the beginning of every test case line of code.
+ </p>
+ <p>The <em>config</em> macro, is used to
+ retrieve information from the <c>Config</c> variable sent to all
+ test cases. It is used with two arguments, where the first is the
+ name of the configuration variable you wish to retrieve, and the
+ second is the <c>Config</c> variable supplied to the test case
+ from the test server.
+ </p>
+ <p>Possible configuration variables include:</p>
+ <list type="bulleted">
+ <item><c>data_dir</c> - Data file directory.</item>
+ <item><c>priv_dir</c> - Scratch file directory.</item>
+ <item><c>nodes</c> - Nodes specified in the spec file</item>
+ <item><c>nodenames</c> - Generated nodenames.</item>
+ <item>Whatever added by conf test cases or
+ <c>init_per_testcase/2</c></item>
+ </list>
+ <p>Examples of the <c>line</c> and <c>config</c> macros can be
+ seen in the Examples chapter in the user's guide.
+ </p>
+ <p>If the <c>line_trace</c> macro is defined, you will get a
+ timestamp (<c>erlang:now()</c>) in your minor log for each
+ <c>line</c> macro in your suite. This way you can at any time see
+ which line is currently being executed, and when the line was
+ called.
+ </p>
+ <p>The <c>line_trace</c> macro can also be used together with the
+ <c>test_server_line</c> parse transform described above. A
+ timestamp will then be written for each line in the suite, except
+ for functions stated in the <c>-no_lines</c> attribute.
+ </p>
+ <p>The <c>line_trace</c> macro can e.g. be defined as a compile
+ option, like this:
+ <br></br>
+<c>erlc -W -Dline_trace my_SUITE.erl</c></p>
+ </section>
+</erlref>
+
diff --git a/lib/test_server/doc/src/test_server_app.xml b/lib/test_server/doc/src/test_server_app.xml
new file mode 100644
index 0000000000..924cdc886b
--- /dev/null
+++ b/lib/test_server/doc/src/test_server_app.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE appref SYSTEM "appref.dtd">
+
+<appref>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Server Application</title>
+ <prepared>Siri Hansen</prepared>
+ <responsible>Peter Andersson</responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date>2002-07-12</date>
+ <rev>PA1</rev>
+ <file>test_server_app.xml</file>
+ </header>
+ <app>test_server</app>
+ <appsummary>Test Server for manual or automatic testing of Erlang code</appsummary>
+ <description>
+ <p><em>Test Server</em> is a portable test server for
+ automated application testing. The server can run test suites
+ on local or remote targets and log progress and results to HTML
+ pages. The main purpose of Test Server is to act as engine
+ inside customized test tools. A callback interface for
+ such framework applications is provided.</p>
+ <p>In brief the test server supports:</p>
+ <list type="bulleted">
+ <item>Running multiple, concurrent test suites</item>
+ <item>Running tests on remote and even diskless targets</item>
+ <item>Test suites may contain other test suites, in a tree fashion</item>
+ <item>Logging of the events in a test suite, on both suite and case levels</item>
+ <item>HTML presentation of test suite results</item>
+ <item>HTML presentation of test suite code</item>
+ <item>Support for test suite authors, e.g. start/stop slave nodes</item>
+ <item>Call trace on target and slave nodes</item>
+ </list>
+ <p>For information about how to write test cases and test suites,
+ please see the Test Server User's Guide and the reference
+ manual for the <c>test_server</c> module.
+ </p>
+ <p><em>Common Test</em> is an existing test tool application based on the
+ OTP Test Server. Please read the Common Test User's Guide for more information.
+ </p>
+ </description>
+
+ <section>
+ <title>Configuration</title>
+ <p>There are currently no configuration parameters available for
+ this application.
+ </p>
+ </section>
+
+ <section>
+ <title>SEE ALSO</title>
+ <p></p>
+ </section>
+</appref>
+
diff --git a/lib/test_server/doc/src/test_server_ctrl.xml b/lib/test_server/doc/src/test_server_ctrl.xml
new file mode 100644
index 0000000000..3d95813c14
--- /dev/null
+++ b/lib/test_server/doc/src/test_server_ctrl.xml
@@ -0,0 +1,771 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2007</year>
+ <year>2008</year>
+ <holder>Ericsson AB, All Rights Reserved</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson AB.
+ </legalnotice>
+
+ <title>The Test Server Controller</title>
+ <prepared>Siri Hansen</prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
+ <file>test_server_ctrl_ref.sgml</file>
+ </header>
+ <module>test_server_ctrl</module>
+ <modulesummary>This module provides a low level interface to the Test Server.</modulesummary>
+ <description>
+ <p>The <c>test_server_ctrl</c> module provides a low level
+ interface to the Test Server. This interface is normally
+ not used directly by the tester, but through a framework built
+ on top of <c>test_server_ctrl</c>.
+ </p>
+ <p>Common Test is such a framework, well suited for automated
+ black box testing of target systems of any kind (not necessarily
+ implemented in Erlang). Common Test is also a very useful tool for
+ white box testing Erlang programs and OTP applications.
+ Please see the Common Test User's Guide and reference manual for
+ more information.
+ </p>
+ <p>If you want to write your own framework, some more information
+ can be found in the chapter "Writing your own test server
+ framework" in the Test Server User's Guide. Details about the
+ interface provided by <c>test_server_ctrl</c> follows below.
+ </p>
+ </description>
+ <funcs>
+ <func>
+ <name>start() -> Result</name>
+ <name>start(ParameterFile) -> Result</name>
+ <fsummary>Starts the test server.</fsummary>
+ <type>
+        <v>Result = ok | {error, {already_started, pid()}}</v>
+ <v>ParameterFile = atom() | string()</v>
+ </type>
+ <desc>
+ <p>This function starts the test server. If the parameter file
+ is given, it indicates that the target is remote. In that case
+ the target node is started and a socket connection is
+ established between the controller and the target node.
+ </p>
+ <p>The parameter file is a text file containing key-value
+ tuples. Each tuple must be followed by a dot-newline
+ sequence. The following key-value tuples are allowed:
+ </p>
+ <taglist>
+ <tag><c>{type,PlatformType}</c></tag>
+ <item>This is an atom indicating the target platform type,
+ currently supported: <c>PlatformType = vxworks</c> <br></br>
+Mandatory
+ </item>
+ <tag><c>{target,TargetHost}</c></tag>
+ <item>This is the name of the target host, can be atom or
+ string.
+ <br></br>
+Mandatory
+ </item>
+ <tag><c>{slavetargets,SlaveTargets}</c></tag>
+ <item>This is a list of available hosts where slave nodes
+ can be started. The hostnames are given as atoms or strings.
+ <br></br>
+Optional, default <c>SlaveTargets = []</c></item>
+ <tag><c>{longnames,Bool}</c></tag>
+ <item>This indicates if longnames shall be used, i.e. if the
+ <c>-name</c> option should be used for the target node
+ instead of <c>-sname</c> <br></br>
+Optional, default <c>Bool = false</c></item>
+ <tag><c>{master, {MasterHost, MasterCookie}}</c></tag>
+ <item>If target is remote and the target node is started as
+ a slave node, this option indicates which master and
+ cookie to use. The given master
+ will also be used as master for slave nodes started with
+ <c>test_server:start_node/3</c>. It is expected that the
+ <c>erl_boot_server</c> is started on the master node before
+ the <c>test_server_ctrl:start/1</c> function is called.
+ <br></br>
+Optional, if not given the test server controller node
+ is used as master and the <c>erl_boot_server</c> is
+ automatically started.</item>
+ </taglist>
+ </desc>
+ </func>
+ <func>
+ <name>stop() -> ok</name>
+ <fsummary>Stops the test server immediately.</fsummary>
+ <desc>
+ <p>This stops the test server (both controller and target) and
+ all its activity. The running test suite (if any) will be
+ halted.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_dir(Name, Dir) -> ok</name>
+ <name>add_dir(Name, Dir, Pattern) -> ok</name>
+ <name>add_dir(Name, [Dir|Dirs]) -> ok</name>
+ <name>add_dir(Name, [Dir|Dirs], Pattern) -> ok</name>
+ <fsummary>Add a directory to the job queue.</fsummary>
+ <type>
+ <v>Name = term()</v>
+ <d>The jobname for this directory.</d>
+ <v>Dir = term()</v>
+ <d>The directory to scan for test suites.</d>
+ <v>Dirs = [term()]</v>
+ <d>List of directories to scan for test suites.</d>
+ <v>Pattern = term()</v>
+ <d>Suite match pattern. Directories will be scanned for Pattern_SUITE.erl files.</d>
+ </type>
+ <desc>
+ <p>Puts a collection of suites matching (*_SUITE) in given
+ directories into the job queue. <c>Name</c> is an arbitrary
+ name for the job, it can be any erlang term. If <c>Pattern</c>
+ is given, only modules matching <c>Pattern*</c> will be added.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_module(Mod) -> ok</name>
+ <name>add_module(Name, [Mod|Mods]) -> ok</name>
+ <fsummary>Add a module to the job queue with or without a given name.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <v>Mods = [atom()]</v>
+ <d>The name(s) of the module(s) to add.</d>
+ <v>Name = term()</v>
+ <d>Name for the job.</d>
+ </type>
+ <desc>
+ <p>This function adds a module or a list of modules, to the
+ test servers job queue. <c>Name</c> may be any Erlang
+ term. When <c>Name</c> is not given, the job gets the name of
+ the module.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_case(Mod, Case) -> ok</name>
+ <fsummary>Adds one test case to the job queue.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <d>Name of the module the test case is in.</d>
+ <v>Case = atom() </v>
+ <d>Function name of the test case to add.</d>
+ </type>
+ <desc>
+ <p>This function will add one test case to the job queue. The
+ job will be given the module's name.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_case(Name, Mod, Case) -> ok</name>
+ <fsummary>Equivalent to add_case/2, but with specified name.</fsummary>
+ <type>
+ <v>Name = string()</v>
+ <d>Name to use for the test job.</d>
+ </type>
+ <desc>
+ <p>Equivalent to <c>add_case/2</c>, but the test job will get
+ the specified name.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_cases(Mod, Cases) -> ok</name>
+ <fsummary>Adds a list of test cases to the job queue.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <d>Name of the module the test case is in.</d>
+ <v>Cases = [Case] </v>
+ <v>Case = atom() </v>
+ <d>Function names of the test cases to add.</d>
+ </type>
+ <desc>
+ <p>This function will add one or more test cases to the job
+ queue. The job will be given the module's name.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_cases(Name, Mod, Cases) -> ok</name>
+ <fsummary>Equivalent to add_cases/2, but with specified name.</fsummary>
+ <type>
+ <v>Name = string()</v>
+ <d>Name to use for the test job.</d>
+ </type>
+ <desc>
+ <p>Equivalent to <c>add_cases/2</c>, but the test job will get
+ the specified name.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_spec(TestSpecFile) -> ok | {error, nofile}</name>
+ <fsummary>Adds a test specification file to the job queue.</fsummary>
+ <type>
+ <v>TestSpecFile = string()</v>
+ <d>Name of the test specification file</d>
+ </type>
+ <desc>
+ <p>This function will add the content of the given test
+ specification file to the job queue. The job will be given the
+ name of the test specification file, e.g. if the file is
+ called <c>test.spec</c>, the job will be called <c>test</c>.
+ </p>
+ <p>See the reference manual for the test server application
+ for details about the test specification file.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_dir_with_skip(Name, [Dir|Dirs], Skip) -> ok</name>
+ <name>add_dir_with_skip(Name, [Dir|Dirs], Pattern, Skip) -> ok</name>
+ <name>add_module_with_skip(Mod, Skip) -> ok</name>
+ <name>add_module_with_skip(Name, [Mod|Mods], Skip) -> ok</name>
+ <name>add_case_with_skip(Mod, Case, Skip) -> ok</name>
+ <name>add_case_with_skip(Name, Mod, Case, Skip) -> ok</name>
+ <name>add_cases_with_skip(Mod, Cases, Skip) -> ok</name>
+ <name>add_cases_with_skip(Name, Mod, Cases, Skip) -> ok</name>
+ <fsummary>Same purpose as functions listed above, but with extra Skip argument.</fsummary>
+ <type>
+ <v>Skip = [SkipItem]</v>
+ <d>List of items to be skipped from the test.</d>
+ <v>SkipItem = {Mod,Comment} | {Mod,Case,Comment} | {Mod,Cases,Comment}</v>
+ <v>Mod = atom()</v>
+ <d>Test suite name.</d>
+ <v>Comment = string()</v>
+ <d>Reason why suite or case is being skipped.</d>
+ <v>Cases = [Case]</v>
+ <v>Case = atom()</v>
+ <d>Name of test case function.</d>
+ </type>
+ <desc>
+ <p>These functions add test jobs just like the add_dir, add_module,
+ add_case and add_cases functions above, but carry an additional
+ argument, Skip. Skip is a list of items that should be skipped
+ in the current test run. Test job items that occur in the Skip
+ list will be logged as SKIPPED with the associated Comment.</p>
+ </desc>
+ </func>
+ <func>
+ <name>add_tests_with_skip(Name, Tests, Skip) -> ok</name>
+ <fsummary>Adds different types of jobs to the run queue.</fsummary>
+ <type>
+ <v>Name = term()</v>
+ <d>The jobname for this directory.</d>
+ <v>Tests = [TestItem]</v>
+ <d>List of jobs to add to the run queue.</d>
+ <v>TestItem = {Dir,all,all} | {Dir,Mods,all} | {Dir,Mod,Cases}</v>
+ <v>Dir = term()</v>
+ <d>The directory to scan for test suites.</d>
+ <v>Mods = [Mod]</v>
+ <v>Mod = atom()</v>
+ <d>Test suite name.</d>
+ <v>Cases = [Case]</v>
+ <v>Case = atom()</v>
+ <d>Name of test case function.</d>
+ <v>Skip = [SkipItem]</v>
+ <d>List of items to be skipped from the test.</d>
+ <v>SkipItem = {Mod,Comment} | {Mod,Case,Comment} | {Mod,Cases,Comment}</v>
+ <v>Comment = string()</v>
+ <d>Reason why suite or case is being skipped.</d>
+ </type>
+ <desc>
+ <p>This function adds various test jobs to the test_server_ctrl
+ job queue. These jobs can be of different type (all or specific suites
+ in one directory, all or specific cases in one suite, etc). It is also
+ possible to get particular items skipped by passing them along in the
+ Skip list (see the add_*_with_skip functions above).</p>
+ </desc>
+ </func>
+ <func>
+ <name>abort_current_testcase(Reason) -> ok | {error,no_testcase_running}</name>
+ <fsummary>Aborts the test case currently executing.</fsummary>
+ <type>
+ <v>Reason = term()</v>
+ <d>The reason for stopping the test case, which will be printed in the log.</d>
+ </type>
+ <desc>
+ <p>When calling this function, the currently executing test case will be aborted.
+ It is the user's responsibility to know for sure which test case is currently
+ executing. The function is therefore only safe to call from a function which
+ has been called (or synchronously invoked) by the test case.</p>
+ </desc>
+ </func>
+ <func>
+ <name>set_levels(Console, Major, Minor) -> ok</name>
+ <fsummary>Sets the levels of I/O.</fsummary>
+ <type>
+ <v>Console = integer()</v>
+ <d>Level for I/O to be sent to console.</d>
+ <v>Major = integer()</v>
+ <d>Level for I/O to be sent to the major logfile.</d>
+ <v>Minor = integer()</v>
+ <d>Level for I/O to be sent to the minor logfile.</d>
+ </type>
+ <desc>
+ <p>Determines where I/O from test suites/test server will
+ go. All text output from test suites and the test server is
+ tagged with a priority value which ranges from 0 to 100, 100
+ being the most detailed. (see the section about log files in
+ the user's guide). Output from the test cases (using
+ <c>io:format/2</c>) has a detail level of 50. Depending on the
+ levels set by this function, this I/O may be sent to the
+ console, the major log file (for the whole test suite) or to
+ the minor logfile (separate for each test case).
+ </p>
+ <p>All output with detail level:</p>
+ <list type="bulleted">
+ <item>Less than or equal to <c>Console</c> is displayed on
+ the screen (default 1)
+ </item>
+ <item>Less than or equal to <c>Major</c> is logged in the
+ major log file (default 19)
+ </item>
+ <item>Greater than or equal to <c>Minor</c> is logged in the
+ minor log files (default 10)
+ </item>
+ </list>
+ <p>To view the currently set thresholds, use the
+ <c>get_levels/0</c> function.</p>
+ </desc>
+ </func>
+ <func>
+ <name>get_levels() -> {Console, Major, Minor}</name>
+ <fsummary>Returns the current levels.</fsummary>
+ <desc>
+ <p>Returns the current levels. See <c>set_levels/3</c> for
+ types.</p>
+ </desc>
+ </func>
+ <func>
+ <name>jobs() -> JobQueue</name>
+ <fsummary>Returns the job queue.</fsummary>
+ <type>
+ <v>JobQueue = [{list(), pid()}]</v>
+ </type>
+ <desc>
+ <p>This function will return all the jobs currently in the job
+ queue.</p>
+ </desc>
+ </func>
+ <func>
+ <name>multiply_timetraps(N) -> ok</name>
+ <fsummary>All timetraps started after this will be multiplied by N.</fsummary>
+ <type>
+ <v>N = integer() | infinity</v>
+ </type>
+ <desc>
+ <p>This function should be called before a test is started
+ which requires extended timetraps, e.g. if extensive tracing
+ is used. All timetraps started after this call will be
+ multiplied by <c>N</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>cover(Application,Analyse) -> ok</name>
+ <name>cover(CoverFile,Analyse) -> ok</name>
+ <name>cover(App,CoverFile,Analyse) -> ok</name>
+ <fsummary>Informs the test_server controller that next test shall run with code coverage analysis.</fsummary>
+ <type>
+ <v>Application = atom()</v>
+ <d>OTP application to cover compile</d>
+ <v>CoverFile = string()</v>
+ <d>Name of file listing modules to exclude from or include in cover compilation. The filename must include full path to the file.</d>
+ <v>Analyse = details | overview</v>
+ </type>
+ <desc>
+ <p>This function informs the test_server controller that next
+ test shall run with code coverage analysis. All timetraps will
+          automatically be multiplied by 10 when cover is run.
+ </p>
+ <p><c>Application</c> and <c>CoverFile</c> indicates what to
+ cover compile. If <c>Application</c> is given, the default is
+ that all modules in the <c>ebin</c> directory of the
+ application will be cover compiled. The <c>ebin</c> directory
+ is found by adding <c>ebin</c> to
+ <c>code:lib_dir(Application)</c>.
+ </p>
+ <p>A <c>CoverFile</c> can have the following entries:</p>
+ <code type="none">
+{exclude, all | ExcludeModuleList}.
+{include, IncludeModuleList}. </code>
+ <p>Note that each line must end with a full
+ stop. <c>ExcludeModuleList</c> and <c>IncludeModuleList</c>
+ are lists of atoms, where each atom is a module name.
+ </p>
+ <p>If both an <c>Application</c> and a <c>CoverFile</c> is
+ given, all modules in the application are cover compiled,
+ except for the modules listed in <c>ExcludeModuleList</c>. The
+ modules in <c>IncludeModuleList</c> are also cover compiled.
+ </p>
+ <p>If a <c>CoverFile</c> is given, but no <c>Application</c>,
+ only the modules in <c>IncludeModuleList</c> are cover
+ compiled.
+ </p>
+ <p><c>Analyse</c> indicates the detail level of the cover
+ analysis. If <c>Analyse = details</c>, each cover compiled
+ module will be analysed with
+ <c>cover:analyse_to_file/1</c>. If <c>Analyse = overview</c>
+ an overview of all cover compiled modules is created, listing
+ the number of covered and not covered lines for each module.
+ </p>
+ <p>If the test following this call starts any slave or peer
+ nodes with <c>test_server:start_node/3</c>, the same cover
+ compiled code will be loaded on all nodes. If the loading
+ fails, e.g. if the node runs an old version of OTP, the node
+ will simply not be a part of the coverage analysis. Note that
+ slave or peer nodes must be stopped with
+ <c>test_server:stop_node/1</c> for the node to be part of the
+ coverage analysis, else the test server will not be able to
+ fetch coverage data from the node.
+ </p>
+ <p>When the test is finished, the coverage analysis is
+ automatically completed, logs are created and the cover
+ compiled modules are unloaded. If another test is to be run
+ with coverage analysis, <c>test_server_ctrl:cover/2/3</c> must
+ be called again.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name>cross_cover_analyse(Level) -> ok</name>
+ <fsummary>Analyse cover data collected from all tests</fsummary>
+ <type>
+ <v>Level = details | overview</v>
+ </type>
+ <desc>
+ <p>Analyse cover data collected from all tests. The modules
+ analysed are the ones listed in the cross cover file
+ <c>cross.cover</c> in the current directory of the test
+ server.</p>
+ <p>The modules listed in the <c>cross.cover</c> file are
+ modules that are heavily used by other applications than the
+ one they belong to. This function should be run after all
+ tests are completed, and the result will be stored in a file
+ called cross_cover.html in the run.&lt;timestamp&gt;
+ directory of the application the modules belong to.
+ </p>
+ <p>The <c>cross.cover</c> file contains elements like this:</p>
+ <pre>
+{App,Modules}. </pre>
+ <p>where <c>App</c> can be an application name or the atom
+ <c>all</c>. The application (or all applications) will cover
+ compile the listed <c>Modules</c>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name>trc(TraceInfoFile) -> ok | {error, Reason}</name>
+ <fsummary>Starts call trace on target and slave nodes</fsummary>
+ <type>
+ <v>TraceInfoFile = atom() | string()</v>
+ <d>Name of a file defining which functions to trace and how</d>
+ </type>
+ <desc>
+ <p>This function starts call trace on target and on slave or
+ peer nodes that are started or will be started by the test
+ suites.
+ </p>
+ <p>Timetraps are not extended automatically when tracing is
+ used. Use <c>multiply_timetraps/1</c> if necessary.
+ </p>
+ <p>Note that the trace support in the test server is in a very
+ early stage of the implementation, and thus not yet as
+ powerful as one might wish for.
+ </p>
+ <p>The trace information file specified by the
+ <c>TraceInfoFile</c> argument is a text file containing one or
+ more of the following elements:
+ </p>
+ <list type="bulleted">
+ <item><c>{SetTP,Module,Pattern}.</c></item>
+ <item><c>{SetTP,Module,Function,Pattern}.</c></item>
+ <item><c>{SetTP,Module,Function,Arity,Pattern}.</c></item>
+ <item><c>ClearTP.</c></item>
+ <item><c>{ClearTP,Module}.</c></item>
+ <item><c>{ClearTP,Module,Function}.</c></item>
+ <item><c>{ClearTP,Module,Function,Arity}.</c></item>
+ </list>
+ <taglist>
+ <tag><c>SetTP = tp | tpl</c></tag>
+          <item>This maps to the corresponding functions in the
+ <c>ttb</c> module in the <c>observer</c>
+ application. <c>tp</c> means set trace pattern on global
+ function calls. <c>tpl</c> means set trace pattern on local
+ and global function calls.
+ </item>
+ <tag><c>ClearTP = ctp | ctpl | ctpg</c></tag>
+          <item>This maps to the corresponding functions in the
+ <c>ttb</c> module in the <c>observer</c>
+ application. <c>ctp</c> means clear trace pattern (i.e. turn
+ off) on global and local function calls. <c>ctpl</c> means
+ clear trace pattern on local function calls only and <c>ctpg</c>
+ means clear trace pattern on global function calls only.
+ </item>
+ <tag><c>Module = atom()</c></tag>
+ <item>The module to trace
+ </item>
+ <tag><c>Function = atom()</c></tag>
+ <item>The name of the function to trace
+ </item>
+ <tag><c>Arity = integer()</c></tag>
+ <item>The arity of the function to trace
+ </item>
+ <tag><c>Pattern = [] | match_spec()</c></tag>
+ <item>The trace pattern to set for the module or
+ function. For a description of the match_spec() syntax,
+ please turn to the User's guide for the runtime system
+ (erts). The chapter "Match Specification in Erlang" explains
+ the general match specification language.
+ </item>
+ </taglist>
+ <p>The trace result will be logged in a (binary) file called
+ <c>NodeName-test_server</c> in the current directory of the
+ test server controller node. The log must be formatted using
+ <c>ttb:format/1/2</c>.
+ </p>
+ <p>This is valid for all targets except the OSE/Delta target
+ for which all nodes will be logged and automatically formatted
+ in one single text file called <c>allnodes-test_server</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>stop_trace() -> ok | {error, not_tracing}</name>
+ <fsummary>Stops tracing on target and slave nodes.</fsummary>
+ <desc>
+ <p>This function stops tracing on target, and on slave or peer
+ nodes that are currently running. New slave or peer nodes will
+ no longer be traced after this.</p>
+ </desc>
+ </func>
+ </funcs>
+
+ <section>
+ <title>FUNCTIONS INVOKED FROM COMMAND LINE</title>
+ <p>The following functions are supposed to be invoked from the
+ command line using the <c>-s</c> option when starting the erlang
+ node.</p>
+ </section>
+ <funcs>
+ <func>
+ <name>run_test(CommandLine) -> ok</name>
+ <fsummary>Runs the tests specified on the command line.</fsummary>
+ <type>
+ <v>CommandLine = FlagList</v>
+ </type>
+ <desc>
+ <p>This function is supposed to be invoked from the
+ commandline. It starts the test server, interprets the
+ argument supplied from the commandline, runs the tests
+ specified and when all tests are done, stops the test server
+ and returns to the Erlang prompt.
+ </p>
+ <p>The <c>CommandLine</c> argument is a list of command line
+ flags, typically <c>['KEY1', Value1, 'KEY2', Value2, ...]</c>.
+ The valid command line flags are listed below.
+ </p>
+ <p>Under a UNIX command prompt, this function can be invoked like this:
+ <br></br>
+<c>erl -noshell -s test_server_ctrl run_test KEY1 Value1 KEY2 Value2 ... -s erlang halt</c></p>
+ <p>Or make an alias (this is for unix/tcsh) <br></br>
+<c>alias erl_test 'erl -noshell -s test_server_ctrl run_test \\!* -s erlang halt'</c></p>
+ <p>And then use it like this <br></br>
+<c>erl_test KEY1 Value1 KEY2 Value2 ...</c> <br></br>
+</p>
+ <p>The valid command line flags are</p>
+ <taglist>
+ <tag><c>DIR dir</c></tag>
+ <item>Adds all test modules in the directory <c>dir</c> to
+ the job queue.
+ </item>
+ <tag><c>MODULE mod</c></tag>
+ <item>Adds the module <c>mod</c> to the job queue.
+ </item>
+ <tag><c>CASE mod case</c></tag>
+ <item>Adds the case <c>case</c> in module <c>mod</c> to the
+ job queue.
+ </item>
+ <tag><c>SPEC spec</c></tag>
+ <item>Runs the test specification file <c>spec</c>.
+ </item>
+ <tag><c>SKIPMOD mod</c></tag>
+ <item>Skips all test cases in the module <c>mod</c></item>
+ <tag><c>SKIPCASE mod case</c></tag>
+ <item>Skips the test case <c>case</c> in module <c>mod</c>.
+ </item>
+ <tag><c>NAME name</c></tag>
+ <item>Names the test suite to something else than the
+ default name. This does not apply to <c>SPEC</c> which keeps
+            its name.
+ </item>
+ <tag><c>PARAMETERS parameterfile</c></tag>
+ <item>Specifies the parameter file to use when starting
+ remote target
+ </item>
+ <tag><c>COVER app cover_file analyse</c></tag>
+ <item>Indicates that the test should be run with cover
+ analysis. <c>app</c>, <c>cover_file</c> and <c>analyse</c>
+ corresponds to the parameters to
+ <c>test_server_ctrl:cover/3</c>. If no cover file is used,
+ the atom <c>none</c> should be given.
+ </item>
+ <tag><c>TRACE traceinfofile</c></tag>
+ <item>Specifies a trace information file. When this option
+ is given, call tracing is started on the target node and all
+ slave or peer nodes that are started. The trace information
+ file specifies which modules and functions to trace. See the
+ function <c>trc/1</c> above for more information about the
+ syntax of this file.
+ </item>
+ </taglist>
+ </desc>
+ </func>
+ </funcs>
+
+ <section>
+ <title>FRAMEWORK CALLBACK FUNCTIONS</title>
+ <p>A test server framework can be defined by setting the
+ environment variable <c>TEST_SERVER_FRAMEWORK</c> to a module
+ name. This module will then be framework callback module, and it
+ must export the following function:</p>
+ </section>
+ <funcs>
+ <func>
+ <name>get_suite(Mod,Func) -> TestCaseList</name>
+ <fsummary>Get subcases.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <v>Func = atom()</v>
+        <v>TestCaseList = [SubCase]</v>
+ </type>
+ <desc>
+ <p>This function is called before a test case is started. The
+ purpose is to retrieve a list of subcases. The default
+ behaviour of this function should be to call
+ <c>Mod:Func(suite)</c> and return the result from this call.</p>
+ </desc>
+ </func>
+ <func>
+ <name>init_tc(Mod,Func,Args) -> {ok,Args}</name>
+ <fsummary>Preparation for a test case.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <v>Func = atom()</v>
+ <v>Args = [tuple()]</v>
+ <d>Normally Args = [Config]</d>
+ </type>
+ <desc>
+ <p>This function is called when a test case is started. It is
+ called on the process executing the test case function
+ (<c>Mod:Func</c>). Typical use of this function can be to alter
+ the input parameters to the test case function (<c>Args</c>) or
+ to set properties for the executing process.</p>
+ </desc>
+ </func>
+ <func>
+ <name>end_tc(Mod,Func,Args) -> ok</name>
+ <fsummary>Cleanup after a test case.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <v>Func = atom()</v>
+ <v>Args = [tuple()]</v>
+ <d>Normally Args = [Config]</d>
+ </type>
+ <desc>
+ <p>This function is called when a test case is completed. It is
+ called on the process where the test case function
+ (<c>Mod:Func</c>) was executed. Typical use of this function can
+ be to clean up stuff done by <c>init_tc/3</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>report(What,Data) -> ok</name>
+ <fsummary>Progress report for test.</fsummary>
+ <type>
+ <v>What = atom()</v>
+ <v>Data = term()</v>
+ </type>
+ <desc>
+        <p>This function is called in order to keep the framework up to
+ date about the progress of the test. This is useful e.g. if the
+ framework implements a GUI where the progress information is
+ constantly updated. The following can be reported:
+ </p>
+ <p><c>What = tests_start, Data = {Name,NumCases}</c> <br></br>
+<c>What = tests_done, Data = {Ok,Failed,Skipped}</c> <br></br>
+<c>What = tc_start, Data = {Mod,Func}</c> <br></br>
+<c>What = tc_done, Data = {Mod,Func,Result}</c></p>
+ </desc>
+ </func>
+ <func>
+ <name>error_notification(Mod, Case, Args, Error) -> ok</name>
+ <fsummary>Inform framework of crashing testcase.</fsummary>
+ <type>
+ <v>Mod = atom()</v>
+ <d>Test suite name.</d>
+ <v>Case = atom()</v>
+ <d>Name of test case function.</d>
+ <v>Args = [tuple()]</v>
+ <d>Normally Args = [Config]</d>
+ <v>Error = {Reason,Location}</v>
+ <v>Reason = term()</v>
+ <d>Reason for termination.</d>
+ <v>Location = unknown | [{Mod,Case,Line}]</v>
+ <d>Last known position in Mod before termination.</d>
+ <v>Line = integer()</v>
+ <d>Line number in file Mod.erl.</d>
+ </type>
+ <desc>
+ <p>This function is called as the result of testcase Mod:Case failing
+ with Reason at Location. The function is intended mainly to aid
+ specific logging or error handling in the framework application. Note
+ that for Location to have relevant values (i.e. other than unknown),
+ the <c>line</c> macro or <c>test_server_line</c> parse transform must
+ be used. For details, please see the section about test suite line numbers
+ in the <c>test_server</c> reference manual page.</p>
+ </desc>
+ </func>
+ <func>
+ <name>warn(What) -> boolean()</name>
+ <fsummary>Ask framework if test server should issue a warning for What.</fsummary>
+ <type>
+ <v>What = processes | nodes</v>
+ </type>
+ <desc>
+ <p>The test server checks the number of processes and nodes
+ before and after the test is executed. This function is a
+ question to the framework if the test server should warn when
+ the number of processes or nodes has changed during the test
+ execution. If <c>true</c> is returned, a warning will be written
+ in the test case minor log file.</p>
+ </desc>
+ </func>
+ <func>
+ <name>target_info() -> InfoStr</name>
+ <fsummary>Print info about the target system to the test case log.</fsummary>
+ <type>
+ <v>InfoStr = string() | ""</v>
+ </type>
+ <desc>
+ <p>The test server will ask the framework for information about
+ the test target system and print InfoStr in the test case
+ log file below the host information.</p>
+ </desc>
+ </func>
+ </funcs>
+</erlref>
+
diff --git a/lib/test_server/doc/src/test_spec_chapter.xml b/lib/test_server/doc/src/test_spec_chapter.xml
new file mode 100644
index 0000000000..3a7730d61e
--- /dev/null
+++ b/lib/test_server/doc/src/test_spec_chapter.xml
@@ -0,0 +1,375 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Test Structure and Test Specifications</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>test_spec_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Test structure</title>
+ <p>A test consists of a set of test cases. Each test case is
+ implemented as an erlang function. An erlang module implementing
+ one or more test cases is called a test suite.
+ </p>
+ </section>
+
+ <section>
+ <title>Test specifications</title>
+ <p>A test specification is a specification of which test suites
+ and test cases to run and which to skip. A test specification can
+ also group several test cases into conf cases with init and
+ cleanup functions (see section about configuration cases
+ below). In a test there can be test specifications on three
+ different levels:
+ </p>
+ <p>The top level is a test specification file which roughly
+ specifies what to test for a whole application. The test
+ specification in such a file is encapsulated in a topcase
+ command.
+ </p>
+ <p>Then there is a test specification for each test suite,
+ specifying which test cases to run within the suite. The test
+ specification for a test suite is returned from the
+ <c>all(suite)</c> function in the test suite module.
+ </p>
+ <p>And finally there can be a test specification per test case,
+ specifying sub test cases to run. The test specification for a
+ test case is returned from the specification clause of the test
+ case.
+ </p>
+ <p>When a test starts, the total test specification is built in a
+ tree fashion, starting from the top level test specification.
+ </p>
+ <p>The following are the valid elements of a test
+ specification. The specification can be one of these elements or a
+ list with any combination of the elements:
+ </p>
+ <taglist>
+ <tag><c>{Mod, Case}</c></tag>
+ <item>This specifies the test case Mod:Case/1
+ </item>
+ <tag><c>{dir, Dir}</c></tag>
+ <item>This specifies all modules <c>*_SUITE</c> in the directory
+ <c>Dir</c></item>
+ <tag><c>{dir, Dir, Pattern}</c></tag>
+ <item>This specifies all modules <c>Pattern*</c> in the
+ directory <c>Dir</c></item>
+ <tag><c>{conf, Init, TestSpec, Fin}</c></tag>
+ <item>This is a configuration case. In a test specification
+ file, <c>Init</c> and <c>Fin</c> must be
+ <c>{Mod,Func}</c>. Inside a module they can also be just
+ <c>Func</c>. See the section named Configuration Cases below for
+ more information about this.
+ </item>
+ <tag><c>{conf, Properties, Init, TestSpec, Fin}</c></tag>
+ <item>This is a configuration case as explained above, but
+ which also takes a list of execution properties for its group
+ of test cases and nested sub-groups.
+ </item>
+ <tag><c>{make, Init, TestSpec, Fin}</c></tag>
+ <item>This is a special version of a conf case which is only
+ used by the test server framework <c>ts</c>. <c>Init</c> and
+ <c>Fin</c> are make and unmake functions for a data
+ directory. <c>TestSpec</c> is the test specification for the
+ test suite owning the data directory in question. If the make
+ function fails, all tests in the test suite are skipped. The
+ difference between this "make case" and a normal conf case is
+ that for the make case, <c>Init</c> and <c>Fin</c> are given with
+ arguments (<c>{Mod,Func,Args}</c>), and that they are executed
+ on the controller node (i.e. not on target).
+ </item>
+ <tag><c>Case</c></tag>
+ <item>This can only be used inside a module, i.e. not a test
+ specification file. It specifies the test case
+ <c>CurrentModule:Case</c>.
+ </item>
+ </taglist>
+ </section>
+
+ <section>
+ <title>Test Specification Files</title>
+ <p>A test specification file is a text file containing the top
+ level test specification (a topcase command), and possibly one or
+ more additional commands. A "command" in a test specification file
+ means a key-value tuple ended by a dot-newline sequence.
+ </p>
+ <p>The following commands are valid:
+ </p>
+ <taglist>
+ <tag><c>{topcase, TestSpec}</c></tag>
+ <item>This command is mandatory in all test specification
+ files. <c>TestSpec</c> is the top level test specification of a
+ test.
+ </item>
+ <tag><c>{skip, {Mod, Comment}}</c></tag>
+ <item>This specifies that all cases in the module <c>Mod</c>
+ shall be skipped. <c>Comment</c> is a string.
+ </item>
+ <tag><c>{skip, {Mod, Case, Comment}}</c></tag>
+ <item>This specifies that the case <c>Mod:Case</c> shall be
+ skipped.
+ </item>
+ <tag><c>{skip, {Mod, CaseList, Comment}}</c></tag>
+ <item>This specifies that all cases <c>Mod:Case</c>, where
+ <c>Case</c> is in <c>CaseList</c>, shall be skipped.
+ </item>
+ <tag><c>{nodes, Nodes}</c></tag>
+ <item><c>Nodes</c> is a list of nodenames available to the test
+ suite. It will be added to the <c>Config</c> argument to all
+ test cases. <c>Nodes</c> is a list of atoms.
+ </item>
+ <tag><c>{require_nodenames, Num}</c></tag>
+ <item>Specifies how many nodenames the test suite will
+      need. These will be automatically generated and inserted into the
+ <c>Config</c> argument to all test cases. <c>Num</c> is an
+ integer.
+ </item>
+ <tag><c>{hosts, Hosts}</c></tag>
+ <item>This is a list of available hosts on which to start slave
+ nodes. It is used when the <c>{remote, true}</c> option is given
+ to the <c>test_server:start_node/3</c> function. Also, if
+ <c>{require_nodenames, Num}</c> is contained in a test
+ specification file, the generated nodenames will be spread over
+ all hosts given in this <c>Hosts</c> list. The hostnames are
+ atoms or strings.
+ </item>
+ <tag><c>{diskless, true}</c></tag>
+ <item>Adds <c>{diskless, true}</c> to the <c>Config</c> argument
+ to all test cases. This is kept for backwards compatibility and
+ should not be used. Use a configuration case instead.
+ </item>
+ <tag><c>{ipv6_hosts, Hosts}</c></tag>
+ <item>Adds <c>{ipv6_hosts, Hosts}</c> to the <c>Config</c>
+ argument to all test cases.</item>
+ </taglist>
+ <p>All test specification files shall have the extension
+ ".spec". If special test specification files are needed for
+ Windows or VxWorks platforms, additional files with the
+ extension ".spec.win" and ".spec.vxworks" shall be
+ used. This is useful e.g. if some test cases shall be skipped on
+ these platforms.
+ </p>
+ <p>Some examples for test specification files can be found in the
+ Examples section of this user's guide.
+ </p>
+ </section>
+
+ <section>
+ <title>Configuration cases</title>
+ <p>If a group of test cases need the same initialization, a so called
+ <em>configuration</em> or <em>conf</em> case can be used. A conf
+ case consists of an initialization function, the group of test cases
+ needing this initialization and a cleanup or finalization function.
+ </p>
+ <p>If the init function in a conf case fails or returns
+ <c>{skip,Comment}</c>, the rest of the test cases in the conf case
+ (including the cleanup function) are skipped. If the init function
+ succeeds, the cleanup function will always be called, even if some
+ of the test cases in between failed.
+ </p>
+ <p>Both the init function and the cleanup function in a conf case
+ get the <c>Config</c> parameter as only argument. This parameter
+ can be modified or returned as is. Whatever is returned by the
+ init function is given as <c>Config</c> parameter to the rest of
+ the test cases in the conf case, including the cleanup function.
+ </p>
+ <p>If the <c>Config</c> parameter is changed by the init function,
+ it must be restored by the cleanup function. Whatever is returned
+ by the cleanup function will be given to the next test case called.
+ </p>
+ <p>The optional <c>Properties</c> list can be used to specify
+ execution properties for the test cases and possibly nested
+ sub-groups of the configuration case. The available properties are:</p>
+ <pre>
+ Properties = [parallel | sequence | Shuffle | {RepeatType,N}]
+ Shuffle = shuffle | {shuffle,Seed}
+ Seed = {integer(),integer(),integer()}
+ RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
+ repeat_until_any_ok | repeat_until_any_fail
+ N = integer() | forever</pre>
+
+ <p>If the <c>parallel</c> property is specified, Test Server will execute
+ all test cases in the group in parallel. If <c>sequence</c> is specified,
+ the cases will be executed in a sequence, meaning if one case fails, all
+ following cases will be skipped. If <c>shuffle</c> is specified, the cases
+ in the group will be executed in random order. The <c>repeat</c> property
+ orders Test Server to repeat execution of the cases in the group a given
+ number of times, or until any, or all, cases fail or succeed.</p>
+
+ <p>Properties may be combined so that e.g. if <c>shuffle</c>,
+ <c>repeat_until_any_fail</c> and <c>sequence</c> are all specified, the test
+ cases in the group will be executed repeatedly and in random order until
+ a test case fails, when execution is immediately stopped and the rest of
+ the cases skipped.</p>
+
+    <p>The properties for a conf case are always printed at the top of the HTML log
+ for the group's init function. Also, the total execution time for a conf case
+ can be found at the bottom of the log for the group's end function.</p>
+
+ <p>Configuration cases may be nested so that sets of grouped cases can be
+ configured with the same init- and end functions.</p>
+ </section>
+
+ <section>
+ <title>The parallel property and nested configuration cases</title>
+ <p>If a conf case has a parallel property, its test cases will be spawned
+ simultaneously and get executed in parallel. A test case is not allowed
+ to execute in parallel with the end function however, which means
+ that the time it takes to execute a set of parallel cases is equal to the
+ execution time of the slowest test case in the group. A negative side
+ effect of running test cases in parallel is that the HTML summary pages
+ are not updated with links to the individual test case logs until the
+ end function for the conf case has finished.</p>
+
+ <p>A conf case nested under a parallel conf case will start executing in
+ parallel with previous (parallel) test cases (no matter what properties the
+ nested conf case has). Since, however, test cases are never executed in
+ parallel with the init- or the end function of the same conf case, it's
+ only after a nested group of cases has finished that any remaining parallel
+ cases in the previous conf case get spawned.</p>
+ </section>
+
+ <section>
+ <title>Repeated execution of test cases</title>
+ <marker id="repeated_cases"></marker>
+ <p>A conf case may be repeated a certain number of times
+ (specified by an integer) or indefinitely (specified by <c>forever</c>).
+ The repetition may also be stopped prematurely if any or all cases
+ fail or succeed, i.e. if the property <c>repeat_until_any_fail</c>,
+ <c>repeat_until_any_ok</c>, <c>repeat_until_all_fail</c>, or
+ <c>repeat_until_all_ok</c> is used. If the basic <c>repeat</c>
+ property is used, status of test cases is irrelevant for the repeat
+ operation.</p>
+
+ <p>It is possible to return the status of a conf case (ok or
+ failed), to affect the execution of the conf case on the level above.
+ This is accomplished by, in the end function, looking up the value
+  of <c>tc_group_result</c> in the <c>Config</c> list and checking the
+ result of the finished test cases. If status <c>failed</c> should be
+ returned from the conf case as a result, the end function should return
+ the value <c>{return_group_result,failed}</c>. The status of a nested conf
+ case is taken into account by Test Server when deciding if execution
+ should be repeated or not (unless the basic <c>repeat</c> property is used).</p>
+
+  <p>The <c>tc_group_result</c> value is a list of status tuples,
+  with the keys <c>ok</c>, <c>skipped</c> and <c>failed</c>. The
+ value of a status tuple is a list containing names of test cases
+ that have been executed with the corresponding status as result.</p>
+
+ <p>Here's an example of how to return the status from a conf case:</p>
+ <pre>
+ conf_end_function(Config) ->
+ Status = ?config(tc_group_result, Config),
+ case proplists:get_value(failed, Status) of
+ [] -> % no failed cases
+ {return_group_result,ok};
+ _Failed -> % one or more failed
+ {return_group_result,failed}
+ end.</pre>
+
+ <p>It is also possible in the end function to check the status of
+ a nested conf case (maybe to determine what status the current conf case should
+ return). This is as simple as illustrated in the example above, only the
+ name of the end function of the nested conf case is stored in a tuple
+ <c>{group_result,EndFunc}</c>, which can be searched for in the status lists.
+ Example:</p>
+ <pre>
+ conf_end_function_X(Config) ->
+ Status = ?config(tc_group_result, Config),
+ Failed = proplists:get_value(failed, Status),
+ case lists:member({group_result,conf_end_function_Y}, Failed) of
+ true ->
+ {return_group_result,failed};
+ false ->
+ {return_group_result,ok}
+ end;
+ ...</pre>
+
+ <note><p>When a conf case is repeated, the init- and end functions
+ are also always called with each repetition.</p></note>
+ </section>
+
+ <section>
+ <title>Shuffled test case order</title>
+  <p>The order in which test cases in a conf case are executed is under normal
+ circumstances the same as the order defined in the test specification.
+ With the <c>shuffle</c> property set, however, Test Server will instead
+ execute the test cases in random order.</p>
+
+ <p>The user may provide a seed value (a tuple of three integers) with
+ the shuffle property: <c>{shuffle,Seed}</c>. This way, the same shuffling
+ order can be created every time the conf case is executed. If no seed value
+ is given, Test Server creates a "random" seed for the shuffling operation
+ (using the return value of <c>erlang:now()</c>). The seed value is always
+ printed to the log file of the init function so that it can be used to
+ recreate the same execution order in subsequent test runs.</p>
+
+ <note><p>If execution of a conf case with shuffled test cases is repeated,
+ the seed will not be reset in between turns.</p></note>
+
+ <p>If a nested conf case is specified in a conf case with a <c>shuffle</c>
+ property, the execution order of the nested cases in relation to the test cases
+ (and other conf cases) is also random. The order of the test cases in the nested
+ conf case is however not random (unless, of course, this one also has a
+ <c>shuffle</c> property).</p>
+ </section>
+
+ <section>
+ <title>Skipping test cases</title>
+ <p>It is possible to skip certain test cases, for example if you
+ know beforehand that a specific test case fails. This might be
+ functionality which isn't yet implemented, a bug that is known but
+ not yet fixed or some functionality which doesn't work or isn't
+ applicable on a specific platform.
+ </p>
+ <p>There are several different ways to state that a test case
+ should be skipped:</p>
+ <list type="bulleted">
+ <item>Using the <c>{skip,What}</c> command in a test
+ specification file
+ </item>
+ <item>Returning <c>{skip,Reason}</c> from the
+ <c>init_per_testcase/2</c> function
+ </item>
+ <item>Returning <c>{skip,Reason}</c> from the specification
+ clause of the test case
+ </item>
+ <item>Returning <c>{skip,Reason}</c> from the execution clause
+ of the test case
+ </item>
+ </list>
+ <p>The latter of course means that the execution clause is
+ actually called, so the author must make sure that the test case
+ is not run. For more information about the different clauses in a
+ test case, see the chapter about writing test cases.
+ </p>
+ <p>When a test case is skipped, it will be noted as <c>SKIPPED</c>
+ in the HTML log.
+ </p>
+ </section>
+</chapter>
+
diff --git a/lib/test_server/doc/src/ts.xml b/lib/test_server/doc/src/ts.xml
new file mode 100644
index 0000000000..0f91d3eea2
--- /dev/null
+++ b/lib/test_server/doc/src/ts.xml
@@ -0,0 +1,592 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2007</year>
+ <year>2008</year>
+ <holder>Ericsson AB, All Rights Reserved</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson AB.
+ </legalnotice>
+
+ <title>The OTP Test Server Framework</title>
+ <prepared>Mattias Nilsson</prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
+ <file>ts.xml</file>
+ </header>
+ <module>ts</module>
+ <modulesummary>Test Server Framework for testing OTP</modulesummary>
+ <description>
+ <p>This is a framework for testing OTP. The <c>ts</c> module
+ implements the interface to all the functionality in the
+ framework.
+ </p>
+ <p>The framework is built on top of the Test Server Controller,
+ <c>test_server_ctrl</c>, and provides a high level operator
+ interface. The main features added by the framework are:
+ </p>
+ <list type="bulleted">
+ <item>Automatic compilation of test suites and data directories
+ </item>
+ <item>Collection of files in central directories and creation of
+ additional HTML pages for better overview.
+ </item>
+ <item>Single command interface for running all available tests
+ </item>
+ <item>Spawns a new node with correct parameters before starting
+ the test server
+ </item>
+      <item>Automatically creates the parameter file needed when
+ running tests on remote target
+ </item>
+ </list>
+ <p>More information about the Test Server Framework and how to run
+ test cases can be found in the Test Server User's Guide.
+ </p>
+    <p>For writing your own test server framework, please turn to the
+ reference manual for the Test Server Controller and chapter named
+ "Writing your own test server framework" in the Test Server User's
+ Guide.
+ </p>
+ <p>SETUP</p>
+ <p>To be able to run <c>ts</c>, you must first `install'
+ <c>ts</c> for the current environment. This is done by calling
+ <c>ts:install/0/1/2</c>. A file called `variables' is created
+ and used by <c>ts</c> when running test suites. It is not
+ recommended to edit this file, but it is possible to alter if
+ <c>ts</c> gets the wrong idea about your environment.
+ </p>
+ <p><c>ts:install/0</c> is used if the target platform is the
+ same as the controller host, i.e. if you run on "local target"
+      and no options are needed. Then running <c>ts:install/0</c>
+ will run an autoconf script for your current
+ environment and set up the necessary variables needed by the
+ test suites.
+ </p>
+ <p><c>ts:install/1</c> or <c>ts:install/2</c> is used if the
+ target platform is different from the controller host, i.e. if
+ you run on "remote target" or if special options are required
+ for your system. VxWorks is currently supported
+ as remote target platform.
+ </p>
+ <p>See the reference manual for detailed information about
+ <c>ts:install/0/1/2</c>.
+ </p>
+ <p>Some of the common variables in the 'variables' file are
+ described below. Do not make any assumptions as of what is found
+ in this file, as it may change at any time.
+ </p>
+ <list>
+ <item><c>longnames</c><br></br>
+ Set to true if the system is using fully qualified
+ nodenames.
+ </item>
+ <item><c>platform_id</c><br></br>
+ This is the currently installed platform identification
+ string.
+ </item>
+ <item><c>platform_filename</c><br></br>
+ This is the name used to create the final save directory
+ for test runs.
+ </item>
+ <item><c>platform_label</c><br></br>
+ This is the string presented in the generated test
+ results index page.
+ </item>
+ <item><c>rsh_name</c><br></br>
+ This is the rsh program to use when starting slave or
+ peer nodes on a remote host.
+ </item>
+ <item><c>erl_flags</c><br></br>
+ Compile time flags used when compiling test suites.
+ </item>
+ <item><c>erl_release</c><br></br>
+ The Erlang/OTP release being tested.
+ </item>
+ <item><c>'EMULATOR'</c><br></br>
+ The emulator being tested (e.g. beam)
+ </item>
+ <item><c>'CPU'</c><br></br>
+ The CPU in the machine running the tests, e.g. sparc.
+ </item>
+ <item><c>target_host</c><br></br>
+ The target host name
+ </item>
+ <item><c>os</c><br></br>
+ The target operating system, e.g. solaris2.8
+ </item>
+ <item><c>target</c><br></br>
+ The current target platform, e.g. sparc-sun-solaris2.8
+ </item>
+ </list>
+ <p>RUNNING TESTS</p>
+ <p>After installing <c>ts</c>, you can run your test with the
+ <c>ts:run/0/1/2/3/4</c> functions. These functions, however,
+ require a special directory structure to be able to find your
+ test suites. Both the test server and all tests must be located
+ under your $TESTROOT directory. The test server implementation
+ shall be located in the directory <c>$TESTROOT/test_server</c>
+ and for each application there must be a directory named
+ <c><![CDATA[$TESTROOT/<application>_test]]></c> containing the .spec file
+ and all test suites and data directories for the
+ application. Note that there shall only be one .spec file for
+ each application.
+ </p>
+ <p><c>$TESTROOT/test_server</c> must be the current directory
+ when calling the <c>ts:run/*</c> function.
+ </p>
+ <p>All available tests can be found with <c>ts:tests()</c>. This
+ will list all applications for which a test specification file
+ <c><![CDATA[../<application>_test/<application>.spec]]></c> can be found.
+ </p>
+ <p>To run all these tests, use <c>ts:run()</c>.
+ </p>
+ <p>To run one or some of the tests, use <c>ts:run(Tests)</c>,
+ where <c>Tests</c> is the name of the application you want to
+ test, or a list of such names.
+ </p>
+ <p>To run one test suite within a test, use
+ <c>ts:run(Test,Suite)</c>.
+ </p>
+ <p>To run one test case within a suite, use
+ <c>ts:run(Test,Suite,Case)</c></p>
+ <p>To all these functions, you can also add a list of
+ options. Please turn to the reference manual for the <c>ts</c>
+ module to see the valid options to use.
+ </p>
+ <p>The function <c>ts:help()</c> displays some simple help for
+ the functions in <c>ts</c>. Use this for quick reference.
+ </p>
+ <p>LOG FILES</p>
+ <p>As the execution of the test suites go on, events are logged in
+ four different ways:
+ </p>
+ <list type="bulleted">
+ <item>Text to the operator's console.</item>
+ <item>Suite related information is sent to the major log file.</item>
+ <item>Case related information is sent to the minor log file.</item>
+ <item>The HTML log file gets updated with test results.</item>
+ </list>
+ <p>Typically the operator, who may run hundreds or thousands of
+ test cases, doesn't want to fill the screen with details
+ about/from the specific test cases. By default, the operator will
+ only see:
+ </p>
+ <list type="bulleted">
+ <item>A confirmation that the test has started.
+ </item>
+ <item>A small note about each failed test case.
+ </item>
+ <item>A summary of all the run test cases.
+ </item>
+ <item>A confirmation that the test run is complete
+ </item>
+ <item>Some special information like error reports and progress
+ reports, printouts written with erlang:display/1 or io:format/3
+ specifically addressed to somewhere other than
+ <c>standard_io</c>.</item>
+ </list>
+ <p>This is enough for the operator to know, and if he wants to dig
+ in deeper into a specific test case result, he can do so by
+ following the links in the HTML presentation to take a look in the
+ major or minor log files.
+ </p>
+ <p>A detailed report of the entire test suite is stored in the
+ major logfile, the exact reason for failure, time spent etc.
+ </p>
+ <p>The HTML log file is a summary of the major log file, but gives
+ a much better overview of the test run. It also has links to every
+ test case's log file for quick viewing with a HTML browser.
+ </p>
+    <p>The minor log files contain full details of every single test
+ case, each one in a separate file. This way the files should be
+ easy to compare with previous test runs, even if the set of test
+ cases change.
+ </p>
+    <p>Which information goes where is user configurable via the
+ test server controller. Three threshold values determine what
+ comes out on screen, and in the major or minor log files. The
+ contents that goes to the HTML log file is fixed, and cannot be
+ altered.
+ </p>
+
+ </description>
+ <funcs>
+ <func>
+ <name>install() -> ok | {error, Reason}</name>
+ <name>install(TargetSystem) -> ok | {error, Reason}</name>
+ <name>install(Opts) -> ok | {error, Reason}</name>
+ <name>install(TargetSystem,Opts) -> ok | {error, Reason}</name>
+ <fsummary>Installs the Test Server Framework</fsummary>
+ <type>
+ <v>TargetSystem = {Architecture, TargetHost}</v>
+ <v>Architecture = atom() or string()</v>
+ <d>e.g. "ose" or "vxworks_ppc603"</d>
+ <v>TargetHost = atom() or string()</v>
+ <d>The name of the target host</d>
+ <v>Opts = list()</v>
+ </type>
+ <desc>
+ <p>Installs and configures the Test Server Framework for
+ running test suites. If a remote host is to be used, the
+ <c>TargetSystem</c> argument must be given so that "cross
+ installation" can be done. This should be used for testing on
+ VxWorks or OSE/Delta. Installation is required for any of the
+ functions in <c>ts</c> to work.
+ </p>
+ <p>Opts may be one or more of
+ </p>
+ <list>
+ <item><c>{longnames, Bool}</c><br></br>
+ Use fully qualified hostnames for test_server and
+ slave nodes. Bool is <c>true</c> or <c>false</c> (default).
+ </item>
+ <item><c>{verbose, Level}</c><br></br>
+ Verbosity level for test server output, set to 0, 1 or
+          2, where 0 is quiet (default).
+ </item>
+ <item><c>{hosts, Hosts}</c><br></br>
+ This is a list of available hosts on which to start
+ slave nodes. It is used when the <c>{remote, true}</c>
+ option is given to the <c>test_server:start_node/3</c>
+ function. Also, if <c>{require_nodenames, Num}</c> is
+ contained in a test specification file, the generated
+ nodenames will be spread over all hosts given in this
+ <c>Hosts</c> list. The hostnames are given as atoms or
+ strings.
+ </item>
+        <item><c>{slavetargets, SlaveTargets}</c><br></br>
+ For VxWorks and OSE/Delta only. This is a list of
+ available hosts where slave nodes can be started. This is
+ necessary because only one node can run per host in the
+ VxWorks environment. This is not the same as
+ <c>{hosts, Hosts}</c> because it is used for all slave nodes
+ - not only the ones started with <c>{remote, true}</c>. The
+ hostnames are given as atoms or strings.
+ </item>
+ <item><c>{crossroot, TargetErlRoot}</c><br></br>
+ Erlang root directory on target host
+ <br></br>
+This option is mandatory for remote targets
+ </item>
+ <item><c>{master, {MasterHost, MasterCookie}}</c><br></br>
+ If target is remote and the target node is started as
+ a slave node, this option
+ indicates which master and cookie to use. The given master
+ will also be used as master for slave nodes started with
+ <c>test_server:start_node/3</c>. It is expected that the
+ <c>erl_boot_server</c> is started on the master node before
+ the test is run. If this option is not given, the test
+ server controller node is used as master and the
+ <c>erl_boot_server</c> is automatically started.
+ </item>
+ <item><c>{erl_start_args, ArgString}</c><br></br>
+ Additional arguments to be used when starting the test
+ server controller node. <c>ArgString</c> will be appended to
+ the command line when starting the erlang node. Note that
+ this will only affect the startup of the <em>controller node</em>,
+ i.e. not the target node or any slave nodes
+          started from a test case.
+ </item>
+ <item><c>{ipv6_hosts, HostList}</c><br></br>
+ This option will be inserted in the
+ <c>Config</c> parameter for each test case. <c>HostList</c>
+ is a list of hosts supporting IPv6.
+ </item>
+ </list>
+ </desc>
+ </func>
+ <func>
+ <name>help() -> ok</name>
+ <fsummary>Presents simple help on the functions in <c>ts</c></fsummary>
+ <desc>
+ <p>Presents simple help on the functions in <c>ts</c>. Useful
+ for quick reference.</p>
+ </desc>
+ </func>
+ <func>
+ <name>tests() -> Tests</name>
+ <fsummary>Returns the list of available tests</fsummary>
+ <desc>
+ <p>Returns the list of available tests. This is actually just
+ a list of all test specification files found by looking up
+ "../*_test/*.spec".
+ </p>
+ <p>In each ../Name_test/ directory there should be one test
+ specification file named Name.spec.</p>
+ </desc>
+ </func>
+ <func>
+ <name>run() -> ok | {error, Reason}</name>
+ <name>run([all_tests|Opts])</name>
+ <name>run(Specs)</name>
+ <name>run(Specs, Opts)</name>
+ <name>run(Spec, Module)</name>
+ <name>run(Spec, Module, Opts)</name>
+ <name>run(Spec, Module, Case)</name>
+ <name>run(Spec, Module, Case, Opts)</name>
+ <fsummary>Runs (specified) test suite(s)</fsummary>
+ <type>
+ <v>Specs = Spec | [Spec]</v>
+ <v>Spec = atom()</v>
+ <v>Module = atom()</v>
+ <v>Case = atom()</v>
+ <v>Opts = [Opt]</v>
+ <v>Opt = batch | verbose | {verbose, Level} | {vars, Vars} | keep_topcase | cover | cover_details |{cover,CoverFile} | {cover_details,CoverFile} | {trace, TraceSpec}</v>
+ <v>Level = integer(); 0 means silent</v>
+ <v>Vars = list() of key-value tuples</v>
+ <v>CoverFile = string(); name of file listing modules to exclude from or include in cover compilation. The name must include full path to the file.</v>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p>This function runs test suite(s)/case(s). To be able to run
+ any tests, ts:install must first be called to create the
+ <c>variables</c> file needed. To run a whole test specification,
+ only specify the name of the test specification, and all test
+ suite modules belonging to that test spec will be run. To run
+ a single module in a test specification, use the Module
+ argument to specify the name of the module to run and all test
+ cases in that module will be run, and to run a specified test
+ case, specify the name of the test case using the Case
+ argument. If called with no argument, all test specifications
+ available will be run. Use ts:tests/0 to see the available
+ test specifications.
+ </p>
+ <p>If the <c>batch</c> option is not given, a new xterm is
+ started (unix) when <c>ts:run</c> is called.
+ </p>
+ <p>The <c>verbose</c> option sets the verbosity level for test
+ server output. This has the same effect as if given to
+ <c>ts:install/1/2</c></p>
+ <p>The <c>vars</c> option can be used for adding configuration
+ variables that are not in the <c>variables</c> file generated
+ during installation. Can be any of the <c>Opts</c> valid for
+ <c>ts:install/1/2</c>.
+ </p>
+ <p>The <c>keep_topcase</c> option forces <c>ts</c> to keep the
+ topcase in your test specification file as is. This option can
+ only be used if you don't give the <c>Module</c> or
+ <c>Case</c> parameters to <c>ts:run</c>. The
+ <c>keep_topcase</c> option is necessary if your topcase
+ contains anything other than <c><![CDATA[{dir,"../<Name>_test"}]]></c>. If
+ the option is not used, <c>ts</c> will modify your topcase.
+ </p>
+        <p>The <c>cover</c> and <c>cover_details</c> options indicate
+ that the test shall be run with code coverage
+ analysis. <c>cover_details</c> means that analysis shall be
+ done on the most detailed level. If the test is run with a
+ remote target, this option creates a list of uncovered lines
+ in each cover compiled module. If the test is run with a local
+ target, each cover compiled module will be analysed with
+ <c>cover:analyse_to_file/1</c>. The <c>cover</c> options will
+ only create an overview of all cover compiled modules with the
+ number of covered and not covered lines.
+ </p>
+ <p>The <c>CoverFile</c> which can be given with the
+ <c>cover</c> and <c>cover_details</c> options must be the
+ filename of a file listing modules to be excluded from or
+ included in the cover compilation. By default, <c>ts</c>
+ believes that <c>Spec</c> is the name of an OTP application
+ and that all modules in this application shall be cover
+ compiled. The <c>CoverFile</c> can exclude modules that belong
+ to the application and add modules that don't belong to the
+ application. The file can have the following entries:</p>
+ <code type="none">
+{exclude, all | ExcludeModuleList}.
+{include, IncludeModuleList}. </code>
+ <p>Note that each line must end with a full
+ stop. <c>ExcludeModuleList</c> and <c>IncludeModuleList</c>
+ are lists of atoms, where each atom is a module name.
+ </p>
+ <p>If the <c>cover</c> or <c>cover_details</c> options are
+ given on their own, the directory <c><![CDATA[../<Spec>_test]]></c> is
+ searched for a <c>CoverFile</c> named <c><![CDATA[<Spec>.cover]]></c>. If
+ this file is not found, <c>Spec</c> is assumed to be the name
+ of an OTP application, and all modules in the <c>ebin</c>
+ directory for the application are cover compiled. The
+ <c>ebin</c> directory is found by adding <c>ebin</c> to
+ <c>code:lib_dir(Spec)</c>.
+ </p>
+ <p>The same cover compiled code will be loaded on all slave or
+ peer nodes started with <c>test_server:start_node/3</c>. The
+ exception is nodes that run an old version of OTP. If the loading
+ fails, the node will simply not be a part of the coverage
+ analysis. Note that slave and peer nodes must be stopped with
+ <c>test_server:stop_node/1</c> for the node to be part of the
+ coverage analysis, else the test server will not be able to
+ fetch coverage data from the node.
+ </p>
+ <p>The <c>trace</c> option is used to turn on call trace on
+ target and on slave or peer nodes started with
+ <c>test_server:start_node/3</c>. <c>TraceSpec</c> can be the
+ name of a trace information file, or a list of elements like
+ the ones in a trace information file. Please turn to the
+ reference manual for <c>test_server_ctrl:trc/1</c> for details
+ about the trace information file.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name>cross_cover_analyse(Level) -> ok</name>
+ <name>cross_cover_analyse([Level]) -> ok</name>
+ <fsummary>Analyse cover data collected from all tests</fsummary>
+ <desc>
+ <p>Analyse cover data collected from all tests.
+ </p>
+ <p>See test_server_ctrl:cross_cover_analyse/1
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name>r() -> ok</name>
+ <name>r(Opts) -> ok</name>
+ <name>r(SpecOrSuite) -> ok</name>
+ <name>r(SpecOrSuite,Opts) -> ok</name>
+ <name>r(Suite,Case) -> ok</name>
+ <name>r(Suite,Case,Opts) -> ok</name>
+      <fsummary>Run test suite or test case without <c>ts</c> installed</fsummary>
+ <type>
+ <v>SpecOrSuite = Spec | Suite</v>
+ <v>Spec = string()</v>
+ <d>"Name.spec" or "Name.spec.OsType", where OsType is vxworks</d>
+ <v>Suite = atom()</v>
+ <v>Case = atom()</v>
+ <v>Opts = [Opt]</v>
+ <v>Opt = {Cover,AppOrCoverFile} | {Cover,Application,CoverFile}</v>
+ <v>Cover = cover | cover_details</v>
+ <v>AppOrCoverFile = Application | CoverFile</v>
+ <v>Application = atom()</v>
+ <d>OTP application to cover compile</d>
+ <v>CoverFile = string()</v>
+ <d>Name of file listing modules to exclude from or include in cover compilation</d>
+ </type>
+ <desc>
+        <p>This function can be used to run test suites or test
+ cases directly, without any of the additional features added
+ by the test server framework. It is simply a wrapper function
+ for the <c>add_dir</c>, <c>add_spec</c>, <c>add_module</c> and
+ <c>add_case</c> functions in <c>test_server_ctrl</c>:
+ </p>
+ <p><c>r() -> add_dir(".")</c> <br></br>
+<c>r(Spec) -> add_spec(Spec)</c> <br></br>
+<c>r(Suite) -> add_module(Suite)</c> <br></br>
+<c>r(Suite,Case) -> add_case(Suite,Case)</c></p>
+ <p>To use this function, it is required that the test suite is
+ compiled and in the code path of the node where the function
+ is called. The function can be used without having <c>ts</c>
+ installed.
+ </p>
+ <p>For information about the <c>cover</c> and
+ <c>cover_details</c> options, see <c>test_server_ctrl:cover/2/3</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>index() -> ok | {error, Reason}</name>
+ <fsummary>Updates local index page</fsummary>
+ <type>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p>This function updates the local index page. This can be
+ useful if a previous test run was not completed and the index
+ is incomplete.</p>
+ </desc>
+ </func>
+ <func>
+ <name>clean() -> ok</name>
+ <name>clean(all) -> ok</name>
+ <fsummary>Cleans up the log directories created when running tests. </fsummary>
+ <desc>
+ <p>This function cleans up log directories created when
+ running test cases. <c>clean/0</c> cleans up all but the last
+ run of each application. <c>clean/1</c> cleans up all test
+ runs found.</p>
+ </desc>
+ </func>
+ <func>
+ <name>estone() -> ok | {error, Reason}</name>
+ <name>estone(Opts) -> ok</name>
+ <fsummary>Runs the EStone test</fsummary>
+ <desc>
+ <p>This function runs the EStone test. It is a shortcut for
+ running the test suite <c>estone_SUITE</c> in the
+ <c>kernel</c> application.
+ </p>
+ <p><c>Opts</c> is the same as the <c>Opts</c> argument for the
+ <c>ts:run</c> functions.</p>
+ </desc>
+ </func>
+ </funcs>
+
+ <section>
+    <title>Makefile.src in Data Directory</title>
+ <p>If a data directory contains code which must be compiled before
+ the test suite is run, a makefile source called
+ <c>Makefile.src</c> can be placed in the data directory. This file
+ will be converted to a valid makefile by <c>ts:run/0/1/2/3/4</c>.
+ </p>
+ <p>The reason for generating the makefile is that you can use
+ variables from the <c>variables</c> file which was generated by
+ <c>ts:install/0/1/2</c>. All occurrences of <c>@Key@</c> in
+ <c>Makefile.src</c> is substituted by the <c>Value</c> from
+ <c>{Key,Value}</c> found in the <c>variables</c> file. Example:
+ </p>
+ <p>Cut from <c>variables</c>:</p>
+ <code type="none">
+ ...
+ {'EMULATOR',"beam"}.
+ {'CFLAGS',"-g -O2"}.
+ {'LD',"$(CC) $(CFLAGS)"}.
+ {'CC',"gcc"}.
+ ...
+ </code>
+ <p><c>Makefile.src</c> for compiling erlang code could look
+ something like this:</p>
+ <code type="none">
+ EFLAGS=+debug_info
+
+ all: ordsets1.@EMULATOR@
+
+ ordsets1.@EMULATOR@: ordsets1.erl
+ erlc $(EFLAGS) ordsets1.erl
+ </code>
+ <p><c>Makefile.src</c> for compiling c code could look
+ something like this:</p>
+ <code type="none">
+ CC = @CC@
+ LD = @LD@
+ CFLAGS = @CFLAGS@ -I@erl_include@ @DEFS@
+ CROSSLDFLAGS = @CROSSLDFLAGS@
+
+ PROGS = nfs_check@exe@
+
+ all: $(PROGS)
+
+ nfs_check@exe@: nfs_check@obj@
+ $(LD) $(CROSSLDFLAGS) -o nfs_check nfs_check@obj@ @LIBS@
+
+ nfs_check@obj@: nfs_check.c
+ $(CC) -c -o nfs_check@obj@ $(CFLAGS) nfs_check.c
+ </code>
+ </section>
+</erlref>
+
diff --git a/lib/test_server/doc/src/why_test_chapter.xml b/lib/test_server/doc/src/why_test_chapter.xml
new file mode 100644
index 0000000000..745d4218f1
--- /dev/null
+++ b/lib/test_server/doc/src/why_test_chapter.xml
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Why Test</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ </header>
+
+ <section>
+ <title>Goals</title>
+ <p>It's not possible to prove that a program is correct by
+ testing. On the contrary, it has been formally proven that it is
+ impossible to prove programs in general by testing. Theoretical
+ program proofs or plain examination of code may be viable options
+ for those that wish to certify that a program is correct. The test
+ server, as it is based on testing, cannot be used for
+ certification. Its intended use is instead to (cost effectively)
+ <em>find bugs</em>. A successful test suite is one that reveals a
+ bug. If a test suite results in Ok, then we know very little that
+ we didn't know before.
+ </p>
+ </section>
+
+ <section>
+ <title>What to test?</title>
+ <p>There are many kinds of test suites. Some concentrate on
+ calling every function in the interface to some module or
+      server. Some others do the same, but use all kinds of illegal
+ parameters, and verifies that the server stays alive and rejects
+ the requests with reasonable error codes. Some test suites
+ simulate an application (typically consisting of a few modules of
+ an application), some try to do tricky requests in general, some
+ test suites even test internal functions.
+ </p>
+ <p>Another interesting category of test suites are the ones that
+ check that fixed bugs don't reoccur. When a bugfix is introduced,
+ a test case that checks for that specific bug should be written
+ and submitted to the affected test suite(s).
+ </p>
+ <p>Aim for finding bugs. Write whatever test that has the highest
+ probability of finding a bug, now or in the future. Concentrate
+ more on the critical parts. Bugs in critical subsystems are a lot
+ more expensive than others.
+ </p>
+ <p>Aim for functionality testing rather than implementation
+ details. Implementation details change quite often, and the test
+ suites should be long lived. Often implementation details differ
+ on different platforms and versions. If implementation details
+ have to be tested, try to factor them out into separate test
+ cases. Later on these test cases may be rewritten, or just
+ skipped.
+ </p>
+ <p>Also, aim for testing everything once, no less, no more. It's
+ not effective having every test case fail just because one
+ function in the interface changed.
+ </p>
+ </section>
+
+ <section>
+ <title>How much to test</title>
+ <p>There is a unix shell script that counts the number of non
+ commented words (lines and characters too) of source code in each
+ application's test directory and divides with the number of such
+ source words in the src directory. This is a measure of how much
+ test code there is.
+ </p>
+ <p>There has been much debate over how much test code, compared to
+ production code, should be written in a project. More test code
+ finds more bugs, but test code needs to be maintained just like
+ the production code, and it's expensive to write it in the first
+ place. In several articles from relatively mature software
+ organizations that I have read, the amount of test code has been
+ about the same as the production code. </p>
+ <p>In OTP, at the time of
+ writing, few applications come even close to this, some have no
+ test code at all.
+ </p>
+
+ <section>
+ <title>Full coverage</title>
+ <p>It is possible to cover compile the modules being tested
+ before running the test suites. Doing so displays which branches
+      of the code are tested by the test suite, and which are
+ not. Many use this as a measure of a good test suite. When every
+ single line of source code is covered once by the test suite,
+ the test suite is finished.
+ </p>
+ <p>A coverage of 100% still proves nothing, though. It doesn't
+ mean that the code is error free, that everything is tested. For
+ instance, if a function contains a division, it has to be
+ executed at least twice. Once with parameters that cause
+ division by zero, and once with other parameters.
+ </p>
+ <p>High degree of coverage is good of course, it means that no
+      major parts of the code have been left untested. It's another
+ question whether it's cost effective. You're only likely to find
+ 50% more bugs when going from 67% to 100% coverage, but the work
+ (cost) is maybe 200% as large, or more, because reaching all of
+ those obscure branches is usually complicated.
+ </p>
+ <p>Again, the reason for testing with the test server is to find
+ bugs, not to create certificates of valid code. Maximizing the
+ number of found bugs per hour probably means not going for 100%
+ coverage. For some module the optimum may be 70%, for some other
+ maybe 250%. 100% shouldn't be a goal in itself.</p>
+ </section>
+
+ <section>
+ <title>User interface testing</title>
+ <p>It is very difficult to do sensible testing of user
+ interfaces, especially the graphic ones. The test server has
+ some support for capturing the text I/O that goes to the user,
+ but none for graphics. There are several tools on the market
+ that help with this.</p>
+ </section>
+ </section>
+</chapter>
+
diff --git a/lib/test_server/doc/src/write_framework_chapter.xml b/lib/test_server/doc/src/write_framework_chapter.xml
new file mode 100644
index 0000000000..2fde67132e
--- /dev/null
+++ b/lib/test_server/doc/src/write_framework_chapter.xml
@@ -0,0 +1,166 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+    <title>Write your own test server framework</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>write_framework_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Introduction</title>
+ <p>The test server controller can be interfaced from the operating
+ system or from within Erlang. The nature of your new framework
+ will decide which interface to use. If you want your framework to
+ start a new node for each test, the operating system interface is
+ very convenient. If your node is already started, going from
+ within Erlang might be a more flexible solution.
+ </p>
+ <p>The two methods are described below.
+ </p>
+ </section>
+
+ <section>
+ <title>Interfacing the test server controller from Erlang</title>
+ <p>Using the test server from Erlang means that you have to start
+ the test server and then add test jobs. Use
+ <c>test_server_ctrl:start/0</c> to start a local target or
+ <c>test_server_ctrl:start/1</c> to start a remote target. The test
+ server is stopped by <c>test_server_ctrl:stop/0</c>.
+ </p>
+ <p>The argument to <c>test_server_ctrl:start/1</c> is the name of a
+ parameter file. The parameter file specifies what type of target
+ to start and where to start it, as well as some additional
+ parameters needed for different target types. See the reference
+ manual for a detailed description of all valid parameters.
+ </p>
+
+ <section>
+ <title>Adding test jobs</title>
+ <p>There are many commands available for adding test cases to
+ the test server's job queue: <br></br>
+</p>
+ <list type="bulleted">
+ <item>Single test case <br></br>
+<c>test_server_ctrl:add_case/2/3</c></item>
+ <item>Multiple test cases from same suite <br></br>
+<c>test_server_ctrl:add_cases/2/3</c></item>
+ <item>Test suite module or modules <br></br>
+<c>test_server_ctrl:add_module/1/2</c></item>
+ <item>Some or all test suite modules in a directory <br></br>
+<c>test_server_ctrl:add_dir/2/3</c></item>
+ <item>Test cases specified in a test specification file <br></br>
+<c>test_server_ctrl:add_spec/1</c></item>
+ </list>
+ <p>All test suites are given a unique name, which is usually
+ given when the test suite is added to the job queue. In some
+ cases, a default name is used, as in the case when a module is
+ added without a specified name. The test job name is used to
+ store logfiles, which are stored in the `name.logs' directory
+ under the current directory.
+ </p>
+ <p>See the reference manual for details about the functions for
+ adding test jobs.
+ </p>
+ </section>
+ </section>
+
+ <section>
+ <title>Interfacing the test server controller from the operating system.</title>
+ <p>The function <c>run_test/1</c> is your interface in the test
+ server controller if you wish to use it from the operating
+ system. You simply start an erlang shell and invoke this function
+ with the <c>-s</c> option. <c>run_test/1</c> starts the test
+ server, runs the test specified by the command line and stops the
+ test server. The argument to <c>run_test/1</c> is a list of
+ command line flags, typically
+ <c>['KEY1', Value1, 'KEY2', Value2, ...]</c>.
+ The valid command line flags are listed in the reference manual
+ for <c>test_server_ctrl</c>.
+ </p>
+ <p>A typical command line may look like this <br></br>
+<c>erl -noshell -s test_server_ctrl run_test KEY1 Value1 KEY2 Value2 ... -s erlang halt</c></p>
+ <p>Or make an alias (this is for unix/tcsh) <br></br>
+<c>alias erl_test 'erl -noshell -s test_server_ctrl run_test \\!* -s erlang halt'</c></p>
+ <p>And then use it like this <br></br>
+<c>erl_test KEY1 Value1 KEY2 Value2 ...</c> <br></br>
+</p>
+
+ <section>
+ <title>An Example</title>
+ <p>An example of starting a test run from the command line <br></br>
+</p>
+ <p><c>erl -name test_srv -noshell -rsh /home/super/otp/bin/ctrsh </c> <br></br>
+<c>-pa /clearcase/otp/erts/lib/kernel/test </c> <br></br>
+<c>-boot start_sasl -sasl errlog_type error </c> <br></br>
+<c>-s test_server_ctrl run_test SPEC kernel.spec -s erlang halt</c> <br></br>
+</p>
+ </section>
+ </section>
+
+ <section>
+ <title>Framework callback functions</title>
+ <p>By defining the environment variable
+ <c>TEST_SERVER_FRAMEWORK</c> to a module name, the framework
+ callback functions can be used. The framework callback functions
+      are called by the test server in order to let the framework interact
+      with the execution of the tests and to keep the framework up to
+      date with information about the test progress.
+ </p>
+ <p>The framework callback functions are described in the reference
+ manual for <c>test_server_ctrl</c>.
+ </p>
+ <p>Note that this topic is in an early stage of development, and
+ changes might occur.
+ </p>
+ </section>
+
+ <section>
+ <title>Other concerns</title>
+    <p>Some things to think about when writing your own test server
+ framework:
+ </p>
+ <list type="bulleted">
+ <item><c>emulator version</c> - Make sure that the intended
+ version of the emulator is started.
+ </item>
+ <item><c>operating system path</c> - If test cases use port
+ programs, make sure the paths are correct.
+ </item>
+ <item><c>recompilation</c> - Make sure all test suites are fresh
+ compiled.
+ </item>
+ <item><c>test_server.hrl</c> - Make sure the
+ <c>test_server.hrl</c> file is in the include path when
+ compiling test suites.
+ </item>
+ <item><c>running applications</c> - Some test suites require
+ some applications to be running (e.g. sasl). Make sure they are
+ started.
+ </item>
+ </list>
+ </section>
+</chapter>
+
diff --git a/lib/test_server/doc/src/write_test_chapter.xml b/lib/test_server/doc/src/write_test_chapter.xml
new file mode 100644
index 0000000000..12f0dfc361
--- /dev/null
+++ b/lib/test_server/doc/src/write_test_chapter.xml
@@ -0,0 +1,228 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2002</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Writing Test Suites</title>
+ <prepared>Siri Hansen</prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>write_test_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Support for test suite authors</title>
+ <p>The <c>test_server</c> module provides some useful functions
+ to support the test suite author. This includes:
+ </p>
+ <list type="bulleted">
+ <item>Starting and stopping slave or peer nodes</item>
+ <item>Capturing and checking stdout output</item>
+ <item>Retrieving and flushing process message queue</item>
+ <item>Watchdog timers</item>
+ <item>Checking that a function crashes</item>
+ <item>Checking that a function succeeds at least m out of n times</item>
+ <item>Checking .app files</item>
+ </list>
+ <p>Please turn to the reference manual for the <c>test_server</c>
+ module for details about these functions.
+ </p>
+ </section>
+
+ <section>
+ <title>Test suites</title>
+ <p>A test suite is an ordinary Erlang module that contains test
+      cases. It's recommended that the module has a name of the form
+ *_SUITE.erl. Otherwise, the directory function will not find the
+ modules (by default).
+ </p>
+ <p>For some of the test server support, the test server include
+ file <c>test_server.hrl</c> must be included. Never include it
+ with the full path, for portability reasons. Use the compiler
+ include directive instead.
+ </p>
+ <p>The special function <c>all(suite)</c> in each module is called
+ to get the test specification for that module. The function
+ typically returns a list of test cases in that module, but any
+ test specification could be returned. Please see the chapter
+ about test specifications for details about this.
+ </p>
+ </section>
+
+ <section>
+ <title>Init per test case</title>
+ <p>In each test suite module, the functions
+ <c>init_per_testcase/2</c> and <c>end_per_testcase/2</c> must be
+ implemented.
+ </p>
+ <p><c>init_per_testcase</c> is called before each test case in the
+ test suite, giving a (limited) possibility for initialization.
+ </p>
+ <p><c>end_per_testcase/2</c> is called after each test case is
+ completed, giving a possibility to clean up.
+ </p>
+ <p>The first argument to these functions is the name of the test
+ case. This can be used to do individual initialization and cleanup for
+      each test case.
+ </p>
+ <p>The second argument is a list of tuples called
+ <c>Config</c>. The first element in a <c>Config</c> tuple
+ should be an atom - a key value to be used for searching.
+ <c>init_per_testcase/2</c> may modify the <c>Config</c>
+      parameter or just return it as is. Whatever is returned by
+ <c>init_per_testcase/2</c> is given as <c>Config</c> parameter to
+ the test case itself.
+ </p>
+ <p>The return value of <c>end_per_testcase/2</c> is ignored by the
+ test server.
+ </p>
+ </section>
+
+ <section>
+ <title>Test cases</title>
+ <p>The smallest unit that the test server is concerned with is a
+ test case. Each test case can in turn test many things, for
+ example make several calls to the same interface function with
+ different parameters.
+ </p>
+ <p>It is possible to put many or few tests into each test
+ case. How many things each test case tests is up to the author,
+ but here are some things to keep in mind.
+ </p>
+    <p>Very small test cases often lead to more code, since
+ initialization has to be duplicated. Larger code, especially with
+ a lot of duplication, increases maintenance and reduces
+ readability.
+ </p>
+ <p>Larger test cases make it harder to tell what went wrong if it
+ fails, and force us to skip larger portions of test code if a
+ specific part fails. These effects are accentuated when running on
+ multiple platforms because test cases often have to be skipped.
+ </p>
+ <p>A test case generally consists of three parts, the
+ documentation part, the specification part and the execution
+ part. These are implemented as three clauses of the same function.
+ </p>
+ <p>The documentation clause matches the argument '<c>doc</c>' and
+      returns a list of strings describing what the test case tests.
+ </p>
+ <p>The specification clause matches the argument '<c>suite</c>'
+ and returns the test specification for this particular test
+ case. If the test specification is an empty list, this indicates
+ that the test case is a leaf test case, i.e. one to be executed.
+ </p>
+ <p><em>Note that the specification clause of a test case is executed on the test server controller host. This means that if target is remote, the specification clause is probably executed on a different platform than the one tested.</em></p>
+ <p>The execution clause implements the actual test case. It takes
+      one argument, <c>Config</c>, which contains configuration
+ information like <c>data_dir</c> and <c>priv_dir</c>. See <seealso marker="#data_priv_dir">Data and Private Directories</seealso> for
+ more information about these.
+ </p>
+ <p>The <c>Config</c> variable can also contain the
+ <c>nodenames</c> key, if requested by the <c>require_nodenames</c>
+ command in the test suite specification file. All <c>Config</c>
+ items should be extracted using the <c>?config</c> macro. This is
+ to ensure future compatibility if the <c>Config</c> format
+ changes. See the reference manual for <c>test_server</c> for
+ details about this macro.
+ </p>
+ <p>If the execution clause crashes or exits, it is considered a
+ failure. If it returns <c>{skip,Reason}</c>, the test case is
+ considered skipped. If it returns <c>{comment,String}</c>,
+ the string will be added in the 'Comment' field on the HTML
+ result page. If the execution clause returns anything else, it is
+ considered a success, unless it is <c>{'EXIT',Reason}</c> or
+ <c>{'EXIT',Pid,Reason}</c> which can't be distinguished from a
+ crash, and thus will be considered a failure.
+ </p>
+ </section>
+
+ <section>
+ <marker id="data_priv_dir"></marker>
+ <title>Data and Private Directories</title>
+ <p>The data directory (<c>data_dir</c>) is the directory where the test
+ module has its own files needed for the testing. A compiler test
+ case may have source files to feed into the compiler, a release
+ upgrade test case may have some old and new release of
+ something. A graphics test case may have some icons and a test
+ case doing a lot of math with bignums might store the correct
+      answers there. The name of the <c>data_dir</c> is the name of
+ the test suite and then "_data". For example,
+ <c>"some_path/foo_SUITE.beam"</c> has the data directory
+ <c>"some_path/foo_SUITE_data/"</c>.
+ </p>
+ <p>The <c>priv_dir</c> is the test suite's private directory. This
+ directory should be used when a test case needs to write to
+ files. The name of the private directory is generated by the test
+ server, which also creates the directory.
+ </p>
+ <p><em>Warning:</em> Do not depend on current directory to be
+ writable, or to point to anything in particular. All scratch files
+ are to be written in the <c>priv_dir</c>, and all data files found
+ in <c>data_dir</c>. If the current directory has to be something
+ specific, it must be set with <c>file:set_cwd/1</c>.
+ </p>
+ </section>
+
+ <section>
+ <title>Execution environment</title>
+ <p>Each time a test case is about to be executed, a new process is
+ created with <c>spawn_link</c>. This is so that the test case will
+ have no dependencies to earlier tests, with respect to process flags,
+ process links, messages in the queue, other processes having registered
+ the process, etc. As little as possible is done to change the initial
+ context of the process (what is created by plain spawn). Here is a
+ list of differences:
+ </p>
+ <list type="bulleted">
+ <item>It has a link to the test server. If this link is removed,
+ the test server will not know when the test case is finished,
+ just wait infinitely.
+ </item>
+ <item>It often holds a few items in the process dictionary, all
+ with names starting with '<c>test_server_</c>'. This is to keep
+ track of if/where a test case fails.
+ </item>
+ <item>There is a top-level catch. All of the test case code is
+      caught, so that the location of a crash can be reported back to
+ the test server. If the test case process is killed by another
+ process (thus the catch code is never executed) the test server
+ is not able to tell where the test case was executing.
+ </item>
+ <item>It has a special group leader implemented by the test
+ server. This way the test server is able to capture the io that
+ the test case provokes. This is also used by some of the test
+ server support functions.
+ </item>
+ </list>
+ <p>There is no time limit for a test case, unless the test case
+ itself imposes such a limit, by calling
+ <c>test_server:timetrap/1</c> for example. The call can be made
+ in each test case, or in the <c>init_per_testcase/2</c>
+ function. Make sure to call the corresponding
+      <c>test_server:timetrap_cancel/1</c> function as well, e.g. in the
+ <c>end_per_testcase/2</c> function, or else the test cases will
+ always fail.
+ </p>
+ </section>
+
+</chapter>
+
diff --git a/lib/test_server/ebin/.gitignore b/lib/test_server/ebin/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/test_server/ebin/.gitignore
diff --git a/lib/test_server/include/test_server.hrl b/lib/test_server/include/test_server.hrl
new file mode 100644
index 0000000000..4b96d84ace
--- /dev/null
+++ b/lib/test_server/include/test_server.hrl
@@ -0,0 +1,32 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-ifdef(line_trace).
+-line_trace(true).
+-define(line,
+ put(test_server_loc,{?MODULE,?LINE}),
+ io:format(lists:concat([?MODULE,",",integer_to_list(?LINE),": ~p"]),
+ [erlang:now()]),).
+-else.
+-define(line,put(test_server_loc,{?MODULE,?LINE}),).
+-endif.
+-define(t,test_server).
+-define(config,test_server:lookup_config).
+
+
diff --git a/lib/test_server/include/test_server_line.hrl b/lib/test_server/include/test_server_line.hrl
new file mode 100644
index 0000000000..60ef860883
--- /dev/null
+++ b/lib/test_server/include/test_server_line.hrl
@@ -0,0 +1,20 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-compile({parse_transform,test_server_line}).
+
diff --git a/lib/test_server/info b/lib/test_server/info
new file mode 100644
index 0000000000..79ccc202d7
--- /dev/null
+++ b/lib/test_server/info
@@ -0,0 +1,2 @@
+group: tools
+short: The OTP Test Server
diff --git a/lib/test_server/prebuild.skip b/lib/test_server/prebuild.skip
new file mode 100644
index 0000000000..8ee4101f6a
--- /dev/null
+++ b/lib/test_server/prebuild.skip
@@ -0,0 +1 @@
+src/autom4te.cache
diff --git a/lib/test_server/src/Makefile b/lib/test_server/src/Makefile
new file mode 100644
index 0000000000..2d7e5b28bc
--- /dev/null
+++ b/lib/test_server/src/Makefile
@@ -0,0 +1,145 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1996-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+
+include $(ERL_TOP)/make/target.mk
+
+# ----------------------------------------------------
+# Configuration info.
+# ----------------------------------------------------
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+VSN=$(TEST_SERVER_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/test_server-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+
+MODULES= test_server_ctrl \
+ test_server_node \
+ test_server \
+ test_server_sup \
+ test_server_line \
+ test_server_h \
+ erl2html2 \
+ vxworks_client
+
+TS_MODULES= \
+ ts \
+ ts_run \
+ ts_reports \
+ ts_lib \
+ ts_make \
+ ts_erl_config \
+ ts_autoconf_win32 \
+ ts_autoconf_vxworks \
+ ts_install
+
+TARGET_MODULES= $(MODULES:%=$(EBIN)/%)
+TS_TARGET_MODULES= $(TS_MODULES:%=$(EBIN)/%)
+
+ERL_FILES= $(MODULES:=.erl)
+TS_ERL_FILES= $(TS_MODULES:=.erl)
+HRL_FILES = ../include/test_server.hrl ../include/test_server_line.hrl
+INTERNAL_HRL_FILES = test_server_internal.hrl
+TS_HRL_FILES= ts.hrl
+C_FILES =
+AUTOCONF_FILES = configure.in conf_vars.in
+COVER_FILES = cross.cover
+PROGRAMS = configure config.sub config.guess install-sh
+CONFIG = ts.config ts.unix.config ts.win32.config ts.vxworks.config
+
+TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR)) \
+ $(APP_TARGET) $(APPUP_TARGET)
+TS_TARGET_FILES = $(TS_MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+TARGETS = $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(PROGRAMS) \
+ $(APP_TARGET) $(APPUP_TARGET)
+TS_TARGETS = $(TS_MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+APP_FILE= test_server.app
+APP_SRC= $(APP_FILE).src
+APP_TARGET= $(EBIN)/$(APP_FILE)
+
+APPUP_FILE= test_server.appup
+APPUP_SRC= $(APPUP_FILE).src
+APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_COMPILE_FLAGS += -I../include
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+tests debug opt: $(TARGETS) $(TS_TARGETS)
+
+clean:
+ rm -f $(TARGET_FILES) $(TS_TARGET_FILES)
+ rm -f core
+
+doc:
+
+configure: configure.in
+ autoconf configure.in > configure
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(ERL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(INTERNAL_HRL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/include
+ $(INSTALL_DATA) $(HRL_FILES) $(RELSYSDIR)/include
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
+
+release_tests_spec: opt
+ $(INSTALL_DIR) $(RELEASE_PATH)/test_server
+ $(INSTALL_DATA) $(ERL_FILES) $(TS_ERL_FILES) \
+ $(HRL_FILES) $(INTERNAL_HRL_FILES) $(TS_HRL_FILES) \
+ $(TARGET_FILES) $(TS_TARGET_FILES) \
+ $(AUTOCONF_FILES) $(C_FILES) $(COVER_FILES) $(CONFIG) \
+ $(RELEASE_PATH)/test_server
+ $(INSTALL_PROGRAM) $(PROGRAMS) $(RELEASE_PATH)/test_server
+
+release_docs_spec:
+
diff --git a/lib/test_server/src/conf_vars.in b/lib/test_server/src/conf_vars.in
new file mode 100644
index 0000000000..7c55d7b9ed
--- /dev/null
+++ b/lib/test_server/src/conf_vars.in
@@ -0,0 +1,25 @@
+CC:@CC@
+LD:@LD@
+CFLAGS:@CFLAGS@
+EI_CFLAGS:@EI_CFLAGS@
+ERTS_CFLAGS:@ERTS_CFLAGS@
+CROSSLDFLAGS:@CROSSLDFLAGS@
+SHLIB_LD:@SHLIB_LD@
+SHLIB_LDFLAGS:@SHLIB_LDFLAGS@
+SHLIB_LDLIBS:@SHLIB_LDLIBS@
+SHLIB_CFLAGS:@SHLIB_CFLAGS@
+SHLIB_EXTRACT_ALL:@SHLIB_EXTRACT_ALL@
+dll:@SHLIB_SUFFIX@
+DEFS:@DEFS@
+ERTS_LIBS:@ERTS_LIBS@
+LIBS:@LIBS@
+target_host:@target_host@
+CPU:@host_cpu@
+os:@host_os@
+target:@host@
+obj:@obj@
+exe:@exe@
+SSLEAY_ROOT:@SSLEAY_ROOT@
+JAVAC:@JAVAC@
+make_command:@make_command@
+test_c_compiler:@test_c_compiler@
diff --git a/lib/test_server/src/config.guess b/lib/test_server/src/config.guess
new file mode 120000
index 0000000000..6f1eeddfcc
--- /dev/null
+++ b/lib/test_server/src/config.guess
@@ -0,0 +1 @@
+../../../erts/autoconf/config.guess \ No newline at end of file
diff --git a/lib/test_server/src/config.sub b/lib/test_server/src/config.sub
new file mode 120000
index 0000000000..47a0f10138
--- /dev/null
+++ b/lib/test_server/src/config.sub
@@ -0,0 +1 @@
+../../../erts/autoconf/config.sub \ No newline at end of file
diff --git a/lib/test_server/src/configure.in b/lib/test_server/src/configure.in
new file mode 100644
index 0000000000..2bbbc18966
--- /dev/null
+++ b/lib/test_server/src/configure.in
@@ -0,0 +1,423 @@
+dnl Process this file with autoconf to produce a configure script for Erlang.
+dnl
+dnl %CopyrightBegin%
+dnl
+dnl Copyright Ericsson AB 1997-2009. All Rights Reserved.
+dnl
+dnl The contents of this file are subject to the Erlang Public License,
+dnl Version 1.1, (the "License"); you may not use this file except in
+dnl compliance with the License. You should have received a copy of the
+dnl Erlang Public License along with this software. If not, it can be
+dnl retrieved online at http://www.erlang.org/.
+dnl
+dnl Software distributed under the License is distributed on an "AS IS"
+dnl basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+dnl the License for the specific language governing rights and limitations
+dnl under the License.
+dnl
+dnl %CopyrightEnd%
+dnl
+
+AC_INIT(conf_vars.in)
+
+AC_CANONICAL_HOST
+
+dnl Checks for programs.
+AC_PROG_CC
+
+DEBUG_FLAGS="-g -DDEBUG"
+if test "$GCC" = yes; then
+ DEBUG_FLAGS="$DEBUG_FLAGS -Wall $CFLAGS"
+fi
+AC_SUBST(DEBUG_FLAGS)
+
+AC_ARG_ENABLE(debug-mode,
+[ --enable-debug-mode enable debug mode],
+[ case "$enableval" in
+ no) ;;
+ *) CFLAGS=$DEBUG_FLAGS ;;
+ esac ], )
+
+AC_CHECK_LIB(m, sin)
+
+#--------------------------------------------------------------------
+# Interactive UNIX requires -linet instead of -lsocket, plus it
+# needs net/errno.h to define the socket-related error codes.
+#--------------------------------------------------------------------
+
+AC_CHECK_LIB(inet, main, [LIBS="$LIBS -linet"])
+AC_CHECK_HEADER(net/errno.h, AC_DEFINE(HAVE_NET_ERRNO_H))
+
+#--------------------------------------------------------------------
+# Linux/tcp.h may be needed for sockopt test in kernel
+#--------------------------------------------------------------------
+
+AC_CHECK_HEADER(linux/tcp.h, AC_DEFINE(HAVE_LINUX_TCP_H))
+AC_MSG_CHECKING(for sane linux/tcp.h)
+AC_TRY_COMPILE([#include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+ #include <unistd.h>
+ #include <stdarg.h>
+ #include <sys/types.h>
+ #include <sys/socket.h>
+ #include <sys/wait.h>
+ #include <linux/tcp.h>
+ #include <netinet/in.h>
+ #include <netdb.h>],
+ [return 0;],
+ have_sane_linux_tcp_h=yes,
+ have_sane_linux_tcp_h=no)
+
+if test $have_sane_linux_tcp_h = yes; then
+ AC_DEFINE(HAVE_SANE_LINUX_TCP_H,[1],
+ [Define if we have sane linux/tcp.h])
+ AC_MSG_RESULT(yes)
+else
+ AC_MSG_RESULT(no)
+fi
+
+
+
+#--------------------------------------------------------------------
+# Linux requires sys/socketio.h instead of sys/sockio.h
+#--------------------------------------------------------------------
+AC_CHECK_HEADER(sys/socketio.h, AC_DEFINE(HAVE_SOCKETIO_H))
+
+
+#--------------------------------------------------------------------
+# Misc
+#--------------------------------------------------------------------
+AC_CHECK_HEADER(poll.h, AC_DEFINE(HAVE_POLL_H))
+
+#--------------------------------------------------------------------
+# The statements below define a collection of symbols related to
+# dynamic loading and shared libraries:
+#
+# SHLIB_CFLAGS - Flags to pass to cc when compiling the components
+# of a shared library (may request position-independent
+# code, among other things).
+# SHLIB_LD - Base command to use for combining object files
+# into a shared library.
+# SHLIB_SUFFIX - Suffix to use for the names of dynamically loadable
+# extensions. An empty string means we don't know how
+# to use shared libraries on this platform.
+#--------------------------------------------------------------------
+
+# Step 1: set the variable "system" to hold the name and version number
+# for the system.
+
+AC_MSG_CHECKING([system version (for dynamic loading)])
+system=`uname -s`-`uname -r`
+AC_MSG_RESULT($system)
+
+# Step 2: check for existence of -ldl library. This is needed because
+# Linux can use either -ldl or -ldld for dynamic loading.
+
+AC_CHECK_LIB(dl, dlopen, have_dl=yes, have_dl=no)
+
+# Step 3: set configuration options based on system name and version.
+
+SHLIB_LDLIBS=
+
+fullSrcDir=`cd $srcdir; pwd`
+case $system in
+ Linux*)
+ SHLIB_CFLAGS="-fPIC"
+ SHLIB_SUFFIX=".so"
+ if test "$have_dl" = yes; then
+ SHLIB_LD="${CC}"
+ SHLIB_LDFLAGS="$LDFLAGS -shared"
+ LD_FLAGS="-rdynamic"
+ else
+ AC_CHECK_HEADER(dld.h, [
+ SHLIB_LD="ld"
+ SHLIB_LDFLAGS="-shared"])
+ fi
+ SHLIB_EXTRACT_ALL=""
+ ;;
+ NetBSD-*|FreeBSD-*|OpenBSD-*)
+ # Not available on all versions: check for include file.
+ AC_CHECK_HEADER(dlfcn.h, [
+ SHLIB_CFLAGS="-fpic"
+ SHLIB_LD="ld"
+ SHLIB_LDFLAGS="$LDFLAGS -Bshareable -x"
+ SHLIB_SUFFIX=".so"
+ ], [
+ # No dynamic loading.
+ SHLIB_CFLAGS=""
+ SHLIB_LD="ld"
+ SHLIB_LDFLAGS="$LDFLAGS"
+ SHLIB_SUFFIX=""
+ AC_MSG_ERROR(don't know how to compile and link dynamic drivers)
+ ])
+ SHLIB_EXTRACT_ALL=""
+ ;;
+ SunOS-4*)
+ SHLIB_CFLAGS="-PIC"
+ SHLIB_LD="ld"
+ SHLIB_LDFLAGS="$LDFLAGS"
+ SHLIB_SUFFIX=".so"
+ SHLIB_EXTRACT_ALL=""
+ ;;
+ SunOS-5*|UNIX_SV-4.2*)
+ SHLIB_CFLAGS="-KPIC"
+ SHLIB_LD="/usr/ccs/bin/ld"
+ SHLIB_LDFLAGS="$LDFLAGS -G -z text"
+ SHLIB_SUFFIX=".so"
+ SHLIB_EXTRACT_ALL="-z allextract"
+ ;;
+ Darwin*)
+ SHLIB_CFLAGS="-fno-common"
+ SHLIB_LD="cc"
+ SHLIB_LDFLAGS="$LDFLAGS -bundle -flat_namespace -undefined suppress"
+ SHLIB_SUFFIX=".so"
+ SHLIB_EXTRACT_ALL=""
+ ;;
+ OSF1*)
+ SHLIB_CFLAGS="-fPIC"
+ SHLIB_LD="ld"
+ SHLIB_LDFLAGS="$LDFLAGS -shared"
+ SHLIB_SUFFIX=".so"
+ SHLIB_EXTRACT_ALL=""
+ ;;
+ *osf5*)
+ SHLIB_CFLAGS="-fPIC"
+ SHLIB_LD="${CC} -shared"
+ SHLIB_LDFLAGS="$LDFLAGS"
+ SHLIB_SUFFIX=".so"
+ SHLIB_EXTRACT_ALL=""
+ ;;
+ *)
+ # No dynamic loading.
+ SHLIB_CFLAGS=""
+ SHLIB_LD="ld"
+ SHLIB_LDFLAGS=""
+ SHLIB_LDLIBS=""
+ SHLIB_SUFFIX=""
+ SHLIB_EXTRACT_ALL=""
+ AC_MSG_ERROR(don't know how to compile and link dynamic drivers)
+ ;;
+esac
+
+# If we're running gcc, then change the C flags for compiling shared
+# libraries to the right flags for gcc, instead of those for the
+# standard manufacturer compiler.
+
+if test "$CC" = "gcc" -o `$CC -v 2>&1 | grep -c gcc` != "0" ; then
+ case $system in
+ AIX-*)
+ ;;
+ BSD/OS*)
+ ;;
+ IRIX*)
+ ;;
+ NetBSD-*|FreeBSD-*|OpenBSD-*)
+ ;;
+ RISCos-*)
+ ;;
+ ULTRIX-4.*)
+ ;;
+ Darwin*)
+ ;;
+ *)
+ SHLIB_CFLAGS="-fPIC"
+ ;;
+ esac
+fi
+
+# Make it possible for erl_interface to use it's own compiler options
+EI_CFLAGS="$CFLAGS"
+
+# Add thread-safety flags if requested
+AC_ARG_ENABLE(shlib-thread-safety,
+[ --enable-shlib-thread-safety enable thread safety for build shared libraries],
+[ case "$enableval" in
+ no) ;;
+ *) SHLIB_CFLAGS="$SHLIB_CFLAGS -D_THREAD_SAFE -D_REENTRANT"
+ CFLAGS="$CFLAGS -D_THREAD_SAFE -D_REENTRANT"
+ ;;
+ esac ], )
+
+SHLIB_CFLAGS="$SHLIB_CFLAGS $CFLAGS"
+
+
+AC_SUBST(CFLAGS)
+AC_SUBST(SHLIB_LD)
+AC_SUBST(SHLIB_LDFLAGS)
+AC_SUBST(SHLIB_LDLIBS)
+AC_SUBST(SHLIB_CFLAGS)
+AC_SUBST(SHLIB_SUFFIX)
+AC_SUBST(SHLIB_EXTRACT_ALL)
+AC_SUBST(EI_CFLAGS)
+
+#--------------------------------------------------------------------
+# Check for the existence of the -lsocket and -lnsl libraries.
+# The order here is important, so that they end up in the right
+# order in the command line generated by make. Here are some
+# special considerations:
+# 1. Use "connect" and "accept" to check for -lsocket, and
+# "gethostbyname" to check for -lnsl.
+# 2. Use each function name only once: can't redo a check because
+# autoconf caches the results of the last check and won't redo it.
+# 3. Use -lnsl and -lsocket only if they supply procedures that
+# aren't already present in the normal libraries. This is because
+# IRIX 5.2 has libraries, but they aren't needed and they're
+# bogus: they goof up name resolution if used.
+# 4. On some SVR4 systems, can't use -lsocket without -lnsl too.
+# To get around this problem, check for both libraries together
+# if -lsocket doesn't work by itself.
+#--------------------------------------------------------------------
+
+erl_checkBoth=0
+AC_CHECK_FUNC(connect, erl_checkSocket=0, erl_checkSocket=1)
+if test "$erl_checkSocket" = 1; then
+ AC_CHECK_LIB(socket, main, LIBS="$LIBS -lsocket", erl_checkBoth=1)
+fi
+if test "$erl_checkBoth" = 1; then
+ tk_oldLibs=$LIBS
+ LIBS="$LIBS -lsocket -lnsl"
+ AC_CHECK_FUNC(accept, erl_checkNsl=0, [LIBS=$tk_oldLibs])
+fi
+AC_CHECK_FUNC(gethostbyname, , AC_CHECK_LIB(nsl, main, [LIBS="$LIBS -lnsl"]))
+
+dnl Checks for library functions.
+AC_CHECK_FUNCS(strerror)
+AC_CHECK_FUNCS(vsnprintf)
+
+# First check if the library is available, then if we can choose between
+# two versions of gethostbyname
+AC_HAVE_LIBRARY(resolv)
+AC_CHECK_LIB(resolv, res_gethostbyname,[DEFS="$DEFS -DHAVE_RES_GETHOSTBYNAME=1"])
+
+#--------------------------------------------------------------------
+# Emulator compatible flags (for drivers)
+#--------------------------------------------------------------------
+
+ERTS_CFLAGS=$CFLAGS
+AC_SUBST(ERTS_CFLAGS)
+
+ERTS_LIBS=$LIBS
+AC_SUBST(ERTS_LIBS)
+
+#--------------------------------------------------------------------
+# Special compiler macro to handle cross compiling
+# (HCC) is used to compile tools run in the HOST environment
+#--------------------------------------------------------------------
+HCC='$(CC)'
+AC_SUBST(HCC)
+
+#--------------------------------------------------------------------
+# ld is used for linking on vxworks
+#--------------------------------------------------------------------
+LD='$(CC) $(CFLAGS)'
+AC_SUBST(LD)
+
+#--------------------------------------------------------------------
+# object file suffix
+#--------------------------------------------------------------------
+obj='.o'
+AC_SUBST(obj)
+
+#--------------------------------------------------------------------
+# executable file suffix
+#--------------------------------------------------------------------
+exe=''
+AC_SUBST(exe)
+
+#--------------------------------------------------------------------
+# flags when linking for cross platform targets (yet 'tis useful with
+# native builds)
+#--------------------------------------------------------------------
+CROSSLDFLAGS=''
+AC_SUBST(CROSSLDFLAGS)
+
+dnl
+dnl SSL and CRYPTO needs the library openSSL/ssleay
+dnl
+dnl Check flags --with-ssl, --without-ssl --with-ssl=PATH.
+dnl If no option is given or --with-ssl is set without a path then we
+dnl search for SSL libraries and header files in the standard locations.
+dnl If set to --without-ssl we disable the use of SSL
+dnl If set to --with-ssl=PATH we use that path as the prefix, i.e. we
+dnl use "PATH/include" and "PATH/lib".
+
+AC_SUBST(SSLEAY_ROOT)
+TARGET=$host
+
+# We search for SSL. First in the OTP team ClearCase standard location,
+# then in the common OS standard locations
+# No we do not.
+SSL_APP=ssl
+CRYPTO_APP=crypto
+SSLEAY_ROOT=$TARGET
+#for dir in /usr /usr/pkg /usr/local /usr/local/ssl /usr/lib/ssl /usr/ssl; do
+# AC_CHECK_HEADER($dir/include/openssl/opensslv.h,
+# ac_cv_openssl=yes, ac_cv_openssl=no)
+# if test $ac_cv_openssl = yes ; then
+# SSLEAY_ROOT="$dir"
+# ssl_found=yes
+# break
+# fi
+#done
+
+# Find a usable java compiler
+#
+# WARNING this code is copied from ERTS configure.in, and should be
+# updated if that code changes. I hate duplicating code, but what
+# can I do.
+#
+dnl ERL_TRY_LINK_JAVA(CLASSES, FUNCTION-BODY
+dnl [ACTION_IF_FOUND [, ACTION-IF-NOT-FOUND]])
+dnl Freely inspired by AC_TRY_LINK. (Maybe better to create a
+dnl AC_LANG_JAVA instead...)
+AC_DEFUN(ERL_TRY_LINK_JAVA,
+[java_link='$JAVAC conftest.java 1>&AC_FD_CC'
+changequote(�, �)dnl
+cat > conftest.java <<EOF
+�$1�
+class conftest { public static void main(String[] args) {
+ �$2�
+ ; return; }}
+EOF
+changequote([, ])dnl
+if AC_TRY_EVAL(java_link) && test -s conftest.class; then
+ ifelse([$3], , :, [rm -rf conftest*
+ $3])
+else
+ echo "configure: failed program was:" 1>&AC_FD_CC
+ cat conftest.java 1>&AC_FD_CC
+ echo "configure: PATH was $PATH" 1>&AC_FD_CC
+ifelse([$4], , , [ rm -rf conftest*
+ $4
+])dnl
+fi
+rm -f conftest*])
+dnl
+AC_CHECK_PROGS(JAVAC, javac guavac gcj jikes bock)
+if test -n "$JAVAC"; then
+ dnl Make sure it's at least JDK 1.5
+ AC_CACHE_CHECK(for JDK version 1.5,
+ ac_cv_prog_javac_ver_1_5,
+ [ERL_TRY_LINK_JAVA([], [for (String i : args);],
+ ac_cv_prog_javac_ver_1_5=yes, ac_cv_prog_javac_ver_1_5=no)])
+ if test $ac_cv_prog_javac_ver_1_5 = no; then
+ unset -v JAVAC
+ fi
+fi
+if test -n "$JAVAC"; then
+ AC_SUBST(JAVAC)
+ :
+fi
+
+AC_CHECK_PROGS([make_command], [make gmake], [false])
+AC_SUBST(make_command)
+
+if test "$GCC" = yes; then
+ test_c_compiler="{gnuc, undefined}"
+else
+ test_c_compiler="undefined"
+fi
+AC_SUBST(test_c_compiler)
+
+AC_OUTPUT(conf_vars)
diff --git a/lib/test_server/src/cross.cover b/lib/test_server/src/cross.cover
new file mode 100644
index 0000000000..07bf0bed5c
--- /dev/null
+++ b/lib/test_server/src/cross.cover
@@ -0,0 +1,20 @@
+%%% This is an -*- erlang -*- file.
+%%%
+%%% Elements in this file shall be on the form
+%%% {Application,Modules}.
+%%%
+%%% Application is the name of an application or the atom all.
+%%% Modules is a list of module names
+%%%
+%%% The Application shall include the listed Modules in its cover compilation,
+%%% but not in the cover analysis.
+%%% If Application=all it means that all applications shall include the listed
+%%% Modules in the cover compilation.
+%%%
+%%% After all tests are completed, the listed modules are analysed with cover
+%%% data from all tests and the result is stored under the application where
+%%% the modules belong.
+
+{all,[]}.
+
+{observer,[dbg]}.
diff --git a/lib/test_server/src/erl2html2.erl b/lib/test_server/src/erl2html2.erl
new file mode 100644
index 0000000000..c94d4627f9
--- /dev/null
+++ b/lib/test_server/src/erl2html2.erl
@@ -0,0 +1,182 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%------------------------------------------------------------------
+%%% Purpose: Convert Erlang files to HTML. (Pretty faaast... :-)
+%%%------------------------------------------------------------------
+
+%--------------------------------------------------------------------
+% Some stats (Sparc5@110Mhz):
+% 4109 lines (erl_parse.erl): 3.00 secs
+% 1847 lines (application_controller.erl): 0.57 secs
+% 3160 lines (test_server.erl): 1.00 secs
+% 1199 lines (ts_estone.erl): 0.35 secs
+%
+% Avg: ~4.5e-4s/line, or ~0.45s/1000 lines, or ~2200 lines/sec.
+%--------------------------------------------------------------------
+
+-module(erl2html2).
+-export([convert/2]).
+
+%% convert(File, Dest) -> ok | {error,Reason}
+%%   File - path of the Erlang source file to render
+%%   Dest - path of the HTML file to write
+%%
+%% Reads the whole source file into memory, wraps it in an HTML 3.2
+%% page, runs the code through root/3 (line numbering and markup) and
+%% writes the result to Dest. statistics(runtime) is started here so
+%% that footer/1 can report the elapsed conversion time.
+convert([], _Dest) -> % Fake clause.
+    ok;
+convert(File, Dest) ->
+    case file:read_file(File) of
+        {ok, Bin} ->
+            Code=binary_to_list(Bin),
+            statistics(runtime),
+            %% The generated code uses the BGCOLOR attribute in the
+            %% BODY tag, which wasn't valid until HTML 3.2. Also,
+            %% good HTML should either override all colour attributes
+            %% or none of them -- *never* just a few.
+            %%
+            %% FIXME: The colours should *really* be set with
+            %% stylesheets...
+            Html0
+                = ["<!DOCTYPE HTML PUBLIC "
+                   "\"-//W3C//DTD HTML 3.2 Final//EN\">\n"
+                   "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
+                   "<html>\n"
+                   "<head><title>", File, "</title></head>\n\n"
+                   "<body bgcolor=\"white\" text=\"black\""
+                   " link=\"blue\" vlink=\"purple\" alink=\"red\">\n"],
+            {Html1, Lines} = root(Code, [], 1),
+            Html = [Html0,
+                    "<pre>\n", Html1, "</pre>\n",
+                    footer(Lines),"</body>\n</html>\n"],
+            file:write_file(Dest, Html);
+        {error, Reason} ->
+            {error, Reason}
+    end.
+
+%% root(Code, Res, Line) -> {Html,LastLine}
+%% Top-level scanner: dispatches on the first character of each source
+%% line. '-' starts an attribute (rendered bold), '%' a comment
+%% (italic), space/tab/newline are continuation or blank lines, a
+%% lowercase letter is assumed to start a function clause (rendered
+%% bold inside a named anchor), anything else is copied plainly.
+%% Res is an iolist accumulator; Line is the current line number.
+root([], Res, Line) ->
+    {Res, Line};
+root([Char0|Code], Res, Line0) ->
+    %% Wrap the character in a list so string literals can be matched below.
+    Char = [Char0],
+    case Char of
+        "-" ->
+            %% Attribute: bold up to the first '(' or '.', then the rest
+            %% of the line verbatim.
+            {Match, Line1, NewCode0, AttName} =
+                read_to_char(Line0+1, Code, [], [$(, $.]),
+            {_, Line2, NewCode, Stuff} = read_to_char(Line1, NewCode0, [], $\n),
+            NewRes = [Res,linenum(Line0),"-<b>",AttName,
+                      "</b>",Match, Stuff, "\n"],
+            root(NewCode, NewRes, Line2);
+        "%" ->
+            %% Whole-line comment in italics.
+            {_, Line, NewCode, Stuff} = read_to_char(Line0+1, Code, [], $\n),
+            NewRes = [Res,linenum(Line0),"<i>%",Stuff,"</i>\n"],
+            root(NewCode, NewRes, Line);
+        "\n" ->
+            root(Code, [Res,linenum(Line0), "\n"], Line0+1);
+        " " ->
+            {_, Line, NewCode, Stuff} = read_to_char(Line0+1, Code, [], $\n),
+            root(NewCode, [Res,linenum(Line0)," ",Stuff, "\n"],
+                 Line);
+        "\t" ->
+            {_, Line, NewCode, Stuff} = read_to_char(Line0+1, Code, [], $\n),
+            root(NewCode, [Res,linenum(Line0),"\t",Stuff, "\n"],
+                 Line);
+        [Chr|_] when Chr>96, Chr<123 ->
+            %% Assumed to be function/clause start.
+            %% FIXME: This will trivially generate non-unique anchors
+            %% (one for each clause) --- which is illegal HTML.
+            {_, Line1, NewCode0, FName0} = read_to_char(Line0+1, Code, [], $(),
+            {_, Line2, NewCode, Stuff} =
+                read_to_char(Line1,NewCode0, [], $\n),
+            FuncName = [[Chr],FName0],
+            NewRes=[Res,"<a name=",FuncName,">",
+                    linenum(Line0),"<b>",FuncName,"</b></a>",
+                    "(",Stuff, "\n"],
+            root(NewCode, NewRes, Line2);
+        Chr ->
+            {_, Line, NewCode, Stuff} = read_to_char(Line0+1, Code, [], $\n),
+            root(NewCode, [Res,linenum(Line0),Chr,Stuff, "\n"],
+                 Line)
+    end.
+
+%% read_to_char(Line0, Code, Res, Chr) ->
+%%     {MatchedChar | nomatch, Line, RestOfCode, Html}
+%% Consumes Code until Chr is found. Chr may be a single character or a
+%% list of alternative characters (matched with lists:member/2).
+%% Characters passed over are HTML-escaped and line-counted by
+%% maybe_convert/4; the stop character itself is consumed but not
+%% included in the returned Html accumulator.
+read_to_char(Line0, [], Res, _Chr) ->
+    {nomatch, Line0, [], Res};
+read_to_char(Line0, [Char|Code], Res, Chr) ->
+    case Char of
+        Chr -> {Char, Line0, Code, Res};
+        _ when is_list(Chr) ->
+            %% Chr is a set of alternative stop characters.
+            case lists:member(Char,Chr) of
+                true ->
+                    {Char, Line0, Code, Res};
+                false ->
+                    {Line,NewCode,NewRes} = maybe_convert(Line0,Code,Res,Char),
+                    read_to_char(Line, NewCode, NewRes, Chr)
+            end;
+        _ ->
+            {Line,NewCode,NewRes} = maybe_convert(Line0,Code,Res,Char),
+            read_to_char(Line,NewCode, NewRes, Chr)
+    end.
+
+%% maybe_convert(Line0, Code, Res, Chr) -> {Line, RestOfCode, Html}
+%% Appends one scanned character to the Html accumulator, escaping the
+%% characters that are significant in HTML (&, <, >). A double quote
+%% pulls in the whole quoted string verbatim (no code markup), adding
+%% line-number anchors for any newlines inside it.
+maybe_convert(Line0,Code,Res,Chr) ->
+    case Chr of
+        %% Quoted stuff should not have the highlighting like normal code
+        %% FIXME: unbalanced quotes (e.g. in comments) will cause trouble with
+        %% highlighting and line numbering in the rest of the module.
+        $" ->
+            {_, Line1, NewCode, Stuff0} = read_to_char(Line0, Code, [], $"),
+            {Line2,Stuff} = add_linenumbers(Line1,lists:flatten(Stuff0),[]),
+            {Line2,NewCode,[Res,$",Stuff,$"]};
+        %% These chars have meaning in HTML, and *must* *not* be
+        %% written as themselves.
+        $& ->
+            {Line0, Code, [Res,"&amp;"]};
+        $< ->
+            {Line0, Code, [Res,"&lt;"]};
+        $> ->
+            {Line0, Code, [Res,"&gt;"]};
+        %% Everything else is simply copied.
+        OtherChr ->
+            {Line0, Code, [Res,OtherChr]}
+    end.
+
+%% add_linenumbers(Line, Chars, Res) -> {LastLine, Html}
+%% Inserts a linenum/1 marker after every newline in Chars, counting
+%% lines as it goes. Used for text spans (quoted strings) that were
+%% consumed without going through root/3.
+add_linenumbers(Line,[Chr|Chrs],Res) ->
+    case Chr of
+        $\n -> add_linenumbers(Line+1,Chrs,[Res,$\n,linenum(Line)]);
+        _ -> add_linenumbers(Line,Chrs,[Res,Chr])
+    end;
+add_linenumbers(Line,[],Res) ->
+    {Line,Res}.
+
+%% Make nicely indented line numbers.
+%% Returns an iolist "NNNNN:" right-padded to five digits; every tenth
+%% line additionally gets an <a name="N"> anchor so it can be linked to.
+linenum(Line) ->
+    Num = integer_to_list(Line),
+    A = case Line rem 10 of
+            0 -> "<a name=\"" ++ Num ++"\"></a>";
+            _ -> []
+        end,
+    Pred =
+        case length(Num) of
+            Length when Length < 5 ->
+                lists:duplicate(5-Length,$\s);
+            _ ->
+                []
+        end,
+    [A,Pred,integer_to_list(Line),":"].
+
+%% footer(Lines) -> iolist
+%% Reads the runtime counter started in convert/2 and formats a short
+%% "transformation took N seconds" trailer for the generated page.
+footer(Lines) ->
+    {_, Time} = statistics(runtime),
+% io:format("Converted ~p lines in ~.2f Seconds.~n",
+% [Lines, Time/1000]),
+    S = "<i>The transformation of this file (~p lines) took ~.2f seconds</i>",
+    F = lists:flatten(io_lib:format(S, [Lines, Time/1000])),
+    ["<hr size=1>",F,"<br>\n"].
diff --git a/lib/test_server/src/install-sh b/lib/test_server/src/install-sh
new file mode 120000
index 0000000000..a859cade7f
--- /dev/null
+++ b/lib/test_server/src/install-sh
@@ -0,0 +1 @@
+../../../erts/autoconf/install-sh \ No newline at end of file
diff --git a/lib/test_server/src/test_server.app.src b/lib/test_server/src/test_server.app.src
new file mode 100644
index 0000000000..af2d4dc2cb
--- /dev/null
+++ b/lib/test_server/src/test_server.app.src
@@ -0,0 +1,36 @@
+% This is an -*- erlang -*- file.
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+
+{application, test_server,
+ [{description, "The OTP Test Server application"},
+ {vsn, "%VSN%"},
+ {modules, [
+ erl2html2,
+ test_server_ctrl,
+ test_server,
+ test_server_h,
+ test_server_line,
+ test_server_node,
+ test_server_sup
+ ]},
+ {registered, [test_server_ctrl,
+ test_server,
+ test_server_break_process]},
+ {applications, [kernel,stdlib]},
+ {env, []}]}.
+
diff --git a/lib/test_server/src/test_server.appup.src b/lib/test_server/src/test_server.appup.src
new file mode 100644
index 0000000000..0fbe5f23f7
--- /dev/null
+++ b/lib/test_server/src/test_server.appup.src
@@ -0,0 +1 @@
+{"%VSN%",[],[]}. \ No newline at end of file
diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl
new file mode 100644
index 0000000000..99e24205ae
--- /dev/null
+++ b/lib/test_server/src/test_server.erl
@@ -0,0 +1,2203 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(test_server).
+
+-define(DEFAULT_TIMETRAP_SECS, 60).
+
+%%% START %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([start/1,start/2]).
+
+%%% TEST_SERVER_CTRL INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([run_test_case_apply/1,init_target_info/0,init_purify/0]).
+-export([cover_compile/1,cover_analyse/2]).
+
+%%% TEST_SERVER_SUP INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([get_loc/1]).
+
+%%% TEST SUITE INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([lookup_config/2]).
+-export([fail/0,fail/1,format/1,format/2,format/3]).
+-export([capture_start/0,capture_stop/0,capture_get/0]).
+-export([messages_get/0]).
+-export([hours/1,minutes/1,seconds/1,sleep/1,timecall/3]).
+-export([timetrap_scale_factor/0,timetrap/1,timetrap_cancel/1]).
+-export([m_out_of_n/3,do_times/4,do_times/2]).
+-export([call_crash/3,call_crash/4,call_crash/5]).
+-export([temp_name/1]).
+-export([start_node/3, stop_node/1, wait_for_node/1, is_release_available/1]).
+-export([app_test/1, app_test/2]).
+-export([is_native/1]).
+-export([comment/1]).
+-export([os_type/0]).
+-export([run_on_shielded_node/2]).
+-export([is_cover/0,is_debug/0,is_commercial/0]).
+
+-export([break/1,continue/0]).
+
+%%% DEBUGGER INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([purify_new_leaks/0, purify_format/2, purify_new_fds_inuse/0,
+ purify_is_running/0]).
+
+%%% PRIVATE EXPORTED %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-record(state,{controller,jobs=[]}).
+
+-include("test_server_internal.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-define(pl2a(M), test_server_sup:package_atom(M)).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% **** START *** CODE FOR REMOTE TARGET ONLY ***
+%%
+%% test_server
+%% This process is started only if the test is to be run on a remote target
+%% The process is then started on target
+%% A socket connection is established with the test_server_ctrl process
+%% on host, and information about target is sent to host.
+%% start(ControllerHost) / start(ControllerHost, ControllerPort) ->
+%%     {ok,Pid} | Error
+%% Starts the target-side test_server process, which connects back to
+%% the test_server_ctrl process on ControllerHost. The spawned init/3
+%% reports either 'started' or a {could_not_contact_controller,_} error.
+start([ControllerHost]) when is_atom(ControllerHost) ->
+    start(atom_to_list(ControllerHost));
+start(ControllerHost) when is_list(ControllerHost) ->
+    start(ControllerHost,?MAIN_PORT).
+start(ControllerHost,ControllerPort) ->
+    S = self(),
+    Pid = spawn(fun() -> init(ControllerHost,ControllerPort,S) end),
+    receive {Pid,started} -> {ok,Pid};
+            {Pid,Error} -> Error
+    end.
+
+%% init(Host, Port, Starter)
+%% Body of the target-side main process. Registers itself globally,
+%% cleans up old crash dumps, opens the main TCP connection to the
+%% controller, pushes target info to the host (via request/2, defined
+%% further down in this module) and enters loop/1.
+init(Host,Port,Starter) ->
+    global:register_name(?MODULE,self()),
+    process_flag(trap_exit,true),
+    test_server_sup:cleanup_crash_dumps(),
+    case gen_tcp:connect(Host,Port, [binary,
+                                     {reuseaddr,true},
+                                     {packet,2}]) of
+        {ok,MainSock} ->
+            Starter ! {self(),started},
+            request(MainSock,{target_info,init_target_info()}),
+            loop(#state{controller={Host,MainSock}});
+        Error ->
+            Starter ! {self(),{error,
+                               {could_not_contact_controller,Error}}}
+    end.
+
+%% init_target_info() -> #target_info{}
+%% Collects static facts about this node (OS, emulator version, OTP
+%% release, directories, username, cookie) into a #target_info{} record
+%% that is sent to the controller right after connecting.
+init_target_info() ->
+    [$.|Emu] = code:objfile_extension(),
+    {_, OTPRel} = init:script_id(),
+    TestServerDir = filename:absname(filename:dirname(code:which(?MODULE))),
+    #target_info{os_family=test_server_sup:get_os_family(),
+                 os_type=os:type(),
+                 version=erlang:system_info(version),
+                 system_version=erlang:system_info(system_version),
+                 root_dir=code:root_dir(),
+                 test_server_dir=TestServerDir,
+                 emulator=Emu,
+                 otp_release=OTPRel,
+                 username=test_server_sup:get_username(),
+                 cookie=atom_to_list(erlang:get_cookie())}.
+
+
+%% loop(State)
+%% Main receive loop on the target. Handles three things: requests on
+%% the main socket (tag byte 1, decoded by decode_main/2), the socket
+%% closing (which halts the node), and EXIT signals from linked job
+%% processes (abnormal exits are reported to the controller).
+loop(#state{controller={_,MainSock}} = State) ->
+    receive
+        {tcp, MainSock, <<1,Request/binary>>} ->
+            State1 = decode_main(binary_to_term(Request),State),
+            loop(State1);
+        {tcp_closed, MainSock} ->
+            gen_tcp:close(MainSock),
+            halt();
+        {'EXIT',Pid,Reason} ->
+            case lists:keysearch(Pid,1,State#state.jobs) of
+                {value,{Pid,Name}} ->
+                    case Reason of
+                        normal -> ignore;
+                        _other -> request(MainSock,{job_proc_killed,Name,Reason})
+                    end,
+                    NewJobs = lists:keydelete(Pid,1,State#state.jobs),
+                    loop(State#state{jobs = NewJobs});
+                false ->
+                    loop(State)
+            end
+    end.
+
+%% Decode request on main socket
+%% decode_main({job,Port,Name}, State) -> State'
+%% The only main-socket request: spawn a new job process that connects
+%% back to the host on Port. The job is recorded in State#state.jobs
+%% only if it reports successful startup.
+decode_main({job,Port,Name},#state{controller={Host,_},jobs=Jobs}=State) ->
+    S = self(),
+    NewJob = spawn_link(fun() -> job(Host,Port,S) end),
+    receive {NewJob,started} -> State#state{jobs=[{NewJob,Name}|Jobs]};
+            {NewJob,_Error} -> State
+    end.
+
+%% Reset memory-leak bookkeeping (a no-op unless running under purify).
+init_purify() ->
+    purify_new_leaks().
+
+
+%% Temporary job process on target
+%% This process will live while all test cases in the job are executed.
+%% A socket connection is established with the job process on host.
+%% job(Host, Port, Starter)
+%% Entry point of a per-job process on the target (spawn_link:ed from
+%% decode_main/2). Opens its own socket to the job process on the host,
+%% reports startup status to Starter, and hands over to job/1.
+job(Host,Port,Starter) ->
+    process_flag(trap_exit,true),
+    init_purify(),
+    case gen_tcp:connect(Host,Port, [binary,
+                                     {reuseaddr,true},
+                                     {packet,4},
+                                     {active,false}]) of
+        {ok,JobSock} ->
+            Starter ! {self(),started},
+            job(JobSock);
+        Error ->
+            Starter ! {self(),{error,
+                               {could_not_contact_controller,Error}}}
+    end.
+
+%% job(JobSock)
+%% Creates the job directory (including its priv_dir subdirectory),
+%% stashes the socket and directory in the process dictionary for
+%% decode_job/1, runs the request loop, and finally ships the priv_dir
+%% contents back to the host.
+job(JobSock) ->
+    JobDir = get_jobdir(),
+    ok = file:make_dir(JobDir),
+    ok = file:make_dir(filename:join(JobDir,?priv_dir)),
+    put(test_server_job_sock,JobSock),
+    put(test_server_job_dir,JobDir),
+    {ok,Cwd} = file:get_cwd(),
+    job_loop(JobSock),
+    ok = file:set_cwd(Cwd),
+    send_privdir(JobDir,JobSock), % also recursively removes jobdir
+    ok.
+
+
+%% get_jobdir() -> string()
+%% Builds a unique job directory name from the local wall-clock time
+%% plus the microsecond part of now() to avoid collisions.
+get_jobdir() ->
+    Now = now(),
+    {{Y,M,D},{H,Mi,S}} = calendar:now_to_local_time(Now),
+    Basename = io_lib:format("~w-~2.2.0w-~2.2.0w_~2.2.0w.~2.2.0w.~2.2.0w_~w",
+                             [Y,M,D,H,Mi,S,element(3,Now)]),
+    %% if target has a file master, don't use prim_file to look up cwd
+    case lists:keymember(master,1,init:get_arguments()) of
+        true ->
+            {ok,Cwd} = file:get_cwd(),
+            Cwd ++ "/" ++ Basename;
+        false ->
+            filename:absname(Basename)
+    end.
+
+%% send_privdir(JobDir, JobSock)
+%% If the job's priv_dir is non-empty, tar+gzip it and send the archive
+%% to the host; otherwise report an empty priv_dir. In both cases the
+%% whole job directory is deleted from the target first.
+send_privdir(JobDir,JobSock) ->
+    LocalPrivDir = filename:join(JobDir,?priv_dir),
+    case file:list_dir(LocalPrivDir) of
+        {ok,List} when List/=[] ->
+            Tarfile0 = ?priv_dir ++ ".tar.gz",
+            Tarfile = filename:join(JobDir,Tarfile0),
+            {ok,Tar} = erl_tar:open(Tarfile,[write,compressed,cooked]),
+            ok = erl_tar:add(Tar,LocalPrivDir,?priv_dir,[]),
+            ok = erl_tar:close(Tar),
+            {ok,TarBin} = file:read_file(Tarfile),
+            file:delete(Tarfile),
+            ok = del_dir(JobDir),
+            request(JobSock,{{privdir,Tarfile0},TarBin});
+        _ ->
+            ok = del_dir(JobDir),
+            request(JobSock,{privdir,empty_priv_dir})
+    end.
+
+%% del_dir(Dir) -> ok
+%% Recursively deletes a directory tree; regular files are deleted
+%% directly, and anything unreadable (e.g. a symlink) gets a
+%% best-effort file:delete/1.
+del_dir(Dir) ->
+    case file:read_file_info(Dir) of
+        {ok,#file_info{type=directory}} ->
+            {ok,Cont} = file:list_dir(Dir),
+            lists:foreach(fun(F) -> del_dir(filename:join(Dir,F)) end, Cont),
+            ok = file:del_dir(Dir);
+        {ok,#file_info{}} ->
+            ok = file:delete(Dir);
+        _r ->
+            %% This might be a symlink - let's try to delete it!
+            catch file:delete(Dir),
+            ok
+    end.
+
+%%
+%% Receive and decode request on job socket
+%%
+%% job_loop(JobSock)
+%% Blocking request loop on the job socket: each request received via
+%% recv/1 (defined elsewhere in this module) is handled by decode_job/1
+%% until it returns {stop,_}.
+job_loop(JobSock) ->
+    Request = recv(JobSock),
+    case decode_job(Request) of
+        ok -> job_loop(JobSock);
+        {stop,R} -> R
+    end.
+
+%% decode_job(Request) -> ok | {stop,Reason}
+%% Dispatches the job-socket protocol: load a beam, unpack a data_dir
+%% archive, run a test case, apply an MFA synchronously, or finish.
+decode_job({{beam,Mod,Which},Beam}) ->
+    % FIXME, shared directory structure on host and target required,
+    % "Library beams" are not loaded from HOST... /Patrik
+    code:add_patha(filename:dirname(Which)),
+    % End of Patriks uglyness...
+    {module,Mod} = code:load_binary(Mod,Which,Beam),
+    ok;
+decode_job({{datadir,Tarfile0},Archive}) ->
+    JobDir = get(test_server_job_dir),
+    Tarfile = filename:join(JobDir,Tarfile0),
+    ok = file:write_file(Tarfile,Archive),
+    % Cooked is temporary removed/broken
+    % ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir},cooked]),
+    ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir}]),
+    ok = file:delete(Tarfile),
+    ok;
+decode_job({test_case,Case}) ->
+    %% Run the case, send back the result, then forward any crash dumps
+    %% produced during the run.
+    Result = run_test_case_apply(Case),
+    JobSock = get(test_server_job_sock),
+    request(JobSock,{test_case_result,Result}),
+    case test_server_sup:tar_crash_dumps() of
+        {error,no_crash_dumps} -> request(JobSock,{crash_dumps,no_crash_dumps});
+        {ok,TarFile} ->
+            {ok,TarBin} = file:read_file(TarFile),
+            file:delete(TarFile),
+            request(JobSock,{{crash_dumps,filename:basename(TarFile)},TarBin})
+    end,
+    ok;
+decode_job({sync_apply,{M,F,A}}) ->
+    R = apply(M,F,A),
+    request(get(test_server_job_sock),{sync_result,R}),
+    ok;
+decode_job(job_done) ->
+    {stop,stopped}.
+
+%%
+%% **** STOP *** CODE FOR REMOTE TARGET ONLY ***
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% cover_compile({App,Include,Exclude,Cross}) ->
+%% {ok,AnalyseModules} | {error,Reason}
+%%
+%% App = atom() , name of application to be compiled
+%% Exclude = [atom()], list of modules to exclude
+%% Include = [atom()], list of modules outside of App that should be included
+%% in the cover compilation
+%% Cross = [atom()], list of modules outside of App that should be included
+%% in the cover compilation, but that shall not be part of
+%% the cover analysis for this application.
+%%
+%% Cover compile the given application. Return {ok,AnalyseMods} if application
+%% is found, else {error,application_not_found}.
+
+%% cover_compile({App,Exclude,Include,Cross}) ->
+%%     {ok,AnalyseMods} | {error,Reason}
+%% Three clauses: App=none (only Include++Cross are compiled), App with
+%% Exclude=all (everything in App excluded, only Include++Cross
+%% compiled), and the general case (all of App's ebin beams plus
+%% Include, minus Exclude, plus Cross). Cross modules are compiled but
+%% never part of the returned analyse list. See the header comment
+%% above for the argument description.
+cover_compile({none,_Exclude,Include,Cross}) ->
+    CompileMods = Include++Cross,
+    case length(CompileMods) of
+        0 ->
+            io:fwrite("WARNING: No modules to cover compile!\n\n",[]),
+            cover:start(), % start cover server anyway
+            {ok,[]};
+        N ->
+            io:fwrite("Cover compiling ~w modules - "
+                      "this may take some time... ",[N]),
+            do_cover_compile(CompileMods),
+            io:fwrite("done\n\n",[]),
+            {ok,Include}
+    end;
+cover_compile({App,all,Include,Cross}) ->
+    CompileMods = Include++Cross,
+    case length(CompileMods) of
+        0 ->
+            io:fwrite("WARNING: No modules to cover compile!\n\n",[]),
+            cover:start(), % start cover server anyway
+            {ok,[]};
+        N ->
+            io:fwrite("Cover compiling '~w' (~w files) - "
+                      "this may take some time... ",[App,N]),
+            io:format("\nWARNING: All modules in \'~w\' are excluded\n"
+                      "Only cover compiling modules in include list "
+                      "and the modules\nin the cross cover file:\n"
+                      "~p\n", [App,CompileMods]),
+            do_cover_compile(CompileMods),
+            io:fwrite("done\n\n",[]),
+            {ok,Include}
+    end;
+cover_compile({App,Exclude,Include,Cross}) ->
+    case code:lib_dir(App) of
+        {error,bad_name} ->
+            case Include++Cross of
+                [] ->
+                    io:format("\nWARNING: Can't find lib_dir for \'~w\'\n"
+                              "Not cover compiling!\n\n",[App]),
+                    {error,application_not_found};
+                CompileMods ->
+                    io:fwrite("Cover compiling '~w' (~w files) - "
+                              "this may take some time... ",
+                              [App,length(CompileMods)]),
+                    io:format("\nWARNING: Can't find lib_dir for \'~w\'\n"
+                              "Only cover compiling modules in include list: "
+                              "~p\n", [App,Include]),
+                    do_cover_compile(CompileMods),
+                    io:fwrite("done\n\n",[]),
+                    {ok,Include}
+            end;
+        LibDir ->
+            EbinDir = filename:join([LibDir,"ebin"]),
+            WC = filename:join(EbinDir,"*.beam"),
+            AllMods = module_names(filelib:wildcard(WC)),
+            AnalyseMods = (AllMods ++ Include) -- Exclude,
+            CompileMods = AnalyseMods ++ Cross,
+            case length(CompileMods) of
+                0 ->
+                    io:fwrite("WARNING: No modules to cover compile!\n\n",[]),
+                    cover:start(), % start cover server anyway
+                    {ok,[]};
+                N ->
+                    io:fwrite("Cover compiling '~w' (~w files) - "
+                              "this may take some time... ",[App,N]),
+                    do_cover_compile(CompileMods),
+                    io:fwrite("done\n\n",[]),
+                    {ok,AnalyseMods}
+            end
+    end.
+
+
+%% module_names(Beams) -> [atom()]
+%% Beam file paths -> module name atoms (strip directory and ".beam").
+module_names(Beams) ->
+    [list_to_atom(filename:basename(filename:rootname(Beam))) || Beam <- Beams].
+
+
+do_cover_compile(Modules) ->
+    do_cover_compile1(lists:usort(Modules)). % remove duplicates
+
+%% do_cover_compile1(Modules)
+%% Cover-compiles each module. The cover tool itself and the test
+%% server modules are skipped (first clause). Sticky modules are
+%% temporarily unstuck; unloaded modules are loaded first so that
+%% cover:compile_beam/1 can find the beam. Failures are logged as
+%% warnings, not errors.
+do_cover_compile1([Dont|Rest]) when Dont=:=cover;
+                                    Dont=:=test_server;
+                                    Dont=:=test_server_ctrl ->
+    do_cover_compile1(Rest);
+do_cover_compile1([M|Rest]) ->
+    case {code:is_sticky(M),code:is_loaded(M)} of
+        {true,_} ->
+            code:unstick_mod(M),
+            case cover:compile_beam(M) of
+                {ok,_} ->
+                    ok;
+                Error ->
+                    io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
+                              [M,Error])
+            end,
+            code:stick_mod(M),
+            do_cover_compile1(Rest);
+        {false,false} ->
+            %% Not loaded yet - load and retry the same module.
+            case code:load_file(M) of
+                {module,_} ->
+                    do_cover_compile1([M|Rest]);
+                Error ->
+                    io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]),
+                    do_cover_compile1(Rest)
+            end;
+        {false,_} ->
+            case cover:compile_beam(M) of
+                {ok,_} ->
+                    ok;
+                Error ->
+                    io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
+                              [M,Error])
+            end,
+            do_cover_compile1(Rest)
+    end;
+do_cover_compile1([]) ->
+    ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% cover_analyse(Analyse,Modules) -> [{M,{Cov,NotCov,Details}}]
+%%
+%% Analyse = {details,Dir} | details | {overview,void()} | overview
+%% Modules = [atom()], the modules to analyse
+%%
+%% Cover analysis. If this is a remote target, analyse_to_file can not be used.
+%% In that case the analyse level 'line' is used instead if Analyse==details.
+%%
+%% If this is a local target, the test directory is given
+%% (Analyse=={details,Dir}) and analyse_to_file can be used directly.
+%%
+%% If Analyse==overview | {overview,Dir} analyse_to_file is not used, only
+%% an overview containing the number of covered/not covered lines in each module.
+%%
+%% Also, if a Dir exists, cover data will be exported to a file called
+%% all.coverdata in that directory.
+%% cover_analyse(Analyse, Modules) -> [{M,{Cov,NotCov,Details}} | {M,Error}]
+%% Builds a per-module DetailsFun according to the Analyse mode (see
+%% header comment above), runs cover:analyse(M,module) for each module,
+%% then unsticks sticky modules, stops cover and re-sticks them.
+cover_analyse(Analyse,Modules) ->
+    io:fwrite("Cover analysing...\n",[]),
+    DetailsFun =
+        case Analyse of
+            {details,Dir} ->
+                %% Export all cover data first; on failure the fun just
+                %% propagates the export error for every module.
+                case cover:export(filename:join(Dir,"all.coverdata")) of
+                    ok ->
+                        fun(M) ->
+                                OutFile = filename:join(Dir,
+                                                        atom_to_list(M) ++
+                                                        ".COVER.html"),
+                                case cover:analyse_to_file(M,OutFile,[html]) of
+                                    {ok,_} ->
+                                        {file,OutFile};
+                                    Error ->
+                                        Error
+                                end
+                        end;
+                    Error ->
+                        fun(_) -> Error end
+                end;
+            details ->
+                %% Remote target: no file system on host side, return
+                %% raw line data instead of writing HTML files.
+                fun(M) ->
+                        case cover:analyse(M,line) of
+                            {ok,Lines} ->
+                                {lines,Lines};
+                            Error ->
+                                Error
+                        end
+                end;
+            {overview,Dir} ->
+                case cover:export(filename:join(Dir,"all.coverdata")) of
+                    ok ->
+                        fun(_) -> undefined end;
+                    Error ->
+                        fun(_) -> Error end
+                end;
+            overview ->
+                fun(_) -> undefined end
+        end,
+    R = lists:map(
+          fun(M) ->
+                  case cover:analyse(M,module) of
+                      {ok,{M,{Cov,NotCov}}} ->
+                          {M,{Cov,NotCov,DetailsFun(M)}};
+                      Err ->
+                          io:fwrite("WARNING: Analysis failed for ~w. Reason: ~p\n",
+                                    [M,Err]),
+                          {M,Err}
+                  end
+          end, Modules),
+    Sticky = unstick_all_sticky(node()),
+    cover:stop(),
+    stick_all_sticky(node(),Sticky),
+    R.
+
+
+%% unstick_all_sticky(Node) -> [Module]
+%% Unsticks every sticky module known to cover on Node and returns the
+%% list so that stick_all_sticky/2 can restore them afterwards.
+unstick_all_sticky(Node) ->
+    lists:filter(
+      fun(M) ->
+              case code:is_sticky(M) of
+                  true ->
+                      rpc:call(Node,code,unstick_mod,[M]),
+                      true;
+                  false ->
+                      false
+              end
+      end,
+      cover:modules()).
+
+%% Re-stick the modules previously unstuck by unstick_all_sticky/1.
+stick_all_sticky(Node,Sticky) ->
+    lists:foreach(
+      fun(M) ->
+              rpc:call(Node,code,stick_mod,[M])
+      end,
+      Sticky).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% run_test_case_apply(Mod,Func,Args,Name,RunInit,MultiplyTimetrap) ->
+%% {Time,Value,Loc,Opts,Comment} | {died,Reason,unknown,Comment}
+%%
+%% Time = float() (seconds)
+%% Value = term()
+%% Loc = term()
+%% Comment = string()
+%% Reason = term()
+%%
+%% Spawns off a process (case process) that actually runs the test suite.
+%% The case process will have the job process as group leader, which makes
+%% it possible to capture all its output from io:format/2, etc.
+%%
+%% The job process then sits down and waits for news from the case process.
+%% This might be io requests (which are redirected to the log files).
+%%
+%% Returns a tuple with the time spent (in seconds) in the test case,
+%% the return value from the test case or an {'EXIT',Reason} if the case
+%% failed, Loc points out where the test case crashed (if it did). Loc
+%% is either the name of the function, or {<Module>,<Line>} of the last
+%% line executed that had a ?line macro. If the test case did execute
+%% erase/0 or similar, it may be empty. Comment is the last comment added
+%% by test_server:comment/1, the reason if test_server:fail has been
+%% called or the comment given by the return value {comment,Comment} from
+%% a test case.
+%%
+%% {died,Reason,unknown,Comment} is returned if the test case was killed
+%% by some other process. Reason is the kill reason provided.
+%%
+%% MultiplyTimetrap indicates a possible extension of all timetraps
+%% Timetraps will be multiplied by this integer. If it is infinity, no
+%% timetraps will be started at all.
+
+%% run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,MultiplyTimetrap}) ->
+%%     {Result,DetectedFail,ProcCountBefore,ProcCountAfter}
+%% Entry point used by test_server_ctrl (and by decode_job/1 on remote
+%% targets). Sets up valgrind/purify bookkeeping around the run and
+%% records the process-count delta so leaked processes can be detected.
+run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,MultiplyTimetrap}) ->
+    purify_format("Test case #~w ~w:~w/1", [CaseNum, Mod, Func]),
+    case os:getenv("TS_RUN_VALGRIND") of
+        false ->
+            ok;
+        _ ->
+            %% Give each case its own valgrind log file name.
+            os:putenv("VALGRIND_LOGFILE_INFIX",atom_to_list(Mod)++"."++
+                      atom_to_list(Func)++"-")
+    end,
+    test_server_h:testcase({Mod,Func,1}),
+    ProcBef = erlang:system_info(process_count),
+    Result = run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap),
+    ProcAft = erlang:system_info(process_count),
+    purify_new_leaks(),
+    DetFail = get(test_server_detected_fail),
+    {Result,DetFail,ProcBef,ProcAft}.
+
+%% Dispatch on local vs. remote target (test_server_job_dir is only set
+%% on a remote target). On a remote target, data_dir and priv_dir in the
+%% Config proplist are rewritten to point into the target-side job dir.
+run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
+    case get(test_server_job_dir) of
+	undefined ->
+	    %% i'm a local target
+	    do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap);
+	JobDir ->
+	    %% i'm a remote target
+	    case Args of
+		[Config] when is_list(Config) ->
+		    {value,{data_dir,HostDataDir}} =
+			lists:keysearch(data_dir, 1, Config),
+		    DataBase = filename:basename(HostDataDir),
+		    TargetDataDir = filename:join(JobDir, DataBase),
+		    Config1 = lists:keyreplace(data_dir, 1, Config,
+					       {data_dir,TargetDataDir}),
+		    TargetPrivDir = filename:join(JobDir, ?priv_dir),
+		    Config2 = lists:keyreplace(priv_dir, 1, Config1,
+					       {priv_dir,TargetPrivDir}),
+		    do_run_test_case_apply(Mod, Func, [Config2], Name, RunInit,
+					   MultiplyTimetrap);
+		_other ->
+		    %% Args is not a [Config] list; pass it through untouched.
+		    do_run_test_case_apply(Mod, Func, Args, Name, RunInit,
+					   MultiplyTimetrap)
+	    end
+    end.
+%% Logs the invocation, spawns the case process (run_test_case_eval) with
+%% this process as its group leader, then enters the io/result message
+%% loop. Returns the value produced by run_test_case_msgloop/5.
+do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
+    {ok,Cwd} = file:get_cwd(),
+    %% Don't log the (potentially large) tc_group_result entry in Config.
+    Args2Print = case Args of
+		     [Args1] when is_list(Args1) ->
+			 lists:keydelete(tc_group_result, 1, Args1);
+		     _ ->
+			 Args
+		 end,
+    print(minor, "Test case started with:\n~s:~s(~p)\n", [Mod,Func,Args2Print]),
+    print(minor, "Current directory is ~p\n", [Cwd]),
+    print_timestamp(minor,"Started at "),
+    TCCallback = get(test_server_testcase_callback),
+    Ref = make_ref(),
+    OldGLeader = group_leader(),
+    %% Set ourself to group leader for the spawned process
+    group_leader(self(),self()),
+    Pid =
+	spawn_link(
+	  fun() ->
+		  run_test_case_eval(Mod, Func, Args, Name, Ref,
+				     RunInit, MultiplyTimetrap,
+				     TCCallback)
+	  end),
+    group_leader(OldGLeader, self()),
+    put(test_server_detected_fail, []),
+    run_test_case_msgloop(Ref, Pid, false, false, "").
+
+%% Ugly bug (pre R5A):
+%% If this process (group leader of the test case) terminates before
+%% all messages have been replied back to the io server, the io server
+%% hangs. Fixed by the 20 milli timeout check here, and by using monitor in
+%% io.erl (belt AND suspenders, in other words :)
+%%
+%% A test case is known to have failed if it returns {'EXIT', _} tuple,
+%% or sends a message {failed, File, Line} to its group_leader
+%%
+%% Group-leader message loop for one running test case. Serves the Erlang
+%% io protocol on behalf of the case process, and handles abort requests,
+%% capture control, sync_apply requests, comments and the various 'EXIT'
+%% outcomes. When Terminate == {true,RetVal} the loop keeps draining
+%% messages with a 20 ms timeout (see "ugly bug" note above) before
+%% finally returning RetVal.
+run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
+    %% NOTE: Keep job_proxy_msgloop/0 up to date when changes
+    %% are made in this function!
+    {Timeout,ReturnValue} =
+	case Terminate of
+	    {true, ReturnVal} ->
+		{20, ReturnVal};
+	    false ->
+		{infinity, should_never_appear}
+	end,
+    receive
+	{abort_current_testcase,Reason,From} ->
+	    Line = get_loc(Pid),
+	    Mon = erlang:monitor(process, Pid),
+	    exit(Pid,{testcase_aborted,Reason,Line}),
+	    erlang:yield(),
+	    From ! {self(),abort_current_testcase,ok},
+	    NewComment =
+		receive
+		    {'DOWN', Mon, process, Pid, _} ->
+			Comment
+		after 10000 ->
+			%% Pid is probably trapping exits, hit it harder...
+			exit(Pid, kill),
+			%% here's the only place we know Reason, so we save
+			%% it as a comment, potentially replacing user data
+			Error = lists:flatten(io_lib:format("Aborted: ~p",[Reason])),
+			Error1 = lists:flatten([string:strip(S,left) ||
+						S <- string:tokens(Error,[$\n])]),
+			%% Truncate the comment to at most 63 characters.
+			if length(Error1) > 63 ->
+				string:substr(Error1,1,60) ++ "...";
+			   true ->
+				Error1
+			end
+		end,
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,NewComment);
+	%% The io-request clauses below cover the old (pre-unicode) and the
+	%% unicode/latin1-tagged variants of put_chars.
+	{io_request,From,ReplyAs,{put_chars,io_lib,Func,[Format,Args]}}
+	when is_list(Format) ->
+	    Msg = (catch io_lib:Func(Format,Args)),
+	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,io_lib,Func,[Format,Args]}}
+	when is_atom(Format) ->
+	    Msg = (catch io_lib:Func(Format,Args)),
+	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,Bytes}} ->
+	    run_test_case_msgloop_io(
+	      ReplyAs,CaptureStdout,Bytes,From,put_chars),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,unicode,io_lib,Func,[Format,Args]}}
+	when is_list(Format) ->
+	    Msg = unicode_to_latin1(catch io_lib:Func(Format,Args)),
+	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,latin1,io_lib,Func,[Format,Args]}}
+	when is_list(Format) ->
+	    Msg = (catch io_lib:Func(Format,Args)),
+	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,unicode,io_lib,Func,[Format,Args]}}
+	when is_atom(Format) ->
+	    Msg = unicode_to_latin1(catch io_lib:Func(Format,Args)),
+	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,latin1,io_lib,Func,[Format,Args]}}
+	when is_atom(Format) ->
+	    Msg = (catch io_lib:Func(Format,Args)),
+	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,unicode,Bytes}} ->
+	    run_test_case_msgloop_io(
+	      ReplyAs,CaptureStdout,unicode_to_latin1(Bytes),From,put_chars),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{io_request,From,ReplyAs,{put_chars,latin1,Bytes}} ->
+	    run_test_case_msgloop_io(
+	      ReplyAs,CaptureStdout,Bytes,From,put_chars),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	IoReq when element(1, IoReq) == io_request ->
+	    %% something else, just pass it on
+	    group_leader() ! IoReq,
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{structured_io,ClientPid,Msg} ->
+	    output(Msg, ClientPid),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{capture,NewCapture} ->
+	    %% NewCapture is a pid to forward captured output to, or false.
+	    run_test_case_msgloop(Ref,Pid,NewCapture,Terminate,Comment);
+	{sync_apply,From,MFA} ->
+	    sync_local_or_remote_apply(false,From,MFA),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{sync_apply_proxy,Proxy,From,MFA} ->
+	    sync_local_or_remote_apply(Proxy,From,MFA),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{printout,Detail,Format,Args} ->
+	    print(Detail,Format,Args),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	{comment,NewComment} ->
+	    %% If we are already terminating, patch the comment (and
+	    %% normalize Loc) in the pending return value as well.
+	    Terminate1 =
+		case Terminate of
+		    {true,{Time,Value,Loc,Opts,_OldComment}} ->
+			{true,{Time,Value,mod_loc(Loc),Opts,NewComment}};
+		    Other ->
+			Other
+		end,
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate1,NewComment);
+	{'EXIT',Pid,{Ref,Time,Value,Loc,Opts}} ->
+	    %% Normal end of the test case (Ref proves it is our exit).
+	    %% Time arrives in microseconds; convert to seconds.
+	    RetVal = {Time/1000000,Value,mod_loc(Loc),Opts,Comment},
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
+	{'EXIT',Pid,Reason} ->
+	    case Reason of
+		{timetrap_timeout,TVal,Loc} ->
+		    %% convert Loc to form that can be formatted
+		    Loc1 = mod_loc(Loc),
+		    {Mod,Func} = get_mf(Loc1),
+		    %% The framework functions mustn't execute on this
+		    %% group leader process or io will cause deadlock,
+		    %% so we spawn a dedicated process for the operation
+		    %% and let the group leader go back to handle io.
+		    spawn_fw_call(Mod,Func,Pid,{timetrap_timeout,TVal},
+				  Loc1,self(),Comment),
+		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+		{timetrap_timeout,TVal,Loc,InitOrEnd} ->
+		    Loc1 = mod_loc(Loc),
+		    {Mod,_Func} = get_mf(Loc1),
+		    spawn_fw_call(Mod,InitOrEnd,Pid,{timetrap_timeout,TVal},
+				  Loc1,self(),Comment),
+		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+		{testcase_aborted,Reason,Loc} ->
+		    Loc1 = mod_loc(Loc),
+		    {Mod,Func} = get_mf(Loc1),
+		    spawn_fw_call(Mod,Func,Pid,{testcase_aborted,Reason},
+				  Loc1,self(),Comment),
+		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+		killed ->
+		    %% result of an exit(TestCase,kill) call, which is the
+		    %% only way to abort a testcase process that traps exits
+		    %% (see abort_current_testcase)
+		    spawn_fw_call(undefined,undefined,Pid,testcase_aborted_or_killed,
+				  unknown,self(),Comment),
+		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+		_ ->
+		    %% the testcase has terminated because of Reason (e.g. an exit
+		    %% because a linked process failed)
+		    spawn_fw_call(undefined,undefined,Pid,Reason,
+				  unknown,self(),Comment),
+		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment)
+	    end;
+	{_FwCallPid,fw_notify_done,RetVal} ->
+	    %% the framework has been notified, we're finished
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
+	{'EXIT',_FwCallPid,{fw_notify_done,Func,Error}} ->
+	    %% a framework function failed
+	    CB = os:getenv("TEST_SERVER_FRAMEWORK"),
+	    Loc = case CB of
+		      false ->
+			  {test_server,Func};
+		      _ ->
+			  {list_to_atom(CB),Func}
+		  end,
+	    RetVal = {died,{framework_error,Loc,Error},Loc,"Framework error"},
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
+	{failed,File,Line} ->
+	    %% ?line-macro failure report from the case process.
+	    put(test_server_detected_fail,
+		[{File, Line}| get(test_server_detected_fail)]),
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	_Other when not is_tuple(_Other) ->
+	    %% ignore anything not generated by test server
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
+	_Other when element(1, _Other) /= 'EXIT',
+		    element(1, _Other) /= started,
+		    element(1, _Other) /= finished,
+		    element(1, _Other) /= print ->
+	    %% ignore anything not generated by test server
+	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment)
+    after Timeout ->
+	    ReturnValue
+    end.
+
+%% Replies to one io request ({error,Func} if formatting crashed, ok
+%% otherwise), forwards the text to the capture process when capture is
+%% active, and writes it to the minor log.
+run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func) ->
+    case Msg of
+	{'EXIT',_} ->
+	    From ! {io_reply,ReplyAs,{error,Func}};
+	_ ->
+	    From ! {io_reply,ReplyAs,ok}
+    end,
+    if CaptureStdout /= false ->
+	    CaptureStdout ! {captured,Msg};
+       true ->
+	    ok
+    end,
+    output({minor,Msg},From).
+
+%% Route a log message to test_server_ctrl:output/2, locally or on the
+%% controller node when running on a remote target.
+output(Msg,Sender) ->
+    local_or_remote_apply({test_server_ctrl,output,[Msg,Sender]}).
+
+%% Spawns a dedicated (linked) process that runs the framework end_tc
+%% callback and then reports {FwCallPid,fw_notify_done,RetVal} back to
+%% SendTo (the group-leader msgloop). Run off the group leader to avoid
+%% io deadlock. Three clauses: init_per_testcase timetrap timeout,
+%% end_per_testcase timetrap timeout, and any other error.
+spawn_fw_call(Mod,{init_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why,
+	      Loc,SendTo,Comment) ->
+    FwCall =
+	fun() ->
+		Skip = {skip,{failed,{Mod,init_per_testcase,Why}}},
+		%% if init_per_testcase fails, the test case
+		%% should be skipped
+		case catch test_server_sup:framework_call(
+		       end_tc,[?pl2a(Mod),Func,{Pid,Skip,[[]]}]) of
+		    {'EXIT',FwEndTCErr} ->
+			exit({fw_notify_done,end_tc,FwEndTCErr});
+		    _ ->
+			ok
+		end,
+		%% finished, report back
+		SendTo ! {self(),fw_notify_done,
+			  {TVal/1000,Skip,Loc,[],Comment}}
+	end,
+    spawn_link(FwCall);
+spawn_fw_call(Mod,{end_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why,
+	      Loc,SendTo,_Comment) ->
+    FwCall =
+	fun() ->
+		Conf = [{tc_status,ok}],
+		%% if end_per_testcase fails, the test case should be
+		%% reported successful with a warning printed as comment
+		case catch test_server_sup:framework_call(end_tc,
+							  [?pl2a(Mod),Func,
+							   {Pid,
+							    {failed,{Mod,end_per_testcase,Why}},
+							    [Conf]}]) of
+		    {'EXIT',FwEndTCErr} ->
+			exit({fw_notify_done,end_tc,FwEndTCErr});
+		    _ ->
+			ok
+		end,
+		%% finished, report back
+		SendTo ! {self(),fw_notify_done,
+			  {TVal/1000,{error,{Mod,end_per_testcase,Why}},Loc,[],
+			   ["<font color=\"red\">"
+			    "WARNING: end_per_testcase timed out!"
+			    "</font>"]}}
+	end,
+    spawn_link(FwCall);
+%% Generic clause: notify the framework of the error, then run end_tc.
+spawn_fw_call(Mod,Func,Pid,Error,Loc,SendTo,Comment) ->
+    FwCall =
+	fun() ->
+		case catch fw_error_notify(Mod,Func,[],
+					   Error,Loc) of
+		    {'EXIT',FwErrorNotifyErr} ->
+			exit({fw_notify_done,error_notification,
+			      FwErrorNotifyErr});
+		    _ ->
+			ok
+		end,
+		Conf = [{tc_status,{failed,timetrap_timeout}}],
+		case catch test_server_sup:framework_call(end_tc,
+							  [?pl2a(Mod),Func,
+							   {Pid,Error,[Conf]}]) of
+		    {'EXIT',FwEndTCErr} ->
+			exit({fw_notify_done,end_tc,FwEndTCErr});
+		    _ ->
+			ok
+		end,
+		%% finished, report back
+		SendTo ! {self(),fw_notify_done,{died,Error,Loc,Comment}}
+	end,
+    spawn_link(FwCall).
+
+%% The job proxy process forwards messages between the test case
+%% process on a shielded node (and its descendants) and the job process.
+%%
+%% The job proxy process have to be started by the test-case process
+%% on the shielded node!
+%% Installs a freshly spawned proxy as the caller's group leader and
+%% returns ok.
+start_job_proxy() ->
+    group_leader(spawn(fun () -> job_proxy_msgloop() end), self()), ok.
+
+%% The io_reply_proxy is not the most satisfying solution but it works...
+%% Waits for an io_reply tuple, forwards it to ReplyTo and exits;
+%% any other message is dropped and the wait continues.
+io_reply_proxy(ReplyTo) ->
+    receive
+	IoReply when is_tuple(IoReply),
+		     element(1, IoReply) == io_reply ->
+	    ReplyTo ! IoReply;
+	_ ->
+	    io_reply_proxy(ReplyTo)
+    end.
+
+%% Forwards messages between processes on a shielded node and the job
+%% process (our group leader). io requests get a local io_reply_proxy
+%% spliced in as the reply-to so the reply finds its way back.
+job_proxy_msgloop() ->
+    receive
+
+	%%
+	%% Messages that need intervention by proxy...
+	%%
+
+	%% io stuff ...
+	IoReq when tuple_size(IoReq) >= 2,
+		   element(1, IoReq) == io_request ->
+
+	    ReplyProxy = spawn(fun () -> io_reply_proxy(element(2, IoReq)) end),
+	    group_leader() ! setelement(2, IoReq, ReplyProxy);
+
+	%% test_server stuff...
+	{sync_apply, From, MFA} ->
+	    group_leader() ! {sync_apply_proxy, self(), From, MFA};
+	{sync_result_proxy, To, Result} ->
+	    To ! {sync_result, Result};
+
+	%%
+	%% Messages that need no intervention by proxy...
+	%%
+	Msg ->
+	    group_leader() ! Msg
+    end,
+    job_proxy_msgloop().
+
+%% A test case is known to have failed if it returns {'EXIT', _} tuple,
+%% or sends a message {failed, File, Line} to its group_leader
+
+%% Body of the spawned case process: calls the framework init_tc hook,
+%% runs the case via run_test_case_eval1/6 (unless init_tc said error/
+%% fail/skip/auto_skip), and always terminates with
+%% exit({Ref,Time,Value,Loc,Opts}) so the msgloop can match on Ref.
+run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
+		   MultiplyTimetrap, TCCallback) ->
+    put(test_server_multiply_timetraps,MultiplyTimetrap),
+    {{Time,Value},Loc,Opts} =
+	case test_server_sup:framework_call(init_tc,[?pl2a(Mod),Func,Args0],
+					    {ok,Args0}) of
+	    {ok,Args} ->
+		run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback);
+	    Error = {error,_Reason} ->
+		test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Error,Args0}]),
+		{{0,{skip,{failed,Error}}},{Mod,Func},[]};
+	    {fail,Reason} ->
+		[Conf] = Args0,
+		Conf1 = [{tc_status,{failed,Reason}} | Conf],
+		fw_error_notify(Mod, Func, Conf, Reason),
+		test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,
+						       {{error,Reason},[Conf1]}]),
+		{{0,{failed,Reason}},{Mod,Func},[]};
+	    Skip = {skip,_Reason} ->
+		test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Skip,Args0}]),
+		{{0,Skip},{Mod,Func},[]};
+	    {auto_skip,Reason} ->
+		test_server_sup:framework_call(end_tc,[?pl2a(Mod),
+						       Func,
+						       {{skip,Reason},Args0}]),
+		{{0,{skip,{fw_auto_skip,Reason}}},{Mod,Func},[]}
+	end,
+    exit({Ref,Time,Value,Loc,Opts}).
+
+%% Executes the case proper. RunInit == run_init: wrap the case in
+%% init_per_testcase / end_per_testcase and the framework end_tc hook.
+%% RunInit == skip_init: a conf case (init/end of a group) — run it
+%% directly, prepending the group Name when defined.
+%% Returns {{Time,Return},Loc,Opts}.
+run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
+    case RunInit of
+	run_init ->
+	    put(test_server_init_or_end_conf,{init_per_testcase,Func}),
+	    put(test_server_loc, {Mod,{init_per_testcase,Func}}),
+	    ensure_timetrap(Args),
+	    case init_per_testcase(Mod, Func, Args) of
+		Skip = {skip,Reason} ->
+		    Line = get_loc(),
+		    Conf = [{tc_status,{skipped,Reason}}],
+		    test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Skip,[Conf]}]),
+		    {{0,{skip,Reason}},Line,[]};
+		{skip_and_save,Reason,SaveCfg} ->
+		    Line = get_loc(),
+		    Conf = [{tc_status,{skipped,Reason}},{save_config,SaveCfg}],
+		    test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,
+							   {{skip,Reason},[Conf]}]),
+		    {{0,{skip,Reason}},Line,[]};
+		{ok,NewConf} ->
+		    put(test_server_init_or_end_conf,undefined),
+		    %% call user callback function if defined
+		    NewConf1 = user_callback(TCCallback, Mod, Func, init, NewConf),
+		    put(test_server_loc, {Mod,Func}),
+		    %% execute the test case
+		    {{T,Return},Loc} = {ts_tc(Mod, Func, [NewConf1]),get_loc()},
+		    %% Classify the return value: set tc_status in the
+		    %% config and derive both the test_server return
+		    %% (TSReturn) and the framework return (FWReturn).
+		    {EndConf,TSReturn,FWReturn} =
+			case Return of
+			    {E,TCError} when E=='EXIT' ; E==failed ->
+				fw_error_notify(Mod, Func, NewConf1,
+						TCError, mod_loc(Loc)),
+				{[{tc_status,{failed,TCError}}|NewConf1],
+				 Return,{error,TCError}};
+			    SaveCfg={save_config,_} ->
+				{[{tc_status,ok},SaveCfg|NewConf1],Return,ok};
+			    {skip_and_save,Why,SaveCfg} ->
+				Skip = {skip,Why},
+				{[{tc_status,{skipped,Why}},{save_config,SaveCfg}|NewConf1],
+				 Skip,Skip};
+			    {skip,Why} ->
+				{[{tc_status,{skipped,Why}}|NewConf1],Return,Return};
+			    _ ->
+				{[{tc_status,ok}|NewConf1],Return,ok}
+			end,
+		    %% call user callback function if defined
+		    EndConf1 = user_callback(TCCallback, Mod, Func, 'end', EndConf),
+		    %% end_per_testcase may override the result (fail the
+		    %% case, save config, or itself crash).
+		    {FWReturn1,TSReturn1,EndConf2} =
+			case end_per_testcase(Mod, Func, EndConf1) of
+			    SaveCfg1={save_config,_} ->
+				{FWReturn,TSReturn,[SaveCfg1|lists:keydelete(save_config, 1, EndConf1)]};
+			    {fail,ReasonToFail} -> % user has failed the testcase
+				fw_error_notify(Mod, Func, EndConf1, ReasonToFail),
+				{{error,ReasonToFail},{failed,ReasonToFail},EndConf1};
+			    {failed,{_,end_per_testcase,_}} = Failure -> % unexpected termination
+				{Failure,TSReturn,EndConf1};
+			    _ ->
+				{FWReturn,TSReturn,EndConf1}
+			end,
+		    case test_server_sup:framework_call(end_tc, [?pl2a(Mod), Func,
+								 {FWReturn1,[EndConf2]}]) of
+			{fail,Reason} ->
+			    fw_error_notify(Mod, Func, EndConf2, Reason),
+			    {{T,{failed,Reason}},{Mod,Func},[]};
+			_ ->
+			    {{T,TSReturn1},Loc,[]}
+		    end
+	    end;
+	skip_init ->
+	    %% call user callback function if defined
+	    Args1 = user_callback(TCCallback, Mod, Func, init, Args),
+	    ensure_timetrap(Args1),
+	    %% ts_tc does a catch
+	    put(test_server_loc, {Mod,Func}),
+	    %% if this is a named conf group, the test case (init or end conf)
+	    %% should be called with the name as the first argument
+	    Args2 = if Name == undefined -> Args1;
+		       true -> [Name | Args1]
+		    end,
+	    %% execute the conf test case
+	    {{T,Return},Loc} = {ts_tc(Mod, Func, Args2),get_loc()},
+	    %% call user callback function if defined
+	    Return1 = user_callback(TCCallback, Mod, Func, 'end', Return),
+	    {Return2,Opts} = process_return_val([Return1], Mod,Func,Args1, Loc, Return1),
+	    {{T,Return2},Loc,Opts}
+    end.
+
+%% the return value is a list and we have to check if it contains
+%% the result of an end conf case or if it's a Config list
+%% Distinguish an end-conf result list (only tuples tagged with one of
+%% ReturnTags, or 'ok') from an init-conf Config list, and process
+%% accordingly. Returns {FinalReturn,SaveOpts}.
+process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) ->
+    ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result],
+    %% check if all elements in the list are valid end conf return value tuples
+    case lists:all(fun(Val) when is_tuple(Val) ->
+			   lists:any(fun(T) -> T == element(1, Val) end, ReturnTags);
+		      (ok) ->
+			   true;
+		      (_) ->
+			   false
+		   end, Return) of
+	true ->     % must be return value from end conf case
+	    process_return_val1(Return, M,F,A, Loc, Final, []);
+	false -> % must be Config value from init conf case
+	    test_server_sup:framework_call(end_tc, [?pl2a(M),F,{ok,A}]),
+	    {Return,[]}
+    end;
+%% the return value is not a list, so it's the return value from an
+%% end conf case or it's a dummy value that can be ignored
+process_return_val(Return, M,F,A, Loc, Final) ->
+    process_return_val1(Return, M,F,A, Loc, Final, []).
+
+%% Walk an end-conf return list, folding save_config/skip_and_save into
+%% the config, collecting return_group_result into SaveOpts, and letting
+%% skip/comment (or a crash) determine the final result. Calls the
+%% framework end_tc hook exactly once per walk.
+process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts) when E=='EXIT';
+										 E==failed ->
+    fw_error_notify(M,F,A, TCError, mod_loc(Loc)),
+    test_server_sup:framework_call(end_tc,
+				   [?pl2a(M),F,{{error,TCError},
+						[[{tc_status,{failed,TCError}}|Args]]}]),
+    {Failed,SaveOpts};
+process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) ->
+    process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts);
+process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) ->
+    process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc, {skip,Why}, SaveOpts);
+process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final, SaveOpts) ->
+    process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]);
+process_return_val1([RetVal={Tag,_}|Opts], M,F,A, Loc, _, SaveOpts) when Tag==skip;
+									 Tag==comment ->
+    process_return_val1(Opts, M,F,A, Loc, RetVal, SaveOpts);
+process_return_val1([_|Opts], M,F,A, Loc, Final, SaveOpts) ->
+    process_return_val1(Opts, M,F,A, Loc, Final, SaveOpts);
+process_return_val1([], M,F,A, _Loc, Final, SaveOpts) ->
+    test_server_sup:framework_call(end_tc, [?pl2a(M),F,{Final,A}]),
+    {Final,lists:reverse(SaveOpts)}.
+
+%% Invoke the user-supplied testcase callback {CBMod,CBFunc} (if any)
+%% around a test case. The callback may transform the argument list;
+%% anything other than a list result (including a crash) is ignored and
+%% the original arguments are kept.
+user_callback(undefined, _, _, _, Args) ->
+    Args;
+user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, [Args]) when is_list(Args) ->
+    case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
+	Args1 when is_list(Args1) ->
+	    [Args1];
+	_ ->
+	    [Args]
+    end;
+user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, Args) ->
+    case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
+	Args1 when is_list(Args1) ->
+	    Args1;
+	_ ->
+	    Args
+    end.
+
+%% Run the optional Mod:init_per_testcase/2. Returns {ok,Config},
+%% {skip,Reason} or {skip_and_save,Reason,SaveCfg}. A crash, a throw or
+%% a malformed Config list all turn into a skip of the test case.
+init_per_testcase(Mod, Func, Args) ->
+    case code:is_loaded(Mod) of
+	false -> code:load_file(Mod);
+	_ -> ok
+    end,
+    %% init_per_testcase defined, returns new configuration
+    case erlang:function_exported(Mod,init_per_testcase,2) of
+	true ->
+	    case catch my_apply(Mod, init_per_testcase, [Func|Args]) of
+		{'$test_server_ok',{Skip,Reason}} when Skip==skip;
+						       Skip==skipped ->
+		    {skip,Reason};
+		{'$test_server_ok',Res={skip_and_save,_,_}} ->
+		    Res;
+		{'$test_server_ok',NewConf} when is_list(NewConf) ->
+		    %% Config must contain only tuples; reject anything else.
+		    case lists:filter(fun(T) when is_tuple(T) -> false;
+					 (_) -> true end, NewConf) of
+			[] ->
+			    {ok,NewConf};
+			Bad ->
+			    group_leader() ! {printout,12,
+					      "ERROR! init_per_testcase has returned "
+					      "bad elements in Config: ~p\n",[Bad]},
+			    {skip,{failed,{Mod,init_per_testcase,bad_return}}}
+		    end;
+		{'$test_server_ok',_Other} ->
+		    group_leader() ! {printout,12,
+				      "ERROR! init_per_testcase did not return "
+				      "a Config list.\n",[]},
+		    {skip,{failed,{Mod,init_per_testcase,bad_return}}};
+		{'EXIT',Reason} ->
+		    Line = get_loc(),
+		    FormattedLoc = test_server_sup:format_loc(mod_loc(Line)),
+		    group_leader() ! {printout,12,
+				      "ERROR! init_per_testcase crashed!\n"
+				      "\tLocation: ~s\n\tReason: ~p\n",
+				      [FormattedLoc,Reason]},
+		    {skip,{failed,{Mod,init_per_testcase,Reason}}};
+		Other ->
+		    %% my_apply tags normal returns, so a bare value here
+		    %% means init_per_testcase threw Other.
+		    Line = get_loc(),
+		    FormattedLoc = test_server_sup:format_loc(mod_loc(Line)),
+		    group_leader() ! {printout,12,
+				      "ERROR! init_per_testcase thrown!\n"
+				      "\tLocation: ~s\n\tReason: ~p\n",
+				      [FormattedLoc, Other]},
+		    {skip,{failed,{Mod,init_per_testcase,Other}}}
+	    end;
+	false ->
+	    %% Optional init_per_testcase not defined
+	    %% keep quiet.
+	    [Config] = Args,
+	    {ok, Config}
+    end.
+
+%% Run Mod:end_per_testcase/2 if exported, falling back to the legacy
+%% name fin_per_testcase/2; ok if neither exists.
+end_per_testcase(Mod, Func, Conf) ->
+    case erlang:function_exported(Mod,end_per_testcase,2) of
+	true ->
+	    do_end_per_testcase(Mod,end_per_testcase,Func,Conf);
+	false ->
+	    %% Backwards compatibility!
+	    case erlang:function_exported(Mod,fin_per_testcase,2) of
+		true ->
+		    do_end_per_testcase(Mod,fin_per_testcase,Func,Conf);
+		false ->
+		    ok
+	    end
+    end.
+
+%% Call Mod:EndFunc(Func,Conf). Returns {save_config,_} or {fail,_} from
+%% the callback unchanged, ok on any other normal return, and
+%% {failed,{Mod,end_per_testcase,Why}} (with a warning comment/printout)
+%% when the callback crashes or throws.
+do_end_per_testcase(Mod,EndFunc,Func,Conf) ->
+    put(test_server_init_or_end_conf,{EndFunc,Func}),
+    put(test_server_loc, {Mod,{EndFunc,Func}}),
+    case catch my_apply(Mod, EndFunc, [Func,Conf]) of
+	{'$test_server_ok',SaveCfg={save_config,_}} ->
+	    SaveCfg;
+	{'$test_server_ok',{fail,_}=Fail} ->
+	    Fail;
+	{'$test_server_ok',_} ->
+	    ok;
+	{'EXIT',Reason} = Why ->
+	    comment(io_lib:format("<font color=\"red\">"
+				  "WARNING: ~w crashed!"
+				  "</font>\n",[EndFunc])),
+	    group_leader() ! {printout,12,
+			      "WARNING: ~w crashed!\n"
+			      "Reason: ~p\n"
+			      "Line: ~s\n",
+			      [EndFunc, Reason,
+			       test_server_sup:format_loc(
+				 mod_loc(get_loc()))]},
+	    {failed,{Mod,end_per_testcase,Why}};
+	Other ->
+	    %% A bare value means the callback threw Other.
+	    comment(io_lib:format("<font color=\"red\">"
+				  "WARNING: ~w thrown!"
+				  "</font>\n",[EndFunc])),
+	    group_leader() ! {printout,12,
+			      "WARNING: ~w thrown!\n"
+			      "Reason: ~p\n"
+			      "Line: ~s\n",
+			      [EndFunc, Other,
+			       test_server_sup:format_loc(
+				 mod_loc(get_loc()))]},
+	    {failed,{Mod,end_per_testcase,Other}}
+    end.
+
+%% Current test-case location: the ?line trace from test_server_line if
+%% available, otherwise the test_server_loc process-dictionary entry.
+get_loc() ->
+    case catch test_server_line:get_lines() of
+	[] ->
+	    get(test_server_loc);
+	{'EXIT',_} ->
+	    get(test_server_loc);
+	Loc ->
+	    Loc
+    end.
+
+%% Location of another process: copy Pid's whole process dictionary into
+%% our own (NOTE: clobbers our existing entries) and read it with get_loc/0.
+get_loc(Pid) ->
+    {dictionary,Dict} = process_info(Pid, dictionary),
+    lists:foreach(fun({Key,Val}) -> put(Key,Val) end,Dict),
+    get_loc().
+
+%% Extract {Module,Function} from the head of a location list.
+get_mf([{M,F,_}|_]) -> {M,F};
+get_mf([{M,F}|_]) -> {M,F};
+get_mf(_) -> {undefined,undefined}.
+
+%% Normalize a location term into a list of {Mod,Func[,Line]} tuples,
+%% converting module names with ?pl2a. Accepts the shapes produced by
+%% the various ?line-macro versions; anything else is passed through.
+mod_loc(Loc) ->
+    %% handle diff line num versions
+    case Loc of
+	[{{_M,_F},_L}|_] ->
+	    [{?pl2a(M),F,L} || {{M,F},L} <- Loc];
+	[{_M,_F}|_] ->
+	    [{?pl2a(M),F} || {M,F} <- Loc];
+	{{M,F},L} ->
+	    [{?pl2a(M),F,L}];
+	{M,ForL} ->
+	    [{?pl2a(M),ForL}];
+	_ ->
+	    Loc
+    end.
+
+
+%% Notify the framework of a test-case error via the error_notification
+%% callback; the 4-arity version reports the location as 'unknown'.
+fw_error_notify(Mod, Func, Args, Error) ->
+    test_server_sup:framework_call(error_notification,
+				   [?pl2a(Mod),Func,[Args],
+				    {Error,unknown}]).
+fw_error_notify(Mod, Func, Args, Error, Loc) ->
+    test_server_sup:framework_call(error_notification,
+				   [?pl2a(Mod),Func,[Args],
+				    {Error,Loc}]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% print(Detail,Format,Args) -> ok
+%% Detail = integer()
+%% Format = string()
+%% Args = [term()]
+%%
+%% Just like io:format, except that depending on the Detail value, the output
+%% is directed to console, major and/or minor log files.
+
+%% Forwards to test_server_ctrl:print/3, locally or on the controller node.
+print(Detail,Format,Args) ->
+    local_or_remote_apply({test_server_ctrl,print,[Detail,Format,Args]}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% print_timestamp(Detail,Leader) -> ok
+%%
+%% Prints Leader followed by a time stamp (date and time). Depending on
+%% the Detail value, the output is directed to console, major and/or minor
+%% log files.
+
+%% Forwards to test_server_ctrl:print_timestamp/2, locally or on the
+%% controller node.
+print_timestamp(Detail,Leader) ->
+    local_or_remote_apply({test_server_ctrl,print_timestamp,[Detail,Leader]}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% lookup_config(Key,Config) -> Value | undefined
+%% Key = term()
+%% Value = term()
+%% Config = [{Key,Value},...]
+%%
+%% Looks up a specific key in the config list, and returns the value
+%% of the associated key, or undefined if the key doesn't exist.
+
+%% Returns the value bound to Key in Config, or 'undefined' (after
+%% printing a diagnostic) when the key is absent.
+lookup_config(Key,Config) ->
+    case lists:keysearch(Key,1,Config) of
+	{value,{Key,Val}} ->
+	    Val;
+	_ ->
+	    io:format("Could not find element ~p in Config.~n",[Key]),
+	    undefined
+    end.
+
+%% timer:tc/3
+%% Applies M:F(A) under catch and measures the elapsed wall-clock time
+%% in microseconds. Returns {Elapsed, Result} where Result is the plain
+%% return value, an {'EXIT',Reason} crash, or {failed,{thrown,Term}} for
+%% a throw (distinguished via the '$test_server_ok' tag from my_apply/3).
+ts_tc(M, F, A) ->
+    Before = erlang:now(),
+    Val = (catch my_apply(M, F, A)),
+    After = erlang:now(),
+    Result = case Val of
+		 {'$test_server_ok', R} ->
+		     R; % test case ok
+		 {'EXIT',_Reason} = R ->
+		     R; % test case crashed
+		 Other ->
+		     {failed, {thrown,Other}} % test case was thrown
+	     end,
+    %% erlang:now() is {MegaSecs,Secs,MicroSecs}; fold to microseconds.
+    Elapsed =
+	(element(1,After)*1000000000000
+	 +element(2,After)*1000000+element(3,After)) -
+	(element(1,Before)*1000000000000
+	 +element(2,Before)*1000000+element(3,Before)),
+    {Elapsed, Result}.
+
+%% Tag a successful apply so callers can tell a normal return from a throw.
+my_apply(M, F, A) ->
+    {'$test_server_ok',apply(M, F, A)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% TEST SUITE SUPPORT FUNCTIONS %%
+%% %%
+%% Note: Some of these functions have been moved to test_server_sup %%
+%% in an attempt to keep this module small (yeah, right!)  %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Flatten chardata to a latin-1 list, escaping code points above 255 as
+%% "\{octal}"; non-chardata input is returned unchanged.
+unicode_to_latin1(Chars) when is_list(Chars); is_binary(Chars) ->
+    lists:flatten(
+      [ case X of
+	    High when High > 255 ->
+		io_lib:format("\\{~.8B}",[X]);
+	    Low ->
+		Low
+	end || X <- unicode:characters_to_list(Chars,unicode) ]);
+unicode_to_latin1(Garbage) ->
+    Garbage.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% format(Format) -> IoLibReturn
+%% format(Detail,Format) -> IoLibReturn
+%% format(Format,Args) -> IoLibReturn
+%% format(Detail,Format,Args) -> IoLibReturn
+%% Detail = integer()
+%% Format = string()
+%% Args = [term(),...]
+%% IoLibReturn = term()
+%%
+%% Logs the Format string and Args, similar to io:format/1/2 etc. If
+%% Detail is not specified, the default detail level (which is 50) is used.
+%% Which log files the string will be logged in depends on the thresholds
+%% set with set_levels/3. Typically with default detail level, only the
+%% minor log file is used.
+%% See header comment above: log like io:format; default detail is minor.
+format(Format) ->
+    format(minor, Format, []).
+
+%% format/2 disambiguates: first argument may be a detail level
+%% (major/minor/integer) or a format string.
+format(major, Format) ->
+    format(major, Format, []);
+format(minor, Format) ->
+    format(minor, Format, []);
+format(Detail, Format) when is_integer(Detail) ->
+    format(Detail, Format, []);
+format(Format, Args) ->
+    format(minor, Format, Args).
+
+%% Formats the string (reporting rather than crashing on a bad format)
+%% and sends it to the group leader via log/1.
+format(Detail, Format, Args) ->
+    Str =
+	case catch io_lib:format(Format,Args) of
+	    {'EXIT',_} ->
+		io_lib:format("illegal format; ~p with args ~p.\n",
+			      [Format,Args]);
+	    Valid -> Valid
+	end,
+    log({Detail, Str}).
+
+%% Send a structured log message to the group leader (the test-case
+%% msgloop), which routes it to the appropriate log files.
+log(Msg) ->
+    group_leader() ! {structured_io, self(), Msg},
+    ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% capture_start() -> ok
+%% capture_stop() -> ok
+%%
+%% Starts/stops capturing all output from io:format, and similar. Capturing
+%% output doesn't stop output from happening. It just makes it possible
+%% to retrieve the output using capture_get/0.
+%% Starting and stopping capture doesn't affect already captured output.
+%% All output is stored as messages in the message queue until retrieved
+
+%% Ask the group leader to start forwarding io output to this process.
+capture_start() ->
+    group_leader() ! {capture,self()},
+    ok.
+
+%% Ask the group leader to stop forwarding io output.
+capture_stop() ->
+    group_leader() ! {capture,false},
+    ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% capture_get() -> Output
+%% Output = [string(),...]
+%%
+%% Retrieves all the captured output since last call to capture_get/0.
+%% Note that since output arrives as messages to the process, it takes
+%% a short while from the call to io:format until all output is available
+%% by capture_get/0. It is not necessary to call capture_stop/0 before
+%% retrieving the output.
+%% Drain all {captured,_} messages from the queue; see header comment.
+capture_get() ->
+    test_server_sup:capture_get([]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% messages_get() -> Messages
+%% Messages = [term(),...]
+%%
+%% Returns all messages in the message queue.
+%% Drain and return every message in the caller's message queue.
+messages_get() ->
+    test_server_sup:messages_get([]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sleep(Time) -> ok
+%% Time = integer() | float() | infinity
+%%
+%% Sleeps the specified number of milliseconds. This sleep also accepts
+%% floating point numbers (which are truncated) and the atom 'infinity'.
+%% Sleep for MSecs milliseconds (floats truncated) or forever.
+sleep(infinity) ->
+    receive
+    after infinity ->
+	    ok
+    end;
+sleep(MSecs) ->
+    receive
+    after trunc(MSecs) ->
+	    ok
+    end,
+    ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% fail(Reason) -> exit({suite_failed,Reason})
+%%
+%% Immediately calls exit. Included because test suites are easier
+%% to read when using this function, rather than exit directly.
+%% Record Reason as the test-case comment, then exit with
+%% {suite_failed,Reason}.
+fail(Reason) ->
+    comment(cast_to_list(Reason)),
+    exit({suite_failed,Reason}).
+
+%% Render any term as a flat string for use as a comment.
+cast_to_list(X) when is_list(X) -> X;
+cast_to_list(X) when is_atom(X) -> atom_to_list(X);
+cast_to_list(X) -> lists:flatten(io_lib:format("~p", [X])).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% fail() -> exit(suite_failed)
+%%
+%% Immediately calls exit. Included because test suites are easier
+%% to read when using this function, rather than exit directly.
+%% Exit the test case with reason 'suite_failed' (no comment recorded).
+fail() ->
+    exit(suite_failed).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% break(Comment) -> ok
+%%
+%% Break a test case so part of the test can be done manually.
+%% Use continue/0 to continue.
+%% Suspend the test case for manual interaction: cancel all active
+%% timetraps, print instructions, (re)register a break process and block
+%% until test_server:continue/0 sends 'continue'.
+break(Comment) ->
+    case erase(test_server_timetraps) of
+	undefined -> ok;
+	List -> lists:foreach(fun(Ref) -> timetrap_cancel(Ref) end,List)
+    end,
+    io:format(user,
+	      "\n\n\n--- SEMIAUTOMATIC TESTING ---"
+	      "\nThe test case executes on process ~w"
+	      "\n\n\n~s"
+	      "\n\n\n-----------------------------\n\n"
+	      "Continue with --> test_server:continue().\n",
+	      [self(),Comment]),
+    %% Replace any break process left over from an earlier break.
+    case whereis(test_server_break_process) of
+	undefined ->
+	    spawn_break_process(self());
+	OldBreakProcess ->
+	    OldBreakProcess ! cancel,
+	    spawn_break_process(self())
+    end,
+    receive continue -> ok end.
+
+%% Spawn the registered break process; it either relays 'continue' to the
+%% suspended test case Pid or terminates silently on 'cancel'.
+spawn_break_process(Pid) ->
+    spawn(fun() ->
+		  register(test_server_break_process,self()),
+		  receive
+		      continue -> continue(Pid);
+		      cancel -> ok
+		  end
+	  end).
+
+%% Resume a test case suspended by break/1 (no-op if none is suspended).
+continue() ->
+    case whereis(test_server_break_process) of
+	undefined ->
+	    ok;
+	BreakProcess ->
+	    BreakProcess ! continue
+    end.
+
+%% Send 'continue' directly to the suspended test-case process.
+continue(Pid) ->
+    Pid ! continue.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timetrap_scale_factor() -> Factor
+%%
+%% Returns the amount to scale timetraps with.
+
+%% Multiplicative timetrap scale factor, composed from the environment:
+%% x5 under purify/valgrind, x6 debug emulator / x2 lock checking,
+%% x3 with superfluous schedulers, x5 on vxworks, x10 under cover.
+timetrap_scale_factor() ->
+    F0 = case test_server:purify_is_running() of
+	     true -> 5;
+	     false -> 1
+	 end,
+    F1 = case {is_debug(), has_lock_checking()} of
+	     {true,_} -> 6 * F0;
+	     {false,true} -> 2 * F0;
+	     {false,false} -> F0
+	 end,
+    F2 = case has_superfluous_schedulers() of
+	     true -> 3*F1;
+	     false -> F1
+	 end,
+    F = case test_server_sup:get_os_family() of
+	    vxworks -> 5 * F2;
+	    _ -> F2
+	end,
+    case test_server:is_cover() of
+	true -> 10 * F;
+	false -> F
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timetrap(Timeout) -> Handle
+%% Handle = term()
+%%
+%% Creates a time trap, that will kill the calling process if the
+%% trap is not cancelled with timetrap_cancel/1, within Timeout milliseconds.
+%% Timeout may also be {hours,N}, {minutes,N} or {seconds,N}, see
+%% time_ms/1. Honours the test_server_multiply_timetraps setting in
+%% the process dictionary: 'infinity' disables the trap entirely, an
+%% integer multiplies the timeout.
+
+timetrap(Timeout0) ->
+    Timeout = time_ms(Timeout0),
+    %% A user timetrap supersedes the framework's default timetrap.
+    cancel_default_timetrap(),
+    case get(test_server_multiply_timetraps) of
+	undefined -> timetrap1(Timeout);
+	infinity -> infinity;
+	Int -> timetrap1(Timeout*Int)
+    end.
+
+%% Start the timetrap watchdog (test_server_sup:timetrap/2 kills the
+%% calling process after Timeout ms) and record the handle in the
+%% process dictionary so break/1 and timetrap_cancel/1 can find every
+%% active trap. NOTE: the handle bound to "Ref" is actually the
+%% watchdog pid returned by spawn_link/3.
+timetrap1(Timeout) ->
+    Ref = spawn_link(test_server_sup,timetrap,[Timeout,self()]),
+    case get(test_server_timetraps) of
+	undefined -> put(test_server_timetraps,[Ref]);
+	List -> put(test_server_timetraps,[Ref|List])
+    end,
+    Ref.
+
+%% ensure_timetrap(Config) -> ok | term()
+%%
+%% Make sure at least one timetrap is active for the calling process.
+%% If none is, install a default one using the 'default_timeout' value
+%% (in seconds) from Config, or ?DEFAULT_TIMETRAP_SECS when absent,
+%% remembering its handle under test_server_default_timetrap.
+ensure_timetrap(Config) ->
+    %format("ensure_timetrap:~p~n",[Config]),
+    case get(test_server_timetraps) of
+	[_|_] ->
+	    ok;
+	_ ->
+	    %% Warn about (and clear) anything unexpected left in the
+	    %% default-timetrap slot before installing a new one.
+	    case get(test_server_default_timetrap) of
+		undefined -> ok;
+		Garbage ->
+		    erase(test_server_default_timetrap),
+		    format("=== WARNING: garbage in test_server_default_timetrap: ~p~n",
+			   [Garbage])
+	    end,
+	    DTmo = case lists:keysearch(default_timeout,1,Config) of
+		       {value,{default_timeout,Tmo}} -> Tmo;
+		       _ -> ?DEFAULT_TIMETRAP_SECS
+		   end,
+	    format("=== test_server setting default timetrap of ~p seconds~n",
+		   [DTmo]),
+	    put(test_server_default_timetrap, timetrap(seconds(DTmo)))
+    end.
+
+%% cancel_default_timetrap() -> ok | error
+%%
+%% Cancel the default timetrap (if any) installed by ensure_timetrap/1.
+%% Called from timetrap/1, since a user timetrap supersedes the default
+%% one. Returns error if the stored value was not a pid.
+cancel_default_timetrap() ->
+    case get(test_server_default_timetrap) of
+	undefined ->
+	    ok;
+	TimeTrap when is_pid(TimeTrap) ->
+	    timetrap_cancel(TimeTrap),
+	    erase(test_server_default_timetrap),
+	    format("=== test_server canceled default timetrap since another timetrap was set~n"),
+	    ok;
+	Garbage ->
+	    erase(test_server_default_timetrap),
+	    format("=== WARNING: garbage in test_server_default_timetrap: ~p~n",
+		   [Garbage]),
+	    error
+    end.
+
+
+%% Normalize a timetrap time specification to milliseconds.
+%% Accepts a plain integer (already milliseconds) or one of
+%% {hours,N} | {minutes,N} | {seconds,N}. Any other term terminates
+%% the caller with {invalid_time_spec,_}; an unknown unit tag is also
+%% reported on the log before exiting.
+time_ms(Millis) when is_integer(Millis) ->
+    Millis;
+time_ms({hours, Amount}) ->
+    hours(Amount);
+time_ms({minutes, Amount}) ->
+    minutes(Amount);
+time_ms({seconds, Amount}) ->
+    seconds(Amount);
+time_ms({BadUnit, _Amount}) ->
+    format("=== ERROR: Invalid time specification: ~p. "
+	   "Should be seconds, minutes, or hours.~n", [BadUnit]),
+    exit({invalid_time_spec,BadUnit});
+time_ms(BadSpec) ->
+    exit({invalid_time_spec,BadSpec}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timetrap_cancel(Handle) -> ok
+%% Handle = term()
+%%
+%% Cancels a time trap. 'infinity' (returned by timetrap/1 when
+%% timetraps are disabled) is accepted and ignored. The handle is
+%% also removed from the process-dictionary list kept by timetrap1/1.
+timetrap_cancel(infinity) ->
+    ok;
+timetrap_cancel(Handle) ->
+    case get(test_server_timetraps) of
+	undefined -> ok;
+	[Handle] -> erase(test_server_timetraps);
+	List -> put(test_server_timetraps,lists:delete(Handle,List))
+    end,
+    test_server_sup:timetrap_cancel(Handle).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% hours(N) -> Milliseconds
+%% minutes(N) -> Milliseconds
+%% seconds(N) -> Milliseconds
+%% N = integer() | float()
+%% Milliseconds = integer()
+%%
+%% Convert the named time unit to milliseconds. Fractional input is
+%% accepted; the result is truncated to an integer.
+hours(Amount) -> trunc(Amount * 1000 * 60 * 60).
+minutes(Amount) -> trunc(Amount * 1000 * 60).
+seconds(Amount) -> trunc(Amount * 1000).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timecall(M,F,A) -> {Time,Val}
+%% Time = float()
+%%
+%% Measures the time spent evaluating MFA. The measurement is done with
+%% erlang:now/0, and should have pretty good accuracy on most platforms.
+%% The function is not evaluated in a catch context.
+%% Thin wrapper; the actual timing is done in test_server_sup.
+timecall(M, F, A) ->
+    test_server_sup:timecall(M,F,A).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% do_times(N,M,F,A) -> ok
+%% do_times(N,Fun) -> ok
+%% N = integer()
+%% Fun = fun() -> void()
+%%
+%% Evaluates MFA or Fun N times, discarding the results, and returns
+%% ok. A negative count crashes with function_clause (no clause
+%% matches), which is intentional.
+do_times(0, _Mod, _Func, _Args) ->
+    ok;
+do_times(Count, Mod, Func, Args) when Count > 0 ->
+    apply(Mod, Func, Args),
+    do_times(Count - 1, Mod, Func, Args).
+
+do_times(0, _Fun) ->
+    ok;
+do_times(Count, Fun) when Count > 0 ->
+    Fun(),
+    do_times(Count - 1, Fun).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% m_out_of_n(M,N,Fun) -> ok | exit({m_out_of_n_failed,{R,left_to_do}})
+%% M = integer()
+%% N = integer()
+%% Fun = fun() -> void()
+%% R = integer()
+%%
+%% Repeats evaluating the given function until it succeeded (didn't crash)
+%% M times. If, after N times, M successful attempts have not been
+%% accomplished, the process crashes with reason {m_out_of_n_failed
+%% {R,left_to_do}}, where R indicates how many cases that remained to be
+%% successfully completed.
+%%
+%% For example:
+%% m_out_of_n(1,4,fun() -> tricky_test_case() end)
+%% Tries to run tricky_test_case() up to 4 times,
+%% and is happy if it succeeds once.
+%%
+%% m_out_of_n(7,8,fun() -> clock_sanity_check() end)
+%% Tries running clock_sanity_check() up to 8
+%% times and allows the function to fail once.
+%% This might be useful if clock_sanity_check/0
+%% is known to fail if the clock crosses an hour
+%% boundary during the test (and the up to 8
+%% test runs could never cross 2 boundaries)
+%%
+%% NOTE(review): the old-style catch means errors and exits count as
+%% failures, but a value thrown from Fun is returned as-is and counts
+%% as a success (unless it happens to match {'EXIT',_}) — presumably
+%% intended; confirm before changing to try/catch.
+m_out_of_n(0,_,_) ->
+    ok;
+m_out_of_n(M,0,_) ->
+    exit({m_out_of_n_failed,{M,left_to_do}});
+m_out_of_n(M,N,Fun) ->
+    case catch Fun() of
+	{'EXIT',_} ->
+	    m_out_of_n(M,N-1,Fun);
+	_Other ->
+	    m_out_of_n(M-1,N-1,Fun)
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%call_crash(M,F,A)
+%%call_crash(Time,M,F,A)
+%%call_crash(Time,Crash,M,F,A)
+%% M - atom()
+%% F - atom()
+%% A - [term()]
+%% Time - integer() in milliseconds.
+%% Crash - term()
+%%
+%% Spawns a new process that calls MFA. The call is considered
+%% successful if the call crashes with the given reason (Crash),
+%% or any other reason if Crash is not specified.
+%% ** The call must terminate within the given Time (defaults
+%% to infinity), or it is considered a failure (exit with reason
+%% 'call_crash_timeout' is generated).
+
+%% Default: any crash reason, no time limit.
+call_crash(M,F,A) ->
+    call_crash(infinity,M,F,A).
+%% Default: any crash reason, within Time ms.
+call_crash(Time,M,F,A) ->
+    call_crash(Time,any,M,F,A).
+%% Full form; the actual work is done in test_server_sup.
+call_crash(Time,Crash,M,F,A) ->
+    test_server_sup:call_crash(Time,Crash,M,F,A).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% start_node(SlaveName, Type, Options) ->
+%% {ok, Slave} | {error, Reason}
+%%
+%% SlaveName = string(), atom().
+%% Type = slave | peer
+%% Options = [{tuple(), term()}]
+%%
+%% OptionList is a tuple list which may contain one
+%% or more of these members:
+%%
+%% Slave and Peer:
+%% {remote, true} - Start the node on a remote host. If not specified,
+%% the node will be started on the local host (with
+%% some exceptions, as for the case of VxWorks and OSE,
+%% where all nodes are started on a remote host).
+%% {args, Arguments} - Arguments passed directly to the node.
+%% {cleanup, false} - Nodes started with this option will not be killed
+%% by the test server after completion of the test case
+%% Therefore it is IMPORTANT that the USER terminates
+%% the node!!
+%% {erl, ReleaseList} - Use an Erlang emulator determined by ReleaseList
+%% when starting nodes, instead of the same emulator
+%% as the test server is running. ReleaseList is a list
+%% of specifiers, where a specifier is either
+%% {release, Rel}, {prog, Prog}, or 'this'. Rel is
+%% either the name of a release, e.g., "r7a" or
+%% 'latest'. 'this' means using the same emulator as
+%% the test server. Prog is the name of an emulator
+%% executable. If the list has more than one element,
+%% one of them is picked randomly. (Only
+%% works on Solaris and Linux, and the test
+%% server gives warnings when it notices that
+%% nodes are not of the same version as
+%% itself.)
+%%
+%% Peer only:
+%% {wait, false} - Don't wait for the node to be started.
+%% {fail_on_error, false} - Returns {error, Reason} rather than failing
+%% the test case. This option can only be used with
+%% peer nodes.
+%% Note that slave nodes always act as if they had
+%% fail_on_error==false.
+%%
+
+%% See the option description above. The actual node start is
+%% delegated (via the group leader) to test_server_ctrl:start_node/3;
+%% this function handles warnings, cover instrumentation and the
+%% result mapping.
+start_node(Name, Type, Options) ->
+    %% Warn if a node with the same first name (part before @) is
+    %% already known — a likely name clash.
+    lists:foreach(
+      fun(N) ->
+	      case firstname(N) of
+		  Name ->
+		      format("=== WARNING: Trying to start node \'~w\' when node"
+			     " with same first name exists: ~w", [Name, N]);
+		  _other -> ok
+	      end
+      end,
+      nodes()),
+
+    %% Ask the controller (our group leader relays the request) to
+    %% start the node, and wait synchronously for the result.
+    group_leader() ! {sync_apply,
+		      self(),
+		      {test_server_ctrl,start_node,[Name,Type,Options]}},
+    Result = receive {sync_result,R} -> R end,
+
+    case Result of
+	{ok,Node} ->
+
+	    %% Cannot run cover on shielded node or on a node started
+	    %% by a shielded node.
+	    Cover = case is_cover() of
+			true ->
+			    not is_shielded(Name) andalso same_version(Node);
+			false ->
+			    false
+		    end,
+
+	    net_adm:ping(Node),
+	    case Cover of
+		true ->
+		    %% Sticky modules cannot be cover compiled; unstick
+		    %% them around cover:start/1 and restore afterwards.
+		    Sticky = unstick_all_sticky(Node),
+		    cover:start(Node),
+		    stick_all_sticky(Node,Sticky);
+		_ ->
+		    ok
+	    end,
+	    {ok,Node};
+	{fail,Reason} -> fail(Reason);
+	Error -> Error
+    end.
+
+%% Return the part of a node name before the '@' as an atom,
+%% e.g. 'foo@bar' -> foo.
+firstname(N) ->
+    list_to_atom(upto($@,atom_to_list(N))).
+
+%% Prefix of the list up to (excluding) the first occurrence of H.
+%% This should!!! crash if H is not member in list.
+upto(H, [H | _T]) -> [];
+upto(H, [X | T]) -> [X | upto(H,T)].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% wait_for_node(Name) -> ok | {error,timeout}
+%%
+%% If a node is started with the options {wait,false}, this function
+%% can be used to wait for the node to come up from the
+%% test server point of view (i.e. wait until it has contacted
+%% the test server controller after startup)
+%% Delegated synchronously to test_server_ctrl via the group leader.
+wait_for_node(Slave) ->
+    group_leader() ! {sync_apply,
+		      self(),
+		      {test_server_ctrl,wait_for_node,[Slave]}},
+    receive {sync_result,R} -> R end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% stop_node(Name) -> true|false
+%%
+%% Kills a (remote) node.
+%% Also inform test_server_ctrl so it can clean up!
+stop_node(Slave) ->
+    %% Cover data cannot be collected from a shielded node, or from a
+    %% node running a different emulator version.
+    Nocover = is_shielded(Slave) orelse not same_version(Slave),
+    case is_cover() of
+	true when not Nocover ->
+	    Sticky = unstick_all_sticky(Slave),
+	    cover:stop(Slave),
+	    stick_all_sticky(Slave,Sticky);
+	_ ->
+	    ok
+    end,
+    %% Tell the controller the node is going away, then wait for its verdict.
+    group_leader() ! {sync_apply,self(),{test_server_ctrl,stop_node,[Slave]}},
+    Result = receive {sync_result,R} -> R end,
+    case Result of
+	ok ->
+	    erlang:monitor_node(Slave, true),
+	    slave:stop(Slave),
+	    receive
+		{nodedown, Slave} ->
+		    format(minor, "Stopped slave node: ~p", [Slave]),
+		    format(major, "=node_stop ~p", [Slave]),
+		    true
+	    after 30000 ->
+		    %% Demonitor and flush, so that a node monitor is not
+		    %% left dangling — it would otherwise deliver a stray
+		    %% {nodedown,Slave} message to this process later.
+		    erlang:monitor_node(Slave, false),
+		    receive {nodedown, Slave} -> ok after 0 -> ok end,
+		    format("=== WARNING: Node ~p does not seem to terminate.",
+			   [Slave]),
+		    false
+	    end;
+	{error, _Reason} ->
+	    %% Either, the node is already dead or it was started
+	    %% with the {cleanup,false} option, or it was started
+	    %% in some other way than test_server:start_node/3
+	    format("=== WARNING: Attempt to stop a nonexisting slavenode (~p)~n"
+		   "=== Trying to kill it anyway!!!",
+		   [Slave]),
+	    case net_adm:ping(Slave) of
+		pong ->
+		    slave:stop(Slave),
+		    true;
+		pang ->
+		    false
+	    end
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% is_release_available(Release) -> true | false
+%% Release -> string()
+%%
+%% Test if a release (such as "r10b") is available to be
+%% started using start_node/3.
+%% Delegated synchronously to test_server_ctrl via the group leader.
+
+is_release_available(Release) ->
+    group_leader() ! {sync_apply,
+		      self(),
+		      {test_server_ctrl,is_release_available,[Release]}},
+    receive {sync_result,R} -> R end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% run_on_shielded_node(Fun, CArgs) -> term()
+%% Fun -> function()
+%% CArg -> list()
+%%
+%%
+%% Fun is executed in a process on a temporarily created
+%% hidden node. Communication with the job process goes
+%% via a job proxy process on the hidden node, i.e. the
+%% group leader of the test case process is the job proxy
+%% process. This makes it possible to start nodes from the
+%% hidden node that are unaware of the test server node.
+%% Without the job proxy process all processes would have
+%% a process residing on the test_server node as group_leader.
+%%
+%% Fun - Function to execute
+%% CArg - Extra command line arguments to use when starting
+%% the shielded node.
+%%
+%% If Fun is successfully executed, the result is returned.
+%%
+
+run_on_shielded_node(Fun, CArgs) when is_function(Fun), is_list(CArgs) ->
+    %% Name the node after the current timestamp; note the
+    %% "shielded_node" prefix is what is_shielded/1 keys on.
+    {A,B,C} = now(),
+    Name = "shielded_node-" ++ integer_to_list(A) ++ "-" ++ integer_to_list(B)
+	++ "-" ++ integer_to_list(C),
+    Node = case start_node(Name, slave, [{args, "-hidden " ++ CArgs}]) of
+	       {ok, N} -> N;
+	       Err -> fail({failed_to_start_shielded_node, Err})
+	   end,
+    Master = self(),
+    Ref = make_ref(),
+    %% The slave process sets up the job proxy, runs Fun once when
+    %% triggered, then sleeps forever (it dies with the node).
+    Slave = spawn(Node,
+		  fun () ->
+			  start_job_proxy(),
+			  receive
+			      Ref ->
+				  Master ! {Ref, Fun()}
+			  end,
+			  receive after infinity -> infinity end
+		  end),
+    MRef = erlang:monitor(process, Slave),
+    Slave ! Ref,
+    receive
+	{'DOWN', MRef, _, _, Info} ->
+	    %% Slave died before producing a result.
+	    stop_node(Node),
+	    fail(Info);
+	{Ref, Res} ->
+	    %% Stop the node, then consume the DOWN for the slave
+	    %% process before returning.
+	    stop_node(Node),
+	    receive
+		{'DOWN', MRef, _, _, _} ->
+		    Res
+	    end
+    end.
+
+%% Return true if Name, or the local node's name, starts with
+%% "shielded_node" — i.e. either end of the call is shielded.
+is_shielded(Name) ->
+    IsShieldedName = fun("shielded_node" ++ _) -> true;
+                        (_) -> false
+                     end,
+    IsShieldedName(cast_to_list(Name))
+        orelse IsShieldedName(atom_to_list(node())).
+
+%% True if the remote node Name runs the same emulator version as the
+%% local node. Uses rpc; if the node is unreachable the rpc returns
+%% {badrpc,_} which simply compares unequal (false).
+same_version(Name) ->
+    ThisVersion = erlang:system_info(version),
+    OtherVersion = rpc:call(Name, erlang, system_info, [version]),
+    ThisVersion =:= OtherVersion.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% temp_name(Stem) -> string()
+%% Stem = string()
+%%
+%% Create a unique file name, based on (starting with) Stem.
+%% A filename of the form <Stem><Number> is generated, and the
+%% function checks that that file doesn't already exist.
+%% NOTE(review): uniqueness relies on erlang:now/0 changing between
+%% calls plus a retry loop; assumes the directory part of Stem exists,
+%% otherwise file:list_dir/1 fails and the match below crashes.
+temp_name(Stem) ->
+    {A,B,C} = erlang:now(),
+    RandomNum = A bxor B bxor C,
+    RandomName = Stem ++ integer_to_list(RandomNum),
+    {ok,Files} = file:list_dir(filename:dirname(Stem)),
+    case lists:member(RandomName,Files) of
+	true ->
+	    %% oh, already exists - bad luck. Try again.
+	    temp_name(Stem); %% recursively try again
+	false ->
+	    RandomName
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% app_test/1
+%%
+%% Checks an application's .app file consistency (delegated to
+%% test_server_sup:app_test/2). Mode defaults to 'pedantic'.
+%% Skipped entirely on OSE targets.
+app_test(App) ->
+    app_test(App, pedantic).
+app_test(App, Mode) ->
+    case os:type() of
+	{ose,_} ->
+	    Comment = "Skipping app_test on OSE",
+	    comment(Comment), % in case user ignores the return value
+	    {skip,Comment};
+	_other ->
+	    test_server_sup:app_test(App, Mode)
+    end.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% is_native(Mod) -> true | false
+%%
+%% Checks whether the module is natively compiled or not.
+%% A natively compiled module reports a non-empty native_addresses
+%% list; the catch maps a missing module or item to false.
+
+is_native(Mod) ->
+    case catch Mod:module_info(native_addresses) of
+	[_|_] -> true;
+	_Other -> false
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% comment(String) -> ok
+%%
+%% The given String will occur in the comment field
+%% of the table on the test suite result page. If
+%% called several times, only the last comment is
+%% printed.
+%% comment/1 is also overwritten by the return value
+%% {comment,Comment} or fail/1 (which prints Reason
+%% as a comment).
+%% The comment is delivered to the job process (our group leader),
+%% which owns the result page.
+comment(String) ->
+    group_leader() ! {comment,String},
+    ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% os_type() -> OsType
+%%
+%% Returns the OsType of the target node. OsType is
+%% the same as returned from os:type()
+%% (the target may be a different machine than the controller host).
+os_type() ->
+    test_server_ctrl:get_target_os_type().
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% is_cover() -> boolean()
+%%
+%% Returns true if the cover server process is registered on this
+%% node, i.e. the cover tool is running, otherwise false.
+is_cover() ->
+    whereis(cover_server) =/= undefined.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% is_debug() -> boolean()
+%%
+%% Returns true if the emulator is debug-compiled, false otherwise.
+%% Prefers the debug_compiled system_info item; on emulators that do
+%% not support it (the catch yields {'EXIT',_}), falls back to
+%% searching the system version banner for "debug".
+is_debug() ->
+    case catch erlang:system_info(debug_compiled) of
+	{'EXIT', _} ->
+	    case string:str(erlang:system_info(system_version), "debug") of
+		Int when is_integer(Int), Int > 0 -> true;
+		_ -> false
+	    end;
+	Res ->
+	    Res
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% has_lock_checking() -> boolean()
+%%
+%% Returns true if the emulator has lock checking enabled, false otherwise.
+%% Emulators that do not recognize the 'lock_checking' item raise
+%% badarg, which is mapped to false. Uses try/catch rather than the
+%% old-style catch, which would also have turned a thrown value into
+%% the result.
+has_lock_checking() ->
+    try
+        erlang:system_info(lock_checking)
+    catch
+        error:_ -> false
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% has_superfluous_schedulers() -> boolean()
+%%
+%% Returns true if the emulator has more scheduler threads than logical
+%% processors, false otherwise.
+%% The catch covers emulators where either system_info item is
+%% unsupported or logical_processors is 'unknown' (not an integer).
+has_superfluous_schedulers() ->
+    case catch {erlang:system_info(schedulers),
+		erlang:system_info(logical_processors)} of
+	{S, P} when is_integer(S), is_integer(P), S > P -> true;
+	_ -> false
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% is_commercial() -> boolean()
+%%
+%% Returns true if the current emulator is commercially supported.
+%% (The emulator will not have "[source]" in its start-up message.)
+%% We might want to do more tests on a commercial platform, for instance
+%% ensuring that all applications have documentation).
+is_commercial() ->
+    SystemVersion = erlang:system_info(system_version),
+    %% string:str/2 returns 0 when "source" does not occur in the
+    %% banner, which marks a commercial build.
+    string:str(SystemVersion, "source") =:= 0.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% DEBUGGER INTERFACE %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% purify_is_running() -> false|true
+%%
+%% Tests if Purify is currently running.
+%% On emulators without the error_checker instrumentation the
+%% system_info call fails and the catch maps that to false.
+
+purify_is_running() ->
+    case catch erlang:system_info({error_checker, running}) of
+	{'EXIT', _} -> false;
+	Res -> Res
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% purify_new_leaks() -> false|BytesLeaked
+%% BytesLeaked = integer()
+%%
+%% Checks for new memory leaks if Purify is active.
+%% Returns the number of bytes leaked, or false if Purify
+%% is not running (the system_info call fails and is caught).
+purify_new_leaks() ->
+    case catch erlang:system_info({error_checker, memory}) of
+	{'EXIT', _} -> false;
+	Leaked when is_integer(Leaked) -> Leaked
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% purify_new_fds_inuse() -> false|FdsInuse
+%% FdsInuse = integer()
+%%
+%% Checks for new file descriptors in use.
+%% Returns the number of new file descriptors in use, or false
+%% if Purify is not running (the system_info call fails and is caught).
+purify_new_fds_inuse() ->
+    case catch erlang:system_info({error_checker, fd}) of
+	{'EXIT', _} -> false;
+	Inuse when is_integer(Inuse) -> Inuse
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% purify_format(Format, Args) -> ok
+%% Format = string()
+%% Args = lists()
+%%
+%% Outputs the formatted string to Purify's logfile,if Purify is active.
+%% The formatted string is smuggled through the error_checker
+%% system_info item; the catch swallows the failure when Purify is
+%% not running, so this is always a best-effort no-crash call.
+purify_format(Format, Args) ->
+    (catch erlang:system_info({error_checker, io_lib:format(Format, Args)})),
+    ok.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Generic send functions for communication with host
+%%
+%% Apply MFA on the host and deliver {sync_result,_} (or, when Proxy
+%% is a pid, {sync_result_proxy,From,_} via the proxy) to From.
+%% Local target: apply directly. Remote target: forward over the job
+%% socket and wait for the host's reply.
+sync_local_or_remote_apply(Proxy,From,{M,F,A} = MFA) ->
+    case get(test_server_job_sock) of
+	undefined ->
+	    %% i'm a local target
+	    Result = apply(M,F,A),
+	    if is_pid(Proxy) -> Proxy ! {sync_result_proxy,From,Result};
+	       true -> From ! {sync_result,Result}
+	    end;
+	JobSock ->
+	    %% i'm a remote target
+	    request(JobSock,{sync_apply,MFA}),
+	    {sync_result,Result} = recv(JobSock),
+	    if is_pid(Proxy) -> Proxy ! {sync_result_proxy,From,Result};
+	       true -> From ! {sync_result,Result}
+	    end
+    end.
+%% Fire-and-forget variant of sync_local_or_remote_apply/3: apply MFA
+%% locally, or ship it to the host over the job socket, ignoring the
+%% result in both cases.
+local_or_remote_apply({M,F,A} = MFA) ->
+    case get(test_server_job_sock) of
+	undefined ->
+	    %% i'm a local target
+	    apply(M,F,A),
+	    ok;
+	JobSock ->
+	    %% i'm a remote target
+	    request(JobSock,{apply,MFA}),
+	    ok
+    end.
+
+%% Send a term to the host: tag byte 1 followed by the term in
+%% external format (the counterpart of the <<1,_/binary>> clause
+%% in recv/1).
+request(Sock,Request) ->
+    gen_tcp:send(Sock,<<1,(term_to_binary(Request))/binary>>).
+
+%%
+%% Generic receive function for communication with host
+%%
+%% Tag byte 1 means an encoded term (decode it), tag byte 0 means a
+%% raw binary (return as-is). A closed socket terminates the caller
+%% with reason connection_lost.
+recv(Sock) ->
+    case gen_tcp:recv(Sock,0) of
+	{error,closed} ->
+	    gen_tcp:close(Sock),
+	    exit(connection_lost);
+	{ok,<<1,Request/binary>>} ->
+	    binary_to_term(Request);
+	{ok,<<0,B/binary>>} ->
+	    B
+    end.
diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl
new file mode 100644
index 0000000000..667d0cc051
--- /dev/null
+++ b/lib/test_server/src/test_server_ctrl.erl
@@ -0,0 +1,5253 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(test_server_ctrl).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The Erlang Test Server %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% MODULE DEPENDENCIES:
+%% HARD TO REMOVE: erlang, lists, io_lib, gen_server, file, io, string,
+%% code, ets, rpc, gen_tcp, inet, erl_tar, sets,
+%% test_server, test_server_sup, test_server_node
+%% EASIER TO REMOVE: filename, filelib, lib, re
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% ARCHITECTURE
+%%
+%% The Erlang Test Server can be run on the target machine (local target)
+%% or towards a remote target. The execution flow is mainly the same in
+%% both cases, but with a remote target the test cases are (obviously)
+%% executed on the target machine. Host and target communicate over
+%% socket connections because the host should not be introduced as an
+%% additional node in the distributed erlang system in which the test
+%% cases are run.
+%%
+%%
+%% Local Target:
+%% =============
+%%
+%% -----
+%% | | test_server_ctrl ({global,test_server})
+%% ----- (test_server_ctrl.erl)
+%% |
+%% |
+%% -----
+%% | | JobProc
+%% ----- (test_server_ctrl.erl and test_server.erl)
+%% |
+%% |
+%% -----
+%% | | CaseProc
+%% ----- (test_server.erl)
+%%
+%%
+%%
+%% test_server_ctrl is the main process in the system. It is a registered
+%% process, and it will always be alive when testing is ongoing.
+%% test_server_ctrl initiates testing and monitors JobProc(s).
+%%
+%% When target is local, and Test Server is *not* being used by a framework
+%% application (where it might cause duplicate name problems in a distributed
+%% test environment), the process is globally registered as 'test_server'
+%% to be able to simulate the {global,test_server} process on a remote target.
+%%
+%% JobProc is spawned for each 'job' added to the test_server_ctrl.
+%% A job can mean one test case, one test suite or one spec.
+%% JobProc creates and writes logs and presents results from testing.
+%% JobProc is the group leader for CaseProc.
+%%
+%% CaseProc is spawned for each test case. It runs the test case and
+%% sends results and any other information to its group leader - JobProc.
+%%
+%%
+%%
+%% Remote Target:
+%% ==============
+%%
+%% HOST TARGET
+%%
+%% ----- MainSock -----
+%% test_server_ctrl | |- - - - - - -| | {global,test_server}
+%% (test_server_ctrl.erl) ----- ----- (test_server.erl)
+%% | |
+%% | |
+%% ----- JobSock -----
+%% JobProcH | |- - - - - - -| | JobProcT
+%% (test_server_ctrl.erl) ----- ----- (test_server.erl)
+%% |
+%% |
+%% -----
+%% | | CaseProc
+%% ----- (test_server.erl)
+%%
+%%
+%%
+%%
+%% A separate test_server process only exists when target is remote. It
+%% is then the main process on target. It is started when test_server_ctrl
+%% is started, and a socket connection is established between
+%% test_server_ctrl and test_server. The following information can be sent
+%% over MainSock:
+%%
+%% HOST TARGET
+%% -> {target_info, TargetInfo} (during initiation)
+%% <- {job_proc_killed,Name,Reason} (if a JobProcT dies unexpectedly)
+%% -> {job,Port,Name} (to start a new JobProcT)
+%%
+%%
+%% When target is remote, JobProc is split into two processes: JobProcH
+%% executing on Host and JobProcT executing on Target. (The two processes
+%% execute the same code as JobProc does when target is local.) JobProcH
+%% and JobProcT communicates over a socket connection. The following
+%% information can be sent over JobSock:
+%%
+%% HOST TARGET
+%% -> {test_case, Case} To start a new test case
+%% -> {beam,Mod} .beam file as binary to be loaded
+%% on target, e.g. a test suite
+%% -> {datadir,Tarfile} Content of the datadir for a test suite
+%% <- {apply,MFA} MFA to be applied on host, ignore return;
+%% (apply is used for printing information in
+%% log or console)
+%% <- {sync_apply,MFA} MFA to be applied on host, wait for return
+%% (used for starting and stopping slave nodes)
+%% -> {sync_apply,MFA} MFA to be applied on target, wait for return
+%% (used for cover compiling and analysing)
+%% <-> {sync_result,Result} Return value from sync_apply
+%% <- {test_case_result,Result} When a test case is finished
+%% <- {crash_dumps,Tarfile} When a test case is finished
+%% -> job_done When a job is finished
+%% <- {privdir,Privdir} When a job is finished
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+
+%%% SUPERVISOR INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([start/0, start/1, start_link/1, stop/0]).
+
+%%% OPERATOR INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([add_spec/1, add_dir/2, add_dir/3]).
+-export([add_module/1, add_module/2, add_case/2, add_case/3, add_cases/2,
+ add_cases/3]).
+-export([add_dir_with_skip/3, add_dir_with_skip/4, add_tests_with_skip/3]).
+-export([add_module_with_skip/2, add_module_with_skip/3,
+ add_case_with_skip/3, add_case_with_skip/4,
+ add_cases_with_skip/3, add_cases_with_skip/4]).
+-export([jobs/0, run_test/1, wait_finish/0, idle_notify/1,
+ abort_current_testcase/1, abort/0]).
+-export([start_get_totals/1, stop_get_totals/0]).
+-export([get_levels/0, set_levels/3]).
+-export([multiply_timetraps/1]).
+-export([cover/2, cover/3, cover/7,
+ cross_cover_analyse/1, cross_cover_analyse/2, trc/1, stop_trace/0]).
+-export([testcase_callback/1]).
+-export([set_random_seed/1]).
+
+%%% TEST_SERVER INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([output/2, print/2, print/3, print_timestamp/2]).
+-export([start_node/3, stop_node/1, wait_for_node/1, is_release_available/1]).
+-export([format/1, format/2, format/3]).
+-export([get_target_info/0]).
+-export([get_hosts/0]).
+-export([get_target_os_type/0]).
+-export([node_started/1]).
+
+%%% DEBUGGER INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([i/0, p/1, p/3, pi/2, pi/4, t/0, t/1]).
+
+%%% PRIVATE EXPORTED %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([init/1, terminate/2]).
+-export([handle_call/3, handle_cast/2, handle_info/2]).
+-export([do_test_cases/4]).
+-export([do_spec/2, do_spec_list/2]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-include("test_server_internal.hrl").
+-include_lib("kernel/include/file.hrl").
+-define(suite_ext, "_SUITE").
+-define(log_ext, ".log.html").
+-define(src_listing_ext, ".src.html").
+-define(logdir_ext, ".logs").
+-define(data_dir_suffix, "_data/").
+-define(suitelog_name, "suite.log").
+-define(coverlog_name, "cover.html").
+-define(cross_coverlog_name, "cross_cover.html").
+-define(cover_total, "total_cover.log").
+-define(last_file, "last_name").
+-define(last_link, "last_link").
+-define(last_test, "last_test").
+-define(html_ext, ".html").
+-define(cross_cover_file, "cross.cover").
+-define(now, erlang:now()).
+
+-define(pl2a(M), test_server_sup:package_atom(M)).
+-define(void_fun, fun() -> ok end).
+-define(mod_result(X), if X == skip -> skipped;
+ X == auto_skip -> skipped;
+ true -> X end).
+
+-record(state,{jobs=[],levels={1,19,10},multiply_timetraps=1,finish=false,
+ target_info, trc=false, cover=false, wait_for_node=[],
+ testcase_callback=undefined, idle_notify=[],
+ get_totals=false, random_seed=undefined}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% OPERATOR INTERFACE
+
+%% add_dir(Name, Dir | Dirs [, Pattern]) — queue a job running all
+%% suites in one or more directories (optionally filtered by Pattern).
+%% Name/Dir/Pattern may be atoms or strings; cast_to_list normalizes.
+add_dir(Name, Job=[Dir|_Dirs]) when is_list(Dir) ->
+    add_job(cast_to_list(Name),
+	    lists:map(fun(D)-> {dir,cast_to_list(D)} end, Job));
+add_dir(Name, Dir) ->
+    add_job(cast_to_list(Name), {dir,cast_to_list(Dir)}).
+
+add_dir(Name, Job=[Dir|_Dirs], Pattern) when is_list(Dir) ->
+    add_job(cast_to_list(Name),
+	    lists:map(fun(D)-> {dir,cast_to_list(D),
+				cast_to_list(Pattern)} end, Job));
+add_dir(Name, Dir, Pattern) ->
+    add_job(cast_to_list(Name), {dir,cast_to_list(Dir),cast_to_list(Pattern)}).
+
+%% add_module(Mod | Name, Mods) — queue a job running all cases of one
+%% or several suite modules.
+add_module(Mod) when is_atom(Mod) ->
+    add_job(atom_to_list(Mod), {Mod,all}).
+add_module(Name, Mods) when is_list(Mods) ->
+    add_job(cast_to_list(Name), lists:map(fun(Mod) -> {Mod,all} end, Mods)).
+
+%% add_case(s) — queue a job running a single case, or a list of
+%% cases, from one module. The 3/4-arity forms take an explicit
+%% job name.
+add_case(Mod, Case) when is_atom(Mod), is_atom(Case) ->
+    add_job(atom_to_list(Mod), {Mod,Case}).
+
+add_case(Name, Mod, Case) when is_atom(Mod), is_atom(Case) ->
+    add_job(Name, {Mod,Case}).
+
+add_cases(Mod, Cases) when is_atom(Mod), is_list(Cases) ->
+    add_job(atom_to_list(Mod), {Mod,Cases}).
+
+add_cases(Name, Mod, Cases) when is_atom(Mod), is_list(Cases) ->
+    add_job(Name, {Mod,Cases}).
+
+%% add_spec(Spec) — queue a job defined by a test specification file;
+%% the job is named after the file (".spec" extension stripped).
+add_spec(Spec) ->
+    Name = filename:rootname(Spec, ".spec"),
+    case filelib:is_file(Spec) of
+	true -> add_job(Name, {spec,Spec});
+	false -> {error,nofile}
+    end.
+
+%% This version of the interface is to be used if there are
+%% suites or cases that should be skipped. Each function mirrors its
+%% counterpart above, with a trailing Skip argument that is passed
+%% straight through to add_job/3.
+
+add_dir_with_skip(Name, Job=[Dir|_Dirs], Skip) when is_list(Dir) ->
+    add_job(cast_to_list(Name),
+	    lists:map(fun(D)-> {dir,cast_to_list(D)} end, Job),
+	    Skip);
+add_dir_with_skip(Name, Dir, Skip) ->
+    add_job(cast_to_list(Name), {dir,cast_to_list(Dir)}, Skip).
+
+add_dir_with_skip(Name, Job=[Dir|_Dirs], Pattern, Skip) when is_list(Dir) ->
+    add_job(cast_to_list(Name),
+	    lists:map(fun(D)-> {dir,cast_to_list(D),
+				cast_to_list(Pattern)} end, Job),
+	    Skip);
+add_dir_with_skip(Name, Dir, Pattern, Skip) ->
+    add_job(cast_to_list(Name),
+	    {dir,cast_to_list(Dir),cast_to_list(Pattern)}, Skip).
+
+add_module_with_skip(Mod, Skip) when is_atom(Mod) ->
+    add_job(atom_to_list(Mod), {Mod,all}, Skip).
+
+add_module_with_skip(Name, Mods, Skip) when is_list(Mods) ->
+    add_job(cast_to_list(Name), lists:map(fun(Mod) -> {Mod,all} end, Mods), Skip).
+
+add_case_with_skip(Mod, Case, Skip) when is_atom(Mod), is_atom(Case) ->
+    add_job(atom_to_list(Mod), {Mod,Case}, Skip).
+
+add_case_with_skip(Name, Mod, Case, Skip) when is_atom(Mod), is_atom(Case) ->
+    add_job(Name, {Mod,Case}, Skip).
+
+add_cases_with_skip(Mod, Cases, Skip) when is_atom(Mod), is_list(Cases) ->
+    add_job(atom_to_list(Mod), {Mod,Cases}, Skip).
+
+add_cases_with_skip(Name, Mod, Cases, Skip) when is_atom(Mod), is_list(Cases) ->
+    add_job(Name, {Mod,Cases}, Skip).
+
+%% Queue one job per {Dir,Mods,Cases} test term, normalizing the
+%% 'all' wildcards into the {dir,_} / {Mod,all} job forms.
+add_tests_with_skip(LogDir, Tests, Skip) ->
+    add_job(LogDir,
+	    lists:map(fun({Dir,all,all}) ->
+			      {Dir,{dir,Dir}};
+			 ({Dir,Mods,all}) ->
+			      {Dir,lists:map(fun(M) -> {M,all} end, Mods)};
+			 ({Dir,Mod,Cases}) ->
+			      {Dir,{Mod,Cases}}
+		      end, Tests),
+	    Skip).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% COMMAND LINE INTERFACE
+
+%% parse_cmd_line(Cmds) ->
+%%     {SpecList,Name,Param,TraceFile|false,CoverInfo|false,TCCB}
+%% Parses the command line handed over by the `ts' module. The
+%% seven-arity worker threads six accumulators through the argument
+%% list: collected spec terms, candidate job names, the target
+%% parameter, trace file, cover info and testcase callback. An
+%% unrecognized argument prints a diagnostic and halts the emulator.
+parse_cmd_line(Cmds) ->
+    parse_cmd_line(Cmds, [], [], local, false, false, undefined).
+
+%% 'SPEC' File: consult the spec file and merge its terms; its rootname
+%% becomes a job-name candidate. A bad file is reported but skipped.
+parse_cmd_line(['SPEC',Spec|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    case file:consult(Spec) of
+	{ok, TermList} ->
+	    Name = filename:rootname(Spec),
+	    parse_cmd_line(Cmds, TermList++SpecList, [Name|Names], Param,
+			   Trc, Cov, TCCB);
+	{error,Reason} ->
+	    io:format("Can't open ~s: ~p\n",
+		      [cast_to_list(Spec), file:format_error(Reason)]),
+	    parse_cmd_line(Cmds, SpecList, Names, Param, Trc, Cov, TCCB)
+    end;
+%% 'NAME' overrides any derived job name (see final clause).
+parse_cmd_line(['NAME',Name|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, SpecList, [{name,Name}|Names], Param, Trc, Cov, TCCB);
+parse_cmd_line(['SKIPMOD',Mod|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, [{skip,{Mod,"by command line"}}|SpecList], Names,
+		   Param, Trc, Cov, TCCB);
+parse_cmd_line(['SKIPCASE',Mod,Case|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, [{skip,{Mod,Case,"by command line"}}|SpecList], Names,
+		   Param, Trc, Cov, TCCB);
+parse_cmd_line(['DIR',Dir|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    Name = cast_to_list(filename:basename(Dir)),
+    parse_cmd_line(Cmds, [{topcase,{dir,Name}}|SpecList], [Name|Names],
+		   Param, Trc, Cov, TCCB);
+parse_cmd_line(['MODULE',Mod|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, [{topcase,{Mod,all}}|SpecList], [Mod|Names],
+		   Param, Trc, Cov, TCCB);
+parse_cmd_line(['CASE',Mod,Case|Cmds], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, [{topcase,{Mod,Case}}|SpecList], [Mod|Names],
+		   Param, Trc, Cov, TCCB);
+%% The three options below each keep only the last occurrence.
+parse_cmd_line(['PARAMETERS',Param|Cmds], SpecList, Names, _Param, Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, SpecList, Names, Param, Trc, Cov, TCCB);
+parse_cmd_line(['TRACE',Trc|Cmds], SpecList, Names, Param, _Trc, Cov, TCCB) ->
+    parse_cmd_line(Cmds, SpecList, Names, Param, Trc, Cov, TCCB);
+parse_cmd_line(['COVER',App,CF,Analyse|Cmds], SpecList, Names, Param, Trc, _Cov, TCCB) ->
+    parse_cmd_line(Cmds, SpecList, Names, Param, Trc, {{App,CF}, Analyse}, TCCB);
+parse_cmd_line(['TESTCASE_CALLBACK',Mod,Func|Cmds], SpecList, Names, Param, Trc, Cov, _) ->
+    parse_cmd_line(Cmds, SpecList, Names, Param, Trc, Cov, {Mod,Func});
+%% Anything else is a usage error: report and terminate the emulator.
+parse_cmd_line([Obj|_Cmds], _SpecList, _Names, _Param, _Trc, _Cov, _TCCB) ->
+    io:format("~p: Bad argument: ~p\n", [?MODULE,Obj]),
+    io:format(" Use the `ts' module to start tests.\n", []),
+    io:format(" (If you ARE using `ts', there is a bug in `ts'.)\n", []),
+    halt(1);
+%% End of arguments: pick the job name — an explicit {name,N} wins,
+%% otherwise the most recently collected candidate ('suite' if none).
+parse_cmd_line([], SpecList, Names, Param, Trc, Cov, TCCB) ->
+    NameList = lists:reverse(Names, [suite]),
+    Name = case lists:keysearch(name, 1, NameList) of
+	       {value,{name,N}} -> N;
+	       false -> hd(NameList)
+	   end,
+    {lists:reverse(SpecList), cast_to_list(Name), Param, Trc, Cov, TCCB}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% cast_to_list(X) -> string()
+%% X = list() | atom() | void()
+%% Renders X as a string: lists pass through untouched, atoms go via
+%% atom_to_list/1, and any other term is formatted with "~w".
+
+cast_to_list(List) when is_list(List) ->
+    List;
+cast_to_list(Atom) when is_atom(Atom) ->
+    atom_to_list(Atom);
+cast_to_list(Term) ->
+    lists:flatten(io_lib:format("~w", [Term])).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% START INTERFACE
+
+%% start() / start(Param) / start_link(Param)
+%% Start the controller gen_server, locally registered as ?MODULE.
+%% Param is 'local' or a parameter file name (see contact_main_target/1)
+%% and is passed on to init/1. The gen_server:start result is returned
+%% unchanged — the previous `case` that re-built {ok,Pid} and passed
+%% every Other through was a no-op and has been removed.
+
+start() ->
+    start(local).
+
+start(Param) ->
+    gen_server:start({local,?MODULE}, ?MODULE, [Param], []).
+
+start_link(Param) ->
+    gen_server:start_link({local,?MODULE}, ?MODULE, [Param], []).
+
+%% run_test(CommandLine) -> ok
+%% Entry point used by the `ts' module: parses the command line,
+%% starts and links to the controller, switches on tracing, cover and
+%% the testcase callback as requested, queues the parsed spec as one
+%% job and finally blocks until the controller has run everything and
+%% exited (wait_finish/0).
+run_test(CommandLine) ->
+    process_flag(trap_exit,true),
+    {SpecList,Name,Param,Trc,Cov,TCCB} = parse_cmd_line(CommandLine),
+    {ok,_TSPid} = start_link(Param),
+    %% Trc/Cov are 'false' when the option was not given.
+    case Trc of
+	false -> ok;
+	File -> trc(File)
+    end,
+    case Cov of
+	false -> ok;
+	{{App,CoverFile},Analyse} -> cover(App, maybe_file(CoverFile), Analyse)
+    end,
+    testcase_callback(TCCB),
+    add_job(Name, {command_line,SpecList}),
+
+    %% adding of jobs involves file i/o which may take long time
+    %% when running a nfs mounted file system (VxWorks).
+    case controller_call(get_target_info) of
+	#target_info{os_family=vxworks} ->
+	    receive after 30000 -> ready_to_wait end;
+	_ ->
+	    wait_now
+    end,
+    wait_finish().
+
+%% maybe_file(CoverFile) -> none | string()
+%% Converts the CoverFile atom to a string, except for the special
+%% value 'none' which is passed through unchanged.
+maybe_file(CoverFile) ->
+    case CoverFile of
+	none -> none;
+	File -> atom_to_list(File)
+    end.
+
+%% idle_notify(Fun) -> ControllerPid
+%% Subscribe to idleness: the controller calls Fun(Cli) when there are
+%% no jobs left (or immediately if it is already idle).
+idle_notify(Fun) ->
+    {ok, Pid} = controller_call({idle_notify,Fun}),
+    Pid.
+
+%% start_get_totals(Fun) -> ControllerPid
+%% Switch the controller into "count only" mode: subsequently added
+%% jobs are not executed; instead Fun(Cli, Result) receives the test
+%% case totals (see the add_job clause of handle_call/3).
+start_get_totals(Fun) ->
+    {ok, Pid} = controller_call({start_get_totals,Fun}),
+    Pid.
+
+%% Switch off the "count only" mode again.
+stop_get_totals() ->
+    ok = controller_call(stop_get_totals),
+    ok.
+
+%% wait_finish() -> ok
+%% Tell the controller to finish when all jobs are done, then block
+%% until its process has exited.
+wait_finish() ->
+    finish_and_wait_exit(true).
+
+%% abort_current_testcase(Reason) -> ok
+%% Ask the controller to kill the currently running test case.
+abort_current_testcase(Reason) ->
+    controller_call({abort_current_testcase,Reason}),
+    ok.
+
+%% abort() -> ok
+%% Tell the controller to abort after the current test, then block
+%% until its process has exited.
+abort() ->
+    finish_and_wait_exit(abort).
+
+%% Shared implementation of wait_finish/0 and abort/0 (previously two
+%% identical copies): send finish(Mode) (Mode = true | abort), link to
+%% the controller and wait for its 'EXIT' message, restoring this
+%% process' trap_exit flag afterwards.
+finish_and_wait_exit(Mode) ->
+    OldTrap = process_flag(trap_exit, true),
+    {ok, Pid} = finish(Mode),
+    link(Pid),
+    receive
+	{'EXIT',Pid,_} ->
+	    ok
+    end,
+    process_flag(trap_exit, OldTrap),
+    ok.
+
+%% Thin client API: each function below forwards one request to the
+%% controller process via controller_call/1,2.
+
+%% Stop when idle (Abort = true) or as soon as the current test is
+%% done (Abort = abort). Returns {ok,ControllerPid}.
+finish(Abort) ->
+    controller_call({finish,Abort}).
+
+%% Stop the controller immediately (cleanup done in terminate/2).
+stop() ->
+    controller_call(stop).
+
+%% Currently running jobs, as [{Name,Pid}].
+jobs() ->
+    controller_call(jobs).
+
+%% Get/set the {Show,Major,Minor} log detail thresholds.
+get_levels() ->
+    controller_call(get_levels).
+
+set_levels(Show, Major, Minor) ->
+    controller_call({set_levels,Show,Major,Minor}).
+
+%% Multiply all testcase timetraps by N (integer() | infinity).
+multiply_timetraps(N) ->
+    controller_call({multiply_timetraps,N}).
+
+%% Start a trace control node using patterns from TraceFile. A longer
+%% call timeout is used since starting the node may be slow.
+trc(TraceFile) ->
+    controller_call({trace,TraceFile}, 2*?ACCEPT_TIMEOUT).
+
+stop_trace() ->
+    controller_call(stop_trace).
+
+%% Async notification (from test_server_node) that a slave/peer node
+%% is fully started; handled in handle_cast/2.
+node_started(Node) ->
+    gen_server:cast(?MODULE, {node_started,Node}).
+
+%% Request cover compilation. The 2-arity version accepts either an
+%% application name (atom) or a cover specification file.
+cover(App, Analyse) when is_atom(App) ->
+    cover(App, none, Analyse);
+cover(CoverFile, Analyse) ->
+    cover(none, CoverFile, Analyse).
+cover(App, CoverFile, Analyse) ->
+    controller_call({cover,{App,CoverFile},Analyse}).
+cover(App, CoverFile, Exclude, Include, Cross, Export, Analyse) ->
+    controller_call({cover,{App,{CoverFile,Exclude,Include,Cross,Export}},Analyse}).
+
+%% Register a {Mod,Func} callback to run before/after every test case.
+testcase_callback(ModFunc) ->
+    controller_call({testcase_callback,ModFunc}).
+
+%% Seed used e.g. for shuffling test case order.
+set_random_seed(Seed) ->
+    controller_call({set_random_seed,Seed}).
+
+%% Reads the host list stored by set_hosts/1 from the process
+%% dictionary — only meaningful inside the controller process.
+get_hosts() ->
+    get(test_server_hosts).
+
+%% get_target_os_type() -> OsType
+%% OsType as returned by os:type/0. Falls back to the local os:type/0
+%% when no controller is registered (i.e. when called on the target
+%% node); otherwise asks the controller for the target info.
+get_target_os_type() ->
+    case whereis(?MODULE) of
+	undefined ->
+	    %% This is probably called on the target node
+	    os:type();
+	_pid ->
+	    %% This is called on the controller, e.g. from a
+	    %% specification clause of a test case
+	    #target_info{os_type=OsType} = controller_call(get_target_info),
+	    OsType
+    end.
+
+%%--------------------------------------------------------------------
+
+%% add_job(Name, TopCase [, Skip]) -> ok | {error,Reason}
+%% Queue a new test job in the controller. "." and ".." are mapped to
+%% readable suite names before the absolute log-directory base is
+%% derived from the name.
+add_job(Name, TopCase) ->
+    add_job(Name, TopCase, []).
+
+add_job(Name, TopCase, Skip) ->
+    SuiteName = case Name of
+		    "." -> "current_dir";
+		    ".." -> "parent_dir";
+		    _ -> Name
+		end,
+    LogDirBase = filename:absname(SuiteName),
+    controller_call({add_job,LogDirBase,SuiteName,TopCase,Skip}).
+
+%% controller_call(Arg [, Timeout]) -> term()
+%% gen_server:call wrapper towards the controller. A call to a
+%% missing/unregistered server is translated into
+%% exit(test_server_ctrl_not_running); any other 'EXIT' is re-raised.
+%% The 1-arity version (infinity timeout) now delegates to the
+%% 2-arity one instead of duplicating its body.
+controller_call(Arg) ->
+    controller_call(Arg, infinity).
+
+controller_call(Arg, Timeout) ->
+    case catch gen_server:call(?MODULE, Arg, Timeout) of
+	{'EXIT',{{badarg,_},{gen_server,call,_}}} ->
+	    exit(test_server_ctrl_not_running);
+	{'EXIT',Reason} ->
+	    exit(Reason);
+	Other ->
+	    Other
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% init([Mode])
+%% Mode = lazy | error_logger
+%% StateFile = string()
+%% ReadMode = ignore_errors | halt_on_errors
+%%
+%% init() is the init function of the test_server's gen_server.
+%% When Mode=error_logger: The init function of the test_server's gen_event
+%% event handler used as a replacement error_logger when running test_suites.
+%%
+%% The init function reads the test server state file, to see what test
+%% suites were running when the test server was last running, and which
+%% flags that were in effect. If no state file is found, or there are
+%% errors in it, defaults are used.
+%%
+%% Mode 'lazy' ignores (and resets to []) any jobs in the state file
+%%
+
+%% gen_server init/1 callback. Param = local | ParameterFile.
+%% Optionally enables call tracing (TEST_SERVER_CALL_TRACE env var),
+%% installs the test_server error-logger handler when SASL runs,
+%% copies the format_exception init argument into the application
+%% environment, removes old crash dumps and finally contacts the
+%% (local or remote) target. Failure to reach the target stops init.
+init([Param]) ->
+    case os:getenv("TEST_SERVER_CALL_TRACE") of
+	false ->
+	    ok;
+	"" ->
+	    ok;
+	TraceSpec ->
+	    test_server_sup:call_trace(TraceSpec)
+    end,
+    process_flag(trap_exit, true),
+    case lists:keysearch(sasl, 1, application:which_applications()) of
+	{value,_} ->
+	    test_server_h:install();
+	false ->
+	    ok
+    end,
+    %% copy format_exception setting from init arg to application environment
+    case init:get_argument(test_server_format_exception) of
+	{ok,[[TSFE]]} ->
+	    application:set_env(test_server, format_exception, list_to_atom(TSFE));
+	_ ->
+	    ok
+    end,
+    test_server_sup:cleanup_crash_dumps(),
+    State = #state{jobs=[],finish=false},
+    put(test_server_free_targets,[]),
+    case contact_main_target(Param) of
+	{ok,TI} ->
+	    %% slave_tab tracks slave/peer nodes, keyed on position 2
+	    ets:new(slave_tab, [named_table,set,public,{keypos,2}]),
+	    set_hosts([TI#target_info.host]),
+	    {ok,State#state{target_info=TI}};
+	{error,Reason} ->
+	    {stop,Reason}
+    end.
+
+
+%% If the test is to be run at a remote target, this function sets up
+%% a socket communication with the target.
+%% contact_main_target(local | ParameterFile) ->
+%%     {ok,#target_info{}} | {error,Reason}
+%% 'local': run on this node; unless a framework is configured via
+%% TEST_SERVER_FRAMEWORK, globally register this process as
+%% 'test_server' to stand in for the real target-side process.
+%% Otherwise: read the parameter file and start a remote main target.
+contact_main_target(local) ->
+    %% When used by a general framework, global registration of
+    %% test_server should not be required.
+    case os:getenv("TEST_SERVER_FRAMEWORK") of
+	false ->
+	    %% Local target! The global test_server process implemented by
+	    %% test_server.erl will not be started, so we simulate it by
+	    %% globally registering this process instead.
+	    global:sync(),
+	    case global:whereis_name(test_server) of
+		undefined ->
+		    global:register_name(test_server, self());
+		Pid ->
+		    %% Re-register only when the old registration lives
+		    %% on this very node (stale registration).
+		    case node() of
+			N when N == node(Pid) ->
+			    io:format(user, "Warning: test_server already running!\n", []),
+			    global:re_register_name(test_server,self());
+			_ ->
+			    ok
+		    end
+	    end;
+	_ ->
+	    ok
+    end,
+    TI = test_server:init_target_info(),
+    TargetHost = test_server_sup:hoststr(),
+    {ok,TI#target_info{where=local,
+		       host=TargetHost,
+		       naming=naming(),
+		       master=TargetHost}};
+
+contact_main_target(ParameterFile) ->
+    case read_parameters(ParameterFile) of
+	{ok,Par} ->
+	    case test_server_node:start_remote_main_target(Par) of
+		{ok,TI} ->
+		    {ok,TI};
+		{error,Error} ->
+		    {error,{could_not_start_main_target,Error}}
+	    end;
+	{error,Error} ->
+	    {error,{could_not_read_parameterfile,Error}}
+    end.
+
+%% read_parameters(File) -> {ok,#par{}} | {error,Reason}
+%% Consults the parameter file and folds its terms into a #par{}
+%% record. 'type' and 'target' are mandatory; defaults for master and
+%% cookie are filled in by the final clauses depending on the target
+%% type. Unknown terms are rejected.
+read_parameters(File) ->
+    case file:consult(File) of
+	{ok,Data} ->
+	    read_parameters(lists:flatten(Data), #par{naming=naming()});
+	Error ->
+	    Error
+    end.
+read_parameters([{type,Type}|Data], Par) -> % mandatory
+    read_parameters(Data, Par#par{type=Type});
+read_parameters([{target,Target}|Data], Par) -> % mandatory
+    read_parameters(Data, Par#par{target=cast_to_list(Target)});
+read_parameters([{slavetargets,SlaveTargets}|Data], Par) ->
+    read_parameters(Data, Par#par{slave_targets=SlaveTargets});
+read_parameters([{longnames,Bool}|Data], Par) ->
+    Naming = if Bool->"-name"; true->"-sname" end,
+    read_parameters(Data, Par#par{naming=Naming});
+read_parameters([{master,{Node,Cookie}}|Data], Par) ->
+    read_parameters(Data, Par#par{master=cast_to_list(Node),
+				  cookie=cast_to_list(Cookie)});
+read_parameters([Other|_Data], _Par) ->
+    {error,{illegal_parameter,Other}};
+%% End of input: verify mandatory fields, then derive defaults.
+read_parameters([], Par) when Par#par.type==undefined ->
+    {error, {missing_mandatory_parameter,type}};
+read_parameters([], Par) when Par#par.target==undefined ->
+    {error, {missing_mandatory_parameter,target}};
+read_parameters([], Par0) ->
+    Par =
+	case {Par0#par.type, Par0#par.master} of
+	    {ose, undefined} ->
+		%% Use this node as master and bootserver for target
+		%% and slave nodes
+		Par0#par{master = atom_to_list(node()),
+			 cookie = atom_to_list(erlang:get_cookie())};
+	    {ose, _Master} ->
+		%% Master for target and slave nodes was defined in parameterfile
+		Par0;
+	    _ ->
+		%% Use target as master for slave nodes,
+		%% (No master is used for target)
+		Par0#par{master="test_server@" ++ Par0#par.target}
+	end,
+    {ok,Par}.
+
+%% naming() -> "-name" | "-sname"
+%% Chooses long or short Erlang node names: a fully qualified local
+%% hostname (one containing a dot) means long names.
+naming() ->
+    Host = test_server_sup:hoststr(),
+    case lists:member($., Host) of
+	true -> "-name";
+	false -> "-sname"
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(kill_slavenodes, From, State) -> ok
+%%
+%% Kill all slave nodes that remain after a test case
+%% is completed.
+%%
+handle_call(kill_slavenodes, _From, State) ->
+ Nodes = test_server_node:kill_nodes(State#state.target_info),
+ {reply, Nodes, State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({set_hosts, HostList}, From, State) -> ok
+%%
+%% Set the global hostlist.
+%%
+handle_call({set_hosts, Hosts}, _From, State) ->
+ set_hosts(Hosts),
+ {reply, ok, State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(get_hosts, From, State) -> [Hosts]
+%%
+%% Returns the lists of hosts that the test server
+%% can use for slave nodes. This is primarily used
+%% for nodename generation.
+%%
+handle_call(get_hosts, _From, State) ->
+ Hosts = get_hosts(),
+ {reply, Hosts, State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({add_job,Dir,Name,TopCase,Skip}, _, State) ->
+%% ok | {error,Reason}
+%%
+%% Dir = string()
+%% Name = string()
+%% TopCase = term()
+%% Skip = [SkipItem]
+%% SkipItem = {Mod,Comment} | {Mod,Case,Comment} | {Mod,Cases,Comment}
+%% Mod = Case = atom()
+%% Comment = string()
+%% Cases = [Case]
+%%
+%% Adds a job to the job queue. The name of the job is Name. A log directory
+%% will be created in Dir/Name.logs. TopCase may be anything that
+%% collect_cases/3 accepts, plus the following:
+%%
+%% {spec,SpecName} executes the named test suite specification file. Commands
+%% in the file should be in the format accepted by do_spec_list/1.
+%%
+%% {command_line,SpecList} executes the list of specification instructions
+%% supplied, which should be in the format accepted by do_spec_list/1.
+
+handle_call({add_job,Dir,Name,TopCase,Skip}, _From, State) ->
+ LogDir = Dir ++ ?logdir_ext,
+ ExtraTools =
+ case State#state.cover of
+ false -> [];
+ {App,Analyse} -> [{cover,App,Analyse}]
+ end,
+ ExtraTools1 =
+ case State#state.random_seed of
+ undefined -> ExtraTools;
+ Seed -> [{random_seed,Seed}|ExtraTools]
+ end,
+ case lists:keysearch(Name, 1, State#state.jobs) of
+ false ->
+ case TopCase of
+ {spec,SpecName} ->
+ Pid = spawn_tester(
+ ?MODULE, do_spec,
+ [SpecName,State#state.multiply_timetraps],
+ LogDir, Name, State#state.levels,
+ State#state.testcase_callback, ExtraTools1),
+ NewJobs = [{Name,Pid}|State#state.jobs],
+ {reply, ok, State#state{jobs=NewJobs}};
+ {command_line,SpecList} ->
+ Pid = spawn_tester(
+ ?MODULE, do_spec_list,
+ [SpecList,State#state.multiply_timetraps],
+ LogDir, Name, State#state.levels,
+ State#state.testcase_callback, ExtraTools1),
+ NewJobs = [{Name,Pid}|State#state.jobs],
+ {reply, ok, State#state{jobs=NewJobs}};
+ TopCase ->
+ case State#state.get_totals of
+ {CliPid,Fun} ->
+ Result = count_test_cases(TopCase, Skip),
+ Fun(CliPid, Result),
+ {reply, ok, State};
+ _ ->
+ Cfg = make_config([]),
+ Pid = spawn_tester(
+ ?MODULE, do_test_cases,
+ [TopCase,Skip,Cfg,
+ State#state.multiply_timetraps],
+ LogDir, Name, State#state.levels,
+ State#state.testcase_callback, ExtraTools1),
+ NewJobs = [{Name,Pid}|State#state.jobs],
+ {reply, ok, State#state{jobs=NewJobs}}
+ end
+ end;
+ _ ->
+ {reply,{error,name_already_in_use},State}
+ end;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(jobs, _, State) -> JobList
+%% JobList = [{Name,Pid}, ...]
+%% Name = string()
+%% Pid = pid()
+%%
+%% Return the list of current jobs.
+
+handle_call(jobs, _From, State) ->
+ {reply,State#state.jobs,State};
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({abort_current_testcase,Reason}, _, State) -> Result
+%% Reason = term()
+%% Result = ok | {error,no_testcase_running}
+%%
+%% Attempts to abort the test case that's currently running.
+
+handle_call({abort_current_testcase,Reason}, _From, State) ->
+ case State#state.jobs of
+ [{_,Pid}|_] ->
+ Pid ! {abort_current_testcase,Reason,self()},
+ receive
+ {Pid,abort_current_testcase,Result} ->
+ {reply, Result, State}
+ after 10000 ->
+ {reply, {error,no_testcase_running}, State}
+ end;
+ _ ->
+ {reply, {error,no_testcase_running}, State}
+ end;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({finish,Fini}, _, State) -> {ok,Pid}
+%% Fini = true | abort
+%%
+%% Tells the test_server to stop as soon as there are no test suites
+%% running. Immediately if none are running. Abort is handled as soon
+%% as current test finishes.
+
+handle_call({finish,Fini}, _From, State) ->
+ case State#state.jobs of
+ [] ->
+ lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
+ State#state.idle_notify),
+ State2 = State#state{finish=false},
+ {stop,shutdown,{ok,self()}, State2};
+ _SomeJobs ->
+ State2 = State#state{finish=Fini},
+ {reply, {ok,self()}, State2}
+ end;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({idle_notify,Fun}, From, State) -> {ok,Pid}
+%%
+%% Lets a test client subscribe to receive a notification when the
+%% test server becomes idle (can be used to synchronize jobs).
+%% test_server calls Fun(From) when idle.
+
+handle_call({idle_notify,Fun}, {Cli,_Ref}, State) ->
+ case State#state.jobs of
+ [] ->
+ Fun(Cli),
+ {reply, {ok,self()}, State};
+ _ ->
+ Subscribed = State#state.idle_notify,
+ {reply, {ok,self()},
+ State#state{idle_notify=[{Cli,Fun}|Subscribed]}}
+ end;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(start_get_totals, From, State) -> {ok,Pid}
+%%
+%% Switch on the mode where the test server will only
+%% report back the number of tests it would execute
+%% given some subsequent jobs.
+
+handle_call({start_get_totals,Fun}, {Cli,_Ref}, State) ->
+ {reply, {ok,self()}, State#state{get_totals={Cli,Fun}}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(stop_get_totals, From, State) -> ok
+%%
+%% Switches off the mode where the test server only reports back the
+%% number of test cases a subsequent job would execute (see the
+%% start_get_totals clause above).
+
+handle_call(stop_get_totals, {_Cli,_Ref}, State) ->
+ {reply, ok, State#state{get_totals=false}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(get_levels, _, State) -> {Show,Major,Minor}
+%% Show = integer()
+%% Major = integer()
+%% Minor = integer()
+%%
+%% Returns a 3-tuple with the logging thresholds.
+%% All output and information from a test suite is tagged with a detail
+%% level. Lower values are more "important". Text that is output using
+%% io:format or similar is automatically tagged with detail level 50.
+%%
+%% All output with detail level:
+%% less or equal to Show is displayed on the screen (default 1)
+%% less or equal to Major is logged in the major log file (default 19)
+%% greater or equal to Minor is logged in the minor log files (default 10)
+
+handle_call(get_levels, _From, State) ->
+ {reply,State#state.levels,State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({set_levels,Show,Major,Minor}, _, State) -> ok
+%% Show = integer()
+%% Major = integer()
+%% Minor = integer()
+%%
+%% Sets the logging thresholds, see handle_call(get_levels,...) above.
+
+handle_call({set_levels,Show,Major,Minor}, _From, State) ->
+ {reply,ok,State#state{levels={Show,Major,Minor}}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({multiply_timetraps,N}, _, State) -> ok
+%% N = integer() | infinity
+%%
+%% Multiplies all timetraps set by test cases with N
+
+handle_call({multiply_timetraps,N}, _From, State) ->
+ {reply,ok,State#state{multiply_timetraps=N}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({trace,TraceFile}, _, State) -> ok | {error,Reason}
+%%
+%% Starts a separate node (trace control node) which
+%% starts tracing on target and all slave nodes
+%%
+%% TraceFile is a text file with elements of type
+%% {Trace,Mod,TracePattern}.
+%% {Trace,Mod,Func,TracePattern}.
+%% {Trace,Mod,Func,Arity,TracePattern}.
+%%
+%% Trace = tp | tpl; local or global call trace
+%% Mod,Func = atom(), Arity=integer(); defines what to trace
+%% TracePattern = [] | match_spec()
+%%
+%% The 'call' trace flag is set on all processes, and then
+%% the given trace patterns are set.
+
+handle_call({trace,TraceFile}, _From, State=#state{trc=false}) ->
+ TI = State#state.target_info,
+ case test_server_node:start_tracer_node(TraceFile, TI) of
+ {ok,Tracer} -> {reply,ok,State#state{trc=Tracer}};
+ Error -> {reply,Error,State}
+ end;
+handle_call({trace,_TraceFile}, _From, State) ->
+ {reply,{error,already_tracing},State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(stop_trace, _, State) -> ok | {error,Reason}
+%%
+%% Stops tracing on target and all slave nodes and
+%% terminates trace control node
+
+handle_call(stop_trace, _From, State=#state{trc=false}) ->
+ {reply,{error,not_tracing},State};
+handle_call(stop_trace, _From, State) ->
+ R = test_server_node:stop_tracer_node(State#state.trc),
+ {reply,R,State#state{trc=false}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({cover,App,Analyse}, _, State) -> ok | {error,Reason}
+%%
+%% All modules in application App are cover compiled
+%% Analyse indicates on which level the coverage should be analysed
+
+handle_call({cover,App,Analyse}, _From, State) ->
+ {reply,ok,State#state{cover={App,Analyse}}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({testcase_callback,{Mod,Func}}, _, State) -> ok | {error,Reason}
+%%
+%% Add a callback function that will be called before and after every
+%% test case (on the test case process):
+%%
+%% Mod:Func(Suite,TestCase,InitOrEnd,Config)
+%%
+%% InitOrEnd = init | 'end'.
+
+handle_call({testcase_callback,ModFunc}, _From, State) ->
+ case ModFunc of
+ {Mod,Func} ->
+ case code:is_loaded(Mod) of
+ {file,_} ->
+ ok;
+ false ->
+ code:load_file(Mod)
+ end,
+ case erlang:function_exported(Mod,Func,4) of
+ true ->
+ ok;
+ false ->
+ io:format(user,
+ "WARNING! Callback function ~w:~w/4 undefined.~n~n",
+ [Mod,Func])
+ end;
+ _ ->
+ ok
+ end,
+ {reply,ok,State#state{testcase_callback=ModFunc}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({set_random_seed,Seed}, _, State) -> ok | {error,Reason}
+%%
+%% Let operator set a random seed value to be used e.g. for shuffling
+%% test cases.
+
+handle_call({set_random_seed,Seed}, _From, State) ->
+ {reply,ok,State#state{random_seed=Seed}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(stop, _, State) -> ok
+%%
+%% Stops the test server immediately.
+%% Some cleanup is done by terminate/2
+
+handle_call(stop, _From, State) ->
+ {stop, shutdown, ok, State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call(get_target_info, _, State) -> TI
+%%
+%% TI = #target_info{}
+%%
+%% Returns information about target
+
+handle_call(get_target_info, _From, State) ->
+ {reply, State#state.target_info, State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({start_node,Name,Type,Options}, _, State) ->
+%% ok | {error,Reason}
+%%
+%% Starts a new node (slave or peer)
+
+handle_call({start_node, Name, Type, Options}, From, State) ->
+ %% test_server_ctrl does gen_server:reply/2 explicitly
+ test_server_node:start_node(Name, Type, Options, From,
+ State#state.target_info),
+ {noreply,State};
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({wait_for_node,Node}, _, State) -> ok
+%%
+%% Waits for a new node to take contact. Used if
+%% node is started with option {wait,false}
+
+handle_call({wait_for_node, Node}, From, State) ->
+ NewWaitList =
+ case ets:lookup(slave_tab,Node) of
+ [] ->
+ [{Node,From}|State#state.wait_for_node];
+ _ ->
+ gen_server:reply(From,ok),
+ State#state.wait_for_node
+ end,
+ {noreply,State#state{wait_for_node=NewWaitList}};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({stop_node,Name}, _, State) -> ok | {error,Reason}
+%%
+%% Stops a slave or peer node. This is actually only some cleanup
+%% - the node is really stopped by test_server when this returns.
+
+handle_call({stop_node, Name}, _From, State) ->
+ R = test_server_node:stop_node(Name, State#state.target_info),
+ {reply, R, State};
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_call({is_release_available,Release}, _, State) -> Result
+%%
+%% Tests if the release is available.
+
+handle_call({is_release_available, Release}, _From, State) ->
+ R = test_server_node:is_release_available(Release),
+ {reply, R, State}.
+
+%%--------------------------------------------------------------------
+%% Stores the list of hosts usable for slave nodes in the calling
+%% process' dictionary; read back by get_hosts/0.
+set_hosts(Hosts) ->
+    put(test_server_hosts, Hosts).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_cast({node_started,Name}, _, State)
+%%
+%% Called by test_server_node when a slave/peer node is fully started.
+
+%% A slave/peer node is up: start tracing it if a trace control node
+%% is active, and reply to any caller parked in {wait_for_node,Node}.
+handle_cast({node_started,Node}, State) ->
+    case State#state.trc of
+	false -> ok;
+	Trc -> test_server_node:trace_nodes(Trc, [Node])
+    end,
+    NewWaitList =
+	case lists:keysearch(Node,1,State#state.wait_for_node) of
+	    {value,{Node,From}} ->
+		%% Deferred reply to the wait_for_node caller.
+		gen_server:reply(From, ok),
+		lists:keydelete(Node, 1, State#state.wait_for_node);
+	    false ->
+		State#state.wait_for_node
+	end,
+    {noreply, State#state{wait_for_node=NewWaitList}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_info({'EXIT',Pid,Reason}, State)
+%% Pid = pid()
+%% Reason = term()
+%%
+%% Handles exit messages from linked processes. Only test suites and
+%% possibly a target client are expected to be linked.
+%% When a test suite terminates, it is removed from the job queue.
+%% If a target client terminates it means that we lost contact with
+%% target. The test_server_ctrl process is terminated, and terminate/2
+%% will do the cleanup
+
+handle_info({'EXIT',Pid,Reason}, State) ->
+ case lists:keysearch(Pid,2,State#state.jobs) of
+ false ->
+ TI = State#state.target_info,
+ case TI#target_info.target_client of
+ Pid ->
+ %% The target client died - lost contact with target
+ {stop,{lost_contact_with_target,Reason},State};
+ _other ->
+ %% not our problem
+ {noreply,State}
+ end;
+ {value,{Name,_}} ->
+ NewJobs = lists:keydelete(Pid, 2, State#state.jobs),
+ case Reason of
+ normal ->
+ fine;
+ killed ->
+ io:format("Suite ~s was killed\n", [Name]);
+ _Other ->
+ io:format("Suite ~s was killed with reason ~p\n",
+ [Name,Reason])
+ end,
+ State2 = State#state{jobs=NewJobs},
+ case NewJobs of
+ [] ->
+ lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
+ State2#state.idle_notify),
+ case State2#state.finish of
+ false ->
+ {noreply,State2#state{idle_notify=[]}};
+ _ -> % true | abort
+ %% test_server:finish() has been called and
+ %% there are no jobs in the job queue =>
+ %% stop the test_server_ctrl
+ {stop,shutdown,State2#state{finish=false}}
+ end;
+ _ -> % pending jobs
+ case State2#state.finish of
+ abort -> % abort test now!
+ lists:foreach(fun({Cli,Fun}) -> Fun(Cli) end,
+ State2#state.idle_notify),
+ {stop,shutdown,State2#state{finish=false}};
+ _ -> % true | false
+ {noreply, State2}
+ end
+ end
+ end;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_info({tcp,Sock,Bin}, State)
+%%
+%% Message from remote main target process
+%% Only valid message is 'job_proc_killed', which indicates
+%% that a process running a test suite was killed
+
+handle_info({tcp,_MainSock,<<1,Request/binary>>}, State) ->
+ case binary_to_term(Request) of
+ {job_proc_killed,Name,Reason} ->
+ %% The only purpose of this is to inform the user about what
+ %% happened on target.
+ %% The local job proc will soon be killed by the closed socket or
+ %% because the job is finished. Then the above clause ('EXIT') will
+ %% handle the problem.
+ io:format("Suite ~s was killed on remote target with reason"
+ " ~p\n", [Name,Reason]);
+ _ ->
+ ignore
+ end,
+ {noreply,State};
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_info({tcp_closed,Sock}, State)
+%%
+%% A Socket was closed. This indicates that a node died.
+%% This can be
+%% *Target node (if remote)
+%% *Slave or peer node started by a test suite
+%% *Trace control node
+
+handle_info({tcp_closed,Sock}, State=#state{trc=Sock}) ->
+ %% Tracer node died - can't really do anything
+ %%! Maybe print something???
+ {noreply,State#state{trc=false}};
+handle_info({tcp_closed,Sock}, State) ->
+ case test_server_node:nodedown(Sock,State#state.target_info) of
+ target_died ->
+ %% terminate/2 will do the cleanup
+ {stop,target_died,State};
+ _ ->
+ {noreply,State}
+ end;
+
+handle_info(_, State) ->
+ %% dummy; accept all, do nothing.
+ {noreply, State}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% terminate(Reason, State) -> ok
+%% Reason = term()
+%%
+%% Cleans up when the test_server is terminating. Kills the running
+%% test suites (if any) and terminates the remote target (if it exists)
+
+%% gen_server terminate/2 callback: stop the trace control node (if
+%% any), kill all running job processes and shut down the target.
+terminate(_Reason, State) ->
+    case State#state.trc of
+	false -> ok;
+	Sock -> test_server_node:stop_tracer_node(Sock)
+    end,
+    kill_all_jobs(State#state.jobs),
+    test_server_node:stop(State#state.target_info),
+    ok.
+
+%% kill_all_jobs(Jobs) -> ok
+%% Unconditionally kills every job process in the {Name,Pid} list.
+kill_all_jobs(Jobs) ->
+    lists:foreach(fun({_Name,JobPid}) -> exit(JobPid, kill) end, Jobs),
+    ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%----------------------- INTERNAL FUNCTIONS -----------------------%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% spawn_tester(Mod, Func, Args, Dir, Name, Levels,
+%% TestCaseCallback, ExtraTools) -> Pid
+%% Mod = atom()
+%% Func = atom()
+%% Args = [term(),...]
+%% Dir = string()
+%% Name = string()
+%% Levels = {integer(),integer(),integer()}
+%% TestCaseCallback = {CBMod,CBFunc} | undefined
+%% ExtraTools = [ExtraTool,...]
+%% ExtraTool = CoverInfo | TraceInfo | RandomSeed
+%%
+%% Spawns a test suite execute-process, just an ordinary spawn, except
+%% that it will set a lot of dictionary information before starting the
+%% named function. Also, the execution is timed and protected by a catch.
+%% When the named function is done executing, a summary of the results
+%% is printed to the log files.
+
+%% Spawn-and-link the job process; all bookkeeping happens in
+%% init_tester/8 inside the new process.
+spawn_tester(Mod, Func, Args, Dir, Name, Levels, TCCallback, ExtraTools) ->
+    spawn_link(
+      fun() -> init_tester(Mod, Func, Args, Dir, Name, Levels,
+			   TCCallback, ExtraTools)
+      end).
+
+%% Body of a job process: seeds the process dictionary with counters
+%% and settings read by the test-run machinery, starts the extra
+%% tools, runs Mod:Func(Args) under timing + catch, stops the tools
+%% and prints a result summary line (log + HTML totals row).
+init_tester(Mod, Func, Args, Dir, Name, {SumLev,MajLev,MinLev},
+	    TCCallback, ExtraTools) ->
+    process_flag(trap_exit, true),
+    put(test_server_name, Name),
+    put(test_server_dir, Dir),
+    put(test_server_total_time, 0),
+    put(test_server_ok, 0),
+    put(test_server_failed, 0),
+    put(test_server_skipped, {0,0}),
+    put(test_server_summary_level, SumLev),
+    put(test_server_major_level, MajLev),
+    put(test_server_minor_level, MinLev),
+    put(test_server_random_seed, proplists:get_value(random_seed, ExtraTools)),
+    put(test_server_testcase_callback, TCCallback),
+    StartedExtraTools = start_extra_tools(ExtraTools),
+    %% ts_tc/3 returns {ElapsedMicroSecs, Result-or-caught-EXIT}.
+    {TimeMy,Result} = ts_tc(Mod, Func, Args),
+    put(test_server_common_io_handler, undefined),
+    stop_extra_tools(StartedExtraTools),
+    case Result of
+	{'EXIT',test_suites_done} ->
+	    print(25, "DONE, normal exit", []);
+	{'EXIT',_Pid,Reason} ->
+	    print(1, "EXIT, reason ~p", [Reason]);
+	{'EXIT',Reason} ->
+	    print(1, "EXIT, reason ~p", [Reason]);
+	_Other ->
+	    print(25, "DONE", [])
+    end,
+    Time = TimeMy/1000000,
+    SuccessStr =
+	case get(test_server_failed) of
+	    0 -> "Ok";
+	    _ -> "FAILED"
+	end,
+    {SkippedN,SkipStr} =
+	case get(test_server_skipped) of
+	    {0,_} -> {0,""};
+	    {Skipped,_} -> {Skipped,io_lib:format(", ~p Skipped", [Skipped])}
+	end,
+    OkN = get(test_server_ok),
+    FailedN = get(test_server_failed),
+    print(html,"<tr><td></td><td><b>TOTAL</b></td><td></td><td></td>"
+	  "<td>~.3fs</td><td><b>~s</b></td><td>~p Ok, ~p Failed~s of ~p</td></tr>\n",
+	  [Time,SuccessStr,OkN,FailedN,SkipStr,OkN+FailedN+SkippedN]).
+
+%% timer:tc/3
+%% ts_tc(M, F, A) -> {ElapsedMicroSecs, Value}
+%% Like timer:tc/3 but catches exits/errors, so Value may be a
+%% {'EXIT',...} term. NOTE(review): ?now is a module macro not visible
+%% in this chunk — presumably an erlang:now()-style 3-tuple timestamp
+%% (elapsed_time/2 below assumes that shape); confirm.
+ts_tc(M, F, A) ->
+    Before = ?now,
+    Val = (catch apply(M, F, A)),
+    After = ?now,
+    Elapsed = elapsed_time(Before, After),
+    {Elapsed,Val}.
+
%% elapsed_time(Before, After) -> MicroSecs
%% Difference (After - Before) in microseconds between two
%% {MegaSecs,Secs,MicroSecs} timestamps as returned by erlang:now/0.
elapsed_time({BMega,BSec,BMicro}, {AMega,ASec,AMicro}) ->
    (AMega*1000000000000 + ASec*1000000 + AMicro) -
        (BMega*1000000000000 + BSec*1000000 + BMicro).
+
%% start_extra_tools(ExtraTools) -> StartedTools
%% Start the extra tools listed in ExtraTools. Currently only cover
%% entries are acted upon; entries that fail to start, and unrecognized
%% entries, are silently dropped from the result.
start_extra_tools(ExtraTools) ->
    start_extra_tools(ExtraTools, []).

start_extra_tools([{cover,App,Analyse}|Rest], Acc) ->
    case cover_compile(App) of
        {ok,AnalyseMods} ->
            start_extra_tools(Rest, [{cover,App,Analyse,AnalyseMods}|Acc]);
        {error,_} ->
            %% could not cover compile - skip this tool
            start_extra_tools(Rest, Acc)
    end;
start_extra_tools([_Unknown|Rest], Acc) ->
    start_extra_tools(Rest, Acc);
start_extra_tools([], Acc) ->
    Acc.
+
%% stop_extra_tools(StartedTools) -> ok
%% Stop the extra tools started by start_extra_tools/1. If cover was not
%% among them, a default coverage log is written so that the "Coverage
%% log" link in the html summary is never dangling.
stop_extra_tools(ExtraTools) ->
    TestDir = get(test_server_log_dir_base),
    case lists:keymember(cover, 1, ExtraTools) of
        true ->
            ok;
        false ->
            write_default_coverlog(TestDir)
    end,
    stop_extra_tools(ExtraTools, TestDir).

stop_extra_tools([{cover,App,Analyse,AnalyseMods}|Rest], TestDir) ->
    cover_analyse(App, Analyse, AnalyseMods, TestDir),
    stop_extra_tools(Rest, TestDir);
stop_extra_tools([], _TestDir) ->
    ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% do_spec(SpecName, MultiplyTimetrap) -> {error,Reason} | exit(Result)
+%% SpecName = string()
+%% MultiplyTimetrap = integer() | infinity
+%%
+%% Reads the named test suite specification file, and executes it.
+%%
+%% This function is meant to be called by a process created by
+%% spawn_tester/7, which sets up some necessary dictionary values.
+
%% Read and execute the named test suite specification file. Returns
%% {error,{cant_open_spec,Reason}} if the file cannot be consulted;
%% otherwise the result of executing the spec terms.
do_spec(SpecName, MultiplyTimetrap) when is_list(SpecName) ->
    case file:consult(SpecName) of
        {error,Reason} ->
            io:format("Can't open ~s: ~p\n", [SpecName,Reason]),
            {error,{cant_open_spec,Reason}};
        {ok,TermList} ->
            do_spec_list(TermList, MultiplyTimetrap)
    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% do_spec_list(TermList) -> exit(Result)
+%% TermList = [term()|...]
+%% MultiplyTimetrap = integer() | infinity
+%%
+%% Executes a list of test suite specification commands. The following
+%% commands are available, and may occur zero or more times (if several,
+%% the contents is appended):
+%%
+%% {topcase,TopCase} Specifies top level test goals. TopCase has the syntax
+%% specified by collect_cases/3.
+%%
+%% {skip,Skip} Specifies test cases to skip, and lists requirements that
+%% cannot be granted during the test run. Skip has the syntax specified
+%% by collect_cases/3.
+%%
+%% {nodes,Nodes} Lists node names available to the test suites. Nodes have
+%% the syntax specified by collect_cases/3.
+%%
+%% {require_nodenames, Num} Specifies how many nodenames the test suite will
+%% need. These are automatically generated and inserted into the Config by the
+%% test_server. The caller may specify other hosts to run these nodes by
+%% using the {hosts, Hosts} option. If there are no hosts specified, all
+%% nodenames will be generated from the local host.
+%%
+%% {hosts, Hosts} Specifies a list of available hosts on which to start
+%% slave nodes. It is used when the {remote, true} option is given to the
+%% test_server:start_node/3 function. Also, if {require_nodenames, Num} is
+%% contained in the TermList, the generated nodenames will be spread over
+%% all hosts given in this Hosts list. The hostnames are given as atoms or
+%% strings.
+%%
+%% {diskless, true} is kept for backwards compatibility and
+%% should not be used. Use a configuration test case instead.
+%%
+%% This function is meant to be called by a process created by
+%% spawn_tester/7, which sets up some necessary dictionary values.
+
%% Execute a list of spec terms: extract any {hosts,Hosts} directive and
%% register it with the controller, build the initial config, fold the
%% remaining terms and start the resulting test run.
do_spec_list(TermList0, MultiplyTimetrap) ->
    Nodes = [],
    TermList =
        case lists:keyfind(hosts, 1, TermList0) of
            {hosts, Hosts0} ->
                Hosts = [cast_to_list(H) || H <- Hosts0],
                controller_call({set_hosts, Hosts}),
                lists:keydelete(hosts, 1, TermList0);
            false ->
                TermList0
        end,
    DefaultConfig = make_config([{nodes,Nodes}]),
    {TopCases,SkipList,Config} = do_spec_terms(TermList, [], [], DefaultConfig),
    do_test_cases(TopCases, SkipList, Config, MultiplyTimetrap).
+
%% Fold over the spec terms, accumulating top level cases, skip
%% directives and configuration updates.
%% Returns {TopCases,SkipList,Config}.
do_spec_terms([], TopCases, SkipList, Config) ->
    {TopCases,SkipList,Config};
do_spec_terms([Term|Terms], TopCases, SkipList, Config) ->
    case Term of
        {topcase,TopCase} ->
            do_spec_terms(Terms, [TopCase|TopCases], SkipList, Config);
        {skip,Skip} ->
            do_spec_terms(Terms, TopCases, [Skip|SkipList], Config);
        {nodes,Nodes} ->
            do_spec_terms(Terms, TopCases, SkipList,
                          update_config(Config, {nodes,Nodes}));
        {diskless,How} ->
            do_spec_terms(Terms, TopCases, SkipList,
                          update_config(Config, {diskless,How}));
        {config,MoreConfig} ->
            do_spec_terms(Terms, TopCases, SkipList, Config++MoreConfig);
        {default_timeout,Tmo} ->
            do_spec_terms(Terms, TopCases, SkipList,
                          update_config(Config, {default_timeout,Tmo}));
        {require_nodenames,NumNames} ->
            %% generate the requested node names; drop an empty name if
            %% one should appear in the generated list
            NodeNames = lists:delete([], generate_nodenames(NumNames)),
            do_spec_terms(Terms, TopCases, SkipList,
                          update_config(Config, {nodenames,NodeNames}));
        Other ->
            io:format("** WARNING: Spec file contains unknown directive ~p\n",
                      [Other]),
            do_spec_terms(Terms, TopCases, SkipList, Config)
    end.
+
+
+
%% generate_nodenames(Num) -> [atom()]
%% Generate Num node names spread over the configured hosts; if no
%% hosts are configured, the target host is used for all of them.
generate_nodenames(Num) ->
    Hosts =
        case controller_call(get_hosts) of
            [] ->
                TI = controller_call(get_target_info),
                [TI#target_info.host];
            Configured ->
                Configured
        end,
    generate_nodenames2(Num, Hosts, []).
+
%% Build N node names of the form nod<digits>@Host, picking hosts
%% round-robin from the Hosts list.
generate_nodenames2(0, _Hosts, Acc) ->
    Acc;
generate_nodenames2(N, Hosts, Acc) ->
    Host = cast_to_list(lists:nth((N rem length(Hosts)) + 1, Hosts)),
    NodeName = list_to_atom(temp_nodename("nod", []) ++ "@" ++ Host),
    generate_nodenames2(N-1, Hosts, [NodeName|Acc]).
+
%% temp_nodename(Base, []) -> string()
%% Produce a pseudo-random node name prefix from Base (e.g. "nod"): for
%% each character in Base, one group of digits is generated by mixing
%% the character code with the current {MegaSecs,Secs,MicroSecs} time.
%% NOTE(review): 'bxor' and '+' share precedence in Erlang (left
%% associative), so the expression groups as
%% ((((Chr bxor A) bxor B) + A) bxor C) + B. The name is time-derived
%% only, so two calls within the same microsecond could collide -
%% presumably acceptable for temporary slave node names; confirm.
temp_nodename([], Acc) ->
    lists:flatten(Acc);
temp_nodename([Chr|Base], Acc) ->
    {A,B,C} = ?now,
    New = [Chr | integer_to_list(Chr bxor A bxor B+A bxor C+B)],
    temp_nodename(Base, [New|Acc]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% count_test_cases(TopCases, SkipCases) -> {Suites,NoOfCases} | error
+%% TopCases = term() (See collect_cases/3)
+%% SkipCases = term() (See collect_cases/3)
+%% Suites = list()
+%% NoOfCases = integer() | unknown
+%%
+%% Counts the test cases that are about to run and returns that number.
+%% If there's a conf group in TestSpec with a repeat property, the total number
+%% of cases can not be calculated and NoOfCases = unknown.
%% count_test_cases(TopCases, SkipCases) -> {Suites,NoOfCases} | error
%% Collect all cases for the given spec and count them. NoOfCases is
%% 'unknown' when the spec contains a conf group with a repeat property.
count_test_cases(TopCases, SkipCases) when is_list(TopCases) ->
    case collect_all_cases(TopCases, SkipCases) of
        {error,_} ->
            error;
        TestSpec ->
            Suites = get_suites(TestSpec, []),
            NoOfCases =
                case remove_conf(TestSpec) of
                    {repeats,_} -> unknown;
                    TestSpec1 -> length(TestSpec1)
                end,
            {Suites,NoOfCases}
    end;
count_test_cases(TopCase, SkipCases) ->
    %% not a list - wrap and retry
    count_test_cases([TopCase], SkipCases).
+
+
%% remove_conf(Cases) -> Cases1 | {repeats,Cases1}
%% Strip all conf/make entries (and their skip counterparts) from a
%% test spec. If any conf group carries a repeat property, the result
%% is tagged {repeats,_} since the real case count is then unknown.
remove_conf(Cases) ->
    remove_conf(Cases, [], false).

remove_conf([Case|Rest], Acc, Repeats) ->
    case Case of
        {conf,_Ref,Props,_MF} ->
            case get_repeat(Props) of
                undefined -> remove_conf(Rest, Acc, Repeats);
                _ -> remove_conf(Rest, Acc, true)
            end;
        {make,_Ref,_MF} ->
            remove_conf(Rest, Acc, Repeats);
        {skip_case,{Type,_Ref,_MF,_Cmt}} when Type==conf; Type==make ->
            remove_conf(Rest, Acc, Repeats);
        _ ->
            remove_conf(Rest, [Case|Acc], Repeats)
    end;
remove_conf([], Acc, true) ->
    {repeats,lists:reverse(Acc)};
remove_conf([], Acc, false) ->
    lists:reverse(Acc).
+
%% get_suites(TestSpec, []) -> [SuiteMod]
%% Collect, in order of first occurrence, the distinct modules in the
%% test spec whose names end in "_SUITE".
get_suites([{Mod,_Case}|Rest], Mods) when is_atom(Mod) ->
    get_suites(Rest, add_if_suite(Mod, Mods));
get_suites([{Mod,_Func,_Args}|Rest], Mods) when is_atom(Mod) ->
    get_suites(Rest, add_if_suite(Mod, Mods));
get_suites([_|Rest], Mods) ->
    get_suites(Rest, Mods);
get_suites([], Mods) ->
    lists:reverse(Mods).

%% prepend Mod only when add_mod/2 approves it
add_if_suite(Mod, Mods) ->
    case add_mod(Mod, Mods) of
        true  -> [Mod|Mods];
        false -> Mods
    end.

%% add_mod(Mod, Mods) -> bool()
%% true if Mod looks like a test suite (has a "_SUITE" substring) and
%% is not already a member of Mods.
add_mod(Mod, Mods) ->
    case string:rstr(atom_to_list(Mod), "_SUITE") of
        0 -> false;
        _ -> not lists:member(Mod, Mods)
    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) ->
+%% exit(Result)
+%%
+%% TopCases = term() (See collect_cases/3)
+%% SkipCases = term() (See collect_cases/3)
+%% Config = term() (See collect_cases/3)
+%% MultiplyTimetrap = integer() | infinity
+%%
+%% Initializes and starts the test run, for "ordinary" test suites.
+%% Creates log directories and log files, inserts initial timestamps and
+%% configuration information into the log files.
+%%
+%% This function is meant to be called by a process created by
+%% spawn_tester/7, which sets up some necessary dictionary values.
+
%% Set up the log files, expand the test spec with init/end_per_suite
%% conf cases, write the html and major log headers (host/target/erlang
%% version info, case count, hosts) and then hand over to
%% run_test_cases/3 which executes the spec.
do_test_cases(TopCases, SkipCases, Config, MultiplyTimetrap) when is_list(TopCases) ->
    start_log_file(),
    case collect_all_cases(TopCases, SkipCases) of
        {error,Why} ->
            print(1, "Error starting: ~p", [Why]),
            exit(test_suites_done);
        TestSpec0 ->
            %% N = number of cases to run, or 'unknown' when the spec
            %% contains a repeated conf group
            N = case remove_conf(TestSpec0) of
                    {repeats,_} -> unknown;
                    TS -> length(TS)
                end,
            put(test_server_cases, N),
            put(test_server_case_num, 0),
            TestSpec =
                add_init_and_end_per_suite(TestSpec0, undefined, undefined),
            TI = get_target_info(),
            print(1, "Starting test~s", [print_if_known(N, {", ~w test cases",[N]},
                                                        {" (with repeated test cases)",[]})]),
            test_server_sup:framework_call(report, [tests_start,
                                                    {get(test_server_name),N}]),
            %% html summary log header
            print(html,
                  "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
                  "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
                  "<html>\n"
                  "<head><title>Test ~p results</title>\n"
                  "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n"
                  "</head>\n"
                  "<body bgcolor=\"white\" text=\"black\" "
                  "link=\"blue\" vlink=\"purple\" alink=\"red\">"
                  "<h2>Results from test ~p</h2>\n",
                  [get(test_server_name),get(test_server_name)]),
            print_timestamp(html, "Test started at "),

            print(html, "<p>Host:<br>\n"),
            print_who(test_server_sup:hoststr(), test_server_sup:get_username()),
            print(html, "<br>Used Erlang ~s in <tt>~s</tt>.\n",
                  [erlang:system_info(version), code:root_dir()]),

            %% target info: either from the target_info record, or from
            %% the controlling framework if one is configured
            case os:getenv("TEST_SERVER_FRAMEWORK") of
                false ->
                    print(html, "<p>Target:<br>\n"),
                    print_who(TI#target_info.host, TI#target_info.username),
                    print(html, "<br>Used Erlang ~s in <tt>~s</tt>.\n",
                          [TI#target_info.version, TI#target_info.root_dir]);
                _ ->
                    case test_server_sup:framework_call(target_info, []) of
                        TargetInfo when is_list(TargetInfo),
                                        length(TargetInfo) > 0 ->
                            print(html, "<p>Target:<br>\n"),
                            print(html, "~s\n", [TargetInfo]);
                        _ ->
                            ok
                    end
            end,

            print(html,
                  "<p><a href=\"~s\">Full textual log</a>\n"
                  "<br><a href=\"~s\">Coverage log</a>\n",
                  [?suitelog_name,?coverlog_name]),
            print(html,"<p>~s"
                  "<p>\n"
                  "<table border=3 cellpadding=5>"
                  "<tr><th>Num</th><th>Module</th><th>Case</th><th>Log</th>"
                  "<th>Time</th><th>Result</th><th>Comment</th></tr>\n",
                  [print_if_known(N, {"Suite contains ~p test cases.\n",[N]},
                                  {"",[]})]),
            %% major (textual) log header
            print(major, "=cases ~p", [get(test_server_cases)]),
            print(major, "=user ~s", [TI#target_info.username]),
            print(major, "=host ~s", [TI#target_info.host]),

            %% If there are no hosts specified, use only the local host
            case controller_call(get_hosts) of
                [] ->
                    print(major, "=hosts ~s", [TI#target_info.host]),
                    controller_call({set_hosts, [TI#target_info.host]});
                Hosts ->
                    Str = lists:flatten(lists:map(fun(X) -> [X," "] end, Hosts)),
                    print(major, "=hosts ~s", [Str])
            end,
            print(major, "=emulator_vsn ~s", [TI#target_info.version]),
            print(major, "=emulator ~s", [TI#target_info.emulator]),
            print(major, "=otp_release ~s", [TI#target_info.otp_release]),
            print(major, "=started ~s",
                  [lists:flatten(timestamp_get(""))]),
            run_test_cases(TestSpec, Config, MultiplyTimetrap)
    end;

do_test_cases(TopCase, SkipCases, Config, MultiplyTimetrap) ->
    %% when not list(TopCase) - wrap and retry
    do_test_cases([TopCase], SkipCases, Config, MultiplyTimetrap).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% start_log_file() -> ok | exit({Error,Reason})
+%% Stem = string()
+%%
+%% Creates the log directories, the major log file and the html log file.
+%% The log files are initialized with some header information.
+%%
+%% The name of the log directory will be <Name>.LOGS/run.<Date>/ where
+%% Name is the test suite name and Date is the current date and time.
+
%% Create the top log directory (it is ok if it already exists) and a
%% fresh run.<timestamp> directory below it, open the major (textual)
%% and html log files, store their fds in the process dictionary and
%% refresh the "last file"/"last link" pointers and html links.
%% Exits with {cant_create_log_dir,_} on directory creation failure.
start_log_file() ->
    Dir = get(test_server_dir),
    case file:make_dir(Dir) of
        ok ->
            ok;
        {error, eexist} ->
            ok;                                 % already there - fine
        MkDirError ->
            exit({cant_create_log_dir,{MkDirError,Dir}})
    end,
    TestDir = timestamp_filename_get(filename:join(Dir, "run.")),
    case file:make_dir(TestDir) of
        ok ->
            ok;
        MkDirError2 ->
            exit({cant_create_log_dir,{MkDirError2,TestDir}})
    end,

    %% remember the current run directory, both under Dir and in the
    %% current working directory
    ok = file:write_file(filename:join(Dir, ?last_file), TestDir ++ "\n"),
    ok = file:write_file(?last_file, TestDir ++ "\n"),

    put(test_server_log_dir_base,TestDir),
    MajorName = filename:join(TestDir, ?suitelog_name),
    HtmlName = MajorName ++ ?html_ext,
    {ok,Major} = file:open(MajorName, [write]),
    {ok,Html} = file:open(HtmlName, [write]),
    put(test_server_major_fd,Major),
    put(test_server_html_fd,Html),

    %% html links pointing at this run's summary log
    make_html_link(filename:absname(?last_test ++ ?html_ext),
                   HtmlName, filename:basename(Dir)),
    LinkName = filename:join(Dir, ?last_link),
    make_html_link(LinkName ++ ?html_ext, HtmlName,
                   filename:basename(Dir)),

    PrivDir = filename:join(TestDir, ?priv_dir),
    ok = file:make_dir(PrivDir),
    put(test_server_priv_dir,PrivDir++"/"),
    print_timestamp(13,"Suite started at "),
    ok.
+
%% make_html_link(LinkName, Target, Explanation) -> ok
%% Write a small html page at LinkName containing a single link to
%% Target, labelled Explanation.
make_html_link(LinkName, Target, Explanation) ->
    %% if possible use a relative reference to Target - relative only
    %% when Target lies below the directory containing the link
    TargetL = filename:split(Target),
    PwdL = filename:split(filename:dirname(LinkName)),
    Href = case lists:prefix(PwdL, TargetL) of
               true ->
                   filename:join(lists:nthtail(length(PwdL), TargetL));
               false ->
                   "file:" ++ Target
           end,
    H = io_lib:format("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
                      "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
                      "<html>\n"
                      "<head><title>~s</title></head>\n"
                      "<body bgcolor=\"white\" text=\"black\""
                      " link=\"blue\" vlink=\"purple\" alink=\"red\">\n"
                      "<h1>Last test</h1>\n"
                      "<a href=\"~s\">~s</a>~n"
                      "</body>\n</html>\n",
                      [Explanation,Href,Explanation]),
    ok = file:write_file(LinkName, H).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% start_minor_log_file(Mod, Func) -> AbsName
+%% Mod = atom()
+%% Func = atom()
+%% AbsName = string()
+%%
+%% Create a minor log file for the test case Mod,Func,Args. The log file
+%% will be stored in the log directory under the name <Mod>.<Func>.log.
+%% Some header info will also be inserted into the log file.
+
%% Create a minor (per-case) html log file <mod>.<func>.html in the
%% current run directory and return its absolute name. If a file with
%% that name already exists (the case is run more than once), seconds
%% and milliseconds are inserted in the name to keep it unique.
start_minor_log_file(Mod, Func) ->
    LogDir = get(test_server_log_dir_base),
    Name0 = lists:flatten(io_lib:format("~s.~s~s", [Mod,Func,?html_ext])),
    Name = downcase(Name0),
    AbsName = filename:join(LogDir, Name),
    case file:read_file_info(AbsName) of
        {error,_} ->                            %% normal case, unique name
            start_minor_log_file1(Mod, Func, LogDir, AbsName);
        {ok,_} ->                               %% special case, duplicate names
            %% NOTE(review): calls now/0 directly where the rest of the
            %% file uses the ?now macro - confirm they are equivalent
            {_,S,Us} = now(),
            Name1_0 =
                lists:flatten(io_lib:format("~s.~s.~w.~w~s", [Mod,Func,S,
                                                              trunc(Us/1000),
                                                              ?html_ext])),
            Name1 = downcase(Name1_0),
            AbsName1 = filename:join(LogDir, Name1),
            start_minor_log_file1(Mod, Func, LogDir, AbsName1)
    end.
+
%% Open the minor log file, write its html header and - when a source
%% listing for Mod exists in LogDir - a link to the source of the test
%% case. The open fd is stored under test_server_minor_fd in the
%% process dictionary. Returns AbsName.
start_minor_log_file1(Mod, Func, LogDir, AbsName) ->
    {ok,Fd} = file:open(AbsName, [write]),
    Lev = get(test_server_minor_level)+1000, %% far down in the minor levels
    put(test_server_minor_fd, Fd),
    io:fwrite(Fd,
              "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
              "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
              "<html>\n"
              "<head><title>"++cast_to_list(Mod)++"</title>\n"
              "<meta http-equiv=\"cache-control\" content=\"no-cache\">\n"
              "</head>\n"
              "<body bgcolor=\"white\" text=\"black\""
              " link=\"blue\" vlink=\"purple\" alink=\"red\">\n",
              []),

    SrcListing = downcase(cast_to_list(Mod)) ++ ?src_listing_ext,
    case filelib:is_file(filename:join(LogDir, SrcListing)) of
        true ->
            print(Lev, "<a href=\"~s#~s\">source code for ~p:~p/1</a>\n",
                  [SrcListing,Func,Mod,Func]);
        false -> ok
    end,

    io:fwrite(Fd, "<pre>\n", []),

%% Historical commented-out code, kept as found ("Stupid BUG!"):
% case catch apply(Mod, Func, [doc]) of
%     {'EXIT', _Why} -> ok;
%     Comment -> print(Lev, "Comment: ~s~n<br>", [Comment])
% end,

    AbsName.
+
%% Write the html footer to the current minor log file, close it, and
%% clear the dictionary reference to its file descriptor.
stop_minor_log_file() ->
    MinorFd = get(test_server_minor_fd),
    io:fwrite(MinorFd, "</pre>\n</body>\n</html>\n", []),
    file:close(MinorFd),
    put(test_server_minor_fd, undefined).
+
%% downcase(String) -> string()
%% ASCII-only lowercasing: characters A-Z are shifted to a-z, every
%% other character is passed through unchanged.
downcase(S) -> downcase(S, []).

downcase([C|Rest], Acc) when C >= $A, C =< $Z ->
    downcase(Rest, [C - $A + $a | Acc]);
downcase([C|Rest], Acc) ->
    downcase(Rest, [C|Acc]);
downcase([], Acc) ->
    lists:reverse(Acc).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% html_convert_modules(TestSpec, Config) -> ok
+%% Isolate the modules affected by TestSpec and
+%% make sure they are converted to html.
+%%
+%% Errors are silently ignored.
+
%% Convert the source of every module referenced by TestSpec to an html
%% listing and copy the resulting files into the log directory of the
%% current run. Errors are silently ignored.
html_convert_modules(TestSpec, _Config) ->
    html_convert_modules(html_isolate_modules(TestSpec)),
    copy_html_files(get(test_server_dir), get(test_server_log_dir_base)).
+
+%% Retrieve a list of modules out of the test spec.
+
%% Extract the set of modules referenced by a test spec.
%% Returns the modules as a list without duplicates (order unspecified).
html_isolate_modules(List) ->
    html_isolate_modules(List, sets:new()).

html_isolate_modules([], Modules) ->
    sets:to_list(Modules);
html_isolate_modules([{skip_case,_}|Rest], Modules) ->
    html_isolate_modules(Rest, Modules);
html_isolate_modules([{conf,_Ref,_Props,{Mod,_Func}}|Rest], Modules) ->
    html_isolate_modules(Rest, sets:add_element(Mod, Modules));
html_isolate_modules([{Mod,_Case}|Rest], Modules) ->
    html_isolate_modules(Rest, sets:add_element(Mod, Modules));
html_isolate_modules([{Mod,_Case,_Args}|Rest], Modules) ->
    html_isolate_modules(Rest, sets:add_element(Mod, Modules)).
+
+%% Given a list of modules, convert each module's source code to HTML.
+
%% html_convert_modules(Mods) -> ok
%% For each module in Mods whose beam file can be located, convert its
%% source file to an html listing in the test_server directory.
%% BUG FIX: the original returned 'ok' as soon as code:which/1 gave a
%% non-path answer (non_existing, cover_compiled, preloaded, ...),
%% silently dropping all remaining modules. Such modules are now merely
%% skipped and conversion continues with the rest of the list.
html_convert_modules([Mod|Mods]) ->
    case code:which(Mod) of
        Path when is_list(Path) ->
            SrcFile = filename:rootname(Path) ++ ".erl",
            DestDir = get(test_server_dir),
            Name = atom_to_list(Mod),
            DestFile = filename:join(DestDir, downcase(Name) ++ ?src_listing_ext),
            html_possibly_convert(SrcFile, DestFile);
        _Other ->
            %% beam not found as a file - skip this module only
            ok
    end,
    html_convert_modules(Mods);
html_convert_modules([]) -> ok.
+
+%% Convert source code to HTML if possible and needed.
+
%% html_possibly_convert(Src, Dest) -> ok | term()
%% Convert the Erlang source file Src to an html listing at Dest, but
%% only when Dest is missing or older than Src (make-style comparison
%% of the #file_info.mtime fields). A missing source file is ignored.
html_possibly_convert(Src, Dest) ->
    case file:read_file_info(Src) of
        {ok,SInfo} ->
            case file:read_file_info(Dest) of
                {error,_Reason} ->              % no dest file
                    erl2html2:convert(Src, Dest);
                {ok,DInfo} when DInfo#file_info.mtime < SInfo#file_info.mtime ->
                    erl2html2:convert(Src, Dest);
                {ok,_DInfo} -> ok               % dest file up to date
            end;
        {error,_Reason} -> ok                   % no source code found
    end.
+
+%% Copy all HTML files in InDir to OutDir.
+
%% Copy every source listing file (matching *?src_listing_ext) from
%% InDir to OutDir.
copy_html_files(InDir, OutDir) ->
    Pattern = filename:join(InDir, "*" ++ ?src_listing_ext),
    lists:foreach(fun(SrcFile) -> copy_html_file(SrcFile, OutDir) end,
                  filelib:wildcard(Pattern)).

%% Copy one file into DestDir, keeping its basename. A read failure is
%% reported on stdout and otherwise ignored.
copy_html_file(SrcFile, DestDir) ->
    DestFile = filename:join(DestDir, filename:basename(SrcFile)),
    case file:read_file(SrcFile) of
        {ok,Contents} ->
            ok = file:write_file(DestFile, Contents);
        {error,_Reason} ->
            io:format("File ~p: read failed\n", [SrcFile])
    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% add_init_and_end_per_suite(TestSpec, Mod, Ref) -> NewTestSpec
+%%
+%% Expands TestSpec with an initial init_per_suite, and a final
+%% end_per_suite element, per each discovered suite in the list.
+
%% Walk the test spec and, wherever the suite module changes, insert an
%% init_per_suite conf case for the new suite and an end_per_suite conf
%% case for the previous one. LastMod is the module of the suite
%% currently "open"; LastRef is the reference of its init conf case,
%% 'undefined' when no suite is open yet, or 'skipped_suite' when the
%% open suite was skipped as a whole.
add_init_and_end_per_suite([{make,_,_}=Case|Cases], LastMod, LastRef) ->
    [Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
%% whole suite skipped ({Mod,all}) - close the previous suite but add
%% no init/end cases for the skipped one
add_init_and_end_per_suite([{skip_case,{{Mod,all},_}}=Case|Cases], LastMod, LastRef)
  when Mod =/= LastMod ->
    {PreCases, NextMod, NextRef} =
        do_add_end_per_suite_and_skip(LastMod, LastRef, Mod),
    PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
%% a single skipped case in a new suite - the suite itself still gets
%% its init/end conf cases
add_init_and_end_per_suite([{skip_case,{{Mod,_},_}}=Case|Cases], LastMod, LastRef)
  when Mod =/= LastMod ->
    {PreCases, NextMod, NextRef} =
        do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
    PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{skip_case,{conf,_,{Mod,_},_}}=Case|Cases], LastMod, LastRef)
  when Mod =/= LastMod ->
    {PreCases, NextMod, NextRef} =
        do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
    PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{skip_case,_}=Case|Cases], LastMod, LastRef) ->
    [Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
add_init_and_end_per_suite([{conf,_,_,{Mod,_}}=Case|Cases], LastMod, LastRef)
  when Mod =/= LastMod ->
    {PreCases, NextMod, NextRef} =
        do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
    PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{conf,_,_,_}=Case|Cases], LastMod, LastRef) ->
    [Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
%% ordinary test cases entering a new suite
add_init_and_end_per_suite([{Mod,_}=Case|Cases], LastMod, LastRef)
  when Mod =/= LastMod ->
    {PreCases, NextMod, NextRef} =
        do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
    PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([{Mod,_,_}=Case|Cases], LastMod, LastRef)
  when Mod =/= LastMod ->
    {PreCases, NextMod, NextRef} =
        do_add_init_and_end_per_suite(LastMod, LastRef, Mod),
    PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod, NextRef)];
add_init_and_end_per_suite([Case|Cases], LastMod, LastRef)->
    [Case|add_init_and_end_per_suite(Cases, LastMod, LastRef)];
%% end of spec - close the last suite, if one is still open
add_init_and_end_per_suite([], _LastMod, undefined) ->
    [];
add_init_and_end_per_suite([], _LastMod, skipped_suite) ->
    [];
add_init_and_end_per_suite([], LastMod, LastRef) ->
    [{conf,LastRef,[],{LastMod,end_per_suite}}].
+
%% Entering suite Mod: build the conf cases that close the previous
%% suite (end_per_suite, when one is open) and open the new one
%% (init_per_suite, only when the suite exports it).
%% Returns {Cases,NextMod,NextRef}.
do_add_init_and_end_per_suite(LastMod, LastRef, Mod) ->
    %% load the module so erlang:function_exported/3 below can give a
    %% true answer
    case code:is_loaded(Mod) of
        false -> code:load_file(Mod);
        _ -> ok
    end,
    {Init,NextMod,NextRef} =
        case erlang:function_exported(Mod, init_per_suite, 1) of
            true ->
                Ref = make_ref(),
                {[{conf,Ref,[],{Mod,init_per_suite}}],Mod,Ref};
            false ->
                %% no init_per_suite - nothing to insert, no open ref
                {[],Mod,undefined}
        end,
    Cases =
        if LastRef==undefined ->
                Init;
           LastRef==skipped_suite ->
                Init;
           true ->
                %% Adding end_per_suite here without checking if the
                %% function is actually exported. This is because a
                %% conf case must have an end case - so if it doesn't
                %% exist, it will only fail...
                [{conf,LastRef,[],{LastMod,end_per_suite}}|Init]
        end,
    {Cases,NextMod,NextRef}.
+
%% A whole suite (Mod) is being skipped: emit an end_per_suite conf
%% case for the previous suite if one is open, and mark the new suite
%% as skipped. Returns {PreCases,Mod,skipped_suite}.
do_add_end_per_suite_and_skip(_LastMod, undefined, Mod) ->
    {[],Mod,skipped_suite};
do_add_end_per_suite_and_skip(_LastMod, skipped_suite, Mod) ->
    {[],Mod,skipped_suite};
do_add_end_per_suite_and_skip(LastMod, LastRef, Mod) ->
    {[{conf,LastRef,[],{LastMod,end_per_suite}}],Mod,skipped_suite}.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% run_test_cases(TestSpec, Config, MultiplyTimetrap) -> exit(Result)
+%%
+%% If remote target, a socket connection is established.
+%% Runs the specified tests, then displays/logs the summary.
+
%% Run the test spec: set up the job socket (remote target only),
%% generate html source listings, execute all cases via
%% run_test_cases_loop/5, then print the final summary to the logs and
%% report to the framework. Always terminates the tester process with
%% exit(test_suites_done).
run_test_cases(TestSpec, Config, MultiplyTimetrap) ->

    maybe_open_job_sock(),

    html_convert_modules(TestSpec, Config),

    %%! For readable tracing...
    %%! Config1 = [{data_dir,""},{priv_dir,""},{nodes,[]}],
    %%! run_test_cases_loop(TestSpec, [[]], MultiplyTimetrap, [], []),

    run_test_cases_loop(TestSpec, [Config], MultiplyTimetrap, [], []),

    maybe_get_privdir(),

    %% test_server_skipped holds {UserSkipped,AutoSkipped}
    {AllSkippedN,UserSkipN,AutoSkipN,SkipStr} =
        case get(test_server_skipped) of
            {0,0} -> {0,0,0,""};
            {US,AS} -> {US+AS,US,AS,io_lib:format(", ~w skipped", [US+AS])}
        end,
    OkN = get(test_server_ok),
    FailedN = get(test_server_failed),
    print(1, "TEST COMPLETE, ~w ok, ~w failed~s of ~w test cases\n",
          [OkN,FailedN,SkipStr,OkN+FailedN+AllSkippedN]),
    test_server_sup:framework_call(report, [tests_done,
                                            {OkN,FailedN,{UserSkipN,AutoSkipN}}]),
    print(major, "=finished ~s", [lists:flatten(timestamp_get(""))]),
    print(major, "=failed ~p", [FailedN]),
    print(major, "=successful ~p", [OkN]),
    print(major, "=user_skipped ~p", [UserSkipN]),
    print(major, "=auto_skipped ~p", [AutoSkipN]),
    exit(test_suites_done).
+
+%% If the test is run at a remote target, this function sets up a socket
+%% communication with the target for handling this particular job.
%% For a remote target: listen on an ephemeral port, ask the target
%% (via the main socket) to connect back, and store the accepted
%% per-job socket in the process dictionary. Exits with
%% {no_contact,Reason} if the target does not connect within
%% ?ACCEPT_TIMEOUT. For a local target only purify initialization is
%% done.
maybe_open_job_sock() ->
    TI = get_target_info(),
    case TI#target_info.where of
        local ->
            %% local target
            test_server:init_purify();
        MainSock ->
            %% remote target
            {ok,LSock} = gen_tcp:listen(0, [binary,
                                            {reuseaddr,true},
                                            {packet,4},
                                            {active,false}]),
            {ok,Port} = inet:port(LSock),
            request(MainSock, {job,Port,get(test_server_name)}),
            case gen_tcp:accept(LSock, ?ACCEPT_TIMEOUT) of
                {ok,Sock} -> put(test_server_ctrl_job_sock, Sock);
                {error,Reason} -> exit({no_contact,Reason})
            end
    end.
+
+%% If the test is run at a remote target, this function waits for a
+%% tar packet containing the privdir created by the test case.
%% For a remote target: send job_done on the per-job socket and close
%% it (per the surrounding protocol, the target is expected to deliver
%% the priv_dir contents at this point). Local target: nothing to do.
maybe_get_privdir() ->
    case get(test_server_ctrl_job_sock) of
        undefined ->
            %% local target - no job socket was opened
            ok;
        JobSock ->
            request(JobSock, job_done),
            gen_tcp:close(JobSock)
    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% run_test_cases_loop(TestCases, Config, MultiplyTimetrap, Mode, Status) -> ok
+%% TestCases = [Test,...]
+%% Config = [[{Key,Val},...],...]
+%% MultiplyTimetrap = integer() | infinity
+%% Mode = [{Ref,[Prop,..],StartTime}]
+%% Ref = reference()
+%% Prop = {name,Name} | sequence | parallel |
+%% shuffle | {shuffle,Seed} |
+%% repeat | {repeat,N} |
+%% repeat_until_all_ok | {repeat_until_all_ok,N} |
+%% repeat_until_any_ok | {repeat_until_any_ok,N} |
+%% repeat_until_any_fail | {repeat_until_any_fail,N} |
+%% repeat_until_all_fail | {repeat_until_all_fail,N}
+%% Status = [{Ref,{{Ok,Skipped,Failed},CopiedCases}}]
+%% Ok = Skipped = Failed = [Case,...]
+%%
+%% Execute the TestCases under configuration Config. Config is a list
+%% of lists, where hd(Config) holds the config tuples for the current
+%% conf case and tl(Config) is the data for the higher level conf cases.
+%% Config data is "inherited" from top to nested conf cases, but
+%% never the other way around. if length(Config) == 1, Config contains
+%% only the initial config data for the suite.
+%%
+%% Test may be one of the following:
+%%
+%% {conf,Ref,Props,{Mod,Func}} Mod:Func is a configuration modification
+%% function, call it with the current configuration as argument. It will
+%% return a new configuration.
+%%
+%% {make,Ref,{Mod,Func,Args}} Mod:Func is a make function, and it is called
+%% with the given arguments. This function will *always* be called on the host
+%% - not on target.
+%%
+%% {Mod,Case} This is a normal test case. Determine the correct
+%% configuration, and insert {Mod,Case,Config} as head of the list,
+%% then reiterate.
+%%
+%% {Mod,Case,Args} A test case with predefined argument (usually a normal
+%% test case which just got a fresh configuration (see above)).
+%%
+%% {skip_case,{conf,Ref,Case,Comment}} An init conf case gets skipped
+%% by the user. This will also cause the end conf case to be skipped.
+%% Note that it is not possible to skip an end conf case directly (it
+%% can only be skipped indirectly by a skipped init conf case). The
+%% comment (which gets printed in the log files) describes why the case
+%% was skipped.
+%%
+%% {skip_case,{Case,Comment}} A normal test case skipped by the user.
+%% The comment (which gets printed in the log files) describes why the
+%% case was skipped.
+%%
+%% {auto_skip_case,{conf,Ref,Case,Comment},Mode} This is the result of
+%% an end conf case being automatically skipped due to a failing init
+%% conf case. It could also be a nested conf case that gets skipped
+%% because of a failed or skipped top level conf.
+%%
+%% {auto_skip_case,{Case,Comment},Mode} This is a normal test case which
+%% gets automatically skipped because of a failing init conf case or
+%% because of a failing previous test case in a sequence.
+%%
+%% -------------------------------------------------------------------
+%% Description of IO handling during execution of parallel test cases:
+%% -------------------------------------------------------------------
+%%
+%% A conf group can have an associated list of properties. If the
+%% parallel property is specified for a group, it means the test cases
+%% should be spawned and run in parallel rather than called sequentially
+%% (which is always the default mode). Test cases that execute in parallel
+%% also write to their respective minor log files in parallel. Printouts
+%% to common log files, such as the summary html file and the major log
+%% file on text format, still have to be processed sequentially. For this
+%% reason, the Mode argument specifies if a parallel group is currently
+%% being executed.
+%%
+%% A parallel test case process will always set the dictionary value
+%% 'test_server_common_io_handler' to the pid of the main (starting)
+%% process. With this value set, the print/3 function will send print
+%% messages to the main process instead of writing the data to file
+%% (only true for printouts to common log files).
+%%
+%% If a conf group nested under a parallel group in the test
+%% specification should be started, the 'test_server_common_io_handler'
+%% value gets set also on the main process. This causes all printouts
+%% to common files - both from parallel test cases and from cases
+%% executed by the main process - to all end up as messages in the
+%% inbox of the main process.
+%%
+%% During execution of a parallel group (or of a group nested under a
+%% parallel group), *any* new test case being started gets registered
+%% in a list saved in the dictionary with 'test_server_queued_io' as key.
+%% When the top level parallel group is finished (only then can we be
+%% sure all parallel test cases have finished and "reported in"), the
+%% list of test cases is traversed in order and printout messages from
+%% each process - including the main process - are handled in turn. See
+%% handle_test_case_io_and_status/0 for details.
+%%
+%% To be able to handle nested conf groups with different properties,
+%% the Mode argument specifies a list of {Ref,Properties} tuples.
+%% The head of the Mode list at any given time identifies the group
+%% currently being processed. The tail of the list identifies groups
+%% on higher level.
+%%
+%% -------------------------------------------------------------------
+%% Notes on parallel execution of test cases
+%% -------------------------------------------------------------------
+%%
+%% A group nested under a parallel group will start executing in
+%% parallel with previous (parallel) test cases (no matter what
+%% properties the nested group has). Test cases are however never
+%% executed in parallel with the start or end conf case of the same
+%% group! Because of this, the test_server_ctrl loop waits at
+%% the end conf of a group for all parallel cases to finish
+%% before the end conf case actually executes. This has the effect
+%% that it's only after a nested group has finished that any
+%% remaining parallel cases in the previous group get spawned (*).
+%% Example (all parallel cases):
+%%
+%% group1_init |---->
+%% group1_case1 | --------->
+%% group1_case2 | --------------------------------->
+%% group2_init | ---->
+%% group2_case1 | ------>
+%% group2_case2 | ---------->
+%% group2_end | --->
+%% group1_case3 (*)| ---->
+%% group1_case4 (*)| -->
+%% group1_end | --->
+%%
+
+run_test_cases_loop([{auto_skip_case,{Type,Ref,Case,Comment},SkipMode}|Cases],
+ Config, MultiplyTimetrap, Mode, Status) when Type==conf;
+ Type==make ->
+
+ file:set_cwd(filename:dirname(get(test_server_dir))),
+ CurrIOHandler = get(test_server_common_io_handler),
+ %% check and update the mode for test case execution and io msg handling
+ case {curr_ref(Mode),check_props(parallel, Mode)} of
+ {Ref,Ref} ->
+ case check_props(parallel, tl(Mode)) of
+ false ->
+ %% this is a skipped end conf for a top level parallel group,
+ %% buffered io can be flushed
+ handle_test_case_io_and_status(),
+ set_io_buffering(undefined),
+ {Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, false, SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ delete_status(Ref, Status));
+ _ ->
+ %% this is a skipped end conf for a parallel group nested under a
+ %% parallel group (io buffering is active)
+ wait_for_cases(Ref),
+ {Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, true, SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ case CurrIOHandler of
+ {Ref,_} ->
+ %% current_io_handler was set by start conf of this
+ %% group, so we can unset it now (no more io from main
+ %% process needs to be buffered)
+ set_io_buffering(undefined);
+ _ ->
+ ok
+ end,
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ delete_status(Ref, Status))
+ end;
+ {Ref,false} ->
+ %% this is a skipped end conf for a non-parallel group that's not
+ %% nested under a parallel group
+ {Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, false, SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ delete_status(Ref, Status));
+ {Ref,_} ->
+ %% this is a skipped end conf for a non-parallel group nested under
+ %% a parallel group (io buffering is active)
+ {Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, true, SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ case CurrIOHandler of
+ {Ref,_} ->
+ %% current_io_handler was set by start conf of this
+ %% group, so we can unset it now (no more io from main
+ %% process needs to be buffered)
+ set_io_buffering(undefined);
+ _ ->
+ ok
+ end,
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, tl(Mode),
+ delete_status(Ref, Status));
+ {_,false} ->
+ %% this is a skipped start conf for a group which is not nested
+ %% under a parallel group
+ {Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, false, SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, [conf(Ref,[])|Mode], Status);
+ {_,Ref0} when is_reference(Ref0) ->
+ %% this is a skipped start conf for a group nested under a parallel group
+ %% and if this is the first nested group, io buffering must be activated
+ if CurrIOHandler == undefined ->
+ set_io_buffering({Ref,self()});
+ true ->
+ ok
+ end,
+ {Mod,Func} = skip_case(auto, Ref, 0, Case, Comment, true, SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, [conf(Ref,[])|Mode], Status)
+ end;
+
+run_test_cases_loop([{auto_skip_case,{Case,Comment},SkipMode}|Cases],
+ Config, MultiplyTimetrap, Mode, Status) ->
+ {Mod,Func} = skip_case(auto, undefined, get(test_server_case_num)+1, Case, Comment,
+ (undefined /= get(test_server_common_io_handler)), SkipMode),
+ test_server_sup:framework_call(report, [tc_auto_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode,
+ update_status(skipped, Mod, Func, Status));
+
+run_test_cases_loop([{skip_case,{conf,Ref,Case,Comment}}|Cases0],
+ Config, MultiplyTimetrap, Mode, Status) ->
+ {Mod,Func} = skip_case(user, Ref, 0, Case, Comment,
+ (undefined /= get(test_server_common_io_handler))),
+ {Cases,Config1} =
+ case curr_ref(Mode) of
+ Ref ->
+ %% skipped end conf
+ {Cases0,tl(Config)};
+ _ ->
+ %% skipped start conf
+ {skip_cases_upto(Ref, Cases0, Comment, conf, Mode),Config}
+ end,
+ test_server_sup:framework_call(report, [tc_user_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config1, MultiplyTimetrap, Mode,
+ update_status(skipped, Mod, Func, Status));
+
+run_test_cases_loop([{skip_case,{Case,Comment}}|Cases],
+ Config, MultiplyTimetrap, Mode, Status) ->
+ {Mod,Func} = skip_case(user, undefined, get(test_server_case_num)+1, Case, Comment,
+ (undefined /= get(test_server_common_io_handler))),
+ test_server_sup:framework_call(report, [tc_user_skip,{?pl2a(Mod),Func,Comment}]),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode,
+ update_status(skipped, Mod, Func, Status));
+
+%% a start *or* end conf case, wrapping test cases or other conf cases
+run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
+ Config, MultiplyTimetrap, Mode0, Status) ->
+
+ CurrIOHandler = get(test_server_common_io_handler),
+ %% check and update the mode for test case execution and io msg handling
+ {StartConf,Mode,IOHandler,ConfTime,Status1} =
+ case {curr_ref(Mode0),check_props(parallel, Mode0)} of
+ {Ref,Ref} ->
+ case check_props(parallel, tl(Mode0)) of
+ false ->
+ %% this is an end conf for a top level parallel group, collect
+ %% results from the test case processes and calc total time
+ OkSkipFail = handle_test_case_io_and_status(),
+ file:set_cwd(filename:dirname(get(test_server_dir))),
+ After = ?now,
+ Before = get(test_server_parallel_start_time),
+ Elapsed = elapsed_time(Before, After)/1000000,
+ put(test_server_total_time, Elapsed),
+ {false,tl(Mode0),undefined,Elapsed,
+ update_status(Ref, OkSkipFail, Status)};
+ _ ->
+ %% this is an end conf for a parallel group nested under a
+ %% parallel group (io buffering is active)
+ OkSkipFail = wait_for_cases(Ref),
+ queue_test_case_io(Ref, self(), 0, Mod, Func),
+ Elapsed = elapsed_time(conf_start(Ref, Mode0),?now)/1000000,
+ case CurrIOHandler of
+ {Ref,_} ->
+ %% current_io_handler was set by start conf of this
+ %% group, so we can unset it after this case (no
+ %% more io from main process needs to be buffered)
+ {false,tl(Mode0),undefined,Elapsed,
+ update_status(Ref, OkSkipFail, Status)};
+ _ ->
+ {false,tl(Mode0),CurrIOHandler,Elapsed,
+ update_status(Ref, OkSkipFail, Status)}
+ end
+ end;
+ {Ref,false} ->
+ %% this is an end conf for a non-parallel group that's not
+ %% nested under a parallel group, so no need to buffer io
+ {false,tl(Mode0),undefined,
+ elapsed_time(conf_start(Ref, Mode0),?now)/1000000, Status};
+ {Ref,_} ->
+ %% this is an end conf for a non-parallel group nested under
+ %% a parallel group (io buffering is active)
+ queue_test_case_io(Ref, self(), 0, Mod, Func),
+ Elapsed = elapsed_time(conf_start(Ref, Mode0),?now)/1000000,
+ case CurrIOHandler of
+ {Ref,_} ->
+ %% current_io_handler was set by start conf of this
+ %% group, so we can unset it after this case (no
+ %% more io from main process needs to be buffered)
+ {false,tl(Mode0),undefined,Elapsed,Status};
+ _ ->
+ {false,tl(Mode0),CurrIOHandler,Elapsed,Status}
+ end;
+ {_,false} ->
+ %% this is a start conf for a group which is not nested under a
+ %% parallel group, check if this case starts a new parallel group
+ case lists:member(parallel, Props) of
+ true ->
+ %% prepare for execution of parallel group
+ put(test_server_parallel_start_time, ?now),
+ put(test_server_queued_io, []);
+ false ->
+ ok
+ end,
+ {true,[conf(Ref,Props)|Mode0],undefined,0,Status};
+ {_,_Ref0} ->
+ %% this is a start conf for a group nested under a parallel group, the
+ %% parallel_start_time and parallel_test_cases values have already been set
+ queue_test_case_io(Ref, self(), 0, Mod, Func),
+ %% if this is the first nested group under a parallel group, io
+ %% buffering must be activated
+ IOHandler1 = if CurrIOHandler == undefined ->
+ IOH = {Ref,self()},
+ set_io_buffering(IOH),
+ IOH;
+ true ->
+ CurrIOHandler
+ end,
+ {true,[conf(Ref,Props)|Mode0],IOHandler1,0,Status}
+ end,
+
+ %% if this is a start conf we check if cases should be shuffled
+ {[_Conf|Cases1]=Cs1,Shuffle} =
+ if StartConf ->
+ case get_shuffle(Props) of
+ undefined ->
+ {Cs0,undefined};
+ {_,repeated} ->
+ %% if group is repeated, a new seed should not be set every
+ %% turn - last one is saved in dictionary
+ CurrSeed = get(test_server_curr_random_seed),
+ {shuffle_cases(Ref, Cs0, CurrSeed),{shuffle,CurrSeed}};
+ {_,Seed} ->
+ UseSeed=
+ %% Determine which seed to use by:
+ %% 1. check the TS_RANDOM_SEED env variable
+ %% 2. check random_seed in process state
+ %% 3. use value provided with shuffle option
+ %% 4. use now() values for seed
+ case os:getenv("TS_RANDOM_SEED") of
+ Undef when Undef == false ; Undef == "undefined" ->
+ case get(test_server_random_seed) of
+ undefined -> Seed;
+ TSRS -> TSRS
+ end;
+ NumStr ->
+ %% Ex: "123 456 789" or "123,456,789" -> {123,456,789}
+ list_to_tuple([list_to_integer(NS) ||
+ NS <- string:tokens(NumStr, [$ ,$:,$,])])
+ end,
+ {shuffle_cases(Ref, Cs0, UseSeed),{shuffle,UseSeed}}
+ end;
+ not StartConf ->
+ {Cs0,undefined}
+ end,
+
+ %% if this is a start conf we check if Props specifies repeat and if so
+ %% we copy the group and carry the copy until the end conf where we
+ %% decide to perform the repetition or not
+ {Repeating,Status2,Cases,ReportRepeatStop} =
+ if StartConf ->
+ case get_repeat(Props) of
+ undefined ->
+ %% we *must* have a status entry for every conf since we
+ %% will continously update status with test case results
+ %% without knowing the Ref (but update hd(Status))
+ {false,new_status(Ref, Status1),Cases1,?void_fun};
+ _ ->
+ {Copied,_} = copy_cases(Ref, make_ref(), Cs1),
+ {true,new_status(Ref, Copied, Status1),Cases1,?void_fun}
+ end;
+ not StartConf ->
+ RepVal = get_repeat(get_props(Mode0)),
+ ReportStop =
+ fun() ->
+ print(minor, "~n*** Stopping repeat operation ~w", [RepVal]),
+ print(1, "Stopping repeat operation ~w", [RepVal])
+ end,
+ CopiedCases = get_copied_cases(Status1),
+ EndStatus = delete_status(Ref, Status1),
+ %% check in Mode0 if this is a repeat conf
+ case RepVal of
+ undefined ->
+ {false,EndStatus,Cases1,?void_fun};
+ {repeat,_} ->
+ {true,EndStatus,CopiedCases++Cases1,?void_fun};
+ {repeat_until_all_ok,_} ->
+ {RestCs,Fun} = case get_tc_results(Status1) of
+ {_,_,[]} ->
+ {Cases1,ReportStop};
+ _ ->
+ {CopiedCases++Cases1,?void_fun}
+ end,
+ {true,EndStatus,RestCs,Fun};
+ {repeat_until_any_ok,_} ->
+ {RestCs,Fun} = case get_tc_results(Status1) of
+ {Ok,_,_} when length(Ok) > 0 ->
+ {Cases1,ReportStop};
+ _ ->
+ {CopiedCases++Cases1,?void_fun}
+ end,
+ {true,EndStatus,RestCs,Fun};
+ {repeat_until_any_fail,_} ->
+ {RestCs,Fun} = case get_tc_results(Status1) of
+ {_,_,Fails} when length(Fails) > 0 ->
+ {Cases1,ReportStop};
+ _ ->
+ {CopiedCases++Cases1,?void_fun}
+ end,
+ {true,EndStatus,RestCs,Fun};
+ {repeat_until_all_fail,_} ->
+ {RestCs,Fun} = case get_tc_results(Status1) of
+ {[],_,_} ->
+ {Cases1,ReportStop};
+ _ ->
+ {CopiedCases++Cases1,?void_fun}
+ end,
+ {true,EndStatus,RestCs,Fun}
+ end
+ end,
+
+ ReportAbortRepeat = fun(What) when Repeating ->
+ print(minor, "~n*** Aborting repeat operation "
+ "(configuration case ~w)", [What]),
+ print(1, "Aborting repeat operation "
+ "(configuration case ~w)", [What]);
+ (_) -> ok
+ end,
+
+ CfgProps = if StartConf ->
+ if Shuffle == undefined ->
+ [{tc_group_properties,Props}];
+ true ->
+ [{tc_group_properties,[Shuffle|delete_shuffle(Props)]}]
+ end;
+ not StartConf ->
+ {TcOk,TcSkip,TcFail} = get_tc_results(Status1),
+ [{tc_group_properties,get_props(Mode0)},
+ {tc_group_result,[{ok,TcOk},{skipped,TcSkip},{failed,TcFail}]}]
+ end,
+ ActualCfg =
+ update_config(hd(Config), [{priv_dir,get(test_server_priv_dir)},
+ {data_dir,get_data_dir(Mod)}] ++ CfgProps),
+ CurrMode = curr_mode(Ref, Mode0, Mode),
+
+ ConfCaseResult = run_test_case(Ref, 0, Mod, Func, [ActualCfg], skip_init, target,
+ MultiplyTimetrap, CurrMode),
+
+ case ConfCaseResult of
+ {_,NewCfg,_} when Func == init_per_suite, is_list(NewCfg) ->
+ %% check that init_per_suite returned data on correct format
+ case lists:filter(fun({_,_}) -> false;
+ (_) -> true end, NewCfg) of
+ [] ->
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, [NewCfg|Config],
+ MultiplyTimetrap, Mode, Status2);
+ Bad ->
+ print(minor, "~n*** ~p returned bad elements in Config: ~p.~n",
+ [Func,Bad]),
+ Reason = {failed,{Mod,init_per_suite,bad_return}},
+ Cases2 = skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode,
+ delete_status(Ref, Status2))
+ end;
+ {_,NewCfg,_} when StartConf, is_list(NewCfg) ->
+ print_conf_time(ConfTime),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, [NewCfg|Config], MultiplyTimetrap, Mode, Status2);
+ {_,{framework_error,{FwMod,FwFunc},Reason},_} ->
+ print(minor, "~n*** ~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
+ print(1, "~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
+ exit(framework_error);
+ {_,Fail,_} when element(1,Fail) == 'EXIT';
+ element(1,Fail) == timetrap_timeout;
+ element(1,Fail) == failed ->
+ {Cases2,Config1} =
+ if StartConf ->
+ ReportAbortRepeat(failed),
+ print(minor, "~n*** ~p failed.~n"
+ " Skipping all cases.", [Func]),
+ Reason = {failed,{Mod,Func,Fail}},
+ {skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),Config};
+ not StartConf ->
+ ReportRepeatStop(),
+ print_conf_time(ConfTime),
+ {Cases,tl(Config)}
+ end,
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases2, Config1, MultiplyTimetrap, Mode,
+ delete_status(Ref, Status2));
+ {died,Why,_} when Func == init_per_suite ->
+ print(minor, "~n*** Unexpected exit during init_per_suite.~n", []),
+ Reason = {failed,{Mod,init_per_suite,Why}},
+ Cases2 = skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode,
+ delete_status(Ref, Status2));
+ {_,{Skip,Reason},_} when StartConf and ((Skip==skip) or (Skip==skipped)) ->
+ ReportAbortRepeat(skipped),
+ print(minor, "~n*** ~p skipped.~n"
+ " Skipping all cases.", [Func]),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ Config, MultiplyTimetrap, Mode,
+ delete_status(Ref, Status2));
+ {_,{skip_and_save,Reason,_SavedConfig},_} when StartConf ->
+ ReportAbortRepeat(skipped),
+ print(minor, "~n*** ~p skipped.~n"
+ " Skipping all cases.", [Func]),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ Config, MultiplyTimetrap, Mode,
+ delete_status(Ref, Status2));
+ {_,_Other,_} when Func == init_per_suite ->
+ print(minor, "~n*** init_per_suite failed to return a Config list.~n", []),
+ Reason = {failed,{Mod,init_per_suite,bad_return}},
+ Cases2 = skip_cases_upto(Ref, Cases, Reason, conf, CurrMode),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode,
+ delete_status(Ref, Status2));
+ {_,_Other,_} when StartConf ->
+ print_conf_time(ConfTime),
+ set_io_buffering(IOHandler),
+ ReportRepeatStop(),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, [hd(Config)|Config], MultiplyTimetrap,
+ Mode, Status2);
+
+ {_,_EndConfRetVal,Opts} ->
+ %% check if return_group_result is set (ok, skipped or failed) and
+ %% if so return the value to the group "above" so that result may be
+ %% used for evaluating repeat_until_*
+ Status3 =
+ case lists:keysearch(return_group_result, 1, Opts) of
+ {value,{_,GroupResult}} ->
+ update_status(GroupResult, group_result, Func,
+ delete_status(Ref, Status2));
+ false ->
+ delete_status(Ref, Status2)
+ end,
+ print_conf_time(ConfTime),
+ ReportRepeatStop(),
+ set_io_buffering(IOHandler),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, tl(Config), MultiplyTimetrap, Mode, Status3)
+ end;
+
+run_test_cases_loop([{make,Ref,{Mod,Func,Args}}|Cases0], Config, MultiplyTimetrap, Mode, Status) ->
+ case run_test_case(Ref, 0, Mod, Func, Args, skip_init, host, MultiplyTimetrap) of
+ {_,Why={'EXIT',_},_} ->
+ print(minor, "~n*** ~p failed.~n"
+ " Skipping all cases.", [Func]),
+ Reason = {failed,{Mod,Func,Why}},
+ Cases = skip_cases_upto(Ref, Cases0, Reason, conf, Mode),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status);
+ {_,_Whatever,_} ->
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases0, Config, MultiplyTimetrap, Mode, Status)
+ end;
+
+run_test_cases_loop([{conf,_Ref,_Props,_X}=Conf|_Cases0],
+ Config, _MultiplyTimetrap, _Mode, _Status) ->
+ erlang:error(badarg, [Conf,Config]);
+
+run_test_cases_loop([{Mod,Case}|Cases], Config, MultiplyTimetrap, Mode, Status) ->
+ ActualCfg =
+ update_config(hd(Config), [{priv_dir,get(test_server_priv_dir)},
+ {data_dir,get_data_dir(Mod)}]),
+ run_test_cases_loop([{Mod,Case,[ActualCfg]}|Cases], Config,
+ MultiplyTimetrap, Mode, Status);
+
+run_test_cases_loop([{Mod,Func,Args}|Cases], Config, MultiplyTimetrap, Mode, Status) ->
+ Num = put(test_server_case_num, get(test_server_case_num)+1),
+ %% check the current execution mode and save info about the case if
+ %% detected that printouts to common log files is handled later
+ case check_prop(parallel, Mode) of
+ false ->
+ case get(test_server_common_io_handler) of
+ undefined ->
+ %% io printouts are written to straight to file
+ ok;
+ _ ->
+ %% io messages are buffered, put test case in queue
+ queue_test_case_io(undefined, self(), Num+1, Mod, Func)
+ end;
+ _ ->
+ ok
+ end,
+ case run_test_case(undefined, Num+1, Mod, Func, Args,
+ run_init, target, MultiplyTimetrap, Mode) of
+ %% callback to framework module failed, exit immediately
+ {_,{framework_error,{FwMod,FwFunc},Reason},_} ->
+ print(minor, "~n*** ~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
+ print(1, "~p failed in ~p. Reason: ~p~n", [FwMod,FwFunc,Reason]),
+ stop_minor_log_file(),
+ exit(framework_error);
+ %% sequential execution of test case finished
+ {Time,RetVal,_} ->
+ {Failed,Status1} =
+ case Time of
+ died ->
+ {true,update_status(failed, Mod, Func, Status)};
+ _ when is_tuple(RetVal) ->
+ case element(1, RetVal) of
+ R when R=='EXIT'; R==failed ->
+ {true,update_status(failed, Mod, Func, Status)};
+ R when R==skip; R==skipped ->
+ {false,update_status(skipped, Mod, Func, Status)};
+ _ ->
+ {false,update_status(ok, Mod, Func, Status)}
+ end;
+ _ ->
+ {false,update_status(ok, Mod, Func, Status)}
+ end,
+ case check_prop(sequence, Mode) of
+ false ->
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status1);
+ Ref ->
+ %% the case is in a sequence; we must check the result and
+ %% determine if the following cases should run or be skipped
+ if not Failed -> % proceed with next case
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status1);
+ true -> % skip rest of cases in sequence
+ print(minor, "~n*** ~p failed.~n"
+ " Skipping all other cases in sequence.", [Func]),
+ Reason = {failed,{Mod,Func}},
+ Cases2 = skip_cases_upto(Ref, Cases, Reason, tc, Mode),
+ stop_minor_log_file(),
+ run_test_cases_loop(Cases2, Config, MultiplyTimetrap, Mode, Status1)
+ end
+ end;
+ %% the test case is being executed in parallel with the main process (and
+ %% other test cases) and Pid is the dedicated process executing the case
+ Pid ->
+ %% io from Pid will be buffered in the main process inbox and handled
+ %% later, so we have to save info about the case
+ queue_test_case_io(undefined, Pid, Num+1, Mod, Func),
+ run_test_cases_loop(Cases, Config, MultiplyTimetrap, Mode, Status)
+ end;
+
+%% TestSpec processing finished
+run_test_cases_loop([], _Config, _MultiplyTimetrap, _, _) ->
+ ok.
+
+%%--------------------------------------------------------------------
+%% various help functions
+
+new_status(Ref, Status) ->
+ [{Ref,{{[],[],[]},[]}} | Status].
+
+new_status(Ref, CopiedCases, Status) ->
+ [{Ref,{{[],[],[]},CopiedCases}} | Status].
+
+delete_status(Ref, Status) ->
+ lists:keydelete(Ref, 1, Status).
+
+update_status(ok, Mod, Func, [{Ref,{{Ok,Skip,Fail},Cs}} | Status]) ->
+ [{Ref,{{Ok++[{Mod,Func}],Skip,Fail},Cs}} | Status];
+
+update_status(skipped, Mod, Func, [{Ref,{{Ok,Skip,Fail},Cs}} | Status]) ->
+ [{Ref,{{Ok,Skip++[{Mod,Func}],Fail},Cs}} | Status];
+
+update_status(failed, Mod, Func, [{Ref,{{Ok,Skip,Fail},Cs}} | Status]) ->
+ [{Ref,{{Ok,Skip,Fail++[{Mod,Func}]},Cs}} | Status];
+
+update_status(_, _, _, []) ->
+ [].
+
+update_status(Ref, {Ok,Skip,Fail}, [{Ref,{{Ok0,Skip0,Fail0},Cs}} | Status]) ->
+ [{Ref,{{Ok0++Ok,Skip0++Skip,Fail0++Fail},Cs}} | Status].
+
+get_copied_cases([{_,{_,Cases}} | _Status]) ->
+ Cases.
+
+get_tc_results([{_,{OkSkipFail,_}} | _Status]) ->
+ OkSkipFail.
+
+conf(Ref, Props) ->
+ {Ref,Props,?now}.
+
+curr_ref([{Ref,_Props,_}|_]) ->
+ Ref;
+curr_ref([]) ->
+ undefined.
+
+curr_mode(Ref, Mode0, Mode1) ->
+ case curr_ref(Mode1) of
+ Ref -> Mode1;
+ _ -> Mode0
+ end.
+
+get_props([{_,Props,_} | _]) ->
+ Props;
+get_props([]) ->
+ [].
+
+check_prop(_Attrib, []) ->
+ false;
+check_prop(Attrib, [{Ref,Props,_}|_]) ->
+ case lists:member(Attrib, Props) of
+ true -> Ref;
+ false -> false
+ end.
+
+check_props(Attrib, Mode) ->
+ case [R || {R,Ps,_} <- Mode, lists:member(Attrib, Ps)] of
+ [] -> false;
+ [Ref|_] -> Ref
+ end.
+
+get_name([{_Ref,Props,_}|_]) ->
+ proplists:get_value(name, Props);
+get_name([]) ->
+ undefined.
+
+conf_start(Ref, Mode) ->
+ case lists:keysearch(Ref, 1, Mode) of
+ {value,{_,_,T}} -> T;
+ false -> 0
+ end.
+
+get_data_dir(Mod) ->
+ case code:which(Mod) of
+ non_existing ->
+ print(12, "The module ~p is not loaded", [Mod]),
+ [];
+ FullPath ->
+ filename:dirname(FullPath) ++ "/" ++ cast_to_list(Mod) ++
+ ?data_dir_suffix
+ end.
+
+print_conf_time(0) ->
+ ok;
+print_conf_time(ConfTime) ->
+ print(major, "=group_time ~.3fs", [ConfTime]),
+ print(minor, "~n=== Total execution time of group: ~.3fs~n", [ConfTime]).
+
+print_props(_, []) ->
+ ok;
+print_props(true, Props) ->
+ print(major, "=group_props ~p", [Props]),
+ print(minor, "Group properties: ~p~n", [Props]);
+print_props(_, _) ->
+ ok.
+
+%% repeat N times: {repeat,N}
+%% repeat N times or until all successful: {repeat_until_all_ok,N}
+%% repeat N times or until at least one successful: {repeat_until_any_ok,N}
+%% repeat N times or until at least one case fails: {repeat_until_any_fail,N}
+%% repeat N times or until all fails: {repeat_until_all_fail,N}
+%% N = integer() | forever
+get_repeat(Props) ->
+ get_prop([repeat,repeat_until_all_ok,repeat_until_any_ok,
+ repeat_until_any_fail,repeat_until_all_fail], forever, Props).
+
+update_repeat(Props) ->
+ case get_repeat(Props) of
+ undefined ->
+ Props;
+ {RepType,N} ->
+ Props1 =
+ if N == forever ->
+ [{RepType,N}|lists:keydelete(RepType, 1, Props)];
+ N < 2 ->
+ lists:keydelete(RepType, 1, Props);
+ N >= 2 ->
+ [{RepType,N-1}|lists:keydelete(RepType, 1, Props)]
+ end,
+ %% if shuffle is used in combination with repeat, a new
+ %% seed shouldn't be set every new turn
+ case get_shuffle(Props1) of
+ undefined ->
+ Props1;
+ _ ->
+ [{shuffle,repeated}|delete_shuffle(Props1)]
+ end
+ end.
+
+get_shuffle(Props) ->
+ get_prop([shuffle], ?now, Props).
+
+delete_shuffle(Props) ->
+ delete_prop([shuffle], Props).
+
+%% Return {Item,Value} if found, else if Item alone
+%% is found, return {Item,Default}
+get_prop([Item|Items], Default, Props) ->
+ case lists:keysearch(Item, 1, Props) of
+ {value,R} ->
+ R;
+ false ->
+ case lists:member(Item, Props) of
+ true ->
+ {Item,Default};
+ false ->
+ get_prop(Items, Default, Props)
+ end
+ end;
+get_prop([], _Def, _Props) ->
+ undefined.
+
+delete_prop([Item|Items], Props) ->
+ Props1 = lists:delete(Item, lists:keydelete(Item, 1, Props)),
+ delete_prop(Items, Props1);
+delete_prop([], Props) ->
+ Props.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% shuffle_cases(Ref, Cases, Seed) -> Cases1
+%%
+%% Shuffles the order of Cases.
+
+shuffle_cases(Ref, Cases, undefined) ->
+ shuffle_cases(Ref, Cases, ?now);
+
+shuffle_cases(Ref, [{conf,Ref,_,_}=Start | Cases], Seed) ->
+ {N,CasesToShuffle,Rest} = cases_to_shuffle(Ref, Cases),
+ ShuffledCases = random_order(N, random:uniform_s(N, Seed), CasesToShuffle, []),
+ [Start|ShuffledCases] ++ Rest.
+
+cases_to_shuffle(Ref, Cases) ->
+ cases_to_shuffle(Ref, Cases, 1, []).
+
+cases_to_shuffle(Ref, [{conf,Ref,_,_} | _]=Cs, N, Ix) -> % end
+ {N-1,Ix,Cs};
+cases_to_shuffle(Ref, [{skip_case,{_,Ref,_,_}} | _]=Cs, N, Ix) -> % end
+ {N-1,Ix,Cs};
+
+cases_to_shuffle(Ref, [{conf,Ref1,_,_}=C | Cs], N, Ix) -> % nested group
+ {Cs1,Rest} = get_subcases(Ref1, Cs, []),
+ cases_to_shuffle(Ref, Rest, N+1, [{N,[C|Cs1]} | Ix]);
+cases_to_shuffle(Ref, [{skip_case,{_,Ref1,_,_}}=C | Cs], N, Ix) -> % nested group
+ {Cs1,Rest} = get_subcases(Ref1, Cs, []),
+ cases_to_shuffle(Ref, Rest, N+1, [{N,[C|Cs1]} | Ix]);
+
+cases_to_shuffle(Ref, [C | Cs], N, Ix) ->
+ cases_to_shuffle(Ref, Cs, N+1, [{N,[C]} | Ix]).
+
+get_subcases(SubRef, [{conf,SubRef,_,_}=C | Cs], SubCs) ->
+ {lists:reverse([C|SubCs]),Cs};
+get_subcases(SubRef, [{skip_case,{_,SubRef,_,_}}=C | Cs], SubCs) ->
+ {lists:reverse([C|SubCs]),Cs};
+get_subcases(SubRef, [C|Cs], SubCs) ->
+ get_subcases(SubRef, Cs, [C|SubCs]).
+
+random_order(1, {_Pos,Seed}, [{_Ix,CaseOrGroup}], Shuffled) ->
+ %% save current seed to be used if test cases are repeated
+ put(test_server_curr_random_seed, Seed),
+ Shuffled++CaseOrGroup;
+random_order(N, {Pos,NewSeed}, IxCases, Shuffled) ->
+ {First,[{_Ix,CaseOrGroup}|Rest]} = lists:split(Pos-1, IxCases),
+ random_order(N-1, random:uniform_s(N-1, NewSeed),
+ First++Rest, Shuffled++CaseOrGroup).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% skip_case(Type, Ref, CaseNum, Case, Comment, SendSync) -> {Mod,Func}
+%%
+%% Prints info about a skipped case in the major and html log files.
+%% SendSync determines if start and finished messages must be sent so
+%% that the printouts can be buffered and handled in order with io from
+%% parallel processes.
+
+skip_case(Type, Ref, CaseNum, Case, Comment, SendSync) ->
+ skip_case(Type, Ref, CaseNum, Case, Comment, SendSync, []).
+
+skip_case(Type, Ref, CaseNum, Case, Comment, SendSync, Mode) ->
+ MF = {Mod,Func} = case Case of
+ {M,F,_A} -> {M,F};
+ {M,F} -> {M,F}
+ end,
+ if SendSync ->
+ queue_test_case_io(Ref, self(), CaseNum, Mod, Func),
+ self() ! {started,Ref,self(),CaseNum,Mod,Func},
+ skip_case1(Type, CaseNum, Mod, Func, Comment, Mode),
+ self() ! {finished,Ref,self(),CaseNum,Mod,Func,skipped,{0,skipped,[]}};
+ not SendSync ->
+ skip_case1(Type, CaseNum, Mod, Func, Comment, Mode)
+ end,
+ MF.
+
+skip_case1(Type, CaseNum, Mod, Func, Comment, Mode) ->
+ {{Col0,Col1},_} = get_font_style((CaseNum > 0), Mode),
+ ResultCol = if Type == auto -> "#ffcc99";
+ Type == user -> "#ff9933"
+ end,
+
+ Comment1 = reason_to_string(Comment),
+
+ print(major, "~n=case ~p:~p", [Mod,Func]),
+ print(major, "=started ~s", [lists:flatten(timestamp_get(""))]),
+ print(major, "=result skipped: ~s", [Comment1]),
+ print(2,"*** Skipping test case #~w ~p ***", [CaseNum,{Mod,Func}]),
+ print(html,
+ "<tr valign=top>"
+ "<td>" ++ Col0 ++ "~s" ++ Col1 ++ "</td>"
+ "<td>" ++ Col0 ++ "~p" ++ Col1 ++ "</td>"
+ "<td>" ++ Col0 ++ "~p" ++ Col1 ++ "</td>"
+ "<td>" ++ Col0 ++ "< >" ++ Col1 ++ "</td>"
+ "<td>" ++ Col0 ++ "0.000s" ++ Col1 ++ "</td>"
+ "<td><font color=\"~s\">SKIPPED</font></td>"
+ "<td>~s</td></tr>\n",
+ [num2str(CaseNum),Mod,Func,ResultCol,Comment1]),
+ if CaseNum > 0 ->
+ {US,AS} = get(test_server_skipped),
+ case Type of
+ user -> put(test_server_skipped, {US+1,AS});
+ auto -> put(test_server_skipped, {US,AS+1})
+ end,
+ put(test_server_case_num, CaseNum);
+ true -> % conf
+ ok
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% skip_cases_upto(Ref, Cases, Reason, Origin, Mode) -> Cases1
+%%
+%% Mark all cases tagged with Ref as skipped.
+
+skip_cases_upto(Ref, Cases, Reason, Origin, Mode) ->
+ {_,Modified,Rest} = modify_cases_upto(Ref, {skip,Reason,Origin,Mode}, Cases),
+ Modified++Rest.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% copy_cases(OrigRef, NewRef, Cases) -> Cases1
+%%
+%% Copy the test cases marked with OrigRef and tag the copies with NewRef.
+%% The start conf case copy will also get its repeat property updated.
+
+copy_cases(OrigRef, NewRef, Cases) ->
+ {Original,Altered,Rest} = modify_cases_upto(OrigRef, {copy,NewRef}, Cases),
+ {Altered,Original++Altered++Rest}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% modify_cases_upto(Ref, ModOp, Cases) -> {Original,Altered,Remaining}
+%%
+%% ModOp = {skip,Reason,Origin,Mode} | {copy,NewRef}
+%% Origin = conf | tc
+%%
+%% Modifies Cases according to ModOp and returns the original elements,
+%% the modified versions of these elements and the remaining (untouched)
+%% cases.
+
+modify_cases_upto(Ref, ModOp, Cases) ->
+ {Original,Altered,Rest} = modify_cases_upto(Ref, ModOp, Cases, [], []),
+ {lists:reverse(Original),lists:reverse(Altered),Rest}.
+
+%% first case of a copy operation is the start conf
+modify_cases_upto(Ref, {copy,NewRef}=Op, [{conf,Ref,Props,MF}=C|T], Orig, Alt) ->
+ modify_cases_upto(Ref, Op, T, [C|Orig], [{conf,NewRef,update_repeat(Props),MF}|Alt]);
+
+modify_cases_upto(Ref, ModOp, Cases, Orig, Alt) ->
+ %% we need to check if there's an end conf case with the
+ %% same ref in the list, if not, this *is* an end conf case
+ case lists:any(fun({_,R,_,_}) when R == Ref -> true;
+ ({_,R,_}) when R == Ref -> true;
+ ({skip_case,{_,R,_,_}}) when R == Ref -> true;
+ (_) -> false
+ end, Cases) of
+ true ->
+ modify_cases_upto1(Ref, ModOp, Cases, Orig, Alt);
+ false ->
+ {[],[],Cases}
+ end.
+
+%% modify_cases_upto1(Ref, ModOp, Cases, OrigAcc, AltAcc) ->
+%%     {OrigAcc1,AltAcc1,RemainingCases}
+%%
+%% Scans Cases for the end conf (or make) case carrying Ref.
+%% ModOp == {skip,Reason,Type,Mode}: cases up to and including the end
+%% conf are turned into auto_skip_case entries in AltAcc.
+%% ModOp == {copy,NewRef}: cases are kept in OrigAcc and copied into
+%% AltAcc, with conf/make cases getting the new reference.
+%% NOTE(review): both accumulators are built in reverse traversal
+%% order -- presumably the caller compensates; confirm there.
+
+%% next case is a conf with same ref, must be end conf = we're done
+modify_cases_upto1(Ref, {skip,Reason,conf,Mode}, [{conf,Ref,_Props,MF}|T], Orig, Alt) ->
+    {Orig,[{auto_skip_case,{conf,Ref,MF,Reason},Mode}|Alt],T};
+modify_cases_upto1(Ref, {copy,NewRef}, [{conf,Ref,Props,MF}=C|T], Orig, Alt) ->
+    %% the copied end conf gets the new ref and an updated repeat property
+    {[C|Orig],[{conf,NewRef,update_repeat(Props),MF}|Alt],T};
+
+%% we've skipped all remaining cases in a sequence
+modify_cases_upto1(Ref, {skip,_,tc,_}, [{conf,Ref,_Props,_MF}|_]=Cs, Orig, Alt) ->
+    %% a tc-level skip does not touch the end conf; leave it in the remainder
+    {Orig,Alt,Cs};
+
+%% next is a make case
+modify_cases_upto1(Ref, {skip,Reason,_,Mode}, [{make,Ref,MF}|T], Orig, Alt) ->
+    {Orig,[{auto_skip_case,{make,Ref,MF,Reason},Mode}|Alt],T};
+modify_cases_upto1(Ref, {copy,NewRef}, [{make,Ref,MF}=M|T], Orig, Alt) ->
+    {[M|Orig],[{make,NewRef,MF}|Alt],T};
+
+%% next case is a user skipped end conf with the same ref = we're done
+modify_cases_upto1(Ref, {skip,Reason,_,Mode}, [{skip_case,{Type,Ref,MF,_Cmt}}|T], Orig, Alt) ->
+    {Orig,[{auto_skip_case,{Type,Ref,MF,Reason},Mode}|Alt],T};
+modify_cases_upto1(Ref, {copy,NewRef}, [{skip_case,{Type,Ref,MF,Cmt}}=C|T], Orig, Alt) ->
+    {[C|Orig],[{skip_case,{Type,NewRef,MF,Cmt}}|Alt],T};
+
+%% next is a skip_case, could be one test case or 'all' in suite, we must proceed
+modify_cases_upto1(Ref, ModOp, [{skip_case,{_F,_Cmt}}=MF|T], Orig, Alt) ->
+    modify_cases_upto1(Ref, ModOp, T, [MF|Orig], [MF|Alt]);
+
+%% next is a normal case (possibly in a sequence), mark as skipped, or copy, and proceed
+modify_cases_upto1(Ref, {skip,Reason,_,Mode}=Op, [{_M,_F}=MF|T], Orig, Alt) ->
+    modify_cases_upto1(Ref, Op, T, Orig, [{auto_skip_case,{MF,Reason},Mode}|Alt]);
+modify_cases_upto1(Ref, CopyOp, [{_M,_F}=MF|T], Orig, Alt) ->
+    modify_cases_upto1(Ref, CopyOp, T, [MF|Orig], [MF|Alt]);
+
+%% next is some other case, ignore or copy
+modify_cases_upto1(Ref, {skip,_,_,_}=Op, [_|T], Orig, Alt) ->
+    modify_cases_upto1(Ref, Op, T, Orig, Alt);
+modify_cases_upto1(Ref, CopyOp, [C|T], Orig, Alt) ->
+    modify_cases_upto1(Ref, CopyOp, T, [C|Orig], [C|Alt]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_io_buffering(IOHandler) -> PrevIOHandler
+%%
+%% Stores which process (always the main process) is currently
+%% buffering io printouts from parallel test case processes (and
+%% possibly the main process itself). The default 'undefined' means io
+%% is not buffered but written directly to file (see print/3).
+%% Returns the previously installed handler.
+
+set_io_buffering(IOHandler) ->
+    Previous = put(test_server_common_io_handler, IOHandler),
+    Previous.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% queue_test_case_io(Ref, Pid, Num, Mod, Func) -> ok
+%%
+%% Registers a test case whose io is being buffered: a parallel test
+%% case, or a (conf or normal) case belonging to a group nested under
+%% a parallel group. The queue is consumed once buffering is disabled,
+%% see run_test_cases_loop/4 and handle_test_case_io_and_status/0.
+
+queue_test_case_io(Ref, Pid, Num, Mod, Func) ->
+    Queued = get(test_server_queued_io),
+    %% new entries must go last: the queue order mirrors the order in
+    %% which the test cases were started, which is essential later
+    put(test_server_queued_io, Queued ++ [{Ref,Pid,Num,Mod,Func}]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% wait_for_cases(Ref) -> {Ok,Skipped,Failed}
+%%
+%% Waits, at the end of a nested parallel group, for its test cases to
+%% terminate (test cases never execute in parallel with the end conf
+%% case of their group). For a finished *top level* parallel group,
+%% buffered io is instead dealt with by handle_test_case_io_and_status/0.
+
+wait_for_cases(Ref) ->
+    case get(test_server_queued_io) of
+        [] ->
+            {[],[],[]};
+        Queued ->
+            %% drop everything queued before this group, and the group
+            %% start entry itself
+            NotThisGroup = fun({R,_,_,_,_}) when R == Ref -> false;
+                              (_) -> true
+                           end,
+            [_GroupStart|TCs] = lists:dropwhile(NotThisGroup, Queued),
+            wait_and_resend(Ref, TCs, [], [], [])
+    end.
+
+%% wait_and_resend(Ref, Cases, Ok, Skip, Fail) -> {Ok,Skipped,Failed}
+%%
+%% Waits for each queued parallel test case to report its result,
+%% resending every 'finished' message to self() (the main process) so
+%% the buffered io can be processed afterwards. Entries tagged with a
+%% different reference belong to a nested group and are skipped over.
+wait_and_resend(Ref, [{OtherRef,_,0,_,_}|Ps],
+                Ok,Skip,Fail) when is_reference(OtherRef),
+                                   OtherRef /= Ref ->
+    %% ignore cases that belong to nested group
+    Ps1 = rm_cases_upto(OtherRef, Ps),
+    wait_and_resend(Ref, Ps1, Ok,Skip,Fail);
+
+wait_and_resend(Ref, [{_,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
+    receive
+        {finished,_Ref,CurrPid,CaseNum,Mod,Func,Result,_RetVal} = Msg ->
+            %% resend message to main process so that it can be used
+            %% to handle buffered io messages later
+            self() ! Msg,
+            MF = {Mod,Func},
+            {Ok1,Skip1,Fail1} =
+                case Result of
+                    ok -> {[MF|Ok],Skip,Fail};
+                    skipped -> {Ok,[MF|Skip],Fail};
+                    failed -> {Ok,Skip,[MF|Fail]}
+                end,
+            wait_and_resend(Ref, Ps, Ok1,Skip1,Fail1);
+        {'EXIT',CurrPid,Reason} when Reason /= normal ->
+            %% unexpected termination of test case process; the match
+            %% below doubles as an assertion that CurrPid is queued
+            {value,{_,_,CaseNum,Mod,Func}} = lists:keysearch(CurrPid, 2, Cases),
+            print(1, "Error! Process for test case #~p (~p:~p) died! Reason: ~p",
+                  [CaseNum, Mod, Func, Reason]),
+            exit({unexpected_termination,{CaseNum,Mod,Func},{CurrPid,Reason}})
+    end;
+
+wait_and_resend(_, [], Ok,Skip,Fail) ->
+    %% accumulators were built in reverse; restore start order
+    {lists:reverse(Ok),lists:reverse(Skip),lists:reverse(Fail)}.
+
+%% Drops queued io entries up to and including the (end conf) entry
+%% tagged with Ref, returning the remaining entries.
+rm_cases_upto(Ref, Ps) ->
+    BeforeRef = fun({R,_,0,_,_}) when R == Ref -> false;
+                   (_) -> true
+                end,
+    [_EndConf|Rest] = lists:dropwhile(BeforeRef, Ps),
+    Rest.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% handle_test_case_io_and_status() -> {Ok,Skipped,Failed}
+%%
+%% Each parallel test case process prints to its own minor log file during
+%% execution. The common log files (major, html etc) must however be
+%% written to sequentially. The test case processes send print requests
+%% to the main (starting) process (the same process executing
+%% run_test_cases_loop/4), which handles these requests in the same
+%% order that the test case processes were started.
+%%
+%% An io session is always started with a {started,Ref,Pid,Num,Mod,Func}
+%% message and terminated with {finished,Ref,Pid,Num,Mod,Func,Result,RetVal}.
+%% The result shipped with the finished message from a parallel process
+%% is used to update status data of the current test run. An 'EXIT'
+%% message from each parallel test case process (after finishing and
+%% terminating) is also received and handled here.
+%%
+%% During execution of a parallel group, any cases (conf or normal)
+%% belonging to a nested group will also get its io printouts buffered.
+%% This is necessary to get the major and html log files written in
+%% correct sequence. This function handles also the print messages
+%% generated by nested group cases that have been executed sequentially
+%% by the main process (note that these cases do not generate 'EXIT'
+%% messages, only 'start', 'print' and 'finished' messages).
+%%
+%% See the header comment for run_test_cases_loop/4 for more
+%% info about IO handling.
+%%
+%% Note: It is important that the type of messages handled here
+%% do not get consumed by test_server:run_test_case_msgloop/5
+%% during the test case execution (e.g. in the catch clause of
+%% the receive)!
+
+handle_test_case_io_and_status() ->
+    case get(test_server_queued_io) of
+        [] ->
+            {[],[],[]};
+        Cases ->
+            %% Cases = [{Ref,Pid,CaseNum,Mod,Func} | ...]
+            Result = handle_io_and_exit_loop([], Cases, [],[],[]),
+            Main = self(),
+            %% flush normal exit messages from the (parallel) test case
+            %% processes; the main process itself is excluded
+            lists:foreach(fun({_,Pid,_,_,_}) when Pid /= Main ->
+                                  receive
+                                      {'EXIT',Pid,normal} -> ok
+                                  after
+                                      %% don't hang if a signal is missing
+                                      1000 -> ok
+                                  end;
+                             (_) ->
+                                  ok
+                          end, Cases),
+            Result
+    end.
+
+%% handle_io_and_exit_loop(Refs, Cases, Ok, Skip, Fail) -> {Ok,Skipped,Failed}
+%%
+%% Processes the queued io sessions in the order the cases started.
+%% Refs is a stack of references for currently open nested subgroups.
+
+%% Handle cases (without Ref) that belong to the top parallel group (i.e. when Refs = [])
+handle_io_and_exit_loop([], [{undefined,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
+    %% retrieve the start message for the current io session (= testcase)
+    receive
+        {started,_,CurrPid,CaseNum,Mod,Func} ->
+            {Ok1,Skip1,Fail1} =
+                case handle_io_and_exits(self(), CurrPid, CaseNum, Mod, Func, Cases) of
+                    {ok,MF} -> {[MF|Ok],Skip,Fail};
+                    {skipped,MF} -> {Ok,[MF|Skip],Fail};
+                    {failed,MF} -> {Ok,Skip,[MF|Fail]}
+                end,
+            handle_io_and_exit_loop([], Ps, Ok1,Skip1,Fail1)
+    after
+        1000 ->
+            exit({testcase_failed_to_start,Mod,Func})
+    end;
+
+%% Handle cases that belong to groups nested under top parallel group
+handle_io_and_exit_loop(Refs, [{Ref,CurrPid,CaseNum,Mod,Func}|Ps] = Cases, Ok,Skip,Fail) ->
+    receive
+        {started,_,CurrPid,CaseNum,Mod,Func} ->
+            %% nested cases don't affect the top group status counters here
+            handle_io_and_exits(self(), CurrPid, CaseNum, Mod, Func, Cases),
+            Refs1 =
+                case Refs of
+                    [Ref|Rs] -> % must be end conf case for subgroup
+                        Rs;
+                    _ when is_reference(Ref) -> % must be start of new subgroup
+                        [Ref|Refs];
+                    _ -> % must be normal subgroup testcase
+                        Refs
+                end,
+            handle_io_and_exit_loop(Refs1, Ps, Ok,Skip,Fail)
+    after
+        1000 ->
+            exit({testcase_failed_to_start,Mod,Func})
+    end;
+
+handle_io_and_exit_loop(_, [], Ok,Skip,Fail) ->
+    {lists:reverse(Ok),lists:reverse(Skip),lists:reverse(Fail)}.
+
+%% handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases) ->
+%%     {Result,{Mod,Func}}
+%%
+%% Consumes buffered print messages for one io session until its
+%% 'finished' message arrives. Results from parallel processes update
+%% the global status counters; an abnormal 'EXIT' from any test case
+%% process aborts the run.
+handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases) ->
+    receive
+        %% end of io session from test case executed by main process
+        {finished,_,Main,CaseNum,Mod,Func,Result,_RetVal} ->
+            {Result,{Mod,Func}};
+        %% end of io session from test case executed by parallel process
+        {finished,_,CurrPid,CaseNum,Mod,Func,Result,RetVal} ->
+            case Result of
+                ok ->
+                    put(test_server_ok, get(test_server_ok)+1);
+                failed ->
+                    put(test_server_failed, get(test_server_failed)+1);
+                skipped ->
+                    %% RetVal decides user vs auto skip
+                    SkipCounters =
+                        update_skip_counters(RetVal, get(test_server_skipped)),
+                    put(test_server_skipped, SkipCounters)
+            end,
+            {Result,{Mod,Func}};
+
+        %% print to common log file
+        {print,CurrPid,Detail,Msg} ->
+            output({Detail,Msg}, internal),
+            handle_io_and_exits(Main, CurrPid, CaseNum, Mod, Func, Cases);
+
+        %% unexpected termination of test case process
+        {'EXIT',TCPid,Reason} when Reason /= normal ->
+            {value,{_,_,Num,M,F}} = lists:keysearch(TCPid, 2, Cases),
+            print(1, "Error! Process for test case #~p (~p:~p) died! Reason: ~p",
+                  [Num, M, F, Reason]),
+            exit({unexpected_termination,{Num,M,F},{TCPid,Reason}})
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% run_test_case(Ref, Num, Mod, Func, Args, RunInit,
+%%               Where, MultiplyTimetrap, Mode) -> RetVal
+%%
+%% Creates the minor log file and inserts some test case specific headers
+%% and footers into the log files. If a remote target is used, the test
+%% suite (binary) and the content of data_dir is sent. Then the test case
+%% is executed and the result is printed to the log files (also info
+%% about lingering processes & slave nodes in the system is presented).
+%%
+%% RunInit decides if the per test case init is to be run (true for all
+%% but conf cases).
+%%
+%% Where specifies if the test case should run on target or on the host.
+%% (Note that 'make' test cases always run on host).
+%%
+%% Mode specifies if the test case should be executed by a dedicated,
+%% parallel, process rather than sequentially by the main process. If
+%% the former, the new process is spawned and the dictionary of the main
+%% process is copied to the test case process.
+%%
+%% RetVal is the result of executing the test case. It contains info
+%% about the execution time and the return value of the test case function.
+
+%% arity-8 wrapper: no dictionary to copy (State == []) and no exec
+%% mode (Mode == []); always executed by the calling (main) process
+run_test_case(Ref, Num, Mod, Func, Args, RunInit, Where, MultiplyTimetrap) ->
+    file:set_cwd(filename:dirname(get(test_server_dir))),
+    run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+                   MultiplyTimetrap, [], [], self()).
+
+%% arity-9 variant with an execution Mode: conf cases
+%% (RunInit == skip_init) always run on the main process; a case in a
+%% parallel group runs on a newly spawned, linked process which gets a
+%% copy of the main process dictionary
+run_test_case(Ref, Num, Mod, Func, Args, skip_init, Where, MultiplyTimetrap, Mode) ->
+    %% a conf case is always executed by the main process
+    run_test_case1(Ref, Num, Mod, Func, Args, skip_init, Where,
+                   MultiplyTimetrap, [], Mode, self());
+
+run_test_case(Ref, Num, Mod, Func, Args, RunInit, Where, MultiplyTimetrap, Mode) ->
+    file:set_cwd(filename:dirname(get(test_server_dir))),
+    case check_prop(parallel, Mode) of
+        false ->
+            %% this is a sequential test case
+            run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+                           MultiplyTimetrap, [], Mode, self());
+        _Ref ->
+            %% this a parallel test case, spawn the new process
+            Main = self(),
+            {dictionary,State} = process_info(self(), dictionary),
+            spawn_link(fun() ->
+                               run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+                                              MultiplyTimetrap, State, Mode, Main)
+                       end)
+    end.
+
+%% run_test_case1/11: the actual test case runner. Executes on the
+%% main process or, for parallel cases, on a spawned process — in that
+%% case State is the main process dictionary to inherit, and io is
+%% buffered by sending 'started'/'finished' messages to Main.
+%% Returns {Time,RetVal,Opts}.
+%% Fixes vs original: slave-node warning message had "after testcase.
+%% Tried to killed them" (missing space, grammar); also reuses the
+%% already-computed MinorBase for the "=logfile" line.
+run_test_case1(Ref, Num, Mod, Func, Args, RunInit, Where,
+               MultiplyTimetrap, State, Mode, Main) ->
+    %% if this runs on a parallel test case process,
+    %% copy the dictionary from the main process
+    do_if_parallel(Main, fun() -> process_flag(trap_exit, true) end, ok),
+    CopyDict = fun() -> lists:foreach(fun({Key,Val}) -> put(Key, Val) end, State) end,
+    do_if_parallel(Main, CopyDict, ok),
+    do_if_parallel(Main, fun() -> put(test_server_common_io_handler, {tc,Main}) end, ok),
+    %% if io is being buffered, send start io session message
+    %% (no matter if case runs on parallel or main process)
+    case get(test_server_common_io_handler) of
+        undefined -> ok;
+        _ -> Main ! {started,Ref,self(),Num,Mod,Func}
+    end,
+    TSDir = get(test_server_dir),
+    case Where of
+        target ->
+            %% remote target needs suite beam and data_dir (sent once)
+            maybe_send_beam_and_datadir(Mod);
+        host ->
+            ok
+    end,
+    test_server_sup:framework_call(report, [tc_start,{?pl2a(Mod),Func}]),
+    print(major, "=case ~p:~p", [Mod, Func]),
+    MinorName = start_minor_log_file(Mod, Func),
+    print(minor, "<a name=top></a>", []),
+    MinorBase = filename:basename(MinorName),
+    print(major, "=logfile ~s", [MinorBase]),
+    print_props((RunInit==skip_init), get_props(Mode)),
+    print(major, "=started ~s", [lists:flatten(timestamp_get(""))]),
+    {{Col0,Col1},Style} = get_font_style((RunInit==run_init), Mode),
+    print(html, "<tr valign=top><td>" ++ Col0 ++ "~s" ++ Col1 ++ "</td>"
+          "<td>" ++ Col0 ++ "~p" ++ Col1 ++ "</td>"
+          "<td><a href=\"~s\">~p</a></td>"
+          "<td><a href=\"~s#top\"><</a> <a href=\"~s#end\">></a></td>",
+          [num2str(Num),Mod,MinorBase,Func,MinorBase,MinorBase]),
+
+    do_if_parallel(Main, ok, fun erlang:yield/0),
+    %% run the test case
+    {Result,DetectedFail,ProcsBefore,ProcsAfter} =
+        run_test_case_apply(Num, Mod, Func, Args, get_name(Mode),
+                            RunInit, Where, MultiplyTimetrap),
+    {Time,RetVal,Loc,Opts,Comment} =
+        case Result of
+            Normal={_Time,_RetVal,_Loc,_Opts,_Comment} -> Normal;
+            {died,DReason,DLoc,DCmt} -> {died,DReason,DLoc,[],DCmt}
+        end,
+
+    print(minor, "<a name=end></a>", []),
+    print_timestamp(minor, "Ended at "),
+    print(major, "=ended ~s", [lists:flatten(timestamp_get(""))]),
+
+    do_if_parallel(Main, ok, fun() -> file:set_cwd(filename:dirname(TSDir)) end),
+
+    %% call the appropriate progress function clause to print the results to log
+    Status =
+        case {Time,RetVal} of
+            {died,{timetrap_timeout,TimetrapTimeout}} ->
+                progress(failed, Num, Mod, Func, Loc,
+                         timetrap_timeout, TimetrapTimeout, Comment, Style);
+            {died,Reason} ->
+                progress(failed, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {_,{'EXIT',{Skip,Reason}}} when Skip==skip; Skip==skipped ->
+                progress(skip, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {_,{'EXIT',_Pid,{Skip,Reason}}} when Skip==skip; Skip==skipped ->
+                progress(skip, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {_,{'EXIT',_Pid,Reason}} ->
+                progress(failed, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {_,{'EXIT',Reason}} ->
+                progress(failed, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {_, {failed, Reason}} ->
+                progress(failed, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {_, {Skip, Reason}} when Skip==skip; Skip==skipped ->
+                progress(skip, Num, Mod, Func, Loc, Reason,
+                         Time, Comment, Style);
+            {Time,RetVal} ->
+                case DetectedFail of
+                    [] ->
+                        progress(ok, Num, Mod, Func, Loc, RetVal,
+                                 Time, Comment, Style);
+
+                    Reason ->
+                        progress(failed, Num, Mod, Func, Loc, Reason,
+                                 Time, Comment, Style)
+                end
+        end,
+    %% if the test case was executed sequentially, this updates the
+    %% status count on the main process (status of parallel test cases
+    %% is updated later by the handle_test_case_io_and_status/0 function)
+    case {RunInit,Status} of
+        {skip_init,_} -> % conf doesn't count
+            ok;
+        {_,ok} ->
+            put(test_server_ok, get(test_server_ok)+1);
+        {_,failed} ->
+            put(test_server_failed, get(test_server_failed)+1);
+        {_,skip} ->
+            {US,AS} = get(test_server_skipped),
+            put(test_server_skipped, {US+1,AS});
+        {_,auto_skip} ->
+            {US,AS} = get(test_server_skipped),
+            put(test_server_skipped, {US,AS+1})
+    end,
+    %% only if test case execution is sequential do we care about the
+    %% remaining processes and slave nodes count
+    case self() of
+        Main ->
+            case test_server_sup:framework_call(warn, [processes], true) of
+                true ->
+                    if ProcsBefore < ProcsAfter ->
+                            print(minor,
+                                  "WARNING: ~w more processes in system after test case",
+                                  [ProcsAfter-ProcsBefore]);
+                       ProcsBefore > ProcsAfter ->
+                            print(minor,
+                                  "WARNING: ~w less processes in system after test case",
+                                  [ProcsBefore-ProcsAfter]);
+                       true -> ok
+                    end;
+                false ->
+                    ok
+            end,
+            case test_server_sup:framework_call(warn, [nodes], true) of
+                true ->
+                    case catch controller_call(kill_slavenodes) of
+                        {'EXIT',_}=Exit ->
+                            print(minor,
+                                  "WARNING: There might be slavenodes left in the"
+                                  " system. I tried to kill them, but I failed: ~p\n",
+                                  [Exit]);
+                        [] -> ok;
+                        List ->
+                            print(minor, "WARNING: ~w slave nodes in system after test"++
+                                  " case. Tried to kill them.~n"++
+                                  " Names:~p",
+                                  [length(List),List])
+                    end;
+                false ->
+                    ok
+            end;
+        _ ->
+            ok
+    end,
+    %% if the test case was executed sequentially, this updates the execution
+    %% time count on the main process (adding execution time of parallel test
+    %% case groups is done in run_test_cases_loop/4)
+    if is_number(Time) ->
+            put(test_server_total_time, get(test_server_total_time)+Time);
+       true ->
+            ok
+    end,
+    check_new_crash_dumps(Where),
+
+    %% if io is being buffered, send finished message
+    %% (no matter if case runs on parallel or main process)
+    case get(test_server_common_io_handler) of
+        undefined -> ok;
+        _ -> Main ! {finished,Ref,self(),Num,Mod,Func,
+                     ?mod_result(Status),{Time,RetVal,Opts}}
+    end,
+    {Time,RetVal,Opts}.
+
+
+%%--------------------------------------------------------------------
+%% various help functions
+
+%% Evaluate/return If when we are NOT the given (main) process Pid,
+%% i.e. when running on a parallel test case process; otherwise
+%% evaluate/return Else. Fun arguments are called, other terms are
+%% returned as they are.
+do_if_parallel(Pid, If, Else) ->
+    Chosen = case self() of
+                 Pid -> Else;
+                 _ -> If
+             end,
+    if is_function(Chosen) -> Chosen();
+       true -> Chosen
+    end.
+
+%% Render a test case number for the log; 0 means "no number".
+num2str(N) ->
+    case N of
+        0 -> "";
+        _ -> integer_to_list(N)
+    end.
+
+%% If the target is remote, make sure the suite beam and the content
+%% of its data_dir have been sent over; each suite is transferred at
+%% most once (tracked in the process dictionary).
+maybe_send_beam_and_datadir(Mod) ->
+    case get(test_server_ctrl_job_sock) of
+        undefined ->
+            %% local target - nothing to transfer
+            ok;
+        JobSock ->
+            %% remote target
+            Sent = case get(test_server_downloaded_suites) of
+                       undefined -> [];
+                       Suites -> Suites
+                   end,
+            case lists:member(Mod, Sent) of
+                true ->
+                    ok;
+                false ->
+                    send_beam_and_datadir(Mod, JobSock),
+                    put(test_server_downloaded_suites, [Mod|Sent])
+            end
+    end.
+
+%% send_beam_and_datadir(Mod, JobSock) -> ok
+%%
+%% Sends the suite beam (if it can be found on the host) and a gzipped
+%% tar archive of the suite's data_dir to the remote target over the
+%% job socket. On an OSE target, object files found in data_dir are
+%% sent as separate beams, and C sources/makefiles are excluded from
+%% the archive (no compilation can be done on that target).
+send_beam_and_datadir(Mod, JobSock) ->
+    case code:which(Mod) of
+        non_existing ->
+            %% data_dir may still be sent below; just warn about the suite
+            io:format("** WARNING: Suite ~w could not be found on host\n",
+                      [Mod]);
+        BeamFile ->
+            send_beam(JobSock, Mod, BeamFile)
+    end,
+    DataDir = get_data_dir(Mod),
+    case file:read_file_info(DataDir) of
+        {ok,_I} ->
+            {ok,All} = file:list_dir(DataDir),
+            AddTarFiles =
+                case controller_call(get_target_info) of
+                    #target_info{os_family=ose} ->
+                        ObjExt = code:objfile_extension(),
+                        Wc = filename:join(DataDir, "*" ++ ObjExt),
+                        ModsInDatadir = filelib:wildcard(Wc),
+                        SendBeamFun = fun(X) -> send_beam(JobSock, X) end,
+                        lists:foreach(SendBeamFun, ModsInDatadir),
+                        %% No need to send C code or makefiles since
+                        %% no compilation can be done on target anyway.
+                        %% Compiled C code must exist on target.
+                        %% Beam files are already sent as binaries.
+                        %% Erlang source are sent in case the test case
+                        %% is to compile it.
+                        Filter = fun("Makefile") -> false;
+                                    ("Makefile.src") -> false;
+                                    (Y) ->
+                                         %% ObjExt is already bound here, so that
+                                         %% clause matches the object extension
+                                         case filename:extension(Y) of
+                                             ".c" -> false;
+                                             ObjExt -> false;
+                                             _ -> true
+                                         end
+                                 end,
+                        lists:filter(Filter, All);
+                    _ ->
+                        All
+                end,
+            Tarfile = "data_dir.tar.gz",
+            {ok,Tar} = erl_tar:open(Tarfile, [write,compressed]),
+            ShortDataDir = filename:basename(DataDir),
+            AddTarFun =
+                fun(File) ->
+                        %% store under the short (relative) path in the archive
+                        Long = filename:join(DataDir, File),
+                        Short = filename:join(ShortDataDir, File),
+                        ok = erl_tar:add(Tar, Long, Short, [])
+                end,
+            lists:foreach(AddTarFun, AddTarFiles),
+            ok = erl_tar:close(Tar),
+            {ok,TarBin} = file:read_file(Tarfile),
+            %% the archive is only a temporary local file
+            file:delete(Tarfile),
+            request(JobSock, {{datadir,Tarfile}, TarBin});
+        {error,_R} ->
+            %% no data_dir for this suite - nothing more to send
+            ok
+    end.
+
+%% send_beam(JobSock, BeamFile): derive the module name from the beam
+%% file name, then send the beam to the remote target.
+send_beam(JobSock, BeamFile) ->
+    Mod=filename:rootname(filename:basename(BeamFile), code:objfile_extension()),
+    send_beam(JobSock, list_to_atom(Mod), BeamFile).
+%% send_beam(JobSock, Mod, BeamFile): ship the beam binary over the
+%% job socket.
+send_beam(JobSock, Mod, BeamFile) ->
+    {ok,BeamBin} = file:read_file(BeamFile),
+    request(JobSock, {{beam,Mod,BeamFile}, BeamBin}).
+
+%% Scan for new crash dumps after a test case. When the case ran on a
+%% remote target, pending data on the job socket is consumed first.
+check_new_crash_dumps(target) ->
+    case get(test_server_ctrl_job_sock) of
+        undefined ->
+            %% local target
+            ok;
+        Socket ->
+            read_job_sock_loop(Socket)
+    end,
+    test_server_sup:check_new_crash_dumps();
+check_new_crash_dumps(_Where) ->
+    test_server_sup:check_new_crash_dumps().
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% progress(Result, CaseNum, Mod, Func, Location, Reason, Time,
+%%          Comment, TimeFormat) -> Result
+%%
+%% Prints the result of the test case to log file.
+%% Note: Strings that are to be written to the minor log must
+%% be prefixed with "=== " here, or the indentation will be wrong.
+
+%% skipped case; Reason decides if it counts as user or auto skip
+progress(skip, CaseNum, Mod, Func, Loc, Reason, Time,
+         Comment, {St0,St1}) ->
+    {Reason1,{Color,Ret}} = if_auto_skip(Reason,
+                                         fun() -> {"#ffcc99",auto_skip} end,
+                                         fun() -> {"#ff9933",skip} end),
+    print(major, "=result skipped", []),
+    print(1, "*** SKIPPED *** ~s",
+          [get_info_str(Func, CaseNum, get(test_server_cases))]),
+    test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,
+                                                     {skipped,Reason1}}]),
+    %% single-line reason, truncated to fit the html table
+    ReasonStr = reason_to_string(Reason1),
+    ReasonStr1 = lists:flatten([string:strip(S,left) ||
+                                S <- string:tokens(ReasonStr,[$\n])]),
+    ReasonStr2 =
+        if length(ReasonStr1) > 80 ->
+                string:substr(ReasonStr1, 1, 77) ++ "...";
+           true ->
+                ReasonStr1
+        end,
+    Comment1 = case Comment of
+                   "" -> "";
+                   _ -> "<br>(" ++ to_string(Comment) ++ ")"
+               end,
+    print(html,
+          "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>"
+          "<td><font color=\"~s\">SKIPPED</font></td>"
+          "<td>~s~s</td></tr>\n",
+          [Time,Color,ReasonStr2,Comment1]),
+    FormatLoc = test_server_sup:format_loc(Loc),
+    print(minor, "=== location ~s", [FormatLoc]),
+    print(minor, "=== reason = ~s", [ReasonStr1]),
+    Ret;
+
+%% failed because the timetrap fired; T is the timetrap time in ms
+progress(failed, CaseNum, Mod, Func, Loc, timetrap_timeout, T,
+         Comment0, {St0,St1}) ->
+    print(major, "=result failed: timeout, ~p", [Loc]),
+    print(1, "*** FAILED *** ~s",
+          [get_info_str(Func, CaseNum, get(test_server_cases))]),
+    test_server_sup:framework_call(report,
+                                   [tc_done,{?pl2a(Mod),Func,
+                                             {failed,timetrap_timeout}}]),
+    FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
+    ErrorReason = io_lib:format("{timetrap_timeout,~s}", [FormatLastLoc]),
+    Comment =
+        case Comment0 of
+            "" -> "<font color=\"red\">" ++ ErrorReason ++ "</font>";
+            _ -> "<font color=\"red\">" ++ ErrorReason ++ "</font><br>" ++
+                 to_string(Comment0)
+        end,
+    print(html,
+          "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>"
+          "<td><font color=\"red\">FAILED</font></td>"
+          "<td>~s</td></tr>\n",
+          [T/1000,Comment]),
+    FormatLoc = test_server_sup:format_loc(Loc),
+    print(minor, "=== location ~s", [FormatLoc]),
+    print(minor, "=== reason = timetrap timeout", []),
+    failed;
+
+%% failed because the test case was aborted (process died)
+progress(failed, CaseNum, Mod, Func, Loc, {testcase_aborted,Reason}, _T,
+         Comment0, {St0,St1}) ->
+    print(major, "=result failed: testcase_aborted, ~p", [Loc]),
+    print(1, "*** FAILED *** ~s",
+          [get_info_str(Func, CaseNum, get(test_server_cases))]),
+    test_server_sup:framework_call(report,
+                                   [tc_done,{?pl2a(Mod),Func,
+                                             {failed,testcase_aborted}}]),
+    FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
+    ErrorReason = io_lib:format("{testcase_aborted,~s}", [FormatLastLoc]),
+    Comment =
+        case Comment0 of
+            "" -> "<font color=\"red\">" ++ ErrorReason ++ "</font>";
+            _ -> "<font color=\"red\">" ++ ErrorReason ++ "</font><br>" ++
+                 to_string(Comment0)
+        end,
+    print(html,
+          "<td>" ++ St0 ++ "died" ++ St1 ++ "</td>"
+          "<td><font color=\"red\">FAILED</font></td>"
+          "<td>~s</td></tr>\n",
+          [Comment]),
+    FormatLoc = test_server_sup:format_loc(Loc),
+    print(minor, "=== location ~s", [FormatLoc]),
+    print(minor, "=== reason = {testcase_aborted,~p}", [Reason]),
+    failed;
+
+%% failed with no known location
+progress(failed, CaseNum, Mod, Func, unknown, Reason, Time,
+         Comment0, {St0,St1}) ->
+    print(major, "=result failed: ~p, ~p", [Reason,unknown]),
+    print(1, "*** FAILED *** ~s",
+          [get_info_str(Func, CaseNum, get(test_server_cases))]),
+    test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,
+                                                     {failed,Reason}}]),
+    %% Time is 'died' (atom) when the case process terminated
+    TimeStr = io_lib:format(if is_float(Time) -> "~.3fs";
+                               true -> "~w"
+                            end, [Time]),
+    %% single-line reason, truncated to fit the html table
+    ErrorReason = lists:flatten(io_lib:format("~p", [Reason])),
+    ErrorReason1 = lists:flatten([string:strip(S,left) ||
+                                  S <- string:tokens(ErrorReason,[$\n])]),
+    ErrorReason2 =
+        if length(ErrorReason1) > 63 ->
+                string:substr(ErrorReason1, 1, 60) ++ "...";
+           true ->
+                ErrorReason1
+        end,
+    Comment =
+        case Comment0 of
+            "" -> "<font color=\"red\">" ++ ErrorReason2 ++ "</font>";
+            _ -> "<font color=\"red\">" ++ ErrorReason2 ++ "</font><br>" ++
+                 to_string(Comment0)
+        end,
+    print(html,
+          "<td>" ++ St0 ++ "~s" ++ St1 ++ "</td>"
+          "<td><font color=\"red\">FAILED</font></td>"
+          "<td>~s</td></tr>\n",
+          [TimeStr,Comment]),
+    print(minor, "=== location ~s", [unknown]),
+    {FStr,FormattedReason} = format_exception(Reason),
+    print(minor, "=== reason = "++FStr, [FormattedReason]),
+    failed;
+
+%% failed, generic case with a known location
+progress(failed, CaseNum, Mod, Func, Loc, Reason, Time,
+         Comment0, {St0,St1}) ->
+    print(major, "=result failed: ~p, ~p", [Reason,Loc]),
+    print(1, "*** FAILED *** ~s",
+          [get_info_str(Func, CaseNum, get(test_server_cases))]),
+    test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,
+                                                     {failed,Reason}}]),
+    TimeStr = io_lib:format(if is_float(Time) -> "~.3fs";
+                               true -> "~w"
+                            end, [Time]),
+    Comment =
+        case Comment0 of
+            "" -> "";
+            _ -> "<br>" ++ to_string(Comment0)
+        end,
+    FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
+    print(html,
+          "<td>" ++ St0 ++ "~s" ++ St1 ++ "</td>"
+          "<td><font color=\"red\">FAILED</font></td>"
+          "<td><font color=\"red\">~s</font>~s</td></tr>\n",
+          [TimeStr,FormatLastLoc,Comment]),
+    FormatLoc = test_server_sup:format_loc(Loc),
+    print(minor, "=== location ~s", [FormatLoc]),
+    {FStr,FormattedReason} = format_exception(Reason),
+    print(minor, "=== reason = "++FStr, [FormattedReason]),
+    failed;
+
+%% successful case; a {comment,_} return value ends up in the html log
+progress(ok, _CaseNum, Mod, Func, _Loc, RetVal, Time,
+         Comment0, {St0,St1}) ->
+    print(minor, "successfully completed test case", []),
+    test_server_sup:framework_call(report, [tc_done,{?pl2a(Mod),Func,ok}]),
+    Comment =
+        case RetVal of
+            {comment,RetComment} ->
+                String = to_string(RetComment),
+                print(major, "=result ok: ~s", [String]),
+                "<td>" ++ String ++ "</td>";
+            _ ->
+                print(major, "=result ok", []),
+                case Comment0 of
+                    "" -> "";
+                    _ -> "<td>" ++ to_string(Comment0) ++ "</td>"
+                end
+        end,
+    print(major, "=elapsed ~p", [Time]),
+    print(html,
+          "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>"
+          "<td><font color=\"green\">Ok</font></td>"
+          "~s</tr>\n",
+          [Time,Comment]),
+    print(minor, "=== returned value = ~p", [RetVal]),
+    ok.
+
+%%--------------------------------------------------------------------
+%% various help functions
+
+%% if_auto_skip(Reason, True, False) -> {Reason1,Result}
+%%
+%% Calls True() when Reason identifies an auto skip (failed
+%% init_per_testcase, or a framework initiated skip), otherwise
+%% False(). The skip reason is unwrapped from its envelope.
+if_auto_skip(Reason={failed,{_,init_per_testcase,_}}, True, _False) ->
+    {Reason,True()};
+if_auto_skip({_T,{skip,Reason={failed,{_,init_per_testcase,_}}},_Opts}, True, _False) ->
+    {Reason,True()};
+if_auto_skip({fw_auto_skip,Reason}, True, _False) ->
+    {Reason,True()};
+if_auto_skip({_T,{skip,{fw_auto_skip,Reason}},_Opts}, True, _False) ->
+    {Reason,True()};
+if_auto_skip(Reason, _True, False) ->
+    {Reason,False()}.
+
+%% Bump the {UserSkipped,AutoSkipped} counter pair according to the
+%% skip reason carried by RetVal.
+update_skip_counters(RetVal, {US,AS}) ->
+    OnAuto = fun() -> {US,AS+1} end,
+    OnUser = fun() -> {US+1,AS} end,
+    {_Reason,Counters} = if_auto_skip(RetVal, OnAuto, OnUser),
+    Counters.
+
+%% Human readable id for a test case in console printouts: the
+%% function name when no case number is available (0), otherwise
+%% "test case N" with "of Total" appended when the total is known.
+get_info_str(Func, 0, _Cases) ->
+    atom_to_list(Func);
+get_info_str(_Func, CaseNum, unknown) ->
+    "test case " ++ integer_to_list(CaseNum);
+get_info_str(_Func, CaseNum, Cases) ->
+    lists:concat(["test case ", CaseNum, " of ", Cases]).
+
+%% Format with the {Fmt,Args} pair for "known" or "unknown" depending
+%% on whether the total number of cases is known.
+print_if_known(unknown, _IfKnown, {FmtU,ArgsU}) ->
+    io_lib:format(FmtU, ArgsU);
+print_if_known(_Known, {FmtK,ArgsK}, _IfUnknown) ->
+    io_lib:format(FmtK, ArgsK).
+
+%% to_string(Term) -> string()
+%%
+%% Renders Term as a flat string: lists that are printable with "~s"
+%% are rendered as text, everything else with "~p".
+%% Fix: the "~s"-failure branch previously returned the unflattened
+%% io_lib:format/2 result while every other path returned a flat
+%% string; since callers concatenate the result with ++, all branches
+%% now flatten consistently.
+to_string(Term) when is_list(Term) ->
+    case (catch io_lib:format("~s", [Term])) of
+        {'EXIT',_} -> lists:flatten(io_lib:format("~p", [Term]));
+        String -> lists:flatten(String)
+    end;
+to_string(Term) ->
+    lists:flatten(io_lib:format("~p", [Term])).
+
+%% Pick the last (innermost) location from Loc for presentation. A
+%% single tuple is returned as is; for a list of locations the head is
+%% returned wrapped in a one-element list.
+%% NOTE(review): the [Loc] wrapping looks deliberate — presumably
+%% test_server_sup:format_loc/1 renders a one-element list differently
+%% from a bare tuple; confirm before changing.
+get_last_loc(Loc) when is_tuple(Loc) ->
+    Loc;
+get_last_loc([Loc|_]) when is_tuple(Loc) ->
+    [Loc];
+get_last_loc(Loc) ->
+    Loc.
+
+%% Map a failure reason to a short human readable string for the logs.
+reason_to_string({failed,{_,FailFunc,bad_return}}) ->
+    lists:concat([FailFunc, " bad return value"]);
+reason_to_string({failed,{_,FailFunc,{timetrap_timeout,_}}}) ->
+    lists:concat([FailFunc, " timed out"]);
+reason_to_string({failed,{_CB,init_tc,_Reason}} = FWInitFail) ->
+    %% failure in the framework init_tc callback: show the whole term
+    to_string(FWInitFail);
+reason_to_string({failed,{_,FailFunc,_}}) ->
+    lists:concat([FailFunc, " failed"]);
+reason_to_string(Other) ->
+    to_string(Other).
+
+%get_font_style(Prop) ->
+% {Col,St0,St1} = get_font_style1(Prop),
+% {{"<font color="++Col++">","</font>"},
+% {"<font color="++Col++">"++St0,St1++"</font>"}}.
+
+%% Pick the html font style tags for a test case entry. Conf cases
+%% (NormalCase == false) always use the default style; normal cases
+%% get a distinguishing style when they run in a parallel group or in
+%% a sequence. Returns {{Col0,Col1},{St0,St1}} tag pairs.
+get_font_style(NormalCase, Mode) ->
+    Prop = case NormalCase of
+               false ->
+                   default;
+               true ->
+                   case check_prop(parallel, Mode) of
+                       false ->
+                           case check_prop(sequence, Mode) of
+                               false -> default;
+                               _ -> sequence
+                           end;
+                       _ ->
+                           parallel
+                   end
+           end,
+    {Col,Pre,Post} = get_font_style1(Prop),
+    {{"<font color="++Col++">","</font>"},
+     {"<font color="++Col++">"++Pre,Post++"</font>"}}.
+
+%% {Color,StartTag,EndTag} triple for each style property.
+get_font_style1(Prop) ->
+    case Prop of
+        parallel -> {"\"darkslategray\"","<i>","</i>"};
+        sequence -> {"\"saddlebrown\"","",""};
+        default  -> {"\"black\"","",""}
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% format_exception({Error,Stack}) -> {CtrlSeq,Term}
+%%
+%% Error info is by default formatted (like in the erlang shell)
+%% before being printed to the minor log file. The framework
+%% application can switch the feature off by setting *its* application
+%% environment variable 'format_exception' to false.
+%% NOTE(review): the original doc also mentions an init argument
+%% 'test_server_format_exception'; that is not checked here —
+%% presumably it is handled elsewhere.
+
+format_exception(Reason={_Error,Stack}) when is_list(Stack) ->
+    %% which application's 'format_exception' setting to consult
+    App = case os:getenv("TEST_SERVER_FRAMEWORK") of
+              false -> test_server;
+              FW -> list_to_atom(FW)
+          end,
+    case application:get_env(App, format_exception) of
+        {ok,false} ->
+            {"~p",Reason};
+        _ ->
+            do_format_exception(Reason)
+    end;
+format_exception(Error) ->
+    %% no stacktrace available
+    format_exception({Error,[]}).
+
+%% Pretty-print {Error,Stack} the way the erlang shell does, via
+%% lib:format_exception/6. Falls back to plain "~p" formatting if that
+%% fails. Returns {ControlSeq,TermOrString} suitable for print/3.
+do_format_exception(Reason={Error,Stack}) ->
+    StackFun = fun(_, _, _) -> false end,  % don't drop any stack frames
+    PF = fun(Term, I) ->
+             io_lib:format("~." ++ integer_to_list(I) ++ "p", [Term])
+         end,
+    case catch lib:format_exception(1, error, Error, Stack, StackFun, PF) of
+        {'EXIT',_} ->
+            {"~p",Reason};
+        Formatted ->
+            %% drop the shell's "exception error: " prefix
+            Formatted1 = re:replace(Formatted, "exception error: ", "", [{return,list}]),
+            {"~s",lists:flatten(Formatted1)}
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit,
+%%                     Where, MultiplyTimetrap) ->
+%%  {{Time,RetVal,Loc,Opts,Comment},DetectedFail,ProcessesBefore,ProcessesAfter} |
+%%  {{died,Reason,unknown,Comment},DetectedFail,ProcessesBefore,ProcessesAfter}
+%% Name = atom()
+%% Where = target | host
+%% Time = float() (seconds)
+%% DetectedFail = [{File,Line}]
+%% ProcessesBefore = ProcessesAfter = integer()
+%%
+%% Where == host: always run locally by test_server.
+%% Where == target: if the target is remote, the request is shipped
+%% over the job socket and the result read back from it; otherwise the
+%% case is run locally as well.
+
+run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, host, MultiplyTimetrap) ->
+    test_server:run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,
+                                     MultiplyTimetrap});
+run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit, target, MultiplyTimetrap) ->
+    Request = {CaseNum,Mod,Func,Args,Name,RunInit,MultiplyTimetrap},
+    case get(test_server_ctrl_job_sock) of
+        undefined ->
+            %% local target
+            test_server:run_test_case_apply(Request);
+        JobSock ->
+            %% remote target
+            request(JobSock, {test_case,Request}),
+            read_job_sock_loop(JobSock)
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% print(Detail, Format, Args) -> ok
+%% Detail = integer()
+%% Format = string()
+%% Args = [term()]
+%%
+%% Just like io:format, except that depending on the Detail value, the output
+%% is directed to console, major and/or minor log files.
+%%
+%% To handle printouts to common (not minor) log files from parallel test
+%% case processes, the test_server_common_io_handler value is checked. If
+%% set, the data is sent to the main controlling process. Note that test
+%% cases that belong to a conf group nested under a parallel group will also
+%% get its io data sent to main rather than immediately printed out, even
+%% if the test cases are executed by the same, main, process (ie the main
+%% process sends messages to itself then).
+%%
+%% Buffered io is handled by the handle_test_case_io_and_status/0 function.
+
+%% print/2: as print/3 with no format arguments.
+print(Detail, Format) ->
+ print(Detail, Format, []).
+
+%% print/3: as print/4 with the default 'internal' printer tag.
+print(Detail, Format, Args) ->
+ print(Detail, Format, Args, internal).
+
+%% print/4: format the message, then route it via print_or_buffer/3.
+print(Detail, Format, Args, Printer) ->
+ Msg = io_lib:format(Format, Args),
+ print_or_buffer(Detail, Msg, Printer).
+
+%% Decide whether to output Msg immediately or forward it to the main
+%% controlling process (buffering), based on Detail and the
+%% test_server_common_io_handler process dictionary value.
+print_or_buffer(Detail, Msg, Printer) ->
+ case get(test_server_minor_level) of
+ _ when Detail == minor ->
+ output({Detail,Msg}, Printer);
+ MinLevel when is_number(Detail), Detail >= MinLevel ->
+ output({Detail,Msg}, Printer);
+ _ -> % Detail < Minor | major | html
+ case get(test_server_common_io_handler) of
+ undefined ->
+ output({Detail,Msg}, Printer);
+ {_,MainPid} ->
+ MainPid ! {print,self(),Detail,Msg}
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% print_timestamp(Detail, Leader) -> ok
+%%
+%% Prints Leader followed by a time stamp (date and time). Depending on
+%% the Detail value, the output is directed to console, major and/or minor
+%% log files.
+%%
+%% NOTE(review): the timestamp string is passed to print/3 as the format
+%% string, so Leader must not contain "~" control sequences.
+
+print_timestamp(Detail, Leader) ->
+ print(Detail, timestamp_get(Leader), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% print_who(Host, User) -> ok
+%%
+%% Logs who runs the suite.
+%% Writes to the html log only; User may be "" (then no "by" part is added).
+
+print_who(Host, User) ->
+ UserStr = case User of
+ "" -> "";
+ _ -> " by " ++ User
+ end,
+ print(html, "Run~s on ~s", [UserStr,Host]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% format(Format) -> IoLibReturn
+%% format(Detail, Format) -> IoLibReturn
+%% format(Format, Args) -> IoLibReturn
+%% format(Detail, Format, Args) -> IoLibReturn
+%%
+%% Detail = integer()
+%% Format = string()
+%% Args = [term(),...]
+%% IoLibReturn = term()
+%%
+%% Logs the Format string and Args, similar to io:format/1/2 etc. If
+%% Detail is not specified, the default detail level (which is 50) is used.
+%% Which log files the string will be logged in depends on the thresholds
+%% set with set_levels/3. Typically with default detail level, only the
+%% minor log file is used.
+
+format(Format) ->
+ format(minor, Format, []).
+
+%% format/2 is overloaded: a first argument of 'major', 'minor' or an
+%% integer is a Detail level (empty Args); anything else is taken as a
+%% Format string with the second argument as Args.
+format(major, Format) ->
+ format(major, Format, []);
+format(minor, Format) ->
+ format(minor, Format, []);
+format(Detail, Format) when is_integer(Detail) ->
+ format(Detail, Format, []);
+format(Format, Args) ->
+ format(minor, Format, Args).
+
+%% A bad Format/Args combination is reported in the log rather than
+%% crashing the caller.
+format(Detail, Format, Args) ->
+ Str =
+ case catch io_lib:format(Format, Args) of
+ {'EXIT',_} ->
+ io_lib:format("illegal format; ~p with args ~p.\n",
+ [Format,Args]);
+ Valid -> Valid
+ end,
+ print_or_buffer(Detail, Str, self()).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% output({Level,Message}, Sender) -> ok
+%% Level = integer() | minor | major | html
+%% Message = string() | [integer()]
+%% Sender = string() | internal
+%%
+%% Outputs the message on the channels indicated by Level. If Level is an
+%% atom, only the corresponding channel receives the output. When Level is
+%% an integer console, major and/or minor log file will receive output
+%% depending on the user set thresholds (see get_levels/0, set_levels/3)
+%%
+%% When printing on the console, the message is prefixed with the test
+%% suite's name. In case a name is not set (yet), Sender is used.
+%%
+%% When not outputting to the console, and the Sender is 'internal',
+%% the message is prefixed with "=== ", so that it will be apparent that
+%% the message comes from the test server and not the test suite itself.
+%%
+%% Clause order matters: the integer-Level clause is tried first, then
+%% list/binary payloads for minor/major, then html, and finally catch-all
+%% clauses that log any other payload as "Unexpected output".
+
+output({Level,Msg}, Sender) when is_integer(Level) ->
+ SumLev = get(test_server_summary_level),
+ if Level =< SumLev ->
+ output_to_fd(stdout, Msg, Sender);
+ true ->
+ ok
+ end,
+ MajLev = get(test_server_major_level),
+ if Level =< MajLev ->
+ output_to_fd(get(test_server_major_fd), Msg, Sender);
+ true ->
+ ok
+ end,
+ MinLev = get(test_server_minor_level),
+ if Level >= MinLev ->
+ output_to_fd(get(test_server_minor_fd), Msg, Sender);
+ true ->
+ ok
+ end;
+output({minor,Bytes}, Sender) when is_list(Bytes) ->
+ output_to_fd(get(test_server_minor_fd), Bytes, Sender);
+output({major,Bytes}, Sender) when is_list(Bytes) ->
+ output_to_fd(get(test_server_major_fd), Bytes, Sender);
+output({minor,Bytes}, Sender) when is_binary(Bytes) ->
+ output_to_fd(get(test_server_minor_fd),binary_to_list(Bytes), Sender);
+output({major,Bytes}, Sender) when is_binary(Bytes) ->
+ output_to_fd(get(test_server_major_fd),binary_to_list(Bytes), Sender);
+output({html,Msg}, _Sender) ->
+ case get(test_server_html_fd) of
+ undefined ->
+ ok;
+ Fd ->
+ io:put_chars(Fd,Msg),
+ case file:position(Fd, {cur, 0}) of
+ {ok, Pos} ->
+ %% We are writing to a seekable file. Finalise so
+ %% we get complete valid (and viewable) HTML code.
+ %% Then rewind to overwrite the finalising code.
+ io:put_chars(Fd, "\n</table>\n</body>\n</html>\n"),
+ file:position(Fd, Pos);
+ {error, epipe} ->
+ %% The file is not seekable. We cannot erase what
+ %% we've already written --- so the reader will
+ %% have to wait until we're done.
+ ok
+ end
+ end;
+output({minor,Data}, Sender) ->
+ output_to_fd(get(test_server_minor_fd),
+ lists:flatten(io_lib:format(
+ "Unexpected output: ~p~n", [Data])),Sender);
+output({major,Data}, Sender) ->
+ output_to_fd(get(test_server_major_fd),
+ lists:flatten(io_lib:format(
+ "Unexpected output: ~p~n", [Data])),Sender).
+
+%% output_to_fd(Fd, Msg, Sender) -> ok
+%% Fd = stdout | undefined | io_device()
+%% Low-level writer: 'stdout' prefixes the suite name (or Sender),
+%% 'undefined' silently drops the message, and for 'internal' senders a
+%% "=== " prefix is added unless the message already starts with '='.
+output_to_fd(stdout, Msg, Sender) ->
+ Name =
+ case get(test_server_name) of
+ undefined -> Sender;
+ Other -> Other
+ end,
+ io:format("Testing ~s: ~s\n", [Name, lists:flatten(Msg)]);
+output_to_fd(undefined, _Msg, _Sender) ->
+ ok;
+output_to_fd(Fd, [$=|Msg], internal) ->
+ io:put_chars(Fd, [$=]),
+ io:put_chars(Fd, Msg),
+ io:put_chars(Fd, "\n");
+output_to_fd(Fd, Msg, internal) ->
+ io:put_chars(Fd, [$=,$=,$=,$ ]),
+ io:put_chars(Fd, Msg),
+ io:put_chars(Fd, "\n");
+output_to_fd(Fd, Msg, _Sender) ->
+ io:put_chars(Fd, Msg),
+ io:put_chars(Fd, "\n").
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timestamp_filename_get(Leader) -> string()
+%% Leader = string()
+%%
+%% Returns a string consisting of Leader concatenated with the current
+%% date and time. The resulting string is suitable as a filename.
+%% (Uses '.' and '_' instead of ':' and ' ' so the result is filesystem safe.)
+timestamp_filename_get(Leader) ->
+ timestamp_get_internal(Leader,
+ "~s~w-~2.2.0w-~2.2.0w_~2.2.0w.~2.2.0w.~2.2.0w").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timestamp_get(Leader) -> string()
+%% Leader = string()
+%%
+%% Returns a string consisting of Leader concatenated with the current
+%% date and time. The resulting string is suitable for display.
+timestamp_get(Leader) ->
+ timestamp_get_internal(Leader,
+ "~s~w-~2.2.0w-~2.2.0w ~2.2.0w:~2.2.0w:~2.2.0w").
+
+%% Shared helper for both timestamp variants; Format receives Leader plus
+%% the six date/time components from time_get/0.
+timestamp_get_internal(Leader, Format) ->
+ {YY,MM,DD,H,M,S} = time_get(),
+ io_lib:format(Format, [Leader,YY,MM,DD,H,M,S]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% time_get() -> {YY,MM,DD,H,M,S}
+%% YY = integer()
+%% MM = integer()
+%% DD = integer()
+%% H = integer()
+%% M = integer()
+%% S = integer()
+%%
+%% Returns the current Year,Month,Day,Hours,Minutes,Seconds.
+%% The function checks that the date doesn't wrap while getting
+%% the time: date() is read again after time() and the whole read is
+%% retried if midnight passed in between.
+time_get() ->
+ {YY,MM,DD} = date(),
+ {H,M,S} = time(),
+ case date() of
+ {YY,MM,DD} ->
+ {YY,MM,DD,H,M,S};
+ _NewDay ->
+ %% date changed between call to date() and time(), try again
+ time_get()
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% make_config(Config) -> NewConfig
+%% Config = [{Key,Value},...]
+%% NewConfig = [{Key,Value},...]
+%%
+%% Creates a configuration list (currently returns its input)
+
+make_config(Initial) ->
+ Initial.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% update_config(Config, Update) -> NewConfig
+%% Config = [{Key,Value},...]
+%% Update = [{Key,Value},...] | {Key,Value}
+%% NewConfig = [{Key,Value},...]
+%%
+%% Adds or replaces the key-value pairs in config with those in update.
+%% Returns the updated list. An existing key is replaced in place;
+%% a new key is prepended.
+
+update_config(Config, {Key,Val}) ->
+ case lists:keymember(Key, 1, Config) of
+ true ->
+ lists:keyreplace(Key, 1, Config, {Key,Val});
+ false ->
+ [{Key,Val}|Config]
+ end;
+update_config(Config, [Assoc|Assocs]) ->
+ NewConfig = update_config(Config, Assoc),
+ update_config(NewConfig, Assocs);
+update_config(Config, []) ->
+ Config.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% collect_cases(CurMod, TopCase, SkipList) ->
+%% BasicCaseList | {error,Reason}
+%%
+%% CurMod = atom()
+%% TopCase = term()
+%% SkipList = [term(),...]
+%% BasicCaseList = [term(),...]
+%%
+%% Parses the given test goal(s) in TopCase, and transforms them to a
+%% simple list of test cases to call, when executing the test suite.
+%%
+%% CurMod is the "current" module, that is, the module the last instruction
+%% was read from. May be set to 'none' initially.
+%%
+%% SkipList is the list of test cases to skip and requirements to deny.
+%%
+%% The BasicCaseList is built out of TopCase, which may be any of the
+%% following terms:
+%%
+%% [] Nothing is added
+%% List list() The list is decomposed, and each element is
+%% treated according to this table
+%% Case atom() CurMod:Case(suite) is called
+%% {module,Case} CurMod:Case(suite) is called
+%% {Module,Case} Module:Case(suite) is called
+%% {module,Module,Case} Module:Case(suite) is called
+%% {module,Module,Case,Args} Module:Case is called with Args as arguments
+%% {dir,Dir} All modules *_SUITE in the named directory
+%% are listed, and each Module:all(suite) is called
+%% {dir,Dir,Pattern} All modules <Pattern>_SUITE in the named dir
+%% are listed, and each Module:all(suite) is called
+%% {conf,InitMF,Cases,FinMF}
+%% {conf,Props,InitMF,Cases,FinMF}
+%% InitMF is placed in the BasicCaseList, then
+%% Cases is treated according to this table, then
+%% FinMF is placed in the BasicCaseList. InitMF
+%% and FinMF are configuration manipulation
+%% functions. See below.
+%% {make,InitMFA,Cases,FinMFA}
+%% InitMFA is placed in the BasicCaseList, then
+%% Cases is treated according to this table, then
+%% FinMFA is placed in the BasicCaseList. InitMFA
+%% and FinMFA are make/unmake functions. If InitMFA
+%% fails, Cases are not run. InitMFA and FinMFA are
+%% always run on the host - not on target.
+%%
+%% When a function is called, above, it means that the function is invoked
+%% and the return is expected to be:
+%%
+%% [] Leaf case
+%% {req,ReqList} Kept for backwards compatibility - same as []
+%% {req,ReqList,Cases} Kept for backwards compatibility -
+%% Cases parsed recursively with collect_cases/3
+%% Cases (list) Recursively parsed with collect_cases/3
+%%
+%% Leaf cases are added to the BasicCaseList as Module:Case(Config). Each
+%% case is checked against the SkipList. If present, a skip instruction
+%% is inserted instead, which only prints the case name and the reason
+%% why the case was skipped in the log files.
+%%
+%% Configuration manipulation functions are called with the current
+%% configuration list as only argument, and are expected to return a new
+%% configuration list. Such a pair of function may, for example, start a
+%% server and stop it after a series of test cases.
+%%
+%% SkipCases is expected to be in the format:
+%%
+%% Other Recursively parsed with collect_cases/3
+%% {Mod,Comment} Skip Mod, with Comment
+%% {Mod,Funcs,Comment} Skip listed functions in Mod with Comment
+%% {Mod,Func,Comment} Skip named function in Mod with Comment
+%%
+%% State threaded through the collect_* functions below.
+-record(cc, {mod, % current module
+ skip}). % skip list
+
+%% collect_all_cases(Top, Skip) -> Cases | {error,Reason}
+%% Entry point: runs collect_cases/2 with a fresh #cc state and strips
+%% the state from a successful result.
+collect_all_cases(Top, Skip) when is_list(Skip) ->
+ Result =
+ case collect_cases(Top, #cc{mod=[],skip=Skip}) of
+ {ok,Cases,_St} -> Cases;
+ Other -> Other
+ end,
+ Result.
+
+
+%% collect_cases(TopCase, #cc{}) -> {ok,FlatCases,#cc{}} | {error,Reason}
+%% One clause per TopCase shape documented in the table above; the #cc
+%% state carries the current module and the skip list.
+collect_cases([], St) -> {ok,[],St};
+collect_cases([Case|Cs0], St0) ->
+ case collect_cases(Case, St0) of
+ {ok,FlatCases1,St1} ->
+ case collect_cases(Cs0, St1) of
+ {ok,FlatCases2,St} ->
+ {ok,FlatCases1 ++ FlatCases2,St};
+ {error,_Reason}=Error -> Error
+ end;
+ {error,_Reason}=Error -> Error
+ end;
+
+
+collect_cases({module,Case}, St) when is_atom(Case), is_atom(St#cc.mod) ->
+ collect_case({St#cc.mod,Case}, St);
+collect_cases({module,Mod,Case}, St) ->
+ collect_case({Mod,Case}, St);
+collect_cases({module,Mod,Case,Args}, St) ->
+ collect_case({Mod,Case,Args}, St);
+
+collect_cases({dir,SubDir}, St) ->
+ collect_files(SubDir, "*_SUITE", St);
+collect_cases({dir,SubDir,Pattern}, St) ->
+ collect_files(SubDir, Pattern++"*", St);
+
+%% The first four conf clauses normalise bare init/fin function atoms to
+%% {Mod,Fun} pairs and add an empty Props list when missing; the fifth
+%% clause does the real work.
+collect_cases({conf,InitF,CaseList,FinMF}, St) when is_atom(InitF) ->
+ collect_cases({conf,[],{St#cc.mod,InitF},CaseList,FinMF}, St);
+collect_cases({conf,InitMF,CaseList,FinF}, St) when is_atom(FinF) ->
+ collect_cases({conf,[],InitMF,CaseList,{St#cc.mod,FinF}}, St);
+collect_cases({conf,InitMF,CaseList,FinMF}, St0) ->
+ collect_cases({conf,[],InitMF,CaseList,FinMF}, St0);
+collect_cases({conf,Props,InitF,CaseList,FinMF}, St) when is_atom(InitF) ->
+ collect_cases({conf,Props,{St#cc.mod,InitF},CaseList,FinMF}, St);
+collect_cases({conf,Props,InitMF,CaseList,FinF}, St) when is_atom(FinF) ->
+ collect_cases({conf,Props,InitMF,CaseList,{St#cc.mod,FinF}}, St);
+collect_cases({conf,Props,InitMF,CaseList,FinMF}, St0) ->
+ case collect_cases(CaseList, St0) of
+ {ok,[],_St}=Empty ->
+ Empty;
+ {ok,FlatCases,St} ->
+ %% A unique Ref ties the init and fin conf entries together.
+ Ref = make_ref(),
+ case in_skip_list(InitMF, St#cc.skip) of
+ {true,Comment} ->
+ {ok,[{skip_case,{conf,Ref,InitMF,Comment}} |
+ FlatCases ++ [{conf,Ref,[],FinMF}]],St};
+ false ->
+ {ok,[{conf,Ref,Props,InitMF} |
+ FlatCases ++ [{conf,Ref,keep_name(Props),FinMF}]],St}
+ end;
+ {error,_Reason}=Error ->
+ Error
+ end;
+
+collect_cases({make,InitMFA,CaseList,FinMFA}, St0) ->
+ case collect_cases(CaseList, St0) of
+ {ok,[],_St}=Empty -> Empty;
+ {ok,FlatCases,St} ->
+ Ref = make_ref(),
+ {ok,[{make,Ref,InitMFA}|FlatCases ++
+ [{make,Ref,FinMFA}]],St};
+ {error,_Reason}=Error -> Error
+ end;
+
+collect_cases({Module, Cases}, St) when is_list(Cases) ->
+ case (catch collect_case(Cases, St#cc{mod=Module}, [])) of
+ {ok, NewCases, NewSt} ->
+ {ok, NewCases, NewSt};
+ Other ->
+ {error, Other}
+ end;
+
+collect_cases({_Mod,_Case}=Spec, St) ->
+ collect_case(Spec, St);
+
+collect_cases({_Mod,_Case,_Args}=Spec, St) ->
+ collect_case(Spec, St);
+collect_cases(Case, St) when is_atom(Case), is_atom(St#cc.mod) ->
+ collect_case({St#cc.mod,Case}, St);
+collect_cases(Other, _St) ->
+ {error,{bad_subtest_spec,Other}}.
+
+%% collect_case(MFA, St) -> {ok,FlatCases,St}
+%% Single case: skipped cases become {skip_case,...}; an {M,F} pair is
+%% expanded via M:F(suite) (collect_case_invoke/4), while an {M,F,Args}
+%% triple is taken as a leaf case as-is.
+collect_case(MFA, St) ->
+ case in_skip_list(MFA, St#cc.skip) of
+ {true,Comment} ->
+ {ok,[{skip_case,{MFA,Comment}}],St};
+ false ->
+ case MFA of
+ {Mod,Case} -> collect_case_invoke(Mod, Case, MFA, St);
+ {_Mod,_Case,_Args} -> {ok,[MFA],St}
+ end
+ end.
+
+%% collect_case/3: fold collect_case/2 over a list of case names in the
+%% current module, appending the flattened results.
+collect_case([], St, Acc) ->
+ {ok, Acc, St};
+
+collect_case([Case | Cases], St, Acc) ->
+ {ok, FlatCases, NewSt} = collect_case({St#cc.mod, Case}, St),
+ collect_case(Cases, NewSt, Acc ++ FlatCases).
+
+%% collect_case_invoke(Mod, Case, MFA, St)
+%% Obtain the suite clause result, either by calling Mod:Case(suite)
+%% directly or, when TEST_SERVER_FRAMEWORK is set, by asking the
+%% framework via get_suite. A crash in Mod:Case(suite) makes MFA a
+%% leaf case.
+collect_case_invoke(Mod, Case, MFA, St) ->
+ case os:getenv("TEST_SERVER_FRAMEWORK") of
+ false ->
+ case catch apply(Mod, Case, [suite]) of
+ {'EXIT',_} ->
+ {ok,[MFA],St};
+ Suite ->
+ collect_subcases(Mod, Case, MFA, St, Suite)
+ end;
+ _ ->
+ Suite = test_server_sup:framework_call(get_suite, [?pl2a(Mod),Case],[]),
+ collect_subcases(Mod, Case, MFA, St, Suite)
+ end.
+
+%% collect_subcases(Mod, Case, MFA, St, Suite)
+%% Interpret the value returned by the suite clause (see the table in the
+%% big comment above collect_cases).
+collect_subcases(Mod, Case, MFA, St, Suite) ->
+ case Suite of
+ [] when Case == all -> {ok,[],St};
+ [] -> {ok,[MFA],St};
+%%%! --- START Kept for backwards compatibility ---
+%%%! Requirements are not used
+ {req,ReqList} ->
+ collect_case_deny(Mod, Case, MFA, ReqList, [], St);
+ {req,ReqList,SubCases} ->
+ collect_case_deny(Mod, Case, MFA, ReqList, SubCases, St);
+%%%! --- END Kept for backwards compatibility ---
+ {Skip,Reason} when Skip==skip; Skip==skipped ->
+ {ok,[{skip_case,{MFA,Reason}}],St};
+ SubCases ->
+ collect_case_subcases(Mod, Case, SubCases, St)
+ end.
+
+%% Recurse into SubCases with Mod as the current module, restoring the
+%% previous current module in the resulting state.
+collect_case_subcases(Mod, Case, SubCases, St0) ->
+ OldMod = St0#cc.mod,
+ case collect_cases(SubCases, St0#cc{mod=Mod}) of
+ {ok,FlatCases,St} ->
+ {ok,FlatCases,St#cc{mod=OldMod}};
+ {error,Reason} ->
+ {error,{{Mod,Case},Reason}}
+ end.
+
+%% collect_files(Dir, Pattern, St)
+%% Expand a {dir,...} spec: find all compiled modules matching Pattern in
+%% Dir and collect {Module,all} for each, in sorted order.
+collect_files(Dir, Pattern, St) ->
+ {ok,Cwd} = file:get_cwd(),
+ Dir1 = filename:join(Cwd, Dir),
+ Wc = filename:join([Dir1,Pattern++code:objfile_extension()]),
+ case catch filelib:wildcard(Wc) of
+ {'EXIT', Reason} ->
+ io:format("Could not collect files: ~p~n", [Reason]),
+ {error,{collect_fail,Dir,Pattern}};
+ Mods0 ->
+ Mods = [{path_to_module(Mod),all} || Mod <- lists:sort(Mods0)],
+ collect_cases(Mods, St)
+ end.
+
+%% "/path/to/foo_SUITE.beam" -> foo_SUITE
+path_to_module(Path) ->
+ list_to_atom(filename:rootname(filename:basename(Path))).
+
+%% Backwards-compatibility handling of {req,...} suite returns: a denied
+%% requirement turns the case into a skip.
+collect_case_deny(Mod, Case, MFA, ReqList, SubCases, St) ->
+ case {check_deny(ReqList, St#cc.skip),SubCases} of
+ {{denied,Comment},_SubCases} ->
+ {ok,[{skip_case,{MFA,Comment}}],St};
+ {granted,[]} ->
+ {ok,[MFA],St};
+ {granted,SubCases} ->
+ collect_case_subcases(Mod, Case, SubCases, St)
+ end.
+
+%% check_deny(Reqs, DenyList) -> granted | {denied,Comment}
+%% A non-list requirement is wrapped in a list (last clause).
+check_deny([Req|Reqs], DenyList) ->
+ case check_deny_req(Req, DenyList) of
+ {denied,_Comment}=Denied -> Denied;
+ granted -> check_deny(Reqs, DenyList)
+ end;
+check_deny([], _DenyList) -> granted;
+check_deny(Req, DenyList) -> check_deny([Req], DenyList).
+
+%% A {Req,Val} requirement is denied when the deny list holds the same key
+%% with a value =< Val; otherwise it falls through to a plain membership
+%% check on the bare key (second clause).
+check_deny_req({Req,Val}, DenyList) ->
+ %%io:format("ValCheck ~p=~p in ~p\n", [Req,Val,DenyList]),
+ case lists:keysearch(Req, 1, DenyList) of
+ {value,{_Req,DenyVal}} when Val >= DenyVal ->
+ {denied,io_lib:format("Requirement ~p=~p", [Req,Val])};
+ _ ->
+ check_deny_req(Req, DenyList)
+ end;
+check_deny_req(Req, DenyList) ->
+ case lists:member(Req, DenyList) of
+ true -> {denied,io_lib:format("Requirement ~p", [Req])};
+ false -> granted
+ end.
+
+%% in_skip_list(Case, SkipList) -> {true,Comment} | false
+%% Matches a case against the skip list formats documented above:
+%% {Mod,Funcs,Comment}, {Mod,Func,Comment} and {Mod,Comment}.
+in_skip_list({Mod,Func,_Args}, SkipList) ->
+ in_skip_list({Mod,Func}, SkipList);
+in_skip_list({Mod,Func}, [{Mod,Funcs,Comment}|SkipList]) when is_list(Funcs) ->
+ case lists:member(Func, Funcs) of
+ true ->
+ {true,Comment};
+ _ ->
+ in_skip_list({Mod,Func}, SkipList)
+ end;
+in_skip_list({Mod,Func}, [{Mod,Func,Comment}|_SkipList]) ->
+ {true,Comment};
+in_skip_list({Mod,_Func}, [{Mod,Comment}|_SkipList]) ->
+ {true,Comment};
+in_skip_list({Mod,Func}, [_|SkipList]) ->
+ in_skip_list({Mod,Func}, SkipList);
+in_skip_list(_, []) ->
+ false.
+
+%% Keep only {name,_} properties (used for the fin-conf entry).
+keep_name(Props) ->
+ lists:filter(fun({name,_}) -> true; (_) -> false end, Props).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Target node handling functions %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% get_target_info() -> #target_info
+%%
+%% Returns a record containing system information for target
+
+get_target_info() ->
+ controller_call(get_target_info).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% start_node(SlaveName, Type, Options) ->
+%% {ok, Slave} | {error, Reason}
+%%
+%% Called by test_server. See test_server:start_node/3 for details
+%% The four result shapes from the controller are logged with different
+%% detail; Info/Warning strings, when non-empty, are logged too.
+
+start_node(Name, Type, Options) ->
+ T = 10 * ?ACCEPT_TIMEOUT, % give some extra time
+ format(minor, "Attempt to start ~w node ~p with options ~p",
+ [Type, Name, Options]),
+ case controller_call({start_node,Name,Type,Options}, T) of
+ {{ok,Nodename}, Host, Cmd, Info, Warning} ->
+ format(minor,
+ "Successfully started node ~p on ~p with command: ~p",
+ [Nodename, Host, Cmd]),
+ format(major, "=node_start ~p", [Nodename]),
+ case Info of
+ [] -> ok;
+ _ -> format(minor, Info)
+ end,
+ case Warning of
+ [] -> ok;
+ _ ->
+ format(1, Warning),
+ format(minor, Warning)
+ end,
+ {ok, Nodename};
+ {fail,{Ret, Host, Cmd}} ->
+ format(minor,
+ "Failed to start node ~p on ~p with command: ~p~n"
+ "Reason: ~p",
+ [Name, Host, Cmd, Ret]),
+ {fail,Ret};
+ {Ret, undefined, undefined} ->
+ format(minor, "Failed to start node ~p: ~p", [Name,Ret]),
+ Ret;
+ {Ret, Host, Cmd} ->
+ format(minor,
+ "Failed to start node ~p on ~p with command: ~p~n"
+ "Reason: ~p",
+ [Name, Host, Cmd, Ret]),
+ Ret
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% wait_for_node(Node) -> ok | {error,timeout}
+%%
+%% Wait for a slave/peer node which has been started with
+%% the option {wait,false}. This function returns
+%% when the new node has contacted test_server_ctrl again
+%% (10 second timeout on the controller call).
+
+wait_for_node(Slave) ->
+ case catch controller_call({wait_for_node,Slave},10000) of
+ {'EXIT',{timeout,_}} -> {error,timeout};
+ ok -> ok
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% is_release_available(Release) -> true | false
+%% Release -> string()
+%%
+%% Test if a release (such as "r10b") is available to be
+%% started using start_node/3.
+
+is_release_available(Release) ->
+ controller_call({is_release_available,Release}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% stop_node(Name) -> ok | {error,Reason}
+%%
+%% Clean up - test_server will stop this node
+
+stop_node(Slave) ->
+ controller_call({stop_node,Slave}).
+
+
+%%--------------------------------------------------------------------
+%% Functions handling target communication over socket
+%%
+%% Wire format: each message is a single tag byte 1 followed by a
+%% term_to_binary-encoded request term.
+
+%% Generic send function for communication with target
+request(Sock,Request) ->
+ gen_tcp:send(Sock,<<1,(term_to_binary(Request))/binary>>).
+
+%% Receive and decode request on job specific socket
+%% Used when test is running on a remote target
+%% Loops until decode/1 says {stop,Result}; a socket error terminates
+%% the calling process.
+read_job_sock_loop(Sock) ->
+ case gen_tcp:recv(Sock,0) of
+ {error,Reason} ->
+ gen_tcp:close(Sock),
+ exit({controller,connection_lost,Reason});
+ {ok,<<1,Request/binary>>} ->
+ case decode(binary_to_term(Request)) of
+ ok ->
+ read_job_sock_loop(Sock);
+ {stop,Result} ->
+ Result
+ end
+ end.
+
+%% decode(Request) -> ok | {stop,Result}
+%% 'ok' keeps the receive loop going; {stop,Result} ends it.
+%% NOTE(review): binary_to_term/1 is applied to socket data; the peer is
+%% assumed to be a trusted test_server target node.
+decode({apply,{M,F,A}}) ->
+ apply(M,F,A),
+ ok;
+decode({sync_apply,{M,F,A}}) ->
+ R = apply(M,F,A),
+ request(get(test_server_ctrl_job_sock),{sync_result,R}),
+ ok;
+decode({sync_result,Result}) ->
+ {stop,Result};
+decode({test_case_result,Result}) ->
+ {stop,Result};
+decode({privdir,empty_priv_dir}) ->
+ {stop,ok};
+decode({{privdir,PrivDirTar},TarBin}) ->
+ Root = get(test_server_log_dir_base),
+ unpack_tar(Root,PrivDirTar,TarBin),
+ {stop,ok};
+decode({crash_dumps,no_crash_dumps}) ->
+ {stop,ok};
+decode({{crash_dumps,CrashDumpTar},TarBin}) ->
+ Dir = test_server_sup:crash_dump_dir(),
+ unpack_tar(Dir,CrashDumpTar,TarBin),
+ {stop,ok}.
+
+%% Write the received tar binary to Dir, extract it there, then remove
+%% the temporary tar file.
+unpack_tar(Dir,TarFileName0,TarBin) ->
+ TarFileName = filename:join(Dir,TarFileName0),
+ ok = file:write_file(TarFileName,TarBin),
+ ok = erl_tar:extract(TarFileName,[compressed,{cwd,Dir}]),
+ ok = file:delete(TarFileName).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% DEBUGGER INTERFACE %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% i() - print a c:i()-like process listing (see display_info/3).
+i() ->
+ hformat("Pid", "Initial Call", "Current Function", "Reducts", "Msgs"),
+ Line=lists:duplicate(27, "-"),
+ hformat(Line, Line, Line, Line, Line),
+ display_info(processes(), 0, 0).
+
+%% p/3 and p/1: process_info for a pid given as three integers, a
+%% registered name, a {A,B,C} triple, or a pid.
+p(A,B,C) ->
+ pinfo(ts_pid(A,B,C)).
+p(X) when is_atom(X) ->
+ pinfo(whereis(X));
+p({A,B,C}) ->
+ pinfo(ts_pid(A,B,C));
+p(X) ->
+ pinfo(X).
+
+%% t/0, t/1: first element of statistics(X), wall_clock by default.
+t() ->
+ t(wall_clock).
+t(X) ->
+ element(1, statistics(X)).
+
+%% pi/2, pi/4: look up one Item in the process_info of the given process.
+pi(Item,X) ->
+ lists:keysearch(Item,1,p(X)).
+pi(Item,A,B,C) ->
+ lists:keysearch(Item,1,p(A,B,C)).
+
+%% c:pid/3
+ts_pid(X,Y,Z) when is_integer(X), is_integer(Y), is_integer(Z) ->
+ list_to_pid("<" ++ integer_to_list(X) ++ "." ++
+ integer_to_list(Y) ++ "." ++
+ integer_to_list(Z) ++ ">").
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% display_info(Pids, Reductions, Messages) -> void
+%% Pids = [pid(),...]
+%% Reductions = integer()
+%% Messages = integer()
+%%
+%% Displays info, similar to c:i() about the processes in the list Pids.
+%% Also counts the total number of reductions and msgs for the listed
+%% processes, if called with Reductions = Messages = 0.
+
+display_info([Pid|T], R, M) ->
+ case pinfo(Pid) of
+ undefined ->
+ %% Process died between processes() and the info call; skip it.
+ display_info(T, R, M);
+ Info ->
+ Call = fetch(initial_call, Info),
+ Curr = case fetch(current_function, Info) of
+ {Mod,F,Args} when is_list(Args) ->
+ {Mod,F,length(Args)};
+ Other ->
+ Other
+ end,
+ Reds = fetch(reductions, Info),
+ LM = length(fetch(messages, Info)),
+ pformat(io_lib:format("~w", [Pid]),
+ io_lib:format("~w", [Call]),
+ io_lib:format("~w", [Curr]), Reds, LM),
+ display_info(T, R+Reds, M + LM)
+ end;
+display_info([], R, M) ->
+ Line=lists:duplicate(27, "-"),
+ hformat(Line, Line, Line, Line, Line),
+ pformat("Total", "", "", R, M).
+
+%% Header row formatter (all-string columns).
+hformat(A1, A2, A3, A4, A5) ->
+ io:format("~-10s ~-27s ~-27s ~8s ~4s~n", [A1,A2,A3,A4,A5]).
+
+%% Data row formatter (last two columns are terms).
+pformat(A1, A2, A3, A4, A5) ->
+ io:format("~-10s ~-27s ~-27s ~8w ~4w~n", [A1,A2,A3,A4,A5]).
+
+%% fetch(Key, Info) -> Value | 0 when the key is absent.
+fetch(Key, Info) ->
+ case lists:keysearch(Key, 1, Info) of
+ {value, {_, Val}} ->
+ Val;
+ _ ->
+ 0
+ end.
+
+%% process_info for a local or remote pid (remote via rpc).
+pinfo(P) ->
+ Node = node(),
+ case node(P) of
+ Node ->
+ process_info(P);
+ _ ->
+ rpc:call(node(P),erlang,process_info,[P])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Support functions for COVER %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% A module is included in the cover analysis if
+%% - it belongs to the tested application and is not listed in the
+%% {exclude,List} part of the App.cover file
+%% - it does not belong to the application, but is listed in the
+%% {include,List} part of the App.cover file
+%% - it does not belong to the application, but is listed in the
+%% cross.cover file (in the test_server application) under 'all'
+%% or under the tested application.
+%%
+%% The modules listed in the cross.cover file are modules that are
+%% heavily used by other applications than the one they belong
+%% to. After all tests are completed, these modules can be analysed
+%% with coverage data from all tests - see cross_cover_analyse/1. The
+%% result is stored in a file called cross_cover.html in the
+%% run.<timestamp> directory of the application the modules belong
+%% to.
+%%
+%% For example, the lists module is listed in cross.cover to be
+%% included in all tests. lists belongs to the stdlib
+%% application. cross_cover_analyse/1 will create a file named
+%% cross_cover.html under the newest stdlib.logs/run.xxx directory,
+%% where the coverage result for the lists module from all tests is
+%% presented.
+%%
+%% The lists module is also presented in the normal coverage log
+%% for stdlib, but that only includes the coverage achieved by
+%% the stdlib tests themselves.
+%%
+%% The Cross cover file cross.cover contains elements like this:
+%% {App,Modules}.
+%% where App can be an application name or the atom all. The
+%% application (or all applications) shall cover compile the listed
+%% Modules.
+
+
+%% Cover compilation
+%% The compilation is executed on the target node
+%% First clause: cover info already parsed; second clause: parse the
+%% CoverFile first.
+cover_compile({App,{_File,Exclude,Include,Cross,_Export}}) ->
+ cover_compile1({App,Exclude,Include,Cross});
+
+cover_compile({App,CoverFile}) ->
+ Cross = get_cross_modules(App),
+ {Exclude,Include} = read_cover_file(CoverFile),
+ cover_compile1({App,Exclude,Include,Cross}).
+
+%% Run the compilation locally, or forward it to a remote target over
+%% the job socket.
+cover_compile1(What) ->
+ case get(test_server_ctrl_job_sock) of
+ undefined ->
+ %% local target
+ test_server:cover_compile(What);
+ JobSock ->
+ %% remote target
+ request(JobSock, {sync_apply,{test_server,cover_compile,[What]}}),
+ read_job_sock_loop(JobSock)
+ end.
+
+
+%% Read the coverfile for an application and return a list of modules
+%% that are members of the application but shall not be compiled
+%% (Exclude), and a list of modules that are not members of the
+%% application but shall be compiled (Include).
+%% Any read or format error degrades to {[],[]} with a printed warning.
+read_cover_file(none) ->
+ {[],[]};
+read_cover_file(CoverFile) ->
+ case file:consult(CoverFile) of
+ {ok,List} ->
+ case check_cover_file(List, [], []) of
+ {ok,Exclude,Include} -> {Exclude,Include};
+ error ->
+ io:fwrite("Faulty format of CoverFile ~p\n", [CoverFile]),
+ {[],[]}
+ end;
+ {error,Reason} ->
+ io:fwrite("Can't read CoverFile ~p\nReason: ~p\n",
+ [CoverFile,Reason]),
+ {[],[]}
+ end.
+
+%% check_cover_file(Terms, Exclude, Include) -> {ok,Exclude,Include} | error
+%% Validates that exclude/include entries are lists of atoms (or the
+%% atom 'all' for exclude). Note that a later {exclude,_}/{include,_}
+%% term replaces - not extends - an earlier one.
+check_cover_file([{exclude,all}|Rest], _, Include) ->
+ check_cover_file(Rest, all, Include);
+check_cover_file([{exclude,Exclude}|Rest], _, Include) ->
+ case lists:all(fun(M) -> is_atom(M) end, Exclude) of
+ true ->
+ check_cover_file(Rest, Exclude, Include);
+ false ->
+ error
+ end;
+check_cover_file([{include,Include}|Rest], Exclude, _) ->
+ case lists:all(fun(M) -> is_atom(M) end, Include) of
+ true ->
+ check_cover_file(Rest, Exclude, Include);
+ false ->
+ error
+ end;
+check_cover_file([], Exclude, Include) ->
+ {ok,Exclude,Include}.
+
+
+
+%% Cover analysis, per application
+%% This analysis is executed on the target node once the test is
+%% completed for an application. This is not the same as the cross
+%% cover analysis, which can be executed on any node after the tests
+%% are finished.
+%%
+%% This per application analysis writes the file cover.html in the
+%% application's run.<timestamp> directory.
+cover_analyse({App,CoverInfo}, Analyse, AnalyseMods, TestDir) ->
+ write_default_cross_coverlog(TestDir),
+
+ {ok,CoverLog} = file:open(filename:join(TestDir, ?coverlog_name), [write]),
+ write_coverlog_header(CoverLog),
+ io:fwrite(CoverLog, "<h1>Coverage for application '~w'</h1>\n", [App]),
+ io:fwrite(CoverLog,
+ "<p><a href=\"~s\">Coverdata collected over all tests</a></p>",
+ [?cross_coverlog_name]),
+
+ %% CoverInfo is either the pre-parsed 5-tuple (then coverdata is also
+ %% exported) or just the CoverFile name (parsed here).
+ {CoverFile,_Included,Excluded} =
+ case CoverInfo of
+ {File,Excl,Incl,_Cross,Export} ->
+ cover:export(Export),
+ {File,Incl,Excl};
+ File ->
+ {Excl,Incl} = read_cover_file(File),
+ {File,Incl,Excl}
+ end,
+ io:fwrite(CoverLog, "<p>CoverFile: <code>~p</code>\n", [CoverFile]),
+
+ case length(cover:imported_modules()) of
+ Imps when Imps > 0 ->
+ io:fwrite(CoverLog, "<p>Analysis includes data from ~w imported module(s).\n",
+ [Imps]);
+ _ ->
+ ok
+ end,
+
+ io:fwrite(CoverLog, "<p>Excluded module(s): <code>~p</code>\n", [Excluded]),
+
+ Coverage = cover_analyse(Analyse, AnalyseMods),
+
+ %% Entries not matching {_M,{Cov,NotCov,Details}} are analysis failures.
+ case lists:filter(fun({_M,{_,_,_}}) -> false;
+ (_) -> true
+ end, Coverage) of
+ [] ->
+ ok;
+ Bad ->
+ io:fwrite(CoverLog, "<p>Analysis failed for ~w module(s): "
+ "<code>~w</code>\n",
+ [length(Bad),[BadM || {BadM,{_,_Why}} <- Bad]])
+ end,
+
+ TotPercent = write_cover_result_table(CoverLog, Coverage),
+ file:write_file(filename:join(TestDir, ?cover_total),
+ term_to_binary(TotPercent)).
+
+%% Run the analysis locally, or forward it to a remote target over the
+%% job socket (same local/remote dispatch as cover_compile1/1).
+cover_analyse(Analyse, AnalyseMods) ->
+ TestDir = get(test_server_log_dir_base),
+ case get(test_server_ctrl_job_sock) of
+ undefined ->
+ %% local target
+ test_server:cover_analyse({Analyse,TestDir}, AnalyseMods);
+ JobSock ->
+ %% remote target
+ request(JobSock, {sync_apply,{test_server,
+ cover_analyse,
+ [Analyse,AnalyseMods]}}),
+ read_job_sock_loop(JobSock)
+ end.
+
+
+%% Cover analysis, cross application
+%% This can be executed on any node after all tests are finished.
+%% The node's current directory must be the same as when the tests
+%% were run.
+cross_cover_analyse(Analyse) ->
+ cross_cover_analyse(Analyse, undefined).
+
+%% CrossModules = undefined (use cross.cover + imported modules) |
+%% explicit module list. Analyse = details | overview-only.
+cross_cover_analyse(Analyse, CrossModules) ->
+ CoverdataFiles = get_coverdata_files(),
+ lists:foreach(fun(CDF) -> cover:import(CDF) end, CoverdataFiles),
+ io:fwrite("Cover analysing... ", []),
+ DetailsFun =
+ case Analyse of
+ details ->
+ fun(Dir,M) ->
+ OutFile = filename:join(Dir,
+ atom_to_list(M) ++
+ ".CROSS_COVER.html"),
+ cover:analyse_to_file(M, OutFile, [html]),
+ {file,OutFile}
+ end;
+ _ ->
+ fun(_,_) -> undefined end
+ end,
+ SortedModules =
+ case CrossModules of
+ undefined ->
+ sort_modules([Mod || Mod <- get_all_cross_modules(),
+ lists:member(Mod, cover:imported_modules())], []);
+ _ ->
+ sort_modules(CrossModules, [])
+ end,
+ Coverage = analyse_apps(SortedModules, DetailsFun, []),
+ cover:stop(),
+ write_cross_cover_logs(Coverage).
+
%% For each application from which there are modules listed in the
%% cross.cover file, write a cross cover log (cross_cover.html) into
%% that application's latest run.<timestamp> directory.  Applications
%% without any run directory are silently skipped.  Prints progress
%% on standard output and "done" when finished.
write_cross_cover_logs([{App,Coverage}|T]) ->
    case last_test_for_app(App) of
        false ->
            %% No run directory found for this application - skip it.
            ok;
        Dir ->
            CoverLogName = filename:join(Dir,?cross_coverlog_name),
            {ok,CoverLog} = file:open(CoverLogName, [write]),
            write_coverlog_header(CoverLog),
            io:fwrite(CoverLog,
                      "<h1>Coverage results for \'~w\' from all tests</h1>\n",
                      [App]),
            %% write_cover_result_table/2 also closes CoverLog.
            write_cover_result_table(CoverLog, Coverage),
            io:fwrite("Written file ~p\n", [CoverLogName])
    end,
    write_cross_cover_logs(T);
write_cross_cover_logs([]) ->
    io:fwrite("done\n", []).
+
%% Find all exported coverdata files.  First find the latest
%% run.<timestamp> directory inside each *<?logdir_ext> directory,
%% then keep only those that actually contain a file named
%% all.coverdata.
get_coverdata_files() ->
    PossibleFiles = [last_coverdata_file(Dir) ||
                        Dir <- filelib:wildcard([$*|?logdir_ext]),
                        filelib:is_dir(Dir)],
    [File || File <- PossibleFiles, filelib:is_file(File)].

%% Path to all.coverdata in the most recent run.<timestamp>
%% subdirectory of Dir.  The file is not guaranteed to exist; the
%% caller filters on filelib:is_file/1.
last_coverdata_file(Dir) ->
    LastDir = last_test(filelib:wildcard(filename:join(Dir,"run.[1-2]*")),false),
    filename:join(LastDir,"all.coverdata").


%% Find the latest run.<timestamp> directory for the given application,
%% or 'false' if the application has no run directories at all.
last_test_for_app(App) ->
    AppLogDir = atom_to_list(App)++?logdir_ext,
    last_test(filelib:wildcard(filename:join(AppLogDir,"run.[1-2]*")),false).
+
%% Pick the greatest (i.e. most recent, since the names embed a
%% timestamp) run directory name from the list, starting from the
%% given initial value.  Returns the initial value ('false' as used
%% by the callers) when the list is empty.
last_test(Runs, Initial) ->
    lists:foldl(fun(Run, false) -> Run;
                   (Run, Latest) when Run > Latest -> Run;
                   (_Run, Latest) -> Latest
                end, Initial, Runs).
+
%% Sort modules according to the application they belong to.
%% Return [{App,LastTestDir,ModuleList}] - note that the result list
%% (and each ModuleList) comes out in reverse insertion order, which
%% is acceptable since the caller treats it as an unordered grouping.
sort_modules([M|Modules], Acc) ->
    App = get_app(M),
    Acc1 =
        case lists:keysearch(App, 1, Acc) of
            {value,{App,LastTest,List}} ->
                %% Application already seen - prepend the module.
                lists:keyreplace(App, 1, Acc, {App,LastTest,[M|List]});
            false ->
                %% First module for this application; look up its
                %% latest run directory once and cache it in the tuple.
                [{App,last_test_for_app(App),[M]}|Acc]
        end,
    sort_modules(Modules, Acc1);
sort_modules([], Acc) ->
    Acc.
+
%% Derive the OTP application name of a loaded module from the path
%% of its beam file, which is expected to look like
%% .../<application>-<version>/ebin/<module>.beam.
get_app(Module) ->
    BeamPath = code:which(Module),
    EbinDir = filename:dirname(BeamPath),
    AppVsnDir = filename:basename(filename:dirname(EbinDir)),
    %% The directory name is "<application>-<version>"; keep the part
    %% before the first dash.
    [AppName|_Vsn] = string:tokens(AppVsnDir, "-"),
    list_to_atom(AppName).
+
+
%% For each application, analyse all modules.
%% Used for cross cover analysis.  Input is the grouping produced by
%% sort_modules/2; output is [{App,[{Mod,{Cov,NotCov,Details}}]}]
%% (both lists in reverse order of the input, which the callers do
%% not depend on).
analyse_apps([{App,LastTest,Modules}|T], DetailsFun, Acc) ->
    Cov = analyse_modules(LastTest, Modules, DetailsFun, []),
    analyse_apps(T, DetailsFun, [{App,Cov}|Acc]);
analyse_apps([], _DetailsFun, Acc) ->
    Acc.

%% Analyse each module with the cover server (must be running with
%% the relevant coverdata imported).  Dir is the application's latest
%% run directory, handed to DetailsFun for the optional per-module
%% detail file.  Crashes (badmatch) if cover:analyse/2 fails.
analyse_modules(Dir, [M|Modules], DetailsFun, Acc) ->
    {ok,{M,{Cov,NotCov}}} = cover:analyse(M, module),
    Acc1 = [{M,{Cov,NotCov,DetailsFun(Dir,M)}}|Acc],
    analyse_modules(Dir, Modules, DetailsFun, Acc1);
analyse_modules(_Dir, [], _DetailsFun, Acc) ->
    Acc.
+
+
%% Read the cross cover file (cross.cover) and return the modules
%% relevant for App ('all' selects every module in the file).
%% Returns [] if the file is missing or unparsable.
get_all_cross_modules() ->
    get_cross_modules(all).
get_cross_modules(App) ->
    case file:consult(?cross_cover_file) of
        {ok,List} ->
            get_cross_modules(App, List, []);
        _X ->
            %% No (or broken) cross.cover file - nothing to analyse.
            []
    end.
+
%% Collect the modules from the parsed cross.cover terms that are
%% relevant for App.  An entry {To,Modules} is taken when we collect
%% for all applications (App =:= all), when it targets App itself, or
%% when it targets all applications (To =:= all).  The original code
%% had a separate clause for App == all; the three conditions are one
%% guard sequence, so the clauses are merged here.  Entries of any
%% other shape are skipped.  File order of the modules is preserved.
%% (Acc ++ Modules is quadratic in principle, but cross.cover files
%% are small.)
get_cross_modules(App, [{To,Modules}|T], Acc)
  when App =:= all; To =:= App; To =:= all ->
    get_cross_modules(App, T, Acc ++ Modules);
get_cross_modules(App, [_H|T], Acc) ->
    %% Non-matching or malformed entry - ignore it.
    get_cross_modules(App, T, Acc);
get_cross_modules(_App, [], Acc) ->
    Acc.
+
+
+%% Support functions for writing the cover logs (both cross and normal)
+write_coverlog_header(CoverLog) ->
+ case catch
+ io:fwrite(CoverLog,
+ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
+ "<!-- autogenerated by '~w'. -->\n"
+ "<html>\n"
+ "<head><title>Coverage results</title></head>\n"
+ "<body bgcolor=\"white\" text=\"black\" "
+ "link=\"blue\" vlink=\"purple\" alink=\"red\">",
+ [?MODULE]) of
+ {'EXIT',Reason} ->
+ io:format("\n\nERROR: Could not write normal heading in coverlog.\n"
+ "CoverLog: ~w\n"
+ "Reason: ~p\n",
+ [CoverLog,Reason]),
+ io:format(CoverLog,"<html><body>\n", []);
+ _ ->
+ ok
+ end.
+
+
%% Render one table row (as an iolist) for a module's coverage
%% result.  The fourth argument is the Details term produced during
%% analysis and decides whether the module name is plain text or a
%% link to a per-module detail page:
%%   undefined   - no details were requested; plain row.
%%   {file,File} - link to an already written HTML detail file.
%%   {lines,Ls}  - write <Module>.COVER.html here (listing the
%%                 uncovered lines) and link to it.
%%   {error,_}   - analysis of the detail info failed; plain row.
format_analyse(M,Cov,NotCov,undefined) ->
    io_lib:fwrite("<tr><td>~w</td>"
                  "<td align=right>~w %</td>"
                  "<td align=right>~w</td>"
                  "<td align=right>~w</td></tr>\n",
                  [M,pc(Cov,NotCov),Cov,NotCov]);
format_analyse(M,Cov,NotCov,{file,File}) ->
    io_lib:fwrite("<tr><td><a href=\"~s\">~w</a></td>"
                  "<td align=right>~w %</td>"
                  "<td align=right>~w</td>"
                  "<td align=right>~w</td></tr>\n",
                  [filename:basename(File),M,pc(Cov,NotCov),Cov,NotCov]);
format_analyse(M,Cov,NotCov,{lines,Lines}) ->
    %% Side effect: creates the detail file in the current directory.
    CoverOutName = atom_to_list(M)++".COVER.html",
    {ok,CoverOut} = file:open(CoverOutName, [write]),
    write_not_covered(CoverOut,M,Lines),
    io_lib:fwrite("<tr><td><a href=\"~s\">~w</a></td>"
                  "<td align=right>~w %</td>"
                  "<td align=right>~w</td>"
                  "<td align=right>~w</td></tr>\n",
                  [CoverOutName,M,pc(Cov,NotCov),Cov,NotCov]);
format_analyse(M,Cov,NotCov,{error,_}) ->
    io_lib:fwrite("<tr><td>~w</td>"
                  "<td align=right>~w %</td>"
                  "<td align=right>~w</td>"
                  "<td align=right>~w</td></tr>\n",
                  [M,pc(Cov,NotCov),Cov,NotCov]).
+
+
%% Percentage of covered lines, rounded to the nearest integer.
%% Defined as 0 when there are no lines at all, avoiding a division
%% by zero.
pc(0, 0) ->
    0;
pc(Covered, NotCovered) ->
    Total = Covered + NotCovered,
    round(100 * Covered / Total).
+
+
%% Write an HTML page to the already opened device CoverOut listing
%% every uncovered line of module M.  Lines is a cover 'line'
%% analysis result; only entries of the shape {{_,Line},{0,1}} (zero
%% hits out of one possible) denote uncovered lines - everything else
%% is skipped, exactly as the original foreach did.
write_not_covered(CoverOut, M, Lines) ->
    io:fwrite(CoverOut,
              "<html>\n"
              "The following lines in module ~w are not covered:\n"
              "<table border=3 cellpadding=5>\n"
              "<th>Line Number</th>\n",
              [M]),
    Rows = [io_lib:fwrite("<tr><td>~w</td></tr>\n", [Line])
            || {{_M,Line},{0,1}} <- Lines],
    io:put_chars(CoverOut, Rows),
    io:fwrite(CoverOut, "</table>\n</html>\n", []).
+
+
%% Write a placeholder cover log into TestDir stating that the cover
%% tool was not used for this run.
write_default_coverlog(TestDir) ->
    {ok,CoverLog} = file:open(filename:join(TestDir,?coverlog_name), [write]),
    write_coverlog_header(CoverLog),
    io:fwrite(CoverLog,"Cover tool is not used\n</body></html>\n", []),
    file:close(CoverLog).

%% Write a placeholder cross cover log into TestDir; shown until
%% cross_cover_analyse/1,2 has produced a real one.
write_default_cross_coverlog(TestDir) ->
    {ok,CrossCoverLog} =
        file:open(filename:join(TestDir,?cross_coverlog_name), [write]),
    write_coverlog_header(CrossCoverLog),
    io:fwrite(CrossCoverLog,
              "No cross cover modules exist for this application,<br>"
              "or cross cover analysis is not completed.\n"
              "</body></html>\n", []),
    file:close(CrossCoverLog).
+
%% Write the per-module result table plus a totals row to CoverLog,
%% close the log, and return the total coverage percentage.
%% Coverage elements are either {M,{Cov,NotCov,Details}} (rendered
%% via format_analyse/4 and counted into the totals) or
%% {M,{error,Reason}} (skipped - already reported by the caller).
write_cover_result_table(CoverLog,Coverage) ->
    io:fwrite(CoverLog,
              "<p><table border=3 cellpadding=5>\n"
              "<tr><th>Module</th><th>Covered (%)</th><th>Covered (Lines)</th>"
              "<th>Not covered (Lines)</th>\n",
              []),
    {TotCov,TotNotCov} =
        lists:foldl(fun({M,{Cov,NotCov,Details}},{AccCov,AccNotCov}) ->
                            Str = format_analyse(M,Cov,NotCov,Details),
                            io:fwrite(CoverLog,"~s", [Str]),
                            {AccCov+Cov,AccNotCov+NotCov};
                       ({_M,{error,_Reason}},{AccCov,AccNotCov}) ->
                            %% Module that failed analysis - leave the
                            %% totals untouched and emit no row.
                            {AccCov,AccNotCov}
                    end,
                    {0,0},
                    Coverage),
    TotPercent = pc(TotCov,TotNotCov),
    io:fwrite(CoverLog,
              "<tr><th align=left>Total</th><th align=right>~w %</th>"
              "<th align=right>~w</th><th align=right>~w</th></tr>\n"
              "</table>\n"
              "</body>\n"
              "</html>\n",
              [TotPercent,TotCov,TotNotCov]),
    file:close(CoverLog),
    TotPercent.
diff --git a/lib/test_server/src/test_server_h.erl b/lib/test_server/src/test_server_h.erl
new file mode 100644
index 0000000000..e423863b99
--- /dev/null
+++ b/lib/test_server/src/test_server_h.erl
@@ -0,0 +1,129 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(test_server_h).
+-behaviour(gen_event).
+
+%% API
+-export([install/0, restore/0]).
+-export([testcase/1]).
+
+%% gen_event callbacks
+-export([init/1, handle_event/2, handle_call/2,
+ handle_info/2, terminate/2, code_change/3]).
+
+-record(state, {kernel, sasl, testcase}).
+
+%%====================================================================
+%% API
+%%====================================================================
+
%% Install this module as an error_logger handler, replacing both the
%% standard tty handler and the SASL tty handler (the exact order -
%% add ours first, then remove the others - avoids a window with no
%% handler at all).  Returns ok or the error from add_handler.
install() ->
    case gen_event:add_handler(error_logger, ?MODULE, []) of
        ok ->
            error_logger:delete_report_handler(sasl_report_tty_h),
            gen_event:delete_handler(error_logger, error_logger_tty_h, []),
            ok;
        Error ->
            Error
    end.

%% Undo install/0: put the standard handlers back, then remove this
%% module from error_logger.
restore() ->
    gen_event:add_handler(error_logger, error_logger_tty_h, []),
    error_logger:add_report_handler(sasl_report_tty_h, all),
    gen_event:delete_handler(error_logger, ?MODULE, []).

%% Tell the handler which test case is currently running, so that
%% subsequent reports can be tagged with it.  Generous 10 minute
%% timeout since error_logger may be busy flushing reports.
testcase(Testcase) ->
    gen_event:call(error_logger, ?MODULE, {set_testcase, Testcase}, 10*60*1000).
+
+%%====================================================================
+%% gen_event callbacks
+%%====================================================================
+
%% gen_event callback.  The state mimics what the two replaced
%% handlers expect: error_logger_tty_h wants {User,Buffer} and
%% sasl_report_tty_h wants the report type filter.
init([]) ->

    %% error_logger_tty_h initialization
    User = set_group_leader(),

    %% sasl_report_tty_h initialization
    Type = all,

    {ok, #state{kernel={User, []}, sasl=Type}}.

%% Make the 'user' process our group leader so handler output goes to
%% the user device; link to it so we go down with it.  Returns the
%% pid of 'user', or false when it is not registered.
set_group_leader() ->
    case whereis(user) of
        User when is_pid(User) ->
            link(User),
            group_leader(User, self()),
            User;
        _ ->
            false
    end.
+
%% gen_event callback.  Events whose group leader lives on another
%% node are ignored (that node's own handlers deal with them).
%% Report events are classified by report/2 and forwarded to the
%% appropriate replaced tty handler, prefixed with a test-case tag.
handle_event({_Type, GL, _Msg}, State) when node(GL)/=node() ->
    {ok, State};
handle_event({Tag, _GL, {_Pid, Type, _Report}} = Event, State) ->
    case report(Tag, Type) of
        sasl ->
            tag(State#state.testcase),
            sasl_report_tty_h:handle_event(Event, State#state.sasl);
        kernel ->
            tag(State#state.testcase),
            error_logger_tty_h:handle_event(Event, State#state.kernel);
        none ->
            ignore
    end,
    {ok, State};
handle_event(_Event, State) ->
    %% Anything of another shape is silently dropped.
    {ok, State}.
+
%% gen_event callback: {set_testcase,Testcase} records the currently
%% executing test case (see testcase/1) and replies ok.
%%
%% NOTE: a gen_event handle_call/2 must return {ok,Reply,State} (or a
%% swap/remove tuple).  The previous catch-all returned the bare term
%% {error,bad_query}, which is an invalid callback return and would
%% crash the handler; reply with the error term instead.
handle_call({set_testcase, Testcase}, State) ->
    {ok, ok, State#state{testcase=Testcase}};
handle_call(_Query, State) ->
    {ok, {error, bad_query}, State}.
+
%% Raw emulator messages from this node are tagged and forwarded to
%% the standard tty handler; everything else is ignored (keeps the
%% mailbox drained).
handle_info({emulator,GL,_Chars}=Event, State) when node(GL)==node() ->
    tag(State#state.testcase),
    error_logger_tty_h:handle_info(Event, State#state.kernel),
    {ok, State};
handle_info(_Msg, State) ->
    {ok, State}.

%% Nothing to clean up; the handler holds no external resources.
terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
+
%% Classify an error_logger event by its tag and report type:
%% 'sasl' events go to the SASL tty handler, 'kernel' events to the
%% standard tty handler, and 'none' events are dropped.  The SASL
%% cases must be checked before the generic error_report fallback.
report(Tag, Type) ->
    case {Tag, Type} of
        {error_report, supervisor_report} -> sasl;
        {error_report, crash_report}      -> sasl;
        {info_report, progress}           -> sasl;
        {error, _}                        -> kernel;
        {error_report, _}                 -> kernel;
        {warning_msg, _}                  -> kernel;
        {warning_report, _}               -> kernel;
        {info, _}                         -> kernel;
        {info_msg, _}                     -> kernel;
        {info_report, _}                  -> kernel;
        {_, _}                            -> none
    end.
+
%% Print a "=TESTCASE:" prefix on the user device before a forwarded
%% report; MFA tuples get the Mod:Fun/Arity form, anything else is
%% printed as-is with ~p.
tag({M,F,A}) when is_atom(M), is_atom(F), is_integer(A) ->
    io:format(user, "~n=TESTCASE: ~p:~p/~p", [M,F,A]);
tag(Testcase) ->
    io:format(user, "~n=TESTCASE: ~p", [Testcase]).
diff --git a/lib/test_server/src/test_server_internal.hrl b/lib/test_server/src/test_server_internal.hrl
new file mode 100644
index 0000000000..6fa5ef75b1
--- /dev/null
+++ b/lib/test_server/src/test_server_internal.hrl
@@ -0,0 +1,59 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-define(priv_dir,"log_private").
+-define(MAIN_PORT,3289).
+-define(ACCEPT_TIMEOUT,20000).
+
%% Target information generated by test_server:init_target_info/0 and
%% test_server_ctrl:contact_main_target/2.
%% Once initiated, this information will never change!!
-record(target_info, {where, % local | Socket
		      os_family, % atom(); win32 | unix | vxworks | ose
		      os_type, % result of os:type()
		      host, % string(); the name of the target machine
		      version, % string()
		      system_version, % string()
		      root_dir, % string()
		      test_server_dir, % string()
		      emulator, % string()
		      otp_release, % string()
		      username, % string()
		      cookie, % string(); Cookie for target node
		      naming, % string(); "-name" | "-sname"
		      master, % string(); For OSE this is the master
		              % node for main target and slave nodes.
		              % For other platforms the target node
		              % itself is master for slave nodes

		      %% The following are only used for remote targets
		      target_client, % reference to a client talking to target
		      slave_targets=[]}).% list() of atom(); all available
			                 % targets for starting slave nodes
+
+%% Temporary information generated by test_server_ctrl:read_parameters/X
+%% This information is used when starting the main target, and for
+%% initiating the #target_info record.
+-record(par, {type,
+ target,
+ slave_targets=[],
+ naming,
+ master,
+ cookie}).
+
diff --git a/lib/test_server/src/test_server_line.erl b/lib/test_server/src/test_server_line.erl
new file mode 100644
index 0000000000..26ef3a3040
--- /dev/null
+++ b/lib/test_server/src/test_server_line.erl
@@ -0,0 +1,380 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(test_server_line).
+
+%% User interface
+-export([get_lines/0]).
+-export([clear/0]).
+
+%% Parse transform functions
+-export([parse_transform/2]).
+-export(['$test_server_line'/3]).
+-export(['$test_server_lineQ'/3]).
+-export([trace_line/3]).
+
+-define(TEST_SERVER_LINE_SIZE, 10).
+%-define(STORAGE_FUNCTION, '$test_server_line').
+-define(STORAGE_FUNCTION, '$test_server_lineQ').
+
+-include("test_server.hrl").
+
+-record(vars, {module, % atom() Module name
+ function, % atom() Function name
+ arity, % int() Function arity
+ lines, % [int()] seen lines
+ is_guard=false, % boolean()
+ no_lines=[], % [{atom(),integer()}]
+ % Functions to exclude
+ line_trace=false
+ }).
+
+
+
+
%% Process dictionary littering variant
%%
%% Record {Mod,Func,Line} in a fixed-size ring buffer kept in the
%% process dictionary: '$test_server_line' holds the index of the
%% most recently written slot, and {'$test_server_line',I} holds the
%% entry for slot I (1..?TEST_SERVER_LINE_SIZE).  Consecutive hits in
%% the same Mod/Func overwrite the current slot instead of consuming
%% a new one, so the buffer records transitions rather than every
%% single line.

'$test_server_line'(Mod, Func, Line) ->
    %% Prev is the most recently used slot, Next the slot to use if a
    %% new entry is needed (wrapping around at the buffer size).
    {Prev,Next} =
	case get('$test_server_line') of
	    I when is_integer(I) ->
		if 1 =< I, I < ?TEST_SERVER_LINE_SIZE -> {I,I+1};
		   true -> {?TEST_SERVER_LINE_SIZE,1}
		end;
	    _ -> {?TEST_SERVER_LINE_SIZE,1}
	end,
    PrevTag = {'$test_server_line',Prev},
    case get(PrevTag) of
	{Mod,Func,_} -> put(PrevTag, {Mod,Func,Line});
	_ ->
	    put({'$test_server_line',Next}, {Mod,Func,Line}),
	    put('$test_server_line', Next)
    end, ok.
+
%% Return the recorded lines from the ring buffer in chronological
%% order (oldest first), or [] if nothing has been recorded.
test_server_line_get() ->
    case get('$test_server_line') of
	I when is_integer(I), 1 =< I, I =< ?TEST_SERVER_LINE_SIZE ->
	    test_server_line_get_1(?TEST_SERVER_LINE_SIZE, I, []);
	_ -> []
    end.

%% Walk the ring backwards from the slot after the most recent one,
%% prepending entries so the result comes out oldest-first.
test_server_line_get_1(0, _I, R) ->
    R;
test_server_line_get_1(Cnt, I, R) ->
    J = if I < ?TEST_SERVER_LINE_SIZE -> I+1;
	   true -> 1 end,
    case get({'$test_server_line',J}) of
	undefined ->
	    %% Less than ?TEST_SERVER_LINE_SIZE number of lines stored
	    %% Start from line 1 and stop at actual number of lines
	    case get({'$test_server_line',1}) of
		undefined -> R; % no lines at all stored
		E -> test_server_line_get_1(I-1,1,[E|R])
	    end;
	E ->
	    test_server_line_get_1(Cnt-1, J, [E|R])
    end.
+
%% Erase the whole ring buffer from the process dictionary: every
%% slot entry plus the current-slot index.
test_server_line_clear() ->
    [erase({'$test_server_line',I}) || I <- lists:seq(1, ?TEST_SERVER_LINE_SIZE)],
    erase('$test_server_line'),
    ok.
+
+
%% Queue variant, uses just one process dictionary entry
%%
%% '$test_server_lineQ' maps to {Count,Queue} where Queue holds at
%% most ?TEST_SERVER_LINE_SIZE {Mod,Func,Line} entries, newest at the
%% head.  As in the pdict variant, a repeated hit in the same
%% Mod/Func replaces the head instead of adding a new entry.
%% NOTE: queue:cons/head/tail/lait is the old "reversed" queue API
%% kept for compatibility with this module's R11 requirement.

'$test_server_lineQ'(Mod, Func, Line) ->
    case get('$test_server_lineQ') of
	{I,Q} when is_integer(I), 1 =< I, I =< ?TEST_SERVER_LINE_SIZE ->
	    case queue:head(Q) of
		{Mod,Func,_} ->
		    %% Replace queue head
		    put('$test_server_lineQ',
			{I,queue:cons({Mod,Func,Line}, queue:tail(Q))});
		_ when I < ?TEST_SERVER_LINE_SIZE ->
		    put('$test_server_lineQ',
			{I+1,queue:cons({Mod,Func,Line}, Q)});
		_ ->
		    %% Waste last in queue
		    put('$test_server_lineQ',
			{I,queue:cons({Mod,Func,Line}, queue:lait(Q))})
	    end;
	_ ->
	    %% First entry ever - create the queue.
	    Q = queue:new(),
	    put('$test_server_lineQ', {1,queue:cons({Mod,Func,Line}, Q)})
    end, ok.
+
+%test_server_lineQ_get() ->
+% case get('$test_server_lineQ') of
+% {I,Q} when integer(I), 1 =< I, I =< ?TEST_SERVER_LINE_SIZE ->
+% queue:to_list(Q);
+% _ -> []
+% end.
+
%% Drop the queue variant's single process dictionary entry.
test_server_lineQ_clear() ->
    erase('$test_server_lineQ'),
    ok.


%% Get line - check if queue or dictionary is used, then get the lines
%% (newest last in both cases).

get_lines() ->
    case get('$test_server_lineQ') of
	{I,Q} when is_integer(I), 1 =< I, I =< ?TEST_SERVER_LINE_SIZE ->
	    queue:to_list(Q);
	_ ->
	    test_server_line_get()
    end.

%% Clear all dictionary entries for both storage variants.
%%
clear() ->
    test_server_line_clear(),
    test_server_lineQ_clear().
+
+
%% Print a "Mod:Func,Line: <timestamp>" trace line on standard
%% output; injected by the parse transform when the line_trace module
%% attribute is set.
trace_line(Mod, Func, Line) ->
    Format = lists:concat([Mod, ":", Func, ",", integer_to_list(Line), ": ~p"]),
    io:format(Format, [erlang:now()]).
+
+
+%%%=================================================================
+%%%========= **** PARSE TRANSFORM **** ========================
+%%%=================================================================
+parse_transform(Forms, _Options) ->
+ transform(Forms, _Options).
+
+%% forms(Fs) -> lists:map(fun (F) -> form(F) end, Fs).
+
+transform(Forms, _Options)->
+ Vars0 = #vars{},
+ {ok, MungedForms, _Vars} = transform(Forms, [], Vars0),
+ MungedForms.
+
+
+transform([Form|Forms], MungedForms, Vars) ->
+ case munge(Form, Vars) of
+ ignore ->
+ transform(Forms, MungedForms, Vars);
+ {MungedForm, Vars2} ->
+ transform(Forms, [MungedForm|MungedForms], Vars2)
+ end;
+transform([], MungedForms, Vars) ->
+ {ok, lists:reverse(MungedForms), Vars}.
+
%% This code traverses the abstract code, stored as the abstract_code
%% chunk in the BEAM file, as described in absform(3) for Erlang/OTP R8B
%% (Vsn=abstract_v2).
%% The abstract format after preprocessing differs slightly from the abstract
%% format given eg using epp:parse_form, this has been noted in comments.
%%
%% munge/2 handles one top-level form.  The module name, the no_lines
%% exclusion list and the line_trace flag are picked up from module
%% attributes and stashed in #vars{} for the function clauses below.
munge(Form={attribute,_,module,Module}, Vars) ->
    Vars2 = Vars#vars{module=Module},
    {Form, Vars2};

munge(Form={attribute,_,no_lines,Funcs}, Vars) ->
    %% Functions listed here are left completely untouched.
    Vars2 = Vars#vars{no_lines=Funcs},
    {Form, Vars2};

munge(Form={attribute,_,line_trace,_}, Vars) ->
    Vars2 = Vars#vars{line_trace=true},
    {Form, Vars2};

munge({function,0,module_info,_Arity,_Clauses}, _Vars) ->
    ignore; % module_info will be added again when the forms are recompiled
munge(Form = {function,Line,Function,Arity,Clauses}, Vars) ->
    case lists:member({Function,Arity},Vars#vars.no_lines) of
	true ->
	    %% Line numbers in this function shall not be stored
	    {Form,Vars};
	false ->
	    Vars2 = Vars#vars{function=Function,
			      arity=Arity,
			      lines=[]},
	    {MungedClauses, Vars3} = munge_clauses(Clauses, Vars2, []),
	    {{function,Line,Function,Arity,MungedClauses}, Vars3}
    end;
munge(Form, Vars) -> % attributes
    {Form, Vars}.
+
%% Munge a list of clauses.  Guards are processed with is_guard=true
%% (no line bumps may be inserted there) and any variable changes
%% made while munging them are deliberately discarded.
munge_clauses([{clause,Line,Pattern,Guards,Body}|Clauses], Vars, MClauses) ->
    {MungedGuards, _Vars} = munge_exprs(Guards, Vars#vars{is_guard=true},[]),
    {MungedBody, Vars2} = munge_body(Body, Vars, []),
    munge_clauses(Clauses, Vars2,
		  [{clause,Line,Pattern,MungedGuards,MungedBody}|
		   MClauses]);
munge_clauses([], Vars, MungedClauses) ->
    {lists:reverse(MungedClauses), Vars}.
+
%% Munge the expressions of a clause body.  Before the first
%% expression seen on each source line, insert a call to
%% ?STORAGE_FUNCTION (the "bump") and, when line_trace is enabled,
%% a call to trace_line/3.  Lines already bumped within the current
%% function (tracked in Vars#vars.lines) are not bumped again.
munge_body([Expr|Body], Vars, MungedBody) ->
    %% Here is the place to add a call to storage function!
    Line = element(2, Expr),
    Lines = Vars#vars.lines,
    case lists:member(Line,Lines) of
	true -> % already a bump at this line!
	    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
	    munge_body(Body, Vars2, [MungedExpr|MungedBody]);
	false ->
	    %% The bump: ?MODULE:?STORAGE_FUNCTION(Module,Function,Line)
	    Bump = {call, 0, {remote,0,
			      {atom,0,?MODULE},
			      {atom,0,?STORAGE_FUNCTION}},
		    [{atom,0,Vars#vars.module},
		     {atom, 0, Vars#vars.function},
		     {integer, 0, Line}]},
	    Lines2 = [Line|Lines],

	    {MungedExpr, Vars2} = munge_expr(Expr, Vars#vars{lines=Lines2}),
	    %% MungedBody is built in reverse, so prepending
	    %% [MungedExpr,...,Bump|...] puts the bump BEFORE the
	    %% expression in the final (re-reversed) body.
	    MungedBody2 =
		if Vars#vars.line_trace ->
			LineTrace = {call, 0, {remote,0,
					       {atom,0,?MODULE},
					       {atom,0,trace_line}},
				     [{atom,0,Vars#vars.module},
				      {atom, 0, Vars#vars.function},
				      {integer, 0, Line}]},
			[MungedExpr,LineTrace,Bump|MungedBody];
		   true ->
			[MungedExpr,Bump|MungedBody]
		end,
	    munge_body(Body, Vars2, MungedBody2)
    end;
munge_body([], Vars, MungedBody) ->
    {lists:reverse(MungedBody), Vars}.
+
%% Recursively munge one expression: rebuild each abstract-format
%% node with its subexpressions munged, so that nested bodies (fun
%% clauses, case/if/receive/try branches, blocks) get their own line
%% bumps via munge_body/3.  Leaf nodes are returned unchanged by the
%% final catch-all clause.
munge_expr({match,Line,ExprL,ExprR}, Vars) ->
    {MungedExprL, Vars2} = munge_expr(ExprL, Vars),
    {MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
    {{match,Line,MungedExprL,MungedExprR}, Vars3};
munge_expr({tuple,Line,Exprs}, Vars) ->
    {MungedExprs, Vars2} = munge_exprs(Exprs, Vars, []),
    {{tuple,Line,MungedExprs}, Vars2};
munge_expr({record,Line,Expr,Exprs}, Vars) ->
    %% Only for Vsn=raw_abstract_v1
    {MungedExprName, Vars2} = munge_expr(Expr, Vars),
    {MungedExprFields, Vars3} = munge_exprs(Exprs, Vars2, []),
    {{record,Line,MungedExprName,MungedExprFields}, Vars3};
munge_expr({record_field,Line,ExprL,ExprR}, Vars) ->
    %% Only for Vsn=raw_abstract_v1
    {MungedExprL, Vars2} = munge_expr(ExprL, Vars),
    {MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
    {{record_field,Line,MungedExprL,MungedExprR}, Vars3};
munge_expr({cons,Line,ExprH,ExprT}, Vars) ->
    {MungedExprH, Vars2} = munge_expr(ExprH, Vars),
    {MungedExprT, Vars3} = munge_expr(ExprT, Vars2),
    {{cons,Line,MungedExprH,MungedExprT}, Vars3};
munge_expr({op,Line,Op,ExprL,ExprR}, Vars) ->
    {MungedExprL, Vars2} = munge_expr(ExprL, Vars),
    {MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
    {{op,Line,Op,MungedExprL,MungedExprR}, Vars3};
munge_expr({op,Line,Op,Expr}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {{op,Line,Op,MungedExpr}, Vars2};
munge_expr({'catch',Line,Expr}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {{'catch',Line,MungedExpr}, Vars2};
munge_expr({call,Line1,{remote,Line2,ExprM,ExprF},Exprs},
	   Vars) when Vars#vars.is_guard==false->
    {MungedExprM, Vars2} = munge_expr(ExprM, Vars),
    {MungedExprF, Vars3} = munge_expr(ExprF, Vars2),
    {MungedExprs, Vars4} = munge_exprs(Exprs, Vars3, []),
    {{call,Line1,{remote,Line2,MungedExprM,MungedExprF},MungedExprs}, Vars4};
munge_expr({call,Line1,{remote,_Line2,_ExprM,ExprF},Exprs},
	   Vars) when Vars#vars.is_guard==true ->
    %% Difference in abstract format after preprocessing: BIF calls in guards
    %% are translated to {remote,...} (which is not allowed as source form)
    %% NOT NECESSARY FOR Vsn=raw_abstract_v1
    munge_expr({call,Line1,ExprF,Exprs}, Vars);
munge_expr({call,Line,Expr,Exprs}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {MungedExprs, Vars3} = munge_exprs(Exprs, Vars2, []),
    {{call,Line,MungedExpr,MungedExprs}, Vars3};
munge_expr({lc,Line,Expr,LC}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {MungedLC, Vars3} = munge_lc(LC, Vars2, []),
    {{lc,Line,MungedExpr,MungedLC}, Vars3};
munge_expr({block,Line,Body}, Vars) ->
    {MungedBody, Vars2} = munge_body(Body, Vars, []),
    {{block,Line,MungedBody}, Vars2};
munge_expr({'if',Line,Clauses}, Vars) ->
    {MungedClauses,Vars2} = munge_clauses(Clauses, Vars, []),
    {{'if',Line,MungedClauses}, Vars2};
munge_expr({'case',Line,Expr,Clauses}, Vars) ->
    {MungedExpr,Vars2} = munge_expr(Expr,Vars),
    {MungedClauses,Vars3} = munge_clauses(Clauses, Vars2, []),
    {{'case',Line,MungedExpr,MungedClauses}, Vars3};
munge_expr({'receive',Line,Clauses}, Vars) ->
    {MungedClauses,Vars2} = munge_clauses(Clauses, Vars, []),
    {{'receive',Line,MungedClauses}, Vars2};
munge_expr({'receive',Line,Clauses,Expr,Body}, Vars) ->
    %% receive ... after Expr -> Body end
    {MungedClauses,Vars2} = munge_clauses(Clauses, Vars, []),
    {MungedExpr, Vars3} = munge_expr(Expr, Vars2),
    {MungedBody, Vars4} = munge_body(Body, Vars3, []),
    {{'receive',Line,MungedClauses,MungedExpr,MungedBody}, Vars4};
munge_expr({'try',Line,Exprs,Clauses,CatchClauses,After}, Vars) ->
    {MungedExprs, Vars1} = munge_exprs(Exprs, Vars, []),
    {MungedClauses, Vars2} = munge_clauses(Clauses, Vars1, []),
    {MungedCatchClauses, Vars3} = munge_clauses(CatchClauses, Vars2, []),
    {MungedAfter, Vars4} = munge_body(After, Vars3, []),
    {{'try',Line,MungedExprs,MungedClauses,MungedCatchClauses,MungedAfter},
     Vars4};
%% Difference in abstract format after preprocessing: Funs get an extra
%% element Extra.
%% NOT NECESSARY FOR Vsn=raw_abstract_v1
munge_expr({'fun',Line,{function,Name,Arity},_Extra}, Vars) ->
    {{'fun',Line,{function,Name,Arity}}, Vars};
munge_expr({'fun',Line,{clauses,Clauses},_Extra}, Vars) ->
    {MungedClauses,Vars2}=munge_clauses(Clauses, Vars, []),
    {{'fun',Line,{clauses,MungedClauses}}, Vars2};
munge_expr({'fun',Line,{clauses,Clauses}}, Vars) ->
    %% Only for Vsn=raw_abstract_v1
    {MungedClauses,Vars2}=munge_clauses(Clauses, Vars, []),
    {{'fun',Line,{clauses,MungedClauses}}, Vars2};
munge_expr(Form, Vars) -> % var|char|integer|float|string|atom|nil|bin|eof
    {Form, Vars}.
+
%% Munge a list of expressions.  In guard context an element may
%% itself be a list (a guard sequence - the alternatives separated by
%% ';'), which is munged recursively with variable changes discarded.
munge_exprs([Expr|Exprs], Vars, MungedExprs) when Vars#vars.is_guard==true,
						  is_list(Expr) ->
    {MungedExpr, _Vars} = munge_exprs(Expr, Vars, []),
    munge_exprs(Exprs, Vars, [MungedExpr|MungedExprs]);
munge_exprs([Expr|Exprs], Vars, MungedExprs) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_exprs(Exprs, Vars2, [MungedExpr|MungedExprs]);
munge_exprs([], Vars, MungedExprs) ->
    {lists:reverse(MungedExprs), Vars}.
+
%% Munge the qualifiers of a list comprehension: the source
%% expression of a generator (the pattern is left alone), and each
%% filter expression.
munge_lc([{generate,Line,Pattern,Expr}|LC], Vars, MungedLC) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_lc(LC, Vars2, [{generate,Line,Pattern,MungedExpr}|MungedLC]);
munge_lc([Expr|LC], Vars, MungedLC) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_lc(LC, Vars2, [MungedExpr|MungedLC]);
munge_lc([], Vars, MungedLC) ->
    {lists:reverse(MungedLC), Vars}.
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/test_server/src/test_server_node.erl b/lib/test_server/src/test_server_node.erl
new file mode 100644
index 0000000000..ddc89d50d4
--- /dev/null
+++ b/lib/test_server/src/test_server_node.erl
@@ -0,0 +1,1013 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(test_server_node).
+-compile(r11).
+
+%%%
+%%% The same compiled code for this module must be possible to load
+%%% in R11B, R12B and later. To make that possible no bit syntax
+%%% must be used.
+%%%
+
+
+%% Test Controller interface
+-export([is_release_available/1]).
+-export([start_remote_main_target/1,stop/1]).
+-export([start_tracer_node/2,trace_nodes/2,stop_tracer_node/1]).
+-export([start_node/5, stop_node/2]).
+-export([kill_nodes/1, nodedown/2]).
+%% Internal export
+-export([node_started/1,trc/1,handle_debug/4]).
+
+-include("test_server_internal.hrl").
+-record(slave_info, {name,socket,client}).
+-define(VXWORKS_ACCEPT_TIMEOUT,?ACCEPT_TIMEOUT).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% %%%
+%%% All code in this module executes on the test_server_ctrl process %%%
+%%% except for node_started/1 and trc/1 which execute on a new node. %%%
+%%% %%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% is_release_available(Rel) -> boolean()
+%%   Rel = atom() | string()
+%% True if an installed OTP release matching Rel can be located on
+%% this host (the erl binary found by find_release/1 exists as a
+%% regular file).  Only implemented for unix; false on all other
+%% platforms.
+is_release_available(Rel) when is_atom(Rel) ->
+    is_release_available(atom_to_list(Rel));
+is_release_available(Rel) ->
+    case os:type() of
+	{unix,_} ->
+	    Erl = find_release(Rel),
+	    case Erl of
+		none -> false;
+		_ -> filelib:is_regular(Erl)
+	    end;
+	_ ->
+	    false
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Start main target node on remote host
+%%% The target node must not know the controller node via erlang distribution.
+%% start_remote_main_target(#par{}) -> {ok,#target_info{}} | {error,Reason}
+%% Reboots the target host(s), starts the main target node there and
+%% waits (on a listen socket on ?MAIN_PORT) for it to connect back and
+%% deliver its target_info.  The returned #target_info{} has the
+%% connected socket in 'where'.  Also seeds the process-dictionary
+%% entry test_server_free_targets with the available slave targets.
+start_remote_main_target(Parameters) ->
+    #par{type=TargetType,
+	 target=TargetHost,
+	 naming=Naming,
+	 master=MasterNode,
+	 cookie=MasterCookie,
+	 slave_targets=SlaveTargets} = Parameters,
+
+    lists:foreach(fun(T) -> maybe_reboot_target({TargetType,T}) end,
+		  [list_to_atom(TargetHost)|SlaveTargets]),
+
+    % Must give the targets a chance to reboot...
+    case TargetType of
+	vxworks ->
+	    receive after 15000 -> ok end;
+	_ ->
+	    ok
+    end,
+
+    Cmd0 = get_main_target_start_command(TargetType,TargetHost,Naming,
+					 MasterNode,MasterCookie),
+    Cmd =
+	case os:getenv("TEST_SERVER_FRAMEWORK") of
+	    false -> Cmd0;
+	    FW -> Cmd0 ++ " -env TEST_SERVER_FRAMEWORK " ++ FW
+	end,
+
+    {ok,LSock} = gen_tcp:listen(?MAIN_PORT,[binary,{reuseaddr,true},{packet,2}]),
+    case start_target(TargetType,TargetHost,Cmd) of
+	{ok,TargetClient,AcceptTimeout} ->
+	    case gen_tcp:accept(LSock,AcceptTimeout) of
+		{ok,Sock} ->
+		    gen_tcp:close(LSock),
+		    receive
+			{tcp,Sock,Bin} when is_binary(Bin) ->
+			    case unpack(Bin) of
+				error ->
+				    gen_tcp:close(Sock),
+				    close_target_client(TargetClient),
+				    {error,bad_message};
+				{ok,{target_info,TI}} ->
+				    put(test_server_free_targets,SlaveTargets),
+				    {ok, TI#target_info{where=Sock,
+							host=TargetHost,
+							naming=Naming,
+							master=MasterNode,
+							target_client=TargetClient,
+							slave_targets=SlaveTargets}}
+			    end;
+			{tcp_closed,Sock} ->
+			    gen_tcp:close(Sock),
+			    close_target_client(TargetClient),
+			    {error,could_not_contact_target}
+		    after AcceptTimeout ->
+			    gen_tcp:close(Sock),
+			    close_target_client(TargetClient),
+			    {error,timeout}
+		    end;
+		Error ->
+		    %%! maybe something like kill_target(...)???
+		    gen_tcp:close(LSock),
+		    close_target_client(TargetClient),
+		    {error,{could_not_contact_target,Error}}
+	    end;
+	Error ->
+	    gen_tcp:close(LSock),
+	    {error,{could_not_start_target,Error}}
+    end.
+
+%% stop(#target_info{}) -> ok
+%% Shuts down: kills all remaining slave nodes, and if the main target
+%% is remote (where is a socket, not 'local') closes the connection
+%% and the target client.
+stop(TI) ->
+    kill_nodes(TI),
+    case TI#target_info.where of
+	local -> % there is no remote target to stop
+	    ok;
+	Sock -> % stop remote target
+	    gen_tcp:close(Sock),
+	    close_target_client(TI#target_info.target_client)
+    end.
+
+%% nodedown(Sock, TI) -> slave_died | target_died | ignore
+%% Called when a socket goes down.  If Sock belongs to a slave node in
+%% the slave_tab ets table, the slave is cleaned up and its host is
+%% returned to the test_server_free_targets pool; if Sock is the main
+%% target connection, target_died is returned (test_server_ctrl does
+%% the cleanup).
+nodedown(Sock, TI) ->
+    Match = #slave_info{name='$1',socket=Sock,client='$2',_='_'},
+    case ets:match(slave_tab,Match) of
+	[[Node,Client]] -> % Slave node died
+	    gen_tcp:close(Sock),
+	    ets:delete(slave_tab,Node),
+	    close_target_client(Client),
+	    HostAtom = test_server_sup:hostatom(Node),
+	    case lists:member(HostAtom,TI#target_info.slave_targets) of
+		true ->
+		    put(test_server_free_targets,
+			get(test_server_free_targets) ++ [HostAtom]);
+		false -> ok
+	    end,
+	    slave_died;
+	[] ->
+	    case TI#target_info.where of
+		Sock ->
+		    %% test_server_ctrl will do the cleanup
+		    target_died;
+		_ ->
+		    ignore
+	    end
+    end.
+
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Start trace node
+%%%
+%% start_tracer_node(TraceFile, TI) -> {ok,Sock} | {error,Reason}
+%% Starts a hidden 'tracer' node on this host via open_port and waits
+%% for it to connect back on an ephemeral listen port.  On success,
+%% tracing is started (trace_nodes/2) on the target node and all
+%% currently known slave nodes, and the control socket is returned.
+start_tracer_node(TraceFile,TI) ->
+    Match = #slave_info{name='$1',_='_'},
+    SlaveNodes = lists:map(fun([N]) -> [" ",N] end,
+			   ets:match(slave_tab,Match)),
+    TargetNode = case TI#target_info.where of
+		     local -> node();
+		     _ -> "test_server@" ++ TI#target_info.host
+		 end,
+    Cookie = TI#target_info.cookie,
+    {ok,LSock} = gen_tcp:listen(0,[binary,{reuseaddr,true},{packet,2}]),
+    {ok,TracePort} = inet:port(LSock),
+    Prog = pick_erl_program(default),
+    Cmd = lists:concat([Prog, " -sname tracer -hidden -setcookie ", Cookie,
+			" -s ", ?MODULE, " trc ", TraceFile, " ",
+			TracePort, " ", TI#target_info.os_family]),
+    spawn(fun() -> print_data(open_port({spawn,Cmd},[stream])) end),
+%!	open_port({spawn,Cmd},[stream]),
+    case gen_tcp:accept(LSock,?ACCEPT_TIMEOUT) of
+	{ok,Sock} ->
+	    gen_tcp:close(LSock),
+	    receive
+		{tcp,Sock,Result} when is_binary(Result) ->
+		    case unpack(Result) of
+			error ->
+			    gen_tcp:close(Sock),
+			    {error,timeout};
+			{ok,started} ->
+			    trace_nodes(Sock,[TargetNode | SlaveNodes]),
+			    {ok,Sock};
+			{ok,Error} -> Error
+		    end;
+		{tcp_closed,Sock} ->
+		    gen_tcp:close(Sock),
+		    {error,could_not_start_tracernode}
+	    after ?ACCEPT_TIMEOUT ->
+		    gen_tcp:close(Sock),
+		    {error,timeout}
+	    end;
+	Error ->
+	    gen_tcp:close(LSock),
+	    {error,{could_not_start_tracernode,Error}}
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Start a tracer on each of these nodes and set flags and patterns
+%%%
+%% trace_nodes(Sock, Nodes) -> ok
+%% Asks the tracer node (over its control socket) to start tracing on
+%% Nodes.  Messages on the wire are a tag byte 1 followed by a
+%% term_to_binary-encoded term (see unpack/1); waits for the ack.
+trace_nodes(Sock,Nodes) ->
+    Bin = term_to_binary({add_nodes,Nodes}),
+    ok = gen_tcp:send(Sock, [1|Bin]),
+    receive_ack(Sock).
+
+
+%% receive_ack(Sock) -> ok
+%% Waits for any successfully unpacked message on Sock; every other
+%% message (including unparsable ones) is skipped.  NOTE: no 'after'
+%% clause, so this blocks until an ack arrives.
+receive_ack(Sock) ->
+    receive
+	{tcp,Sock,Bin} when is_binary(Bin) ->
+	    case unpack(Bin) of
+		error -> receive_ack(Sock);
+		{ok,_} -> ok
+	    end;
+	_ ->
+	    receive_ack(Sock)
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Stop trace node
+%%%
+%% stop_tracer_node(Sock) -> ok
+%% Sends 'stop' to the tracer node and waits for it to close the
+%% connection.  id/1 wraps the literal -- presumably to keep the
+%% compiler from evaluating term_to_binary at compile time (TODO:
+%% confirm intent).
+stop_tracer_node(Sock) ->
+    Bin = term_to_binary(id(stop)),
+    ok = gen_tcp:send(Sock, [1|Bin]),
+    receive {tcp_closed,Sock} -> gen_tcp:close(Sock) end,
+    ok.
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% trc([TraceFile,Nodes]) -> ok
+%%
+%% Start tracing on the given nodes
+%%
+%% This function executes on the new node
+%%
+%% trc([TraceFile, PortAtom, Type]) -> no_return()
+%% Entry point on the tracer node (invoked via -s, so all arguments
+%% arrive as atoms).  Reads trace patterns from TraceFile, connects
+%% back to the controller on the given port, reports the consult
+%% result, then enters trc_loop/3.  Halts the node when done.
+trc([TraceFile, PortAtom, Type]) ->
+    {Result,Patterns} =
+	case file:consult(TraceFile) of
+	    {ok,TI} ->
+		Pat = parse_trace_info(lists:flatten(TI)),
+		{started,Pat};
+	    Error ->
+		{Error,[]}
+	end,
+    Port = list_to_integer(atom_to_list(PortAtom)),
+    case catch gen_tcp:connect("localhost", Port, [binary,
+						   {reuseaddr,true},
+						   {packet,2}]) of
+	{ok,Sock} ->
+	    BinResult = term_to_binary(Result),
+	    ok = gen_tcp:send(Sock,[1|BinResult]),
+	    trc_loop(Sock,Patterns,Type);
+	_else ->
+	    ok
+    end,
+    erlang:halt().
+%% trc_loop(Sock, Patterns, Type) -> ok
+%% Control loop on the tracer node: serves add_nodes/stop requests
+%% arriving on Sock until told to stop or the socket closes, stopping
+%% ttb tracing on the way out.
+trc_loop(Sock,Patterns,Type) ->
+    receive
+	{tcp,Sock,Bin} ->
+	    case unpack(Bin) of
+		error ->
+		    ttb:stop(),
+		    gen_tcp:close(Sock);
+		{ok,{add_nodes,Nodes}} ->
+		    add_nodes(Nodes,Patterns,Type),
+		    %% Bug fix: the ack must be bound to a fresh variable.
+		    %% 'Bin' is already bound to the received message by the
+		    %% receive pattern above, so "Bin = term_to_binary(...)"
+		    %% was a match that could never succeed and crashed
+		    %% every add_nodes acknowledgement with badmatch.
+		    AckBin = term_to_binary(id(ok)),
+		    ok = gen_tcp:send(Sock, [1|AckBin]),
+		    trc_loop(Sock,Patterns,Type);
+		{ok,stop} ->
+		    ttb:stop(),
+		    gen_tcp:close(Sock)
+	    end;
+	{tcp_closed,Sock} ->
+	    ttb:stop(),
+	    gen_tcp:close(Sock)
+    end.
+%% add_nodes(Nodes, Patterns, _Type) -> ok
+%% Starts a ttb tracer on Nodes (writing to local file 'test_server',
+%% with handle_debug/4 as handler), enables call+timestamp tracing on
+%% all processes, and applies each parsed pattern via the ttb
+%% tp/tpl/ctp/ctpl/ctpg function named in the pattern tuple.
+add_nodes(Nodes,Patterns,_Type) ->
+    ttb:tracer(Nodes,[{file,{local, test_server}},
+		      {handler, {{?MODULE,handle_debug},initial}}]),
+    ttb:p(all,[call,timestamp]),
+    lists:foreach(fun({TP,M,F,A,Pat}) -> ttb:TP(M,F,A,Pat);
+		     ({CTP,M,F,A}) -> ttb:CTP(M,F,A)
+		  end,
+		  Patterns).
+
+%% parse_trace_info(Terms) -> [{tp|tpl,M,F,A,Pat} | {ctp|ctpl|ctpg,M,F,A}]
+%% Normalizes the terms read from the trace info file: missing
+%% module/function/arity positions are filled in with '_'.
+%% Unrecognized terms are silently dropped.
+parse_trace_info([{TP,M,Pat}|Pats]) when TP=:=tp; TP=:=tpl ->
+    [{TP,M,'_','_',Pat}|parse_trace_info(Pats)];
+parse_trace_info([{TP,M,F,Pat}|Pats]) when TP=:=tp; TP=:=tpl ->
+    [{TP,M,F,'_',Pat}|parse_trace_info(Pats)];
+parse_trace_info([{TP,M,F,A,Pat}|Pats]) when TP=:=tp; TP=:=tpl ->
+    [{TP,M,F,A,Pat}|parse_trace_info(Pats)];
+parse_trace_info([CTP|Pats]) when CTP=:=ctp; CTP=:=ctpl; CTP=:=ctpg ->
+    [{CTP,'_','_','_'}|parse_trace_info(Pats)];
+parse_trace_info([{CTP,M}|Pats]) when CTP=:=ctp; CTP=:=ctpl; CTP=:=ctpg ->
+    [{CTP,M,'_','_'}|parse_trace_info(Pats)];
+parse_trace_info([{CTP,M,F}|Pats]) when CTP=:=ctp; CTP=:=ctpl; CTP=:=ctpg ->
+    [{CTP,M,F,'_'}|parse_trace_info(Pats)];
+parse_trace_info([{CTP,M,F,A}|Pats]) when CTP=:=ctp; CTP=:=ctpl; CTP=:=ctpg ->
+    [{CTP,M,F,A}|parse_trace_info(Pats)];
+parse_trace_info([]) ->
+    [];
+parse_trace_info([_other|Pats]) -> % ignore
+    parse_trace_info(Pats).
+
+%% handle_debug(Out, Trace, TI, State) -> NewState
+%% ttb trace handler: State is a running message counter, started at 0
+%% on the 'initial' call.  Each trace message is pretty-printed to Out
+%% with its sequence number; end_of_trace leaves the counter unchanged.
+handle_debug(Out,Trace,TI,initial) ->
+    handle_debug(Out,Trace,TI,0);
+handle_debug(_Out,end_of_trace,_TI,N) ->
+    N;
+handle_debug(Out,Trace,_TI,N) ->
+    print_trc(Out,Trace,N),
+    N+1.
+
+%% print_trc(Out, TraceMsg, N) -> ok
+%% Pretty-prints one timestamped trace message (call with/without
+%% caller info, return_from, tracer drop notice) numbered N to Out.
+%% The catch-all clause dumps any other message raw; it uses size/1
+%% (not tuple_size/1) because the module must load on R11 (see the
+%% module header note about staying R11-compatible).
+print_trc(Out,{trace_ts,P,call,{M,F,A},C,Ts},N) ->
+    io:format(Out,
+	      "~w: ~s~n"
+	      "Process   : ~w~n"
+	      "Call      : ~w:~w/~w~n"
+	      "Arguments : ~p~n"
+	      "Caller    : ~w~n~n",
+	      [N,ts(Ts),P,M,F,length(A),A,C]);
+print_trc(Out,{trace_ts,P,call,{M,F,A},Ts},N) ->
+    io:format(Out,
+	      "~w: ~s~n"
+	      "Process   : ~w~n"
+	      "Call      : ~w:~w/~w~n"
+	      "Arguments : ~p~n~n",
+	      [N,ts(Ts),P,M,F,length(A),A]);
+print_trc(Out,{trace_ts,P,return_from,{M,F,A},R,Ts},N) ->
+    io:format(Out,
+	      "~w: ~s~n"
+	      "Process      : ~w~n"
+	      "Return from  : ~w:~w/~w~n"
+	      "Return value : ~p~n~n",
+	      [N,ts(Ts),P,M,F,A,R]);
+print_trc(Out,{drop,X},N) ->
+    io:format(Out,
+	      "~w: Tracer dropped ~w messages - too busy~n~n",
+	      [N,X]);
+print_trc(Out,Trace,N) ->
+    Ts = element(size(Trace),Trace),
+    io:format(Out,
+	      "~w: ~s~n"
+	      "Trace        : ~p~n~n",
+	      [N,ts(Ts),Trace]).
+%% ts(Now) -> iolist()
+%% Formats an erlang timestamp {MegaSecs,Secs,MicroSecs} as
+%% "YYYY-MM-DD HH:MM:SS,uuuuuu" in local time.
+ts({_, _, Micro} = Now) ->
+    {{Y,M,D},{H,Min,S}} = calendar:now_to_local_time(Now),
+    io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w ~2.2.0w:~2.2.0w:~2.2.0w,~6.6.0w",
+		  [Y,M,D,H,Min,S,Micro]).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Start slave/peer nodes (initiated by test_server:start_node/5)
+%%%
+%% start_node(SlaveName, Type, Options, From, TI) -> term()
+%% Dispatches on node type, normalizing the name first: slave nodes
+%% take an atom name, peer nodes a string.  Any other type is
+%% rejected with not_implemented_yet.
+start_node(SlaveName, slave, Options, From, TI) when is_list(SlaveName) ->
+    start_node_slave(list_to_atom(SlaveName), Options, From, TI);
+start_node(SlaveName, slave, Options, From, TI) ->
+    start_node_slave(SlaveName, Options, From, TI);
+start_node(SlaveName, peer, Options, From, TI) when is_atom(SlaveName) ->
+    start_node_peer(atom_to_list(SlaveName), Options, From, TI);
+start_node(SlaveName, peer, Options, From, TI) ->
+    start_node_peer(SlaveName, Options, From, TI);
+start_node(_SlaveName, _Type, _Options, _From, _TI) ->
+    not_implemented_yet.
+
+%%
+%% Peer nodes are always started on the same host as test_server_ctrl
+%% Socket communication is used in case target and controller is
+%% not the same node (target must not know the controller node
+%% via erlang distribution)
+%%
+%% start_node_peer(SlaveName, OptList, From, TI) -> ok
+%% Starts a peer node on the local host via open_port and replies to
+%% From (a gen_server caller) with the result.  The new node calls
+%% back node_started/1 over a fresh listen socket.  With {wait,false}
+%% the reply is sent immediately and a linked process waits for the
+%% callback in the background.  A -setcookie is injected unless the
+%% caller already supplied one in the args.
+start_node_peer(SlaveName, OptList, From, TI) ->
+    SuppliedArgs = start_node_get_option_value(args, OptList, []),
+    Cleanup = start_node_get_option_value(cleanup, OptList, true),
+    HostStr = test_server_sup:hoststr(),
+    {ok,LSock} = gen_tcp:listen(0,[binary,
+				   {reuseaddr,true},
+				   {packet,2}]),
+    {ok,WaitPort} = inet:port(LSock),
+    NodeStarted = lists:concat([" -s ", ?MODULE, " node_started ",
+				HostStr, " ", WaitPort]),
+
+    % Support for erl_crash_dump files..
+    CrashFile = filename:join([TI#target_info.test_server_dir,
+			       "erl_crash_dump."++cast_to_list(SlaveName)]),
+    CrashArgs = lists:concat([" -env ERL_CRASH_DUMP ",CrashFile," "]),
+    FailOnError = start_node_get_option_value(fail_on_error, OptList, true),
+    Pa = TI#target_info.test_server_dir,
+    Prog0 = start_node_get_option_value(erl, OptList, default),
+    Prog = pick_erl_program(Prog0),
+    Args =
+	case string:str(SuppliedArgs,"-setcookie") of
+	    0 -> "-setcookie " ++ TI#target_info.cookie ++ " " ++ SuppliedArgs;
+	    _ -> SuppliedArgs
+	end,
+    Cmd = lists:concat([Prog,
+			" -detached ",
+			TI#target_info.naming, " ", SlaveName,
+			" -pa ", Pa,
+			NodeStarted,
+			CrashArgs,
+			" ", Args]),
+    Opts = case start_node_get_option_value(env, OptList, []) of
+	       [] -> [];
+	       Env -> [{env, Env}]
+	   end,
+    %% peer is always started on localhost
+    %%
+    %% Bad environment can cause open port to fail. If this happens,
+    %% we ignore it and let the testcase handle the situation...
+    catch open_port({spawn, Cmd}, [stream|Opts]),
+
+    case start_node_get_option_value(wait, OptList, true) of
+	true ->
+	    Ret = wait_for_node_started(LSock,60000,undefined,Cleanup,TI,self()),
+	    case {Ret,FailOnError} of
+		{{{ok, Node}, Warning},_} ->
+		    gen_server:reply(From,{{ok,Node},HostStr,Cmd,[],Warning});
+		{_,false} ->
+		    gen_server:reply(From,{Ret, HostStr, Cmd});
+		{_,true} ->
+		    gen_server:reply(From,{fail,{Ret, HostStr, Cmd}})
+	    end;
+	false ->
+	    Nodename = list_to_atom(SlaveName ++ "@" ++ HostStr),
+	    I = "=== Not waiting for node",
+	    gen_server:reply(From,{{ok, Nodename}, HostStr, Cmd, I, []}),
+	    Self = self(),
+	    spawn_link(
+	      fun() ->
+		      wait_for_node_started(LSock,60000,undefined,
+					    Cleanup,TI,Self),
+		      receive after infinity -> ok end
+	      end),
+	    ok
+    end.
+
+%%
+%% Slave nodes are started on a remote host if
+%% - the option remote is given when calling test_server:start_node/3
+%% or
+%% - the target type is vxworks, since only one erlang node
+%% can be started on each vxworks host.
+%%
+%% start_node_slave(SlaveName, OptList, From, TI) -> ok
+%% Starts a slave node (locally or, with the 'remote' option, on
+%% another host) and replies to From with the result tuple from
+%% do_start_node_slave/6.
+start_node_slave(SlaveName, OptList, From, TI) ->
+    SuppliedArgs = start_node_get_option_value(args, OptList, []),
+    Cleanup = start_node_get_option_value(cleanup, OptList, true),
+
+    CrashFile = filename:join([TI#target_info.test_server_dir,
+			       "erl_crash_dump."++cast_to_list(SlaveName)]),
+    CrashArgs = lists:concat([" -env ERL_CRASH_DUMP ",CrashFile," "]),
+    Pa = TI#target_info.test_server_dir,
+    Args = lists:concat([" -pa ", Pa, " ", SuppliedArgs, CrashArgs]),
+
+    Prog0 = start_node_get_option_value(erl, OptList, default),
+    Prog = pick_erl_program(Prog0),
+    Ret =
+	case start_which_node(OptList) of
+	    {error,Reason} -> {{error,Reason},undefined,undefined};
+	    Host0 -> do_start_node_slave(Host0,SlaveName,Args,Prog,Cleanup,TI)
+	end,
+    gen_server:reply(From,Ret).
+
+
+%% do_start_node_slave(Host0, SlaveName, Args, Prog, Cleanup, TI) ->
+%%     {{ok,Node},Host,Cmd,[],Warning} | {Error,Host,Cmd}
+%% When the controller is also the target ('local'), slave:start/5 can
+%% be used (the controller may ping the new node).  Otherwise the
+%% socket-based no_contact_start_slave/6 path is taken, since the new
+%% node must not learn about the controller via distribution.
+do_start_node_slave(Host0, SlaveName, Args, Prog, Cleanup, TI) ->
+    case TI#target_info.where of
+	local ->
+	    Host =
+		case Host0 of
+		    local -> test_server_sup:hoststr();
+		    _ -> cast_to_list(Host0)
+		end,
+	    Cmd = Prog ++ " " ++ Args,
+	    %% Can use slave.erl here because I'm both controller and target
+	    %% so I will ping the new node anyway
+	    case slave:start(Host, SlaveName, Args, no_link, Prog) of
+		{ok,Nodename} ->
+		    case Cleanup of
+			true -> ets:insert(slave_tab,#slave_info{name=Nodename});
+			false -> ok
+		    end,
+		    {{ok,Nodename}, Host, Cmd, [], []};
+		Ret ->
+		    {Ret, Host, Cmd}
+	    end;
+
+	_Sock ->
+	    %% Cannot use slave.erl here because I'm only controller, and will
+	    %% not ping the new node. Only target shall contact the new node!!
+	    no_contact_start_slave(Host0,SlaveName,Args,Prog,Cleanup,TI)
+    end.
+
+
+
+%% no_contact_start_slave(Host, Name, Args0, Prog, Cleanup, TI) ->
+%%     {{ok,Node},Host,Cmd,[],Warning} | {Error,Host,Cmd} | {error,_}
+%% Starts a slave without distribution contact from the controller.
+%% For Host==local, a host is drawn from the process-dictionary pool
+%% test_server_free_targets; a failed start either removes the host
+%% from the pool or rotates it to the end (to avoid retrying the same
+%% target) and the whole operation is retried recursively.  Injects
+%% -setcookie unless already present in Args0.
+no_contact_start_slave(Host, Name, Args0, Prog, Cleanup,TI) ->
+    Args1 = case string:str(Args0,"-setcookie") of
+		0 -> "-setcookie " ++ TI#target_info.cookie ++ " " ++ Args0;
+		_ -> Args0
+	    end,
+    Args = TI#target_info.naming ++ " " ++ cast_to_list(Name) ++ " " ++ Args1,
+    case Host of
+	local ->
+	    case get(test_server_free_targets) of
+		[] ->
+		    io:format("Starting slave ~p on HOST~n", [Name]),
+		    TargetType = test_server_sup:get_os_family(),
+		    Cmd0 = get_slave_node_start_command(TargetType,
+							Prog,
+							TI#target_info.master),
+		    Cmd = Cmd0 ++ " " ++ Args,
+		    do_no_contact_start_slave(TargetType,
+					      test_server_sup:hoststr(),
+					      Cmd, Cleanup,TI, false);
+		[H|T] ->
+		    TargetType = TI#target_info.os_family,
+		    Cmd0 = get_slave_node_start_command(TargetType,
+							Prog,
+							TI#target_info.master),
+		    Cmd = Cmd0 ++ " " ++ Args,
+		    case do_no_contact_start_slave(TargetType,H,Cmd,Cleanup,
+						   TI,true) of
+			{error,remove} ->
+			    io:format("Cannot start node on ~p, "
+				      "removing from slave "
+				      "target list.", [H]),
+			    put(test_server_free_targets,T),
+			    no_contact_start_slave(Host,Name,Args,Prog,
+						   Cleanup,TI);
+			{error,keep} ->
+			    %% H is added to the END OF THE LIST
+			    %% in order to avoid the same target to
+			    %% be selected each time
+			    put(test_server_free_targets,T++[H]),
+			    no_contact_start_slave(Host,Name,Args,Prog,
+						   Cleanup,TI);
+			R ->
+			    put(test_server_free_targets,T),
+			    R
+		    end
+	    end;
+	_ ->
+	    TargetType = TI#target_info.os_family,
+	    Cmd0 = get_slave_node_start_command(TargetType,
+						Prog,
+						TI#target_info.master),
+	    Cmd = Cmd0 ++ " " ++ Args,
+	    do_no_contact_start_slave(TargetType, Host, Cmd, Cleanup, TI, false)
+    end.
+
+%% do_no_contact_start_slave(TargetType, Host0, Cmd0, Cleanup, TI, Retry) ->
+%%     {{ok,Node},Host,Cmd,[],Warning} | {Error,Host,Cmd} |
+%%     {error,remove} | {error,keep}      (the last two only when Retry)
+%% Launches the slave command on Host and waits for its node_started
+%% callback.  With Retry==true, a wait failure triggers a reboot
+%% attempt of the target and maps to keep/remove pool advice for the
+%% caller.
+do_no_contact_start_slave(TargetType,Host0,Cmd0,Cleanup,TI,Retry) ->
+    %% Must use TargetType instead of TI#target_info.os_family here
+    %% because if there were no free_targets we will be starting the
+    %% slave node on host which might have a different os_family
+    Host = cast_to_list(Host0),
+    {ok,LSock} = gen_tcp:listen(0,[binary,
+				   {reuseaddr,true},
+				   {packet,2}]),
+    {ok,WaitPort} = inet:port(LSock),
+    Cmd = lists:concat([Cmd0, " -s ", ?MODULE, " node_started ",
+			test_server_sup:hoststr(), " ", WaitPort]),
+
+    case start_target(TargetType,Host,Cmd) of
+	{ok,Client,AcceptTimeout} ->
+	    case wait_for_node_started(LSock,AcceptTimeout,
+				       Client,Cleanup,TI,self()) of
+		{error,_}=WaitError ->
+		    if Retry ->
+			    case maybe_reboot_target(Client) of
+				{error,_} -> {error,remove};
+				ok -> {error,keep}
+			    end;
+		       true ->
+			    {WaitError,Host,Cmd}
+		    end;
+		{Ok,Warning} ->
+		    {Ok,Host,Cmd,[],Warning}
+	    end;
+	StartError ->
+	    gen_tcp:close(LSock),
+	    if Retry -> {error,remove};
+	       true -> {{error,{could_not_start_target,StartError}},Host,Cmd}
+	    end
+    end.
+
+
+%% wait_for_node_started(LSock, Timeout, Client, Cleanup, TI, CtrlPid) ->
+%%     {{ok,Node},Warning} | {error,Reason}
+%% Accepts the callback connection from a starting node, decodes its
+%% {started,...} report, compares OTP versions (a mismatch yields a
+%% warning string), optionally records the node in slave_tab, and
+%% hands the socket over to CtrlPid so nodedown/2 can monitor it.
+wait_for_node_started(LSock,Timeout,Client,Cleanup,TI,CtrlPid) ->
+    case gen_tcp:accept(LSock,Timeout) of
+	{ok,Sock} ->
+	    gen_tcp:close(LSock),
+	    receive
+		{tcp,Sock,Started0} when is_binary(Started0) ->
+		    case unpack(Started0) of
+			error ->
+			    gen_tcp:close(Sock),
+			    {error, connection_closed};
+			{ok,Started} ->
+			    Version = TI#target_info.otp_release,
+			    VsnStr = TI#target_info.system_version,
+			    {ok,Nodename, W} =
+				handle_start_node_return(Version,
+							 VsnStr,
+							 Started),
+			    case Cleanup of
+				true ->
+				    ets:insert(slave_tab,#slave_info{name=Nodename,
+								     socket=Sock,
+								     client=Client});
+				false -> ok
+			    end,
+			    gen_tcp:controlling_process(Sock,CtrlPid),
+			    test_server_ctrl:node_started(Nodename),
+			    {{ok,Nodename},W}
+		    end;
+		{tcp_closed,Sock} ->
+		    gen_tcp:close(Sock),
+		    {error, connection_closed}
+	    after Timeout ->
+		    gen_tcp:close(Sock),
+		    {error, timeout}
+	    end;
+	{error,Reason} ->
+	    gen_tcp:close(LSock),
+	    {error, {no_connection,Reason}}
+    end.
+
+
+
+%% handle_start_node_return(Version, VsnStr, Started) -> {ok,Node,Warning}
+%% The first clause matches when the started node reports exactly the
+%% same release/version strings as the current node (Warning = []);
+%% otherwise a flattened warning string describing the mismatch is
+%% returned.
+handle_start_node_return(Version,VsnStr,{started, Node, Version, VsnStr}) ->
+    {ok, Node, []};
+handle_start_node_return(Version,VsnStr,{started, Node, OVersion, OVsnStr}) ->
+    Str = io_lib:format("WARNING: Started node "
+			"reports different system "
+			"version than current node! "
+			"Current node version: ~p, ~p "
+			"Started node version: ~p, ~p",
+			[Version, VsnStr,
+			 OVersion, OVsnStr]),
+    Str1 = lists:flatten(Str),
+    {ok, Node, Str1}.
+
+
+%%
+%% This function executes on the new node
+%%
+%% node_started([Host,PortAtom]) -> pid()
+%% -s entry point on the newly started node (arguments arrive as
+%% atoms); delegates to node_started/2 in a new process.
+node_started([Host,PortAtom]) ->
+    %% Must spawn a new process because the boot process should not
+    %% hang forever!!
+    spawn(fun() -> node_started(Host,PortAtom) end).
+
+%% This process hangs forever, just waiting for the socket to be
+%% closed and terminating the node
+%% node_started(Host, PortAtom) -> no_return()
+%% Connects back to the controller, reports {started,Node,Version,
+%% VsnStr} (tag byte 1 + term_to_binary), then blocks until anything
+%% arrives on the socket (normally its close) and halts the node.
+%% If the connect fails the node is halted immediately.
+node_started(Host,PortAtom) ->
+    {_, Version} = init:script_id(),
+    VsnStr = erlang:system_info(system_version),
+    Port = list_to_integer(atom_to_list(PortAtom)),
+    case catch gen_tcp:connect(Host,Port, [binary,
+					   {reuseaddr,true},
+					   {packet,2}]) of
+
+	{ok,Sock} ->
+	    Started = term_to_binary({started, node(), Version, VsnStr}),
+	    ok = gen_tcp:send(Sock, [1|Started]),
+	    receive _Anyting ->
+		    gen_tcp:close(Sock),
+		    erlang:halt()
+	    end;
+	_else ->
+	    erlang:halt()
+    end.
+
+
+
+
+
+% start_which_node(Optlist) -> hostname
+% start_which_node(Optlist) -> local | hostname | {error,Reason}
+% Returns 'local' unless the 'remote' option is set, in which case a
+% remote host is picked from the configured host list.
+start_which_node(Optlist) ->
+    case start_node_get_option_value(remote, Optlist) of
+	undefined ->
+	    local;
+	true ->
+	    case find_remote_host() of
+		{error, Other} ->
+		    {error, Other};
+		RHost ->
+		    RHost
+	    end
+    end.
+
+%% find_remote_host() -> hostname | {error,no_remote_hosts}
+%% Picks the first configured host that is not the local host.
+find_remote_host() ->
+    HostList=test_server_ctrl:get_hosts(),
+    case lists:delete(test_server_sup:hoststr(), HostList) of
+	[] ->
+	    {error, no_remote_hosts};
+	[RHost|_Rest] ->
+	    RHost
+    end.
+
+%% start_node_get_option_value(Key, List) -> Value | undefined
+%% As start_node_get_option_value/3 with Default = undefined.
+start_node_get_option_value(Key, List) ->
+    start_node_get_option_value(Key, List, undefined).
+
+%% start_node_get_option_value(Key, List, Default) -> Value | Default
+%% Looks up {Key,Value} in a 2-tuple option list.
+start_node_get_option_value(Key, List, Default) ->
+    case lists:keysearch(Key, 1, List) of
+	{value, {Key, Value}} ->
+	    Value;
+	false ->
+	    Default
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% stop_node(Name) -> ok | {error,Reason}
+%%
+%% Clean up - test_server will stop this node
+%% stop_node(Name, TI) -> ok | {error,not_a_slavenode}
+%% Removes the node from slave_tab, returns its host to the
+%% test_server_free_targets pool if applicable, and closes the target
+%% client.  NOTE(review): unlike kill_node/2, the slave_info socket is
+%% not closed here -- presumably the actual node shutdown (and socket
+%% close) is done by test_server; confirm against the caller.
+stop_node(Name, TI) ->
+    case ets:lookup(slave_tab,Name) of
+	[#slave_info{client=Client}] ->
+	    ets:delete(slave_tab,Name),
+	    HostAtom = test_server_sup:hostatom(Name),
+	    case lists:member(HostAtom,TI#target_info.slave_targets) of
+		true ->
+		    put(test_server_free_targets,
+			get(test_server_free_targets) ++ [HostAtom]);
+		false -> ok
+	    end,
+	    close_target_client(Client),
+	    ok;
+	[] ->
+	    {error, not_a_slavenode}
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% kill_nodes(TI) -> ok
+%%
+%% Brutally kill all slavenodes that were not stopped by test_server
+%% kill_nodes(TI) -> [Nodename]
+%% Brutally kills every slave node still registered in slave_tab,
+%% returning the list of killed node names ([] when the table is
+%% empty).
+kill_nodes(TI) ->
+    case ets:match_object(slave_tab,'_') of
+	[] -> [];
+	List ->
+	    lists:map(fun(SI) -> kill_node(SI,TI) end, List)
+    end.
+
+%% kill_node(#slave_info{}, TI) -> Nodename
+%% Kills one slave: removes it from slave_tab, returns its host to the
+%% free-target pool, halts it (via rpc when no socket is recorded,
+%% otherwise by closing its socket), and closes the target client.
+kill_node(SI,TI) ->
+    Name = SI#slave_info.name,
+    ets:delete(slave_tab,Name),
+    HostAtom = test_server_sup:hostatom(Name),
+    case lists:member(HostAtom,TI#target_info.slave_targets) of
+	true ->
+	    put(test_server_free_targets,
+		get(test_server_free_targets) ++ [HostAtom]);
+	false -> ok
+    end,
+    case SI#slave_info.socket of
+	undefined ->
+	    catch rpc:call(Name,erlang,halt,[]);
+	Sock ->
+	    gen_tcp:close(Sock)
+    end,
+    close_target_client(SI#slave_info.client),
+    Name.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Platform specific code
+
+%% start_target(TargetType, TargetHost, Cmd) ->
+%%     {ok,Client,AcceptTimeout} | Error
+%% Platform-specific node launch.  vxworks: sends the command via
+%% vxworks_client and returns the client handle.  unix: runs the
+%% command locally through open_port, or via rsh when TargetHost is
+%% not this host (Client is then 'undefined').
+start_target(vxworks,TargetHost,Cmd) ->
+    case vxworks_client:open(TargetHost) of
+	{ok,P} ->
+	    case vxworks_client:send_data(P,Cmd,"start_erl called") of
+		{ok,_} ->
+		    {ok,{vxworks,P},?VXWORKS_ACCEPT_TIMEOUT};
+		Error ->
+		    Error
+	    end;
+	Error ->
+	    Error
+    end;
+
+start_target(unix,TargetHost,Cmd0) ->
+    Cmd =
+	case test_server_sup:hoststr() of
+	    TargetHost -> Cmd0;
+	    _ -> lists:concat(["rsh ",TargetHost, " ", Cmd0])
+	end,
+    open_port({spawn, Cmd}, [stream]),
+    {ok,undefined,?ACCEPT_TIMEOUT}.
+
+%% maybe_reboot_target(Client) -> ok | {error,cannot_reboot_target}
+%% Only vxworks targets can be rebooted: via an open client pid, or by
+%% target name atom.  Anything else (including unix) is an error.
+maybe_reboot_target({vxworks,P}) when is_pid(P) ->
+    %% Reboot the vxworks card.
+    %% Client is also closed after this, even if reboot fails
+    vxworks_client:send_data_wait_for_close(P,"q");
+maybe_reboot_target({vxworks,T}) when is_atom(T) ->
+    %% Reboot the vxworks card.
+    %% Client is also closed after this, even if reboot fails
+    vxworks_client:reboot(T);
+maybe_reboot_target(_) ->
+    {error, cannot_reboot_target}.
+
+%% close_target_client(Client) -> ok
+%% Closes a vxworks client; 'undefined' (the unix case) is a no-op.
+close_target_client({vxworks,P}) ->
+    vxworks_client:close(P);
+close_target_client(undefined) ->
+    ok.
+
+
+
+%%
+%% Command for starting main target
+%%
+%% get_main_target_start_command(TargetType, TargetHost, Naming,
+%%                               MasterNode, MasterCookie) -> string()
+%% Builds the command line that boots the main target node with sasl
+%% and test_server:start, passing the controller's hostname.  On
+%% vxworks the "e" prefix is the board's erl launcher.
+get_main_target_start_command(vxworks,_TargetHost,Naming,
+			      _MasterNode,_MasterCookie) ->
+    "e" ++ Naming ++ " test_server -boot start_sasl"
+	" -sasl errlog_type error"
+	" -s test_server start " ++ test_server_sup:hoststr();
+get_main_target_start_command(unix,_TargetHost,Naming,
+			      _MasterNode,_MasterCookie) ->
+    Prog = pick_erl_program(default),
+    Prog ++ " " ++ Naming ++ " test_server" ++
+	" -boot start_sasl -sasl errlog_type error"
+	" -s test_server start " ++ test_server_sup:hoststr().
+
+%%
+%% Command for starting slave nodes
+%%
+%% get_slave_node_start_command(TargetType, Prog, MasterNode) -> string()
+%% Command prefix for a slave node; unix slaves run detached with a
+%% -master argument, vxworks uses the board's "e" launcher.
+get_slave_node_start_command(vxworks, _Prog, _MasterNode) ->
+    "e";
+    %"e-noinput -master " ++ MasterNode;
+get_slave_node_start_command(unix, Prog, MasterNode) ->
+    cast_to_list(Prog) ++ " -detached -master " ++ MasterNode.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% cast_to_list(X) -> string()
+%%% X = list() | atom() | void()
+%%% Returns a string representation of whatever was input
+
+%% cast_to_list(X) -> string()
+%% Lists pass through, atoms are converted, and anything else is
+%% rendered with ~w and flattened.
+cast_to_list(X) when is_list(X) -> X;
+cast_to_list(X) when is_atom(X) -> atom_to_list(X);
+cast_to_list(X) -> lists:flatten(io_lib:format("~w", [X])).
+
+
+%%% L contains elements of the forms
+%%% {prog, String}
+%%% {release, Rel} where Rel = String | latest | previous
+%%% this
+%%%
+%% pick_erl_program(default | L) -> string()
+%% 'default' yields this emulator's program name.  Otherwise L is a
+%% list of alternatives ({prog,S} | {release,S} | this) from which one
+%% is chosen at random (see random_element/1).
+pick_erl_program(default) ->
+    cast_to_list(lib:progname());
+pick_erl_program(L) ->
+    P = random_element(L),
+    case P of
+	{prog, S} ->
+	    S;
+	{release, S} ->
+	    find_release(S);
+	this ->
+	    lib:progname()
+    end.
+
+%% random_element(L) -> Element
+%% Pseudo-random pick using now() as entropy source (this module must
+%% load on R11, so newer random APIs are not an option here).
+random_element(L) ->
+    {A,B,C} = now(),
+    E = lists:sum([A,B,C]) rem length(L),
+    lists:nth(E+1, L).
+
+%% find_release(latest | previous | Rel) -> Path | none
+%% 'latest' and 'previous' are hard-coded site-specific paths
+%% ("kaka" is an obvious placeholder); any other Rel is resolved per
+%% OS type by find_release/2.
+find_release(latest) ->
+    "/usr/local/otp/releases/latest/bin/erl";
+find_release(previous) ->
+    "kaka";
+find_release(Rel) ->
+    find_release(os:type(), Rel).
+
+%% find_release(OsType, Rel) -> Path | none
+%% Solaris: only sparc has installed releases.  Linux: the first
+%% existing candidate path wins.  All other platforms: none.
+find_release({unix,sunos}, Rel) ->
+    case os:cmd("uname -p") of
+	"sparc" ++ _ ->
+	    "/usr/local/otp/releases/otp_beam_solaris8_" ++ Rel ++ "/bin/erl";
+	_ ->
+	    none
+    end;
+find_release({unix,linux}, Rel) ->
+    Candidates = find_rel_linux(Rel),
+    case lists:dropwhile(fun(N) ->
+				 not filelib:is_regular(N)
+			 end, Candidates) of
+	[] -> none;
+	[Erl|_] -> Erl
+    end;
+find_release(_, _) -> none.
+
+%% find_rel_linux(Rel) -> [CandidatePath]
+%% Only SuSE installations are supported; elsewhere no candidates.
+find_rel_linux(Rel) ->
+    case suse_release() of
+	none -> [];
+	SuseRel -> find_rel_suse(Rel, SuseRel)
+    end.
+
+%% find_rel_suse(Rel, SuseRel) -> [CandidatePath]
+%% Candidates built for the running SuSE major version and, as
+%% fallbacks, for older SuSE versions (newer installs can run builds
+%% made on older ones).
+find_rel_suse(Rel, SuseRel) ->
+    Root = "/usr/local/otp/releases/otp_beam_linux_sles",
+    case SuseRel of
+	"11" ->
+	    %% Try both SuSE 11, SuSE 10 and SuSe 9 in that order.
+	    find_rel_suse_1(Rel, Root++"11") ++
+		find_rel_suse_1(Rel, Root++"10") ++
+		find_rel_suse_1(Rel, Root++"9");
+	"10" ->
+	    %% Try both SuSE 10 and SuSe 9 in that order.
+	    find_rel_suse_1(Rel, Root++"10") ++
+		find_rel_suse_1(Rel, Root++"9");
+	"9" ->
+	    find_rel_suse_1(Rel, Root++"9");
+	_ ->
+	    []
+    end.
+
+%% find_rel_suse_1(Rel, RootWc) -> [CandidatePath]
+%% Adds the architecture suffix: 32-bit emulators look only at i386
+%% builds; 64-bit ones prefer x64 but fall back to i386.
+find_rel_suse_1(Rel, RootWc) ->
+    case erlang:system_info(wordsize) of
+	4 ->
+	    find_rel_suse_2(Rel, RootWc++"_i386");
+	8 ->
+	    find_rel_suse_2(Rel, RootWc++"_x64") ++
+		find_rel_suse_2(Rel, RootWc++"_i386")
+    end.
+
+%% find_rel_suse_2(Rel, RootWc) -> [ErlPath] | []
+%% Expands the wildcard for the release directory and, if any match,
+%% returns the bin/erl path of the first one.
+find_rel_suse_2(Rel, RootWc) ->
+    Wc = RootWc ++ "_" ++ Rel,
+    case filelib:wildcard(Wc) of
+	[] ->
+	    [];
+	[R|_] ->
+	    [filename:join([R,"bin","erl"])]
+    end.
+
+%% suse_release() -> VersionString | none.
+%% Return the major SuSE version number for this platform or
+%% 'none' if this is not a SuSE platform.
+%% suse_release() -> VersionString | none.
+%% Return the major SuSE version number for this platform or
+%% 'none' if this is not a SuSE platform.  The file handle is always
+%% closed via the try...after.
+suse_release() ->
+    case file:open("/etc/SuSE-release", [read]) of
+	{ok,Fd} ->
+	    try
+		suse_release(Fd)
+	    after
+		file:close(Fd)
+	    end;
+	{error,_} -> none
+    end.
+
+%% suse_release(Fd) -> VersionString | none
+%% Scans the file line by line for "VERSION = <digits>".  NOTE(review):
+%% the trailing "\s*" in the string literal is the Erlang escape for a
+%% single space (not the regex class \s), so it matches literal
+%% spaces; harmless here since the match is not end-anchored.
+suse_release(Fd) ->
+    case io:get_line(Fd, '') of
+	eof -> none;
+	Line when is_list(Line) ->
+	    case re:run(Line, "^VERSION\\s*=\\s*(\\d+)\s*",
+			[{capture,all_but_first,list}]) of
+		nomatch ->
+		    suse_release(Fd);
+		{match,[Version]} ->
+		    Version
+	    end
+    end.
+
+%% unpack(Bin) -> {ok,Term} | error
+%% Decodes a wire message: one leading tag byte that must be 1,
+%% followed by a term_to_binary-encoded term.  A bad tag or an
+%% undecodable payload yields 'error' (the catch absorbs the
+%% binary_to_term badarg).
+unpack(Bin) ->
+    {One,Term} = split_binary(Bin, 1),
+    case binary_to_list(One) of
+	[1] ->
+	    case catch {ok,binary_to_term(Term)} of
+		{'EXIT',_} -> error;
+		{ok,_}=Res -> Res
+	    end;
+	_ -> error
+    end.
+
+%% id(I) -> I.  Identity; used to pass a literal through a function
+%% call (see stop_tracer_node/1 and trc_loop/3).
+id(I) -> I.
+
+%% print_data(Port) -> ok | true
+%% Relays everything a port produces to stdout until eof, then closes
+%% the port and drains a possible 'EXIT' message (the short 'after 1'
+%% forces a context switch so the exit signal has time to arrive).
+print_data(Port) ->
+    receive
+	{Port, {data, Bytes}} ->
+	    io:put_chars(Bytes),
+	    print_data(Port);
+	{Port, eof} ->
+	    Port ! {self(), close},
+	    receive
+		{Port, closed} ->
+		    true
+	    end,
+	    receive
+		{'EXIT',  Port,  _} ->
+		    ok
+	    after 1 ->	% force context switch
+		    ok
+	    end
+    end.
diff --git a/lib/test_server/src/test_server_sup.erl b/lib/test_server/src/test_server_sup.erl
new file mode 100644
index 0000000000..c665f185fd
--- /dev/null
+++ b/lib/test_server/src/test_server_sup.erl
@@ -0,0 +1,616 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% Purpose: Test server support functions.
+%%%-------------------------------------------------------------------
+-module(test_server_sup).
+-export([timetrap/2, timetrap_cancel/1, capture_get/1, messages_get/1,
+ timecall/3, call_crash/5, app_test/2, check_new_crash_dumps/0,
+ cleanup_crash_dumps/0, crash_dump_dir/0, tar_crash_dumps/0,
+ get_username/0, get_os_family/0,
+ hostatom/0, hostatom/1, hoststr/0, hoststr/1,
+ framework_call/2,framework_call/3,
+ format_loc/1, package_str/1, package_atom/1,
+ call_trace/1]).
+-include("test_server_internal.hrl").
+-define(crash_dump_tar,"crash_dumps.tar.gz").
+-define(src_listing_ext, ".src.html").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timetrap(Timeout,Pid) -> Handle
+%%    Handle = term()
+%%
+%% Creates a time trap, that will kill the given process if the
+%% trap is not cancelled with timetrap_cancel/1, within Timeout
+%% milliseconds.
+%% Runs at max priority so the trap fires even when the node is loaded.
+%% The timeout is multiplied by test_server:timetrap_scale_factor/0.
+%% If the victim has not terminated 10 s after exit(Pid,Trap) (it is
+%% probably trapping exits), it is killed unconditionally.
+
+timetrap(Timeout0, Pid) ->
+    process_flag(priority, max),
+    Timeout = test_server:timetrap_scale_factor() * Timeout0,
+    receive
+    after trunc(Timeout) ->
+	    Line = test_server:get_loc(Pid),
+	    Mon = erlang:monitor(process, Pid),
+	    Trap =
+		case get(test_server_init_or_end_conf) of
+		    undefined ->
+			{timetrap_timeout,trunc(Timeout),Line};
+		    InitOrEnd ->
+			{timetrap_timeout,trunc(Timeout),Line,InitOrEnd}
+		end,
+	    exit(Pid,Trap),
+	    receive
+		{'DOWN', Mon, process, Pid, _} ->
+		    ok
+	    after 10000 ->
+		    %% Pid is probably trapping exits, hit it harder...
+		    catch error_logger:warning_msg("Testcase process ~p not "
+						   "responding to timetrap "
+						   "timeout:~n"
+						   "  ~p.~n"
+						   "Killing testcase...~n",
+						   [Pid, Trap]),
+		    exit(Pid, kill)
+	    end
+    end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% timetrap_cancel(Handle) -> ok
+%%    Handle = term()
+%%
+%% Cancels a time trap. Handle is the timetrap process: it is unlinked,
+%% killed, and we wait (at most 2 s) for it to actually go away.
+
+timetrap_cancel(Handle) ->
+    unlink(Handle),
+    MonRef = erlang:monitor(process, Handle),
+    exit(Handle, kill),
+    receive {'DOWN',MonRef,_,_,_} -> ok after 2000 -> ok end.
+
+%% capture_get(Msgs) -> [Msg]
+%% Drains all pending {captured,Msg} messages from the mailbox and
+%% returns them (prepended to the Msgs accumulator) in arrival order.
+capture_get(Msgs) ->
+    receive
+	{captured,Msg} ->
+	    capture_get([Msg|Msgs])
+    after 0 ->
+	    lists:reverse(Msgs)
+    end.
+
+
+%% messages_get(Msgs) -> [Msg]
+%% Drains the entire mailbox and returns all messages in arrival order.
+messages_get(Msgs) ->
+    receive
+	Msg ->
+	    messages_get([Msg|Msgs])
+    after 0 ->
+	    lists:reverse(Msgs)
+    end.
+
+
+%% timecall(M, F, A) -> {ElapsedSeconds, Value}
+%% Applies M:F(A...) and returns the wall-clock time it took (in
+%% seconds, as a float) together with the result.
+%% NOTE(review): uses erlang:now/0, which is deprecated in modern OTP
+%% (erlang:monotonic_time/1 is preferred); left as-is here.
+timecall(M, F, A) ->
+    Befor = erlang:now(),
+    Val = apply(M, F, A),
+    After = erlang:now(),
+    Elapsed =
+	(element(1,After)*1000000+element(2,After)+element(3,After)/1000000)-
+	(element(1,Befor)*1000000+element(2,Befor)+element(3,Befor)/1000000),
+    {Elapsed, Val}.
+
+
+
+%% call_crash(Time,Crash,M,F,A) -> ok | exits
+%% Spawns a linked process running M:F(A...) and expects it to crash
+%% within Time ms with reason Crash (or any reason if Crash == any).
+%% A wrong crash reason, or no crash before the timeout, makes this
+%% process exit. trap_exit is temporarily enabled and restored; EXIT
+%% messages from other pids are re-raised when trap_exit was off before.
+call_crash(Time,Crash,M,F,A) ->
+    OldTrapExit = process_flag(trap_exit,true),
+    Pid = spawn_link(M,F,A),
+    Answer =
+	receive
+	    {'EXIT',Crash} ->
+		ok;
+	    {'EXIT',Pid,Crash} ->
+		ok;
+	    {'EXIT',_Reason} when Crash==any ->
+		ok;
+	    {'EXIT',Pid,_Reason} when Crash==any ->
+		ok;
+	    {'EXIT',Reason} ->
+		test_server:format(12, "Wrong crash reason. Wanted ~p, got ~p.",
+		      [Crash, Reason]),
+		exit({wrong_crash_reason,Reason});
+	    {'EXIT',Pid,Reason} ->
+		test_server:format(12, "Wrong crash reason. Wanted ~p, got ~p.",
+		      [Crash, Reason]),
+		exit({wrong_crash_reason,Reason});
+	    {'EXIT',OtherPid,Reason} when OldTrapExit == false ->
+		exit({'EXIT',OtherPid,Reason})
+	after do_trunc(Time) ->
+		exit(call_crash_timeout)
+	end,
+    process_flag(trap_exit,OldTrapExit),
+    Answer.
+
+%% do_trunc(Time) -> integer() | infinity
+%% trunc/1 that passes 'infinity' through untouched (for receive..after).
+do_trunc(infinity) -> infinity;
+do_trunc(T) -> trunc(T).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% app_test/2
+%%
+%% Checks one application's .app file for obvious errors.
+%% Checks..
+%% * .. required fields
+%% * .. that all modules specified actually exist
+%% * .. that all required applications exist
+%% * .. that no module included in the application has export_all
+%% * .. that all modules in the ebin/ dir are included
+%%	(This only produces a warning, as all modules do not
+%%	have to be included (if the `pedantic' option isn't used))
+app_test(Application, Mode) ->
+    case is_app(Application) of
+	{ok, AppFile} ->
+	    do_app_tests(AppFile, Application, Mode);
+	Error ->
+	    test_server:fail(Error)
+    end.
+
+%% is_app(Application) -> {ok, AppFileKeys} | {error, not_an_application}
+%% Consults lib_dir(Application)/ebin/Application.app and returns the
+%% key list from the {application, App, Keys} term.
+is_app(Application) ->
+    case file:consult(filename:join([code:lib_dir(Application),"ebin",
+				     atom_to_list(Application)++".app"])) of
+	{ok, [{application, Application, AppFile}] } ->
+	    {ok, AppFile};
+	_ ->
+	    test_server:format(minor,
+			       "Application (.app) file not found, "
+			       "or it has very bad syntax.~n"),
+	    {error, not_an_application}
+    end.
+
+
+%% do_app_tests(AppFile, AppName, Mode) -> ok | fails the test case
+%% Runs all the .app sanity checks, collecting findings in process-
+%% dictionary lists (filled/erased via fill_dictionary/erase_dictionary).
+%% Each check_dict* call returns 1 for "clean"; a total of 5 means all
+%% five checks passed.
+do_app_tests(AppFile, AppName, Mode) ->
+    DictList=
+	[
+	 {missing_fields, []},
+	 {missing_mods, []},
+	 {superfluous_mods_in_ebin, []},
+	 {export_all_mods, []},
+	 {missing_apps, []}
+	],
+    fill_dictionary(DictList),
+
+    %% An appfile must (?) have some fields..
+    check_fields([description, modules, registered, applications], AppFile),
+
+    %% Check for missing and extra modules.
+    {value, {modules, Mods}}=lists:keysearch(modules, 1, AppFile),
+    EBinList=lists:sort(get_ebin_modnames(AppName)),
+    {Missing, Extra} = common(lists:sort(Mods), EBinList),
+    put(superfluous_mods_in_ebin, Extra),
+    put(missing_mods, Missing),
+
+    %% Check that no modules in the application has export_all.
+    app_check_export_all(Mods),
+
+    %% Check that all specified applications exists.
+    {value, {applications, Apps}}=
+	lists:keysearch(applications, 1, AppFile),
+    check_apps(Apps),
+
+    A=check_dict(missing_fields, "Inconsistent app file, "
+		 "missing fields"),
+    B=check_dict(missing_mods, "Inconsistent app file, "
+		 "missing modules"),
+    C=check_dict_tolerant(superfluous_mods_in_ebin, "Inconsistent app file, "
+			  "Modules not included in app file.", Mode),
+    D=check_dict(export_all_mods, "Inconsistent app file, "
+		 "Modules have `export_all'."),
+    E=check_dict(missing_apps, "Inconsistent app file, "
+		 "missing applications."),
+
+    erase_dictionary(DictList),
+    case A+B+C+D+E of
+	5 ->
+	    ok;
+	_ ->
+	    test_server:fail()
+    end.
+
+%% app_check_export_all(Mods) -> ok
+%% Records (in the export_all_mods dict entry) every listed module that
+%% was compiled with export_all. Modules that cannot be loaded (undef
+%% from module_info/1) are silently skipped.
+app_check_export_all([]) ->
+    ok;
+app_check_export_all([Mod|Mods]) ->
+    case catch apply(Mod, module_info, [compile]) of
+	{'EXIT', {undef,_}} ->
+	    app_check_export_all(Mods);
+	COpts ->
+	    case lists:keysearch(options, 1, COpts) of
+		false ->
+		    app_check_export_all(Mods);
+		{value, {options, List}} ->
+		    case lists:member(export_all, List) of
+			true ->
+			    put(export_all_mods, [Mod|get(export_all_mods)]),
+			    app_check_export_all(Mods);
+			false ->
+			    app_check_export_all(Mods)
+		    end
+	    end
+    end.
+
+%% Given two sorted lists, L1 and L2, returns {NotInL2, NotInL1},
+%% NotInL2 is the elements of L1 which don't occur in L2,
+%% NotInL1 is the elements of L2 which don't occur in L1.
+%% (Classic merge walk over two sorted lists; accumulators are built in
+%% reverse but only set membership matters to the callers.)
+
+common(L1, L2) ->
+    common(L1, L2, [], []).
+
+common([X|Rest1], [X|Rest2], A1, A2) ->
+    common(Rest1, Rest2, A1, A2);
+common([X|Rest1], [Y|Rest2], A1, A2) when X < Y ->
+    common(Rest1, [Y|Rest2], [X|A1], A2);
+common([X|Rest1], [Y|Rest2], A1, A2) ->
+    common([X|Rest1], Rest2, A1, [Y|A2]);
+common([], L, A1, A2) ->
+    {A1, L++A2};
+common(L, [], A1, A2) ->
+    {L++A1, A2}.
+
+%% check_apps(Apps) -> ok
+%% Records every application in Apps whose .app file cannot be read
+%% (per is_app/1) under the missing_apps dict entry.
+check_apps([]) ->
+    ok;
+check_apps([App|Apps]) ->
+    case is_app(App) of
+	{ok, _AppFile} ->
+	    ok;
+	{error, _} ->
+	    put(missing_apps, [App|get(missing_apps)])
+    end,
+    check_apps(Apps).
+
+%% check_fields(FieldNames, AppFile) -> ok
+%% Verifies each required key is present in the AppFile key list.
+check_fields([], _AppFile) ->
+    ok;
+check_fields([L|Ls], AppFile) ->
+    check_field(L, AppFile),
+    check_fields(Ls, AppFile).
+
+%% check_field(FieldName, AppFile) -> ok
+%% Records FieldName under missing_fields if it is absent from AppFile.
+check_field(FieldName, AppFile) ->
+    case lists:keymember(FieldName, 1, AppFile) of
+	true ->
+	    ok;
+	false ->
+	    put(missing_fields, [FieldName|get(missing_fields)]),
+	    ok
+    end.
+
+%% check_dict(Dict, Reason) -> 1 | 0
+%% Returns 1 if the named dict entry is empty (check passed); otherwise
+%% prints the findings and returns 0.
+check_dict(Dict, Reason) ->
+    case get(Dict) of
+	[] ->
+	    1;				% All ok.
+	List ->
+	    io:format("** ~s (~s) ->~n~p~n",[Reason, Dict, List]),
+	    0
+    end.
+
+%% check_dict_tolerant(Dict, Reason, Mode) -> 1 | 0
+%% Like check_dict/2, but a non-empty entry only fails (returns 0) in
+%% 'pedantic' mode; otherwise it is reported and tolerated.
+check_dict_tolerant(Dict, Reason, Mode) ->
+    case get(Dict) of
+	[] ->
+	    1;				% All ok.
+	List ->
+	    io:format("** ~s (~s) ->~n~p~n",[Reason, Dict, List]),
+	    case Mode of
+		pedantic ->
+		    0;
+		_ ->
+		    1
+	    end
+    end.
+
+%% get_ebin_modnames(AppName) -> [Mod::atom()]
+%% Lists the module names of all object files in the application's
+%% ebin directory (derived from the file basenames).
+get_ebin_modnames(AppName) ->
+    Wc=filename:join([code:lib_dir(AppName),"ebin",
+		      "*"++code:objfile_extension()]),
+    TheFun=fun(X, Acc) ->
+		   [list_to_atom(filename:rootname(
+				   filename:basename(X)))|Acc] end,
+    _Files=lists:foldl(TheFun, [], filelib:wildcard(Wc)).
+
+%%
+%% This function removes any erl_crash_dump* files found in the
+%% test server directory. Done only once when the test server
+%% is started.
+%%
+cleanup_crash_dumps() ->
+    Dir = crash_dump_dir(),
+    Dumps = filelib:wildcard(filename:join(Dir, "erl_crash_dump*")),
+    delete_files(Dumps).
+
+%% crash_dump_dir() -> Dir
+%% The directory this module's beam file was loaded from.
+crash_dump_dir() ->
+    filename:dirname(code:which(?MODULE)).
+
+%% tar_crash_dumps() -> {ok,TarFileName} | {error,no_crash_dumps}
+%% Packs all erl_crash_dump* files in the crash-dump dir into a
+%% compressed tar file and then deletes the originals.
+tar_crash_dumps() ->
+    Dir = crash_dump_dir(),
+    case filelib:wildcard(filename:join(Dir, "erl_crash_dump*")) of
+	[] -> {error,no_crash_dumps};
+	Dumps ->
+	    TarFileName = filename:join(Dir,?crash_dump_tar),
+	    {ok,Tar} = erl_tar:open(TarFileName,[write,compressed]),
+	    lists:foreach(
+	      fun(File) ->
+		      ok = erl_tar:add(Tar,File,filename:basename(File),[])
+	      end,
+	      Dumps),
+	    ok = erl_tar:close(Tar),
+	    delete_files(Dumps),
+	    {ok,TarFileName}
+    end.
+
+
+%% check_new_crash_dumps() -> ok
+%% Reports (to the minor log) any crash dumps that appeared during a
+%% test, appends their contents to the log file and deletes them.
+check_new_crash_dumps() ->
+    Dir = crash_dump_dir(),
+    Dumps = filelib:wildcard(filename:join(Dir, "erl_crash_dump*")),
+    case length(Dumps) of
+	0 ->
+	    ok;
+	Num ->
+	    test_server_ctrl:format(minor,
+				    "Found ~p crash dumps:~n", [Num]),
+	    append_files_to_logfile(Dumps),
+	    delete_files(Dumps)
+    end.
+
+%% append_files_to_logfile(Files) -> ok
+%% Copies each crash dump into the current minor log file (fd taken
+%% from the test_server_minor_fd dict entry). The node name is derived
+%% from the dump's filename (the part after the first '.').
+append_files_to_logfile([]) -> ok;
+append_files_to_logfile([File|Files]) ->
+    NodeName=from($., File),
+    test_server_ctrl:format(minor, "Crash dump from node ~p:~n",[NodeName]),
+    Fd=get(test_server_minor_fd),
+    case file:read_file(File) of
+	{ok, Bin} ->
+	    case file:write(Fd, Bin) of
+		ok ->
+		    ok;
+		{error,Error} ->
+		    %% Write failed. The following io:format/3 will probably also
+		    %% fail, but in that case it will throw an exception so that
+		    %% we will be aware of the problem.
+		    io:format(Fd, "Unable to write the crash dump "
+			      "to this file: ~p~n", [file:format_error(Error)])
+	    end;
+	_Error ->
+	    io:format(Fd, "Failed to read: ~s\n", [File])
+    end,
+    append_files_to_logfile(Files).
+
+%% delete_files(Files) -> ok
+%% Deletes each file; if deletion fails the file is renamed to
+%% File ++ ".old" instead (e.g. a dump still being written on Windows).
+delete_files([]) -> ok;
+delete_files([File|Files]) ->
+    io:format("Deleting file: ~s~n", [File]),
+    case file:delete(File) of
+	{error, _} ->
+	    case file:rename(File, File++".old") of
+		{error, Error} ->
+		    io:format("Could neither delete nor rename file "
+			      "~s: ~s.~n", [File, Error]);
+		_ ->
+		    ok
+	    end;
+	_ ->
+	    ok
+    end,
+    delete_files(Files).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% erase_dictionary(Vars) -> ok
+%%   Vars = [{atom(),term()},...]
+%%
+%% Takes a list of dictionary key-value pairs, erases
+%% each key and returns ok.
+erase_dictionary([{Var, _Val}|Vars]) ->
+    erase(Var),
+    erase_dictionary(Vars);
+erase_dictionary([]) ->
+    ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% fill_dictionary(KeyVals) -> void()
+%%   KeyVals = [{atom(),term()},...]
+%%
+%% Takes each Key-Value pair, and inserts it in the process dictionary.
+fill_dictionary([{Var,Val}|Vars]) ->
+    put(Var,Val),
+    fill_dictionary(Vars);
+fill_dictionary([]) ->
+    [].
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% get_username() -> UserName
+%%
+%% Returns the current user, from the USER or USERNAME environment
+%% variable, or "" if neither is set.
+get_username() ->
+    getenv_any(["USER","USERNAME"]).
+
+%% getenv_any(Keys) -> Value | ""
+%% First defined environment variable among Keys, or "".
+getenv_any([Key|Rest]) ->
+    case catch os:getenv(Key) of
+	String when is_list(String) -> String;
+	false -> getenv_any(Rest)
+    end;
+getenv_any([]) -> "".
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% get_os_family() -> OsFamily
+%%
+%% Returns the OS family (first element of os:type/0).
+get_os_family() ->
+    case os:type() of
+	{OsFamily,_OsName} -> OsFamily;
+	OsFamily -> OsFamily
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% hostatom()/hostatom(Node) -> Host; atom()
+%% hoststr() | hoststr(Node) -> Host; string()
+%%
+%% Returns the host part (after the '@') of a node name, for the
+%% local node by default.
+hostatom() ->
+    hostatom(node()).
+hostatom(Node) ->
+    list_to_atom(hoststr(Node)).
+hoststr() ->
+    hoststr(node()).
+hoststr(Node) when is_atom(Node) ->
+    hoststr(atom_to_list(Node));
+hoststr(Node) when is_list(Node) ->
+    from($@, Node).
+
+%% from(Ch, List) -> the tail of List after the first Ch ("" if absent).
+from(H, [H | T]) -> T;
+from(H, [_ | T]) -> from(H, T);
+from(_H, []) -> [].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% framework_call(Callback,Func,Args,DefaultReturn) -> Return | DefaultReturn
+%%
+%% Calls the given Func in the framework callback module named by the
+%% TEST_SERVER_FRAMEWORK environment variable. If the variable is unset,
+%% or the module does not export Func/length(Args), DefaultReturn is
+%% returned (ok for the 2-argument form).
+framework_call(Func,Args) ->
+    framework_call(Func,Args,ok).
+framework_call(Func,Args,DefaultReturn) ->
+    CB = os:getenv("TEST_SERVER_FRAMEWORK"),
+    framework_call(CB,Func,Args,DefaultReturn).
+framework_call(false,_Func,_Args,DefaultReturn) ->
+    DefaultReturn;
+framework_call(Callback,Func,Args,DefaultReturn) ->
+    %% NOTE(review): list_to_atom/1 on an environment variable; fine in
+    %% a trusted test environment, but it does create a new atom.
+    Mod = list_to_atom(Callback),
+    case code:is_loaded(Mod) of
+	false -> code:load_file(Mod);
+	_ -> ok
+    end,
+    case erlang:function_exported(Mod,Func,length(Args)) of
+	true ->
+	    apply(Mod,Func,Args);
+	false ->
+	    DefaultReturn
+    end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% format_loc(Loc) -> string()
+%%
+%% Formats the printout of the line of code read from
+%% process dictionary (test_server_loc). Adds link to
+%% correct line in source code.
+%% For *_SUITE modules (detected by matching the reversed module name
+%% against "ETIUS_") an HTML anchor into the generated source listing
+%% (?src_listing_ext) is emitted, rounded to the nearest 10-line label.
+format_loc([{Mod,Func,Line}]) ->
+    [format_loc1({Mod,Func,Line})];
+format_loc([{Mod,Func,Line}|Rest]) ->
+    ["[",format_loc1({Mod,Func,Line}),",\n"|format_loc1(Rest)];
+format_loc([{Mod,LineOrFunc}]) ->
+    format_loc({Mod,LineOrFunc});
+format_loc({Mod,Func}) when is_atom(Func) ->
+    io_lib:format("{~s,~w}",[package_str(Mod),Func]);
+format_loc({Mod,Line}) when is_integer(Line) ->
+    %% ?line macro is used
+    ModStr = package_str(Mod),
+    case lists:reverse(ModStr) of
+	[$E,$T,$I,$U,$S,$_|_] ->
+	    io_lib:format("{~s,<a href=\"~s~s#~w\">~w</a>}",
+			  [ModStr,downcase(ModStr),?src_listing_ext,
+			   round_to_10(Line),Line]);
+	_ ->
+	    io_lib:format("{~s,~w}",[ModStr,Line])
+    end;
+format_loc(Loc) ->
+    io_lib:format("~p",[Loc]).
+
+%% format_loc1/1: helper for lists of {Mod,Func,Line} tuples and for a
+%% single tuple (with or without the source-listing link).
+format_loc1([{Mod,Func,Line}]) ->
+    ["              ",format_loc1({Mod,Func,Line}),"]"];
+format_loc1([{Mod,Func,Line}|Rest]) ->
+    ["              ",format_loc1({Mod,Func,Line}),",\n"|format_loc1(Rest)];
+format_loc1({Mod,Func,Line}) ->
+    ModStr = package_str(Mod),
+    case lists:reverse(ModStr) of
+	[$E,$T,$I,$U,$S,$_|_] ->
+	    io_lib:format("{~s,~w,<a href=\"~s~s#~w\">~w</a>}",
+			  [ModStr,Func,downcase(ModStr),?src_listing_ext,
+			   round_to_10(Line),Line]);
+	_ ->
+	    io_lib:format("{~s,~w,~w}",[ModStr,Func,Line])
+    end.
+
+%% round_to_10(N) -> N rounded down to the nearest multiple of 10
+%% (used to hit the 10-line anchor labels in the source listing).
+round_to_10(N) when (N rem 10) == 0 ->
+    N;
+round_to_10(N) ->
+    trunc(N/10)*10.
+
+%% downcase(S) -> S with A-Z mapped to a-z (ASCII only).
+downcase(S) -> downcase(S, []).
+downcase([Uc|Rest], Result) when $A =< Uc, Uc =< $Z ->
+    downcase(Rest, [Uc-$A+$a|Result]);
+downcase([C|Rest], Result) ->
+    downcase(Rest, [C|Result]);
+downcase([], Result) ->
+    lists:reverse(Result).
+
+%% package_str(Mod) -> string module name; a list of atoms [s1,s2]
+%% (old package notation) is joined as "s1.s2".
+package_str(Mod) when is_atom(Mod) ->
+    atom_to_list(Mod);
+package_str(Mod) when is_list(Mod), is_atom(hd(Mod)) ->
+    %% convert [s1,s2] -> "s1.s2"
+    [_|M] = lists:flatten(["."++atom_to_list(S) || S <- Mod]),
+    M;
+package_str(Mod) when is_list(Mod) ->
+    Mod.
+
+%% package_atom(Mod) -> the same module name as an atom.
+package_atom(Mod) when is_atom(Mod) ->
+    Mod;
+package_atom(Mod) when is_list(Mod), is_atom(hd(Mod)) ->
+    list_to_atom(package_str(Mod));
+package_atom(Mod) when is_list(Mod) ->
+    list_to_atom(Mod).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% call_trace(TraceSpecFile) -> ok
+%%
+%% Read terms on format {m,Mod} | {f,Mod,Func}
+%% from TraceSpecFile and enable call trace for
+%% specified functions.
+%% NOTE(review): old-style `catch` is used, so the stacktrace is lost;
+%% the reason is displayed and re-raised via exit/1.
+call_trace(TraceSpec) ->
+    case catch try_call_trace(TraceSpec) of
+	{'EXIT',Reason} ->
+	    erlang:display(Reason),
+	    exit(Reason);
+	Ok ->
+	    Ok
+    end.
+
+%% try_call_trace(TraceSpecFile) -> ok | exits
+%% Starts a dbg tracer for this process (set-on-spawn + call trace) and
+%% installs a return_trace match spec for each {m,M} / {f,M,F} term.
+%% Any unreadable file, failed tpl, or unknown term aborts with exit/1.
+try_call_trace(TraceSpec) ->
+    case file:consult(TraceSpec) of
+	{ok,Terms} ->
+	    dbg:tracer(),
+	    %% dbg:p(self(), [p, m, sos, call]),
+	    dbg:p(self(), [sos, call]),
+	    lists:foreach(fun({m,M}) ->
+				  case dbg:tpl(M,[{'_',[],[{return_trace}]}]) of
+				      {error,What} -> exit({error,{tracing_failed,What}});
+				      _ -> ok
+				  end;
+			     ({f,M,F}) ->
+				  case dbg:tpl(M,F,[{'_',[],[{return_trace}]}]) of
+				      {error,What} -> exit({error,{tracing_failed,What}});
+				      _ -> ok
+				  end;
+			     (Huh) ->
+				  exit({error,{unrecognized_trace_term,Huh}})
+			  end, Terms),
+	    ok;
+	{_,Error} ->
+	    exit({error,{tracing_failed,TraceSpec,Error}})
+    end.
+
diff --git a/lib/test_server/src/things/distr_startup_SUITE.erl b/lib/test_server/src/things/distr_startup_SUITE.erl
new file mode 100644
index 0000000000..0d4e467570
--- /dev/null
+++ b/lib/test_server/src/things/distr_startup_SUITE.erl
@@ -0,0 +1,238 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(distr_startup_SUITE).
+-compile([export_all]).
+%%-define(line_trace,1).
+-include("test_server.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Test suite entry point: two cases, reads and writes.
+all(suite) -> [reads,writes].
+
+-define(iterations,10000).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Application resource term for the app1 dummy application.
+app1() ->
+    {application, app1,
+     [{description, "ERTS  CXC 138 10"},
+      {vsn, "2.0"},
+      {applications, [kernel, stdlib]},
+      {mod, {ch_sup, {app1, 1, 3}}}]}.
+
+%% Application resource term for the app3 dummy application.
+app3() ->
+    {application, app3,
+     [{description, "ERTS  CXC 138 10"},
+      {vsn, "2.0"},
+      {applications, [kernel, stdlib]},
+      {mod, {ch_sup, {app3, 7, 9}}}]}.
+
+
+%% Writes a sys.config with kernel distributed-application settings for
+%% the three node names C1..C3 to the already-open file Fd.
+config(Fd,C1,C2,C3) ->
+    io:format(Fd,
+	      "[{kernel, [{sync_nodes_optional, ['~s','~s','~s']},"
+	      "{sync_nodes_timeout, 1},"
+	      "{distributed, [{app1, ['~s', '~s', '~s']},"
+	      "{app2, 10000, ['~s', '~s', '~s']},"
+	      "{app3, 5000, [{'~s', '~s'}, '~s']}]}]}].~n",
+	      [C1,C2,C3, C1,C2,C3, C1,C2,C3, C1,C2,C3]).
+
+%% Tail of the list after the first H ("" if H does not occur).
+%% NOTE(review): the last clause's H is unused and triggers a compiler
+%% warning; elsewhere in this repo the same helper spells it _H.
+from(H, [H | T]) -> T;
+from(H, [_ | T]) -> from(H, T);
+from(H, []) -> [].
+
+%%-----------------------------------------------------------------
+%% Test suite for distributed applications, tests start, load
+%% etc indirectly.
+%% Should be started in a CC view with:
+%% erl -sname master -rsh ctrsh
+%%-----------------------------------------------------------------
+%% start_nodes(Conf) -> {Cp1,Cp2,Cp3}
+%% Writes the sys.config to priv_dir and boots the first three slave
+%% nodes from the 'nodes' config entry with that config.
+start_nodes(Conf) ->
+    % Write a config file
+    ?line Nodes = ?config(nodes,Conf),
+    ?line [C1,C2,C3|_] = Nodes, %% Need at least 3 nodes
+    ?line Dir = ?config(priv_dir,Conf),
+    ?line {ok, Fd} = file:open(Dir ++ "sys.config", write),
+    ?line config(Fd,C1,C2,C3),
+    ?line file:close(Fd),
+    ?line Config = Dir ++ "sys",
+
+    % Test [cp1, cp2, cp3]
+    ?line {ok, Cp1} = start_node(lists:nth(1,Nodes), Config),
+    ?line {ok, Cp2} = start_node(lists:nth(2,Nodes), Config),
+    ?line {ok, Cp3} = start_node(lists:nth(3,Nodes), Config),
+    % Start app1 and make sure cp1 starts it
+    %%?line rpc:multicall([Cp1, Cp2, Cp3], application, load, [app1()]),
+    %%?line rpc:multicall([Cp1, Cp2, Cp3], application, start,[app1,permanent]),
+    ?line test_server:sleep(1000),
+    {Cp1,Cp2,Cp3}.
+
+%% Halts all three slave nodes.
+stop_nodes({Cp1,Cp2,Cp3}) ->
+    ?line stop_node(Cp1),
+    ?line stop_node(Cp2),
+    ?line stop_node(Cp3).
+
+%% start_node(NodeAtHost, Config) -> {ok,Node} | {error,Reason}
+%% Splits "node@host", then starts a slave node with -config Config.
+start_node(NodeAtHost, Config) ->
+    ?line NodeAtHostStr = atom_to_list(NodeAtHost),
+    ?line HostStr = from($@,NodeAtHostStr),
+    ?line NodeStr = lists:reverse(from($@,lists:reverse(NodeAtHostStr))),
+    ?line Host = list_to_atom(HostStr),
+    ?line Node = list_to_atom(NodeStr),
+    ?line io:format("Launching slave node ~p@~p ~p",[Node,Host,Config]),
+    ?line slave:start(Host, Node, lists:concat(["-config ", Config])).
+
+%% Asynchronously halts Node (cast, so a dead node is harmless).
+stop_node(Node) ->
+    ?line rpc:cast(Node, erlang, halt, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% start_client_process(Cp,Mode,NodeNum) -> ClientPid | exits
+%% Spawns a client/4 process on node Cp via rpc; a badrpc aborts the
+%% test case.
+start_client_process(Cp,Mode,NodeNum) ->
+    io:format("Starting client process at ~p in mode ~p",[Cp,Mode]),
+    ?line case rpc:call(Cp, erlang, spawn,
+			[?MODULE, client,
+			 [Mode,NodeNum,self(),random:uniform(1000)]]) of
+	      {badrpc,Reason} ->
+		  ?line exit({badrpc,{Cp,Reason}});
+	      Client ->
+		  ?line Client
+	  end.
+
+%% start_clients(Mode,Conf) -> {{Cp1,Cp2,Cp3},{Client1,Client2,Client3}}
+%% Boots the three nodes, starts one client per node in the given mode
+%% (w = writers towards us, r = readers), then releases them with 'go'.
+start_clients(Mode,Conf) ->
+    ?line random:seed(4711,0,0),
+    ?line {Cp1,Cp2,Cp3} = start_nodes(Conf),
+    ?line Client1 = start_client_process(Cp1,Mode,1),
+    ?line Client2 = start_client_process(Cp2,Mode,2),
+    ?line Client3 = start_client_process(Cp3,Mode,3),
+    test_server:format(1,"All 3 nodes started, "
+		       "power off client(s) any time...",[]),
+    Client1 ! go,
+    Client2 ! go,
+    Client3 ! go,
+    {{Cp1,Cp2,Cp3},{Client1,Client2,Client3}}.
+
+%% Stops the slave nodes at the end of a case.
+stop_clients(Cps) ->
+    test_server:format(1,"Test completed.",[]),
+    ?line stop_nodes(Cps).
+
+%% Test payload: a deliberately large term ending in the magic cookie
+%% 123456, which is all the receiver checks for.
+data() ->
+    {{self(),foo,bar,[1,2,3,4,5,6,7],{{{{}}}},
+      "We need pretty long packages, so that there is a big risk "
+      "of cutting it in the middle when suddenly turning off "
+      "the power or breaking the connection. "
+      "We don't check the contents of the data very much, but "
+      "at least there is a magic cookie at the end (123456)."
+      "If that one arrives correctly, the link is ok as far "
+      "as we are concerned."},
+     123456}.
+
+%% reads: start writer clients and consume ?iterations messages.
+reads(suite) -> [];
+reads(Conf) ->
+    ?line {Cps,_} = start_clients(w,Conf),
+    ?line read_loop(?iterations,0),
+    ?line stop_clients(Cps),
+    ok.
+
+%% read_loop(N,M) -> ok
+%% Receives up to N messages; M is a per-node message-count triple that
+%% is printed and reset every 100 iterations. A corrupt payload (magic
+%% cookie not 123456) fails the case immediately.
+%% NOTE(review): several clause variables (M in the first clause, Count,
+%% Node, Other) are unused and will draw compiler warnings.
+read_loop(0,M) ->
+    ok;
+read_loop(N,M) ->
+    ?line Dog = test_server:timetrap(test_server:seconds(0.5)),
+    M2 =
+	receive
+	    {Node,Count,{_,123456}} ->
+		?line setelement(Node,M,element(Node,M)+1);
+	    {Node,Count,Data} ->
+		?line exit({network_transmission_error,Data});
+	    {nodedown,Node} ->
+		?line test_server:format(1,"Node ~s went down",[Node]),
+		?line M;
+	    Other ->
+		?line M
+	after test_server:seconds(0.1) ->
+		?line io:format("No message!"),
+		?line M
+	end,
+    ?line test_server:timetrap_cancel(Dog),
+    ?line M3 =
+	case N rem 100 of
+	    0 -> io:format("~p reads to go (~w msgs)",[N,M2]),
+		 {0,0,0};
+	    _ -> M2
+	end,
+    ?line read_loop(N-1,M3).
+
+%% client(Mode,NodeNum,Pid,Seed)
+%% Remote client body: seeds the random generator, waits for 'go', then
+%% either writes payloads to Pid forever (w) or reads forever (r).
+client(w,NodeNum,Pid,Seed) ->
+    random:seed(Seed,0,0),
+    receive
+	go -> ok
+    end,
+    client_write_loop(Pid,0,NodeNum,data());
+client(r,NodeNum,Pid,Seed) ->
+    random:seed(Seed,0,0),
+    receive
+	go -> ok
+    end,
+    client_read_loop(0).
+
+%% Sends {NodeNum,N,Data} to Pid forever, with a random 1-20 ms pause.
+client_write_loop(Pid,N,NodeNum,Data) ->
+    test_server:sleep(random:uniform(20)),
+    Pid ! {NodeNum,N,Data},
+    client_write_loop(Pid,N+1,NodeNum,Data).
+
+%% writes: start reader clients and push 2*?iterations payloads to
+%% randomly chosen clients.
+writes(suite) -> [];
+writes(Conf) ->
+    ?line {Cps,{C1,C2,C3}} = start_clients(r,Conf),
+    ?line write_loop(2*?iterations,{C1,C2,C3},data()),
+    ?line stop_clients(Cps),
+    ok.
+
+%% write_loop(N,Clients,Data) -> ok
+%% Sends Data to a random client N times, under a 0.5 s timetrap per
+%% send; nodedown notifications are drained and logged between sends.
+write_loop(0,_,_) ->
+    ok;
+write_loop(N,Clients,Data) ->
+    ?line Dog = test_server:timetrap(test_server:seconds(0.5)),
+    ?line Client = element(random:uniform(size(Clients)),Clients),
+    ?line Client ! {node(),N,Data},
+    ?line test_server:timetrap_cancel(Dog),
+    receive
+	{nodedown,Node} ->
+	    ?line test_server:format(1,"Node ~s went down",[Node])
+    after 0 ->
+	    ?line ok
+    end,
+    ?line case N rem 100 of
+	      0 -> io:format("~p writes to go",[N]);
+	      _ -> ok
+	  end,
+    ?line write_loop(N-1,Clients,Data).
+
+%% client_read_loop(N)
+%% Receives forever; checks only the 123456 magic cookie and logs any
+%% corrupt or unexpected message. Unused vars (Node/Count/Other in the
+%% ok clause) will draw compiler warnings.
+client_read_loop(N) ->
+    receive
+	{Node,Count,{_,123456}} ->
+	    ?line ok;
+	{Node,Count,Data} ->
+	    ?line io:format("~p(~p): transmission error from node ~p(~p): ~p",
+			    [node(),N,Node,Count,Data]);
+	Other ->
+	    ?line io:format("~p(~p): got a strange message: ~p",
+			    [node(),N,Other])
+    end,
+    client_read_loop(N+1).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
diff --git a/lib/test_server/src/things/mnesia_power_SUITE.erl b/lib/test_server/src/things/mnesia_power_SUITE.erl
new file mode 100644
index 0000000000..281dac7742
--- /dev/null
+++ b/lib/test_server/src/things/mnesia_power_SUITE.erl
@@ -0,0 +1,125 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(mnesia_power_SUITE).
+-compile([export_all]).
+%%-define(line_trace,1).
+-include("test_server.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Suite entry point: a single case.
+all(suite) -> [run].
+
+-define(iterations,3). %% nof power-off cycles to do before acceptance
+-define(rows,8).       %% nof database rows to use (not too big, please)
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% One row: three random numbers a,b,c and their checksum s = a+b+c.
+-record(sum_table_1,{row,a,b,c,s}).
+
+%% First run: create the schema and the disc-copy table, then start
+%% manipulating.
+run(suite) -> [];
+run(Config) ->
+    ?line mnesia:create_schema([node()]),
+    ?line mnesia:start(),
+    ?line mnesia:create_table([{name, sum_table_1}, {disc_copies,[node()]},
+			       {attributes,record_info(fields,sum_table_1)}]),
+    ?line run_test(Config,?iterations).
+
+%% Resume entry after a power cycle (see resume_point in run_test):
+%% restart mnesia, verify the table survived consistently, then do
+%% another cycle until N reaches 0.
+run(Config,N) ->
+    ?line mnesia:start(),
+    ?line check_consistency(sum_table_1),
+    case N of
+	0 -> ?line ok;
+	N -> ?line run_test(Config,N)
+    end.
+
+%% Starts three concurrent manipulator processes and sleeps forever,
+%% waiting for the operator to cut the power. The resume point makes
+%% the framework re-enter at run/2 with N-1 after reboot.
+%% NOTE(review): Pid1a/b/c are bound but unused (compiler warnings).
+run_test(Config,N) ->
+    ?line Pid1a = start_manipulator(sum_table_1),
+    ?line Pid1b = start_manipulator(sum_table_1),
+    ?line Pid1c = start_manipulator(sum_table_1),
+    ?line test_server:resume_point(?MODULE,run,[Config,N-1]),
+    ?line test_server:format(1,"Manipulating data like crazy now, "
+			     "power off any time..."),
+    ?line test_server:sleep(infinity).
+
+%% Spawns a linked manipulator working on Table.
+start_manipulator(Table) ->
+    ?line spawn_link(?MODULE,manipulator_init,[Table]).
+
+manipulator_init(Table) ->
+    random:seed(4711,0,0),
+    manipulator(0,Table).
+
+%% Endless transaction loop: each iteration writes a random row whose
+%% s field is the sum of a+b+c, asserts the table is non-empty, and
+%% re-checks full-table consistency every 2000 operations.
+manipulator(N,Table) ->
+    ?line Fun =
+	fun() ->
+		?line Row = random:uniform(?rows),
+		?line A = random:uniform(100000),
+		?line B = random:uniform(100000),
+		?line C = random:uniform(100000),
+		?line Sum = A+B+C,
+		?line case mnesia:write(#sum_table_1
+					{row=Row,a=A,b=B,c=C,s=Sum}) of
+			  ok -> ok;
+			  Other ->
+			      ?line io:format("Trans failed: ~p\n",[Other])
+		      end
+	end,
+    ?line mnesia:transaction(Fun),
+    case mnesia:table_info(sum_table_1,size) of
+	0 -> exit(still_empty);
+	_ -> ok
+    end,
+    case N rem 2000 of
+	0 -> io:format("~p did ~p operations",[self(),N]),
+	     check_consistency(sum_table_1);
+	_ -> ok
+    end,
+    ?line manipulator(N+1,Table).
+
+%% check_consistency(Table) -> {ok,NRows} | exits
+%% Reads the whole table in one transaction and verifies every row's
+%% checksum field (s == a+b+c).
+%% NOTE(review): in the failure branch the recursive call after
+%% exit(failed) is unreachable - exit/1 terminates the process first.
+check_consistency(Table) ->
+    io:format("Checking consistency of table ~p\n",[Table]),
+    All = mnesia:table_info(Table,wild_pattern),
+    ?line Fun =
+	fun() ->
+		mnesia:match_object(All)
+	end,
+    ?line case mnesia:transaction(Fun) of
+	      {atomic,Val} ->
+		  check_consistency_rows(Val,0);
+	      Other ->
+		  io:format("Trans failed: ~p\n",[Other]),
+		  exit(failed),
+		  check_consistency(Table)
+	  end.
+
+%% check_consistency_rows(Rows,N) -> {ok,N}
+%% Asserts s = a+b+c for each record (badmatch crash on corruption).
+check_consistency_rows([#sum_table_1{a=A,b=B,c=C,s=Sum}|Rows],N) ->
+    ?line Sum=A+B+C,
+    ?line check_consistency_rows(Rows,N+1);
+check_consistency_rows([],N) ->
+    io:format("All ~p rows were consistent\n",[N]),
+    {ok,N};
+check_consistency_rows(Thing,N) ->
+    io:format("Mnesia transaction returned:\n~p\n",[Thing]),
+    exit({bad_format,Thing}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+
+
+
diff --git a/lib/test_server/src/things/random_kill_SUITE.erl b/lib/test_server/src/things/random_kill_SUITE.erl
new file mode 100644
index 0000000000..4fcd63e3af
--- /dev/null
+++ b/lib/test_server/src/things/random_kill_SUITE.erl
@@ -0,0 +1,81 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(random_kill_SUITE).
+-compile([export_all]).
+%%-define(line_trace,1).
+-include("test_server.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Suite entry point: a single case.
+all(suite) -> [run].
+
+-define(iterations,25). %% Kill this many processes,
+                        %% possibly with reboots in between
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% NOTE(review): Config is unused (compiler warning).
+run(suite) -> [];
+run(Config) ->
+    registered(?iterations).
+
+%% Picks a random registered process each round and reports what would
+%% be killed. NOTE(review): the actual exit(Pid,kill) is commented out,
+%% so this currently only logs candidates; a resume point is set so the
+%% loop survives node reboots.
+registered(0) ->
+    ok;
+registered(N) ->
+    random:seed(3461*N,1159*N,351*N),
+    Pid = select_victim(registered),
+    test_server:resume_point(?MODULE,registered,[N-1]),
+    test_server:format("About to kill pid ~p (~p)\n~p",
+		       [Pid,process_info(Pid,registered_name),info(Pid)]),
+    %%exit(Pid,kill),
+    registered(N-1).
+
+%% Pretty-prints process info by parsing "<A.B.C>" out of the pid and
+%% feeding the three integers to c:i/3.
+info(Pid) ->
+    Rest0 = tl(pid_to_list(Pid)),
+    {P1,Rest1} = get_until($.,Rest0),
+    {P2,Rest2} = get_until($.,Rest1),
+    {P3,_} = get_until($>,Rest2),
+    c:i(list_to_integer(P1),list_to_integer(P2),list_to_integer(P3)).
+
+%% get_until(Ch,L) -> {Prefix,Tail}: splits L at the first Ch (Ch not
+%% included in either part; Tail is [] when Ch does not occur).
+%% NOTE(review): Ch in the empty-list clause is unused (warning).
+get_until(Ch,L) ->
+    get_until(Ch,L,[]).
+get_until(Ch,[],Acc) ->
+    {lists:reverse(Acc),[]};
+get_until(Ch,[Ch|T],Acc) ->
+    {lists:reverse(Acc),T};
+get_until(Ch,[H|T],Acc) ->
+    get_until(Ch,T,[H|Acc]).
+
+%% select_victim(registered) -> Pid
+%% Picks a random registered process, excluding this process and all
+%% test-server job processes.
+%% NOTE(review): the log string "Selected ~p if ~p" reads like a typo
+%% for "of" - left untouched since it is a runtime string.
+select_victim(registered) ->
+    Pids =
+	lists:map(fun(Server)-> whereis(Server) end,registered()),
+    ImmunePids =
+	[self()|lists:map(fun(Job)-> element(2,Job) end,test_server:jobs())],
+    SuitablePids =
+	lists:filter(fun(Pid)-> case lists:member(Pid,ImmunePids) of
+				    true -> false;
+				    false -> true
+				end
+		     end, Pids),
+    Selected = random:uniform(length(SuitablePids)),
+    io:format("Selected ~p if ~p",[Selected,length(SuitablePids)]),
+    lists:nth(Selected,SuitablePids).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
diff --git a/lib/test_server/src/things/soft.gs.txt b/lib/test_server/src/things/soft.gs.txt
new file mode 100644
index 0000000000..ec57884997
--- /dev/null
+++ b/lib/test_server/src/things/soft.gs.txt
@@ -0,0 +1,16 @@
+6> gs:start().
+RealTimeViolation, 478ms (after 1164 good)
+{1,<0.65.0>}
+RealTimeViolation, 352ms (after 0 good)
+RealTimeViolation, 492ms (after 0 good)
+RealTimeViolation, 166ms (after 0 good)
+RealTimeInfo, 18ms (after 7 good)
+RealTimeViolation, 115ms (after 13 good)
+7> application-specific initialization failed: couldn't connect to display ":0.0"
+RealTimeViolation, 20340ms (after 0 good)
+gs error: user backend died reason {port_handler,#Port,normal}
+
+RealTimeInfo, 31ms (after 21 good)
+RealTimeInfo, 21ms (after 69 good)
+RealTimeInfo, 21ms (after 119 good)
+RealTimeInfo, 21ms (after 169 good)
diff --git a/lib/test_server/src/things/verify.erl b/lib/test_server/src/things/verify.erl
new file mode 100644
index 0000000000..eac20c013e
--- /dev/null
+++ b/lib/test_server/src/things/verify.erl
@@ -0,0 +1,199 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(verify).
+
+-export([dir/0, dir/1]).
+
+%% usage verify:dir()
+%% or verify:dir(Dir)
+%%
+%% runs tests on all files with the extension ".t1"
+%% creates an error log file verify.log in the directory where the
+%% tests were run
+
+-import(lists, [reverse/1, foldl/3, map/2]).
+
+%% Run the verifier on all "*.t1" files in the current directory.
+dir() ->
+    dir(".").
+
+%% Run the verifier on every "*.t1" file found in Dir.
+%% Returns no_errors, {dir,Dir,had,N,errors}, or an error tuple if the
+%% directory cannot be listed.
+%% NOTE(review): 'Other' in the fold is unused (compiler warning), and
+%% the failure tuple {error, cannot,list_dir, Dir} is a 4-tuple with an
+%% oddly split reason — callers matching {error,Reason} will not catch it.
+dir(Dir) ->
+    case file:list_dir(Dir) of
+	{ok, Files} ->
+	    VFiles = collect_vers(Files, []),
+	    VFiles1 = map(fun(F) -> Dir ++ "/" ++ F end, VFiles),
+	    %% Sum error counts over all files; any unexpected result
+	    %% from file/1 is counted as one error.
+	    Nerrs = foldl(fun(F, Sum) ->
+				  case file(F) of
+				      {file,_,had,N,errors} ->
+					  Sum + N;
+				      no_errors ->
+					  Sum;
+				      Other ->
+					  Sum + 1
+				  end
+			  end, 0, VFiles1),
+	    case Nerrs of
+		0 -> no_errors;
+		_ -> {dir,Dir,had,Nerrs,errors}
+	    end;
+	_ ->
+	    {error, cannot,list_dir, Dir}
+    end.
+
+%% Collect the basenames of all files ending in ".t1" (extension is
+%% matched on the reversed name, then stripped).
+collect_vers([H|T], L) ->
+    case reverse(H) of
+	[$1,$t,$.|T1] -> collect_vers(T, [reverse(T1)|L]);
+	_ -> collect_vers(T, L)
+    end;
+collect_vers([], L) ->
+    L.
+
+%% Verify a single test file File (without the ".t1" extension).
+%% Errors are logged to File ++ ".errs"; that file is deleted again if
+%% the run produced no errors.
+%% NOTE(review): file:open/2 is called with the pre-R13 atom modes
+%% 'read'/'write' instead of mode lists [read]/[write] — still accepted,
+%% but obsolete style.
+file(File) ->
+    case file:open(File ++ ".t1", read) of
+	{ok, S} ->
+	    io:format("Verifying: ~s\n", [File]),
+	    ErrFile = File ++ ".errs",
+	    {ok, E} = file:open(ErrFile, write),
+	    Bind0 = erl_eval:new_bindings(),
+	    NErrs = do(S, {E, File, Bind0, 0}, 1),
+	    file:close(S),
+	    file:close(E),
+	    case NErrs of
+		0 ->
+		    file:delete(ErrFile),
+		    no_errors;
+		_ ->
+		    {file,File,had,NErrs,errors}
+	    end;
+	_ ->
+	    error_in_opening_file
+    end.
+
+%% Scan the next expression sequence from stream S starting at Line and
+%% dispatch the scan result. Env is {ErrStream, File, Bindings, NErrs}.
+do(S, Env, Line) ->
+    R = io:scan_erl_exprs(S, '', Line),
+    do1(R, S, Env).
+
+%% Handle one io:scan_erl_exprs/3 result: at eof return the accumulated
+%% error count; on a successful scan evaluate the tokens and recurse;
+%% on a scan error report it, bump the error count and continue.
+do1({eof,_}, _, {_,_,_,NErrs}) ->
+    NErrs;
+do1({ok,Toks,Next}, S, Env0) ->
+    E1 = handle_toks(Toks, Next, Env0),
+    do(S, E1, Next);
+do1({error, {Line,Mod,Args}, Next}, S, E) ->
+    io:format("*** ~w ~p~n", [Line,Mod:format_error(Args)]),
+    E1 = add_error(E),
+    do(S, E1, Next).
+
+%% Increment the error counter in the environment tuple.
+add_error({Stream, File, Bindings, N}) -> {Stream, File, Bindings, N+1}.
+
+%% Parse a scanned token list and evaluate the resulting expressions.
+%% Parse failures are reported and counted; the (possibly updated)
+%% environment tuple is returned.
+handle_toks(Toks, Line, Env0) ->
+    %% io:format("Toks:~p\n", [Toks]).
+    case erl_parse:parse_exprs(Toks) of
+	{ok, Exprs} ->
+	    %% io:format("Got:~p\n", [Exprs]),
+	    eval(Exprs, Line, Env0);
+	{error, {LineNo, Mod, What}} ->
+	    Str = apply(Mod, format_error, [What]),
+	    io:format("*** Line:~w ***~s\n", [LineNo, Str]),
+	    add_error(Env0);
+	Parse_error ->
+	    io:format("Parse Error:~p\n",[Parse_error]),
+	    add_error(Env0)
+    end.
+
+%% Shell-style f/0-f/1 emulation: f(Var) removes one binding, f()
+%% clears all bindings.
+%% NOTE(review): only a single-variable argument list or [] is handled;
+%% f(A, B) would cause a function_clause crash — confirm that is intended.
+forget([{var,_,Name}], B0) -> erl_eval:del_binding(Name, B0);
+forget([], _) -> erl_eval:new_bindings().
+
+%% Evaluate parsed expressions against the current bindings. A failing
+%% evaluation is reported to both stdout and the error file, with the
+%% offending bindings, left-hand side and re-evaluated right-hand side.
+%% NOTE(review): the first clause matches a 3-tuple environment, but the
+%% environment everywhere else is the 4-tuple {Stream,File,Bindings,N} —
+%% so the f(...) special case can never match and calls to f/1 fall
+%% through to the general clause instead. Looks like a latent bug.
+%% NOTE(review): 'Line' in the second clause is unused (compiler warning).
+eval([{call,_,{atom,_,f}, Args}], _, {Stream, Bind0, Errs}) ->
+    Bind1 = forget(Args, Bind0),
+    {Stream, Bind1, Errs};
+eval(Exprs, Line, {Stream, File, Bind0, NErrs}) ->
+    %% io:format("Bindings >> ~p\n", [Bind0]),
+    %% io:format("Exprs >> ~p\n", [Exprs]),
+    case catch erl_eval:exprs(Exprs, Bind0) of
+	{'EXIT', Reason} ->
+	    out_both(Stream, "----------------------------------~n", []),
+	    out_both(Stream, "File:~s Error in:~s~n", [File, pp(Exprs)]),
+	    print_bindings(Stream, Exprs, Bind0),
+	    print_lhs(Stream, Exprs),
+	    out_both(Stream, '*** Rhs evaluated to:~p~n',[rhs(Exprs, Bind0)]),
+	    {Stream, File, Bind0, NErrs+1};
+	{value, _, Bind1} ->
+	    {Stream, File, Bind1, NErrs}
+    end.
+
+%% Pretty-print a list of abstract expressions, comma-separated.
+pp([H]) -> erl_pp:expr(H);
+pp([H|T]) -> [erl_pp:expr(H),$,|pp(T)];
+pp([]) -> [].
+
+%% Print the values of all variables occurring in Form to stream E.
+%% No-op (returns true) if the form contains no variables.
+print_bindings(E, Form, Bindings) ->
+    case varsin(Form) of
+	[] ->
+	    true;
+	Vars ->
+	    print_vars(E, Vars, Bindings)
+    end.
+
+%% Print "Var = Value" (or an unbound marker) for each variable, to
+%% both stdout and stream E via out_both/3.
+print_vars(E, [Var|T], Bindings) ->
+    case erl_eval:binding(Var, Bindings) of
+	{value, Val} ->
+	    out_both(E, '~s = ~p\n',[Var, Val]);
+	unbound ->
+	    out_both(E, '~s *is unbound*\n', [Var])
+    end,
+    print_vars(E, T, Bindings);
+print_vars(_, [], _) ->
+    true.
+
+
+%% Emit the same formatted message to standard output and to stream E.
+out_both(E, Format, Data) ->
+    io:format(Format, Data),
+    io:format(E, Format, Data).
+
+%% Print the left-hand side of a failed match expression; anything that
+%% is not a single match prints an UNDEFINED marker.
+%% NOTE(review): 'Rhs' in the first clause is unused (compiler warning).
+print_lhs(E, [{match, _, Lhs, Rhs}]) ->
+    %% io:format(">>>> here:~w\n",[Lhs]),
+    out_both(E, '*** Lhs was:~s\n',[erl_pp:expr(Lhs)]);
+print_lhs(E, _) ->
+    out_both(E, '** UNDEFINED **', []).
+
+
+%% Re-evaluate just the right-hand side of a failed match so its value
+%% can be reported; returns the '** UNDEFINED **' atom when evaluation
+%% fails or the expression is not a single match.
+rhs([{match, _, Lhs, Rhs}], Bindings) ->
+    case catch erl_eval:exprs([Rhs], Bindings) of
+	{value, Val, _} -> Val;
+	Other -> undefined()
+    end;
+rhs(_, _) ->
+    undefined().
+
+%% Collect (without duplicates) all named variables occurring in an
+%% abstract form; the anonymous variable '_' is skipped.
+%% NOTE(review): 'when tuple(T)' is the obsolete pre-R10 guard form;
+%% modern code would write is_tuple(T). Left as-is in this patch.
+varsin(X) -> varsin(X, []).
+
+varsin({var,_,'_'}, L) ->
+    L;
+varsin({var,_,V}, L) ->
+    case lists:member(V, L) of
+	true -> L;
+	false -> [V|L]
+    end;
+varsin([H|T], L) ->
+    varsin(T, varsin(H, L));
+varsin(T, L) when tuple(T) ->
+    varsin(tuple_to_list(T), L);
+varsin(_, L) ->
+    L.
+
+%% Marker atom printed when a value could not be determined.
+undefined() ->
+    '** UNDEFINED **'.
diff --git a/lib/test_server/src/ts.config b/lib/test_server/src/ts.config
new file mode 100644
index 0000000000..30ef25a0b8
--- /dev/null
+++ b/lib/test_server/src/ts.config
@@ -0,0 +1,45 @@
+%% -*- erlang -*-
+{ipv6_hosts,[otptest06,otptest08,sauron,iluvatar]}.
+
+%%% Change these to suit the environment.
+%%% test_hosts are looked up using "ypmatch xx yy zz hosts"
+{test_hosts,
+ [bingo, hurin, turin, gandalf, super,
+ merry, nenya, sam, elrond, isildur]}.
+
+%% IPv4 host only - no ipv6 entry must exist!
+{test_host_ipv4_only,
+ {"isildur", %Short hostname
+ "isildur.du.uab.ericsson.se", %Long hostname
+ "134.138.177.24", %IP string
+ {134,138,177,24}, %IP tuple
+ ["isildur"], %Any aliases
+   "::ffff:134.138.177.24",		%IPv6 string (compatibility addr)
+ {0,0,0,0,0,65535,34442,45336} %IPv6 tuple
+ }}.
+
+{test_host_ipv6_only,
+ {"otptest06", %Short hostname
+ "otptest06.du.uab.ericsson.se", %Long hostname
+ "fec0::a00:20ff:feb2:b4a9", %IPv6 string
+ {65216,0,0,0,2560,8447,65202,46249}, %IPv6 tuple
+ ["otptest06-ip6"] %Aliases.
+ }}.
+
+
+
+{test_dummy_host, {"dummy",
+ "dummy.du.uab.ericsson.se",
+ "192.138.177.1",
+ {192,138,177,1},
+ ["dummy"],
+ "::ffff:192.138.177.1",
+ {0,0,0,0,0,65535,49290,45313}
+ }}.
+
+{test_dummy_ipv6_host, {"dummy6",
+ "dummy6.du.uab.ericsson.se",
+ "fec0::a00:20ff:feb2:6666",
+ {65216,0,0,0,2560,8447,65202,26214},
+ ["dummy6-ip6"]
+ }}.
diff --git a/lib/test_server/src/ts.erl b/lib/test_server/src/ts.erl
new file mode 100644
index 0000000000..1b750c3858
--- /dev/null
+++ b/lib/test_server/src/ts.erl
@@ -0,0 +1,695 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : ts.erl
+%%% Purpose : Frontend for running tests.
+%%%-------------------------------------------------------------------
+
+-module(ts).
+
+-export([run/0, run/1, run/2, run/3, run/4,
+ clean/0, clean/1,
+ tests/0, tests/1,
+ install/0, install/1, install/2, index/0,
+ estone/0, estone/1,
+ cross_cover_analyse/1,
+ help/0]).
+-export([i/0, l/1, r/0, r/1, r/2, r/3]).
+
+%%%----------------------------------------------------------------------
+%%% This module, ts, is the interface to all of the functionality of
+%%% the TS framework. The picture below shows the relationship of
+%%% the modules:
+%%%
+%%% +-- ts_install --+------ ts_autoconf_win32
+%%% | |
+%%% | +------ ts_autoconf_vxworks
+%%% |
+%%% ts ---+ +------ ts_erl_config
+%%% | | ts_lib
+%%% | +------ ts_make
+%%% | |
+%%% +-- ts_run -----+
+%%% | ts_filelib
+%%% +------ ts_make_erl
+%%% |
+%%% +------ ts_reports (indirectly)
+%%%
+%%%
+%%%
+%%% The modules ts_lib and ts_filelib contains utilities used by
+%%% the other modules.
+%%%
+%%% Module Description
+%%% ------ -----------
+%%% ts Frontend to the test server framework. Contains all
+%%% interface functions.
+%%% ts_install Installs the test suite. On Unix, `autoconf' is
+%%% is used; on Windows, ts_autoconf_win32 is used,
+%%% on VxWorks, ts_autoconf_vxworks is used.
+%%% The result is written to the file `variables'.
+%%% ts_run Supervises running of the tests.
+%%% ts_autoconf_win32	An `autoconf' for Windows.
+%%% ts_autoconf_cross_env `autoconf' for other platforms (cross environment)
+%%% ts_erl_config Finds out information about the Erlang system,
+%%% for instance the location of erl_interface.
+%%% This works for either an installed OTP or an Erlang
+%%% system running from Clearcase.
+%%% ts_make Interface to run the `make' program on Unix
+%%% and other platforms.
+%%% ts_make_erl	A corrected version of the standard Erlang module
+%%% make (used for rebuilding test suites).
+%%% ts_reports Generates index pages in HTML, providing a summary
+%%% of the tests run.
+%%% ts_lib		Miscellaneous utility functions, each used by several
+%%% other modules.
+%%%----------------------------------------------------------------------
+
+-include_lib("kernel/include/file.hrl").
+-include("ts.hrl").
+
+%% Help text shown for the install functions; shared by both branches
+%% of help/1. (Typos such as "theese"/"autmatically" are user-visible
+%% runtime strings and are deliberately not edited here.)
+-define(
+   install_help,
+   [
+    " ts:install() - Install TS for local target with no Options.\n"
+    " ts:install([Options])\n",
+    "   - Install TS for local target with Options\n"
+    " ts:install({Architecture, Target_name})\n",
+    "   - Install TS for a remote target architecture.\n",
+    "     and target network name (e.g. {vxworks_cpu32, sauron}).\n",
+    " ts:install({Architecture, Target_name}, [Options])\n",
+    "   - Install TS as above, and with Options.\n",
+    "\n",
+    "Installation options supported:\n",
+    " {longnames, true} - Use fully qualified hostnames\n",
+    " {hosts, [HostList]}\n"
+    "   - Use theese hosts for distributed testing.\n"
+    " {verbose, Level} - Sets verbosity level for TS output (0,1,2), 0 is\n"
+    "   quiet(default).\n"
+    " {slavetargets, SlaveTarges}\n"
+    "   - Available hosts for starting slave nodes for\n"
+    "     platforms which cannot have more than one erlang\n"
+    "     node per host.\n"
+    " {crossroot, TargetErlRoot}\n"
+    "   - Erlang root directory on target host\n"
+    "     Mandatory for remote targets\n"
+    " {master, {MasterHost, MasterCookie}}\n"
+    "   - Master host and cookie for targets which are\n"
+    "     started as slave nodes (i.e. OSE/Delta targets\n"
+    "     erl_boot_server must be started on master before\n"
+    "     test is run.\n"
+    "     Optional, default is controller host and then\n"
+    "     erl_boot_server is started autmatically\n"
+   ]).
+
+%% Print usage help; chooses the installed/uninstalled variant based on
+%% whether the 'variables' file created by ts:install exists.
+help() ->
+    case filelib:is_file(?variables) of
+	false -> help(uninstalled);
+	true -> help(installed)
+    end.
+
+%% Render the help text: a short install prompt when not installed,
+%% otherwise the full run/support-function summary. Both variants end
+%% with the shared ?install_help text.
+help(uninstalled) ->
+    H = ["TS is not installed yet.  To install use:\n\n"],
+    show_help([H,?install_help]);
+help(installed) ->
+    H = ["Run functions:\n",
+	 " ts:run()          - Run all available tests.\n",
+	 " ts:run(Spec)      - Run all tests in given test spec file.\n",
+	 "                     The spec file is actually ../*_test/Spec.spec\n",
+	 " ts:run([Specs])   - Run all tests in all given test spec files.\n",
+	 " ts:run(Spec, Mod) - Run a single test suite.\n",
+	 " ts:run(Spec, Mod, Case)\n",
+	 "                   - Run a single test case.\n",
+	 " All above run functions can have the additional Options argument\n",
+	 " which is a list of options.\n",
+	 "\n",
+	 "Run options supported:\n",
+	 " batch             - Do not start a new xterm\n",
+	 " {verbose, Level}  - Same as the verbosity option for install\n",
+	 " verbose           - Same as {verbose, 1}\n",
+	 " {vars, Vars}      - Variables in addition to the 'variables' file\n",
+	 "                     Can be any of the install options\n",
+	 " {trace, TraceSpec}- Start call trace on target and slave nodes\n",
+	 "                     TraceSpec is the name of a file containing\n",
+	 "                     trace specifications or a list of trace\n",
+	 "                     specification elements.\n",
+	 "\n",
+	 "Supported trace information elements\n",
+	 " {tp | tpl, Mod, [] | match_spec()}\n",
+	 " {tp | tpl, Mod, Func, [] | match_spec()}\n",
+	 " {tp | tpl, Mod, Func, Arity, [] | match_spec()}\n",
+	 " {ctp | ctpl, Mod}\n",
+	 " {ctp | ctpl, Mod, Func}\n",
+	 " {ctp | ctpl, Mod, Func, Arity}\n",
+	 "\n",
+	 "Support functions\n",
+	 " ts:tests()        - Shows all available families of tests.\n",
+	 " ts:tests(Spec)    - Shows all available test modules in Spec,\n",
+	 "                     i.e. ../Spec_test/*_SUITE.erl\n",
+	 " ts:index()        - Updates local index page.\n",
+	 " ts:clean()        - Cleans up all but the last tests run.\n",
+	 " ts:clean(all)     - Cleans up all test runs found.\n",
+	 " ts:estone()       - Run estone_SUITE in kernel application with\n"
+	 "                     no run options\n",
+	 " ts:estone(Opts)   - Run estone_SUITE in kernel application with\n"
+	 "                     the given run options\n",
+	 " ts:cross_cover_analyse(Level)\n"
+	 "                   - Used after ts:run with option cover or \n"
+	 "                     cover_details. Analyses modules specified in\n"
+	 "                     cross.cover.\n"
+	 "                     Level can be 'overview' or 'details'.\n",
+	 " \n"
+	 "Installation (already done):\n"
+	],
+    show_help([H,?install_help]).
+
+%% Flatten the (possibly nested) help iolist and print it.
+show_help(H) ->
+    io:put_chars(lists:flatten(H)).
+
+
+%% Installs tests.
+%% Install the test suites, delegating to ts_install:install/2.
+%% install/0 installs for the local target with no options; install/1
+%% takes either an options list or a remote {Architecture, TargetName}
+%% pair; install/2 combines a remote target with options.
+install() ->
+    ts_install:install(install_local,[]).
+install({Architecture, Target_name}) ->
+    ts_install:install({ts_lib:maybe_atom_to_list(Architecture),
+			ts_lib:maybe_atom_to_list(Target_name)}, []);
+install(Options) when is_list(Options) ->
+    ts_install:install(install_local,Options).
+install({Architecture, Target_name}, Options) when is_list(Options)->
+    ts_install:install({ts_lib:maybe_atom_to_list(Architecture),
+			ts_lib:maybe_atom_to_list(Target_name)}, Options).
+
+%% Updates the local index page.
+
+%% Regenerate the local HTML index page (only if TS is installed for
+%% the current platform — see check_and_run/1).
+index() ->
+    check_and_run(fun(_Vars) -> ts_reports:make_index(), ok end).
+
+%%
+%% clean(all)
+%% Deletes all logfiles.
+%%
+%% clean(all): delete every "*.logs" directory found in the CWD.
+clean(all) ->
+    delete_files(filelib:wildcard("*" ++ ?logdir_ext)).
+
+%% clean/0
+%%
+%% Cleans up run logfiles, all but the last run.
+clean() ->
+    clean1(filelib:wildcard("*" ++ ?logdir_ext)).
+
+%% For each log directory, delete all "run.*" entries except the newest
+%% (the sort is on the name, which embeds the run timestamp).
+clean1([Dir|Dirs]) ->
+    List0 = filelib:wildcard(filename:join(Dir, "run.*")),
+    case lists:reverse(lists:sort(List0)) of
+	[] -> ok;
+	[_Last|Rest] -> delete_files(Rest)
+    end,
+    clean1(Dirs);
+clean1([]) -> ok.
+
+%% run/0
+%% Runs all specs found by ts:tests(), if any, or returns
+%% {error, no_tests_available}. (batch)
+%% Run every test spec found by ts:tests() in batch mode, or return
+%% {error, no_tests_available} when there are none.
+run() ->
+    case ts:tests() of
+	[] ->
+	    {error, no_tests_available};
+	_ ->
+	    check_and_run(fun(Vars) -> run_all(Vars) end)
+    end.
+%% Helper invoked with the installation variables (unused here).
+run_all(_Vars) ->
+    run_some(tests(), [batch]).
+
+%% Run a list of test specs sequentially with the same options,
+%% printing (but not propagating) per-spec failures.
+run_some([], _Opts) ->
+    ok;
+run_some([Spec|Specs], Opts) ->
+    case run(Spec, Opts) of
+	ok -> ok;
+	Error -> io:format("~p: ~p~n",[Spec,Error])
+    end,
+    run_some(Specs, Opts).
+
+%% Runs one test spec (interactive).
+%% run/1: a single atom runs that test spec interactively; the special
+%% list [all_tests|Config] (usable from the command line via
+%% "erl -s ts run all_tests ...") runs everything in batch mode and may
+%% trigger cross-cover analysis; any other list runs each spec in batch.
+run(Testspec) when is_atom(Testspec) ->
+    Options=check_test_get_opts(Testspec, []),
+    File = atom_to_list(Testspec),
+    run_test(File, ["SPEC current.spec NAME ",File], Options);
+
+%% This can be used from command line, e.g.
+%% erl -s ts run all_tests <config>
+%% When using the all_tests flag and running with cover, one can also
+%% use the cross_cover_analysis flag.
+run([all_tests|Config0]) ->
+    AllAtomsFun = fun(X) when is_atom(X) -> true;
+		     (_) -> false
+		  end,
+    Config1 =
+	case lists:all(AllAtomsFun,Config0) of
+	    true ->
+		%% Could be from command line; each atom is then a
+		%% stringified Erlang term that must be parsed back.
+		lists:map(fun(Conf)->to_erlang_term(Conf) end,Config0)--[batch];
+	    false ->
+		Config0--[batch]
+	end,
+    %% Make sure there is exactly one occurrence of 'batch'
+    Config2 = [batch|Config1],
+
+    R = run(tests(),Config2),
+
+    case check_for_cross_cover_analysis_flag(Config2) of
+	false ->
+	    ok;
+	Level ->
+	    cross_cover_analyse(Level)
+    end,
+
+    R;
+
+%% ts:run(ListOfTests)
+run(List) when is_list(List) ->
+    run(List, [batch]).
+
+%% run/2: list+options runs each spec; atom+options runs one spec with
+%% those options; atom+atom runs a single suite module interactively.
+run(List, Opts) when is_list(List), is_list(Opts) ->
+    run_some(List, Opts);
+
+%% run/2
+%% Runs one test spec with Options
+run(Testspec, Config) when is_atom(Testspec), is_list(Config) ->
+    Options=check_test_get_opts(Testspec, Config),
+    File=atom_to_list(Testspec),
+    run_test(File, ["SPEC current.spec NAME ", File], Options);
+%% Runs one module in a spec (interactive)
+run(Testspec, Mod) when is_atom(Testspec), is_atom(Mod) ->
+    run_test({atom_to_list(Testspec), Mod},
+	     ["SPEC current.spec NAME ", atom_to_list(Mod)],
+	     [interactive]).
+
+%% run/3
+%% Run one module in a spec with Config
+%% run/3: spec+module+options runs one suite with options; three atoms
+%% run a single test case (with default options).
+run(Testspec,Mod,Config) when is_atom(Testspec), is_atom(Mod), is_list(Config) ->
+    Options=check_test_get_opts(Testspec, Config),
+    run_test({atom_to_list(Testspec), Mod},
+	     ["SPEC current.spec NAME ", atom_to_list(Mod)],
+	     Options);
+
+%% Runs one testcase in a module.
+run(Testspec, Mod, Case) when is_atom(Testspec), is_atom(Mod), is_atom(Case) ->
+    Options=check_test_get_opts(Testspec, []),
+    Args = ["CASE ",atom_to_list(Mod)," ",atom_to_list(Case)],
+    run_test(atom_to_list(Testspec), Args, Options).
+
+%% run/4
+%% Run one testcase in a module with Options.
+%% run/4: run one test case in one module with run options.
+run(Testspec, Mod, Case, Config) when is_atom(Testspec), is_atom(Mod), is_atom(Case), is_list(Config) ->
+    Options=check_test_get_opts(Testspec, Config),
+    Args = ["CASE ",atom_to_list(Mod), " ",atom_to_list(Case)],
+    run_test(atom_to_list(Testspec), Args, Options).
+
+%% Check testspec to be valid and get possible Options
+%% from the config.
+%% Validate Testspec (exits if unknown) and translate the user Config
+%% list into the flat option list expected by ts_run.
+check_test_get_opts(Testspec, Config) ->
+    validate_test(Testspec),
+    Mode = configmember(batch, {batch, interactive}, Config),
+    Vars = configvars(Config),
+    Trace = configtrace(Config),
+    KeepTopcase = configmember(keep_topcase, {keep_topcase,[]}, Config),
+    Cover = configcover(Testspec,Config),
+    lists:flatten([Vars,Mode,Trace,KeepTopcase,Cover]).
+
+%% Parse an atom (typically a command-line argument) back into the
+%% Erlang term its text represents; crashes on unparsable input.
+to_erlang_term(Atom) ->
+    String = atom_to_list(Atom),
+    {ok, Tokens, _} = erl_scan:string(lists:append([String, ". "])),
+    {ok, Term} = erl_parse:parse_term(Tokens),
+    Term.
+
+%% Validate that a Testspec really is a testspec,
+%% and exit if not.
+%% Ensure Testspec is one of the locally available specs; otherwise
+%% print a hint and send this process an exit signal.
+%% NOTE(review): exit(self(), Reason) sends an exit *signal* rather than
+%% raising — subtly different from exit/1; presumably intentional here.
+validate_test(Testspec) ->
+    case lists:member(Testspec, tests()) of
+	true ->
+	    ok;
+	false ->
+	    io:format("This testspec does not seem to be "
+		      "available.~n Please try ts:tests() "
+		      "to see available tests.~n"),
+	    exit(self(), {error, test_not_available})
+    end.
+
+%% Build the {vars, ...} option: the special shortcut vars derived from
+%% Config take precedence; user-supplied {vars, List} entries whose key
+%% already appears among the special vars are dropped.
+configvars(Config) ->
+    case lists:keysearch(vars, 1, Config) of
+	{value, {vars, List}} ->
+	    List0 = special_vars(Config),
+	    Key = fun(T) -> element(1,T) end,
+	    DelDupList =
+		lists:filter(fun(V) ->
+				     case lists:keysearch(Key(V),1,List0) of
+					 {value,_} -> false;
+					 _ -> true
+				     end
+			     end, List),
+	    {vars, [List0|DelDupList]};
+	_ ->
+	    {vars, special_vars(Config)}
+    end.
+
+%% Allow some shortcuts in the Options...
+%% Expand option shortcuts into variable tuples: 'verbose'/{verbose,L}
+%% (default level 0), {diskless,true}, and testcase_callback in either
+%% {M,F} or single-term form.
+special_vars(Config) ->
+    SpecVars =
+	case lists:member(verbose, Config) of
+	    true ->
+		[{verbose, 1}];
+	    false ->
+		case lists:keysearch(verbose, 1, Config) of
+		    {value, {verbose, Lvl}} ->
+			[{verbose, Lvl}];
+		    _ ->
+			[{verbose, 0}]
+		end
+	end,
+    SpecVars1 =
+	case lists:keysearch(diskless, 1, Config) of
+	    {value,{diskless, true}} ->
+		[{diskless, true} | SpecVars];
+	    _ ->
+		SpecVars
+	end,
+    case lists:keysearch(testcase_callback, 1, Config) of
+	{value,{testcase_callback, CBM, CBF}} ->
+	    [{ts_testcase_callback, {CBM,CBF}} | SpecVars1];
+	{value,{testcase_callback, CB}} ->
+	    [{ts_testcase_callback, CB} | SpecVars1];
+	_ ->
+	    SpecVars1
+    end.
+
+%% Extract the whole {trace, TraceSpec} tuple from Config, or [].
+configtrace(Config) ->
+    case lists:keysearch(trace,1,Config) of
+	{value,Value} -> Value;
+	false -> []
+    end.
+
+%% Translate the first cover-related option found into a
+%% {cover, Testspec, CoverFile, Level} tuple; [] if none present.
+configcover(Testspec,[cover|_]) ->
+    {cover,Testspec,default_coverfile(Testspec),overview};
+configcover(Testspec,[cover_details|_]) ->
+    {cover,Testspec,default_coverfile(Testspec),details};
+configcover(Testspec,[{cover,File}|_]) ->
+    {cover,Testspec,File,overview};
+configcover(Testspec,[{cover_details,File}|_]) ->
+    {cover,Testspec,File,details};
+configcover(Testspec,[_H|T]) ->
+    configcover(Testspec,T);
+configcover(_Testspec,[]) ->
+    [].
+
+%% Return ../<Testspec>_test/<Testspec>.cover if that file exists,
+%% otherwise the atom 'none'.
+default_coverfile(Testspec) ->
+    {ok,Cwd} = file:get_cwd(),
+    CoverFile = filename:join([filename:dirname(Cwd),
+			       atom_to_list(Testspec)++"_test",
+			       atom_to_list(Testspec)++".cover"]),
+    case filelib:is_file(CoverFile) of
+	true ->
+	    CoverFile;
+	false ->
+	    none
+    end.
+
+%% Map membership of Member in Config onto a caller-chosen pair of
+%% values (True if present, False otherwise).
+configmember(Member, {True, False}, Config) ->
+    case lists:member(Member, Config) of
+	true ->
+	    True;
+	false ->
+	    False
+    end.
+
+
+%% Scan Config for the combination of a cover option (cover or
+%% cover_details) and the cross_cover_analysis flag; returns the cover
+%% level (overview | details) when both are present, in either order,
+%% and false otherwise. The two accumulators carry the level seen so
+%% far and whether the flag has been seen.
+check_for_cross_cover_analysis_flag(Config) ->
+    check_for_cross_cover_analysis_flag(Config,false,false).
+check_for_cross_cover_analysis_flag([cover|Config],false,false) ->
+    check_for_cross_cover_analysis_flag(Config,overview,false);
+check_for_cross_cover_analysis_flag([cover|_Config],false,true) ->
+    overview;
+check_for_cross_cover_analysis_flag([cover_details|Config],false,false) ->
+    check_for_cross_cover_analysis_flag(Config,details,false);
+check_for_cross_cover_analysis_flag([cover_details|_Config],false,true) ->
+    details;
+check_for_cross_cover_analysis_flag([cross_cover_analysis|Config],false,_) ->
+    check_for_cross_cover_analysis_flag(Config,false,true);
+check_for_cross_cover_analysis_flag([cross_cover_analysis|_Config],Level,_) ->
+    Level;
+check_for_cross_cover_analysis_flag([_|Config],Level,CrossFlag) ->
+    check_for_cross_cover_analysis_flag(Config,Level,CrossFlag);
+check_for_cross_cover_analysis_flag([],_,_) ->
+    false.
+
+%% Returns a list of available test suites.
+
+%% tests/0 lists available test spec names; tests/1 lists the suite
+%% modules of one spec. Both delegate to ts_lib relative to the CWD.
+tests() ->
+    {ok, Cwd} = file:get_cwd(),
+    ts_lib:specs(Cwd).
+
+tests(Spec) ->
+    {ok, Cwd} = file:get_cwd(),
+    ts_lib:suites(Cwd, atom_to_list(Spec)).
+
+
+%%
+%% estone/0, estone/1
+%% Opts = same as Opts or Config for the run(...) function,
+%% e.g. [batch]
+%%
+%% Convenience wrappers: run the EStone benchmark suite, optionally
+%% with the same options accepted by run/3.
+estone() -> run(emulator,estone_SUITE).
+estone(Opts) when is_list(Opts) -> run(emulator,estone_SUITE,Opts).
+
+%%
+%% cross_cover_analyse/1
+%% Level = details | overview
+%% Can be called on any node after a test (with cover) is
+%% completed. The node's current directory must be the same as when
+%% the tests were run.
+%%
+%% Run cross-cover analysis at Level (details | overview); the
+%% single-element-list clause supports "erl -s" command-line invocation.
+cross_cover_analyse([Level]) ->
+    cross_cover_analyse(Level);
+cross_cover_analyse(Level) ->
+    test_server_ctrl:cross_cover_analyse(Level).
+
+
+%%% Implementation.
+
+%% Read the installation 'variables' file and, if present and readable,
+%% pass the parsed variables on to check_and_run/2; distinguish a
+%% missing installation (posix atom) from a corrupt file.
+check_and_run(Fun) ->
+    case file:consult(?variables) of
+	{ok, Vars} ->
+	    check_and_run(Fun, Vars);
+	{error, Error} when is_atom(Error) ->
+	    {error, not_installed};
+	{error, Reason} ->
+	    {error, {bad_installation, file:format_error(Reason)}}
+    end.
+
+%% Verify that the installation was made for the current platform id
+%% before invoking Fun(Vars); re-raise any exit from Fun, and report
+%% platform mismatches or a missing platform_id entry as errors.
+check_and_run(Fun, Vars) ->
+    Platform = ts_install:platform_id(Vars),
+    case lists:keysearch(platform_id, 1, Vars) of
+	{value, {_, Platform}} ->
+	    case catch apply(Fun, [Vars]) of
+		{'EXIT', Reason} ->
+		    exit(Reason);
+		Other ->
+		    Other
+	    end;
+	{value, {_, OriginalPlatform}} ->
+	    io:format("These test suites were installed for '~s'.\n",
+		      [OriginalPlatform]),
+	    io:format("But the current platform is '~s'.\nPlease "
+		      "install for this platform before running "
+		      "any tests.\n", [Platform]),
+	    {error, inconsistent_platforms};
+	false ->
+	    {error, {bad_installation, no_platform}}
+    end.
+
+%% Start one test run through ts_run, after the usual installation /
+%% platform consistency check.
+run_test(File, Args, Options) ->
+    check_and_run(fun(Vars) -> run_test(File, Args, Options, Vars) end).
+
+run_test(File, Args, Options, Vars) ->
+    ts_run:run(File, Args, Options, Vars).
+
+
+%% Best-effort recursive delete. eperm is taken to mean "directory":
+%% make it writable, delete its contents recursively, then remove it.
+%% eacces is retried once after chmod (with a yield in between); any
+%% other error is silently ignored — this is deliberate cleanup code.
+delete_files([]) -> ok;
+delete_files([Item|Rest]) ->
+    case file:delete(Item) of
+	ok ->
+	    delete_files(Rest);
+	{error,eperm} ->
+	    file:change_mode(Item, 8#777),
+	    delete_files(filelib:wildcard(filename:join(Item, "*"))),
+	    file:del_dir(Item),
+	    ok;
+	{error,eacces} ->
+	    %% We'll see about that!
+	    file:change_mode(Item, 8#777),
+	    case file:delete(Item) of
+		ok -> ok;
+		{error,_} ->
+		    erlang:yield(),
+		    file:change_mode(Item, 8#777),
+		    file:delete(Item),
+		    ok
+	    end;
+	{error,_} -> ok
+    end,
+    delete_files(Rest).
+
+
+%% This module provides some convenient shortcuts to running
+%% the test server from within a started Erlang shell.
+%% (These are here for backwards compatibility.)
+%%
+%% r()
+%% r(Opts)
+%% r(SpecOrMod)
+%% r(SpecOrMod, Opts)
+%% r(Mod, Case)
+%% r(Mod, Case, Opts)
+%% Each of these functions starts the test server if it
+%% isn't already running, then runs the test case(s) selected
+%% by the arguments.
+%% SpecOrMod can be a module name or the name of a test spec file,
+%% with the extension .spec or .spec.OsType. The module Mod will
+%% be reloaded before running the test cases.
+%% Opts = [Opt],
+%% Opt = {Cover,AppOrCoverFile} | {Cover,App,CoverFile}
+%% Cover = cover | cover_details
+%% AppOrCoverFile = App | CoverFile
+%% App = atom(), an application name
+%% CoverFile = string(), name of a cover file
+%% (see doc of test_server_ctrl:cover/2/3)
+%%
+%% i()
+%% Shows information about the jobs being run, by dumping
+%% the process information for the test_server.
+%%
+%% l(Mod)
+%% This function reloads a module just like c:l/1, but works
+%% even for a module in one of the sticky library directories
+%% (for instance, lists can be reloaded).
+
+%% Runs all tests cases in the current directory.
+
+%% r/0 and r/1: shell shortcuts. A list of option atoms adds the whole
+%% current directory as a test job; any other argument is treated as a
+%% spec file or module (r/1 second clause).
+%% NOTE(review): r() calls r([]), but the guard is_atom(hd([])) fails
+%% for the empty list, so r() falls through to the SpecOrMod clause
+%% instead of add_dir — looks unintended; confirm against callers.
+r() ->
+    r([]).
+r(Opts) when is_list(Opts), is_atom(hd(Opts)) ->
+    ensure_ts_started(Opts),
+    test_server_ctrl:add_dir("current_dir", ".");
+
+%% Checks if argument is a spec file or a module
+%% (spec file must be named "*.spec" or "*.spec.OsType")
+%% If module, reloads module and runs all test cases in it.
+%% If spec, runs all test cases in it.
+
+r(SpecOrMod) ->
+    r(SpecOrMod,[]).
+%% r/2: dispatch on the file extension — no extension means a module
+%% (reload and add it), ".spec" or ".spec.OsType" means a spec file;
+%% the second clause runs one test case with default options.
+r(SpecOrMod,Opts) when is_list(Opts) ->
+    ensure_ts_started(Opts),
+    case filename:extension(SpecOrMod) of
+	[] ->
+	    l(SpecOrMod),
+	    test_server_ctrl:add_module(SpecOrMod);
+	".spec" ->
+	    test_server_ctrl:add_spec(SpecOrMod);
+	_ ->
+	    Spec2 = filename:rootname(SpecOrMod),
+	    case filename:extension(Spec2) of
+		".spec" ->
+		    %% *.spec.Type
+		    test_server_ctrl:add_spec(SpecOrMod);
+		_ ->
+		    {error, unknown_filetype}
+	    end
+    end;
+
+%% Reloads the given module and runs the given test case in it.
+
+r(Mod, Case) ->
+    r(Mod,Case,[]).
+%% r/3: reload Mod and queue one test case with the given options.
+r(Mod, Case, Opts) ->
+    ensure_ts_started(Opts),
+    l(Mod),
+    test_server_ctrl:add_case(Mod, Case).
+
+%% Shows information about the jobs being run.
+
+%% i/0: print a table of the running test jobs; the per-job counters
+%% are fetched from each job process's dictionary (keys written by the
+%% test server: test_server_case_num, _cases, _failed, _skipped, _ok).
+i() ->
+    ensure_ts_started([]),
+    hformat("Job", "Current", "Total", "Success", "Failed", "Skipped"),
+    i(test_server_ctrl:jobs()).
+
+i([{Name, Pid}|Rest]) when is_pid(Pid) ->
+    {dictionary, PI} = process_info(Pid, dictionary),
+    {value, {_, CaseNum}} = lists:keysearch(test_server_case_num, 1, PI),
+    {value, {_, Cases}} = lists:keysearch(test_server_cases, 1, PI),
+    {value, {_, Failed}} = lists:keysearch(test_server_failed, 1, PI),
+    {value, {_, {UserSkipped,AutoSkipped}}} = lists:keysearch(test_server_skipped, 1, PI),
+    {value, {_, Ok}} = lists:keysearch(test_server_ok, 1, PI),
+    nformat(Name, CaseNum, Cases, Ok, Failed, UserSkipped+AutoSkipped),
+    i(Rest);
+i([]) ->
+    ok.
+
+%% Table formatting helpers: hformat prints the (string) header row,
+%% nformat a data row with ~w columns.
+hformat(A1, A2, A3, A4, A5, A6) ->
+    io:format("~-20s ~8s ~8s ~8s ~8s ~8s~n", [A1,A2,A3,A4,A5,A6]).
+
+nformat(A1, A2, A3, A4, A5, A6) ->
+    io:format("~-20s ~8w ~8w ~8w ~8w ~8w~n", [A1,A2,A3,A4,A5,A6]).
+
+%% Force load of a module even if it is in a sticky directory.
+
+%% Force-load Mod like c:l/1, temporarily unsticking its directory so
+%% that modules in sticky library dirs (e.g. lists) can be reloaded.
+l(Mod) ->
+    case do_load(Mod) of
+	{error, sticky_directory} ->
+	    Dir = filename:dirname(code:which(Mod)),
+	    code:unstick_dir(Dir),
+	    do_load(Mod),
+	    code:stick_dir(Dir);
+	X ->
+	    X
+    end.
+
+
+%% Start test_server_ctrl if it is not already registered, then apply
+%% any single cover option from Opts. Returns the controller pid.
+ensure_ts_started(Opts) ->
+    Pid = case whereis(test_server_ctrl) of
+	      undefined ->
+		  test_server_ctrl:start();
+	      P when is_pid(P) ->
+		  P
+	  end,
+    case Opts of
+	[{Cover,AppOrCoverFile}] when Cover==cover; Cover==cover_details ->
+	    test_server_ctrl:cover(AppOrCoverFile,cover_type(Cover));
+	[{Cover,App,CoverFile}] when Cover==cover; Cover==cover_details ->
+	    test_server_ctrl:cover(App,CoverFile,cover_type(Cover));
+	_ ->
+	    ok
+    end,
+    Pid.
+
+%% Map the user-facing cover option atom to the analysis level.
+cover_type(cover) -> overview;
+cover_type(cover_details) -> details.
+
+%% Purge old code and load the current version of Mod.
+do_load(Mod) ->
+    code:purge(Mod),
+    code:load_file(Mod).
diff --git a/lib/test_server/src/ts.hrl b/lib/test_server/src/ts.hrl
new file mode 100644
index 0000000000..885a726c54
--- /dev/null
+++ b/lib/test_server/src/ts.hrl
@@ -0,0 +1,36 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%% Defines ripped out from test_server (these must remain the same
+%% as in test_server).
+
+-define(logdir_ext, ".logs").
+-define(suitelog_name, "suite.log").
+-define(last_file, "last_name").
+-define(last_link, "last_link").
+-define(last_test, "last_test").
+-define(run_summary, "suite.summary").
+-define(cover_total,"total_cover.log").
+-define(variables, "variables").
+-define(LF, [10]). % Newline in VxWorks script
+-define(CHAR_PER_LINE, 60). % Characters per VxWorks script building line
+-define(CROSS_COOKIE, "cross"). % cookie used when cross platform testing
+-define(TS_PORT, 7887).
+-define(TEST_SERVER_SCRIPT, "test_server_vx.script").
+
diff --git a/lib/test_server/src/ts.unix.config b/lib/test_server/src/ts.unix.config
new file mode 100644
index 0000000000..b4325f065f
--- /dev/null
+++ b/lib/test_server/src/ts.unix.config
@@ -0,0 +1,4 @@
+%% -*- erlang -*-
+
+%% Always run a (VNC) X server on host
+{xserver, "frumgar.du.uab.ericsson.se:66"}.
diff --git a/lib/test_server/src/ts.vxworks.config b/lib/test_server/src/ts.vxworks.config
new file mode 100644
index 0000000000..b0b66e07ad
--- /dev/null
+++ b/lib/test_server/src/ts.vxworks.config
@@ -0,0 +1,19 @@
+%% -*- erlang -*-
+
+%%% There is no equivalent command to ypmatch on Win32... :-(
+{hardcoded_hosts,
+ [{"134.138.177.74","strider"},
+ {"134.138.177.72", "elrond"},
+ {"134.138.177.67", "sam"},
+ {"134.138.176.215", "nenya"},
+ {"134.138.176.192", "merry"},
+ {"134.138.177.35", "lw4"},
+ {"134.138.177.35", "lw5"},
+ {"134.138.176.16", "super"},
+ {"134.138.177.16", "gandalf"},
+ {"134.138.177.92", "turin"},
+ {"134.138.177.86", "mallor"}]}.
+
+{hardcoded_ipv6_hosts,
+ [{"fe80::a00:20ff:feb2:b4a9","otptest06"},
+ {"fe80::a00:20ff:feb2:a621","otptest08"}]}.
diff --git a/lib/test_server/src/ts.win32.config b/lib/test_server/src/ts.win32.config
new file mode 100644
index 0000000000..2802c4a75a
--- /dev/null
+++ b/lib/test_server/src/ts.win32.config
@@ -0,0 +1,15 @@
+%% -*- erlang -*-
+
+%%% There is no equivalent command to ypmatch on Win32... :-(
+{hardcoded_hosts,
+ [{"134.138.177.24","isildur"},
+ {"134.138.177.72", "elrond"},
+ {"134.138.176.215", "nenya"},
+ {"134.138.176.192", "merry"},
+ {"134.138.176.16", "super"},
+ {"134.138.177.16", "gandalf"},
+ {"134.138.177.92", "turin"}]}.
+
+{hardcoded_ipv6_hosts,
+ [{"fe80::a00:20ff:feb2:b4a9","otptest06"},
+ {"fe80::a00:20ff:feb2:a621","otptest08"}]}.
diff --git a/lib/test_server/src/ts_autoconf_vxworks.erl b/lib/test_server/src/ts_autoconf_vxworks.erl
new file mode 100644
index 0000000000..f4535cd89a
--- /dev/null
+++ b/lib/test_server/src/ts_autoconf_vxworks.erl
@@ -0,0 +1,191 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% Purpose : Autoconf for cross environments.
+
+-module(ts_autoconf_vxworks).
+-export([configure/1]).
+%%% Supported cross platforms:
+-define(PLATFORMS, ["vxworks_cpu32", "vxworks_ppc860", "vxworks_ppc603",
+ "vxworks_sparc", "vxworks_ppc750", "vxworks_simso"]).
+-include("ts.hrl").
+
+%% takes an argument {Target_arch, Target_host} (e.g. {vxworks_ppc860, thorin}).
%% Run the cross "autoconf" for {Arch, Host} and substitute the
%% collected variables into conf_vars.in, producing conf_vars.
%% Returns whatever ts_lib:subst_file/3 returns, or the error from
%% variables/1.
configure({_Target_arch, _Target_host} = Cross_spec) ->
    case variables(Cross_spec) of
        {ok, Vars} ->
            ts_lib:subst_file("conf_vars.in", "conf_vars", Vars);
        Failure ->
            Failure
    end.
+
%% Collect all configuration variables for the cross specification
%% ({Arch, Host}) by running every probe listed in tests/0.
variables(Cross_spec) ->
    run_tests(Cross_spec, tests(), []).

%% Run each {Prompt, Tester} probe in turn, threading the growing
%% variable list through.  The old-style catch maps a crashing probe
%% to {'EXIT', Reason}, which aborts the whole run; note that a probe
%% using throw would be seen as a normal {Result, NewVars} reply.
run_tests(Cross_spec, [{Prompt, Tester}|Rest], Vars) ->
    io:format("checking ~s... ", [Prompt]),
    case catch Tester(Cross_spec, Vars) of
        {'EXIT', Reason} ->
            io:format("FAILED~nExit status: ~p~n", [Reason]),
            {error, auto_conf_failed};
        {Result, NewVars} ->
            io:format("~s~n", [lists:concat([Result])]),
            run_tests(Cross_spec, Rest, NewVars)
    end;
run_tests(_Cross_spec, [], Vars) ->
    {ok, Vars}.
+
+
+%%% The tests.
+
%% The ordered list of configuration probes: {Prompt, Fun/2} pairs.
%% Each fun takes the cross spec and the variable list and returns
%% {ResultToPrint, NewVars}.
tests() ->
    [{"supported target architecture", fun target_architecture/2},
     {"cross target host to run tests on", fun target_host/2},
     {"CPU type", fun cpu/2},
     {"for cross-compiling gcc", fun find_gcc/2},
     {"for cross-linker", fun find_ld/2},
     {"for object extension", fun find_obj/2},
     {"for shared libraries extension", fun find_dll/2},
     {"for executables extension", fun find_exe/2},
     {"for make", fun find_make/2}].
+
%% Check the architecture against the supported ?PLATFORMS list and
%% record host_os/host; an unsupported platform only prints a marker
%% string and leaves Vars untouched.
target_architecture({Architecture, _Target_host}, Vars) ->
    case lists:member(Architecture, ?PLATFORMS) of
        true ->
            {Architecture, [{host_os, os_type(Architecture)}, {host, Architecture}|Vars]};
        false ->
            {"unsupported_platform", Vars}
    end.

%% Record the host that the cross-compiled tests will run on.
target_host({_Architecture, Target_host}, Vars) ->
    {Target_host, [{target_host, Target_host} | Vars]}.

%% Record the CPU name for the architecture (from cross_data/1).
cpu({Arch, _Target_host}, Vars) ->
    Cpu = processor(Arch),
    {Cpu, [{host_cpu, Cpu}|Vars]}.
+
%% Probe for the Tornado cross-gcc ("cc" ++ toolchain suffix, e.g.
%% "ccppc").  On success, record compiler and flag variables; on a
%% miss, just print "no" and continue.
find_gcc({Arch, _Target_host}, Vars) ->
    Gcc = "cc" ++ gnu_suffix(Arch),
    case os:find_executable(Gcc) of
        false ->
            {no, Vars};
        Path when is_list(Path) ->
            Cflags = cflags(Arch),
            {Path, [{'CC', Gcc},
                    {'CFLAGS', Cflags},
                    {'EI_CFLAGS', Cflags},
                    {'ERTS_CFLAGS', Cflags},
                    {'DEFS', ""},
                    {'ERTS_LIBS', ""},
                    {'LIBS', ""},
                    {'SHLIB_CFLAGS', Cflags},
                    {test_c_compiler, "{gnuc, undefined}"} | Vars]}
    end.

%% Probe for the matching cross-linker ("ld" ++ toolchain suffix)
%% and record linker variables.
find_ld({Arch, _Target_host}, Vars) ->
    Linker = "ld" ++ gnu_suffix(Arch),
    case os:find_executable(Linker) of
        false ->
            {no, Vars};
        Path when is_list(Path) ->
            {Path, [{'LD', Linker},
                    {'CROSSLDFLAGS', ldflags(Arch)},
                    {'SHLIB_EXTRACT_ALL', ""},
                    {'SHLIB_LD', Linker},
                    {'SHLIB_LDFLAGS', ""},
                    {'SHLIB_LDLIBS', ""} | Vars]}
    end.

%% Object file extension for the architecture (".o").
find_obj({Arch, _Target_host}, Vars) ->
    Obj = obj_ext(Arch),
    {Obj, [{obj, Obj}|Vars]}.

%% Loadable-module extension for the architecture (".eld").
find_dll({Arch, _Target_host}, Vars) ->
    Dll = dll_ext(Arch),
    {Dll, [{'SHLIB_SUFFIX', Dll}|Vars]}.

%% Executable extension for the architecture ("" on VxWorks).
find_exe({Arch, _Target_host}, Vars) ->
    Exe = exe_ext(Arch),
    {Exe, [{exe, Exe}|Vars]}.

%% Plain "make" is assumed to exist on the build host.
find_make(_, Vars) ->
    {"make", [{make_command, "make"} | Vars]}.
+
%%% some utility functions
%%% Accessors into the cross_data/1 tuple, whose layout is:
%%% {OsType, ExeExt, ObjExt, DllExt, GnuToolSuffix, Cpu, Cflags, Ldflags}

%% Toolchain name suffix, e.g. "ppc" giving ccppc/ldppc.
gnu_suffix(Arch) ->
    {_, _, _, _, Suffix, _Cpu, _Cflags, _} = cross_data(Arch),
    Suffix.

%% CPU designation for the architecture.
processor(Arch) ->
    {_, _, _, _, _Suffix, Cpu, _Cflags, _} = cross_data(Arch),
    Cpu.

%% C compiler flags for the architecture.
cflags(Arch) ->
    {_, _, _, _, _Suffix, _Cpu, Cflags, _} = cross_data(Arch),
    Cflags.

%% Cross-linker flags for the architecture.
ldflags(Arch) ->
    {_, _, _, _, _Suffix, _Cpu, _Cflags, Ldflags} = cross_data(Arch),
    Ldflags.

%% Target OS name ("VxWorks").
os_type(Arch) ->
    {Os_type, _, _, _, _, _, _, _} = cross_data(Arch),
    Os_type.

%% Object file extension.
obj_ext(Arch) ->
    {_, _, Obj, _, _, _, _, _} = cross_data(Arch),
    Obj.

%% Loadable module extension.
dll_ext(Arch) ->
    {_, _, _, Dll, _, _, _, _} = cross_data(Arch),
    Dll.

%% Executable extension.
exe_ext(Arch) ->
    {_, Exe, _, _, _, _, _, _} = cross_data(Arch),
    Exe.
+
%% Per-architecture toolchain data table.  Tuple layout:
%% {OsType, ExeExt, ObjExt, DllExt, GnuToolSuffix, Cpu, Cflags, Ldflags}.
%% Crashes with case_clause on an architecture not listed here, which
%% cannot happen after target_architecture/2 has validated it.
cross_data(Arch) ->
    case Arch of
        "vxworks_cpu32" ->
            {"VxWorks", "", ".o", ".eld", "68k", "cpu32",
             "-DCPU=CPU32 -DVXWORKS -I$(WIND_BASE)/target/h -mnobitfield -fno-builtin -nostdinc -fvolatile -msoft-float",
             "-r -d"};
        "vxworks_ppc860" ->
            {"VxWorks", "", ".o", ".eld", "ppc", "ppc860",
             "-DCPU=PPC860 -DVXWORKS -I$(WIND_BASE)/target/h -mcpu=860 -fno-builtin -fno-for-scope -msoft-float -D_GNU_TOOL -nostdinc",
             "-r -d"};
        "vxworks_ppc603" ->
            {"VxWorks", "", ".o", ".eld", "ppc", "ppc603",
             "-DCPU=PPC603 -DVXWORKS -I$(WIND_BASE)/target/h -fno-builtin -fno-for-scope -D_GNU_TOOL -nostdinc",
             "-r -d"};
        "vxworks_sparc" ->
            %%% The Sparc Architecture is included for private use (i.e. not Tornado 1.0.1 compatible).
            {"VxWorks", "", ".o", ".eld", "sparc", "sparc",
             "-DCPU=SPARC -DVXWORKS -I/home/gandalf/bsproj/BS.2/UOS/vw/5.2/h -fno-builtin -nostdinc",
             "-r -d"};
        "vxworks_ppc750" ->
            {"VxWorks", "", ".o", ".eld", "ppc", "ppc604",
             "-DCPU=PPC604 -DVXWORKS -DTOOL_FAMILY=gnu -DTOOL=gnu -I$(WIND_BASE)/target/h -fno-builtin -fno-for-scope -D_GNU_TOOL",
             "-r -d"};
        "vxworks_simso" ->
            {"VxWorks", "", ".o", ".eld", "simso", "simso",
             "-DCPU=SIMSPARCSOLARIS -DVXWORKS -DTOOL_FAMILY=gnu -DTOOL=gnu -I$(WIND_BASE)/target/h -I$(WIND_GCC_INCLUDE) -fno-builtin -fno-for-scope -D_GNU_TOOL",
             "-r -d"}

    end.
diff --git a/lib/test_server/src/ts_autoconf_win32.erl b/lib/test_server/src/ts_autoconf_win32.erl
new file mode 100644
index 0000000000..9103542fd2
--- /dev/null
+++ b/lib/test_server/src/ts_autoconf_win32.erl
@@ -0,0 +1,254 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% Purpose : Autoconf for Windows.
+
+-module(ts_autoconf_win32).
+-export([configure/0]).
+
+-include("ts.hrl").
+
%% Entry point: probe the Windows build host and substitute the
%% collected variables into conf_vars.in, producing conf_vars.
configure() ->
    case variables() of
        {ok, Vars} ->
            ts_lib:subst_file("conf_vars.in", "conf_vars", Vars);
        Error ->
            Error
    end.

%% Collect {Name, Value} variables from all probes in tests/0.
variables() ->
    run_tests(tests(), []).

%% Run each {Prompt, Tester} probe in turn.  The old-style catch
%% maps a crashing probe to {'EXIT', Reason}, which aborts the run.
run_tests([{Prompt, Tester}|Rest], Vars) ->
    io:format("checking ~s... ", [Prompt]),
    case catch Tester(Vars) of
        {'EXIT', Reason} ->
            io:format("FAILED~nExit status: ~p~n", [Reason]),
            {error, auto_conf_failed};
        {Result, NewVars} ->
            io:format("~s~n", [lists:concat([Result])]),
            run_tests(Rest, NewVars)
    end;
run_tests([], Vars) ->
    {ok, Vars}.
+
+%%% The tests.
+
%% The ordered list of configuration probes for a Windows host.
tests() ->
    [{"host system type", fun system_type/1},
     {"CPU type", fun cpu/1},
     {"for C compiler", fun c_compiler/1},
     {"for make program", fun make/1},
     {"for location of SSL libraries", fun ssl/1},
     {"for location of Java compiler", fun javac/1}].

%% Map os:type()/os:version() to a human-readable Windows name and
%% record host_os/host.  Only win32 families are handled; any other
%% OS crashes with function_clause (this module is Windows-only).
%% NOTE(review): the {win32, windows} branch has no clause for
%% Windows ME ({4,90,_}) and would crash there — confirm intentional.
system_type(Vars) ->
    Os = case os:type() of
             {win32, nt} ->
                 case os:version() of
                     {4,_,_} -> "Windows NT";
                     {5,0,_} -> "Windows 2000";
                     {5,1,_} -> "Windows XP";
                     {5,2,_} -> "Windows 2003";
                     {6,0,_} -> "Windows Vista";
                     {_,_,_} -> "Windows NCC-1701-D"
                 end;
             {win32, windows} ->
                 case os:version() of
                     {4,0,_} -> "Windows 95";
                     {4,10,_} -> "Windows 98"
                 end;
             {win32, _} -> "Windows"
         end,
    {Os, [{host_os, Os}, {host, "win32"}|Vars]}.
+
%% Determine the host CPU from the Windows environment and record it
%% as host_cpu.  "x86" plus PROCESSOR_LEVEL N becomes "iN86"; any
%% other reported architecture is used verbatim; no information at
%% all defaults to "i386".
%% NOTE(review): the lookup name "PROCESSOR_Level" relies on Windows
%% environment variables being case-insensitive — confirm.
cpu(Vars) ->
    ArchEnv = os:getenv("PROCESSOR_ARCHITECTURE"),
    LevelEnv = os:getenv("PROCESSOR_Level"),
    Cpu =
        if
            ArchEnv =:= "x86", is_list(LevelEnv) ->
                lists:concat(["i", LevelEnv, "86"]);
            is_list(ArchEnv) ->
                ArchEnv;
            true ->
                "i386"
        end,
    {Cpu, [{host_cpu, Cpu} | Vars]}.
+
%% Find a C compiler.  Each compiler probe (visual_cxx/1, mingw32/1)
%% throws {Path, NewVars} on success, which is caught and returned
%% here; falling through every probe yields {no, Vars}.  The compiler
%% the emulator itself was built with is tried first.
c_compiler(Vars) ->
    try
        CompTests = [{msc, fun visual_cxx/1},
                     {gnuc, fun mingw32/1}],
        %% First try to find the same compiler that the system
        %% was built with...
        UsedCompiler = case erlang:system_info(c_compiler_used) of
                           {UsedCmplr, _} ->
                               case lists:keysearch(UsedCmplr, 1, CompTests) of
                                   {value, {UsedCmplr, CompTest}} ->
                                       CompTest(Vars);
                                   _ ->
                                       ok
                               end,
                               UsedCmplr;
                           undefined ->
                               undefined
                       end,
        %% ... then try to find a compiler...
        lists:foreach(fun ({Cmplr, _CmplrTst}) when Cmplr =:= UsedCompiler ->
                              ok; % Have already checked for this one
                          ({_Cmplr, CmplrTst}) ->
                              CmplrTst(Vars)
                      end,
                      CompTests),
        {no, Vars}
    catch
        throw:{_Path, _NewVars} = Res -> Res
    end.
+
%% Probe for Microsoft Visual C++ ("cl").  Returns {no, Vars} when
%% cl is not on the PATH; otherwise throws {Path, NewVars} (caught
%% in c_compiler/1) carrying compiler/linker variables.  A debug
%% emulator selects the *d runtime variants plus debug flags.
visual_cxx(Vars) ->
    case os:find_executable("cl") of
        false ->
            {no, Vars};
        Path when is_list(Path) ->
            %% {static runtime, dynamic runtime, DLL runtime,
            %%  linker debug flags, compiler debug flags, optimization}
            {DEFAULT_THR_LIB,
             ERTS_THR_LIB,
             DLL,
             DBG_LINK,
             DBG_COMP,
             OPT} =
                case is_debug_build() of
                    true ->
                        {"-MTd ",
                         "-MDd ",
                         "-LDd ",
                         "-debug -pdb:none ",
                         "-Z7 -DDEBUG",
                         " "};
                    false ->
                        {"-MT ",
                         "-MD ",
                         "-LD ",
                         " ",
                         " ",
                         "-Ox "}
                end,
            WIN32 = "-D__WIN32__ ",
            ERTS_CFLAGS = ERTS_THR_LIB ++ WIN32 ++ OPT ++ DBG_COMP,
            LIBS = "ws2_32.lib",
            CC = "cl -nologo",
            throw({Path, [{'CC', CC},
                          {'LD', CC},
                          {'SHLIB_LD', CC},
                          {'SHLIB_LDFLAGS', ERTS_THR_LIB ++ DLL},
                          {'SHLIB_LDLIBS', "-link " ++ DBG_LINK ++ "kernel32.lib"},
                          {'SHLIB_EXTRACT_ALL', ""},
                          {'CFLAGS', DEFAULT_THR_LIB ++ WIN32 ++ DBG_COMP},
                          {'EI_CFLAGS', DEFAULT_THR_LIB ++ WIN32 ++ DBG_COMP},
                          {'ERTS_CFLAGS', ERTS_CFLAGS},
                          {'SHLIB_CFLAGS', ERTS_CFLAGS++DLL},
                          {'CROSSLDFLAGS', ""},
                          {'DEFS', common_c_defs()},
                          {'SHLIB_SUFFIX', ".dll"},
                          {'ERTS_LIBS', ERTS_THR_LIB ++ LIBS},
                          {'LIBS', DEFAULT_THR_LIB ++ "-link " ++ DBG_LINK ++ LIBS},
                          {obj,".obj"},
                          {exe, ".exe"},
                          {test_c_compiler, "{msc, undefined}"}
                          | Vars]})
    end.
+
%% Probe for the MinGW gcc ("mingw32-gcc").  Returns {no, Vars} on a
%% miss; otherwise throws {Path, NewVars} (caught in c_compiler/1)
%% carrying gcc-flavoured compiler/linker variables.
mingw32(Vars) ->
    Gcc = "mingw32-gcc",
    case os:find_executable(Gcc) of
        false ->
            {no, Vars};
        Path when is_list(Path) ->
            %% {compiler debug define, optimization}
            {DBG_COMP,
             OPT} =
                case is_debug_build() of
                    true ->
                        {"-DDEBUG",
                         " "};
                    false ->
                        {" ",
                         "-O2 "}
                end,
            WIN32 = "-D__WIN32__ ",
            ERTS_CFLAGS = WIN32 ++ "-g " ++ OPT ++ DBG_COMP,
            LIBS = "-lws2_32",
            CC = Gcc,
            throw({Path, [{'CC', CC},
                          {'LD', CC},
                          {'SHLIB_LD', CC},
                          {'SHLIB_LDFLAGS', "-shared "},
                          {'SHLIB_LDLIBS', " -lkernel32"},
                          {'SHLIB_EXTRACT_ALL', ""},
                          {'CFLAGS', WIN32 ++ DBG_COMP},
                          {'EI_CFLAGS', WIN32 ++ DBG_COMP},
                          {'ERTS_CFLAGS', ERTS_CFLAGS},
                          {'SHLIB_CFLAGS', ERTS_CFLAGS},
                          {'CROSSLDFLAGS', ""},
                          {'DEFS', common_c_defs()},
                          {'SHLIB_SUFFIX', ".dll"},
                          {'ERTS_LIBS', LIBS},
                          {'LIBS', LIBS},
                          {obj,".o"},
                          {exe, ".exe"},
                          {test_c_compiler, "{gnuc, undefined}"}
                          | Vars]})
    end.
+
%% C preprocessor defines common to both supported compilers.
common_c_defs() ->
    "-DHAVE_STRERROR=1".

%% Look for a make tool: nmake is preferred, mingw32-make is the
%% fallback.  find_make/2 throws on success, so the first hit wins;
%% if neither is found the second call's {no, Vars} falls through.
make(Vars) ->
    try
        find_make("nmake -nologo", Vars),
        find_make("mingw32-make", Vars)
    catch
        throw:{_Path, _NewVars} = Res -> Res
    end.
+
%% Probe for the make tool named by the first word of MakeCmd.
%% Throws {Path, NewVars} on success (caught in make/1); returns
%% {no, Vars} when the executable is not on the PATH.
find_make(MakeCmd, Vars) ->
    Tool = hd(string:tokens(MakeCmd, " \t")),
    case os:find_executable(Tool) of
        Located when is_list(Located) ->
            throw({Located, [{make_command, MakeCmd} | Vars]});
        false ->
            {no, Vars}
    end.
+
%% SSL location on Windows is fixed: record SSLEAY_ROOT as "win32".
ssl(Vars) ->
    Root = "win32",
    {Root, [{'SSLEAY_ROOT', Root} | Vars]}.
+
%% Probe for a Java compiler on the PATH; record 'JAVAC' on success.
javac(Vars) ->
    case os:find_executable("javac") of
        false ->
            {no, Vars};
        Path when is_list(Path) ->
            {Path, [{'JAVAC', "javac"} | Vars]}
    end.
+
%% True when the running emulator's system_version banner mentions
%% "debug".  The catch keeps any unexpected failure mapping to false.
is_debug_build() ->
    Probe = (catch string:str(erlang:system_info(system_version), "debug")),
    is_integer(Probe) andalso Probe > 0.
diff --git a/lib/test_server/src/ts_erl_config.erl b/lib/test_server/src/ts_erl_config.erl
new file mode 100644
index 0000000000..4fc46fc5d6
--- /dev/null
+++ b/lib/test_server/src/ts_erl_config.erl
@@ -0,0 +1,398 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% Purpose : Updates variable list with variables depending on
+%%% running Erlang system.
+
+-module(ts_erl_config).
+
+
+-export([variables/2]).
+
+%% Returns a list of key, value pairs.
+
%% Build the full variable list for the running (or cross) Erlang
%% system on top of Base0: include paths, application locations,
%% dynamic-driver build rules, emulator facts and path separators.
variables(Base0, OsType) ->
    Base1 = erl_include(Base0),
    Base2 = get_app_vars(fun erl_interface/2, Base1, OsType),
    Base3 = get_app_vars(fun ic/2, Base2, OsType),
    Base4 = get_app_vars(fun jinterface/2, Base3, OsType),
    Base5 = dl_vars(Base4, OsType),
    Base6 = emu_vars(Base5),
    Base7 = get_app_vars(fun ssl/2, Base6, OsType),
    Base8 = erts_lib(Base7, OsType),
    Base = separators(Base8, OsType),
    [{'EMULATOR', tl(code:objfile_extension())},
     {emu_threads, atom_to_list(erlang:system_info(threads))},
     %% ".debug" marks debug emulators so their artifacts get
     %% distinct names.
     {type_marker, case is_debug_build() of
                       true ->
                           ".debug";
                       false ->
                           ""
                   end}
     | Base].
+
%% Run an application-specific probe.  A {cannot_find_app, App}
%% throw degrades to a warning (the app is simply not installed);
%% a crash or any other non-list result is fatal.
get_app_vars(AppFun, Vars, OsType) ->
    case catch AppFun(Vars,OsType) of
        Res when is_list(Res) ->
            Res;
        {cannot_find_app, App} ->
            io:format("* WARNING: Cannot find ~p!~n", [App]),
            Vars;
        {'EXIT', Reason} ->
            exit(Reason);
        Garbage ->
            exit({unexpected_internal_error, Garbage})
    end.

%% Add the make suffix rules used to build dynamic drivers; the
%% @var@ placeholders are substituted from Vars right here via
%% ts_lib:subst/2.
dl_vars(Vars, _) ->
    ShlibRules0 = ".SUFFIXES:\n" ++
        ".SUFFIXES: @dll@ @obj@ .c\n\n" ++
        ".c@dll@:\n" ++
        "\t@CC@ -c @SHLIB_CFLAGS@ $(SHLIB_EXTRA_CFLAGS) -I@erl_include@ @DEFS@ $<\n" ++
        "\t@SHLIB_LD@ @CROSSLDFLAGS@ @SHLIB_LDFLAGS@ $(SHLIB_EXTRA_LDFLAGS) -o $@ $*@obj@ @SHLIB_LDLIBS@ $(SHLIB_EXTRA_LDLIBS)",

    ShlibRules = ts_lib:subst(ShlibRules0, Vars),
    [{'SHLIB_RULES', ShlibRules}|Vars].
+
%% File name of the erts link library.  On win32 the name encodes
%% the MSVC runtime variant (MD/ML, plus "d" for debug builds);
%% elsewhere the reentrant library gets an "_r" suffix.
erts_lib_name(multi_threaded, win32) ->
    link_library("erts_MD" ++ case is_debug_build() of
                                  true -> "d";
                                  false -> ""
                              end,
                 win32);
erts_lib_name(single_threaded, win32) ->
    link_library("erts_ML" ++ case is_debug_build() of
                                  true -> "d";
                                  false -> ""
                              end,
                 win32);
erts_lib_name(multi_threaded, OsType) ->
    link_library("erts_r", OsType);
erts_lib_name(single_threaded, OsType) ->
    link_library("erts", OsType).
+
%% Resolve the erts include/lib directories for the current layout.
%% Installed trees keep headers and libs directly under erts-<vsn>;
%% clearcase/source trees additionally have target-specific
%% subdirectories for generated headers and libs.
erts_lib(Vars,OsType) ->
    {ErtsLibInclude,
     ErtsLibIncludeGenerated,
     ErtsLibIncludeInternal,
     ErtsLibIncludeInternalGenerated,
     ErtsLibPath,
     ErtsLibInternalPath}
        = case erl_root(Vars) of
              {installed, _Root} ->
                  Erts = lib_dir(Vars, erts),
                  ErtsInclude = filename:join([Erts, "include"]),
                  ErtsIncludeInternal = filename:join([ErtsInclude, "internal"]),
                  ErtsLib = filename:join([Erts, "lib"]),
                  ErtsLibInternal = filename:join([ErtsLib, "internal"]),
                  {ErtsInclude,
                   ErtsInclude,
                   ErtsIncludeInternal,
                   ErtsIncludeInternal,
                   ErtsLib,
                   ErtsLibInternal};
              {Type, Root, Target} when Type == clearcase; Type == srctree ->
                  Erts = filename:join([Root, "erts"]),
                  ErtsInclude = filename:join([Erts, "include"]),
                  ErtsIncludeTarget = filename:join([ErtsInclude, Target]),
                  ErtsIncludeInternal = filename:join([ErtsInclude,
                                                       "internal"]),
                  ErtsIncludeInternalTarget = filename:join([ErtsIncludeInternal,
                                                             Target]),
                  ErtsLib = filename:join([Erts, "lib", Target]),
                  ErtsLibInternal = filename:join([Erts,
                                                   "lib",
                                                   "internal",
                                                   Target]),
                  {ErtsInclude,
                   ErtsIncludeTarget,
                   ErtsIncludeInternal,
                   ErtsIncludeInternalTarget,
                   ErtsLib,
                   ErtsLibInternal}
          end,
    [{erts_lib_include,
      filename:nativename(ErtsLibInclude)},
     {erts_lib_include_generated,
      filename:nativename(ErtsLibIncludeGenerated)},
     {erts_lib_include_internal,
      filename:nativename(ErtsLibIncludeInternal)},
     {erts_lib_include_internal_generated,
      filename:nativename(ErtsLibIncludeInternalGenerated)},
     {erts_lib_path, filename:nativename(ErtsLibPath)},
     {erts_lib_internal_path, filename:nativename(ErtsLibInternalPath)},
     {erts_lib_multi_threaded, erts_lib_name(multi_threaded, OsType)},
     {erts_lib_single_threaded, erts_lib_name(single_threaded, OsType)}
     | Vars].
+
%% -I path(s) for C code that includes emulator headers: a single
%% include dir in an installed system, or the emulator source dirs
%% (joined into one "-I"-separated string) in a clearcase/src tree.
erl_include(Vars) ->
    Include =
        case erl_root(Vars) of
            {installed, Root} ->
                filename:join([Root, "usr", "include"]);
            {Type, Root, Target} when Type == clearcase; Type == srctree ->
                filename:join([Root, "erts", "emulator", "beam"])
                ++ " -I" ++ filename:join([Root, "erts", "emulator"])
                ++ system_include(Root, Vars)
                ++ " -I" ++ filename:join([Root, "erts", "include"])
                ++ " -I" ++ filename:join([Root, "erts", "include", Target])
        end,
    [{erl_include, filename:nativename(Include)}|Vars].


%% OS-specific sys include dir inside the emulator source tree,
%% selected on the "os" variable recorded by the autoconf step.
system_include(Root, Vars) ->
    SysDir =
        case ts_lib:var(os, Vars) of
            "Windows" ++ _T -> "sys/win32";
            "VxWorks" -> "sys.vxworks";
            "OSE" -> "sys/ose";
            _ -> "sys/unix"
        end,
    " -I" ++ filename:nativename(filename:join([Root, "erts", "emulator", SysDir])).
+
%% Locate erl_interface: include dir, lib dir and makefile-include
%% dir depend on the source layout; the library names depend on the
%% OS and on whether the emulator has threads.  Throws
%% {cannot_find_app, erl_interface} when the app is missing.
erl_interface(Vars,OsType) ->
    {Incl, {LibPath, MkIncl}} =
        case lib_dir(Vars, erl_interface) of
            {error, bad_name} ->
                throw({cannot_find_app, erl_interface});
            Dir ->
                {filename:join(Dir, "include"),
                 case erl_root(Vars) of
                     {installed, _Root} ->
                         {filename:join(Dir, "lib"),
                          filename:join(Dir, "src")};
                     {srctree, _Root, _Target} when OsType =:= vxworks ->
                         {filename:join(Dir, "lib"),
                          filename:join([Dir, "src"])};
                     {Type, _Root, Target} when Type == clearcase; Type == srctree ->
                         {filename:join([Dir, "obj", Target]),
                          filename:join([Dir, "src", Target])}
                 end}
        end,
    Lib = link_library("erl_interface",OsType),
    Lib1 = link_library("ei",OsType),
    %% Driver variants: single-threaded "_st" libs on non-threaded
    %% unix emulators, "_md" (dynamic MSVC runtime) libs on win32
    %% threaded emulators, otherwise the plain libraries.
    {LibDrv, Lib1Drv} =
        case erlang:system_info(threads) of
            false ->
                case OsType of
                    {unix,_} ->
                        {link_library("erl_interface_st",OsType),
                         link_library("ei_st",OsType)};
                    _ ->
                        {Lib, Lib1}
                end;
            true ->
                case OsType of
                    {win32, _} ->
                        {link_library("erl_interface_md",OsType),
                         link_library("ei_md",OsType)};
                    _ ->
                        {Lib, Lib1}
                end
        end,
    ThreadLib = case OsType of
                    % FIXME: FreeBSD uses gcc flag '-pthread' or linking with
                    % "libc_r". So it has to be last of libs. This is a
                    % temporary solution, should be configured elsewhere.

                    % That temporary solution has now failed!
                    % A new temporary solution is installed ...
                    % {unix,freebsd} -> "-lc_r";
                    {unix,freebsd} ->
                        "-lpthread";
                    {unix,_} ->
                        "-lpthread";
                    _ ->
                        "" % VxWorks or OSE
                end,
    CrossCompile = case OsType of
                       vxworks -> "true";
                       ose -> "true";
                       _ -> "false"
                   end,
    [{erl_interface_libpath, filename:nativename(LibPath)},
     {erl_interface_sock_libs, sock_libraries(OsType)},
     {erl_interface_lib, Lib},
     {erl_interface_eilib, Lib1},
     {erl_interface_lib_drv, LibDrv},
     {erl_interface_eilib_drv, Lib1Drv},
     {erl_interface_threadlib, ThreadLib},
     {erl_interface_include, filename:nativename(Incl)},
     {erl_interface_mk_include, filename:nativename(MkIncl)},
     {erl_interface_cross_compile, CrossCompile} | Vars].
+
%% Locate the ic application: its Java jar, C library path (layout
%% dependent) and include dir.  Throws {cannot_find_app, ic} when
%% the app is missing.
ic(Vars, OsType) ->
    {ClassPath, LibPath, Incl} =
        case lib_dir(Vars, ic) of
            {error, bad_name} ->
                throw({cannot_find_app, ic});
            Dir ->
                {filename:join([Dir, "priv", "ic.jar"]),
                 case erl_root(Vars) of
                     {installed, _Root} ->
                         filename:join([Dir, "priv", "lib"]);
                     {Type, _Root, Target} when Type == clearcase; Type == srctree ->
                         filename:join([Dir, "priv", "lib", Target])
                 end,
                 filename:join(Dir, "include")}
        end,
    [{ic_classpath, filename:nativename(ClassPath)},
     {ic_libpath, filename:nativename(LibPath)},
     {ic_lib, link_library("ic", OsType)},
     {ic_include_path, filename:nativename(Incl)}|Vars].

%% Locate the jinterface jar; throws {cannot_find_app, jinterface}
%% when the app is missing.
jinterface(Vars, _OsType) ->
    ClassPath =
        case lib_dir(Vars, jinterface) of
            {error, bad_name} ->
                throw({cannot_find_app, jinterface});
            Dir ->
                filename:join([Dir, "priv", "OtpErlang.jar"])
        end,
    [{jinterface_classpath, filename:nativename(ClassPath)}|Vars].
+
+%% Unused!
+% ig_vars(Vars) ->
+% {Lib0, Incl} =
+% case erl_root(Vars) of
+% {installed, Root} ->
+% Base = filename:join([Root, "usr"]),
+% {filename:join([Base, "lib"]),
+% filename:join([Base, "include"])};
+% {Type, Root, Target} when Type == clearcase; Type == srctree ->
+% {filename:join([Root, "lib", "ig", "obj", Target]),
+% filename:join([Root, "lib", "ig", "include"])}
+% end,
+% [{ig_libdir, filename:nativename(Lib0)},
+% {ig_include, filename:nativename(Incl)}|Vars].
+
%% Library directory for Lib, honouring an optional crossroot
%% variable.  Without crossroot this is code:lib_dir/1 (or the
%% erts-<vsn> dir for erts).  With crossroot, the newest matching
%% directory under the cross tree is picked via a shell glob.
%% NOTE(review): the "ls -d" shell-out is unix-only and silently
%% returns [] for a nonexistent dir — confirm this is intentional.
lib_dir(Vars, Lib) ->
    LibLibDir = case Lib of
                    erts ->
                        filename:join([code:root_dir(),
                                       "erts-" ++ erlang:system_info(version)]);
                    _ ->
                        code:lib_dir(Lib)
                end,
    case {get_var(crossroot, Vars), LibLibDir} of
        {{error, _}, _} -> %no crossroot
            LibLibDir;
        {_, {error, _}} -> %no lib
            LibLibDir;
        {CrossRoot, _} ->
            %% XXX: Ugly. So ugly I won't comment it
            %% /Patrik
            CLibDirList = case Lib of
                              erts ->
                                  [CrossRoot, "erts"];
                              _ ->
                                  [CrossRoot, "lib", atom_to_list(Lib)]
                          end,
            CLibDir = filename:join(CLibDirList),
            Cmd = "ls -d " ++ CLibDir ++ "*",
            XLibDir = lists:last(string:tokens(os:cmd(Cmd),"\n")),
            case file:list_dir(XLibDir) of
                {error, enoent} ->
                    [];
                _ ->
                    XLibDir
            end
    end.
+
%% Classify the Erlang installation: {clearcase|srctree, Root, Target}
%% for build trees, {installed, Root} otherwise (Root being the
%% crossroot variable when one is configured).
erl_root(Vars) ->
    Root = code:root_dir(),
    case ts_lib:erlang_type() of
        {clearcase, _Version} ->
            Target = get_var(target, Vars),
            {clearcase, Root, Target};
        {srctree, _Version} ->
            Target = get_var(target, Vars),
            {srctree, Root, Target};
        {_, _Version} ->
            case get_var(crossroot,Vars) of
                {error, notfound} ->
                    {installed, Root};
                CrossRoot ->
                    {installed, CrossRoot}
            end
    end.
+
+
%% Look up Key in the Vars proplist; {error, notfound} when absent
%% (or when the stored entry is not a 2-tuple).
get_var(Key, Vars) ->
    case lists:keyfind(Key, 1, Vars) of
        {Key, Value} ->
            Value;
        _ ->
            {error, notfound}
    end.
+
+
%% Extra socket libraries needed when linking test programs.
%% Unix includes them in the general libraries; VxWorks and OSE need
%% nothing extra; anything else is unsupported.
sock_libraries({win32, _}) ->
    "ws2_32.lib";
sock_libraries(Os) when Os =:= vxworks; Os =:= ose ->
    "";
sock_libraries({unix, _}) ->
    ""; % Included in general libraries if needed.
sock_libraries(_Unknown) ->
    exit({sock_libraries, not_supported}).
+
+
%% Map a library base name to its link-time file name for the OS:
%% "<name>.lib" on win32, "lib<name>.a" on unix and VxWorks, nothing
%% on OSE; anything else is unsupported.
link_library(Name, {win32, _}) ->
    lists:append(Name, ".lib");
link_library(Name, {unix, _}) ->
    lists:append(["lib", Name, ".a"]);
link_library(Name, vxworks) ->
    lists:append(["lib", Name, ".a"]);
link_library(_Name, ose) ->
    "";
link_library(_Name, _Unknown) ->
    exit({link_library, not_supported}).
+
+%% Returns emulator specific variables.
emu_vars(Vars) ->
    %% Whether this emulator runs from a source build, and its
    %% program name.
    %% NOTE(review): lib:progname/0 belongs to this OTP vintage; it
    %% was removed from later stdlib releases.
    [{is_source_build, is_source_build()},
     {erl_name, atom_to_list(lib:progname())}|Vars].

%% True when the system_version banner carries "[source]".
is_source_build() ->
    string:str(erlang:system_info(system_version), "[source]") > 0.

%% True when the emulator is a debug-compiled build; the catch maps
%% any unexpected failure to false.
is_debug_build() ->
    case catch string:str(erlang:system_info(system_version), "debug") of
        Int when is_integer(Int), Int > 0 ->
            true;
        _ ->
            false
    end.
+%%
+%% ssl_libdir
+%%
%% Record the ssl application's root directory as ssl_libdir;
%% throws {cannot_find_app, ssl} when the app is missing.
ssl(Vars, _OsType) ->
    case lib_dir(Vars, ssl) of
        {error, bad_name} ->
            throw({cannot_find_app, ssl});
        Dir ->
            [{ssl_libdir, filename:nativename(Dir)}| Vars]
    end.
+
%% Record the directory separator (DS) and path-list separator (PS)
%% for the target OS.
separators(Vars, OsType) ->
    {Ds, Ps} = case OsType of
                   {win32, _} -> {"\\", ";"};
                   _ -> {"/", ":"}
               end,
    [{'DS', Ds}, {'PS', Ps} | Vars].
diff --git a/lib/test_server/src/ts_install.erl b/lib/test_server/src/ts_install.erl
new file mode 100644
index 0000000000..94926eba80
--- /dev/null
+++ b/lib/test_server/src/ts_install.erl
@@ -0,0 +1,348 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ts_install).
+
+
+-export([install/2, platform_id/1]).
+
+-include("ts.hrl").
+
%% Install the test-system configuration.  install_local resolves
%% to the build host's own os:type(); any other TargetSystem is
%% treated as a cross target.  On success the collected variables
%% are written to the ?variables file (defined in ts.hrl).
install(install_local, Options) ->
    install(os:type(), Options);

install(TargetSystem, Options) ->
    io:format("Running configure for cross architecture, network target name~n"
              "~p~n", [TargetSystem]),
    case autoconf(TargetSystem) of
        {ok, Vars0} ->
            OsType = os_type(TargetSystem),
            Vars1 = ts_erl_config:variables(merge(Vars0,Options),OsType),
            {Options1, Vars2} = add_vars(Vars1, Options),
            Vars3 = lists:flatten([Options1|Vars2]),
            write_terms(?variables, Vars3);
        {error, Reason} ->
            {error, Reason}
    end.
+
%% Normalize a target system tuple: unix and win32 types pass
%% through unchanged, everything else is treated as VxWorks.
os_type({Family, _} = OsType) when Family =:= unix; Family =:= win32 ->
    OsType;
os_type(_Other) ->
    vxworks.
+
%% Push every {crossroot, _} option from the second list onto the
%% front of Vars, in encounter order (so the last crossroot seen
%% ends up first); all other options are ignored.
merge(Vars, Opts) ->
    lists:foldl(fun({crossroot, _} = Cross, Acc) ->
                        [Cross | Acc];
                   (_Other, Acc) ->
                        Acc
                end,
                Vars, Opts).
+
+%% Autoconf for various platforms.
+%% unix uses the configure script
+%% win32 uses ts_autoconf_win32
+%% VxWorks uses ts_autoconf_vxworks.
+
%% Run the platform-appropriate autoconf step and parse the
%% resulting "conf_vars" file into {Name, Value} pairs.
autoconf(TargetSystem) ->
    case autoconf1(TargetSystem) of
        ok ->
            autoconf2(file:read_file("conf_vars"))
        Error ->
            Error
    end.

%% Dispatch on target: win32 and unix have their own autoconf
%% mechanisms, everything else goes through the VxWorks cross one.
autoconf1({win32, _}) ->
    ts_autoconf_win32:configure();
autoconf1({unix, _}) ->
    unix_autoconf();
autoconf1(Other) ->
    ts_autoconf_vxworks:configure(Other).

%% Parse the conf_vars file contents (or propagate a read error).
autoconf2({ok, Bin}) ->
    get_vars(binary_to_list(Bin), name, [], []);
autoconf2(Error) ->
    Error.
+
%% Parse the "Name:Value\n" lines of a conf_vars file into a list of
%% {atom(), string()} pairs (most recently read first).  CR chars in
%% values are dropped; a trailing partial entry is a fatal error.
%% NOTE(review): names come from the generated conf_vars file, so
%% list_to_atom/1 here runs on trusted input only.
get_vars([$: | More], name, Acc, Out) ->
    Key = list_to_atom(lists:reverse(Acc)),
    get_vars(More, value, [], [Key | Out]);
get_vars([$\r | More], value, Acc, Out) ->
    %% swallow carriage returns inside values (DOS line endings)
    get_vars(More, value, Acc, Out);
get_vars([$\n | More], value, Acc, [Key | Out]) ->
    get_vars(More, name, [], [{Key, lists:reverse(Acc)} | Out]);
get_vars([Ch | More], State, Acc, Out) ->
    get_vars(More, State, [Ch | Acc], Out);
get_vars([], name, [], Out) ->
    {ok, Out};
get_vars(_Chars, _State, _Acc, _Out) ->
    {error, fatal_bad_conf_vars}.
+
%% Run the ./configure script in the current directory through a
%% port, enabling thread-safety and debug options to match the
%% running emulator.  Output is streamed via ts_lib:print_data/1.
%% NOTE(review): the Configure path is not shell-quoted; a path
%% containing spaces would break — confirm acceptable here.
unix_autoconf() ->
    Configure = filename:absname("configure"),
    Args = case catch erlang:system_info(threads) of
               false -> "";
               _ -> " --enable-shlib-thread-safety"
           end
        ++ case catch string:str(erlang:system_info(system_version),
                                 "debug") > 0 of
               false -> "";
               _ -> " --enable-debug-mode"
           end,
    case filelib:is_file(Configure) of
        true ->
            Env = macosx_cflags(),
            Port = open_port({spawn, Configure ++ Args},
                             [stream, eof, {env,Env}]),
            ts_lib:print_data(Port);
        false ->
            {error, no_configure_script}
    end.
+
%% Extra C compiler/linker environment for Mac OS X; empty elsewhere.
macosx_cflags() ->
    case os:type() of
        {unix, darwin} ->
            %% To ensure that the drivers we build can be loaded
            %% by the emulator, add either -m32 or -m64 to CFLAGS.
            WordSize = erlang:system_info(wordsize),
            Mflag = "-m" ++ integer_to_list(8*WordSize),
            [{"CFLAGS", Mflag},{"LDFLAGS", Mflag}];
        _ ->
            []
    end.

%% Write Terms to file Name, one dot-terminated term per line.
write_terms(Name, Terms) ->
    case file:open(Name, [write]) of
        {ok, Fd} ->
            Result = write_terms1(Fd, Terms),
            file:close(Fd),
            Result;
        {error, Reason} ->
            {error, Reason}
    end.

%% Helper for write_terms/2: emit each term followed by ".\n".
write_terms1(Fd, [Term|Rest]) ->
    ok = io:format(Fd, "~p.\n", [Term]),
    write_terms1(Fd, Rest);
write_terms1(_, []) ->
    ok.
+
%% Add the derived installation variables (platform identifiers,
%% rsh command, release info, testcase callback) on top of Vars0.
%% A 'longnames' option is consumed from Opts0 and re-emitted as a
%% {longnames, boolean()} variable.
add_vars(Vars0, Opts0) ->
    {Opts,LongNames} =
        case lists:keymember(longnames, 1, Opts0) of
            true ->
                {lists:keydelete(longnames, 1, Opts0),true};
            false ->
                {Opts0,false}
        end,
    {PlatformId, PlatformLabel, PlatformFilename, Version} =
        platform([{longnames, LongNames}|Vars0]),
    {Opts, [{longnames, LongNames},
            {platform_id, PlatformId},
            {platform_filename, PlatformFilename},
            {rsh_name, get_rsh_name()},
            {platform_label, PlatformLabel},
            {erl_flags, []},
            {erl_release, Version},
            {ts_testcase_callback, get_testcase_callback()} | Vars0]}.

%% "Mod Func" callback string from the TS_TESTCASE_CALLBACK env var
%% or the -ts_testcase_callback emulator argument; "" when unset or
%% malformed.
get_testcase_callback() ->
    case os:getenv("TS_TESTCASE_CALLBACK") of
        ModFunc when is_list(ModFunc), ModFunc /= "" ->
            case string:tokens(ModFunc, " ") of
                [_Mod,_Func] -> ModFunc;
                _ -> ""
            end;
        _ ->
            case init:get_argument(ts_testcase_callback) of
                {ok,[[Mod,Func]]} -> Mod ++ " " ++ Func;
                _ -> ""
            end
    end.

%% Remote-shell command: the ERL_RSH env var if set, else "ctrsh"
%% inside clearcase and plain "rsh" otherwise.
get_rsh_name() ->
    case os:getenv("ERL_RSH") of
        false ->
            case ts_lib:erlang_type() of
                {clearcase, _} ->
                    "ctrsh";
                {_, _} ->
                    "rsh"
            end;
        Str ->
            Str
    end.
+
%% Just the platform id string (first element of platform/1).
platform_id(Vars) ->
    {Id,_,_,_} = platform(Vars),
    Id.

%% Build {PlatformId, PlatformLabel, PlatformFilename, Version}:
%% a "/"-separated string of host name, OS, CPU and all emulator
%% feature markers (SMP, kernel poll, lock checking, ...), prefixed
%% with the release type and, for the id, the version.
platform(Vars) ->
    Hostname = hostname(),

    {Type,Version} = ts_lib:erlang_type(),
    Cpu = ts_lib:var('CPU', Vars),
    Os = ts_lib:var(os, Vars),

    ErlType = to_upper(atom_to_list(Type)),
    OsType = ts_lib:initial_capital(Os),
    CpuType = ts_lib:initial_capital(Cpu),
    LinuxDist = linux_dist(),
    ExtraLabel = extra_platform_label(),
    Schedulers = schedulers(),
    BindType = bind_type(),
    KP = kernel_poll(),
    IOTHR = io_thread(),
    LC = lock_checking(),
    MT = modified_timing(),
    AsyncThreads = async_threads(),
    HeapType = heap_type_label(),
    Debug = debug(),
    CpuBits = word_size(),
    Common = lists:concat([Hostname,"/",OsType,"/",CpuType,CpuBits,LinuxDist,
                           Schedulers,BindType,KP,IOTHR,LC,MT,AsyncThreads,
                           HeapType,Debug,ExtraLabel]),
    PlatformId = lists:concat([ErlType, " ", Version, Common]),
    PlatformLabel = ErlType ++ Common,
    PlatformFilename = platform_as_filename(PlatformId),
    {PlatformId, PlatformLabel, PlatformFilename, Version}.
+
%% Turn a platform id into a safe file name: spaces and slashes
%% become underscores, ASCII upper-case letters are lower-cased.
platform_as_filename(Label) ->
    Transform = fun($ ) -> $_;
                   ($/) -> $_;
                   (Ch) when Ch >= $A, Ch =< $Z -> Ch - $A + $a;
                   (Ch) -> Ch
                end,
    [Transform(Ch) || Ch <- Label].

%% Upper-case the ASCII letters of a string; other characters are
%% passed through unchanged.
to_upper(String) ->
    [if Ch >= $a, Ch =< $z -> Ch - $a + $A; true -> Ch end
     || Ch <- String].
+
%% "/64" marker for 64-bit emulators, "" for 32-bit ones.
word_size() ->
    case erlang:system_info(wordsize) of
        4 -> "";
        8 -> "/64"
    end.

%% Linux distribution marker; only SuSE is currently probed.
linux_dist() ->
    case os:type() of
        {unix,linux} ->
            linux_dist_1([fun linux_dist_suse/0]);
        _ -> ""
    end.

%% Return the first non-empty string produced by the probe funs.
linux_dist_1([F|T]) ->
    case F() of
        "" -> linux_dist_1(T);
        Str -> Str
    end;
linux_dist_1([]) -> "".

%% "/Suse<ver>" when /etc/SuSE-release exists ("" otherwise); the
%% awk output's trailing newline is stripped via double reverse.
linux_dist_suse() ->
    case filelib:is_file("/etc/SuSE-release") of
        false -> "";
        true ->
            Ver0 = os:cmd("awk '/^VERSION/ {print $3}' /etc/SuSE-release"),
            [_|Ver1] = lists:reverse(Ver0),
            Ver = lists:reverse(Ver1),
            "/Suse" ++ Ver
    end.

%% "/<shortname>" of the local host (domain part dropped);
%% "/localhost" when the name cannot be determined.
hostname() ->
    case catch inet:gethostname() of
        {ok, Hostname} when is_list(Hostname) ->
            "/" ++ lists:takewhile(fun (C) -> C /= $. end, Hostname);
        _ ->
            "/localhost"
    end.

%% "/Hybrid" for hybrid-heap emulators; "" for private heaps (the
%% catch also absorbs emulators without the heap_type info item).
heap_type_label() ->
    case catch erlang:system_info(heap_type) of
        hybrid -> "/Hybrid";
        _ -> "" %private
    end.

%% "/A<pool size>" when async threads are enabled, "" otherwise.
async_threads() ->
    case catch erlang:system_info(threads) of
        true -> "/A"++integer_to_list(erlang:system_info(thread_pool_size));
        _ -> ""
    end.
+
%% "/S<n>" (or "/S<n>:<online>" when they differ) on SMP emulators,
%% "" otherwise.
schedulers() ->
    case catch erlang:system_info(smp_support) of
        true ->
            case {erlang:system_info(schedulers),
                  erlang:system_info(schedulers_online)} of
                {S,S} ->
                    "/S"++integer_to_list(S);
                {S,O} ->
                    "/S"++integer_to_list(S) ++ ":" ++
                        integer_to_list(O)
            end;
        _ -> ""
    end.

%% Short marker for the scheduler bind type, "" when unbound or
%% unsupported.
bind_type() ->
    case catch erlang:system_info(scheduler_bind_type) of
        thread_no_node_processor_spread -> "/sbttnnps";
        no_node_processor_spread -> "/sbtnnps";
        no_node_thread_spread -> "/sbtnnts";
        processor_spread -> "/sbtps";
        thread_spread -> "/sbtts";
        no_spread -> "/sbtns";
        _ -> ""
    end.


%% "/Debug" on debug-compiled emulators.
debug() ->
    case string:str(erlang:system_info(system_version), "debug") of
        0 -> "";
        _ -> "/Debug"
    end.

%% "/LC" when lock checking is enabled.
lock_checking() ->
    case catch erlang:system_info(lock_checking) of
        true -> "/LC";
        _ -> ""
    end.

%% "/T<level>" when modified timing is active.
modified_timing() ->
    case catch erlang:system_info(modified_timing_level) of
        N when is_integer(N) ->
            "/T" ++ integer_to_list(N);
        _ -> ""
    end.

%% "/KP" when kernel poll is enabled.
kernel_poll() ->
    case catch erlang:system_info(kernel_poll) of
        true -> "/KP";
        _ -> ""
    end.

%% "/IOTHR" when a separate I/O thread is used.
io_thread() ->
    case catch erlang:system_info(io_thread) of
        true -> "/IOTHR";
        _ -> ""
    end.
+
%% "/<label>" from the TS_EXTRA_PLATFORM_LABEL environment variable;
%% "" when the variable is unset or empty.
extra_platform_label() ->
    case os:getenv("TS_EXTRA_PLATFORM_LABEL") of
        Label when is_list(Label), Label =/= [] ->
            "/" ++ Label;
        _ ->
            ""
    end.
+
diff --git a/lib/test_server/src/ts_lib.erl b/lib/test_server/src/ts_lib.erl
new file mode 100644
index 0000000000..082c9e0519
--- /dev/null
+++ b/lib/test_server/src/ts_lib.erl
@@ -0,0 +1,335 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ts_lib).
+
+-include_lib("kernel/include/file.hrl").
+-include("ts.hrl").
+
+-export([error/1, var/2, erlang_type/0,
+ initial_capital/1, interesting_logs/1,
+ specs/1, suites/2, last_test/1,
+ force_write_file/2, force_delete/1,
+ subst_file/3, subst/2, print_data/1,
+ maybe_atom_to_list/1, progress/4
+ ]).
+
+%% Abort processing by throwing {error, Reason}; callers higher up
+%% catch this throw.
+error(Reason) ->
+    throw({error, Reason}).
+
+%% Returns the value for a variable
+
+%% Look up Name in the installation variables (a {Key, Value} list);
+%% aborts with bad_installation if the variable is missing.
+var(Name, Vars) ->
+    case lists:keysearch(Name, 1, Vars) of
+        {value, {Name, Value}} ->
+            Value;
+        false ->
+            error({bad_installation, {undefined_var, Name, Vars}})
+    end.
+
+%% Returns the level of verbosity (0-X).
+%% A bare 'verbose' option means level 1, {verbose, Level} gives an
+%% explicit level, and anything else means 0 (silent).
+verbosity(Vars) ->
+    % Check for a single verbose option.
+    case lists:member(verbose, Vars) of
+        true ->
+            1;
+        false ->
+            case lists:keysearch(verbose, 1, Vars) of
+                {value, {verbose, Level}} ->
+                    Level;
+                _ ->
+                    0
+            end
+    end.
+
+% Displays output to the console if verbosity is equal or more
+% than Level.
+progress(Vars, Level, Format, Args) ->
+    V=verbosity(Vars),
+    if
+        V>=Level ->
+            io:format(Format, Args);
+        true ->
+            ok
+    end.
+
+%% Returns: {Type, Version} where Type is otp|clearcase|srctree.
+
+%% Classifies the running system: an installed OTP has a "releases"
+%% directory, a ClearCase view has a "system" link, anything else is
+%% treated as a plain source tree.
+erlang_type() ->
+    {_, Version} = init:script_id(),
+    RelDir = filename:join([code:root_dir(), "releases"]), % Only in installed
+    SysDir = filename:join([code:root_dir(), "system"]),   % Nonexisting link/dir outside ClearCase
+    case {filelib:is_file(RelDir),filelib:is_file(SysDir)} of
+        {true,_} -> {otp, Version};       % installed OTP
+        {_,true} -> {clearcase, Version};
+        _ -> {srctree, Version}
+    end.
+
+%% Upcases the first letter in a string; anything that does not start
+%% with a lowercase ASCII letter is returned unchanged.
+
+initial_capital([C|Rest]) when $a =< C, C =< $z ->
+    [C-$a+$A|Rest];
+initial_capital(String) ->
+    String.
+
+%% Returns a list of the "interesting logs" in a directory,
+%% i.e. those that correspond to spec files.
+
+interesting_logs(Dir) ->
+    %% Collect all *<logdir_ext> directories, then keep only those whose
+    %% base name matches a known spec, sorted in standard suite order.
+    Logs = filelib:wildcard(filename:join(Dir, [$*|?logdir_ext])),
+    Interesting =
+        case specs(Dir) of
+            [] ->
+                Logs;
+            Specs0 ->
+                Specs = ordsets:from_list(Specs0),
+                [L || L <- Logs, ordsets:is_element(filename_to_atom(L), Specs)]
+        end,
+    sort_tests(Interesting).
+
+%% All spec files (*.spec and *.dynspec) found in the sibling *_test
+%% directories, as sorted atoms.
+specs(Dir) ->
+    Specs = filelib:wildcard(filename:join([filename:dirname(Dir),
+                                            "*_test", "*.{dyn,}spec"])),
+    sort_tests([filename_to_atom(Name) || Name <- Specs]).
+
+%% All *_SUITE modules belonging to the given spec, as atoms.
+suites(Dir, Spec) ->
+    Glob=filename:join([filename:dirname(Dir), Spec++"_test",
+                        "*_SUITE.erl"]),
+    Suites=filelib:wildcard(Glob),
+    [filename_to_atom(Name) || Name <- Suites].
+
+%% "path/foo.ext" -> 'foo'.
+filename_to_atom(Name) ->
+    list_to_atom(filename:rootname(filename:basename(Name))).
+
+%% Sorts a list of either log files directories or spec files, using
+%% suite_order/1 as the primary key (usort also removes duplicates).
+
+sort_tests(Tests) ->
+    Sorted = lists:usort([{suite_order(filename_to_atom(X)), X} ||
+                             X <- Tests]),
+    [X || {_, X} <- Sorted].
+
+%% This defines the order in which tests should be run and be presented
+%% in index files.  Unknown suites sort at 200, between the listed
+%% applications and the final 'system' suite.
+
+suite_order(emulator) -> 0;
+suite_order(test_server) -> 1;
+suite_order(kernel) -> 4;
+suite_order(stdlib) -> 6;
+suite_order(compiler) -> 8;
+suite_order(hipe) -> 9;
+suite_order(erl_interface) -> 12;
+suite_order(jinterface) -> 14;
+suite_order(sasl) -> 16;
+suite_order(tools) -> 18;
+suite_order(runtime_tools) -> 19;
+suite_order(parsetools) -> 20;
+suite_order(pman) -> 21;
+suite_order(debugger) -> 22;
+suite_order(toolbar) -> 23;
+suite_order(ic) -> 24;
+suite_order(orber) -> 26;
+suite_order(inets) -> 28;
+suite_order(asn1) -> 30;
+suite_order(os_mon) -> 32;
+suite_order(snmp) -> 38;
+suite_order(mnemosyne) -> 40;
+suite_order(mnesia_session) -> 42;
+suite_order(mnesia) -> 44;
+suite_order(system) -> 999; %% IMPORTANT: system SHOULD always be last!
+suite_order(_) -> 200.
+
+%% Returns the newest run.* directory in Dir, or the atom 'false' if
+%% there is none.
+last_test(Dir) ->
+    last_test(filelib:wildcard(filename:join(Dir, "run.[1-2]*")), false).
+
+%% The accumulator starts as 'false'; the first directory name found
+%% replaces it (first clause), after which plain term comparison keeps
+%% the lexicographically greatest -- i.e. newest -- run name.
+last_test([Run|Rest], false) ->
+    last_test(Rest, Run);
+last_test([Run|Rest], Latest) when Run > Latest ->
+    last_test(Rest, Run);
+last_test([_|Rest], Latest) ->
+    last_test(Rest, Latest);
+last_test([], Latest) ->
+    Latest.
+
+%% Do the utmost to ensure that the file is written, by deleting or
+%% renaming an old file with the same name.
+
+force_write_file(Name, Contents) ->
+    force_delete(Name),
+    file:write_file(Name, Contents).
+
+%% Delete Name; if delete is forbidden (eacces, e.g. a read-only file
+%% on some systems), try to move it out of the way instead.
+force_delete(Name) ->
+    case file:delete(Name) of
+        {error, eacces} ->
+            force_rename(Name, Name ++ ".old.", 0);
+        Other ->
+            Other
+    end.
+
+%% Rename From to the first free "<To><N>" name (N = 0, 1, ...).
+force_rename(From, To, Number) ->
+    Dest = [To|integer_to_list(Number)],
+    case file:read_file_info(Dest) of
+        {ok, _} ->
+            force_rename(From, To, Number+1);
+        {error, _} ->
+            file:rename(From, Dest)
+    end.
+
+%% Substitute all occurrences of @var@ in the In file, using
+%% the list of variables in Vars, producing the output file Out.
+%% Returns: ok | {error, Reason}
+
+subst_file(In, Out, Vars) ->
+    case file:read_file(In) of
+        {ok, Bin} ->
+            Subst = subst(binary_to_list(Bin), Vars, []),
+            case file:write_file(Out, Subst) of
+                ok ->
+                    ok;
+                {error, Reason} ->
+                    {error, {file_write, Reason}}
+            end;
+        Error ->
+            Error
+    end.
+
+%% Substitute @var@ references in a string; returns the new string.
+subst(String, Vars) ->
+    subst(String, Vars, []).
+
+%% Main scan loop.  Result is a reversed accumulator of processed
+%% characters.  A variable reference starts with $@ followed by an
+%% identifier character (letter or underscore); anything else is
+%% copied through verbatim.
+subst([$@, $_|Rest], Vars, Result) ->
+    subst_var([$_|Rest], Vars, Result, []);
+subst([$@, C|Rest], Vars, Result) when $A =< C, C =< $Z ->
+    subst_var([C|Rest], Vars, Result, []);
+subst([$@, C|Rest], Vars, Result) when $a =< C, C =< $z ->
+    subst_var([C|Rest], Vars, Result, []);
+subst([C|Rest], Vars, Result) ->
+    subst(Rest, Vars, [C|Result]);
+subst([], _Vars, Result) ->
+    lists:reverse(Result).
+
+%% Scan the rest of an @var@ reference.  VarAcc accumulates the
+%% variable-name characters in reverse.  On the closing $@ the name is
+%% resolved (or a conditional is handled) via do_subst_var/5; at
+%% end-of-input the incomplete reference is flushed back verbatim.
+subst_var([$@|Rest], Vars, Result, VarAcc) ->
+    Key = list_to_atom(lists:reverse(VarAcc)),
+    {Result1,Rest1} = do_subst_var(Key, Rest, Vars, Result, VarAcc),
+    subst(Rest1, Vars, Result1);
+
+subst_var([C|Rest], Vars, Result, VarAcc) ->
+    subst_var(Rest, Vars, Result, [C|VarAcc]);
+subst_var([], Vars, Result, VarAcc) ->
+    %% End of input inside an unterminated @var reference: push the
+    %% partial name (already in reverse) onto the reversed result
+    %% accumulator so the final lists:reverse/1 in subst/3 restores
+    %% the original character order.  (The accumulator was previously
+    %% wrapped in an extra list, which defeated that reversal and
+    %% emitted the trailing "@name" fragment character-reversed.)
+    subst([], Vars, VarAcc++[$@|Result]).
+
+%% handle conditional
+%% @IFEQ@/@IFNEQ@ introduce a conditional block; evaluate the test and
+%% keep only the matching clause, leaving a "# Result of test" comment
+%% in the output.
+do_subst_var(Cond, Rest, Vars, Result, _VarAcc) when Cond == 'IFEQ' ;
+                                                     Cond == 'IFNEQ' ->
+    {Bool,Comment,Rest1} = do_test(Rest, Vars, Cond),
+    Rest2 = extract_clause(Bool, Rest1),
+    {lists:reverse(Comment, Result),Rest2};
+%% variable substitution
+%% Known variables are replaced by their value; unknown references are
+%% reproduced verbatim (including both $@ delimiters).
+do_subst_var(Key, Rest, Vars, Result, VarAcc) ->
+    case lists:keysearch(Key, 1, Vars) of
+        {value, {Key, Value}} ->
+            {lists:reverse(Value, Result),Rest};
+        false ->
+            {[$@|VarAcc++[$@|Result]],Rest}
+    end.
+
+%% check arguments in "@IF[N]EQ@ (Arg1, Arg2)" for equality
+%% Both arguments are themselves variable-expanded before comparison.
+do_test(Rest, Vars, Test) ->
+    {Arg1,Rest1} = get_arg(Rest, Vars, $,, []),
+    {Arg2,Rest2} = get_arg(Rest1, Vars, 41, []), % $)
+    Result = case Arg1 of
+                 Arg2 when Test == 'IFEQ' -> true;
+                 Arg2 when Test == 'IFNEQ' -> false;
+                 _ when Test == 'IFNEQ' -> true;
+                 _ -> false
+             end,
+    Comment = io_lib:format("# Result of test: ~s (~s, ~s) -> ~w",
+                            [atom_to_list(Test),Arg1,Arg2,Result]),
+    {Result,Comment,Rest2}.
+
+%% extract an argument
+%% Scans up to the Stop character, skipping spaces and restarting the
+%% accumulator at an opening parenthesis; the collected text is
+%% variable-expanded before being returned.
+get_arg([$ |Rest], Vars, Stop, Acc) ->
+    get_arg(Rest, Vars, Stop, Acc);
+get_arg([$(|Rest], Vars, Stop, _) ->
+    get_arg(Rest, Vars, Stop, []);
+get_arg([Stop|Rest], Vars, Stop, Acc) ->
+    Arg = lists:reverse(Acc),
+    Subst = subst(Arg, Vars),
+    {Subst,Rest};
+get_arg([C|Rest], Vars, Stop, Acc) ->
+    get_arg(Rest, Vars, Stop, [C|Acc]).
+
+%% keep only the true or false conditional clause
+extract_clause(true, Rest) ->
+    extract_clause(true, Rest, []);
+extract_clause(false, Rest) ->
+    Rest1 = discard_clause(Rest), % discard true clause
+    extract_clause(false, Rest1, []).
+
+%% The three-argument version buffers the wanted clause (in reverse)
+%% until @ELSE@ or @ENDIF@ is reached.
+%% true clause buffered, done
+extract_clause(true, [$@,$E,$L,$S,$E,$@|Rest], Acc) ->
+    Rest1 = discard_clause(Rest), % discard false clause
+    lists:reverse(Acc, Rest1);
+%% buffering of false clause starts now
+extract_clause(false, [$@,$E,$L,$S,$E,$@|Rest], _Acc) ->
+    extract_clause(false, Rest, []);
+%% true clause buffered, done
+extract_clause(true, [$@,$E,$N,$D,$I,$F,$@|Rest], Acc) ->
+    lists:reverse(Acc, Rest);
+%% false clause buffered, done
+extract_clause(false, [$@,$E,$N,$D,$I,$F,$@|Rest], Acc) ->
+    lists:reverse(Acc, Rest);
+%% keep buffering
+extract_clause(Bool, [C|Rest], Acc) ->
+    extract_clause(Bool, Rest, [C|Acc]);
+%% parse error
+extract_clause(_, [], Acc) ->
+    lists:reverse(Acc).
+
+%% Skip input up to and including the next @ELSE@ or @ENDIF@ marker.
+discard_clause([$@,$E,$L,$S,$E,$@|Rest]) ->
+    Rest;
+discard_clause([$@,$E,$N,$D,$I,$F,$@|Rest]) ->
+    Rest;
+discard_clause([_C|Rest]) ->
+    discard_clause(Rest);
+discard_clause([]) -> % parse error
+    [].
+
+
+%% Relay all output from a port to stdout until eof, then close the
+%% port and consume its 'EXIT' message (the process is presumably
+%% trapping exits; the short 'after 1' forces a context switch so the
+%% message has a chance to arrive).
+print_data(Port) ->
+    receive
+        {Port, {data, Bytes}} ->
+            io:put_chars(Bytes),
+            print_data(Port);
+        {Port, eof} ->
+            Port ! {self(), close},
+            receive
+                {Port, closed} ->
+                    true
+            end,
+            receive
+                {'EXIT', Port, _} ->
+                    ok
+            after 1 -> % force context switch
+                    ok
+            end
+    end.
+
+%% Coerce an atom to a list; lists pass through unchanged.
+maybe_atom_to_list(To_list) when is_list(To_list) ->
+    To_list;
+maybe_atom_to_list(To_list) when is_atom(To_list)->
+    atom_to_list(To_list).
+
diff --git a/lib/test_server/src/ts_make.erl b/lib/test_server/src/ts_make.erl
new file mode 100644
index 0000000000..3df66111a3
--- /dev/null
+++ b/lib/test_server/src/ts_make.erl
@@ -0,0 +1,103 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ts_make).
+
+-export([make/1,make/3,unmake/1]).
+
+-include("test_server.hrl").
+
+%% Functions to be called from make test cases.
+
+%% Test-case entry point: runs the makefile named in the Config
+%% proplist in the test's data directory; fails the test case on error.
+make(Config) when is_list(Config) ->
+    DataDir = ?config(data_dir, Config),
+    Makefile = ?config(makefile, Config),
+    Make = ?config(make_command, Config),
+    case make(Make, DataDir, Makefile) of
+        ok -> ok;
+        {error,Reason} -> ?t:fail({make_failed,Reason})
+    end.
+
+%% Cleanup counterpart of make/1; currently a no-op.
+unmake(Config) when is_list(Config) ->
+    ok.
+
+%% Runs `make' in the given directory.
+%% Result: ok | {error, Reason}
+
+%% A small wrapper script is written to Dir and run through a port;
+%% the script prints a "*ok*" or "*error*" marker on its last line,
+%% which is what get_port_data/3 hands back for inspection here.
+make(Make, Dir, Makefile) ->
+    {RunFile, RunCmd, Script} = run_make_script(os:type(), Make, Dir, Makefile),
+    case file:write_file(RunFile, Script) of
+        ok ->
+            Log = filename:join(Dir, "make.log"),
+            file:delete(Log),
+            Port = open_port({spawn, RunCmd}, [eof,stream,in,stderr_to_stdout]),
+            case get_port_data(Port, [], false) of
+                "*ok*" ++ _ -> ok;
+                "*error*" ++ _ -> {error, make};
+                Other ->{error,{make,Other}}
+            end;
+        Error -> Error
+    end.
+
+%% Echo port output line by line while remembering the current
+%% (possibly incomplete) last line; at eof return that last line so
+%% the caller can inspect the script's final "*ok*"/"*error*" marker.
+get_port_data(Port, Last0, Complete0) ->
+    receive
+        {Port,{data,Bytes}} ->
+            {Last, Complete} = update_last(Bytes, Last0, Complete0),
+            get_port_data(Port, Last, Complete);
+        {Port, eof} ->
+            Result = update_last(eof, Last0, Complete0),
+            unlink(Port),
+            exit(Port, die),
+            Result
+    end.
+
+%% Line-buffering helper.  While Complete is false, Result is the
+%% current line accumulated in reverse; once a newline is seen the
+%% (re-reversed) finished line is kept until the next character, at
+%% which point it is printed and a new line starts.  $\r is dropped
+%% (Windows line endings).  'eof' flushes and returns the last line.
+update_last([C|Rest], Line, true) ->
+    io:put_chars(Line),
+    io:nl(),
+    update_last([C|Rest], [], false);
+update_last([$\r|Rest], Result, Complete) ->
+    update_last(Rest, Result, Complete);
+update_last([$\n|Rest], Result, _Complete) ->
+    update_last(Rest, lists:reverse(Result), true);
+update_last([C|Rest], Result, Complete) ->
+    update_last(Rest, [C|Result], Complete);
+update_last([], Result, Complete) ->
+    {Result, Complete};
+update_last(eof, Result, _) ->
+    Result.
+
+%% Returns {ScriptFileName, CommandToRun, ScriptContents} for the
+%% given OS.  The script cd:s to Dir, runs make on Makefile and prints
+%% "*ok*" or "*error*" depending on the exit status, which is how the
+%% caller detects success.
+run_make_script({win32, _}, Make, Dir, Makefile) ->
+    {"run_make.bat",
+     ".\\run_make",
+     ["@echo off\r\n",
+      "cd ", filename:nativename(Dir), "\r\n",
+      Make, " -f ", Makefile, " \r\n",
+      "if errorlevel 1 echo *error*\r\n",
+      "if not errorlevel 1 echo *ok*\r\n"]};
+run_make_script({unix, _}, Make, Dir, Makefile) ->
+    {"run_make",
+     "/bin/sh ./run_make",
+     ["#!/bin/sh\n",
+      "cd ", Dir, "\n",
+      Make, " -f ", Makefile, " 2>&1\n",
+      "case $? in\n",
+      "  0) echo '*ok*';;\n",
+      "  *) echo '*error*';;\n",
+      "esac\n"]};
+run_make_script(_Other, _Make, _Dir, _Makefile) ->
+    exit(dont_know_how_to_make_script_on_this_platform).
diff --git a/lib/test_server/src/ts_reports.erl b/lib/test_server/src/ts_reports.erl
new file mode 100644
index 0000000000..b41291d342
--- /dev/null
+++ b/lib/test_server/src/ts_reports.erl
@@ -0,0 +1,543 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% Purpose : Produces reports in HTML from the outcome of test suite runs.
+
+-module(ts_reports).
+
+-export([make_index/0, make_master_index/2, make_progress_index/2]).
+-export([count_cases/1, year/0, current_time/0]).
+
+-include_lib("kernel/include/file.hrl").
+-include("ts.hrl").
+
+-import(filename, [basename/1, rootname/1]).
+-import(ts_lib, [error/1]).
+
+
+%% Make master index page which points out index pages for all platforms.
+
+make_master_index(Dir, Vars) ->
+    IndexName = filename:join(Dir, "index.html"),
+    {ok, Index0} = make_master_index1(directories(Dir), master_header(Vars)),
+    Index = [Index0|master_footer()],
+    io:put_chars("Updating " ++ IndexName ++ "... "),
+    ok = ts_lib:force_write_file(IndexName, Index),
+    io:put_chars("done\n").
+
+%% Fold over the per-platform subdirectories, appending one summary
+%% row per readable platform; directories whose variables file cannot
+%% be read, or whose index generation fails, are skipped.
+make_master_index1([Dir|Rest], Result) ->
+    NewResult =
+        case catch read_variables(Dir) of
+            {'EXIT',{{bad_installation,Reason},_}} ->
+                io:put_chars("Failed to read " ++ filename:join(Dir,?variables)++
+                             ": " ++ Reason ++ " - Ignoring this directory\n"),
+                Result;
+            Vars ->
+                Platform = ts_lib:var(platform_label, Vars),
+                case make_index(Dir, Vars, false) of
+                    {ok, Summary} ->
+                        make_master_index(Platform, Dir, Summary, Result);
+                    {error, _} ->
+                        Result
+                end
+        end,
+    make_master_index1(Rest, NewResult);
+make_master_index1([], Result) ->
+    {ok, Result}.
+
+%% Write an index.html in Dir that links every test-run subdirectory
+%% to its per-platform index pages.
+make_progress_index(Dir, Vars) ->
+    IndexName = filename:join(Dir, "index.html"),
+    io:put_chars("Updating " ++ IndexName ++ "... "),
+    Index0=progress_header(Vars),
+    ts_lib:force_delete(IndexName),
+    Dirs=find_progress_runs(Dir),
+    Index1=[Index0|make_progress_links(Dirs, [])],
+    IndexF=[Index1|progress_footer()],
+    ok = ts_lib:force_write_file(IndexName, IndexF),
+    io:put_chars("done\n").
+
+%% All subdirectories of Dir, sorted; [] if Dir cannot be listed.
+find_progress_runs(Dir) ->
+    case file:list_dir(Dir) of
+        {ok, Dirs0} ->
+            Dirs1= [filename:join(Dir,X) || X <- Dirs0,
+                                            filelib:is_dir(filename:join(Dir,X))],
+            lists:sort(Dirs1);
+        _ ->
+            []
+    end.
+
+%% Human-readable platform name from the platform's variables file;
+%% falls back to the bare directory name if it cannot be consulted.
+name_from_vars(Dir, Platform) ->
+    VarFile=filename:join([Dir, Platform, "variables"]),
+    case file:consult(VarFile) of
+        {ok, Vars} ->
+            ts_lib:var(platform_id, Vars);
+        _Other ->
+            Platform
+    end.
+
+%% Build one table row per run directory, linking the run's own index
+%% page and each per-platform index page beneath it.  Rows are
+%% accumulated in reverse (newest run directory first in the output).
+make_progress_links([], Acc) ->
+    Acc;
+make_progress_links([RDir|Rest], Acc) ->
+    Dir=filename:basename(RDir),
+    Platforms=[filename:basename(X) ||
+                  X <- find_progress_runs(RDir)],
+    PlatformLinks=["<A HREF=\""++filename:join([Dir,X,"index.html"])
+                   ++"\">"++name_from_vars(RDir, X)++"</A><BR>" ||
+                      X <- Platforms],
+    LinkName=Dir++"/index.html",
+    Link =
+        [
+         "<TR valign=top>\n",
+         "<TD><A HREF=\"", LinkName, "\">", Dir, "</A></TD>", "\n",
+         "<TD>", PlatformLinks, "</TD>", "\n",
+         %% Close the row; previously the <TR> was left unterminated,
+         %% unlike the rows built by make_master_index/4.
+         "</TR>\n"
+        ],
+    make_progress_links(Rest, [Link|Acc]).
+
+%% Consult the variables file in Dir; raises bad_installation (with a
+%% readable reason string) if it cannot be read.
+read_variables(Dir) ->
+    case file:consult(filename:join(Dir, ?variables)) of
+        {ok, Vars} -> Vars;
+        {error, Reason} ->
+            erlang:error({bad_installation,file:format_error(Reason)}, [Dir])
+    end.
+
+%% Append one master-index table row for a platform, highlighting
+%% failures in red and auto-skips in brown.
+make_master_index(Platform, Dirname, {Succ, Fail, UserSkip,AutoSkip}, Result) ->
+    Link = filename:join(filename:basename(Dirname), "index.html"),
+    FailStr =
+        if Fail > 0 ->
+                ["<FONT color=\"red\">",
+                 integer_to_list(Fail),"</FONT>"];
+           true ->
+                integer_to_list(Fail)
+        end,
+    AutoSkipStr =
+        if AutoSkip > 0 ->
+                ["<FONT color=\"brown\">",
+                 integer_to_list(AutoSkip),"</FONT>"];
+           true -> integer_to_list(AutoSkip)
+        end,
+    [Result,
+     "<TR valign=top>\n",
+     "<TD><A HREF=\"", Link, "\">", Platform, "</A></TD>", "\n",
+     make_row(integer_to_list(Succ), false),
+     make_row(FailStr, false),
+     make_row(integer_to_list(UserSkip), false),
+     make_row(AutoSkipStr, false),
+     "</TR>\n"].
+
+%% Make index page which points out individual test suites for a single platform.
+
+%% Convenience entry: build the index for the current working directory.
+make_index() ->
+    {ok, Pwd} = file:get_cwd(),
+    Vars = read_variables(Pwd),
+    make_index(Pwd, Vars, true).
+
+%% Build index.html in Dir; IncludeLast controls whether the very
+%% latest run (from the last_name file) is added to the log list.
+%% Returns {ok, Summary} or {error, Reason}; the catch turns crashes
+%% in the worker into an error return with a printed diagnostic.
+make_index(Dir, Vars, IncludeLast) ->
+    IndexName = filename:absname("index.html", Dir),
+    io:put_chars("Updating " ++ IndexName ++ "... "),
+    case catch make_index1(Dir, IndexName, Vars, IncludeLast) of
+        {'EXIT', Reason} ->
+            io:put_chars("CRASHED!\n"),
+            io:format("~p~n", [Reason]),
+            {error, Reason};
+        {error, Reason} ->
+            io:put_chars("FAILED\n"),
+            io:format("~p~n", [Reason]),
+            {error, Reason};
+        {ok, Summary} ->
+            io:put_chars("done\n"),
+            {ok, Summary};
+        Err ->
+            io:format("Unknown internal error. Please report.\n(Err: ~p, ID: 1)",
+                      [Err]),
+            {error, Err}
+    end.
+
+%% Worker for make_index/3: gather the interesting log directories,
+%% render the table and write the file.
+make_index1(Dir, IndexName, Vars, IncludeLast) ->
+    Logs0 = ts_lib:interesting_logs(Dir),
+    Logs =
+        case IncludeLast of
+            true -> add_last_name(Logs0);
+            false -> Logs0
+        end,
+    {ok, {Index0, Summary}} = make_index(Logs, header(Vars), 0, 0, 0, 0, 0),
+    Index = [Index0|footer()],
+    case ts_lib:force_write_file(IndexName, Index) of
+        ok ->
+            {ok, Summary};
+        {error, Reason} ->
+            error({index_write_error, Reason})
+    end.
+
+%% Accumulate one index row per log directory while summing the
+%% success/fail/skip/not-built totals; finish with a bold "Total" row
+%% and return the summary tuple alongside the rendered table body.
+make_index([Name|Rest], Result, TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt) ->
+    case ts_lib:last_test(Name) of
+        false ->
+            %% Silently skip.
+            make_index(Rest, Result, TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt);
+        Last ->
+            case count_cases(Last) of
+                {Succ, Fail, USkip, ASkip} ->
+                    %% Optional coverage figure, present only when the
+                    %% run produced a cover-total file.
+                    Cov =
+                        case file:read_file(filename:join(Last,?cover_total)) of
+                            {ok,Bin} ->
+                                TotCoverage = binary_to_term(Bin),
+                                io_lib:format("~w %",[TotCoverage]);
+                            _error ->
+                                ""
+                        end,
+                    Link = filename:join(basename(Name), basename(Last)),
+                    JustTheName = rootname(basename(Name)),
+                    NotBuilt = not_built(JustTheName),
+                    NewResult = [Result, make_index1(JustTheName,
+                                                     Link, Succ, Fail, USkip, ASkip,
+                                                     NotBuilt, Cov, false)],
+                    make_index(Rest, NewResult, TotSucc+Succ, TotFail+Fail,
+                               UserSkip+USkip, AutoSkip+ASkip, TotNotBuilt+NotBuilt);
+                error ->
+                    make_index(Rest, Result, TotSucc, TotFail, UserSkip, AutoSkip,
+                               TotNotBuilt)
+            end
+    end;
+make_index([], Result, TotSucc, TotFail, UserSkip, AutoSkip, TotNotBuilt) ->
+    {ok, {[Result|make_index1("Total", no_link,
+                              TotSucc, TotFail, UserSkip, AutoSkip,
+                              TotNotBuilt, "", true)],
+          {TotSucc, TotFail, UserSkip, AutoSkip}}}.
+
+%% Render a single result row.  Link is either a run directory (the
+%% suite name becomes a hyperlink, with a crash-dump link if a dump
+%% file exists) or the atom no_link (used for the bold "Total" row).
+%% Failures are shown in red and auto-skips in brown, mirroring
+%% make_master_index/4.
+make_index1(SuiteName, Link, Success, Fail, UserSkip, AutoSkip, NotBuilt, Coverage, Bold) ->
+    Name = test_suite_name(SuiteName),
+    FailStr =
+        if Fail > 0 ->
+                ["<FONT color=\"red\">",
+                 integer_to_list(Fail),"</FONT>"];
+           true ->
+                integer_to_list(Fail)
+        end,
+    AutoSkipStr =
+        if AutoSkip > 0 ->
+                ["<FONT color=\"brown\">",
+                 integer_to_list(AutoSkip),"</FONT>"];
+           true -> integer_to_list(AutoSkip)
+        end,
+    ["<TR valign=top>\n",
+     "<TD>",
+     case Link of
+         no_link ->
+             %% Close the name cell; previously this branch left the
+             %% <TD> unterminated (the linked branch below closes it).
+             ["<B>", Name, "</B>", "</TD>\n"];
+         _Other ->
+             CrashDumpName = SuiteName ++ "_erl_crash.dump",
+             CrashDumpLink =
+                 case filelib:is_file(CrashDumpName) of
+                     true ->
+                         ["&nbsp;<A HREF=\"", CrashDumpName,
+                          "\">(CrashDump)</A>"];
+                     false ->
+                         ""
+                 end,
+             LogFile = filename:join(Link, ?suitelog_name ++ ".html"),
+             ["<A HREF=\"", LogFile, "\">", Name, "</A>\n", CrashDumpLink,
+              "</TD>\n"]
+     end,
+     make_row(integer_to_list(Success), Bold),
+     make_row(FailStr, Bold),
+     make_row(integer_to_list(UserSkip), Bold),
+     make_row(AutoSkipStr, Bold),
+     make_row(integer_to_list(NotBuilt), Bold),
+     make_row(Coverage, Bold),
+     "</TR>\n"].
+
+%% One table cell, optionally bold (used for the "Total" row).
+make_row(Row, true) ->
+    ["<TD ALIGN=right><B>", Row|"</B></TD>"];
+make_row(Row, false) ->
+    ["<TD ALIGN=right>", Row|"</TD>"].
+
+%% Number of test suites in ../<BaseName>_test that have a source file
+%% but no compiled beam, i.e. suites that failed to build.
+not_built(BaseName) ->
+    Dir = filename:join("..", BaseName++"_test"),
+    Erl = length(filelib:wildcard(filename:join(Dir,"*_SUITE.erl"))),
+    Beam = length(filelib:wildcard(filename:join(Dir,"*_SUITE.beam"))),
+    Erl-Beam.
+
+
+%% Add the log file directory for the very last test run (according to
+%% last_name).
+
+add_last_name(Logs) ->
+    case file:read_file("last_name") of
+        {ok, Bin} ->
+            Name = filename:dirname(lib:nonl(binary_to_list(Bin))),
+            case lists:member(Name, Logs) of
+                true -> Logs;
+                false -> [Name|Logs]
+            end;
+        _ ->
+            Logs
+    end.
+
+%% Render a term as a flat string followed by ".\n" (consultable).
+term_to_text(Term) ->
+    lists:flatten(io_lib:format("~p.\n", [Term])).
+
+%% "kernel" -> "Kernel suite".
+test_suite_name(Name) ->
+    ts_lib:initial_capital(Name) ++ " suite".
+
+%% All subdirectories of Dir as full paths.
+directories(Dir) ->
+    {ok, Files} = file:list_dir(Dir),
+    [filename:join(Dir, X) || X <- Files,
+                              filelib:is_dir(filename:join(Dir, X))].
+
+
+%%% Headers and footers.
+
+%% Header for a single platform's index page.
+header(Vars) ->
+    Platform = ts_lib:var(platform_id, Vars),
+    ["<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
+     "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
+     "<HTML>\n",
+     "<HEAD>\n",
+     "<TITLE>Test Results for ", Platform, "</TITLE>\n",
+     "</HEAD>\n",
+
+     body_tag(),
+
+     "<!-- ---- DOCUMENT TITLE  ---- -->\n",
+
+     "<CENTER>\n",
+     "<H1>Test Results for ", Platform, "</H1>\n",
+     "</CENTER>\n",
+
+     "<!-- ---- CONTENT ---- -->\n",
+     "<CENTER>\n",
+
+     "<TABLE border=3 cellpadding=5>\n",
+     "<th><B>Family</B></th>\n",
+     "<th>Successful</th>\n",
+     "<th>Failed</th>\n",
+     "<th>User Skipped</th>\n"
+     "<th>Auto Skipped</th>\n"
+     "<th>Missing Suites</th>\n"
+     "<th>Coverage</th>\n"
+     "\n"].
+
+%% Footer shared layout: copyright, home-page link and update time.
+footer() ->
+    ["</TABLE>\n"
+     "</CENTER>\n"
+     "<P><CENTER>\n"
+     "<HR>\n"
+     "<P><FONT SIZE=-1>\n"
+     "Copyright &copy; ", year(),
+     " <A HREF=\"http://erlang.ericsson.se\">Open Telecom Platform</A><BR>\n"
+     "Updated: <!date>", current_time(), "<!/date><BR>\n"
+     "</FONT>\n"
+     "</CENTER>\n"
+     "</body>\n"
+     "</HTML>\n"].
+
+%% Header for the progress (per-release) index page.
+progress_header(Vars) ->
+    Release = ts_lib:var(erl_release, Vars),
+    ["<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
+     "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
+     "<HTML>\n",
+     "<HEAD>\n",
+     "<TITLE>", Release, " Progress Test Results</TITLE>\n",
+     "</HEAD>\n",
+
+     body_tag(),
+
+     "<!-- ---- DOCUMENT TITLE  ---- -->\n",
+
+     "<CENTER>\n",
+     "<H1>", Release, " Progress Test Results</H1>\n",
+     "<TABLE border=3 cellpadding=5>\n",
+     "<th><b>Test Run</b></th><th>Platforms</th>\n"].
+
+%% Footer for the progress index page.
+progress_footer() ->
+    ["</TABLE>\n",
+     "</CENTER>\n",
+     "<P><CENTER>\n",
+     "<HR>\n",
+     "<P><FONT SIZE=-1>\n",
+     "Copyright &copy; ", year(),
+     " <A HREF=\"http://erlang.ericsson.se\">Open Telecom Platform</A><BR>\n",
+     "Updated: <!date>", current_time(), "<!/date><BR>\n",
+     "</FONT>\n",
+     "</CENTER>\n",
+     "</body>\n",
+     "</HTML>\n"].
+
+%% Header for the master (all-platforms) index page.
+master_header(Vars) ->
+    Release = ts_lib:var(erl_release, Vars),
+    Vsn = erlang:system_info(version),
+    ["<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n"
+     "<!-- autogenerated by '"++atom_to_list(?MODULE)++"'. -->\n"
+     "<HTML>\n",
+     "<HEAD>\n",
+     "<TITLE>", Release, " Test Results (", Vsn, ")</TITLE>\n",
+     "</HEAD>\n",
+
+     body_tag(),
+
+     "<!-- ---- DOCUMENT TITLE  ---- -->\n",
+
+     "<CENTER>\n",
+     "<H1>", Release, " Test Results (", Vsn, ")</H1>\n",
+     "</CENTER>\n",
+
+     "<!-- ---- CONTENT ---- -->\n",
+
+     "<CENTER>\n",
+
+     "<TABLE border=3 cellpadding=5>\n",
+     "<th><b>Platform</b></th>\n",
+     "<th>Successful</th>\n",
+     "<th>Failed</th>\n",
+     "<th>User Skipped</th>\n"
+     "<th>Auto Skipped</th>\n"
+     "\n"].
+
+%% Footer for the master index page.
+master_footer() ->
+    ["</TABLE>\n",
+     "</CENTER>\n",
+     "<P><CENTER>\n",
+     "<HR>\n",
+     "<P><FONT SIZE=-1>\n",
+     "Copyright &copy; ", year(),
+     " <A HREF=\"http://erlang.ericsson.se\">Open Telecom Platform</A><BR>\n",
+     "Updated: <!date>", current_time(), "<!/date><BR>\n",
+     "</FONT>\n",
+     "</CENTER>\n",
+     "</body>\n",
+     "</HTML>\n"].
+
+%% Common <body> tag with the standard color scheme.
+body_tag() ->
+    "<body bgcolor=\"#FFFFFF\" text=\"#000000\" link=\"#0000FF\""
+    "vlink=\"#800080\" alink=\"#FF0000\">".
+
+%% Current year as a string (for the copyright line).
+year() ->
+    {Y, _, _} = date(),
+    integer_to_list(Y).
+
+%% Local time as e.g. "Mon Jan 5 09:03:07 2009".
+current_time() ->
+    {{Y, Mon, D}, {H, Min, S}} = calendar:local_time(),
+    Weekday = weekday(calendar:day_of_the_week(Y, Mon, D)),
+    lists:flatten(io_lib:format("~s ~s ~p ~2.2.0w:~2.2.0w:~2.2.0w ~w",
+                                [Weekday, month(Mon), D, H, Min, S, Y])).
+
+%% Day-of-week number (1 = Monday) to abbreviated name.
+weekday(1) -> "Mon";
+weekday(2) -> "Tue";
+weekday(3) -> "Wed";
+weekday(4) -> "Thu";
+weekday(5) -> "Fri";
+weekday(6) -> "Sat";
+weekday(7) -> "Sun".
+
+%% Month number to abbreviated name.
+month(1) -> "Jan";
+month(2) -> "Feb";
+month(3) -> "Mar";
+month(4) -> "Apr";
+month(5) -> "May";
+month(6) -> "Jun";
+month(7) -> "Jul";
+month(8) -> "Aug";
+month(9) -> "Sep";
+month(10) -> "Oct";
+month(11) -> "Nov";
+month(12) -> "Dec".
+
+%% Count test cases in the given directory (a directory of the type
+%% run.1997-08-04_09.58.52).
+%% Returns {Succ, Fail, UserSkip, AutoSkip} or the atom 'error'.  A
+%% cached summary file is used when present (an old three-element
+%% summary is widened with AutoSkip = 0); otherwise the suite log is
+%% parsed and the summary cached for next time.
+
+count_cases(Dir) ->
+    SumFile = filename:join(Dir, ?run_summary),
+    case read_summary(SumFile, [summary]) of
+        {ok, [{Succ,Fail,Skip}]} ->
+            {Succ,Fail,Skip,0};
+        {ok, [Summary]} ->
+            Summary;
+        {error, _} ->
+            LogFile = filename:join(Dir, ?suitelog_name),
+            case file:read_file(LogFile) of
+                {ok, Bin} ->
+                    Summary = count_cases1(binary_to_list(Bin), {0, 0, 0, 0}),
+                    write_summary(SumFile, Summary),
+                    Summary;
+                {error, _Reason} ->
+                    io:format("\nFailed to read ~p (skipped)\n", [LogFile]),
+                    error
+            end
+    end.
+
+%% Cache the summary tuple as a consultable {summary, Tuple} term.
+write_summary(Name, Summary) ->
+    File = [term_to_text({summary, Summary})],
+    ts_lib:force_write_file(Name, File).
+
+% XXX: This function doesn't do what the writer expects.  It can't handle
+% the case where there are several different keys, and a special case had
+% to be added for the empty file.  The caller also expects just one tuple
+% as a result, so this function is written far more generally than needed.
+% But it works, sort of. /kgb
+
+read_summary(Name, Keys) ->
+    case file:consult(Name) of
+        {ok, []} ->
+            {error, "Empty summary file"};
+        {ok, Terms} ->
+            {ok, lists:map(fun(Key) -> {value, {_, Value}} =
+                                           lists:keysearch(Key, 1, Terms),
+                                       Value end,
+                           Keys)};
+        {error, Reason} ->
+            {error, Reason}
+    end.
+
+%% Scan the suite log line by line, picking up the counter lines
+%% ("=failed N", "=successful N", ...).  "=skipped" is the legacy
+%% form of "=user_skipped"; later occurrences of a marker overwrite
+%% earlier ones.  All other lines are skipped.
+count_cases1("=failed" ++ Rest, {Success, _Fail, UserSkip,AutoSkip}) ->
+    {NextLine, Count} = get_number(Rest),
+    count_cases1(NextLine, {Success, Count, UserSkip,AutoSkip});
+count_cases1("=successful" ++ Rest, {_Success, Fail, UserSkip,AutoSkip}) ->
+    {NextLine, Count} = get_number(Rest),
+    count_cases1(NextLine, {Count, Fail, UserSkip,AutoSkip});
+count_cases1("=skipped" ++ Rest, {Success, Fail, _UserSkip,AutoSkip}) ->
+    {NextLine, Count} = get_number(Rest),
+    count_cases1(NextLine, {Success, Fail, Count,AutoSkip});
+count_cases1("=user_skipped" ++ Rest, {Success, Fail, _UserSkip,AutoSkip}) ->
+    {NextLine, Count} = get_number(Rest),
+    count_cases1(NextLine, {Success, Fail, Count,AutoSkip});
+count_cases1("=auto_skipped" ++ Rest, {Success, Fail, UserSkip,_AutoSkip}) ->
+    {NextLine, Count} = get_number(Rest),
+    count_cases1(NextLine, {Success, Fail, UserSkip,Count});
+count_cases1([], Counters) ->
+    Counters;
+count_cases1(Other, Counters) ->
+    count_cases1(skip_to_nl(Other), Counters).
+
+%% Parse an integer after optional spaces; crashes (function_clause)
+%% if no digit follows, which count_cases/1's caller treats as a
+%% malformed log.
+get_number([$\s|Rest]) ->
+    get_number(Rest);
+get_number([Digit|Rest]) when $0 =< Digit, Digit =< $9 ->
+    get_number(Rest, Digit-$0).
+
+%% Accumulate digits until newline; non-digit, non-newline characters
+%% after the number are ignored.  Returns {RestAfterNewline, Value}.
+get_number([Digit|Rest], Acc) when $0 =< Digit, Digit =< $9 ->
+    get_number(Rest, Acc*10+Digit-$0);
+get_number([$\n|Rest], Acc) ->
+    {Rest, Acc};
+get_number([_|Rest], Acc) ->
+    get_number(Rest, Acc).
+
+%% Drop characters up to and including the next newline.
+skip_to_nl([$\n|Rest]) ->
+    Rest;
+skip_to_nl([_|Rest]) ->
+    skip_to_nl(Rest);
+skip_to_nl([]) ->
+    [].
diff --git a/lib/test_server/src/ts_run.erl b/lib/test_server/src/ts_run.erl
new file mode 100644
index 0000000000..3461e1383c
--- /dev/null
+++ b/lib/test_server/src/ts_run.erl
@@ -0,0 +1,746 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% Purpose : Supervises running of test cases.
+
+-module(ts_run).
+
+-export([run/4]).
+
+-define(DEFAULT_MAKE_TIMETRAP_MINUTES, 60).
+-define(DEFAULT_UNMAKE_TIMETRAP_MINUTES, 15).
+
+-include("ts.hrl").
+
+-import(lists, [map/2,member/2,filter/2,reverse/1]).
+
+-record(state,
+ {file, % File given.
+ mod, % Module to run.
+ test_server_args, % Arguments to test server.
+ command, % Command to run.
+ test_dir, % Directory for test suite.
+ makefiles, % List of all makefiles.
+ makefile, % Current makefile.
+ batch, % Are we running in batch mode?
+ data_wc, % Wildcard for data dirs.
+ topcase, % Top case specification.
+ all % Set if we have all_SUITE_data
+ }).
+
+-define(tracefile,"traceinfo").
+
+%% Options is a slightly modified version of the options given to
+%% ts:run. Vars0 are from the variables file.
%% Supervise one complete test run for File.
%% Options is a slightly modified version of the options given to
%% ts:run. Vars0 are from the variables file; variables given in the
%% options take precedence. Builds a chain of hook functions (each
%% Hook(Vars, Spec, State)) and executes them in order; the final hook
%% actually starts the test node (batch or interactive).
%% Returns ok or an error term from the first failing hook.
run(File, Args0, Options, Vars0) ->
    Vars =
        case lists:keysearch(vars, 1, Options) of
            {value, {vars, Vars1}} ->
                Vars1 ++ Vars0;
            _ ->
                Vars0
        end,
    %% Batch mode only when 'batch' is given and 'interactive' is not.
    {Batch, Runner} =
        case {member(interactive, Options), member(batch, Options)} of
            {false, true} ->
                {true, fun run_batch/3};
            _ ->
                {false, fun run_interactive/3}
        end,
    HandleTopcase = case member(keep_topcase, Options) of
                        true -> [fun copy_topcase/3];
                        false -> [fun remove_original_topcase/3,
                                  fun init_topcase/3]
                    end,
    MakefileHooks = [fun make_make/3,
                     fun add_make_testcase/3],
    MakeLoop = fun(V, Sp, St) -> make_loop(MakefileHooks, V, Sp, St) end,
    Hooks = [fun init_state/3,
             fun read_spec_file/3] ++
        HandleTopcase ++
        [fun run_preinits/3,
         fun find_makefiles/3,
         MakeLoop,
         fun make_test_suite/3,
         fun add_topcase_to_spec/3,
         fun write_spec_file/3,
         fun make_command/3,
         Runner],
    Args = make_test_server_args(Args0, Options, Vars),
    St = #state{file=File, test_server_args=Args, batch=Batch},
    R = execute(Hooks, Vars, [], St),
    case Batch of
        true -> ts_reports:make_index();
        false -> ok % ts_reports:make_index() is run on the test_server node
    end,
    case R of
        {ok, _, _, _} -> ok;
        Error -> Error
    end.
+
%% Run the makefile hooks once for every makefile collected in the
%% state, threading Vars/Spec/State through; stops at the first error.
make_loop(_Hooks, Vars, Spec, #state{makefiles=[]} = St) ->
    {ok, Vars, Spec, St};
make_loop(Hooks, Vars0, Spec0, #state{makefiles=[Makefile | More]} = St0) ->
    case execute(Hooks, Vars0, Spec0, St0#state{makefile=Makefile}) of
        {ok, Vars, Spec, St} ->
            make_loop(Hooks, Vars, Spec, St#state{makefiles=More});
        {error, Reason} ->
            {error, Reason}
    end.
+
%% Apply each hook in turn, threading {Vars, Spec, St} through the
%% chain. A hook may return ok (state unchanged), {ok, Vars, Spec, St}
%% (updated state), or anything else, which aborts the chain and is
%% returned unchanged.
execute([], Vars, Spec, St) ->
    {ok, Vars, Spec, St};
execute([Hook | More], Vars, Spec, St) ->
    case Hook(Vars, Spec, St) of
        ok ->
            execute(More, Vars, Spec, St);
        {ok, NewVars, NewSpec, NewSt} ->
            execute(More, NewVars, NewSpec, NewSt);
        Error ->
            Error
    end.
+
+%%
+%% Deletes File from Files when File is on the form .../<SUITE>_data/<file>
+%% when all of <SUITE> has been skipped in Spec, i.e. there
+%% exists a {skip, {<SUITE>, _}} tuple in Spec.
+%%
%% Remove from Files every file of the form .../<SUITE>_data/<file>
%% whose whole <SUITE> is skipped in Spec, i.e. Spec contains a
%% {skip, {<SUITE>, _Comment}} entry.
del_skipped_suite_data_dir(Files, Spec) ->
    SkippedDataDirs = [atom_to_list(Suite) ++ "_data"
                       || {skip, {Suite, _Comment}} <- Spec],
    [File || File <- Files,
             not lists:member(filename:basename(filename:dirname(File)),
                              SkippedDataDirs)].
+
+%% Initialize our internal state.
+
%% Hook: initialize the internal #state{}. St0#state.file is either
%% {FileBase, Module} (run a single suite) or a bare FileBase (run all
%% *_SUITE modules). The test directory <FileBase>_test must exist as
%% a sibling of the current working directory.
init_state(Vars, [], St0) ->
    {FileBase, Wc0, Mod} =
        case St0#state.file of
            {Fil, Mod0} -> {Fil, atom_to_list(Mod0) ++ "*_data", Mod0};
            Fil -> {Fil, "*_SUITE_data", []}
        end,
    {ok, Cwd} = file:get_cwd(),
    TestDir = filename:join(filename:dirname(Cwd), FileBase ++ "_test"),
    case filelib:is_dir(TestDir) of
        true ->
            %% data_wc is the wildcard later used to find data dirs.
            Wc = filename:join(TestDir, Wc0),
            {ok, Vars, [], St0#state{file=FileBase, mod=Mod,
                                     test_dir=TestDir, data_wc=Wc}};
        false ->
            {error, {no_test_directory, TestDir}}
    end.
+
+%% Read the spec file for the test suite.
+
%% Hook: read the spec file for the test suite and make it the current
%% Spec. {error, {no_spec, File}} if the spec file is missing (posix
%% error atom); {error, {bad_spec, Str}} if it cannot be parsed.
read_spec_file(Vars, _, St) ->
    TestDir = St#state.test_dir,
    File = St#state.file,
    {SpecFile, Res} = get_spec_filename(Vars, TestDir, File),
    case Res of
        {ok, Spec} ->
            {ok, Vars, Spec, St};
        {error, Atom} when is_atom(Atom) ->
            {error, {no_spec, SpecFile}};
        {error, Reason} ->
            {error, {bad_spec, lists:flatten(file:format_error(Reason))}}
    end.
+
%% Prefer a <File>.dynspec, evaluated as an Erlang script with the
%% bindings Target, Os and TestCCompiler; otherwise consult the
%% platform-specific (or plain) .spec file. Returns
%% {SpecFileName, ResultOfReadingIt}.
get_spec_filename(Vars, TestDir, File) ->
    DynSpec = filename:join(TestDir, File ++ ".dynspec"),
    case filelib:is_file(DynSpec) of
        true ->
            Bs0 = erl_eval:new_bindings(),
            Bs1 = erl_eval:add_binding('Target', ts_lib:var(target, Vars), Bs0),
            Bs2 = erl_eval:add_binding('Os', ts_lib:var(os, Vars), Bs1),
            TCCStr = ts_lib:var(test_c_compiler, Vars),
            %% test_c_compiler holds a term in string form; fall back
            %% to 'undefined' if it does not scan/parse.
            TCC = try
                      {ok, Toks, _} = erl_scan:string(TCCStr ++ "."),
                      {ok, Tcc} = erl_parse:parse_term(Toks),
                      Tcc
                  catch
                      _:_ -> undefined
                  end,
            Bs = erl_eval:add_binding('TestCCompiler', TCC, Bs2),
            {DynSpec, file:script(DynSpec, Bs)};
        false ->
            SpecFile = get_spec_filename_1(Vars, TestDir, File),
            {SpecFile, file:consult(SpecFile)}
    end.
+
%% Pick an OS-specific spec file extension where one exists;
%% check_spec_filename/3 falls back to ".spec" if that file is absent.
get_spec_filename_1(Vars, TestDir, File) ->
    case ts_lib:var(os, Vars) of
        "VxWorks" ->
            check_spec_filename(TestDir, File, ".spec.vxworks");
        "OSE" ->
            check_spec_filename(TestDir, File, ".spec.ose");
        "Windows" ++ _ ->
            check_spec_filename(TestDir, File, ".spec.win");
        _Other ->
            filename:join(TestDir, File ++ ".spec")
    end.
+
%% Return TestDir/File ++ Ext if that file exists; otherwise fall back
%% to the default TestDir/File ++ ".spec".
check_spec_filename(TestDir, File, Ext) ->
    Candidate = filename:join(TestDir, File ++ Ext),
    case filelib:is_file(Candidate) of
        true -> Candidate;
        false -> filename:join(TestDir, File ++ ".spec")
    end.
+
+%% Remove the top case from the spec file. We will add our own
+%% top case later.
+
%% Hook: strip every {topcase,_} entry from the spec; our own top case
%% is added later. Entries of any other shape are kept.
remove_original_topcase(Vars, Spec, St) ->
    Keep = fun({topcase, _}) -> false;
              (_Other) -> true
           end,
    {ok, Vars, [Term || Term <- Spec, Keep(Term)], St}.
+
+%% Initialize our new top case. We'll keep in it the state to be
+%% able to add more to it.
+
%% Hook: build our initial top case. For a single module we run just
%% that module (or the *_SUITE modules matching its prefix); otherwise
%% all *_SUITE modules found in the test directory.
init_topcase(Vars, Spec, St) ->
    TestDir = St#state.test_dir,
    TopCase =
        case St#state.mod of
            Mod when is_atom(Mod) ->
                ModStr = atom_to_list(Mod),
                case filelib:is_file(filename:join(TestDir, ModStr ++ ".erl")) of
                    true -> [{Mod, all}];
                    false ->
                        Wc = filename:join(TestDir, ModStr ++ "*_SUITE.erl"),
                        [{list_to_atom(filename:basename(M, ".erl")), all} ||
                            M <- filelib:wildcard(Wc)]
                end;
            _Other ->
                %% Here we used to return {dir,TestDir}. Now we instead
                %% list all suites in TestDir, so we can add make testcases
                %% around it later (see add_make_testcase) without getting
                %% duplicates of the suite. (test_server_ctrl does no longer
                %% check for duplicates of testcases)
                Wc = filename:join(TestDir, "*_SUITE.erl"),
                [{list_to_atom(filename:basename(M, ".erl")), all} ||
                    M <- filelib:wildcard(Wc)]
        end,
    {ok, Vars, Spec, St#state{topcase=TopCase}}.
+
+%% Or if option keep_topcase was given, eh... keep the topcase
%% keep_topcase option: lift the existing {topcase, Tc} entry out of
%% the spec into the state, unchanged. Crashes (badmatch) if the spec
%% has no topcase entry.
copy_topcase(Vars, Spec, St) ->
    {topcase, Tc} = lists:keyfind(topcase, 1, Spec),
    {ok, Vars, lists:keydelete(topcase, 1, Spec), St#state{topcase=Tc}}.
+
+
+%% Run any "Makefile.first" files first.
+%% XXX We should fake a failing test case if the make fails.
+
%% Hook: run any "Makefile.first" files found in the (non-skipped)
%% data directories before anything else is built.
%% XXX The result of run_pre_makefiles/4 is discarded, so a failing
%% pre-make does not stop the run. We should fake a failing test case
%% if the make fails.
run_preinits(Vars, Spec, St) ->
    Wc = filename:join(St#state.data_wc, "Makefile.first"),
    run_pre_makefiles(del_skipped_suite_data_dir(filelib:wildcard(Wc), Spec),
                      Vars, Spec, St),
    {ok, Vars, Spec, St}.
+
%% Run run_pre_makefile/3 for each "Makefile.first", stopping at the
%% first error.
run_pre_makefiles([Makefile | Ms], Vars0, Spec0, St0) ->
    Hooks = [fun run_pre_makefile/3],
    case execute(Hooks, Vars0, Spec0, St0#state{makefile=Makefile}) of
        {error, _Reason} = Error -> Error;
        {ok, Vars, Spec, St} -> run_pre_makefiles(Ms, Vars, Spec, St)
    end;
run_pre_makefiles([], Vars, Spec, St) -> {ok, Vars, Spec, St}.
+
%% Run one "Makefile.first" in its data directory using the make
%% command from the variables.
run_pre_makefile(Vars, Spec, St) ->
    Makefile = St#state.makefile,
    Shortname = filename:basename(Makefile),
    DataDir = filename:dirname(Makefile),
    Make = ts_lib:var(make_command, Vars),
    case ts_make:make(Make, DataDir, Shortname) of
        ok -> {ok, Vars, Spec, St};
        {error, _Reason} = Error -> Error
    end.
+
+%% Search for `Makefile.src' in each *_SUITE_data directory.
+
%% Hook: collect every "Makefile.src" in the data directories of
%% non-skipped suites ('reverse' is imported from lists).
find_makefiles(Vars, Spec, St) ->
    Wc = filename:join(St#state.data_wc, "Makefile.src"),
    Makefiles = reverse(del_skipped_suite_data_dir(filelib:wildcard(Wc), Spec)),
    {ok, Vars, Spec, St#state{makefiles=Makefiles}}.
+
+%% Create "Makefile" from "Makefile.src".
+
%% Makefile hook: create "Makefile" from "Makefile.src" by
%% substituting the configuration variables into it; the state is
%% updated to point at the generated file.
make_make(Vars, Spec, State) ->
    Src = State#state.makefile,
    Dest = filename:rootname(Src),
    ts_lib:progress(Vars, 1, "Making ~s...\n", [Dest]),
    case ts_lib:subst_file(Src, Dest, Vars) of
        ok ->
            {ok, Vars, Spec, State#state{makefile=Dest}};
        {error, Reason} ->
            {error, {Src, Reason}}
    end.
+
+%% Add a testcase which will do the making of the stuff in the data directory.
+
%% Makefile hook: arrange for the current makefile's data directory to
%% be built. On OSE the Erlang code is compiled right here; elsewhere
%% a <suite>_make module is generated (unless one already exists) and
%% the suite's topcase entry is wrapped in {make,...} so the build
%% runs on the test node. all_SUITE_data is stored separately and
%% wrapped around the whole topcase by add_topcase_to_spec/3.
add_make_testcase(Vars, Spec, St) ->
    Makefile = St#state.makefile,
    Dir = filename:dirname(Makefile),
    case ts_lib:var(os, Vars) of
        "OSE" ->
            %% For OSE, C code in datadir must be linked in the image file,
            %% and erlang code is sent as binaries from test_server_ctrl.
            %% Making erlang code here because the Makefile.src probably
            %% won't work.
            Erl_flags = [{i, "../../test_server"} | ts_lib:var(erl_flags, Vars)],
            {ok, Cwd} = file:get_cwd(),
            ok = file:set_cwd(Dir),
            Result = (catch make:all(Erl_flags)),
            ok = file:set_cwd(Cwd),
            case Result of
                up_to_date -> {ok, Vars, Spec, St};
                _error -> {error, {erlang_make_failed, Dir}}
            end;
        _ ->
            Shortname = filename:basename(Makefile),
            Suite = filename:basename(Dir, "_data"),
            Config = [{data_dir, Dir}, {makefile, Shortname}],
            MakeModule = Suite ++ "_make",
            MakeModuleSrc = filename:join(filename:dirname(Dir),
                                          MakeModule ++ ".erl"),
            MakeMod = list_to_atom(MakeModule),
            case filelib:is_file(MakeModuleSrc) of
                true -> ok;
                false -> generate_make_module(ts_lib:var(make_command, Vars),
                                              MakeModuleSrc,
                                              MakeModule)
            end,
            case Suite of
                "all_SUITE" ->
                    {ok, Vars, Spec, St#state{all={MakeMod, Config}}};
                _ ->
                    %% Avoid duplicates of testcases. There is no longer
                    %% a check for this in test_server_ctrl.
                    TestCase = {list_to_atom(Suite), all},
                    TopCase0 = case St#state.topcase of
                                   List when is_list(List) ->
                                       List -- [TestCase];
                                   Top ->
                                       [Top] -- [TestCase]
                               end,
                    TopCase = [{make, {MakeMod, make, [Config]},
                                TestCase,
                                {MakeMod, unmake, [Config]}} | TopCase0],
                    {ok, Vars, Spec, St#state{topcase=TopCase}}
            end
    end.
+
%% Write the source of a <suite>_make module to file Name. The module
%% exports make/1 and unmake/1 testcases that run the suite's makefile
%% via ts_make under generous timetraps (the make variant also passes
%% a cross_node on this host). Crashes if the file cannot be written
%% (the original silently discarded file:write_file/2's result).
generate_make_module(MakeCmd, Name, ModuleString) ->
    {ok, Host} = inet:gethostname(),
    MakeMins = integer_to_list(?DEFAULT_MAKE_TIMETRAP_MINUTES),
    UnmakeMins = integer_to_list(?DEFAULT_UNMAKE_TIMETRAP_MINUTES),
    Src =
        ["-module(", ModuleString, ").\n",
         "\n",
         "-export([make/1,unmake/1]).\n",
         "\n",
         "make(Config) when is_list(Config) ->\n",
         "    Mins = ", MakeMins, ",\n",
         "    test_server:format(\"=== Setting timetrap to ~p minutes ===~n\", [Mins]),\n",
         "    TimeTrap = test_server:timetrap(test_server:minutes(Mins)),\n",
         "    Res = ts_make:make([{make_command, \"", MakeCmd,
         "\"},{cross_node,'ts@", Host, "'}|Config]),\n",
         "    test_server:timetrap_cancel(TimeTrap),\n",
         "    Res.\n",
         "\n",
         "unmake(Config) when is_list(Config) ->\n",
         "    Mins = ", UnmakeMins, ",\n",
         "    test_server:format(\"=== Setting timetrap to ~p minutes ===~n\", [Mins]),\n",
         "    TimeTrap = test_server:timetrap(test_server:minutes(Mins)),\n",
         "    Res = ts_make:unmake([{make_command, \"", MakeCmd, "\"}|Config]),\n",
         "    test_server:timetrap_cancel(TimeTrap),\n",
         "    Res.\n",
         "\n"],
    ok = file:write_file(Name, Src).
+
+
%% Hook: compile all test suites in the test directory.
%% Compile failures are deliberately reported on stdout but mapped to
%% ok: returning an error here would stop the whole run and keep it
%% off the top index page, whereas running with whatever suites do
%% exist makes the missing suite show up there instead.
make_test_suite(Vars, _Spec, State) ->
    TestDir = State#state.test_dir,

    Erl_flags = [{i, "../test_server"} | ts_lib:var(erl_flags, Vars)],

    %% Make sure the test_server_line module is loaded before the
    %% suites are compiled.
    case code:is_loaded(test_server_line) of
        false -> code:load_file(test_server_line);
        _ -> ok
    end,

    {ok, Cwd} = file:get_cwd(),
    ok = file:set_cwd(TestDir),
    Result = (catch make:all(Erl_flags)),
    ok = file:set_cwd(Cwd),
    case Result of
        up_to_date ->
            ok;
        {'EXIT', Reason} ->
            %% Fixed: the message previously read "{error,{make_crashed,~p}"
            %% with an unbalanced closing brace.
            io:format("~s: {error,{make_crashed,~p}}\n",
                      [State#state.file, Reason]),
            ok;
        error ->
            io:format("~s: {error,make_of_test_suite_failed}\n",
                      [State#state.file]),
            ok
    end.
+
+%% Add topcase to spec.
+
%% Hook: append the accumulated top case to the spec. If an
%% all_SUITE_data make module was recorded (St#state.all), the whole
%% top case is wrapped in its make/unmake testcases.
add_topcase_to_spec(Vars, Spec, St) ->
    Tc = case St#state.all of
             {MakeMod, Config} ->
                 [{make, {MakeMod, make, [Config]},
                   St#state.topcase,
                   {MakeMod, unmake, [Config]}}];
             undefined -> St#state.topcase
         end,
    {ok, Vars, Spec ++ [{topcase, Tc}], St}.
+
+%% Writes the (possibly transformed) spec file.
+
%% Hook: write the (possibly transformed) spec to "current.spec" in
%% the current directory, preceded by diskless/hosts settings from the
%% variables and the consolidated ts config ('map' is imported from
%% lists). Returns whatever file:write_file/2 returns.
write_spec_file(Vars, Spec, _State) ->
    F = fun(Term) -> io_lib:format("~p.~n", [Term]) end,
    SpecFile = map(F, Spec),
    Hosts =
        case lists:keysearch(hosts, 1, Vars) of
            false ->
                [];
            {value, {hosts, HostList}} ->
                io_lib:format("{hosts,~p}.~n", [HostList])
        end,
    DiskLess =
        case lists:keysearch(diskless, 1, Vars) of
            false ->
                [];
            {value, {diskless, How}} ->
                io_lib:format("{diskless, ~p}.~n", [How])
        end,
    Conf = consult_config(),
    MoreConfig = io_lib:format("~p.\n", [{config, Conf}]),
    file:write_file("current.spec", [DiskLess, Hosts, MoreConfig, SpecFile]).
+
%% Read "ts.config" plus the OS-specific override file, if any;
%% entries from the OS-specific file come first. Crashes (badmatch)
%% if "ts.config" itself cannot be read.
consult_config() ->
    {ok, Conf} = file:consult("ts.config"),
    case os:type() of
        {unix, _} -> consult_config("ts.unix.config", Conf);
        {win32, _} -> consult_config("ts.win32.config", Conf);
        vxworks -> consult_config("ts.vxworks.config", Conf);
        _ -> Conf
    end.
+
%% Prepend the terms of File (when it exists) to the base config.
%% A missing file is fine; any other read error crashes (case_clause).
consult_config(File, BaseConf) ->
    case file:consult(File) of
        {ok, Terms} -> Terms ++ BaseConf;
        {error, enoent} -> BaseConf
    end.
+
+%% Makes the command to start up the Erlang node to run the tests.
+
%% Escape every double quote in the string with a backslash, leaving
%% already-escaped quotes (backslash-quote pairs) untouched.
backslashify([]) ->
    [];
backslashify([$\\, $" | More]) ->
    [$\\, $" | backslashify(More)];
backslashify([$" | More]) ->
    [$\\, $" | backslashify(More)];
backslashify([Ch | More]) ->
    [Ch | backslashify(More)].
+
%% Hook: build the full command line that starts the Erlang node which
%% runs the tests. Honors the TS_RUN_VALGRIND environment variable
%% (runs "cerl -valgrind" instead of the normal emulator) and deletes
%% any stale crash dump from a previous run of this test first.
make_command(Vars, Spec, State) ->
    TestDir = State#state.test_dir,
    TestPath = filename:nativename(TestDir),
    Erl = case os:getenv("TS_RUN_VALGRIND") of
              false ->
                  atom_to_list(lib:progname());
              _ ->
                  %% Tag valgrind logs with the test name.
                  case State#state.file of
                      Dir when is_list(Dir) ->
                          os:putenv("VALGRIND_LOGFILE_PREFIX", Dir ++ "-");
                      _ ->
                          ok
                  end,
                  "cerl -valgrind" ++
                      case erlang:system_info(smp_support) of
                          true -> " -smp";
                          false -> ""
                      end
          end,
    Naming =
        case ts_lib:var(longnames, Vars) of
            true ->
                " -name ";
            false ->
                " -sname "
        end,
    ExtraArgs =
        case lists:keysearch(erl_start_args, 1, Vars) of
            {value, {erl_start_args, Args}} -> Args;
            false -> ""
        end,
    CrashFile = State#state.file ++ "_erl_crash.dump",
    case filelib:is_file(CrashFile) of
        true ->
            io:format("ts_run: Deleting dump: ~s\n", [CrashFile]),
            file:delete(CrashFile);
        false ->
            ok
    end,
    Cmd = [Erl, Naming, "test_server -pa ", $", TestPath, $",
           " -rsh ", ts_lib:var(rsh_name, Vars),
           " -env PATH \"",
           backslashify(lists:flatten([TestPath, path_separator(),
                                       remove_path_spaces()])),
           "\"",
           " -env ERL_CRASH_DUMP ", CrashFile,
           %% uncomment the line below to disable exception formatting
           %% " -test_server_format_exception false",
           " -boot start_sasl -sasl errlog_type error",
           " -s test_server_ctrl run_test ", State#state.test_server_args,
           " ",
           ExtraArgs],
    {ok, Vars, Spec, State#state{command=lists:flatten(Cmd)}}.
+
%% Hook (batch mode): run the test node synchronously as a port and
%% relay its output until it halts ("-noinput -s erlang halt" is
%% appended to the command).
run_batch(Vars, _Spec, State) ->
    process_flag(trap_exit, true),
    Command = State#state.command ++ " -noinput -s erlang halt",
    ts_lib:progress(Vars, 1, "Command: ~s~n", [Command]),
    Port = open_port({spawn, Command}, [stream, in, eof]),
    tricky_print_data(Port).
+
%% Relay everything the port (the test node) prints to our stdout
%% until eof. If nothing arrives for 30 s, probe epmd: if the
%% test_server node is no longer registered, give up with a warning;
%% otherwise keep waiting.
tricky_print_data(Port) ->
    receive
        {Port, {data, Bytes}} ->
            io:put_chars(Bytes),
            tricky_print_data(Port);
        {Port, eof} ->
            Port ! {self(), close},
            receive
                {Port, closed} ->
                    true
            end,
            %% Drain the 'EXIT' from the port (we trap exits), but
            %% don't hang if it never arrives.
            receive
                {'EXIT', Port, _} ->
                    ok
            after 1 -> % force context switch
                    ok
            end
    after 30000 ->
            case erl_epmd:names() of
                {ok, Names} ->
                    case is_testnode_dead(Names) of
                        true ->
                            io:put_chars("WARNING: No EOF, but "
                                         "test_server node is down!\n");
                        false ->
                            tricky_print_data(Port)
                    end;
                _ ->
                    tricky_print_data(Port)
            end
    end.
+
%% True unless the epmd name list contains a {"test_server", _} entry,
%% i.e. unless the test_server node is still registered.
is_testnode_dead(Names) ->
    lists:all(fun({"test_server", _}) -> false;
                 (_) -> true
              end,
              Names).
+
%% Hook (interactive mode): start the test node in its own console or
%% xterm; the node builds the report index itself
%% ("-s ts_reports make_index").
run_interactive(Vars, _Spec, State) ->
    Command = State#state.command ++ " -s ts_reports make_index",
    ts_lib:progress(Vars, 1, "Command: ~s~n", [Command]),
    case ts_lib:var(os, Vars) of
        "Windows 95" ->
            %% Windows 95 strikes again! We must redirect standard
            %% input and output for the `start' command, to force
            %% standard input and output to the Erlang shell to be
            %% connected to the newly started console.
            %% Without these redirections, the Erlang shell would be
            %% connected to the pipes provided by the port program
            %% and there would be an inactive console window.
            os:cmd("start < nul > nul w" ++ Command),
            ok;
        "Windows 98" ->
            os:cmd("start < nul > nul w" ++ Command),
            ok;
        "Windows" ++ _ ->
            os:cmd("start w" ++ Command),
            ok;
        _Other ->
            %% Assuming ts and controller always run on solaris
            start_xterm(Command)
    end.
+
%% Run Command inside a freshly started xterm; requires the xterm
%% executable on PATH and the DISPLAY environment variable.
start_xterm(Command) ->
    case os:find_executable("xterm") of
        false ->
            io:format("The `xterm' program was not found.\n"),
            {error, no_xterm};
        _Xterm ->
            case os:getenv("DISPLAY") of
                false ->
                    io:format("DISPLAY is not set.\n"),
                    {error, display_not_set};
                Display ->
                    io:format("Starting xterm (DISPLAY=~s)...\n",
                              [Display]),
                    os:cmd("xterm -sl 10000 -e " ++ Command ++ "&"),
                    ok
            end
    end.
+
%% The character separating entries in the PATH environment variable
%% on the current platform.
path_separator() ->
    case os:type() of
        {win32, _Flavor} -> ";";
        {unix, _Flavor} -> ":";
        vxworks -> ":"
    end.
+
+
%% Build the argument string handed to test_server_ctrl:run_test on
%% the test node: optional PARAMETERS (cross platforms), TRACE, COVER
%% and TESTCASE_CALLBACK sections are appended to Args0.
make_test_server_args(Args0, Options, Vars) ->
    Parameters =
        case ts_lib:var(os, Vars) of
            "VxWorks" ->
                F = write_parameterfile(vxworks, Vars),
                " PARAMETERS " ++ F;
            "OSE" ->
                F = write_parameterfile(ose, Vars),
                " PARAMETERS " ++ F;
            _ ->
                ""
        end,
    Trace =
        case lists:keysearch(trace, 1, Options) of
            {value, {trace, TI}} when is_tuple(TI); is_tuple(hd(TI)) ->
                %% Trace info given as a term (or list of terms):
                %% write it to a file and pass the file name.
                ok = file:write_file(?tracefile, io_lib:format("~p.~n", [TI])),
                " TRACE " ++ ?tracefile;
            {value, {trace, TIFile}} when is_atom(TIFile) ->
                " TRACE " ++ atom_to_list(TIFile);
            {value, {trace, TIFile}} ->
                " TRACE " ++ TIFile;
            false ->
                ""
        end,
    Cover =
        case lists:keysearch(cover, 1, Options) of
            {value, {cover, App, File, Analyse}} ->
                " COVER " ++ to_list(App) ++ " " ++ to_list(File) ++ " " ++
                    to_list(Analyse);
            false ->
                ""
        end,
    TCCallback =
        case ts_lib:var(ts_testcase_callback, Vars) of
            "" ->
                "";
            {Mod, Func} ->
                io:format("Function ~w:~w/4 will be called before and "
                          "after each test case.\n", [Mod, Func]),
                " TESTCASE_CALLBACK " ++ to_list(Mod) ++ " " ++ to_list(Func);
            ModFunc when is_list(ModFunc) ->
                %% Given as a "Mod Func" string.
                [Mod, Func] = string:tokens(ModFunc, " "),
                io:format("Function ~s:~s/4 will be called before and "
                          "after each test case.\n", [Mod, Func]),
                " TESTCASE_CALLBACK " ++ ModFunc;
            _ ->
                ""
        end,
    Args0 ++ Parameters ++ Trace ++ Cover ++ TCCallback.
+
%% Coerce an atom to its string form; lists pass through unchanged.
to_list(Atom) when is_atom(Atom) -> atom_to_list(Atom);
to_list(List) when is_list(List) -> List.
+
%% Write a "<type>parameters<ospid>" file holding the cross-platform
%% settings (type, target host, and optional slavetargets/master
%% entries from the variables) and return its file name.
write_parameterfile(Type, Vars) ->
    Cross_host = ts_lib:var(target_host, Vars),
    SlaveTargets = case lists:keysearch(slavetargets, 1, Vars) of
                       {value, ST} ->
                           [ST];
                       _ ->
                           []
                   end,
    Master = case lists:keysearch(master, 1, Vars) of
                 {value, M} -> [M];
                 false -> []
             end,
    ToWrite = [{type, Type},
               {target, list_to_atom(Cross_host)}] ++ SlaveTargets ++ Master,

    Crossfile = atom_to_list(Type) ++ "parameters" ++ os:getpid(),
    ok = file:write_file(Crossfile, io_lib:format("~p.~n", [ToWrite])),
    Crossfile.
+
+%%
+%% Paths and spaces handling for w2k and XP
+%%
%% Paths and spaces handling for w2k and XP: on Windows NT-family
%% systems, rewrite PATH so that no entry contains spaces (via
%% alternate short names); on all other systems PATH is returned
%% unchanged.
remove_path_spaces() ->
    Path = os:getenv("PATH"),
    case os:type() of
        {win32, nt} ->
            remove_path_spaces(Path);
        _ ->
            Path
    end.
+
%% Translate every ';'-separated PATH entry to its native short-name
%% form and join the entries back together with ';'.
remove_path_spaces(Path) ->
    SPath = split_path(Path),
    [NSHead | NSTail] = lists:map(fun(X) -> filename:nativename(
                                              filename:join(
                                                translate_path(split_one(X))))
                                  end,
                                  SPath),
    NSHead ++ lists:flatten([[$; | X] || X <- NSTail]).
+
%% Translate the components of one (split) PATH entry to their
%% alternate (short) names, component by component.
translate_path(PList) ->
    translate_path(PList, []).
+
+
%% Walk the path components left to right; for each component ask the
%% file server for its alternate (short) name, building on the
%% already-translated prefix in BaseList.
translate_path([], _) ->
    [];
translate_path([PC | T], BaseList) ->
    FullPath = filename:nativename(filename:join(BaseList ++ [PC])),
    NewPC = case catch file:altname(FullPath) of
                {ok, X} ->
                    X;
                _ ->
                    %% No alternate name available: keep the component.
                    PC
            end,
    NewBase = BaseList ++ [NewPC],
    [NewPC | translate_path(T, NewBase)].
+
%% Split a single PATH entry into its directory components.
split_one(Path) ->
    filename:split(Path).
+
%% Split a Windows PATH value into its ';'-separated entries.
split_path(Path) ->
    string:tokens(Path, ";").
+
+
diff --git a/lib/test_server/src/ts_selftest.erl b/lib/test_server/src/ts_selftest.erl
new file mode 100644
index 0000000000..655aa4bab3
--- /dev/null
+++ b/lib/test_server/src/ts_selftest.erl
@@ -0,0 +1,120 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ts_selftest).
+-export([selftest/0]).
+
%% Run the test server's own test suite. The node must be started
%% distributed; exits with {error, node_not_distributed} otherwise.
%% Fixed: unused-variable warning ('Other') and the user-facing
%% message typo "availiable" / missing newline.
selftest() ->
    case node() of
        nonode@nohost ->
            io:format("Sorry, you have to start this node distributed.~n"),
            exit({error, node_not_distributed});
        _ ->
            ok
    end,
    case catch ts:tests(test_server) of
        {'EXIT', _} ->
            %% ts has not been configured here; nothing to run.
            io:format("Test Server self test not available.~n");
        _Tests ->
            selftest1()
    end.
+
%% Run the test server test suite six ways - whole spec, one module
%% and one testcase, each in batch and in interactive mode - checking
%% the result counts after every run. Crashes (badmatch) on the first
%% run that is not fully successful.
selftest1() ->
    %% Batch starts.
    io:format("Selftest #1: Whole spec, batch mode:~n"),
    io:format("------------------------------------~n"),
    ts:run(test_server, [batch]),
    ok = check_result(1, "test_server.logs", 2),

    io:format("Selftest #2: One module, batch mode:~n"),
    io:format("------------------------------------~n"),
    ts:run(test_server, test_server_SUITE, [batch]),
    ok = check_result(2, "test_server_SUITE.logs", 2),

    io:format("Selftest #3: One testcase, batch mode:~n"),
    io:format("--------------------------------------~n"),
    ts:run(test_server, test_server_SUITE, msgs, [batch]),
    ok = check_result(3, "test_server_SUITE.logs", 0),

    %% Interactive starts: wait for the test server node to finish
    %% before inspecting the logs.
    io:format("Selftest #4: Whole spec, interactive mode:~n"),
    io:format("------------------------------------------~n"),
    ts:run(test_server),
    kill_test_server(),
    ok = check_result(4, "test_server.logs", 2),

    io:format("Selftest #5: One module, interactive mode:~n"),
    io:format("------------------------------------------~n"),
    ts:run(test_server, test_server_SUITE),
    kill_test_server(),
    ok = check_result(5, "test_server_SUITE.logs", 2),

    io:format("Selftest #6: One testcase, interactive mode:~n"),
    io:format("--------------------------------------------~n"),
    ts:run(test_server, test_server_SUITE, msgs),
    kill_test_server(),
    ok = check_result(6, "test_server_SUITE.logs", 0),

    ok.
+
%% Check the outcome of selftest #Test: the most recent run under TDir
%% must have zero failed cases and exactly ExpSkip skipped cases.
%% Returns ok or error.
check_result(Test, TDir, ExpSkip) ->
    Dir = ts_lib:last_test(TDir),
    {Total, Failed, Skipped} = ts_reports:count_cases(Dir),
    io:format("Selftest #~p:", [Test]),
    case {Total, Failed, Skipped} of
        {_, 0, ExpSkip} -> % Exactly ExpSkip cases should be skipped.
            io:format("All ok.~n~n"),
            ok;
        {_, _, _} ->
            io:format("Not completely successful.~n~n"),
            error
    end.
+
+
+%% Wait for test server to get started.
%% Wait for the test server to get started (test_server_ctrl becomes
%% registered), then wait for it to finish via kill_test_server/2.
%% NOTE(review): busy-loops with a ping per turn; never returns if
%% the test server never starts.
kill_test_server() ->
    Node = list_to_atom("test_server@" ++ atom_to_list(hostname())),
    net_adm:ping(Node),
    case whereis(test_server_ctrl) of
        undefined ->
            kill_test_server();
        Pid ->
            kill_test_server(0, Pid)
    end.
+
+%% Wait for test server to finish.
%% Wait (polling once per second, at most 30 times) for
%% test_server_ctrl to terminate, then stop the slave node it ran on.
%% Exits with reason test_server_is_dead if it is still running after
%% 30 polls. Fixed: unused-variable warning in the first clause.
kill_test_server(30, _Pid) ->
    exit(self(), test_server_is_dead);
kill_test_server(Num, Pid) ->
    case whereis(test_server_ctrl) of
        undefined ->
            %% The controller is gone; take down its node.
            slave:stop(node(Pid));
        Pid ->
            receive
            after
                1000 ->
                    kill_test_server(Num + 1, Pid)
            end
    end.
+
+
%% The host part of this node's name, as an atom.
hostname() ->
    list_to_atom(from($@, atom_to_list(node()))).
%% The part of the list after the first occurrence of H, or [] when H
%% does not occur. Fixed: unused-variable warning in the last clause.
from(H, [H | T]) -> T;
from(H, [_ | T]) -> from(H, T);
from(_H, []) -> [].
diff --git a/lib/test_server/src/vxworks_client.erl b/lib/test_server/src/vxworks_client.erl
new file mode 100644
index 0000000000..ca65eca02a
--- /dev/null
+++ b/lib/test_server/src/vxworks_client.erl
@@ -0,0 +1,243 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(vxworks_client).
+
+-export([open/1, close/1, send_data/2, send_data/3, send_data_wait_for_close/2, reboot/1]).
+-export([init/2]).
+
+-include("ts.hrl").
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% This is a client talking to a test server daemon on a VxWorks card.
+%%%
+%%% User interface:
+%%%
+%%% open/1
+%%% Start a client and establish the connection with the test server daemon
+%%%
+%%% send_data/2
+%%% Send data/command to the test server daemon, don't wait for any return
+%%%
+%%% send_data/3
+%%% Send data/command to the test server daemon and wait for the given
+%%% return value.
+%%%
+%%% send_data_wait_for_close/2
+%%% Send data/command to the test server daemon and wait for the daemon to
+%%% close the connection.
+%%%
+%%% close/1
+%%% Close the client.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+%%
+%% User interface
+%%
+
%% Reboot the VxWorks card named Target by sending the "q" command to
%% its ts daemon and waiting for the daemon to close the connection.
%% Exits with {stop, no_ts_daemon_contact} if the daemon cannot be
%% reached.
reboot(Target) ->
    {ok, {_,_,_,_,_,[Addr|_]}} = inet:gethostbyname(Target),
    Fun = fun({ok, Socket}) ->
                  gen_tcp:send(Socket, "q\n"),
                  receive
                      {tcp_closed, Socket} ->
                          gen_tcp:close(Socket),
                          {ok, socket_closed}
                  after 5000 ->
                          exit({timeout, tryagain})
                  end
          end,
    io:format("Stopping (rebooting) ~p ", [Target]),
    case fun_target(Addr, Fun) of
        {ok, socket_closed} ->
            ok;
        _Else ->
            io:format("No contact with ts daemon - exiting ...~n"),
            exit({stop, no_ts_daemon_contact})
    end.
+
+
+%% open(Target) -> {ok,Client} | {error, Reason}
%% open(Target) -> {ok,Client} | {error, Reason}
%% Connect to the ts daemon on Target and spawn a client process that
%% takes ownership of the socket. The returned pid is the handle used
%% by the other functions in this module.
open(Target) ->
    {ok, {_,_,_,_,_,[Addr|_]}} = inet:gethostbyname(Target),
    Fun = fun({ok, Socket}) ->
                  P = spawn(?MODULE, init, [Target, Socket]),
                  %% NOTE(review): inet_tcp is an internal module;
                  %% gen_tcp:controlling_process/2 is the documented call.
                  inet_tcp:controlling_process(Socket, P),
                  {ok, P}
          end,
    case fun_target(Addr, Fun) of
        {ok, Pid} ->
            {ok, Pid};
        {error, Reason} ->
            {error, Reason}
    end.
+
+%% send_data(Client,Data) -> ok
%% send_data(Client, Data) -> ok
%% Fire-and-forget: hand the newline-terminated Data to the client
%% process for forwarding to the daemon.
send_data(Client, Data) ->
    Client ! {send_data, Data ++ "\n"},
    ok.
+
+%% send_data(Client,Data,ExpectedReturn) -> {ok,ExpectedReturn} | {error,Reason}
%% send_data(Client, Data, ExpectedReturn) -> Result
%% Send Data and block until the client process reports back with a
%% {ClientPid, Result} message; Result is returned to the caller.
send_data(Client, Data, Return) ->
    Client ! {send_data, Data ++ "\n", Return, self()},
    receive
        {Client, Result} -> Result
    end.
+
+%% send_data_wait_for_close(Client,Data) -> ok | {error,Reason}
%% send_data_wait_for_close(Client,Data) -> ok | {error,Reason}
%% Send Data and wait until the daemon closes the connection.
send_data_wait_for_close(Pid, Data) ->
    send_data(Pid, Data, tcp_closed).
+
+%% close(Client) -> ok
%% close(Client) -> ok
%% Ask the client process to close its daemon connection and exit.
close(Client) ->
    Client ! close,
    ok.
+
+
+%%
+%% Internal
+%%
+
%% Entry point of the client process spawned by open/1; traps exits
%% and enters the main receive loop.
init(Target, Socket) ->
    process_flag(trap_exit, true),
    loop(Target, Socket).
+
%% Main receive loop of the client process: forwards send_data
%% requests to the daemon socket and prints whatever the daemon sends
%% back. BUG FIX: the plain {send_data, Data} clause recursed as
%% loop(Socket, Target) - arguments swapped - corrupting the state for
%% every subsequent message.
loop(Target, Socket) ->
    receive
        {send_data, Data} ->
            gen_tcp:send(Socket, Data),
            loop(Target, Socket);
        {send_data, Data, tcp_closed, From} ->
            %% Caller waits for the daemon to close the connection.
            gen_tcp:send(Socket, Data),
            receive
                {tcp_closed, Socket} ->
                    From ! {self(), ok}
            after 5000 ->
                    From ! {self(), {error, timeout}}
            end,
            closed(Socket, normal);
        {send_data, Data, Return, From} ->
            %% Caller waits for a specific Return string in the reply.
            gen_tcp:send(Socket, Data),
            case receive_line(Socket, [], Return, 200) of
                {tcp_closed, Socket} ->
                    From ! {self(), {error, {socket_closed, Target}}},
                    closed(Socket, {socket_closed, Target});
                {tcp, Socket, _Rest} ->
                    From ! {self(), {ok, Data}},
                    got_data(Target, Socket, Data);
                error ->
                    %% No recognizable answer: report and fall out of
                    %% the loop (the process terminates normally).
                    From ! {self(), {error, {catatonic, Target}}}
            end;
        close ->
            closed(Socket, normal);
        {tcp_closed, Socket} ->
            closed(Socket, {socket_closed, Target});
        {tcp, Socket, Data} ->
            got_data(Target, Socket, Data)
    end.
+
+
+
%% Close the daemon socket and terminate the client process with
%% Reason.
closed(Socket, Reason) ->
    gen_tcp:close(Socket),
    exit(Reason).
+
%% Print incoming daemon data tagged with the target name (atoms with
%% ~w, strings with ~s), carriage returns stripped, then continue the
%% main loop.
got_data(Target, Socket, Data) ->
    Fmt = case is_atom(Target) of
              true -> "~w: ~s";
              false -> "~s: ~s"
          end,
    io:format(Fmt, [Target, uncr(Data)]),
    loop(Target, Socket).
+
%% Strip every carriage return from the string.
uncr(Chars) ->
    [C || C <- Chars, C =/= $\r].
+
%% Keep only what follows the last newline in Line; a string without
%% any newline is returned unchanged.
strip_line(Line) ->
    lists:reverse(
      lists:takewhile(fun(C) -> C =/= $\n end, lists:reverse(Line))).
+
%% A line break arrived: if the expected Match string occurs in the
%% accumulated data we are done and return what follows the last
%% newline; otherwise keep receiving with that remainder as the new
%% accumulator.
maybe_done_receive(Socket, Ack, Match, C) ->
    case string:str(Ack, Match) of
        0 ->
            receive_line(Socket, strip_line(Ack), Match, C);
        _ ->
            {tcp, Socket, strip_line(Ack)}
    end.
+
+
%% Accumulate tcp data for Socket until a CR or LF shows up (or the
%% socket closes), then let maybe_done_receive/4 check for the
%% expected Match string. Counter bounds the number of line breaks
%% inspected; exhausting it, like 20 s of silence, yields 'error'.
receive_line(_Socket, _Ack, _Match, 0) ->
    error;
receive_line(Socket, Ack, Match, Counter) ->
    receive
        {tcp_closed, Socket} ->
            {tcp_closed, Socket};
        {tcp, Socket, Data} ->
            NewAck = Ack ++ Data,
            case {string:str(NewAck, "\r") > 0,
                  string:str(NewAck, "\n") > 0} of
                {true, _} ->
                    maybe_done_receive(Socket, NewAck, Match, Counter-1);
                {_, true} ->
                    maybe_done_receive(Socket, NewAck, Match, Counter-1);
                _ ->
                    receive_line(Socket, NewAck, Match, Counter)
            end
    after 20000 ->
            error
    end.
+
+
+%% Misc functions
%% Connect to the ts daemon at Addr and apply Fun to {ok, Socket},
%% retrying for a long while with progress dots in between.
fun_target(Addr, Fun) ->
    io:format("["),
    fun_target(Addr, Fun, 60). % Vx-cards need plenty of time.
+
%% Try do_connect/2 up to Tries_left times, 10 s apart, printing one
%% progress dot per failed attempt. {error, failed_to_connect} once
%% the attempts are exhausted.
fun_target(_Addr, _Fun, 0) ->
    io:format(" no contact with ts daemon]~n"),
    {error, failed_to_connect};
fun_target(Addr, Fun, Tries_left) ->
    receive after 1 -> ok end,
    case do_connect(Addr, Fun) of
        {ok, Value} ->
            io:format(" ok]~n"),
            {ok, Value};
        _Error -> % typical {error, econnrefused}
            io:format("."),
            receive after 10000 -> ok end,
            fun_target(Addr, Fun, Tries_left-1)
    end.
+
%% Open a TCP connection to the ts daemon port (?TS_PORT) at Addr and,
%% on success, hand the socket to Fun; connection errors are returned
%% unchanged.
do_connect(Addr, Fun) ->
    case gen_tcp:connect(Addr, ?TS_PORT, [{reuseaddr, true}], 60000) of
        {ok, Socket} ->
            Fun({ok, Socket});
        Error ->
            Error
    end.
+
+
+
diff --git a/lib/test_server/vsn.mk b/lib/test_server/vsn.mk
new file mode 100644
index 0000000000..8e02fde8bc
--- /dev/null
+++ b/lib/test_server/vsn.mk
@@ -0,0 +1,2 @@
+TEST_SERVER_VSN = 3.3.5
+