Diffstat (limited to 'lib/tools')
31 files changed, 1727 insertions, 837 deletions
diff --git a/lib/tools/c_src/Makefile.in b/lib/tools/c_src/Makefile.in index e6b76e2238..65a7f5f424 100644 --- a/lib/tools/c_src/Makefile.in +++ b/lib/tools/c_src/Makefile.in @@ -128,12 +128,13 @@ EMEM_ERTS_LIB=erts_r$(TYPEMARKER) endif +EMEM_ETHR_LIBS=$(subst -l$(ETHR_LIB_NAME),-l$(ETHR_LIB_NAME)$(TYPEMARKER),$(subst -lerts_internal_r,-lerts_internal_r$(TYPEMARKER),$(ETHR_LIBS))) + EMEM_LIBS = $(LIBS) \ -L$(ERL_TOP)/erts/lib/$(TARGET) \ -L$(ERL_TOP)/erts/lib/internal/$(TARGET) \ -l$(EMEM_ERTS_LIB) \ - -l$(ETHR_LIB_NAME)$(TYPEMARKER) \ - $(ETHR_X_LIBS) + $(EMEM_ETHR_LIBS) EMEM_OBJS = $(addprefix $(EMEM_OBJ_DIR)/,$(notdir $(EMEM_SRCS:.c=.o))) diff --git a/lib/tools/c_src/erl_memory.c b/lib/tools/c_src/erl_memory.c index a0e139f059..872d55e789 100644 --- a/lib/tools/c_src/erl_memory.c +++ b/lib/tools/c_src/erl_memory.c @@ -1,22 +1,22 @@ -/* ``The contents of this file are subject to the Erlang Public License, +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2003-2010. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be - * retrieved via the world wide web at http://www.erlang.org/. - * + * retrieved online at http://www.erlang.org/. + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * - * The Initial Developer of the Original Code is Ericsson Utvecklings AB. - * Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings - * AB. All Rights Reserved.'' - * - * $Id$ + * + * %CopyrightEnd% */ - /* * Description: * @@ -281,17 +281,13 @@ mutex_destroy(ethr_mutex *mtx) static INLINE void mutex_lock(ethr_mutex *mtx) { - int res = ethr_mutex_lock(mtx); - if (res) - error_msg(res, "Mutex lock"); + ethr_mutex_lock(mtx); } static INLINE void mutex_unlock(ethr_mutex *mtx) { - int res = ethr_mutex_unlock(mtx); - if (res) - error_msg(res, "Mutex unlock"); + ethr_mutex_unlock(mtx); } static INLINE void @@ -314,16 +310,14 @@ static INLINE void cond_wait(ethr_cond *cnd, ethr_mutex *mtx) { int res = ethr_cond_wait(cnd, mtx); - if (res) + if (res != 0 && res != EINTR) error_msg(res, "Cond wait"); } static INLINE void cond_signal(ethr_cond *cnd) { - int res = ethr_cond_signal(cnd); - if (res) - error_msg(res, "Cond signal"); + ethr_cond_signal(cnd); } @@ -2774,7 +2768,7 @@ main(int argc, char *argv[]) exit(1); } - if (ethr_init(NULL) != 0) { + if (ethr_init(NULL) != 0 || ethr_late_init(NULL) != 0) { fprintf(stderr, "emem: failed to initialize thread package\n"); exit(1); } diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml index 323bd0dda8..0a3302bda5 100644 --- a/lib/tools/doc/src/cover.xml +++ b/lib/tools/doc/src/cover.xml @@ -270,6 +270,8 @@ defaults to <c>function</c>.</p> <p>If <c>Module</c> is not Cover compiled, the function returns <c>{error,{not_cover_compiled,Module}}</c>.</p> + <p>HINT: It is possible to issue multiple analyse_to_file commands at + the same time. </p> </desc> </func> <func> @@ -307,6 +309,33 @@ <c>.beam</c> file, or in <c>../src</c> relative to that directory. 
If no source code is found, <c>,{error,no_source_code_found}</c> is returned.</p> + <p>HINT: It is possible to issue multiple analyse_to_file commands at + the same time. </p> + </desc> + </func> + <func> + <name>async_analyse_to_file(Module) -> </name> + <name>async_analyse_to_file(Module,Options) -> </name> + <name>async_analyse_to_file(Module, OutFile) -> </name> + <name>async_analyse_to_file(Module, OutFile, Options) -> pid()</name> + <fsummary>Asynchronous call to analyse_to_file.</fsummary> + <type> + <v>Module = atom()</v> + <v>OutFile = string()</v> + <v>Options = [Option]</v> + <v>Option = html</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v> File = string()</v> + <v> Reason = term()</v> + </type> + <desc> + <p>This function works exactly the same way as + <seealso marker="#analyse_to_file-1">analyse_to_file</seealso> except + that it is asynchronous instead of synchronous. The spawned process + will link with the caller when created. If an <c>Error</c> occurs + while doing the cover analysis the process will crash with the same + error reason as <seealso marker="#analyse_to_file-1">analyse_to_file</seealso> + would return.</p> </desc> </func> <func> diff --git a/lib/tools/doc/src/cover_chapter.xml b/lib/tools/doc/src/cover_chapter.xml index b4f7919183..92a790c34e 100644 --- a/lib/tools/doc/src/cover_chapter.xml +++ b/lib/tools/doc/src/cover_chapter.xml @@ -403,6 +403,13 @@ ok database contains information about each executable line in each Cover compiled module, performance decreases proportionally to the size and number of the Cover compiled modules.</p> + <p>To improve performance when analysing cover results it is possible + to do multiple calls to <seealso marker="cover#analyse-1">analyse</seealso> + and <seealso marker="cover#analyse_to_file-1">analyse_to_file</seealso> + at once. You can also use the + <seealso marker="cover#async_analyse_to_file-1">async_analyse_to_file</seealso> + convenience function. + </p> </section> <section> diff --git a/lib/tools/doc/src/eprof.xml b/lib/tools/doc/src/eprof.xml index ae1033f2d0..6d68c90768 100644 --- a/lib/tools/doc/src/eprof.xml +++ b/lib/tools/doc/src/eprof.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2009</year> + <year>1996</year><year>2010</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -13,12 +13,12 @@ compliance with the License. You should have received a copy of the Erlang Public License along with this software. If not, it can be retrieved online at http://www.erlang.org/. - + Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License. - + </legalnotice> <title>eprof</title> @@ -35,8 +35,7 @@ used. The profiling is done using the Erlang trace BIFs. 
Tracing of local function calls for a specified set of processes is enabled when profiling is begun, and disabled when profiling is stopped.</p> - <p>When using Eprof, expect a significant slowdown in program execution, - in most cases at least 100 percent.</p> + <p>When using Eprof expect a slowdown in program execution.</p> </description> <funcs> <func> @@ -47,15 +46,19 @@ <v>Reason = {already_started,Pid}</v> </type> <desc> - <p>Starts the Eprof server which owns the Eprof internal database.</p> + <p>Starts the Eprof server which holds the internal state of the collected data.</p> </desc> </func> <func> - <name>start_profiling(Rootset) -> profiling | error</name> - <name>profile(Rootset) -> profiling | error</name> + <name>start_profiling(Rootset) -> profiling | {error, Reason}</name> + <name>start_profiling(Rootset,Pattern) -> profiling | {error, Reason}</name> <fsummary>Start profiling.</fsummary> <type> <v>Rootset = [atom() | pid()]</v> + <v>Pattern = {Module, Function, Arity}</v> + <v>Module = Function = atom()</v> + <v>Arity = integer()</v> + <v>Reason = term()</v> </type> <desc> <p>Starts profiling for the processes in <c>Rootset</c> (and any new @@ -64,6 +67,9 @@ <p><c>Rootset</c> is a list of pids and registered names.</p> <p>The function returns <c>profiling</c> if tracing could be enabled for all processes in <c>Rootset</c>, or <c>error</c> otherwise.</p> + <p>A pattern can be selected to narrow the profiling. For instance ca a specific + module be selected and only the code processes executes in that module will be + profiled.</p> </desc> </func> <func> @@ -75,14 +81,20 @@ </desc> </func> <func> - <name>profile(Rootset,Fun) -> {ok,Value} | {error,Reason} | error</name> - <name>profile(Rootset,Module,Function,Args) -> {ok,Value} | {error,Reason} | error</name> + <name>profile(Fun) -> profiling | {error, Reason}</name> + <name>profile(Rootset) -> profiling | {error, Reason}</name> + <name>profile(Rootset,Fun) -> {ok, Value} | {error,Reason}</name> + <name>profile(Rootset,Fun,Pattern) -> {ok, Value} | {error, Reason}</name> + <name>profile(Rootset,Module,Function,Args) -> {ok, Value} | {error, Reason}</name> + <name>profile(Rootset,Module,Function,Args,Pattern) -> {ok, Value} | {error, Reason}</name> <fsummary>Start profiling.</fsummary> <type> <v>Rootset = [atom() | pid()]</v> <v>Fun = fun() -> term()</v> + <v>Pattern = {Module, Function, Arity}</v> <v>Module = Function = atom()</v> <v>Args = [term()]</v> + <v>Arity = integer()</v> <v>Value = Reason = term()</v> </type> <desc> @@ -96,7 +108,7 @@ <c>Rootset</c>, the function returns <c>{ok,Value}</c> when <c>Fun()</c>/<c>apply</c> returns with the value <c>Value</c>, or <c>{error,Reason}</c> if <c>Fun()</c>/<c>apply</c> fails with - exit reason <c>Reason</c>. Otherwise it returns <c>error</c> + exit reason <c>Reason</c>. 
Otherwise it returns <c>{error, Reason}</c> immediately.</p> <p>The programmer must ensure that the function given as argument is truly synchronous and that no work continues after @@ -104,7 +116,15 @@ </desc> </func> <func> - <name>analyse()</name> + <name>analyze() -> ok</name> + <name>analyze(Type) -> ok</name> + <name>analyze(Type,Options) -> ok</name> + <type> + <v>Type = procs | total</v> + <v>Options = [{filter, Filter} | {sort, Sort}</v> + <v>Filter = [{calls, integer()} | {time, float()}]</v> + <v>Sort = time | calls | mfa</v> + </type> <fsummary>Display profiling results per process.</fsummary> <desc> <p>Call this function when profiling has been stopped to display @@ -113,17 +133,10 @@ <item>how much time has been used by each process, and</item> <item>in which function calls this time has been spent.</item> </list> - <p>Time is shown as percentage of total time, not as absolute time.</p> - </desc> - </func> - <func> - <name>total_analyse()</name> - <fsummary>Display profiling results per function call.</fsummary> - <desc> - <p>Call this function when profiling has been stopped to display + <p>Call <c>analyze</c> with <c>total</c> option when profiling has been stopped to display the results per function call, that is in which function calls the time has been spent.</p> - <p>Time is shown as percentage of total time, not as absolute time.</p> + <p>Time is shown as percentage of total time and as absolute time.</p> </desc> </func> <func> diff --git a/lib/tools/doc/src/erlang_mode.xml b/lib/tools/doc/src/erlang_mode.xml index 912c442153..c21afc1f9b 100644 --- a/lib/tools/doc/src/erlang_mode.xml +++ b/lib/tools/doc/src/erlang_mode.xml @@ -173,7 +173,7 @@ sum(L) -> sum(L, 0). sum([H|T], Sum) -> sum(T, Sum + H); - sum([], Sum) -> Sum."</code> + sum([], Sum) -> Sum.</code> </item> </list> </section> diff --git a/lib/tools/doc/src/erlang_mode_chapter.xml b/lib/tools/doc/src/erlang_mode_chapter.xml index b22c6b1809..8aabd6ae74 100644 --- a/lib/tools/doc/src/erlang_mode_chapter.xml +++ b/lib/tools/doc/src/erlang_mode_chapter.xml @@ -45,7 +45,7 @@ <section> <title>Elisp</title> - <p>There are two Elsip modules include in this tool package + <p>There are two Elisp modules included in this tool package for Emacs. There is erlang.el that defines the actual erlang mode and there is erlang-start.el that makes some nice initializations.</p> </section> diff --git a/lib/tools/doc/src/notes.xml b/lib/tools/doc/src/notes.xml index e7d1ae150c..5f9cecd6e4 100644 --- a/lib/tools/doc/src/notes.xml +++ b/lib/tools/doc/src/notes.xml @@ -30,6 +30,112 @@ </header> <p>This document describes the changes made to the Tools application.</p> +<section><title>Tools 2.6.6.2</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p>eprof: API sort mismatch has now been fixed. </p> + <p> + Own Id: OTP-8853</p> + </item> + <item> + <p> + eprof: fix division by zero in statistics</p> + <p> + Own Id: OTP-8963</p> + </item> + </list> + </section> + +</section> + +<section><title>Tools 2.6.6.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + <c>cover</c> will now show ampersand characters in the + source code correctly. 
(Thanks to Tom Moertel.)</p> + <p> + Own Id: OTP-8776</p> + </item> + </list> + </section> + +</section> + +<section><title>Tools 2.6.6</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p>A race condition affecting Cover has been removed.</p> + <p> + Own Id: OTP-8469</p> + </item> + <item> + <p> + Emacs improvements:</p> + <p> + Fixed emacs-mode installation problems.</p> + <p> + Fixed a couple of -spec and -type indentation and + font-lock problems.</p> + <p> + Fixed error messages on emacs-21.</p> + <p> + Magnus Henoch fixed several issues.</p> + <p> + Ralf Doering, Klas Johansson and Chris Bernard + contributed various emacs-eunit improvements.</p> + <p> + Klas Johansson and Dave Peticolas added emacs-flymake + support.</p> + <p> + Own Id: OTP-8530</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p>Xref has been updated to use the <c>re</c> module + instead of the deprecated <c>regexp</c> module.</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-8472</p> + </item> + <item> + <p>When given the option <c>{builtins,true}</c> Xref now + adds calls to operators.</p> + <p> + Own Id: OTP-8647</p> + </item> + <item> + <p><c>eprof</c> has been reimplemented with support in + the Erlang virtual machine and is now both faster (i.e. + slows down the code being measured less) and scales much + better. In measurements we saw speed-ups compared to the + old eprof ranging from 6 times (for sequential code that + only uses one scheduler/core) up to 84 times (for + parallel code that uses 8 cores).</p> + <p>Note: The API for the <c>eprof</c> has been cleaned up + and extended. See the documentation.</p> + <p> + *** POTENTIAL INCOMPATIBILITY ***</p> + <p> + Own Id: OTP-8706</p> + </item> + </list> + </section> + +</section> + <section><title>Tools 2.6.5.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/tools/emacs/erlang-eunit.el b/lib/tools/emacs/erlang-eunit.el index 970afe2e9f..f2c0db67dd 100644 --- a/lib/tools/emacs/erlang-eunit.el +++ b/lib/tools/emacs/erlang-eunit.el @@ -23,8 +23,22 @@ (eval-when-compile (require 'cl)) -(defvar erlang-eunit-separate-src-and-test-directories t - "*Whether or not to keep source and EUnit test files in separate directories") +(defvar erlang-eunit-src-candidate-dirs '("../src" ".") + "*Name of directories which to search for source files matching +an EUnit test file. The first directory in the list will be used, +if there is no match.") + +(defvar erlang-eunit-test-candidate-dirs '("../test" ".") + "*Name of directories which to search for EUnit test files matching +a source file. The first directory in the list will be used, +if there is no match.") + +(defvar erlang-eunit-autosave nil + "*Set to non-nil to automtically save unsaved buffers before running tests. +This is useful, reducing the save-compile-load-test cycle to one keychord.") + +(defvar erlang-eunit-recent-info '((mode . nil) (module . nil) (test . nil) (cover . 
nil)) + "Info about the most recent running of an EUnit test representation.") ;;; ;;; Switch between src/EUnit test buffers @@ -44,7 +58,6 @@ buffer and vice versa" "Open the EUnit test file which corresponds to a src file" (find-file-other-window (erlang-eunit-test-filename src-file-path))) - ;;; ;;; Open the src file which corresponds to the an EUnit test file ;;; @@ -55,37 +68,55 @@ buffer and vice versa" ;;; Return the name and path of the EUnit test file ;;, (input may be either the source filename itself or the EUnit test filename) (defun erlang-eunit-test-filename (file-path) - (erlang-eunit-rewrite-filename file-path "test" "_tests")) + (if (erlang-eunit-test-file-p file-path) + file-path + (erlang-eunit-rewrite-filename file-path erlang-eunit-test-candidate-dirs))) ;;; Return the name and path of the source file ;;, (input may be either the source filename itself or the EUnit test filename) (defun erlang-eunit-src-filename (file-path) - (erlang-eunit-rewrite-filename file-path "src" "")) + (if (erlang-eunit-src-file-p file-path) + file-path + (erlang-eunit-rewrite-filename file-path erlang-eunit-src-candidate-dirs))) ;;; Rewrite a filename from the src or test filename to the other -(defun erlang-eunit-rewrite-filename (orig-file-path dest-dirname dest-suffix) - (let* ((root-dir-name (erlang-eunit-file-root-dir-name orig-file-path)) - (src-module-name (erlang-eunit-source-module-name orig-file-path)) - (dest-base-name (concat src-module-name dest-suffix ".erl")) - (dest-dir-name-1 (file-name-directory orig-file-path)) - (dest-dir-name-2 (filename-join root-dir-name dest-dirname)) - (dest-file-name-1 (filename-join dest-dir-name-1 dest-base-name)) - (dest-file-name-2 (filename-join dest-dir-name-2 dest-base-name))) - ;; This function tries to be a bit intelligent: - ;; * if there already is a test (or source) file in the same - ;; directory as a source (or test) file, it'll be picked - ;; * if there already is a test (or source) file in a separate - ;; test (or src) directory, it'll be picked - ;; * otherwise it'll resort to whatever alternative (same or - ;; separate directories) that the user has chosen - (cond ((file-readable-p dest-file-name-1) - dest-file-name-1) - ((file-readable-p dest-file-name-2) - dest-file-name-2) - (erlang-eunit-separate-src-and-test-directories - dest-file-name-2) - (t - dest-file-name-1)))) +(defun erlang-eunit-rewrite-filename (orig-file-path candidate-dirs) + (or (erlang-eunit-locate-buddy orig-file-path candidate-dirs) + (erlang-eunit-buddy-file-path orig-file-path (car candidate-dirs)))) + +;;; Search for a file's buddy file (a source file's EUnit test file, +;;; or an EUnit test file's source file) in a list of candidate +;;; directories. 
+(defun erlang-eunit-locate-buddy (orig-file-path candidate-dirs) + (when candidate-dirs + (let ((buddy-file-path (erlang-eunit-buddy-file-path + orig-file-path + (car candidate-dirs)))) + (if (file-readable-p buddy-file-path) + buddy-file-path + (erlang-eunit-locate-buddy orig-file-path (cdr candidate-dirs)))))) + +(defun erlang-eunit-buddy-file-path (orig-file-path buddy-dir-name) + (let* ((orig-dir-name (file-name-directory orig-file-path)) + (buddy-dir-name (file-truename + (filename-join orig-dir-name buddy-dir-name))) + (buddy-base-name (erlang-eunit-buddy-basename orig-file-path))) + (filename-join buddy-dir-name buddy-base-name))) + +;;; Return the basename of the buddy file: +;;; /tmp/foo/src/x.erl --> x_tests.erl +;;; /tmp/foo/test/x_tests.erl --> x.erl +(defun erlang-eunit-buddy-basename (file-path) + (let ((src-module-name (erlang-eunit-source-module-name file-path))) + (cond + ((erlang-eunit-src-file-p file-path) + (concat src-module-name "_tests.erl")) + ((erlang-eunit-test-file-p file-path) + (concat src-module-name ".erl"))))) + +;;; Checks whether a file is a source file or not +(defun erlang-eunit-src-file-p (file-path) + (not (erlang-eunit-test-file-p file-path))) ;;; Checks whether a file is a EUnit test file or not (defun erlang-eunit-test-file-p (file-path) @@ -96,11 +127,10 @@ buffer and vice versa" ;;; /tmp/foo/test/x_tests.erl --> x (defun erlang-eunit-source-module-name (file-path) (interactive) - (let* ((file-name (file-name-nondirectory file-path)) - (base-name (file-name-sans-extension file-name))) - (if (string-match "^\\(.+\\)_tests$" base-name) - (substring base-name (match-beginning 1) (match-end 1)) - base-name))) + (let ((module-name (erlang-eunit-module-name file-path))) + (if (string-match "^\\(.+\\)_tests$" module-name) + (substring module-name (match-beginning 1) (match-end 1)) + module-name))) ;;; Return the module name of the file ;;; /tmp/foo/src/x.erl --> x @@ -109,18 +139,6 @@ buffer and vice versa" (interactive) (file-name-sans-extension (file-name-nondirectory file-path))) -;;; Return the directory name which is common to both src and test -;;; /tmp/foo/src/x.erl --> /tmp/foo -;;; /tmp/foo/test/x_tests.erl --> /tmp/foo -(defun erlang-eunit-file-root-dir-name (file-path) - (erlang-eunit-dir-parent-dirname (file-name-directory file-path))) - -;;; Return the parent directory name of a directory -;;; /tmp/foo/ --> /tmp -;;; /tmp/foo --> /tmp -(defun erlang-eunit-dir-parent-dirname (dir-name) - (file-name-directory (directory-file-name dir-name))) - ;;; Older emacsen don't have string-match-p. (defun erlang-eunit-string-match-p (regexp string &optional start) (if (fboundp 'string-match-p) ;; appeared in emacs 23 @@ -135,12 +153,28 @@ buffer and vice versa" (concat dir file) (concat dir "/" file))) +;;; Get info about the most recent running of EUnit +(defun erlang-eunit-recent (key) + (cdr (assq key erlang-eunit-recent-info))) + +;;; Record info about the most recent running of EUnit +;;; Known modes are 'module-mode and 'test-mode +(defun erlang-eunit-record-recent (mode module test) + (setcdr (assq 'mode erlang-eunit-recent-info) mode) + (setcdr (assq 'module erlang-eunit-recent-info) module) + (setcdr (assq 'test erlang-eunit-recent-info) test)) + +;;; Record whether the most recent running of EUnit included cover +;;; compilation +(defun erlang-eunit-record-recent-compile (under-cover) + (setcdr (assq 'cover erlang-eunit-recent-info) under-cover)) + ;;; Determine options for EUnit. 
(defun erlang-eunit-opts () (if current-prefix-arg ", [verbose]" "")) ;;; Determine current test function -(defun erlang-eunit-test-name () +(defun erlang-eunit-current-test () (save-excursion (erlang-end-of-function 1) (erlang-beginning-of-function 1) @@ -152,45 +186,125 @@ buffer and vice versa" (defun erlang-eunit-test-generator-p (test-name) (if (erlang-eunit-string-match-p "^\\(.+\\)_test_$" test-name) t nil)) -;;; Run the current EUnit test -(defun erlang-eunit-run-current-test () - (let* ((module-name (erlang-add-quotes-if-needed - (erlang-eunit-module-name buffer-file-name))) - (test-name (erlang-eunit-test-name)) - (command - (cond ((erlang-eunit-simple-test-p test-name) - (format "eunit:test({%s, %s}%s)." - module-name test-name (erlang-eunit-opts))) - ((erlang-eunit-test-generator-p test-name) - (format "eunit:test({generator, %s, %s}%s)." - module-name test-name (erlang-eunit-opts))) - (t (format "%% WARNING: '%s' is not a test function" test-name))))) +;;; Run one EUnit test +(defun erlang-eunit-run-test (module-name test-name) + (let ((command + (cond ((erlang-eunit-simple-test-p test-name) + (format "eunit:test({%s, %s}%s)." + module-name test-name (erlang-eunit-opts))) + ((erlang-eunit-test-generator-p test-name) + (format "eunit:test({generator, %s, %s}%s)." + module-name test-name (erlang-eunit-opts))) + (t (format "%% WARNING: '%s' is not a test function" test-name))))) + (erlang-eunit-record-recent 'test-mode module-name test-name) (erlang-eunit-inferior-erlang-send-command command))) ;;; Run EUnit tests for the current module -(defun erlang-eunit-run-module-tests () - (let* ((module-name (erlang-add-quotes-if-needed - (erlang-eunit-source-module-name buffer-file-name))) - (command (format "eunit:test(%s%s)." module-name (erlang-eunit-opts)))) +(defun erlang-eunit-run-module-tests (module-name) + (let ((command (format "eunit:test(%s%s)." module-name (erlang-eunit-opts)))) + (erlang-eunit-record-recent 'module-mode module-name nil) (erlang-eunit-inferior-erlang-send-command command))) +(defun erlang-eunit-compile-and-run-recent () + "Compile the source and test files and repeat the most recent EUnit test run. + +With prefix arg, compiles for debug and runs tests with the verbose flag set." + (interactive) + (case (erlang-eunit-recent 'mode) + ('test-mode + (erlang-eunit-compile-and-test + 'erlang-eunit-run-test (list (erlang-eunit-recent 'module) + (erlang-eunit-recent 'test)))) + ('module-mode + (erlang-eunit-compile-and-test + 'erlang-eunit-run-module-tests (list (erlang-eunit-recent 'module)) + (erlang-eunit-recent 'cover))) + (t (error "EUnit has not yet been run. Please run a test first.")))) + +(defun erlang-eunit-cover-compile () + "Cover compile current module." + (interactive) + (let* ((erlang-compile-extra-opts + (append (list 'debug_info) erlang-compile-extra-opts)) + (module-name + (erlang-add-quotes-if-needed + (erlang-eunit-module-name buffer-file-name))) + (compile-command + (format "cover:compile_beam(%s)." module-name))) + (erlang-compile) + (if (erlang-eunit-last-compilation-successful-p) + (erlang-eunit-inferior-erlang-send-command compile-command)))) + +(defun erlang-eunit-analyze-coverage () + "Analyze the data collected by cover tool for the module in the +current buffer. + +Assumes that the module has been cover compiled prior to this +call. 
This function will do two things: print the number of +covered and uncovered functions in the erlang shell and display a +new buffer called *<module name> coverage* which shows the source +code along with the coverage analysis results." + (interactive) + (let* ((module-name (erlang-add-quotes-if-needed + (erlang-eunit-module-name buffer-file-name))) + (tmp-filename (make-temp-file "cover")) + (analyze-command (format "cover:analyze_to_file(%s, \"%s\"). " + module-name tmp-filename)) + (buf-name (format "*%s coverage*" module-name))) + (erlang-eunit-inferior-erlang-send-command analyze-command) + ;; The purpose of the following snippet is to get the result of the + ;; analysis from a file into a new buffer (or an old, if one with + ;; the specified name already exists). Also we want the erlang-mode + ;; *and* view-mode to be enabled. + (save-excursion + (let ((buf (get-buffer-create (format "*%s coverage*" module-name)))) + (set-buffer buf) + (setq buffer-read-only nil) + (insert-file-contents tmp-filename nil nil nil t) + (if (= (buffer-size) 0) + (kill-buffer buf) + ;; FIXME: this would be a good place to enable (emacs-mode) + ;; to get some nice syntax highlighting in the + ;; coverage report, but it doesn't play well with + ;; flymake. Leave it off for now. + (view-buffer buf)))) + (delete-file tmp-filename))) + (defun erlang-eunit-compile-and-run-current-test () "Compile the source and test files and run the current EUnit test. With prefix arg, compiles for debug and runs tests with the verbose flag set." (interactive) - (erlang-eunit-compile-and-test 'erlang-eunit-run-current-test)) + (let ((module-name (erlang-add-quotes-if-needed + (erlang-eunit-module-name buffer-file-name))) + (test-name (erlang-eunit-current-test))) + (erlang-eunit-compile-and-test + 'erlang-eunit-run-test (list module-name test-name)))) (defun erlang-eunit-compile-and-run-module-tests () "Compile the source and test files and run all EUnit tests in the module. With prefix arg, compiles for debug and runs tests with the verbose flag set." (interactive) - (erlang-eunit-compile-and-test 'erlang-eunit-run-module-tests)) + (let ((module-name (erlang-add-quotes-if-needed + (erlang-eunit-source-module-name buffer-file-name)))) + (erlang-eunit-compile-and-test + 'erlang-eunit-run-module-tests (list module-name)))) ;;; Compile source and EUnit test file and finally run EUnit tests for ;;; the current module -(defun erlang-eunit-compile-and-test (run-tests) +(defun erlang-eunit-compile-and-test (test-fun test-args &optional under-cover) + "Compile the source and test files and run the EUnit test suite. + +If under-cover is set to t, the module under test is compile for +code coverage analysis. If under-cover is left out or not set, +coverage analysis is disabled. The result of the code coverage +is both printed to the erlang shell (the number of covered vs +uncovered functions in a module) and written to a buffer called +*<module> coverage* (which shows the source code for the module +and the number of times each line is covered). +With prefix arg, compiles for debug and runs tests with the verbose flag set." + (erlang-eunit-record-recent-compile under-cover) (let ((src-filename (erlang-eunit-src-filename buffer-file-name)) (test-filename (erlang-eunit-test-filename buffer-file-name))) @@ -198,7 +312,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." 
;; below, is to ask the question about saving buffers only once, ;; instead of possibly several: one for each file to compile, ;; for instance for both x.erl and x_tests.erl. - (save-some-buffers) + (save-some-buffers erlang-eunit-autosave) (flet ((save-some-buffers (&optional any) nil)) ;; Compilation of the source file is mandatory (the file must @@ -206,23 +320,56 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." ;; test file on the other hand, is optional, since eunit tests may ;; be placed in the source file instead. Any compilation error ;; will prevent the subsequent steps to be run (hence the `and') - (and (erlang-eunit-compile-file src-filename) + (and (erlang-eunit-compile-file src-filename under-cover) (if (file-readable-p test-filename) (erlang-eunit-compile-file test-filename) t) - (funcall run-tests))))) + (apply test-fun test-args) + (if under-cover + (save-excursion + (set-buffer (find-file-noselect src-filename)) + (erlang-eunit-analyze-coverage))))))) -(defun erlang-eunit-compile-file (file-path) +(defun erlang-eunit-compile-and-run-module-tests-under-cover () + "Compile the source and test files and run the EUnit test suite and measure +code coverage. + +With prefix arg, compiles for debug and runs tests with the verbose flag set." + (interactive) + (let ((module-name (erlang-add-quotes-if-needed + (erlang-eunit-source-module-name buffer-file-name)))) + (erlang-eunit-compile-and-test + 'erlang-eunit-run-module-tests (list module-name) t))) + +(defun erlang-eunit-compile-file (file-path &optional under-cover) (if (file-readable-p file-path) (save-excursion - (set-buffer (find-file-noselect file-path)) - (erlang-compile) - (erlang-eunit-last-compilation-successful-p)) + (set-buffer (find-file-noselect file-path)) + ;; In order to run a code coverage analysis on a + ;; module, we have two options: + ;; + ;; * either compile the module with cover:compile instead of the + ;; regular compiler + ;; + ;; * or first compile the module with the regular compiler (but + ;; *with* debug_info) and then compile it for coverage + ;; analysis using cover:compile_beam. + ;; + ;; We could accomplish the first by changing the + ;; erlang-compile-erlang-function to cover:compile, but there's + ;; a risk that that's used for other purposes. Therefore, a + ;; safer alternative (although with more steps) is to add + ;; debug_info to the list of compiler options and go for the + ;; second alternative. + (if under-cover + (erlang-eunit-cover-compile) + (erlang-compile)) + (erlang-eunit-last-compilation-successful-p)) (let ((msg (format "Could not read %s" file-path))) - (erlang-eunit-inferior-erlang-send-command + (erlang-eunit-inferior-erlang-send-command (format "%% WARNING: %s" msg)) (error msg)))) - + (defun erlang-eunit-last-compilation-successful-p () (save-excursion (set-buffer inferior-erlang-buffer) @@ -231,7 +378,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." (lambda (re) (let ((continue t) (result t)) (while continue ; ignore warnings, stop at errors - (if (re-search-forward re (point-max) t) + (if (re-search-forward re (point-max) t) (if (erlang-eunit-is-compilation-warning) t (setq result nil) @@ -242,7 +389,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." 
(mapcar (lambda (e) (car e)) erlang-error-regexp-alist)))) (defun erlang-eunit-is-compilation-warning () - (erlang-eunit-string-match-p + (erlang-eunit-string-match-p "[0-9]+: Warning:" (buffer-substring (line-beginning-position) (line-end-position)))) @@ -271,7 +418,11 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." (defconst erlang-eunit-key-bindings '(("\C-c\C-et" erlang-eunit-toggle-src-and-test-file-other-window) ("\C-c\C-ek" erlang-eunit-compile-and-run-module-tests) - ("\C-c\C-ej" erlang-eunit-compile-and-run-current-test))) + ("\C-c\C-ej" erlang-eunit-compile-and-run-current-test) + ("\C-c\C-el" erlang-eunit-compile-and-run-recent) + ("\C-c\C-ec" erlang-eunit-compile-and-run-module-tests-under-cover) + ("\C-c\C-ev" erlang-eunit-cover-compile) + ("\C-c\C-ea" erlang-eunit-analyze-coverage))) (defun erlang-eunit-add-key-bindings () (dolist (binding erlang-eunit-key-bindings) diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el index 91acfdf2b6..ed825a298f 100644 --- a/lib/tools/emacs/erlang.el +++ b/lib/tools/emacs/erlang.el @@ -1481,7 +1481,23 @@ Other commands: erlang-font-lock-keywords-3 erlang-font-lock-keywords-4) nil nil ((?_ . "w")) erlang-beginning-of-clause - (font-lock-mark-block-function . erlang-mark-clause)))) + (font-lock-mark-block-function . erlang-mark-clause) + (font-lock-syntactic-keywords + ;; A dollar sign right before the double quote that ends a + ;; string is not a character escape. + ;; + ;; And a "string" has with a double quote not escaped by a + ;; dollar sign, any number of non-backslash non-newline + ;; characters or escaped backslashes, a dollar sign + ;; (otherwise we wouldn't care) and a double quote. This + ;; doesn't match multi-line strings, but this is probably + ;; the best we can get, since while font-locking we don't + ;; know whether matching started inside a string: limiting + ;; search to a single line keeps things sane. + . (("\\(?:^\\|[^$]\\)\"\\(?:[^\"\n]\\|\\\\\"\\)*\\(\\$\\)\"" 1 "w") + ;; And the dollar sign in $\" escapes two characters, not + ;; just one. + ("\\(\\$\\)\\\\\\\"" 1 "'")))))) diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 6501e05a6e..ada2db45be 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -35,23 +35,37 @@ %% remote_process_loop/1. %% %% TABLES -%% Each nodes has an ets table named 'cover_internal_data_table' -%% (?COVER_TABLE). This table contains the coverage data and is -%% continously updated when cover compiled code is executed. +%% Each nodes has two tables: cover_internal_data_table (?COVER_TABLE) and. +%% cover_internal_clause_table (?COVER_CLAUSE_TABLE). +%% ?COVER_TABLE contains the bump data i.e. the data about which lines +%% have been executed how many times. +%% ?COVER_CLAUSE_TABLE contains information about which clauses in which modules +%% cover is currently collecting statistics. %% -%% The main node owns a table named -%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE). This table -%% contains data which is collected from remote nodes (either when a -%% remote node is stopped with cover:stop/1 or when analysing. When -%% analysing, data is even moved from the ?COVER_TABLE on the main -%% node to the ?COLLECTION_TABLE. +%% The main node owns tables named +%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE) and +%% 'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE). 
+%% These tables contain data which is collected from remote nodes (either when a +%% remote node is stopped with cover:stop/1 or when analysing). When +%% analysing, data is even moved from the COVER tables on the main +%% node to the COLLECTION tables. %% %% The main node also has a table named 'cover_binary_code_table' %% (?BINARY_TABLE). This table contains the binary code for each cover %% compiled module. This is necessary so that the code can be loaded %% on remote nodes that are started after the compilation. %% - +%% PARELLALISM +%% To take advantage of SMP when doing the cover analysis both the data +%% collection and analysis has been parallelized. One process is spawned for +%% each node when collecting data, and on the remote node when collecting data +%% one process is spawned per module. +%% +%% When analyzing data it is possible to issue multiple analyse(_to_file)/X +%% calls at once. They are however all calls (for backwardscompatability +%% reasons) so the user of cover will have to spawn several processes to to the +%% calls ( or use async_analyse_to_file ). +%% %% External exports -export([start/0, start/1, @@ -61,6 +75,9 @@ analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3, analyse_to_file/1, analyse_to_file/2, analyse_to_file/3, analyze_to_file/1, analyze_to_file/2, analyze_to_file/3, + async_analyse_to_file/1,async_analyse_to_file/2, + async_analyse_to_file/3, async_analyze_to_file/1, + async_analyze_to_file/2, async_analyze_to_file/3, export/1, export/2, import/1, modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1, reset/1, reset/0, @@ -100,8 +117,10 @@ }). -define(COVER_TABLE, 'cover_internal_data_table'). +-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table'). -define(BINARY_TABLE, 'cover_binary_code_table'). -define(COLLECTION_TABLE, 'cover_collected_remote_data_table'). +-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table'). -define(TAG, cover_compiled). -define(SERVER, cover_server). @@ -114,6 +133,8 @@ true -> ?BLOCK(Expr) end). +-define(SPAWN_DBG(Tag,Value),put(Tag,Value)). + -include_lib("stdlib/include/ms_transform.hrl"). %%%---------------------------------------------------------------------- @@ -127,7 +148,10 @@ start() -> case whereis(?SERVER) of undefined -> Starter = self(), - Pid = spawn(fun() -> init_main(Starter) end), + Pid = spawn(fun() -> + ?SPAWN_DBG(start,[]), + init_main(Starter) + end), Ref = erlang:monitor(process,Pid), Return = receive @@ -382,6 +406,30 @@ analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut). analyze_to_file(Module, OutFile, Options) -> analyse_to_file(Module, OutFile, Options). +async_analyse_to_file(Module) -> + do_spawn(?MODULE, analyse_to_file, [Module]). +async_analyse_to_file(Module, OutFileOrOpts) -> + do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]). +async_analyse_to_file(Module, OutFile, Options) -> + do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). + +do_spawn(M,F,A) -> + spawn_link(fun() -> + case apply(M,F,A) of + {ok, _} -> + ok; + {error, Reason} -> + exit(Reason) + end + end). + +async_analyze_to_file(Module) -> + async_analyse_to_file(Module). +async_analyze_to_file(Module, OutFileOrOpts) -> + async_analyse_to_file(Module, OutFileOrOpts). +async_analyze_to_file(Module, OutFile, Options) -> + async_analyse_to_file(Module, OutFile, Options). + outfilename(Module,Opts) -> case lists:member(html,Opts) of true -> @@ -500,6 +548,8 @@ remote_call(Node,Request) -> Return end. 
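The async_analyse_to_file/1,2,3 entry points added above simply spawn a linked worker (via do_spawn/3) that runs the ordinary analysis and exits with the error reason on failure, which is what the new documentation hint about issuing several analyse_to_file requests at once builds on. A minimal usage sketch, assuming the listed modules are already cover compiled (analyse_all/1 and the module list are illustrative, not part of the patch):

    analyse_all(Modules) ->
        process_flag(trap_exit, true),
        Pids = [cover:async_analyse_to_file(M, [html]) || M <- Modules],
        %% Each worker is linked to the caller: 'normal' means the analysis
        %% succeeded, anything else is the error analyse_to_file would return.
        [receive {'EXIT', Pid, Reason} -> {Pid, Reason} end || Pid <- Pids].
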
+remote_reply(Proc,Reply) when is_pid(Proc) -> + Proc ! {?SERVER,Reply}; remote_reply(MainNode,Reply) -> {?SERVER,MainNode} ! {?SERVER,Reply}. @@ -509,9 +559,15 @@ remote_reply(MainNode,Reply) -> init_main(Starter) -> register(?SERVER,self()), - ets:new(?COVER_TABLE, [set, public, named_table]), + %% Having write concurrancy here gives a 40% performance boost + %% when collect/1 is called. + ets:new(?COVER_TABLE, [set, public, named_table + ,{write_concurrency, true} + ]), + ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), ets:new(?BINARY_TABLE, [set, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), + ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]), process_flag(trap_exit,true), Starter ! {?SERVER,started}, main_process_loop(#main_state{}). @@ -593,40 +649,10 @@ main_process_loop(State) -> end; {From, {export,OutFile,Module}} -> - case file:open(OutFile,[write,binary,raw]) of - {ok,Fd} -> - Reply = - case Module of - '_' -> - export_info(State#main_state.imported), - collect(State#main_state.nodes), - do_export_table(State#main_state.compiled, - State#main_state.imported, - Fd); - _ -> - export_info(Module,State#main_state.imported), - case is_loaded(Module, State) of - {loaded, File} -> - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - do_export_table([{Module,File}],[],Fd); - {imported, File, ImportFiles} -> - %% don't know if I should allow this - - %% export a module which is only imported - Imported = [{Module,File,ImportFiles}], - do_export_table([],Imported,Fd); - _NotLoaded -> - {error,{not_cover_compiled,Module}} - end - end, - file:close(Fd), - reply(From, Reply); - {error,Reason} -> - reply(From, {error, {cant_open_file,OutFile,Reason}}) - - end, + spawn(fun() -> + ?SPAWN_DBG(export,{OutFile, Module}), + do_export(Module, OutFile, From, State) + end), main_process_loop(State); {From, {import,File}} -> @@ -692,107 +718,73 @@ main_process_loop(State) -> unregister(?SERVER), reply(From, ok); - {From, {Request, Module}} -> - case is_loaded(Module, State) of - {loaded, File} -> - {Reply,State1} = - case Request of - {analyse, Analysis, Level} -> - analyse_info(Module,State#main_state.imported), - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module,Clauses,State#main_state.nodes), - R = do_analyse(Module, Analysis, Level, Clauses), - {R,State}; - - {analyse_to_file, OutFile, Opts} -> - R = case find_source(File) of - {beam,_BeamFile} -> - {error,no_source_code_found}; - ErlFile -> - Imported = State#main_state.imported, - analyse_info(Module,Imported), - [{Module,Clauses}] = - ets:lookup(?COVER_TABLE,Module), - collect(Module, Clauses, - State#main_state.nodes), - HTML = lists:member(html,Opts), - do_analyse_to_file(Module,OutFile, - ErlFile,HTML) - end, - {R,State}; - - is_compiled -> - {{file, File},State}; - - reset -> - R = do_reset_main_node(Module, - State#main_state.nodes), - Imported = - remove_imported(Module, - State#main_state.imported), - {R,State#main_state{imported=Imported}} - end, - reply(From, Reply), - main_process_loop(State1); - - {imported,File,_ImportFiles} -> - {Reply,State1} = - case Request of - {analyse, Analysis, Level} -> - analyse_info(Module,State#main_state.imported), - [{Module,Clauses}] = - ets:lookup(?COLLECTION_TABLE,Module), - R = do_analyse(Module, Analysis, Level, Clauses), - {R,State}; - - {analyse_to_file, OutFile, Opts} -> - R = case find_source(File) of - {beam,_BeamFile} -> - {error,no_source_code_found}; - ErlFile 
-> - Imported = State#main_state.imported, - analyse_info(Module,Imported), - HTML = lists:member(html,Opts), - do_analyse_to_file(Module,OutFile, - ErlFile,HTML) - end, - {R,State}; - - is_compiled -> - {false,State}; - - reset -> - R = do_reset_collection_table(Module), - Imported = - remove_imported(Module, - State#main_state.imported), - {R,State#main_state{imported=Imported}} - end, - reply(From, Reply), - main_process_loop(State1); - - NotLoaded -> - Reply = - case Request of - is_compiled -> - false; - _ -> - {error, {not_cover_compiled,Module}} - end, - Compiled = - case NotLoaded of - unloaded -> - do_clear(Module), - remote_unload(State#main_state.nodes,[Module]), - update_compiled([Module], - State#main_state.compiled); - false -> - State#main_state.compiled + {From, {{analyse, Analysis, Level}, Module}} -> + S = try + Loaded = is_loaded(Module, State), + spawn(fun() -> + ?SPAWN_DBG(analyse,{Module,Analysis, Level}), + do_parallel_analysis( + Module, Analysis, Level, + Loaded, From, State) + end), + State + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {{analyse_to_file, OutFile, Opts},Module}} -> + S = try + Loaded = is_loaded(Module, State), + spawn(fun() -> + ?SPAWN_DBG(analyse_to_file, + {Module,OutFile, Opts}), + do_parallel_analysis_to_file( + Module, OutFile, Opts, + Loaded, From, State) + end), + State + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {is_compiled, Module}} -> + S = try is_loaded(Module, State) of + {loaded, File} -> + reply(From,{file, File}), + State; + {imported,_File,_ImportFiles} -> + reply(From,false), + State + catch throw:Reason -> + reply(From,false), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); + + {From, {reset, Module}} -> + S = try + Loaded = is_loaded(Module,State), + R = case Loaded of + {loaded, _File} -> + do_reset_main_node( + Module, State#main_state.nodes); + {imported, _File, _} -> + do_reset_collection_table(Module) end, - reply(From, Reply), - main_process_loop(State#main_state{compiled=Compiled}) - end; + Imported = + remove_imported(Module, + State#main_state.imported), + reply(From, R), + State#main_state{imported=Imported} + catch throw:Reason -> + reply(From,{error, {not_cover_compiled,Module}}), + not_loaded(Module, Reason, State) + end, + main_process_loop(S); {'EXIT',Pid,_Reason} -> %% Exit is trapped on the main node only, so this will only happen @@ -807,17 +799,17 @@ main_process_loop(State) -> main_process_loop(State) end. - - - - %%%---------------------------------------------------------------------- %%% cover_server on remote node %%%---------------------------------------------------------------------- init_remote(Starter,MainNode) -> register(?SERVER,self()), - ets:new(?COVER_TABLE, [set, public, named_table]), + ets:new(?COVER_TABLE, [set, public, named_table + %% write_concurrency here makes otp_8270 break :( + %,{write_concurrency, true} + ]), + ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), Starter ! {self(),started}, remote_process_loop(#remote_state{main_node=MainNode}). 
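Because the main cover_server now serves each analyse and analyse_to_file request in a spawned worker, several analyses can be in flight at the same time, which is what the new cover_chapter note recommends for performance. A hedged client-side sketch (collect_coverage/1 is an illustrative helper, not part of the patch):

    collect_coverage(Modules) ->
        Parent = self(),
        [spawn(fun() -> Parent ! {M, cover:analyse(M, coverage)} end) || M <- Modules],
        %% The selective receive returns the results in the order of Modules.
        [receive {M, Result} -> {M, Result} end || M <- Modules].
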
@@ -843,29 +835,14 @@ remote_process_loop(State) -> remote_process_loop(State); {remote,collect,Module,CollectorPid} -> - MS = - case Module of - '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end); - _ -> ets:fun2ms(fun({M,C}) when M=:=Module -> C end) - end, - AllClauses = lists:flatten(ets:select(?COVER_TABLE,MS)), - - %% Sending clause by clause in order to avoid large lists - lists:foreach( - fun({M,F,A,C,_L}) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps} - end, - AllClauses), - CollectorPid ! done, - remote_reply(State#remote_state.main_node, ok), + self() ! {remote,collect,Module,CollectorPid, ?SERVER}; + + {remote,collect,Module,CollectorPid,From} -> + spawn(fun() -> + ?SPAWN_DBG(remote_collect, + {Module, CollectorPid, From}), + do_collect(Module, CollectorPid, From) + end), remote_process_loop(State); {remote,stop} -> @@ -894,6 +871,33 @@ remote_process_loop(State) -> end. +do_collect(Module, CollectorPid, From) -> + AllMods = + case Module of + '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); + _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) + end, + + %% Sending clause by clause in order to avoid large lists + pmap( + fun({_Mod,Clauses}) -> + lists:map(fun(Clause) -> + send_collected_data(Clause, CollectorPid) + end,Clauses) + end,AllMods), + CollectorPid ! done, + remote_reply(From, ok). + +send_collected_data({M,F,A,C,_L}, CollectorPid) -> + Pattern = + {#bump{module=M, function=F, arity=A, clause=C}, '_'}, + Bumps = ets:match_object(?COVER_TABLE, Pattern), + %% Reset + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Bumps), + CollectorPid ! {chunk,Bumps}. reload_originals([{Module,_File}|Compiled]) -> do_reload_original(Module), @@ -932,6 +936,9 @@ load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) -> load_compiled([],Acc) -> Acc. +insert_initial_data([Item|Items]) when is_atom(element(1,Item)) -> + ets:insert(?COVER_CLAUSE_TABLE, Item), + insert_initial_data(Items); insert_initial_data([Item|Items]) -> ets:insert(?COVER_TABLE, Item), insert_initial_data(Items); @@ -957,7 +964,10 @@ remote_start(MainNode) -> case whereis(?SERVER) of undefined -> Starter = self(), - Pid = spawn(fun() -> init_remote(Starter,MainNode) end), + Pid = spawn(fun() -> + ?SPAWN_DBG(remote_start,{MainNode}), + init_remote(Starter,MainNode) + end), Ref = erlang:monitor(process,Pid), Return = receive @@ -972,14 +982,25 @@ remote_start(MainNode) -> {error,{already_started,Pid}} end. -%% Load a set of cover compiled modules on remote nodes -remote_load_compiled(Nodes,Compiled0) -> - Compiled = lists:map(fun get_data_for_remote_loading/1,Compiled0), +%% Load a set of cover compiled modules on remote nodes, +%% We do it ?MAX_MODS modules at a time so that we don't +%% run out of memory on the cover_server node. +-define(MAX_MODS, 10). +remote_load_compiled(Nodes,Compiled) -> + remote_load_compiled(Nodes, Compiled, [], 0). +remote_load_compiled(_Nodes, [], [], _ModNum) -> + ok; +remote_load_compiled(Nodes, Compiled, Acc, ModNum) + when Compiled == []; ModNum == ?MAX_MODS -> lists:foreach( fun(Node) -> - remote_call(Node,{remote,load_compiled,Compiled}) + remote_call(Node,{remote,load_compiled,Acc}) end, - Nodes). 
+ Nodes), + remote_load_compiled(Nodes, Compiled, [], 0); +remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> + remote_load_compiled( + Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1). %% Read all data needed for loading a cover compiled module on a remote node %% Binary is the beam code for the module and InitialTable is the initial @@ -987,15 +1008,15 @@ remote_load_compiled(Nodes,Compiled0) -> get_data_for_remote_loading({Module,File}) -> [{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module), %%! The InitialTable list will be long if the module is big - what to do?? - InitialTable = ets:select(?COVER_TABLE,ms(Module)), - {Module,File,Binary,InitialTable}. + InitialBumps = ets:select(?COVER_TABLE,ms(Module)), + InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module), + + {Module,File,Binary,InitialBumps ++ InitialClauses}. %% Create a match spec which returns the clause info {Module,InitInfo} and %% all #bump keys for the given module with 0 number of calls. ms(Module) -> - ets:fun2ms(fun({Module,InitInfo}) -> - {Module,InitInfo}; - ({Key,_}) when is_record(Key,bump),Key#bump.module=:=Module -> + ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module -> {Key,0} end). @@ -1017,27 +1038,30 @@ remote_reset(Module,Nodes) -> %% Collect data from remote nodes - used for analyse or stop(Node) remote_collect(Module,Nodes,Stop) -> - CollectorPid = spawn(fun() -> collector_proc(length(Nodes)) end), - lists:foreach( - fun(Node) -> - remote_call(Node,{remote,collect,Module,CollectorPid}), - if Stop -> remote_call(Node,{remote,stop}); - true -> ok - end - end, - Nodes). + pmap(fun(Node) -> + ?SPAWN_DBG(remote_collect, + {Module, Nodes, Stop}), + do_collection(Node, Module, Stop) + end, + Nodes). + +do_collection(Node, Module, Stop) -> + CollectorPid = spawn(fun collector_proc/0), + remote_call(Node,{remote,collect,Module,CollectorPid, self()}), + if Stop -> remote_call(Node,{remote,stop}); + true -> ok + end. %% Process which receives chunks of data from remote nodes - either when %% analysing or when stopping cover on the remote nodes. -collector_proc(0) -> - ok; -collector_proc(N) -> +collector_proc() -> + ?SPAWN_DBG(collector_proc, []), receive {chunk,Chunk} -> insert_in_collection_table(Chunk), - collector_proc(N); + collector_proc(); done -> - collector_proc(N-1) + ok end. insert_in_collection_table([{Key,Val}|Chunk]) -> @@ -1052,7 +1076,13 @@ insert_in_collection_table(Key,Val) -> ets:update_counter(?COLLECTION_TABLE, Key,Val); false -> - ets:insert(?COLLECTION_TABLE,{Key,Val}) + %% Make sure that there are no race conditions from ets:member + case ets:insert_new(?COLLECTION_TABLE,{Key,Val}) of + false -> + insert_in_collection_table(Key,Val); + _ -> + ok + end end. @@ -1073,14 +1103,15 @@ analyse_info(Module,Imported) -> export_info(_Module,[]) -> ok; -export_info(Module,Imported) -> - imported_info("Export",Module,Imported). +export_info(_Module,_Imported) -> + %% Do not print that the export includes imported modules + ok. export_info([]) -> ok; -export_info(Imported) -> - AllImportFiles = get_all_importfiles(Imported,[]), - io:format("Export includes data from imported files\n~p\n",[AllImportFiles]). +export_info(_Imported) -> + %% Do not print that the export includes imported modules + ok. 
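The insert_in_collection_table/2 change above switches to ets:insert_new/2 because several collector processes may now update the same public table concurrently, so the old ets:member/2 check could race with another writer. A variant of that update-or-insert counter idiom in isolation, as a rough standalone sketch (bump/3 is illustrative, not from the patch):

    bump(Tab, Key, Inc) ->
        try
            ets:update_counter(Tab, Key, Inc)
        catch
            error:badarg ->
                %% The key did not exist yet. insert_new/2 fails if another
                %% process created it in the meantime, so retry the counter.
                case ets:insert_new(Tab, {Key, Inc}) of
                    true  -> Inc;
                    false -> bump(Tab, Key, Inc)
                end
        end.
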
get_all_importfiles([{_M,_F,ImportFiles}|Imported],Acc) -> NewAcc = do_get_all_importfiles(ImportFiles,Acc), @@ -1153,14 +1184,14 @@ is_loaded(Module, State) -> {ok, File} -> case code:which(Module) of ?TAG -> {loaded, File}; - _ -> unloaded + _ -> throw(unloaded) end; false -> case get_file(Module,State#main_state.imported) of {ok,File,ImportFiles} -> {imported, File, ImportFiles}; false -> - false + throw(not_loaded) end end. @@ -1259,7 +1290,7 @@ do_compile_beam(Module,Beam) -> %% Store info about all function clauses in database InitInfo = reverse(Vars#vars.init_info), - ets:insert(?COVER_TABLE, {Module, InitInfo}), + ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}), %% Store binary code so it can be loaded on remote nodes ets:insert(?BINARY_TABLE, {Module, Binary}), @@ -1793,9 +1824,8 @@ common_elems(L1, L2) -> %% Collect data for all modules collect(Nodes) -> %% local node - MS = ets:fun2ms(fun({M,C}) when is_atom(M) -> {M,C} end), - AllClauses = ets:select(?COVER_TABLE,MS), - move_modules(AllClauses), + AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE), + pmap(fun move_modules/1,AllClauses), %% remote nodes remote_collect('_',Nodes,false). @@ -1803,7 +1833,7 @@ collect(Nodes) -> %% Collect data for one module collect(Module,Clauses,Nodes) -> %% local node - move_modules([{Module,Clauses}]), + move_modules({Module,Clauses}), %% remote nodes remote_collect(Module,Nodes,false). @@ -1811,12 +1841,9 @@ collect(Module,Clauses,Nodes) -> %% When analysing, the data from the local ?COVER_TABLE is moved to the %% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE -move_modules([{Module,Clauses}|AllClauses]) -> - ets:insert(?COLLECTION_TABLE,{Module,Clauses}), - move_clauses(Clauses), - move_modules(AllClauses); -move_modules([]) -> - ok. +move_modules({Module,Clauses}) -> + ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}), + move_clauses(Clauses). move_clauses([{M,F,A,C,_L}|Clauses]) -> Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, @@ -1855,6 +1882,22 @@ find_source(File0) -> end end. +do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> + analyse_info(Module,State#main_state.imported), + C = case Loaded of + {loaded, _File} -> + [{Module,Clauses}] = + ets:lookup(?COVER_CLAUSE_TABLE,Module), + collect(Module,Clauses,State#main_state.nodes), + Clauses; + _ -> + [{Module,Clauses}] = + ets:lookup(?COLLECTION_CLAUSE_TABLE,Module), + Clauses + end, + R = do_analyse(Module, Analysis, Level, C), + reply(From, R). + %% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error} %% Clauses = [{Module,Function,Arity,Clause,Lines}] do_analyse(Module, Analysis, line, _Clauses) -> @@ -1931,6 +1974,28 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) -> merge_functions([], _MFun, Result) -> Result. +do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> + File = case Loaded of + {loaded, File0} -> + [{Module,Clauses}] = + ets:lookup(?COVER_CLAUSE_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + File0; + {imported, File0, _} -> + File0 + end, + case find_source(File) of + {beam,_BeamFile} -> + reply(From, {error,no_source_code_found}); + ErlFile -> + analyse_info(Module,State#main_state.imported), + HTML = lists:member(html,Opts), + R = do_analyse_to_file(Module,OutFile, + ErlFile,HTML), + reply(From, R) + end. + %% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} %% Module = atom() %% OutFile = ErlFile = string() @@ -2027,6 +2092,42 @@ fill2() -> ".| ". fill3() -> "| ". 
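The export handling that follows has been moved into do_export/4 so that it, too, runs in its own spawned process, but it is still driven by the ordinary cover:export and cover:import calls. A hedged usage sketch (the file name "all.coverdata" and the module name my_module are placeholders):

    %% On the node where the cover-compiled tests were run:
    ok = cover:export("all.coverdata"),
    %% Later, possibly in another Erlang node or session:
    ok = cover:import("all.coverdata"),
    {ok, _Answer} = cover:analyse(my_module, coverage).
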
%%%--Export-------------------------------------------------------------- +do_export(Module, OutFile, From, State) -> + case file:open(OutFile,[write,binary,raw]) of + {ok,Fd} -> + Reply = + case Module of + '_' -> + export_info(State#main_state.imported), + collect(State#main_state.nodes), + do_export_table(State#main_state.compiled, + State#main_state.imported, + Fd); + _ -> + export_info(Module,State#main_state.imported), + try is_loaded(Module, State) of + {loaded, File} -> + [{Module,Clauses}] = + ets:lookup(?COVER_CLAUSE_TABLE,Module), + collect(Module, Clauses, + State#main_state.nodes), + do_export_table([{Module,File}],[],Fd); + {imported, File, ImportFiles} -> + %% don't know if I should allow this - + %% export a module which is only imported + Imported = [{Module,File,ImportFiles}], + do_export_table([],Imported,Fd) + catch throw:_ -> + {error,{not_cover_compiled,Module}} + end + end, + file:close(Fd), + reply(From, Reply); + {error,Reason} -> + reply(From, {error, {cant_open_file,OutFile,Reason}}) + + end. + do_export_table(Compiled, Imported, Fd) -> ModList = merge(Imported,Compiled), write_module_data(ModList,Fd). @@ -2043,7 +2144,7 @@ merge([],ModuleList) -> write_module_data([{Module,File}|ModList],Fd) -> write({file,Module,File},Fd), - [Clauses] = ets:lookup(?COLLECTION_TABLE,Module), + [Clauses] = ets:lookup(?COLLECTION_CLAUSE_TABLE,Module), write(Clauses,Fd), ModuleData = ets:match_object(?COLLECTION_TABLE,{#bump{module=Module},'_'}), do_write_module_data(ModuleData,Fd), @@ -2093,7 +2194,7 @@ do_import_to_table(Fd,ImportFile,Imported,DontImport) -> {Module,Clauses} -> case lists:member(Module,DontImport) of false -> - ets:insert(?COLLECTION_TABLE,{Module,Clauses}); + ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}); true -> ok end, @@ -2127,14 +2228,14 @@ do_reset_main_node(Module,Nodes) -> remote_reset(Module,Nodes). do_reset_collection_table(Module) -> - ets:delete(?COLLECTION_TABLE,Module), + ets:delete(?COLLECTION_CLAUSE_TABLE,Module), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). %% do_reset(Module) -> ok %% The reset is done on a per-clause basis to avoid building %% long lists in the case of very large modules do_reset(Module) -> - [{Module,Clauses}] = ets:lookup(?COVER_TABLE, Module), + [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module), do_reset2(Clauses). do_reset2([{M,F,A,C,_L}|Clauses]) -> @@ -2149,10 +2250,19 @@ do_reset2([]) -> ok. do_clear(Module) -> - ets:match_delete(?COVER_TABLE, {Module,'_'}), + ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}), ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}), ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). +not_loaded(Module, unloaded, State) -> + do_clear(Module), + remote_unload(State#main_state.nodes,[Module]), + Compiled = update_compiled([Module], + State#main_state.compiled), + State#main_state{ compiled = Compiled }; +not_loaded(_Module,_Else, State) -> + State. + %%%--Div----------------------------------------------------------------- @@ -2174,7 +2284,36 @@ escape_lt_and_gt1([$<|T],Acc) -> escape_lt_and_gt1(T,[$;,$t,$l,$&|Acc]); escape_lt_and_gt1([$>|T],Acc) -> escape_lt_and_gt1(T,[$;,$t,$g,$&|Acc]); +escape_lt_and_gt1([$&|T],Acc) -> + escape_lt_and_gt1(T,[$;,$p,$m,$a,$&|Acc]); escape_lt_and_gt1([],Acc) -> lists:reverse(Acc); escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). + +pmap(Fun, List) -> + pmap(Fun, List, 20). +pmap(Fun, List, Limit) -> + pmap(Fun, List, [], Limit, 0, []). 
+pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit -> + Collector = self(), + Pid = spawn_link(fun() -> + ?SPAWN_DBG(pmap,E), + Collector ! {res,self(),Fun(E)} + end), + erlang:monitor(process, Pid), + pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc); +pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) -> + receive + {'DOWN', _Ref, process, _, _} -> + pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc); + {res, Pid, Res} -> + pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc]) + end; +pmap(_Fun, [], [], _Limit, 0, Acc) -> + lists:reverse(Acc); +pmap(Fun, [], [], Limit, Cnt, Acc) -> + receive + {'DOWN', _Ref, process, _, _} -> + pmap(Fun, [], [], Limit, Cnt - 1, Acc) + end. diff --git a/lib/tools/src/eprof.erl b/lib/tools/src/eprof.erl index b4313d6888..87fdc1fa34 100644 --- a/lib/tools/src/eprof.erl +++ b/lib/tools/src/eprof.erl @@ -1,19 +1,19 @@ %% %% %CopyrightBegin% -%% -%% Copyright Ericsson AB 1996-2009. All Rights Reserved. -%% +%% +%% Copyright Ericsson AB 1996-2010. All Rights Reserved. +%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in %% compliance with the License. You should have received a copy of the %% Erlang Public License along with this software. If not, it can be %% retrieved online at http://www.erlang.org/. -%% +%% %% Software distributed under the License is distributed on an "AS IS" %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See %% the License for the specific language governing rights and limitations %% under the License. -%% +%% %% %CopyrightEnd% %% %% Purpose: Profile a system in order to figure out where the @@ -23,456 +23,467 @@ -module(eprof). -behaviour(gen_server). --export([start/0, stop/0, dump/0, total_analyse/0, - start_profiling/1, profile/2, profile/4, profile/1, - stop_profiling/0, analyse/0, log/1]). +-export([start/0, + stop/0, + dump/0, + start_profiling/1, start_profiling/2, + profile/1, profile/2, profile/3, profile/4, profile/5, + stop_profiling/0, + analyze/0, analyze/1, analyze/2, + log/1]). %% Internal exports -export([init/1, - call/4, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). +-record(bpd, { + n = 0, % number of total calls + us = 0, % sum of uS for all calls + p = gb_trees:empty(), % tree of {Pid, {Mfa, {Count, Us}}} + mfa = [] % list of {Mfa, {Count, Us}} + }). + +-record(state, { + profiling = false, + pattern = {'_','_','_'}, + rootset = [], + fd = undefined, + start_ts = undefined, + reply = undefined, + bpd = #bpd{} + }). + + + +%% -------------------------------------------------------------------- %% +%% +%% API +%% +%% -------------------------------------------------------------------- %% --include_lib("stdlib/include/qlc.hrl"). - --import(lists, [flatten/1,reverse/1,keysort/2]). - - --record(state, {table = notable, - proc = noproc, - profiling = false, - pfunc = undefined, - pop = running, - ptime = 0, - overhead = 0, - rootset = []}). - -%%%%%%%%%%%%%% - -start() -> gen_server:start({local, eprof}, eprof, [], []). -stop() -> gen_server:call(eprof, stop, infinity). +start() -> gen_server:start({local, ?MODULE}, ?MODULE, [], []). +stop() -> gen_server:call(?MODULE, stop, infinity). +profile(Fun) when is_function(Fun) -> + profile([], Fun); +profile(Rs) when is_list(Rs) -> + start_profiling(Rs). profile(Pids, Fun) -> - start(), - gen_server:call(eprof, {profile,Pids,erlang,apply,[Fun,[]]},infinity). + profile(Pids, Fun, {'_','_','_'}). 
+ +profile(Pids, Fun, Pattern) -> + profile(Pids, erlang, apply, [Fun,[]], Pattern). profile(Pids, M, F, A) -> + profile(Pids, M, F, A, {'_','_','_'}). + +profile(Pids, M, F, A, Pattern) -> start(), - gen_server:call(eprof, {profile,Pids,M,F,A},infinity). + gen_server:call(?MODULE, {profile,Pids,Pattern,M,F,A},infinity). dump() -> - gen_server:call(eprof, dump, infinity). + gen_server:call(?MODULE, dump, infinity). -analyse() -> - gen_server:call(eprof, analyse, infinity). +analyze() -> + analyze(procs). -log(File) -> - gen_server:call(eprof, {logfile, File}, infinity). +analyze(Type) when is_atom(Type) -> + analyze(Type, []); +analyze(Opts) when is_list(Opts) -> + analyze(procs, Opts). +analyze(Type, Opts) when is_list(Opts) -> + gen_server:call(?MODULE, {analyze, Type, Opts}, infinity). -total_analyse() -> - gen_server:call(eprof, total_analyse, infinity). +log(File) -> + gen_server:call(?MODULE, {logfile, File}, infinity). start_profiling(Rootset) -> + start_profiling(Rootset, {'_','_','_'}). +start_profiling(Rootset, Pattern) -> start(), - gen_server:call(eprof, {profile, Rootset}, infinity). + gen_server:call(?MODULE, {profile, Rootset, Pattern}, infinity). stop_profiling() -> - gen_server:call(eprof, stop_profiling, infinity). + gen_server:call(?MODULE, stop_profiling, infinity). -profile(Rs) -> - start_profiling(Rs). -%%%%%%%%%%%%%%%% +%% -------------------------------------------------------------------- %% +%% +%% init +%% +%% -------------------------------------------------------------------- %% -init(_) -> +init([]) -> process_flag(trap_exit, true), - process_flag(priority, max), - put(three_one, {3,1}), %To avoid building garbage. {ok, #state{}}. -subtr({X1,Y1,Z1}, {X1,Y1,Z2}) -> - Z1 - Z2; -subtr({X1,Y1,Z1}, {X2,Y2,Z2}) -> - (((X1-X2) * 1000000) + Y1 - Y2) * 1000000 + Z1 - Z2. +%% -------------------------------------------------------------------- %% +%% +%% handle_call +%% +%% -------------------------------------------------------------------- %% -update_call_statistics(Tab, Key, Time) -> - try ets:update_counter(Tab, Key, Time) of - NewTime when is_integer(NewTime) -> - ets:update_counter(Tab, Key, get(three_one)) - catch - error:badarg -> - ets:insert(Tab, {Key,Time,1}) - end. +%% analyze -update_other_statistics(Tab, Key, Time) -> - try - ets:update_counter(Tab, Key, Time) - catch - error:badarg -> - ets:insert(Tab, {Key,Time,0}) - end. +handle_call({analyze, _, _}, _, #state{ bpd = #bpd{ p = {0,nil}, us = 0, n = 0} = Bpd } = S) when is_record(Bpd, bpd) -> + {reply, nothing_to_analyze, S}; -do_messages({trace_ts,From,Op,Mfa,Time}, Tab, undefined,_PrevOp0,_PrevTime0) -> - PrevFunc = [From|Mfa], - receive - {trace_ts,_,_,_,_}=Ts -> do_messages(Ts, Tab, PrevFunc, Op, Time) - after 0 -> - {PrevFunc,Op,Time} - end; -do_messages({trace_ts,From,Op,Mfa,Time}, Tab, PrevFunc0, call, PrevTime0) -> - update_call_statistics(Tab, PrevFunc0, subtr(Time, PrevTime0)), - PrevFunc = case Op of - exit -> undefined; - out -> undefined; - _ -> [From|Mfa] - end, - receive - {trace_ts,_,_,_,_}=Ts -> do_messages(Ts, Tab, PrevFunc, Op, Time) - after 0 -> - {PrevFunc,Op,Time} - end; -do_messages({trace_ts,From,Op,Mfa,Time}, Tab, PrevFunc0, _PrevOp0, PrevTime0) -> - update_other_statistics(Tab, PrevFunc0, subtr(Time, PrevTime0)), - PrevFunc = case Op of - exit -> undefined; - out -> undefined; - _ -> [From|Mfa] - end, - receive - {trace_ts,_,_,_,_}=Ts -> do_messages(Ts, Tab, PrevFunc, Op, Time) - after 0 -> - {PrevFunc,Op,Time} - end. 
+handle_call({analyze, procs, Opts}, _, #state{ bpd = #bpd{ p = Ps, us = Tus} = Bpd, fd = Fd} = S) when is_record(Bpd, bpd) -> + lists:foreach(fun + ({Pid, Mfas}) -> + {Pn, Pus} = sum_bp_total_n_us(Mfas), + format(Fd, "~n****** Process ~w -- ~s % of profiled time *** ~n", [Pid, s("~.2f", [100.0*divide(Pus,Tus)])]), + print_bp_mfa(Mfas, {Pn,Pus}, Fd, Opts), + ok + end, gb_trees:to_list(Ps)), + {reply, ok, S}; -%%%%%%%%%%%%%%%%%% +handle_call({analyze, total, Opts}, _, #state{ bpd = #bpd{ mfa = Mfas, n = Tn, us = Tus} = Bpd, fd = Fd} = S) when is_record(Bpd, bpd) -> + print_bp_mfa(Mfas, {Tn, Tus}, Fd, Opts), + {reply, ok, S}; -handle_cast(_Req, S) -> {noreply, S}. +handle_call({analyze, Type, _Opts}, _, S) -> + {reply, {error, {undefined, Type}}, S}; -terminate(_Reason,_S) -> - call_trace_for_all(false), - normal. +%% profile -%%%%%%%%%%%%%%%%%% +handle_call({profile, _Rootset, _Pattern, _M,_F,_A}, _From, #state{ profiling = true } = S) -> + {reply, {error, already_profiling}, S}; -handle_call({logfile, F}, _FromTag, Status) -> - case file:open(F, [write]) of - {ok, Fd} -> - case get(fd) of - undefined -> ok; - FdOld -> file:close(FdOld) - end, - put(fd, Fd), - {reply, ok, Status}; - {error, _} -> - {reply, error, Status} - end; +handle_call({profile, Rootset, Pattern, M,F,A}, From, #state{fd = Fd } = S) -> -handle_call({profile, Rootset}, {From, _Tag}, S) -> - link(From), - maybe_delete(S#state.table), - io:format("eprof: Starting profiling ..... ~n",[]), - ptrac(S#state.rootset, false, all()), - flush_receive(), - Tab = ets:new(eprof, [set, public]), - case ptrac(Rootset, true, all()) of - false -> - {reply, error, #state{}}; + set_pattern_trace(false, S#state.pattern), + set_process_trace(false, S#state.rootset), + + Pid = setup_profiling(M,F,A), + case set_process_trace(true, [Pid|Rootset]) of true -> - uni_schedule(), - call_trace_for_all(true), - erase(replyto), - {reply, profiling, #state{table = Tab, - proc = From, - profiling = true, - rootset = Rootset}} + set_pattern_trace(true, Pattern), + T0 = now(), + execute_profiling(Pid), + {noreply, #state{ + profiling = true, + rootset = [Pid|Rootset], + start_ts = T0, + reply = From, + fd = Fd, + pattern = Pattern + }}; + false -> + exit(Pid, eprof_kill), + {reply, error, #state{ fd = Fd}} end; -handle_call(stop_profiling, _FromTag, S) when S#state.profiling -> - ptrac(S#state.rootset, false, all()), - call_trace_for_all(false), - multi_schedule(), - io:format("eprof: Stop profiling~n",[]), - ets:delete(S#state.table, nofunc), - {reply, profiling_stopped, S#state{profiling = false}}; +handle_call({profile, _Rootset, _Pattern}, _From, #state{ profiling = true } = S) -> + {reply, {error, already_profiling}, S}; -handle_call(stop_profiling, _FromTag, S) -> - {reply, profiling_already_stopped, S}; +handle_call({profile, Rootset, Pattern}, From, #state{ fd = Fd } = S) -> + + set_pattern_trace(false, S#state.pattern), + set_process_trace(false, S#state.rootset), -handle_call({profile, Rootset, M, F, A}, FromTag, S) -> - io:format("eprof: Starting profiling..... ~n", []), - maybe_delete(S#state.table), - ptrac(S#state.rootset, false, all()), - flush_receive(), - put(replyto, FromTag), - Tab = ets:new(eprof, [set, public]), - P = spawn_link(eprof, call, [self(), M, F, A]), - case ptrac([P|Rootset], true, all()) of + case set_process_trace(true, Rootset) of true -> - uni_schedule(), - call_trace_for_all(true), - P ! 
{self(),go}, - {noreply, #state{table = Tab, - profiling = true, - rootset = [P|Rootset]}}; + T0 = now(), + set_pattern_trace(true, Pattern), + {reply, profiling, #state{ + profiling = true, + rootset = Rootset, + start_ts = T0, + reply = From, + fd = Fd, + pattern = Pattern + }}; false -> - exit(P, kill), - erase(replyto), - {reply, error, #state{}} + {reply, error, #state{ fd = Fd }} end; -handle_call(dump, _FromTag, S) -> - {reply, dump(S#state.table), S}; - -handle_call(analyse, _FromTag, S) -> - {reply, analyse(S), S}; +handle_call(stop_profiling, _From, #state{ profiling = false } = S) -> + {reply, profiling_already_stopped, S}; -handle_call(total_analyse, _FromTag, S) -> - {reply, total_analyse(S), S}; +handle_call(stop_profiling, _From, #state{ profiling = true } = S) -> -handle_call(stop, _FromTag, S) -> - multi_schedule(), - {stop, normal, stopped, S}. + set_pattern_trace(pause, S#state.pattern), -%%%%%%%%%%%%%%%%%%% + Bpd = collect_bpd(), -handle_info({trace_ts,_From,_Op,_Func,_Time}=M, S0) when S0#state.profiling -> - Start = erlang:now(), - #state{table=Tab,pop=PrevOp0,ptime=PrevTime0,pfunc=PrevFunc0, - overhead=Overhead0} = S0, - {PrevFunc,PrevOp,PrevTime} = do_messages(M, Tab, PrevFunc0, PrevOp0, PrevTime0), - Overhead = Overhead0 + subtr(erlang:now(), Start), - S = S0#state{overhead=Overhead,pfunc=PrevFunc,pop=PrevOp,ptime=PrevTime}, - {noreply,S}; + set_process_trace(false, S#state.rootset), + set_pattern_trace(false, S#state.pattern), -handle_info({trace_ts, From, _, _, _}, S) when not S#state.profiling -> - ptrac([From], false, all()), - {noreply, S}; + {reply, profiling_stopped, S#state{ + profiling = false, + rootset = [], + pattern = {'_','_','_'}, + bpd = Bpd + }}; -handle_info({_P, {answer, A}}, S) -> - ptrac(S#state.rootset, false, all()), - io:format("eprof: Stop profiling~n",[]), - {From,_Tag} = get(replyto), - catch unlink(From), - ets:delete(S#state.table, nofunc), - gen_server:reply(erase(replyto), {ok, A}), - multi_schedule(), - {noreply, S#state{profiling = false, - rootset = []}}; - -handle_info({'EXIT', P, Reason}, - #state{profiling=true,proc=P,table=T,rootset=RootSet}) -> - maybe_delete(T), - ptrac(RootSet, false, all()), - multi_schedule(), - io:format("eprof: Profiling failed\n",[]), - case erase(replyto) of - undefined -> - {noreply, #state{}}; - FromTag -> - gen_server:reply(FromTag, {error, Reason}), - {noreply, #state{}} +%% logfile +handle_call({logfile, File}, _From, #state{ fd = OldFd } = S) -> + case file:open(File, [write]) of + {ok, Fd} -> + case OldFd of + undefined -> ok; + OldFd -> file:close(OldFd) + end, + {reply, ok, S#state{ fd = Fd}}; + Error -> + {reply, Error, S} end; -handle_info({'EXIT',_P,_Reason}, S) -> - {noreply, S}. +handle_call(dump, _From, #state{ bpd = Bpd } = S) when is_record(Bpd, bpd) -> + {reply, gb_trees:to_list(Bpd#bpd.p), S}; -uni_schedule() -> - erlang:system_flag(multi_scheduling, block). +handle_call(stop, _FromTag, S) -> + {stop, normal, stopped, S}. -multi_schedule() -> - erlang:system_flag(multi_scheduling, unblock). +%% -------------------------------------------------------------------- %% +%% +%% handle_cast +%% +%% -------------------------------------------------------------------- %% -%%%%%%%%%%%%%%%%%% +handle_cast(_Msg, State) -> + {noreply, State}. -call(Top, M, F, A) -> - receive - {Top,go} -> - Top ! {self(), {answer, apply(M,F,A)}} - end. 
+%% -------------------------------------------------------------------- %% +%% +%% handle_info +%% +%% -------------------------------------------------------------------- %% -call_trace_for_all(Flag) -> - erlang:trace_pattern(on_load, Flag, [local]), - erlang:trace_pattern({'_','_','_'}, Flag, [local]). +handle_info({'EXIT', _, normal}, S) -> + {noreply, S}; +handle_info({'EXIT', _, eprof_kill}, S) -> + {noreply, S}; +handle_info({'EXIT', _, Reason}, #state{ reply = FromTag } = S) -> -ptrac([P|T], How, Flags) when is_pid(P) -> - case dotrace(P, How, Flags) of - true -> - ptrac(T, How, Flags); - false when How -> - false; - false -> - ptrac(T, How, Flags) - end; + set_process_trace(false, S#state.rootset), + set_pattern_trace(false, S#state.pattern), -ptrac([P|T], How, Flags) when is_atom(P) -> - case whereis(P) of - undefined when How -> - false; - undefined when not How -> - ptrac(T, How, Flags); - Pid -> - ptrac([Pid|T], How, Flags) - end; + gen_server:reply(FromTag, {error, Reason}), + {noreply, S#state{ + profiling = false, + rootset = [], + pattern = {'_','_','_'} + }}; -ptrac([H|_],_How,_Flags) -> - io:format("** eprof bad process ~w~n",[H]), - false; +% check if Pid is spawned process? +handle_info({_Pid, {answer, Result}}, #state{ reply = {From,_} = FromTag} = S) -> -ptrac([],_,_) -> true. + set_pattern_trace(pause, S#state.pattern), -dotrace(P, How, What) -> - case (catch erlang:trace(P, How, What)) of - 1 -> - true; - _Other when not How -> - true; - _Other -> - io:format("** eprof: bad process: ~p,~p,~p~n", [P,How,What]), - false - end. + Bpd = collect_bpd(), -all() -> [call,arity,return_to,running,timestamp,set_on_spawn]. - -total_analyse(#state{table=notable}) -> - nothing_to_analyse; -total_analyse(S) -> - #state{table = T, overhead = Overhead} = S, - QH = qlc:q([{{From,Mfa},Time,Count} || - {[From|Mfa],Time,Count} <- ets:table(T)]), - Pcalls = reverse(keysort(2, replicas(qlc:eval(QH)))), - Time = collect_times(Pcalls), - format("FUNCTION~44s TIME ~n", ["CALLS"]), - printit(Pcalls, Time), - format("\nTotal time: ~.2f\n", [Time / 1000000]), - format("Measurement overhead: ~.2f\n", [Overhead / 1000000]). - -analyse(#state{table=notable}) -> - nothing_to_analyse; -analyse(S) -> - #state{table = T, overhead = Overhead} = S, - Pids = ordsets:from_list(flatten(ets:match(T, {['$1'|'_'],'_', '_'}))), - Times = sum(ets:match(T, {'_','$1', '_'})), - format("FUNCTION~44s TIME ~n", ["CALLS"]), - do_pids(Pids, T, 0, Times), - format("\nTotal time: ~.2f\n", [Times / 1000000]), - format("Measurement overhead: ~.2f\n", [Overhead / 1000000]). - -do_pids([Pid|Tail], T, AckTime, Total) -> - Pcalls = - reverse(keysort(2, to_tups(ets:match(T, {[Pid|'$1'], '$2','$3'})))), - Time = collect_times(Pcalls), - PercentTotal = 100 * (divide(Time, Total)), - format("~n****** Process ~w -- ~s % of profiled time *** ~n", - [Pid, fpf(PercentTotal)]), - printit(Pcalls, Time), - do_pids(Tail, T, AckTime + Time, Total); -do_pids([], _, _, _) -> - ok. + set_process_trace(false, S#state.rootset), + set_pattern_trace(false, S#state.pattern), -printit([],_) -> ok; -printit([{{Mod,Fun,Arity}, Time, Calls} |Tail], ProcTime) -> - format("~s ~s ~s % ~n", [ff(Mod,Fun,Arity), fint(Calls), - fpf(100*(divide(Time,ProcTime)))]), - printit(Tail, ProcTime); -printit([{{_,{Mod,Fun,Arity}}, Time, Calls} |Tail], ProcTime) -> - format("~s ~s ~s % ~n", [ff(Mod,Fun,Arity), fint(Calls), - fpf(100*(divide(Time,ProcTime)))]), - printit(Tail, ProcTime); -printit([_|T], Time) -> - printit(T, Time). 
- -ff(Mod,Fun,Arity) -> - pad(flatten(io_lib:format("~w:~w/~w", [Mod,Fun, Arity])),45). - -pad(Str, Len) -> - Strlen = length(Str), - if - Strlen > Len -> strip_tail(Str, 45); - true -> lists:append(Str, mklist(Len-Strlen)) - end. + catch unlink(From), + gen_server:reply(FromTag, {ok, Result}), + {noreply, S#state{ + profiling = false, + rootset = [], + pattern = {'_','_','_'}, + bpd = Bpd + }}. + +%% -------------------------------------------------------------------- %% +%% +%% termination +%% +%% -------------------------------------------------------------------- %% + +terminate(_Reason, #state{ fd = undefined }) -> + set_pattern_trace(false, {'_','_','_'}), + ok; +terminate(_Reason, #state{ fd = Fd }) -> + file:close(Fd), + set_pattern_trace(false, {'_','_','_'}), + ok. -strip_tail([_|_], 0) ->[]; -strip_tail([H|T], I) -> [H|strip_tail(T, I-1)]; -strip_tail([],_I) -> []. +%% -------------------------------------------------------------------- %% +%% +%% code_change +%% +%% -------------------------------------------------------------------- %% -fpf(F) -> strip_tail(flatten(io_lib:format("~w", [round(F)])), 5). -fint(Int) -> pad(flatten(io_lib:format("~w",[Int])), 10). +code_change(_OldVsn, State, _Extra) -> + {ok, State}. -mklist(0) -> []; -mklist(I) -> [$ |mklist(I-1)]. -to_tups(L) -> lists:map(fun(List) -> erlang:list_to_tuple(List) end, L). +%% -------------------------------------------------------------------- %% +%% +%% AUX Functions +%% +%% -------------------------------------------------------------------- %% -divide(X,Y) -> X / Y. +setup_profiling(M,F,A) -> + spawn_link(fun() -> spin_profile(M,F,A) end). -collect_times([]) -> 0; -collect_times([Tup|Tail]) -> element(2, Tup) + collect_times(Tail). +spin_profile(M, F, A) -> + receive + {Pid, execute} -> + Pid ! {self(), {answer, erlang:apply(M,F,A)}} + end. -dump(T) -> - L = ets:tab2list(T), - format(L). +execute_profiling(Pid) -> + Pid ! {self(), execute}. -format([H|T]) -> - format("~p~n", [H]), format(T); -format([]) -> ok. +set_pattern_trace(Flag, Pattern) -> + erlang:system_flag(multi_scheduling, block), + erlang:trace_pattern(on_load, Flag, [call_time]), + erlang:trace_pattern(Pattern, Flag, [call_time]), + erlang:system_flag(multi_scheduling, unblock), + ok. -format(F, A) -> - io:format(F,A), - case get(fd) of - undefined -> ok; - Fd -> io:format(Fd, F,A) +set_process_trace(Flag, Pids) -> + % do we need procs for meta info? + % could be useful + set_process_trace(Flag, Pids, [call, set_on_spawn]). +set_process_trace(_, [], _) -> true; +set_process_trace(Flag, [Pid|Pids], Options) when is_pid(Pid) -> + try + erlang:trace(Pid, Flag, Options), + set_process_trace(Flag, Pids, Options) + catch + _:_ -> + false + end; +set_process_trace(Flag, [Name|Pids], Options) when is_atom(Name) -> + case whereis(Name) of + undefined -> + set_process_trace(Flag, Pids, Options); + Pid -> + set_process_trace(Flag, [Pid|Pids], Options) end. -maybe_delete(T) -> - catch ets:delete(T). +collect_bpd() -> + collect_bpd([M || M <- [element(1, Mi) || Mi <- code:all_loaded()], M =/= ?MODULE]). + +collect_bpd(Ms) when is_list(Ms) -> + collect_bpdf(collect_mfas(Ms) ++ erlang:system_info(snifs)). + +collect_mfas(Ms) -> + lists:foldl(fun + (M, Mfas) -> + Mfas ++ [{M, F, A} || {F, A} <- M:module_info(functions)] + end, [], Ms). + +collect_bpdf(Mfas) -> + collect_bpdf(Mfas, #bpd{}). 
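As the collect_bpdf clauses below show, erlang:trace_info(MFA, call_time) yields {call_time, Data} with one {Pid, Count, Seconds, MicroSeconds} tuple per process, or false/[] when there is nothing to report. A tiny illustrative sketch (not part of the patch) that flattens one such result into total microseconds, using the same S * 1000000 + Us conversion as collect_bpdfp:

%% Sketch: total accumulated time of one traced MFA, in microseconds.
%% Assumes call_time tracing was enabled for Mfa (see set_pattern_trace/2 above).
mfa_call_time_us(Mfa) ->
    case erlang:trace_info(Mfa, call_time) of
        {call_time, Data} when is_list(Data) ->
            lists:sum([S * 1000000 + Us || {_Pid, _Count, S, Us} <- Data]);
        _ ->
            0
    end.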
+collect_bpdf([], Bpd) -> + Bpd; +collect_bpdf([Mfa|Mfas], #bpd{n = N, us = Us, p = Tree, mfa = Code } = Bpd) -> + case erlang:trace_info(Mfa, call_time) of + {call_time, []} -> + collect_bpdf(Mfas, Bpd); + {call_time, Data} when is_list(Data) -> + {CTn, CTus, CTree} = collect_bpdfp(Mfa, Tree, Data), + collect_bpdf(Mfas, Bpd#bpd{ + n = CTn + N, + us = CTus + Us, + p = CTree, + mfa = [{Mfa, {CTn, CTus}}|Code] + }); + {call_time, false} -> + collect_bpdf(Mfas, Bpd); + {call_time, _Other} -> + collect_bpdf(Mfas, Bpd) + end. -sum([[H]|T]) -> H + sum(T); -sum([]) -> 0. +collect_bpdfp(Mfa, Tree, Data) -> + lists:foldl(fun + ({Pid, Ni, Si, Usi}, {PTno, PTuso, To}) -> + Time = Si * 1000000 + Usi, + Ti1 = case gb_trees:lookup(Pid, To) of + none -> + gb_trees:enter(Pid, [{Mfa, {Ni, Time}}], To); + {value, Pmfas} -> + gb_trees:enter(Pid, [{Mfa, {Ni, Time}}|Pmfas], To) + end, + {PTno + Ni, PTuso + Time, Ti1} + end, {0,0, Tree}, Data). + +%% manipulators +sort_mfa(Bpfs, mfa) when is_list(Bpfs) -> + lists:sort(fun + ({A,_}, {B,_}) when A < B -> true; + (_, _) -> false + end, Bpfs); +sort_mfa(Bpfs, time) when is_list(Bpfs) -> + lists:sort(fun + ({_,{_,A}}, {_,{_,B}}) when A < B -> true; + (_, _) -> false + end, Bpfs); +sort_mfa(Bpfs, calls) when is_list(Bpfs) -> + lists:sort(fun + ({_,{A,_}}, {_,{B,_}}) when A < B -> true; + (_, _) -> false + end, Bpfs); +sort_mfa(Bpfs, _) when is_list(Bpfs) -> sort_mfa(Bpfs, time). + +filter_mfa(Bpfs, Ts) when is_list(Ts) -> + filter_mfa(Bpfs, [], proplists:get_value(calls, Ts, 0), proplists:get_value(time, Ts, 0)); +filter_mfa(Bpfs, _) -> Bpfs. +filter_mfa([], Out, _, _) -> lists:reverse(Out); +filter_mfa([{_, {C, T}}=Bpf|Bpfs], Out, Ct, Tt) when C >= Ct, T >= Tt -> filter_mfa(Bpfs, [Bpf|Out], Ct, Tt); +filter_mfa([_|Bpfs], Out, Ct, Tt) -> filter_mfa(Bpfs, Out, Ct, Tt). + +sum_bp_total_n_us(Mfas) -> + lists:foldl(fun ({_, {Ci,Usi}}, {Co, Uso}) -> {Co + Ci, Uso + Usi} end, {0,0}, Mfas). + +%% strings and format + +string_bp_mfa(Mfas, Tus) -> string_bp_mfa(Mfas, Tus, {0,0,0,0,0}, []). +string_bp_mfa([], _, Ws, Strings) -> {Ws, lists:reverse(Strings)}; +string_bp_mfa([{Mfa, {Count, Time}}|Mfas], Tus, {MfaW, CountW, PercW, TimeW, TpCW}, Strings) -> + Smfa = s(Mfa), + Scount = s(Count), + Stime = s(Time), + Sperc = s("~.2f", [100*divide(Time,Tus)]), + Stpc = s("~.2f", [divide(Time,Count)]), + + string_bp_mfa(Mfas, Tus, { + erlang:max(MfaW, length(Smfa)), + erlang:max(CountW,length(Scount)), + erlang:max(PercW, length(Sperc)), + erlang:max(TimeW, length(Stime)), + erlang:max(TpCW, length(Stpc)) + }, [[Smfa, Scount, Sperc, Stime, Stpc] | Strings]). + +print_bp_mfa(Mfas, {_Tn, Tus}, Fd, Opts) -> + Fmfas = filter_mfa(sort_mfa(Mfas, proplists:get_value(sort, Opts)), proplists:get_value(filter, Opts)), + {{MfaW, CountW, PercW, TimeW, TpCW}, Strs} = string_bp_mfa(Fmfas, Tus), + Ws = { + erlang:max(length("FUNCTION"), MfaW), + erlang:max(length("CALLS"), CountW), + erlang:max(length(" %"), PercW), + erlang:max(length("TIME"), TimeW), + erlang:max(length("uS / CALLS"), TpCW) + }, + format(Fd, Ws, ["FUNCTION", "CALLS", " %", "TIME", "uS / CALLS"]), + format(Fd, Ws, ["--------", "-----", "---", "----", "----------"]), + + lists:foreach(fun (String) -> format(Fd, Ws, String) end, Strs), + ok. -replicas(L) -> - replicas(L, []). +s({M,F,A}) -> s("~w:~w/~w",[M,F,A]); +s(Term) -> s("~p", [Term]). +s(Format, Terms) -> lists:flatten(io_lib:format(Format, Terms)). 
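Putting the new API together — profile/1 with a fun, analyze/0 for a per-process breakdown, analyze/2 with the sort and filter options handled by sort_mfa/filter_mfa above — a short usage sketch; the profiled fun and the option values are made-up examples, not code from the patch:

%% Sketch of driving the reworked eprof API.
example_profile() ->
    {ok, _Value} = eprof:profile(fun() -> lists:seq(1, 1000) end),
    ok = eprof:analyze(),                                  % per-process breakdown (procs)
    ok = eprof:analyze(total, [{sort, calls}]),            % one table for all processes
    ok = eprof:analyze(procs, [{filter, [{calls, 10}]}]),  % drop functions with < 10 calls
    stopped = eprof:stop().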
-replicas([{{Pid, {Mod,Fun,Arity}}, Ack,Calls} |Tail], Result) -> - case search({Mod,Fun,Arity},Result) of - false -> - replicas(Tail, [{{Pid, {Mod,Fun,Arity}}, Ack,Calls} |Result]); - {Ack2, Calls2} -> - Result2 = del({Mod,Fun,Arity}, Result), - replicas(Tail, [{{Pid, {Mod,Fun,Arity}}, - Ack+Ack2,Calls+Calls2} |Result2]) - end; -replicas([_|T], Ack) -> %% Whimpy - replicas(T, Ack); - -replicas([], Res) -> Res. - -search(Key, [{{_,Key}, Ack, Calls}|_]) -> - {Ack, Calls}; -search(Key, [_|T]) -> - search(Key, T); -search(_Key,[]) -> false. - -del(Key, [{{_,Key},_Ack,_Calls}|T]) -> - T; -del(Key, [H | Tail]) -> - [H|del(Key, Tail)]; -del(_Key,[]) -> []. - -flush_receive() -> - receive - {trace_ts, From, _, _, _} when is_pid(From) -> - ptrac([From], false, all()), - flush_receive(); - _ -> - flush_receive() - after 0 -> - ok - end. +format(Fd, {MfaW, CountW, PercW, TimeW, TpCW}, Strings) -> + format(Fd, s("~~.~ps ~~~ps ~~~ps ~~~ps [~~~ps]~~n", [MfaW, CountW, PercW, TimeW, TpCW]), Strings); +format(undefined, Format, Strings) -> + io:format(Format, Strings), + ok; +format(Fd, Format, Strings) -> + io:format(Fd, Format, Strings), + io:format(Format, Strings), + ok. -code_change(_OldVsn, State, _Extra) -> - {ok,State}. +divide(_,0) -> 0.0; +divide(T,N) -> T/N. diff --git a/lib/tools/src/xref_base.erl b/lib/tools/src/xref_base.erl index 1656899e8f..93f0e9c0c8 100644 --- a/lib/tools/src/xref_base.erl +++ b/lib/tools/src/xref_base.erl @@ -19,6 +19,8 @@ -module(xref_base). +%% Avoid warning for local function error/1 clashing with autoimported BIF. +-compile({no_auto_import,[error/1]}). -export([new/0, new/1, delete/1, add_directory/2, add_directory/3, add_module/2, add_module/3, diff --git a/lib/tools/src/xref_compiler.erl b/lib/tools/src/xref_compiler.erl index c80eb0e669..1445e135be 100644 --- a/lib/tools/src/xref_compiler.erl +++ b/lib/tools/src/xref_compiler.erl @@ -31,6 +31,8 @@ -define(CALL(F), ok). -endif. +%% Avoid warning for local function error/1 clashing with autoimported BIF. +-compile({no_auto_import,[error/1]}). -export([compile/2]). -export([update_graph_counter/3]). diff --git a/lib/tools/src/xref_utils.erl b/lib/tools/src/xref_utils.erl index 0ef199cec7..9d4a175d88 100644 --- a/lib/tools/src/xref_utils.erl +++ b/lib/tools/src/xref_utils.erl @@ -18,6 +18,8 @@ %% -module(xref_utils). +%% Avoid warning for local function error/1 clashing with autoimported BIF. +-compile({no_auto_import,[error/1]}). -export([xset/2]). -export([is_directory/1, file_info/1, fa_to_mfa/2]). diff --git a/lib/tools/test/Makefile b/lib/tools/test/Makefile index 3a59be758a..826a8f3ee8 100644 --- a/lib/tools/test/Makefile +++ b/lib/tools/test/Makefile @@ -39,7 +39,8 @@ INSTALL_PROGS= $(TARGET_FILES) EMAKEFILE=Emakefile -SPEC_FILES= tools.spec tools.spec.win +SPEC_FILES= tools.spec +COVER_FILE = tools.cover # ---------------------------------------------------- # Release directory specification @@ -84,7 +85,8 @@ release_spec: opt release_tests_spec: make_emakefile $(INSTALL_DIR) $(RELSYSDIR) - $(INSTALL_DATA) $(SPEC_FILES) $(EMAKEFILE) $(ERL_FILES) $(RELSYSDIR) + $(INSTALL_DATA) $(SPEC_FILES) $(COVER_FILE) $(EMAKEFILE) \ + $(ERL_FILES) $(RELSYSDIR) chmod -f -R u+w $(RELSYSDIR) @tar cf - *_SUITE_data | (cd $(RELSYSDIR); tar xf -) diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index b9ccd62d0b..494ef55f59 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2009. All Rights Reserved. 
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,13 +18,16 @@ %% -module(cover_SUITE). --export([all/1]). +-export([all/0, init_per_testcase/2, end_per_testcase/2, + suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2]). + -export([start/1, compile/1, analyse/1, misc/1, stop/1, distribution/1, export_import/1, otp_5031/1, eif/1, otp_5305/1, otp_5418/1, otp_6115/1, otp_7095/1, otp_8188/1, otp_8270/1, otp_8273/1, otp_8340/1]). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %%---------------------------------------------------------------------- %% The following directory structure is assumed: @@ -37,18 +40,50 @@ %% y %%---------------------------------------------------------------------- -all(suite) -> +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> case whereis(cover_server) of undefined -> [start, compile, analyse, misc, stop, distribution, - export_import, - otp_5031, eif, otp_5305, otp_5418, otp_6115, otp_7095, - otp_8188, otp_8270, otp_8273, otp_8340]; + export_import, otp_5031, eif, otp_5305, otp_5418, + otp_6115, otp_7095, otp_8188, otp_8270, otp_8273, + otp_8340]; _pid -> - {skip,"It looks like the test server is running cover. " - "Can't run cover test."} + {skip, + "It looks like the test server is running " + "cover. Can't run cover test."} end. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + +init_per_testcase(TC, Config) when TC =:= misc; TC =:= compile -> + case code:which(crypto) of + Path when is_list(Path) -> + init_per_testcase(dummy_tc, Config); + _Else -> + {skip, "No crypto file to test with"} + end; +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + %cover:stop(), + ok. + start(suite) -> []; start(Config) when is_list(Config) -> ?line ok = file:set_cwd(?config(data_dir, Config)), @@ -381,8 +416,8 @@ export_import(Config) when is_list(Config) -> ?line {ok,a} = cover:compile(a), ?line ?t:capture_start(), ?line ok = cover:export("all_exported"), - ?line [Text2] = ?t:capture_get(), - ?line "Export includes data from imported files"++_ = lists:flatten(Text2), + ?line [] = ?t:capture_get(), +% ?line "Export includes data from imported files"++_ = lists:flatten(Text2), ?line ?t:capture_stop(), ?line ok = cover:stop(), ?line ok = cover:import("all_exported"), diff --git a/lib/tools/test/cprof_SUITE.erl b/lib/tools/test/cprof_SUITE.erl index e697cc1571..b6f786d33f 100644 --- a/lib/tools/test/cprof_SUITE.erl +++ b/lib/tools/test/cprof_SUITE.erl @@ -41,7 +41,7 @@ -define(config(A,B),config(A,B)). -export([config/2]). -else. --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -endif. -ifdef(debug). @@ -63,14 +63,17 @@ config(data_dir, _) -> "cprof_SUITE_data". -else. %% When run in test server. --export([all/1, init_per_testcase/2, fin_per_testcase/2, not_run/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, + init_per_testcase/2, end_per_testcase/2, + not_run/1]). -export([basic/1, on_load/1, modules/1]). init_per_testcase(_Case, Config) -> ?line Dog=test_server:timetrap(test_server:seconds(30)), [{watchdog, Dog}|Config]. 
-fin_per_testcase(_Case, Config) -> +end_per_testcase(_Case, Config) -> erlang:trace_pattern({'_','_','_'}, false, [local,meta,call_count]), erlang:trace_pattern(on_load, false, [local,meta,call_count]), erlang:trace(all, false, [all]), @@ -78,16 +81,30 @@ fin_per_testcase(_Case, Config) -> test_server:timetrap_cancel(Dog), ok. -all(doc) -> - ["Test the cprof profiling tool."]; -all(suite) -> - case test_server:is_native(?MODULE) of +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + case test_server:is_native(cprof_SUITE) of true -> [not_run]; false -> [basic, on_load, modules] -%, on_and_off, info, -% pause_and_restart, combo] end. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + not_run(Config) when is_list(Config) -> {skipped,"Native code"}. diff --git a/lib/tools/test/emem_SUITE.erl b/lib/tools/test/emem_SUITE.erl index 430fa86c6c..8b38e7d3a8 100644 --- a/lib/tools/test/emem_SUITE.erl +++ b/lib/tools/test/emem_SUITE.erl @@ -24,7 +24,8 @@ receive_and_save_trace/2, send_trace/2]). --export([all/1, init_per_testcase/2, fin_per_testcase/2]). +-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2, + init_per_testcase/2, end_per_testcase/2]). -export([live_node/1, 'sparc_sunos5.8_32b_emt2.0'/1, @@ -41,7 +42,7 @@ -include_lib("kernel/include/file.hrl"). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -define(DEFAULT_TIMEOUT, ?t:minutes(5)). @@ -65,23 +66,32 @@ %% %% -all(doc) -> []; -all(suite) -> +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> case is_debug_compiled() of - true -> {skipped, "Not run when debug compiled"}; + true -> {skip, "Not run when debug compiled"}; false -> test_cases() end. - -test_cases() -> - [live_node, - 'sparc_sunos5.8_32b_emt2.0', + +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + +test_cases() -> + [live_node, 'sparc_sunos5.8_32b_emt2.0', 'pc_win2000_32b_emt2.0', 'pc.smp_linux2.2.19pre17_32b_emt2.0', 'powerpc_darwin7.7.0_32b_emt2.0', 'alpha_osf1v5.1_64b_emt2.0', 'sparc_sunos5.8_64b_emt2.0', - 'sparc_sunos5.8_32b_emt1.0', - 'pc_win2000_32b_emt1.0', + 'sparc_sunos5.8_32b_emt1.0', 'pc_win2000_32b_emt1.0', 'powerpc_darwin7.7.0_32b_emt1.0', 'alpha_osf1v5.1_64b_emt1.0', 'sparc_sunos5.8_64b_emt1.0']. @@ -100,7 +110,7 @@ init_per_testcase(Case, Config) when is_list(Config) -> [{watchdog, Dog}, {testcase, Case} | Config]) end. -fin_per_testcase(_Case, Config) when is_list(Config) -> +end_per_testcase(_Case, Config) when is_list(Config) -> ignore_cores:restore(Config), Dog = ?config(watchdog, Config), ?t:timetrap_cancel(Dog), @@ -700,8 +710,8 @@ start_node(Name, Args) -> % stop_node(Node) -> % ?t:stop_node(Node). -is_debug_compiled() -> - is_debug_compiled(erlang:system_info(system_version)). +is_debug_compiled() -> +is_debug_compiled(erlang:system_info(system_version)). is_debug_compiled([$d,$e,$b,$u,$g | _]) -> true; diff --git a/lib/tools/test/eprof_SUITE.erl b/lib/tools/test/eprof_SUITE.erl index 028fea8fe1..16246d5e0b 100644 --- a/lib/tools/test/eprof_SUITE.erl +++ b/lib/tools/test/eprof_SUITE.erl @@ -18,12 +18,111 @@ %% -module(eprof_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). --export([all/1,tiny/1,eed/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2,tiny/1,eed/1,basic/1]). 
-all(suite) -> [tiny,eed]. +suite() -> [{ct_hooks,[ts_install_cth]}]. +all() -> + [basic, tiny, eed]. + +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + +basic(suite) -> []; +basic(Config) when is_list(Config) -> + + %% load eprof_test and change directory + + ?line {ok, OldCurDir} = file:get_cwd(), + Datadir = ?config(data_dir, Config), + Privdir = ?config(priv_dir, Config), + ?line {ok,eprof_test} = compile:file(filename:join(Datadir, "eprof_test"), + [trace,{outdir, Privdir}]), + ?line ok = file:set_cwd(Privdir), + ?line code:purge(eprof_test), + ?line {module,eprof_test} = code:load_file(eprof_test), + + %% rootset profiling + + ?line ensure_eprof_stopped(), + ?line profiling = eprof:profile([self()]), + ?line {error, already_profiling} = eprof:profile([self()]), + ?line profiling_stopped = eprof:stop_profiling(), + ?line profiling_already_stopped = eprof:stop_profiling(), + ?line profiling = eprof:start_profiling([self(),self(),self()]), + ?line profiling_stopped = eprof:stop_profiling(), + + %% with patterns + + ?line profiling = eprof:start_profiling([self()], {?MODULE, '_', '_'}), + ?line {error, already_profiling} = eprof:start_profiling([self()], {?MODULE, '_', '_'}), + ?line profiling_stopped = eprof:stop_profiling(), + ?line profiling = eprof:start_profiling([self()], {?MODULE, start_stop, '_'}), + ?line profiling_stopped = eprof:stop_profiling(), + ?line profiling = eprof:start_profiling([self()], {?MODULE, start_stop, 1}), + ?line profiling_stopped = eprof:stop_profiling(), + + %% with fun + + ?line {ok, _} = eprof:profile(fun() -> eprof_test:go(10) end), + ?line profiling = eprof:profile([self()]), + ?line {error, already_profiling} = eprof:profile(fun() -> eprof_test:go(10) end), + ?line profiling_stopped = eprof:stop_profiling(), + ?line {ok, _} = eprof:profile(fun() -> eprof_test:go(10) end), + ?line {ok, _} = eprof:profile([], fun() -> eprof_test:go(10) end), + ?line Pid = whereis(eprof), + ?line {ok, _} = eprof:profile(erlang:processes() -- [Pid], fun() -> eprof_test:go(10) end), + ?line {ok, _} = eprof:profile([], fun() -> eprof_test:go(10) end, {eprof_test, '_', '_'}), + ?line {ok, _} = eprof:profile([], fun() -> eprof_test:go(10) end, {eprof_test, go, '_'}), + ?line {ok, _} = eprof:profile([], fun() -> eprof_test:go(10) end, {eprof_test, go, 1}), + ?line {ok, _} = eprof:profile([], fun() -> eprof_test:go(10) end, {eprof_test, dec, 1}), + + %% error case + + ?line error = eprof:profile([Pid], fun() -> eprof_test:go(10) end), + ?line Pid = whereis(eprof), + ?line error = eprof:profile([Pid], fun() -> eprof_test:go(10) end), + ?line A = spawn(fun() -> receive _ -> ok end end), + ?line profiling = eprof:profile([A]), + ?line true = exit(A, kill_it), + ?line profiling_stopped = eprof:stop_profiling(), + ?line {error,_} = eprof:profile(fun() -> a = b end), + + %% with mfa + + ?line {ok, _} = eprof:profile([], eprof_test, go, [10]), + ?line {ok, _} = eprof:profile([], eprof_test, go, [10], {eprof_test, dec, 1}), + + %% dump + + ?line {ok, _} = eprof:profile([], fun() -> eprof_test:go(10) end, {eprof_test, '_', '_'}), + ?line [{_, Mfas}] = eprof:dump(), + ?line Dec_mfa = {eprof_test, dec, 1}, + ?line Go_mfa = {eprof_test, go, 1}, + ?line {value, {Go_mfa, { 1, _Time1}}} = lists:keysearch(Go_mfa, 1, Mfas), + ?line {value, {Dec_mfa, {11, _Time2}}} = lists:keysearch(Dec_mfa, 1, Mfas), + + %% change current working directory + + 
?line ok = file:set_cwd(OldCurDir), + ?line stopped = eprof:stop(), + ok. tiny(suite) -> []; tiny(Config) when is_list(Config) -> @@ -40,11 +139,11 @@ tiny(Config) when is_list(Config) -> ?line code:purge(eprof_suite_test), ?line {module,eprof_suite_test} = code:load_file(eprof_suite_test), ?line {ok,_Pid} = eprof:start(), - ?line nothing_to_analyse = eprof:analyse(), - ?line nothing_to_analyse = eprof:total_analyse(), + ?line nothing_to_analyze = eprof:analyze(), + ?line nothing_to_analyze = eprof:analyze(total), ?line eprof:profile([], eprof_suite_test, test, [Config]), - ?line ok = eprof:analyse(), - ?line ok = eprof:total_analyse(), + ?line ok = eprof:analyze(), + ?line ok = eprof:analyze(total), ?line ok = eprof:log("eprof_SUITE_logfile"), ?line stopped = eprof:stop(), ?line ?t:timetrap_cancel(TTrap), @@ -79,8 +178,8 @@ eed(Config) when is_list(Config) -> ?line {ok,ok} = eprof:profile([], eed, file, [Script]), ?line {T3,_} = statistics(runtime), ?line profiling_already_stopped = eprof:stop_profiling(), - ?line ok = eprof:analyse(), - ?line ok = eprof:total_analyse(), + ?line ok = eprof:analyze(), + ?line ok = eprof:analyze(total), ?line ok = eprof:log("eprof_SUITE_logfile"), ?line stopped = eprof:stop(), ?line ?t:timetrap_cancel(TTrap), diff --git a/lib/tools/test/eprof_SUITE_data/eprof_test.erl b/lib/tools/test/eprof_SUITE_data/eprof_test.erl new file mode 100644 index 0000000000..33c428e893 --- /dev/null +++ b/lib/tools/test/eprof_SUITE_data/eprof_test.erl @@ -0,0 +1,9 @@ +-module(eprof_test). +-export([go/1]). + +go(N) -> + 0 = dec(N), + ok. + +dec(0) -> 0; +dec(N) -> dec(N - 1). diff --git a/lib/tools/test/fprof_SUITE.erl b/lib/tools/test/fprof_SUITE.erl index e437007e76..78de77526c 100644 --- a/lib/tools/test/fprof_SUITE.erl +++ b/lib/tools/test/fprof_SUITE.erl @@ -18,10 +18,11 @@ %% -module(fprof_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %% Test server framework exports --export([all/1, not_run/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, not_run/1]). %% Test suites -export([stack_seq/1, tail_seq/1, create_file_slow/1, spawn_simple/1, @@ -54,18 +55,33 @@ -all(doc) -> - ["Test the 'fprof' profiling tool."]; -all(suite) -> - case test_server:is_native(?MODULE) of - true -> - [not_run]; +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + case test_server:is_native(fprof_SUITE) of + true -> [not_run]; false -> [stack_seq, tail_seq, create_file_slow, spawn_simple, imm_tail_seq, imm_create_file_slow, imm_compile, cpu_create_file_slow] end. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + not_run(Config) when is_list(Config) -> {skipped, "Native code"}. 
@@ -356,7 +372,7 @@ imm_tail_seq(Config) when is_list(Config) -> ?line profiling_stopped = eprof:stop_profiling(), ?line R2 = R0, %% - ?line eprof:analyse(), + ?line eprof:analyze(), ?line stopped = eprof:stop(), %% ?line {ok, Tracer} = fprof:profile(start), @@ -471,7 +487,7 @@ imm_compile(Config) when is_list(Config) -> ?line TS3 = erlang:now(), ?line profiling_stopped = eprof:stop_profiling(), %% - ?line eprof:analyse(), + ?line eprof:analyze(), ?line stopped = eprof:stop(), %% ?line {ok, Tracer} = fprof:profile(start), diff --git a/lib/tools/test/ignore_cores.erl b/lib/tools/test/ignore_cores.erl index 8902a469ef..8b1ac0fe6c 120000..100644 --- a/lib/tools/test/ignore_cores.erl +++ b/lib/tools/test/ignore_cores.erl @@ -1 +1,158 @@ -../../../erts/test/ignore_cores.erl
\ No newline at end of file +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2008-2010. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%%------------------------------------------------------------------- +%%% File : ignore_cores.erl +%%% Author : Rickard Green <[email protected]> +%%% Description : +%%% +%%% Created : 11 Feb 2008 by Rickard Green <[email protected]> +%%%------------------------------------------------------------------- + +-module(ignore_cores). + +-include_lib("test_server/include/test_server.hrl"). + +-export([init/1, fini/1, setup/3, setup/4, restore/1, dir/1]). + +-record(ignore_cores, {org_cwd, + org_path, + org_pwd_env, + ign_dir = false, + cores_dir = false}). + +%% +%% Takes a testcase config +%% + +init(Config) -> + {ok, OrgCWD} = file:get_cwd(), + [{ignore_cores, + #ignore_cores{org_cwd = OrgCWD, + org_path = code:get_path(), + org_pwd_env = os:getenv("PWD")}} + | lists:keydelete(ignore_cores, 1, Config)]. + +fini(Config) -> + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD} = ?config(ignore_cores, Config), + ok = file:set_cwd(OrgCWD), + true = code:set_path(OrgPath), + case OrgPWD of + false -> ok; + _ -> true = os:putenv("PWD", OrgPWD) + end, + lists:keydelete(ignore_cores, 1, Config). + +setup(Suite, Testcase, Config) -> + setup(Suite, Testcase, Config, false). + +setup(Suite, Testcase, Config, SetCwd) when is_atom(Suite), + is_atom(Testcase), + is_list(Config) -> + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD} = ?config(ignore_cores, Config), + Path = lists:map(fun (".") -> OrgCWD; (Dir) -> Dir end, OrgPath), + true = code:set_path(Path), + PrivDir = ?config(priv_dir, Config), + IgnDir = filename:join([PrivDir, + atom_to_list(Suite) + ++ "_" + ++ atom_to_list(Testcase) + ++ "_wd"]), + ok = file:make_dir(IgnDir), + case SetCwd of + false -> + ok; + _ -> + ok = file:set_cwd(IgnDir), + OrgPWD = case os:getenv("PWD") of + false -> false; + PWD -> + os:putenv("PWD", IgnDir), + PWD + end + end, + ok = file:write_file(filename:join([IgnDir, "ignore_core_files"]), <<>>), + %% cores are dumped in /cores on MacOS X + CoresDir = case {?t:os_type(), filelib:is_dir("/cores")} of + {{unix,darwin}, true} -> + filelib:fold_files("/cores", + "^core.*$", + false, + fun (C,Cs) -> [C|Cs] end, + []); + _ -> + false + end, + lists:keyreplace(ignore_cores, + 1, + Config, + {ignore_cores, + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD, + ign_dir = IgnDir, + cores_dir = CoresDir}}). + +restore(Config) -> + #ignore_cores{org_cwd = OrgCWD, + org_path = OrgPath, + org_pwd_env = OrgPWD, + ign_dir = IgnDir, + cores_dir = CoresDir} = ?config(ignore_cores, Config), + try + case CoresDir of + false -> + ok; + _ -> + %% Move cores dumped by these testcases in /cores + %% to cwd. 
+ lists:foreach(fun (C) -> + case lists:member(C, CoresDir) of + true -> ok; + _ -> + Dst = filename:join( + [IgnDir, + filename:basename(C)]), + {ok, _} = file:copy(C, Dst), + file:delete(C) + end + end, + filelib:fold_files("/cores", + "^core.*$", + false, + fun (C,Cs) -> [C|Cs] end, + [])) + end + after + catch file:set_cwd(OrgCWD), + catch code:set_path(OrgPath), + case OrgPWD of + false -> ok; + _ -> catch os:putenv("PWD", OrgPWD) + end + end. + + +dir(Config) -> + #ignore_cores{ign_dir = Dir} = ?config(ignore_cores, Config), + Dir. diff --git a/lib/tools/test/instrument_SUITE.erl b/lib/tools/test/instrument_SUITE.erl index da5930e015..6800a94f94 100644 --- a/lib/tools/test/instrument_SUITE.erl +++ b/lib/tools/test/instrument_SUITE.erl @@ -18,22 +18,43 @@ %% -module(instrument_SUITE). --export([all/1,init_per_testcase/2,fin_per_testcase/2]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, + init_per_testcase/2,end_per_testcase/2]). -export(['+Mim true'/1, '+Mis true'/1]). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). init_per_testcase(_Case, Config) -> ?line Dog=?t:timetrap(10000), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, Config) -> +end_per_testcase(_Case, Config) -> Dog=?config(watchdog, Config), ?t:timetrap_cancel(Dog), ok. -all(suite) -> ['+Mim true', '+Mis true']. +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + ['+Mim true', '+Mis true']. + +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + '+Mim true'(doc) -> ["Check that memory data can be read and processed"]; '+Mim true'(suite) -> []; diff --git a/lib/tools/test/lcnt_SUITE.erl b/lib/tools/test/lcnt_SUITE.erl index e6866f721d..21383fa544 100644 --- a/lib/tools/test/lcnt_SUITE.erl +++ b/lib/tools/test/lcnt_SUITE.erl @@ -18,10 +18,10 @@ %% -module(lcnt_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %% Test server specific exports --export([all/1]). +-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2]). -export([init_per_suite/1, end_per_suite/1]). -export([init_per_testcase/2, end_per_testcase/2]). @@ -51,10 +51,21 @@ end_per_testcase(_Case, Config) -> ?t:timetrap_cancel(Dog), ok. -all(suite) -> - % Test cases +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> [load_v1, conflicts, locations, swap_keys]. +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + %%---------------------------------------------------------------------- %% Tests %%---------------------------------------------------------------------- diff --git a/lib/tools/test/make_SUITE.erl b/lib/tools/test/make_SUITE.erl index 72dccdb465..524ed04af4 100644 --- a/lib/tools/test/make_SUITE.erl +++ b/lib/tools/test/make_SUITE.erl @@ -18,12 +18,13 @@ %% -module(make_SUITE). --export([all/1, make_all/1, make_files/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, make_all/1, make_files/1]). -export([otp_6057_init/1, otp_6057_a/1, otp_6057_b/1, otp_6057_c/1, otp_6057_end/1]). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -include_lib("kernel/include/file.hrl"). @@ -35,9 +36,27 @@ %% that the file :"test5.erl" shall be compiled with the 'S' option, %% i.e. 
produce "test5.S" instead of "test5.<objext>" -all(suite) -> [make_all, make_files, - {conf, otp_6057_init, - [otp_6057_a,otp_6057_b,otp_6057_c], otp_6057_end}]. +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [make_all, make_files, {group, otp_6057}]. + +groups() -> + [{otp_6057,[],[otp_6057_a, otp_6057_b, + otp_6057_c]}]. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + otp_6057_init(Config). + +end_per_group(_GroupName, Config) -> + otp_6057_end(Config). + test_files() -> ["test1", "test2", "test3", "test4"]. @@ -146,7 +165,7 @@ otp_6057_init(Config) when is_list(Config) -> otp_6057_a(suite) -> []; otp_6057_a(doc) -> - ["Test that make:all/0 looks for object file in correct place"]; + ["Test that make:all/0, suite/0 looks for object file in correct place"]; otp_6057_a(Config) when is_list(Config) -> ?line PrivDir = ?config(priv_dir, Config), diff --git a/lib/tools/test/tools.cover b/lib/tools/test/tools.cover new file mode 100644 index 0000000000..1053be4f0f --- /dev/null +++ b/lib/tools/test/tools.cover @@ -0,0 +1,2 @@ +{incl_app,tools,details}. + diff --git a/lib/tools/test/tools.spec b/lib/tools/test/tools.spec index 93d5930472..1b07cf1cb6 100644 --- a/lib/tools/test/tools.spec +++ b/lib/tools/test/tools.spec @@ -1 +1 @@ -{topcase, {dir, "../tools_test"}}. +{suites,"../tools_test",all}. diff --git a/lib/tools/test/tools_SUITE.erl b/lib/tools/test/tools_SUITE.erl index 6b952f10ab..69dfab8fe7 100644 --- a/lib/tools/test/tools_SUITE.erl +++ b/lib/tools/test/tools_SUITE.erl @@ -18,28 +18,45 @@ %% -module(tools_SUITE). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). %% Default timetrap timeout (set in init_per_testcase). -define(default_timeout, ?t:minutes(1)). -define(application, tools). %% Test server specific exports --export([all/1]). --export([init_per_testcase/2, fin_per_testcase/2]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2]). +-export([init_per_testcase/2, end_per_testcase/2]). %% Test cases must be exported. -export([app_test/1]). -all(doc) -> - []; -all(suite) -> +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> [app_test]. +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + + init_per_testcase(_Case, Config) -> ?line Dog=test_server:timetrap(?default_timeout), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, Config) -> +end_per_testcase(_Case, Config) -> Dog=?config(watchdog, Config), test_server:timetrap_cancel(Dog), ok. diff --git a/lib/tools/test/xref_SUITE.erl b/lib/tools/test/xref_SUITE.erl index f9d062ef85..1fad070b67 100644 --- a/lib/tools/test/xref_SUITE.erl +++ b/lib/tools/test/xref_SUITE.erl @@ -29,28 +29,29 @@ -define(privdir, "xref_SUITE_priv"). -define(copydir, "xref_SUITE_priv/datacopy"). -else. --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -define(format(S, A), ok). -define(datadir, ?config(data_dir, Conf)). -define(privdir, ?config(priv_dir, Conf)). -define(copydir, ?config(copy_dir, Conf)). -endif. --export([all/1, init/1, fini/1]). +-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, + init_per_group/2,end_per_group/2, init/1, fini/1]). --export([xref/1, +-export([ addrem/1, convert/1, intergraph/1, lines/1, loops/1, no_data/1, modules/1]). 
--export([files/1, +-export([ add/1, default/1, info/1, lib/1, read/1, read2/1, remove/1, replace/1, update/1, deprecated/1, trycatch/1, abstract_modules/1, fun_mfa/1, qlc/1]). --export([analyses/1, +-export([ analyze/1, basic/1, md/1, q/1, variables/1, unused_locals/1]). --export([misc/1, +-export([ format_error/1, otp_7423/1, otp_7831/1]). -import(lists, [append/2, flatten/1, keysearch/3, member/2, sort/1, usort/1]). @@ -59,7 +60,7 @@ range/1, relation_to_family/1, set/1, to_external/1, union/2]). --export([init_per_testcase/2, fin_per_testcase/2]). +-export([init_per_testcase/2, end_per_testcase/2]). %% Checks some info counters of a server and some relations that should hold. -export([check_count/1, check_state/1]). @@ -68,8 +69,36 @@ -include_lib("tools/src/xref.hrl"). -all(suite) -> - {conf, init, [xref, files, analyses, misc], fini}. +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + [{group, xref}, {group, files}, {group, analyses}, + {group, misc}]. + +groups() -> + [{xref, [], + [addrem, convert, intergraph, lines, loops, no_data, + modules]}, + {files, [], + [add, default, info, lib, read, read2, remove, replace, + update, deprecated, trycatch, abstract_modules, fun_mfa, + qlc]}, + {analyses, [], + [analyze, basic, md, q, variables, unused_locals]}, + {misc, [], [format_error, otp_7423, otp_7831]}]. + +init_per_suite(Config) -> + init(Config). + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + init(Conf) when is_list(Conf) -> DataDir = ?datadir, @@ -91,13 +120,11 @@ init_per_testcase(_Case, Config) -> Dog=?t:timetrap(?t:minutes(2)), [{watchdog, Dog}|Config]. -fin_per_testcase(_Case, _Config) -> +end_per_testcase(_Case, _Config) -> Dog=?config(watchdog, _Config), test_server:timetrap_cancel(Dog), ok. -xref(suite) -> - [addrem, convert, intergraph, lines, loops, no_data, modules]. %% Seems a bit short... addrem(suite) -> []; @@ -680,9 +707,6 @@ modules(Conf) when is_list(Conf) -> ?line ok = xref_base:delete(S), ok. -files(suite) -> - [add, default, info, lib, read, read2, remove, replace, update, - deprecated, trycatch, abstract_modules, fun_mfa, qlc]. add(suite) -> []; add(doc) -> ["Add modules, applications, releases, directories"]; @@ -1788,8 +1812,6 @@ qlc(Conf) when is_list(Conf) -> ok. -analyses(suite) -> - [analyze, basic, md, q, variables, unused_locals]. analyze(suite) -> []; analyze(doc) -> ["Simple analyses"]; @@ -2312,8 +2334,6 @@ unused_locals(Conf) when is_list(Conf) -> ?line ok = file:delete(Beam2), ok. -misc(suite) -> - [format_error, otp_7423, otp_7831]. format_error(suite) -> []; format_error(doc) -> ["Format error messages"]; diff --git a/lib/tools/vsn.mk b/lib/tools/vsn.mk index 13cf5af9f5..0c89b18065 100644 --- a/lib/tools/vsn.mk +++ b/lib/tools/vsn.mk @@ -1,19 +1 @@ -# This is an -*-makefile-*- file. -# %CopyrightBegin% -# -# Copyright Ericsson AB 1997-2009. All Rights Reserved. -# -# The contents of this file are subject to the Erlang Public License, -# Version 1.1, (the "License"); you may not use this file except in -# compliance with the License. You should have received a copy of the -# Erlang Public License along with this software. If not, it can be -# retrieved online at http://www.erlang.org/. -# -# Software distributed under the License is distributed on an "AS IS" -# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -# the License for the specific language governing rights and limitations -# under the License. 
-# -# %CopyrightEnd% - -TOOLS_VSN = 2.6.5.1 +TOOLS_VSN = 2.6.6.2