Diffstat (limited to 'lib/dialyzer')
-rw-r--r--  lib/dialyzer/AUTHORS | 4
-rw-r--r--  lib/dialyzer/Makefile | 43
-rw-r--r--  lib/dialyzer/README | 84
-rw-r--r--  lib/dialyzer/RELEASE_NOTES | 302
-rw-r--r--  lib/dialyzer/doc/about.txt | 5
-rw-r--r--  lib/dialyzer/doc/html/.gitignore | 0
-rw-r--r--  lib/dialyzer/doc/man3/.gitignore | 0
-rw-r--r--  lib/dialyzer/doc/manual.txt | 358
-rw-r--r--  lib/dialyzer/doc/pdf/.gitignore | 0
-rwxr-xr-x  lib/dialyzer/doc/src/Makefile | 113
-rwxr-xr-x  lib/dialyzer/doc/src/book.xml | 49
-rw-r--r--  lib/dialyzer/doc/src/dialyzer.xml | 267
-rw-r--r--  lib/dialyzer/doc/src/dialyzer_chapter.xml | 217
-rwxr-xr-x  lib/dialyzer/doc/src/fascicules.xml | 18
-rwxr-xr-x  lib/dialyzer/doc/src/make.dep | 20
-rw-r--r--  lib/dialyzer/doc/src/note.gif | bin 0 -> 1539 bytes
-rwxr-xr-x  lib/dialyzer/doc/src/notes.xml | 746
-rwxr-xr-x  lib/dialyzer/doc/src/part.xml | 36
-rwxr-xr-x  lib/dialyzer/doc/src/part_notes.xml | 36
-rwxr-xr-x  lib/dialyzer/doc/src/ref_man.xml | 35
-rw-r--r--  lib/dialyzer/doc/src/warning.gif | bin 0 -> 1498 bytes
-rw-r--r--  lib/dialyzer/doc/warnings.txt | 114
-rw-r--r--  lib/dialyzer/ebin/.gitignore | 0
-rwxr-xr-x  lib/dialyzer/info | 2
-rw-r--r--  lib/dialyzer/plt/.gitignore | 0
-rw-r--r--  lib/dialyzer/src/Makefile | 159
-rw-r--r--  lib/dialyzer/src/dialyzer.app.src | 42
-rw-r--r--  lib/dialyzer/src/dialyzer.appup.src | 20
-rw-r--r--  lib/dialyzer/src/dialyzer.erl | 480
-rw-r--r--  lib/dialyzer/src/dialyzer.hrl | 146
-rw-r--r--  lib/dialyzer/src/dialyzer_analysis_callgraph.erl | 530
-rw-r--r--  lib/dialyzer/src/dialyzer_callgraph.erl | 697
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl | 717
-rw-r--r--  lib/dialyzer/src/dialyzer_cl_parse.erl | 448
-rw-r--r--  lib/dialyzer/src/dialyzer_codeserver.erl | 282
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl | 492
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl | 3468
-rw-r--r--  lib/dialyzer/src/dialyzer_dep.erl | 580
-rw-r--r--  lib/dialyzer/src/dialyzer_explanation.erl | 52
-rw-r--r--  lib/dialyzer/src/dialyzer_gui.erl | 1349
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.erl | 1243
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.hrl | 112
-rw-r--r--  lib/dialyzer/src/dialyzer_options.erl | 269
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl | 576
-rw-r--r--  lib/dialyzer/src/dialyzer_races.erl | 2426
-rw-r--r--  lib/dialyzer/src/dialyzer_succ_typings.erl | 540
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl | 2756
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl | 458
-rw-r--r--  lib/dialyzer/vsn.mk | 1
49 files changed, 20292 insertions, 0 deletions
diff --git a/lib/dialyzer/AUTHORS b/lib/dialyzer/AUTHORS
new file mode 100644
index 0000000000..8faf46b275
--- /dev/null
+++ b/lib/dialyzer/AUTHORS
@@ -0,0 +1,4 @@
+Original Authors and Contributors:
+
+Tobias Lindahl
+Kostis Sagonas
diff --git a/lib/dialyzer/Makefile b/lib/dialyzer/Makefile
new file mode 100644
index 0000000000..287867d7be
--- /dev/null
+++ b/lib/dialyzer/Makefile
@@ -0,0 +1,43 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2006-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+#=============================================================================
+#
+# File: lib/dialyzer/Makefile
+# Authors: Kostis Sagonas and Tobias Lindahl
+#
+#=============================================================================
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+#
+# Macros
+#
+
+SUB_DIRECTORIES = src doc/src
+
+include vsn.mk
+VSN = $(DIALYZER_VSN)
+
+SPECIAL_TARGETS =
+
+#
+# Default Subdir Targets
+#
+include $(ERL_TOP)/make/otp_subdir.mk
+
diff --git a/lib/dialyzer/README b/lib/dialyzer/README
new file mode 100644
index 0000000000..e9b03883d2
--- /dev/null
+++ b/lib/dialyzer/README
@@ -0,0 +1,84 @@
+##----------------------------------------------------------------------------
+## File: README
+## Author(s): Tobias Lindahl <[email protected]>
+## Kostis Sagonas <[email protected]>
+##
+## Copyright: Held by the authors; all rights reserved (2004 - 2009).
+##
+## $Id$
+##----------------------------------------------------------------------------
+
+The DIALYZER, a DIscrepancy AnaLYZer for ERlang programs.
+
+
+-----------------------------------------------
+--
+-- Starting the Dialyzer
+--
+-----------------------------------------------
+
+You can use Dialyzer either in its GUI mode, simply by:
+
+ ./dialyzer
+
+or in its command-line mode, e.g. by:
+
+ ./dialyzer -r OTP_DIR/lib/inets
+
+which analyzes all the bytecode (.beam) files of the "inets" application of
+the Erlang/OTP installation for discrepancies.
+
+The complete set of Dialyzer options is:
+
+ dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
+ [-pa dir]* [--plt plt] [-Ddefine]* [-I include_dir]*
+ [--output_plt file] [-Wwarn]* [--src]
+ [-c applications] [-r applications] [-o outfile]
+ [--build_plt] [--add_to_plt] [--remove_from_plt] [--check_plt]
+ [--plt_info] [--get_warnings]
+
+Use "dialyzer --help" to see an explanation of these options as well as
+a description of the various options to turn on or suppress certain types
+of warnings (for the latter only type "dialyzer -Whelp").
+
+
+The Persistent Lookup Table
+===========================
+Most Dialyzer uses require information about functions in the Erlang/OTP
+standard libraries. This information is stored in a Persistent Lookup
+Table (PLT) and is the starting point for later analyses. Before the
+first use of Dialyzer, the PLT needs to be built explicitly by the user
+using a command of the form:
+
+ dialyzer --build_plt -r lib/kernel/ebin lib/stdlib/ebin ... OTHER LIBS
+
+At each subsequent startup of Dialyzer the validity of this PLT is checked,
+and if something has changed in the libraries and applications that were
+included in it when the PLT was initially created, the PLT will be rebuilt.
+
+It is possible to have multiple PLTs and select dynamically among them.
+It is also possible to build PLTs incrementally. For more information
+refer to the documentation of the relevant options.
+
+
+-----------------------------------------------
+--
+-- More information about using the Dialyzer
+--
+-----------------------------------------------
+
+Start up the Dialyzer and click on its Help menu (in the top right corner)
+to read a brief overview and a user manual.
+
+
+-----------------------------------------------
+--
+-- Feedback & bug reports
+--
+-----------------------------------------------
+
+We welcome all sorts of user feedback (even wish-lists!). If you notice
+something weird, please send an error report describing the symptoms and
+how to reproduce them to:
+
diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES
new file mode 100644
index 0000000000..8205acf192
--- /dev/null
+++ b/lib/dialyzer/RELEASE_NOTES
@@ -0,0 +1,302 @@
+==============================================================================
+ Major features, additions and changes between Dialyzer versions
+ (in reverse chronological order)
+==============================================================================
+
+Version 2.1.0 (in Erlang/OTP R13B03)
+------------------------------------
+ - Dialyzer can statically detect some kinds of data races in Erlang programs.
+ Use the new option -Wrace_conditions to enable the race analysis.
+ The technique is described in a paper which is available at:
+ http://www.it.uu.se/research/group/hipe/dialyzer/publications/races.pdf
+ - Added support for packages (thanks to Maria Christakis).
+ - There has been a major change in the default mode of Dialyzer.
+ Previously, the default mode was the GUI, while now it is the command
+ line. As a result of this change, it is now possible to analyze a set of
+ files/dirs with the command:
+ dialyzer file1 ... fileN
+ In other words, the -c (--command-line) option is no longer necessary,
+ though it will be retained for some time for backwards compatibility.
+ To start dialyzer's GUI use either of the following commands:
+ dialyzer --gui %% for the old gs-based GUI
+ dialyzer --wx %% for the new wx-based GUI (where available)
+ - There is a new option --apps which allows the user to easily refer to
+ Erlang/OTP applications and include them in the analysis or in the
+ building of the PLT. For example, we recommend building the PLT with:
+ dialyzer --build_plt --apps erts kernel stdlib mnesia ...
+ The new option can also take absolute file names as well as applications.
+ Note that the application versions that will be included in the PLT are
+ those that correspond to the Erlang/OTP system which is used.
+ - Dialyzer has a new wxWidgets based GUI (thanks to Elli Frangaki)
+ for platforms where the wx application is available.
+
+Version 2.0.0 (in Erlang/OTP R13B02)
+------------------------------------
+ - There is a major addition to the capabilities of dialyzer, worthy
+ of bumping the version number. Starting with this version, dialyzer
+ not only accepts but also properly processes remote types (i.e.,
+ types of the form ModuleName:TypeName()). Previous dialyzer
+ versions only accepted this notation in -type and -spec
+ declarations, but effectively ignored its information by mapping
+ remote types to the type any(). In contrast, starting with this
+ version, remote types are used in the analysis and are also stored
+ in the PLTs. (This addition changes the format of PLTs and requires
+ rebuilding any PLTs created by an older dialyzer version.)
+ Note that dialyzer will complain and abort the analysis of a set of
+ modules if it needs to process a remote type without a definition
+ (either because the module does not include a definition of the
+ type or the module is not included in the analysis). We may relax
+ this restriction in a future version.
+ - Fixed minor issue with dialyzer:run/1 crashing (upon its return)
+ when used for adding type information to an existing PLT.
+ - Fixed minor but quite annoying issues in dialyzer's GUI.
+
+Version 1.9.2 (in Erlang/OTP R13B01)
+------------------------------------
+ - Fixed problem with type inference going into an infinite loop when
+ analyzing a strongly connected component of functions that do not
+ return but also contain an erroneous call which makes them fail and
+ be assigned the type none() instead of the type unit().
+ - More accurate type information for some BIFs and library files.
+ - Introduced boolean() as the `official' name for the type which was so
+ far known as bool(). The latter is still accepted alongside boolean(),
+ but results in a warning from the compiler; converting the code to use
+ the new name is strongly encouraged.
+
+Version 1.9.1 (in Erlang/OTP R13B)
+----------------------------------
+ - Contains the changes described below (R13A) but also:
+ - Has better handling of opaque types.
+ - The handling of UTF segments of bitstreams has been significantly
+ strengthened and revised. In all probability, it is now correct.
+
+Version 1.9.0 (in Erlang/OTP R13A)
+----------------------------------
+ - The analysis accepts opaque type declarations and detects violations of
+ opaqueness of terms of such types. Starting with R13, many Erlang/OTP
+ standard libraries (array, dict, digraph, ets, gb_sets, gb_trees, queue,
+ and sets) contain opaque type declarations of their main data types.
+ Dialyzer will spit out warnings in code that explicitly depends on the
+ structure of these terms.
+ - Added support for handling UTF segments in bitstreams and for detecting
+ obvious type errors in these segments.
+ Warning: This code is not terribly tested -- not surprising since this is
+ a new language feature of R13.
+ - Strengthened the discrepancy identification when testing for equality and
+ matching between terms of different types. This detects more bugs in code.
+ - Added warning for M:F(...) calls where M is not a module term and F is not
+ an atom. Previously, such calls were found to fail but the reason for
+ the failure was not reported.
+ - Added a convenient shorthand for the --no_check_plt option (-n).
+ - Added the --dump_callgraph option for dumping the callgraph of all files
+ that are analyzed into a specified file. The callgraph can either be dumped
+ in raw format, in .dot format, or converted to a .ps (postscript) file.
+ Note that in large callgraphs the generated postscript file might not be
+ interpretable by Ghostview.
+ (Thanks to Ilya Khlopotov for the initial version of this functionality.)
+
+Version 1.8.3 (in Erlang/OTP R12B-5)
+------------------------------------
+ - Added the --no_check_plt option that makes the startup time faster
+ when working with PLTs that are known not to change.
+ - Changed the phrasing of some warnings so that they do not cause confusion
+ to some users and correspond better to reality.
+
+Version 1.8.2 (in Erlang/OTP R12B-4)
+------------------------------------
+ - The return type of the Erlang interface dialyzer:run/1 has changed
+ to only return a list of warnings. If something goes wrong dialyzer
+ dies with an exception.
+ - The handling of the PLT has changed. Please consult the manual for
+ the changes.
+
+Version 1.8.0 (in Erlang/OTP R12B-2)
+------------------------------------
+ - Dialyzer's analysis is from now on exclusively based on success typings.
+ In particular, support for options --old_style and --dataflow has been
+ discontinued.
+ - Better and more aggressive handling of type information in records.
+ - Dialyzer has a new warning option -Wunmatched_returns which warns for
+ function calls that ignore the return value.
+ This catches many common programming errors (e.g. calling file:close/1
+ and not checking for the absence of errors), interface discrepancies
+ (e.g. a function returning multiple values when in reality the function
+ is void and only called for its side-effects), calling the wrong function
+ (e.g. io_lib:format/1 instead of io:format/1), and even possible
+ performance defects (e.g. unnecessarily constructing a list using a list
+ comprehension instead of using lists:foreach/2).
+ Whenever a function returns a single atomic value (e.g. 'ok' or pid()),
+ the warning is suppressed. This allows for "void" functions (returning
+ a single atom like 'ok') or for calls to certain builtins like spawn.
+ Because not all calls which ignore the return value are discrepancies,
+ the option is off by default and has to be explicitly requested by the user.
+ But we recommend it nevertheless.
+ - Some warning options (-Wno_comp, -Wno_guards, -Wno_unsafe_beam, etc.)
+ which could be used when analyzing bytecode produced by an old BEAM
+ compiler have been removed.
+
+Version 1.7.2 (in Erlang/OTP R12B-1)
+------------------------------------
+ - The Erlang interface of Dialyzer returns warnings tagged by their type.
+
+Version 1.7.1 (in Erlang/OTP R12B-0)
+------------------------------------
+ - Use of success typings is now the default, is robust and detects significantly
+ more errors than in previous versions.
+ - Dialyzer accepts and takes into account type information in record
+ declarations and in contracts -- see related paper in Erlang'07 workshop.
+ Various OTP applications (e.g. stdlib and kernel) are partially annotated
+ with appropriate contracts specifying type information for their functions.
+ - The type previously known as unit() has been renamed to no_return().
+ Its use in a contract as the return type of a function now silences off
+ the "function has no local return" dialyzer warning without use of the
+ corresponding option.
+
+Version 1.7.0 (in Erlang/OTP R11B-5)
+------------------------------------
+ - Dialyzer can now use success typings to find discrepancies. As a
+ consequence significantly more discrepancies are detected. The downside
+ is that the analysis takes about 2 to 2.5 times longer than in previous
+ versions, and may also result in some warnings that might be harder to
+ interpret even though they are correct. This has been a major change,
+ worthy of increasing the version number.
+ New command-line options:
+ --succ_typings Use the success typings analysis.
+ --dataflow (default) Use the previous analysis.
+ - The new type unit() has been introduced to handle the return type of
+ non-terminating functions such as servers.
+ - Dialyzer's code server uses a compressed representation and the analysis
+ requires significantly less memory than in previous versions.
+
+Version 1.6.0 (in Erlang/OTP R11B-4)
+------------------------------------
+ - Dialyzer (in the modes where either source or BEAM code which contains
+ debug_info is analyzed) spits out line information for all discrepancies.
+ This has been a major change, worthy of increasing the version number.
+ We warn users that *considerably* more discrepancies are identified
+ by this version of Dialyzer compared with previous ones. If warnings
+ generated by previous versions of Dialyzer are preferable for some
+ applications, the command line option --old_style can be employed.
+ - Dialyzer handles term comparison operators more precisely and is able to
+ identify more discrepancies in 'if' or 'case' statements with comparisons.
+ - Dialyzer has more precise type information for many standard OTP functions.
+
+Version 1.5.1 (in Erlang/OTP R11B-3)
+------------------------------------
+ - Added the option --check_init_plt that can be used to force the re-building
+ of the initial plt for installed systems.
+ - Fixed some bugs in the handling of guards in the dataflow analysis after
+ a report from Fredrik Thulin.
+ - Cleaned up the error handling and the interface and now Dialyzer has an
+ Erlang interface.
+ - Building the PLT using success typings is considerably faster than in 1.5.0.
+ - Added option --verbose which makes Dialyzer a bit more verbose.
+
+Version 1.5.0 (in Erlang/OTP R11B-2)
+------------------------------------
+ - Dialyzer's building of the PLT is based on a different type inference algorithm.
+ More specifically, Dialyzer uses inference of refined success typings to
+ infer function prototypes. As a result, Dialyzer bases its analysis on a
+ significantly more powerful basis and thus is able to detect more
+ discrepancies. The downside is that building the PLT is a considerably
+ slower process. We will work on improving that.
+ - Dialyzer takes into account the BEAM compiler directive
+ -compile({nowarn_unused_function, {F,A}}).
+ and then suppresses the warning that function F/A will never be called.
+ - Dialyzer's default initial PLT now also includes "mnesia".
+
+Version 1.4.2 (in Erlang/OTP R11B-1)
+------------------------------------
+ - Speed up changes and small bug fixes.
+
+Version 1.4.1 (in Erlang/OTP R11B-0)
+------------------------------------
+ - Dialyzer is officially part of Erlang/OTP as a library application.
+ - Instead of being a shell script, "dialyzer" is an executable (generated
+ by a C program -- thanks to Bjorn Gustavsson), and works similarly to "erlc".
+ As a side-effect of this change, Dialyzer now also works on Windows.
+
+Version 1.4.0
+-------------
+ - Changes for Dialyzer to work with Erlang/OTP R10B-10
+ - Dialyzer's analysis is significantly faster as it uses a global function
+ call-graph for obtaining type signatures for all analyzed functions
+ - ...
+ - Many more other changes; in fact too many to mention here.
+
+Version 1.3.1 (mostly internal)
+-------------
+ - Small changes for Dialyzer to work with Erlang/OTP R10B-5.
+ - Modified setup script for execution under Cygwin (patch by Vlad Dumitrescu)
+ - Added command line option --no_warn_on_inline
+ - Fixed a small buglet in the analysis; this affected one of HiPE's files
+ - Dialyzer now explicitly warns when modules with the same name
+ but from different dirs are being analyzed (contribution by Ulf Wiger)
+
+Version 1.3.0
+-------------
+ - Requires the presence of an Erlang/OTP R10B-4 system.
+ - Dialyzer is significantly (approx 40%) faster since it now uses 'ets'
+ rather than 'dets' for its PLT.
+ - Slightly improved the precision of the analysis.
+ - Fixed a number of false positives that Dialyzer 1.2.0 was spitting out.
+ - In the GUI version, Dialyzer now reports the list of modules that should
+ be included in the modules to analyze in order to possibly improve
+ the accuracy of the reported results.
+ - Some more information is displayed when calling a function or closure
+ with arguments of the wrong type.
+ - The record guard now allows discrepancies involving tuples that are known
+ to be records to be displayed as #rec{} rather than {'rec',_,...,_}
+ - Added -q option which makes the command-line version of Dialyzer
+ a bit more silent.
+
+Version 1.2.0
+-------------
+ - Dialyzer works with the open source and commercial versions of Erlang/OTP
+ R10B-2 on all platforms (i.e., HiPE support is not a prerequisite anymore).
+ - Whenever a .beam file contains abstract code (i.e., has been compiled
+ with the +debug_info option), the analysis starts from this code rather
+ than from BEAM bytecode -- this makes the results identical to those
+ obtained by analysis starting from source code.
+ (This is a contribution from Bjorn Gustavsson - Thanks!)
+ - Added -pa command line option for easier handling of -include_lib()
+ directives in source code.
+ - Includes all changes added in v1.1.1; see below. The "Add Recursively"
+ functionality is also available in the command-line mode (-r option).
+
+Version 1.1.1 (internal, not released to the masses :-)
+-------------
+ - The command-line mode of Dialyzer spits out discrepancies to stdout
+ by default and returns a numeric value indicating its exit status.
+ - Fixed problems using the shell script on Solaris machines.
+ - Fixed small inconsistencies in Dialyzer's documentation and help.
+ - Added "Add Recursively" button contributed by Martin Bjorklund (thanks!).
+
+Version 1.1.0
+-------------
+ - Dialyzer works with the open source version of Erlang/OTP R10B-1 (on
+ platforms where HiPE support is available) and it does not require
+ installation of a customized Erlang/OTP system anymore.
+ - Dialyzer comes with an extensive command-line interface, in addition
+ to an enhanced GUI.
+ - Analysis can start from source code (through Core Erlang) as well as
+ from BEAM bytecode.
+ - Dialyzer finds considerably more discrepancies in Erlang code than
+ previous versions.
+ - Added ability to selectively turn on/off different warning categories.
+
+Version 1.0.1
+-------------
+ - Fixed major bug of v1.0.0 which caused the analysis to stop prematurely
+ (i.e., without properly reaching fixpoint) when analyzing more than one
+ module. This did not affect correctness of the results, but lost many
+ opportunities to detect discrepancies.
+ - Performance improvements in the analysis.
+
+Version 1.0.0
+-------------
+ Initial Dialyzer version: Analysis starts from BEAM bytecode only and
+ only a GUI version is available. Dialyzer reports discrepancies in the
+ use of the "sequential" part of Erlang. To be used, Dialyzer requires
+ the presence of a specific snapshot of Erlang/OTP which can be obtained
+ from Dialyzer's homepage.
diff --git a/lib/dialyzer/doc/about.txt b/lib/dialyzer/doc/about.txt
new file mode 100644
index 0000000000..7d9d819731
--- /dev/null
+++ b/lib/dialyzer/doc/about.txt
@@ -0,0 +1,5 @@
+ This is DIALYZER version 2.1.0
+ DIALYZER is a DIscrepancy AnaLYZer for ERlang programs.
+
+ Copyright (C) Tobias Lindahl <[email protected]>
+ Kostis Sagonas <[email protected]>
diff --git a/lib/dialyzer/doc/html/.gitignore b/lib/dialyzer/doc/html/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/doc/html/.gitignore
diff --git a/lib/dialyzer/doc/man3/.gitignore b/lib/dialyzer/doc/man3/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/doc/man3/.gitignore
diff --git a/lib/dialyzer/doc/manual.txt b/lib/dialyzer/doc/manual.txt
new file mode 100644
index 0000000000..f1faed3c79
--- /dev/null
+++ b/lib/dialyzer/doc/manual.txt
@@ -0,0 +1,358 @@
+##----------------------------------------------------------------------------
+## File: doc/manual.txt
+## Author(s): Tobias Lindahl <[email protected]>
+## Kostis Sagonas <[email protected]>
+##
+## $Id$
+##----------------------------------------------------------------------------
+
+The DIALYZER, a DIscrepancy AnaLYZer for ERlang programs.
+
+
+-----------------------------------------------
+--
+-- Using Dialyzer from the GUI
+--
+-----------------------------------------------
+
+Choosing the applications or modules
+====================================
+In the "File" window you will find a listing of the current directory.
+Click your way to the directories/modules you want to add or type the
+correct path in the entry.
+
+Mark the directories/modules you want to analyze for discrepancies and
+click "Add". You can either add the .beam and .erl-files directly, or
+you can add directories that contain these kinds of files. Note that
+you are only allowed to add the type of files that can be analyzed in
+the current mode of operation (see below), and that you cannot mix
+.beam and .erl-files.
+
+The analysis modes
+==================
+Dialyzer has several modes of analysis. These are controlled by the
+buttons in the top-middle part of the main window, under "Analysis Options".
+
+The parameters are:
+* File Type:
+ - Byte code:
+ The analysis starts from .beam bytecode files.
+ The files must be compiled with +debug_info.
+ - Source code:
+ The analysis starts from .erl files.
+
+Controlling the discrepancies reported by the Dialyzer
+======================================================
+Under the "Warnings" pull-down menu, there are buttons that control
+which discrepancies are reported to the user in the "Warnings" window.
+By clicking on these buttons, one can enable/disable a whole class of
+warnings. Information about the classes of warnings can be found on
+the "Warnings" item under the "Help" menu (at the rightmost top corner).
+
+Running the analysis
+====================
+Once you have chosen the modules or directories you want to analyze,
+click the "Run" button to start the analysis. If for some reason you
+want to stop the analysis while it is running, push the "Stop" button.
+
+The information from the analysis will be displayed in the Log and the
+Warnings windows.
+
+
+Include directories and macro definitions
+=========================================
+When analyzing from source you might have to supply Dialyzer with a
+list of include directories and macro definitions (as you can do with
+the erlc flags -I and -D). This can be done either by starting Dialyzer
+with these flags from the command line as in:
+
+ ./dialyzer -I my_includes -DDEBUG -Dvsn=42 -I one_more_dir
+
+or by adding these explicitly using the "Manage Macro Definitions" or
+"Manage Include Directories" sub-menus in the "Options" menu.
+
+
+Saving the information on the Log and Warnings windows
+======================================================
+In the "File" menu there are options to save the contents of the Log
+and the Warnings window. Just choose the options and enter the file to
+save the contents in.
+
+There are also buttons to clear the contents of each window.
+
+
+Inspecting the inferred types of the analyzed functions
+=======================================================
+Dialyzer stores the information of the analyzed functions in a
+Persistent Lookup Table (PLT). After an analysis you can inspect this
+information. In the PLT menu you can choose to either search the PLT
+or inspect the contents of the whole PLT. The information is presented
+in edoc format.
+
+NOTE: Currently, the information which is displayed is NOT the type
+signatures of the functions. The return values are the least upper
+bound of the returned type from the function and the argument types
+are the least upper bound of the types that the function is called
+with. In other words, the argument types are not what the function can
+accept, but rather a description of how the function is used.
+
+We are working on finding the type signatures of the functions, and
+this will (hopefully) be included in a future version of Dialyzer.
+
+
+-----------------------------------------------
+--
+-- Using Dialyzer from the command line
+--
+-----------------------------------------------
+
+Dialyzer also has a command line version for automated use. Below is a
+brief description of its options. The same information can
+be obtained by writing
+
+ "dialyzer --help"
+
+in a shell. Please refer to the GUI description for more details on
+the operation of Dialyzer.
+
+The exit status of the command line version is:
+
+ 0 - No problems were encountered during the analysis and no
+ warnings were emitted.
+ 1 - Problems were encountered during the analysis.
+ 2 - No problems were encountered, but warnings were emitted.
+
+
+
+Usage: dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
+ [-pa dir]* [--plt plt] [-Ddefine]* [-I include_dir]*
+ [--output_plt file] [-Wwarn]* [--src]
+ [-c applications] [-r applications] [-o outfile]
+ [--build_plt] [--add_to_plt] [--remove_from_plt] [--check_plt]
+ [--plt_info] [--get_warnings]
+
+Options:
+ -c applications (or --command-line applications)
+ Use Dialyzer from the command line (no GUI) to detect defects in the
+ specified applications (directories or .erl or .beam files)
+ -r applications
+ Same as -c only that directories are searched recursively for
+ subdirectories containing .erl or .beam files (depending on the
+ type of analysis)
+ -o outfile (or --output outfile)
+ When using Dialyzer from the command line, write the analysis
+ results to the specified "outfile" rather than to stdout
+ --raw
+ When using Dialyzer from the command line, output the raw analysis
+ results (Erlang terms) instead of the formatted result.
+ The raw format is easier to post-process (for instance, to filter
+ warnings or to output HTML pages)
+ --src
+ Override the default, which is to analyze BEAM bytecode, and
+ analyze starting from Erlang source code instead
+ -Dname (or -Dname=value)
+ When analyzing from source, pass the define to Dialyzer (**)
+ -I include_dir
+ When analyzing from source, pass the include_dir to Dialyzer (**)
+ --output_plt file
+ Store the plt at the specified file after building it
+ --plt plt
+ Use the specified plt as the initial plt (if the plt was built
+ during setup the files will be checked for consistency)
+ -pa dir
+ Include dir in the path for Erlang (useful when analyzing files
+ that have '-include_lib()' directives)
+ -Wwarn
+ A family of options which selectively turn on/off warnings
+ (for help on the names of warnings use dialyzer -Whelp)
+ --shell
+ Do not disable the Erlang shell while running the GUI
+ --version (or -v)
+ Prints the Dialyzer version and some more information and exits
+ --help (or -h)
+ Prints this message and exits
+ --quiet (or -q)
+ Makes Dialyzer a bit more quiet
+ --verbose
+ Makes Dialyzer a bit more verbose
+ --build_plt
+ The analysis starts from an empty plt and creates a new one from the
+ files specified with -c and -r. Only works for beam files.
+ Use --plt or --output_plt to override the default plt location.
+ --add_to_plt
+ The plt is extended to also include the files specified with -c and -r.
+ Use --plt to specify which plt to start from, and --output_plt to
+ specify where to put the plt. Note that the analysis might include
+ files from the plt if they depend on the new files.
+ This option only works with beam files.
+ --remove_from_plt
+ The information from the files specified with -c and -r is removed
+ from the plt. Note that this may cause a re-analysis of the remaining
+ dependent files.
+ --check_plt
+ Checks the plt for consistency and rebuilds it if it is not up-to-date.
+ --no_check_plt
+ Skip the plt check when running Dialyzer. Useful when working with
+ installed plts that never change.
+ --plt_info
+ Makes Dialyzer print information about the plt and then quit. The plt
+ can be specified with --plt.
+ --get_warnings
+ Makes Dialyzer emit warnings even when manipulating the plt. Only
+ emits warnings for files that are actually analyzed.
+
+Note:
+ * denotes that multiple occurrences of these options are possible.
+ ** options -D and -I work both from the command line and in the Dialyzer GUI;
+ the syntax of defines and includes is the same as that used by "erlc".
+
+Warning options:
+ -Wno_return
+ Suppress warnings for functions that will never return a value.
+ -Wno_unused
+ Suppress warnings for unused functions.
+ -Wno_improper_lists
+ Suppress warnings for construction of improper lists.
+ -Wno_fun_app
+ Suppress warnings for fun applications that will fail.
+ -Wno_match
+ Suppress warnings for patterns that are unused or cannot match.
+ -Wunmatched_returns ***
+ Include warnings for function calls which ignore the return value(s).
+ -Werror_handling ***
+ Include warnings for functions that only return by means of an exception.
+ -Wunderspecs ***
+ Warn about underspecified functions
+ (the -spec is strictly more allowing than the success typing)
+ -Woverspecs ***
+ Warn about overspecified functions
+ (the -spec is strictly less allowing than the success typing)
+ -Wspecdiffs ***
+ Warn when the -spec is different than the success typing
+
+Note:
+ *** These are options that turn on warnings rather than turning them off.
+
+
+-----------------------------------------------
+--
+-- Using Dialyzer from Erlang
+--
+-----------------------------------------------
+
+You can also use Dialyzer directly from Erlang. Both the GUI and the
+command-line versions are available. The options are similar to the ones
+given from the command line, so please refer to the sections above for
+a description of these.
+
+Interface:
+
+dialyzer:gui() -> 'ok'
+
+dialyzer:gui(OptList) -> 'ok'
+
+dialyzer:run(OptList) -> Warnings
+
+Warnings :: [{tag(), id(), msg()}]
+tag() :: 'warn_return_no_exit' | 'warn_return_only_exit' | 'warn_not_called'
+ | 'warn_non_proper_list' | 'warn_fun_app' | 'warn_matching'
+ | 'warn_failing_call' | 'warn_contract_types'
+ | 'warn_contract_syntax' | 'warn_contract_not_equal'
+ | 'warn_contract_subtype' | 'warn_contract_supertype'
+id() :: {File :: string(), Line :: integer()}
+msg() :: Undefined
+
+OptList :: [Option]
+Option :: {files, [Filename :: string()]}
+ | {files_rec, [DirName :: string()]}
+ | {defines, [{Macro :: atom(), Value :: term()}]}
+ | {from, src_code | byte_code} %% Defaults to byte_code
+ | {init_plt, FileName :: string()} %% If changed from default
+ | {include_dirs, [DirName :: string()]}
+ | {output_file, FileName :: string()}
+ | {output_plt, FileName :: string()}
+ | {analysis_type, 'success_typings' | 'plt_add' |
+ 'plt_build' | 'plt_check' | 'plt_remove'}
+ | {warnings, [WarnOpts]}
+
+WarnOpts :: no_return
+ | no_unused
+ | no_improper_lists
+ | no_fun_app
+ | no_match
+ | no_fail_call
+ | unmatched_returns
+ | error_handling
+
+dialyzer:format_warning({tag(), id(), msg()}) -> string()
+
+Returns a string representation of the warnings as returned by dialyzer:run/1.
+
+dialyzer:plt_info(string()) -> {'ok', [{atom(), any()}]} | {'error', atom()}
+
+Returns information about the specified plt.
+
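+As a small illustration of this interface, the sketch below analyzes the
+byte code found under an "ebin" directory and prints the formatted
+warnings. The module name, the directory, and the chosen warning option
+are placeholders for this example only, not defaults of Dialyzer.
+
+  -module(my_dialyzer_check).   %% illustrative name
+  -export([check/0]).
+
+  check() ->
+      %% Run the analysis on the byte code under ./ebin (placeholder path)
+      %% and print each warning using dialyzer:format_warning/1.
+      Warnings = dialyzer:run([{files_rec, ["ebin"]},
+                               {from, byte_code},
+                               {warnings, [unmatched_returns]}]),
+      lists:foreach(fun(W) ->
+                        io:format("~s", [dialyzer:format_warning(W)])
+                    end, Warnings).
+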
+-----------------------------------------------
+--
+-- More on the Persistent Lookup Table (PLT)
+--
+-----------------------------------------------
+
+The persistent lookup table, or PLT, is used to store the result of an
+analysis. The PLT can then be used as a starting point for later
+analyses. It is recommended to build a PLT with the OTP applications
+that you are using, and also to include your own applications that you
+use frequently.
+
+The PLT is built using the --build_plt option to dialyzer. The
+following command builds the recommended minimal PLT for OTP.
+
+dialyzer --build_plt -r $ERL_TOP/lib/stdlib/ebin\
+ $ERL_TOP/lib/kernel/ebin\
+ $ERL_TOP/lib/mnesia/ebin
+
+Dialyzer will check whether an environment variable called
+$DIALYZER_PLT is set and, if so, place the PLT at that location. If no
+such variable is set, Dialyzer will place the PLT at $HOME/.dialyzer_plt.
+The location can also be specified using the --plt or --output_plt
+options.
+
+You can also add information to an existing plt using the --add_to_plt
+option. Suppose you want to also include the compiler in the PLT and
+place it in a new PLT, then give the command
+
+dialyzer --add_to_plt -r $ERL_TOP/lib/compiler/ebin --output_plt my.plt
+
+Then you would like to add your favorite application my_app to the new
+plt.
+
+dialyzer --add_to_plt --plt my.plt -r <path>/my_app/ebin
+
+But you realize that it is unnecessary to have compiler in this one.
+
+dialyzer --remove_from_plt --plt my.plt -r $ERL_TOP/lib/compiler/ebin
+
+Later, when you have fixed a bug in your application my_app and want
+to update the plt so that it will be fresh the next time you run
+Dialyzer, run the command
+
+dialyzer --check_plt --plt my.plt
+
+Dialyzer will then reanalyze the files that have been changed, and the
+files that depend on these files. Note that this consistency check
+will be performed automatically the next time you run Dialyzer with
+this plt. The --check_plt option is merely for doing so without doing
+any other analysis.
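+
+The same PLT operations can also be driven through the Erlang interface
+described above. The sketch below mirrors the --add_to_plt and
+--check_plt commands for a PLT file named "my.plt"; the module name and
+paths are placeholders for this example only.
+
+  -module(my_plt_update).   %% illustrative name
+  -export([add_app/0, check/0]).
+
+  %% Roughly: dialyzer --add_to_plt --plt my.plt -r my_app/ebin
+  add_app() ->
+      dialyzer:run([{analysis_type, plt_add},
+                    {init_plt, "my.plt"},
+                    {output_plt, "my.plt"},
+                    {files_rec, ["my_app/ebin"]}]).   %% placeholder path
+
+  %% Roughly: dialyzer --check_plt --plt my.plt
+  check() ->
+      dialyzer:run([{analysis_type, plt_check},
+                    {init_plt, "my.plt"}]).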
+
+-----------------------------------------------
+--
+-- Feedback & bug reports
+--
+-----------------------------------------------
+
+At this point, we very much welcome user feedback (even wish-lists!).
+If you notice something weird, especially if the Dialyzer reports any
+discrepancy that is a false positive, please send an error report
+describing the symptoms and how to reproduce them to:
+
diff --git a/lib/dialyzer/doc/pdf/.gitignore b/lib/dialyzer/doc/pdf/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/doc/pdf/.gitignore
diff --git a/lib/dialyzer/doc/src/Makefile b/lib/dialyzer/doc/src/Makefile
new file mode 100755
index 0000000000..37bcb49de0
--- /dev/null
+++ b/lib/dialyzer/doc/src/Makefile
@@ -0,0 +1,113 @@
+# ``The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved via the world wide web at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+# Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+# AB. All Rights Reserved.''
+#
+# $Id$
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+VSN=$(DIALYZER_VSN)
+APPLICATION=dialyzer
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+XML_APPLICATION_FILES = ref_man.xml
+XML_REF3_FILES = dialyzer.xml
+
+XML_PART_FILES = part.xml part_notes.xml
+XML_CHAPTER_FILES = dialyzer_chapter.xml notes.xml
+
+BOOK_FILES = book.xml
+
+XML_FILES = \
+ $(BOOK_FILES) $(XML_CHAPTER_FILES) \
+ $(XML_PART_FILES) $(XML_REF3_FILES) $(XML_APPLICATION_FILES)
+
+
+# ----------------------------------------------------
+
+TEXT_FILES = \
+ ../about.txt \
+ ../manual.txt \
+ ../warnings.txt
+
+HTML_FILES = $(XML_APPLICATION_FILES:%.xml=$(HTMLDIR)/%.html) \
+ $(XML_PART_FILES:%.xml=$(HTMLDIR)/%.html)
+
+INFO_FILE = ../../info
+
+MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3)
+
+HTML_REF_MAN_FILE = $(HTMLDIR)/index.html
+
+TOP_PDF_FILE = $(PDFDIR)/$(APPLICATION)-$(VSN).pdf
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+XML_FLAGS +=
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+$(HTMLDIR)/%.gif: %.gif
+ $(INSTALL_DATA) $< $@
+
+docs: pdf html man
+
+$(TOP_PDF_FILE): $(XML_FILES)
+
+pdf: $(TOP_PDF_FILE)
+
+html: gifs $(HTML_REF_MAN_FILE)
+
+man: $(MAN3_FILES)
+
+gifs: $(GIF_FILES:%=$(HTMLDIR)/%)
+
+debug opt:
+
+clean clean_docs:
+ rm -rf $(HTMLDIR)/*
+ rm -f $(MAN3DIR)/*
+ rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo)
+ rm -f errs core *~
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_docs_spec: docs
+ $(INSTALL_DIR) $(RELSYSDIR)/doc/pdf
+ $(INSTALL_DATA) $(TOP_PDF_FILE) $(RELSYSDIR)/doc/pdf
+ $(INSTALL_DIR) $(RELSYSDIR)/doc/html
+ $(INSTALL_DATA) $(HTMLDIR)/* \
+ $(RELSYSDIR)/doc/html
+ $(INSTALL_DATA) $(INFO_FILE) $(RELSYSDIR)
+ $(INSTALL_DIR) $(RELEASE_PATH)/man/man3
+ $(INSTALL_DATA) $(MAN3DIR)/* $(RELEASE_PATH)/man/man3
+
+release_spec:
diff --git a/lib/dialyzer/doc/src/book.xml b/lib/dialyzer/doc/src/book.xml
new file mode 100755
index 0000000000..0b4e1cb617
--- /dev/null
+++ b/lib/dialyzer/doc/src/book.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE book SYSTEM "book.dtd">
+
+<book xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header titlestyle="normal">
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Dialyzer</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>book.xml</file>
+ </header>
+ <insidecover>
+ </insidecover>
+ <pagetext>Dialyzer</pagetext>
+ <preamble>
+ <contents level="2"></contents>
+ </preamble>
+ <parts lift="no">
+ <xi:include href="part.xml"/>
+ </parts>
+ <applications>
+ <xi:include href="ref_man.xml"/>
+ </applications>
+ <releasenotes>
+ <xi:include href="notes.xml"/>
+ </releasenotes>
+ <listofterms></listofterms>
+ <index></index>
+</book>
+
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
new file mode 100644
index 0000000000..7f983a2c0d
--- /dev/null
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -0,0 +1,267 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>dialyzer</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ </header>
+ <module>dialyzer</module>
+ <modulesummary>The Dialyzer, a DIscrepancy AnalYZer for ERlang programs</modulesummary>
+ <description>
+ <p>The Dialyzer is a static analysis tool that identifies software
+ discrepancies such as definite type errors, code which has become
+ dead or unreachable due to some programming error, unnecessary
+ tests, etc. in single Erlang modules or entire (sets of)
+ applications. Dialyzer starts its analysis from either
+ debug-compiled BEAM bytecode or from Erlang source code. The file
+ and line number of a discrepancy is reported along with an
+ indication of what the discrepancy is about. Dialyzer bases its
+ analysis on the concept of success typings which allows for sound
+ warnings (no false positives).</p>
+ <p>Read more about Dialyzer and about how to use it from the GUI
+ in <seealso marker="dialyzer_chapter">Dialyzer User's
+ Guide</seealso>.</p>
+ </description>
+
+ <section>
+ <title>Using the Dialyzer from the command line</title>
+ <p>Dialyzer also has a command line version for automated use. Below is a
+ brief description of its options. The same information can
+ be obtained by writing</p>
+ <code type="none"><![CDATA[
+ dialyzer --help
+ ]]></code>
+ <p>in a shell. Please refer to the GUI description for more details on
+ the operation of Dialyzer.</p>
+ <p>The exit status of the command line version is:</p>
+ <code type="none"><![CDATA[
+ 0 - No problems were encountered during the analysis and no
+ warnings were emitted.
+ 1 - Problems were encountered during the analysis.
+ 2 - No problems were encountered, but warnings were emitted.
+ ]]></code>
+ <p>Usage:</p>
+ <code type="none"><![CDATA[
+ dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
+ [-pa dir]* [--plt plt] [-Ddefine]* [-I include_dir]*
+ [--output_plt file] [-Wwarn]* [--src]
+ [-c applications] [-r applications] [-o outfile]
+ [--build_plt] [--add_to_plt] [--remove_from_plt]
+ [--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
+ ]]></code>
+ <p>Options:</p>
+ <taglist>
+ <tag><c><![CDATA[-c applications]]></c> (or <c><![CDATA[--command-line applications]]></c>)</tag>
+ <item>use Dialyzer from the command line (no GUI) to detect defects in the
+ specified applications (directories or <c><![CDATA[.erl]]></c> or <c><![CDATA[.beam]]></c> files)</item>
+ <tag><c><![CDATA[-r applications]]></c></tag>
+ <item>same as <c><![CDATA[-c]]></c> only that directories are searched recursively for
+ subdirectories containing <c><![CDATA[.erl]]></c> or <c><![CDATA[.beam]]></c> files (depending on the
+ type of analysis)</item>
+ <tag><c><![CDATA[-o outfile]]></c> (or <c><![CDATA[--output outfile]]></c>)</tag>
+ <item>when using Dialyzer from the command line, write the analysis
+ results to the specified <c><![CDATA[outfile]]></c> rather than to stdout</item>
+ <tag><c><![CDATA[--src]]></c></tag>
+ <item>override the default, which is to analyze debug compiled BEAM
+ bytecode, and analyze starting from Erlang source code instead</item>
+ <tag><c><![CDATA[--raw]]></c></tag>
+ <item>When using Dialyzer from the command line, output the raw analysis
+ results (Erlang terms) instead of the formatted result.
+ The raw format is easier to post-process (for instance, to filter
+ warnings or to output HTML pages).</item>
+ <tag><c><![CDATA[-Dname]]></c> (or <c><![CDATA[-Dname=value]]></c>)</tag>
+ <item>when analyzing from source, pass the define to Dialyzer (**)</item>
+ <tag><c><![CDATA[-I include_dir]]></c></tag>
+ <item>when analyzing from source, pass the <c><![CDATA[include_dir]]></c> to Dialyzer (**)</item>
+ <tag><c><![CDATA[-pa dir]]></c></tag>
+ <item>Include <c><![CDATA[dir]]></c> in the path for Erlang. Useful when analyzing files
+ that have <c><![CDATA[-include_lib()]]></c> directives.</item>
+ <tag><c><![CDATA[--output_plt file]]></c></tag>
+ <item>Store the PLT at the specified location after building it.</item>
+ <tag><c><![CDATA[--plt plt]]></c></tag>
+ <item>Use the specified plt as the initial persistent lookup table.</item>
+ <tag><c><![CDATA[-Wwarn]]></c></tag>
+ <item>a family of options which selectively turn on/off warnings.
+ (for help on the names of warnings use <c><![CDATA[dialyzer -Whelp]]></c>)</item>
+ <tag><c><![CDATA[--shell]]></c></tag>
+ <item>do not disable the Erlang shell while running the GUI</item>
+ <tag><c><![CDATA[--version (or -v)]]></c></tag>
+ <item>prints the Dialyzer version and some more information and exits</item>
+ <tag><c><![CDATA[--help (or -h)]]></c></tag>
+ <item>prints this message and exits</item>
+ <tag><c><![CDATA[--quiet (or -q)]]></c></tag>
+ <item>makes Dialyzer a bit more quiet</item>
+ <tag><c><![CDATA[--verbose]]></c></tag>
+ <item>makes Dialyzer a bit more verbose</item>
+ <tag><c><![CDATA[--check_plt]]></c></tag>
+ <item>Only checks if the initial PLT is up to date and rebuilds it if this is not the case</item>
+ <tag><c><![CDATA[--no_check_plt (or -n)]]></c></tag>
+ <item>Skip the PLT integrity check when running Dialyzer.
+ Useful when working with installed PLTs that never change.</item>
+ <tag><c><![CDATA[--build_plt]]></c></tag>
+ <item>The analysis starts from an empty PLT and creates a new one from
+ the files specified with -c and -r. Only works for beam files.
+ Use --plt or --output_plt to override the default PLT location.</item>
+ <tag><c><![CDATA[--add_to_plt]]></c></tag>
+ <item> The PLT is extended to also include the files specified with
+ -c and -r. Use --plt to specify which PLT to start from, and --output_plt
+ to specify where to put the PLT. Note that the analysis might include
+ files from the PLT if they depend on the new files.
+ This option only works with beam files.</item>
+ <tag><c><![CDATA[--remove_from_plt]]></c></tag>
+ <item>The information from the files specified with -c and -r is removed
+ from the PLT. Note that this may cause a re-analysis of the remaining
+ dependent files.</item>
+ <tag><c><![CDATA[--get_warnings]]></c></tag>
+ <item>Makes Dialyzer emit warnings even when manipulating the PLT. Only
+ emits warnings for files that are actually analyzed. The default is to
+ not emit any warnings when manipulating the PLT. This option has no
+ effect when performing a normal analysis.</item>
+ </taglist>
+ <note>
+ <p>* denotes that multiple occurrences of these options are possible.</p>
+ <p>** options <c><![CDATA[-D]]></c> and <c><![CDATA[-I]]></c> work both from the command line and in the Dialyzer GUI;
+ the syntax of defines and includes is the same as that used by <c><![CDATA[erlc]]></c>.</p>
+ </note>
+ <p>Warning options:</p>
+ <taglist>
+ <tag><c><![CDATA[-Wno_return]]></c></tag>
+ <item>Suppress warnings for functions that will never return a value.</item>
+ <tag><c><![CDATA[-Wno_unused]]></c></tag>
+ <item>Suppress warnings for unused functions.</item>
+ <tag><c><![CDATA[-Wno_improper_lists]]></c></tag>
+ <item>Suppress warnings for construction of improper lists.</item>
+ <tag><c><![CDATA[-Wno_fun_app]]></c></tag>
+ <item>Suppress warnings for fun applications that will fail.</item>
+ <tag><c><![CDATA[-Wno_match]]></c></tag>
+ <item>Suppress warnings for patterns that are unused or cannot
+ match.</item>
+ <tag><c><![CDATA[-Werror_handling]]></c>***</tag>
+ <item>Include warnings for functions that only return by means of an
+ exception.</item>
+ <tag><c><![CDATA[-Wunmatched_returns]]></c>***</tag>
+ <item>Include warnings for function calls which ignore a structured return
+ value or do not match against one of many possible return value(s).</item>
+ <tag><c><![CDATA[-Wunderspecs]]></c>***</tag>
+ <item>Warn about underspecified functions
+ (the -spec is strictly more allowing than the success typing)</item>
+ <tag><c><![CDATA[-Woverspecs]]></c>***</tag>
+ <item>Warn about overspecified functions
+ (the -spec is strictly less allowing than the success typing)</item>
+ <tag><c><![CDATA[-Wspecdiffs]]></c>***</tag>
+ <item>Warn when the -spec is different than the success typing</item>
+ </taglist>
+ <note>
+ <p>*** These are options that turn on warnings rather than
+ turning them off.</p>
+ </note>
+ </section>
+
+ <section>
+ <title>Using the Dialyzer from Erlang</title>
+ <p>You can also use Dialyzer directly from Erlang. Both the GUI and the
+ command line versions are available. The options are similar to the ones
+ given from the command line, so please refer to the sections above for
+ a description of these.</p>
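+ <p>For illustration only, the following call starts the GUI preloaded
+ with options for a source-code analysis; the paths and the macro below
+ are placeholders for this example, not defaults of Dialyzer:</p>
+ <code type="none"><![CDATA[
+dialyzer:gui([{files_rec, ["my_app/src"]},        %% placeholder path
+              {from, src_code},
+              {include_dirs, ["my_app/include"]}, %% placeholder path
+              {defines, [{'DEBUG', true}]}]).     %% placeholder macro
+ ]]></code>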
+ </section>
+ <funcs>
+ <func>
+ <name>gui() -> ok | {error, Msg}</name>
+ <name>gui(OptList) -> ok | {error, Msg}</name>
+ <fsummary>Dialyzer GUI version</fsummary>
+ <type>
+ <v>OptList -- see below</v>
+ </type>
+ <desc>
+ <p>Dialyzer GUI version.</p>
+ <code type="none"><![CDATA[
+OptList : [Option]
+Option : {files, [Filename : string()]}
+ | {files_rec, [DirName : string()]}
+ | {defines, [{Macro: atom(), Value : term()}]}
+ | {from, src_code | byte_code} %% Defaults to byte_code
+ | {init_plt, FileName : string()} %% If changed from default
+ | {include_dirs, [DirName : string()]}
+ | {output_file, FileName : string()}
+ | {output_plt, FileName : string()}
+ | {analysis_type, 'success_typings' | 'plt_add' | 'plt_build' | 'plt_check' | 'plt_remove'}
+ | {warnings, [WarnOpts]}
+ | {get_warnings, bool()}
+
+WarnOpts : no_return
+ | no_unused
+ | no_improper_lists
+ | no_fun_app
+ | no_match
+ | no_fail_call
+ | error_handling
+ | unmatched_returns
+ | overspecs
+ | underspecs
+ | specdiffs
+ ]]></code>
+ </desc>
+ </func>
+ <func>
+ <name>run(OptList) -> Warnings</name>
+ <fsummary>Dialyzer command line version</fsummary>
+ <type>
+ <v>OptList -- see gui/0,1</v>
+ <v>Warnings -- see below </v>
+ </type>
+ <desc>
+ <p>Dialyzer command line version.</p>
+ <code type="none"><![CDATA[
+Warnings :: [{Tag, Id, Msg}]
+Tag : 'warn_return_no_exit' | 'warn_return_only_exit'
+ | 'warn_not_called' | 'warn_non_proper_list'
+ | 'warn_fun_app' | 'warn_matching'
+ | 'warn_failing_call' | 'warn_contract_types'
+ | 'warn_contract_syntax' | 'warn_contract_not_equal'
+ | 'warn_contract_subtype' | 'warn_contract_supertype'
+Id = {File :: string(), Line :: integer()}
+Msg = msg() -- Undefined
+]]></code>
+ </desc>
+ </func>
+ <func>
+ <name>format_warning(Msg) -> string()</name>
+ <fsummary>Get the string version of a warning message.</fsummary>
+ <type>
+ <v>Msg = {Tag, Id, msg()} -- See run/1</v>
+ </type>
+ <desc>
+ <p>Get a string from warnings as returned by dialyzer:run/1.</p>
+ </desc>
+ </func>
+ <func>
+ <name>plt_info(string()) -> {'ok', [{atom(), any()}]} | {'error', atom()}</name>
+ <fsummary>Returns information about the specified plt.</fsummary>
+ <desc>
+ <p>Returns information about the specified plt.</p>
+ </desc>
+ </func>
+ </funcs>
+</erlref>
diff --git a/lib/dialyzer/doc/src/dialyzer_chapter.xml b/lib/dialyzer/doc/src/dialyzer_chapter.xml
new file mode 100644
index 0000000000..d15069991e
--- /dev/null
+++ b/lib/dialyzer/doc/src/dialyzer_chapter.xml
@@ -0,0 +1,217 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Dialyzer</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>dialyzer_chapter.xml</file>
+ </header>
+
+ <section>
+ <title>Introduction</title>
+ <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies
+ such as type errors, unreachable code, unnecessary tests, etc in single Erlang modules
+ or entire (sets of) applications.</p>
+ </section>
+
+ <section>
+ <title>Using the Dialyzer from the GUI</title>
+
+ <section>
+ <title>Choosing the applications or modules</title>
+ <p>In the "File" window you will find a listing of the current directory.
+ Click your way to the directories/modules you want to add or type the
+ correct path in the entry.</p>
+ <p>Mark the directories/modules you want to analyze for discrepancies and
+ click "Add". You can either add the <c><![CDATA[.beam]]></c> and <c><![CDATA[.erl]]></c>-files directly, or
+ you can add directories that contain these kinds of files. Note that
+ you are only allowed to add the type of files that can be analyzed in
+ the current mode of operation (see below), and that you cannot mix
+ <c><![CDATA[.beam]]></c> and <c><![CDATA[.erl]]></c>-files.</p>
+ </section>
+
+ <section>
+ <title>The analysis modes</title>
+      <p>Dialyzer has two modes of analysis, "Byte Code" and "Source Code".
+ These are controlled by the buttons in the top-middle part of the
+ main window, under "Analysis Options".</p>
+ </section>
+
+ <section>
+ <title>Controlling the discrepancies reported by the Dialyzer</title>
+ <p>Under the "Warnings" pull-down menu, there are buttons that control
+ which discrepancies are reported to the user in the "Warnings" window.
+ By clicking on these buttons, one can enable/disable a whole class of
+ warnings. Information about the classes of warnings can be found on
+ the "Warnings" item under the "Help" menu (at the rightmost top corner).</p>
+ <p>If modules are compiled with inlining, spurious warnings may be emitted.
+ In the "Options" menu you can choose to ignore inline-compiled modules
+ when analyzing byte code. When starting from source code this is not a
+ problem since the inlining is explicitly turned off by Dialyzer. The
+ option causes Dialyzer to suppress all warnings from inline-compiled
+ modules, since there is currently no way for Dialyzer to find what
+ parts of the code have been produced by inlining. </p>
+ </section>
+
+ <section>
+ <title>Running the analysis</title>
+ <p>Once you have chosen the modules or directories you want to analyze,
+ click the "Run" button to start the analysis. If for some reason you
+ want to stop the analysis while it is running, push the "Stop" button.</p>
+ <p>The information from the analysis will be displayed in the Log and the
+ Warnings windows.</p>
+ </section>
+
+ <section>
+ <title>Include directories and macro definitions</title>
+ <p>When analyzing from source you might have to supply Dialyzer with a
+ list of include directories and macro definitions (as you can do with
+ the <c><![CDATA[erlc]]></c> flags <c><![CDATA[-I]]></c> and <c><![CDATA[-D]]></c>). This can be done either by starting Dialyzer
+ with these flags from the command line as in:</p>
+ <code type="none">
+
+ dialyzer -I my_includes -DDEBUG -Dvsn=42 -I one_more_dir
+ </code>
+ <p>or by adding these explicitly using the "Manage Macro Definitions" or
+ "Manage Include Directories" sub-menus in the "Options" menu.</p>
+ </section>
+
+ <section>
+ <title>Saving the information on the Log and Warnings windows</title>
+ <p>In the "File" menu there are options to save the contents of the Log
+ and the Warnings window. Just choose the options and enter the file to
+ save the contents in.</p>
+ <p>There are also buttons to clear the contents of each window.</p>
+ </section>
+
+ <section>
+ <title>Inspecting the inferred types of the analyzed functions</title>
+      <p>Dialyzer stores information about the analyzed functions in a
+ Persistent Lookup Table (PLT). After an analysis you can inspect this
+ information. In the PLT menu you can choose to either search the PLT
+ or inspect the contents of the whole PLT. The information is presented
+ in edoc format.</p>
+ </section>
+ </section>
+
+ <section>
+ <title>Using the Dialyzer from the command line</title>
+ <p>See <seealso marker="dialyzer">dialyzer(3)</seealso>.</p>
+ </section>
+
+ <section>
+ <title>Using the Dialyzer from Erlang</title>
+ <p>See <seealso marker="dialyzer">dialyzer(3)</seealso>.</p>
+ </section>
+
+ <section>
+ <title>More on the Persistent Lookup Table (PLT)</title>
+
+ <p> The persistent lookup table, or PLT, is used to store the
+ result of an analysis. The PLT can then be used as a starting
+ point for later analyses. It is recommended to build a PLT with
+    the OTP applications that you are using, and to also include your
+    own frequently used applications.</p>
+
+ <p>The PLT is built using the --build_plt option to dialyzer. The
+ following command builds the recommended minimal PLT for OTP.</p>
+
+ <code type="none">
+
+ dialyzer --build_plt -r $ERL_TOP/lib/stdlib/ebin $ERL_TOP/lib/kernel/ebin $ERL_TOP/lib/mnesia/ebin
+ </code>
+
+  <p>Dialyzer will check whether there is an environment variable called
+  $DIALYZER_PLT and, if so, place the PLT at that location. If no such
+  variable is set, Dialyzer will place the PLT at
+  $HOME/.dialyzer_plt. The location can also be specified using the
+  --plt or --output_plt options.</p>
+
+  <p>You can also add information to an existing PLT using the
+  --add_to_plt option. Suppose, for example, that you want to also include
+  the compiler in the PLT and place the result in a new PLT; then give the command</p>
+
+ <code type="none">
+
+ dialyzer --add_to_plt -r $ERL_TOP/lib/compiler/ebin --output_plt my.plt
+ </code>
+
+  <p>You can then add your own application my_app to the new PLT:</p>
+
+ <code type="none">
+
+ dialyzer --add_to_plt --plt my.plt -r my_app/ebin
+ </code>
+
+  <p>If you later realize that the compiler is not needed in this PLT, you can remove it:</p>
+
+ <code type="none">
+
+ dialyzer --remove_from_plt --plt my.plt -r $ERL_TOP/lib/compiler/ebin
+ </code>
+
+  <p>Later, when you have fixed a bug in your application my_app and
+  want the PLT to be fresh the next time you run Dialyzer, update it
+  with the command</p>
+
+ <code type="none">
+
+ dialyzer --check_plt --plt my.plt
+ </code>
+
+  <p>Dialyzer will then reanalyze the files that have changed and the
+  files that depend on them. Note that this consistency check is
+  performed automatically the next time you run Dialyzer with this
+  PLT. The --check_plt option merely performs the check without doing
+  any other analysis.</p>
+
+  <p>To get some information about a PLT, use the option</p>
+ <code type="none">
+
+ dialyzer --plt_info
+ </code>
+
+  <p>You can also specify which PLT to use with the --plt option, and have
+  the output written to a file with --output_file.</p>
+
+  <p>Note that when manipulating the PLT, no warnings are
+  emitted. To turn on warnings during (re)analysis of the PLT, use
+  the option --get_warnings.</p>
+
+ </section>
+
+ <section>
+ <title>Feedback and bug reports</title>
+ <p>At this point, we very much welcome user feedback (even wish-lists!).
+ If you notice something weird, especially if the Dialyzer reports any
+ discrepancy that is a false positive, please send an error report
+ describing the symptoms and how to reproduce them to:</p>
+ <code type="none"><![CDATA[
+ ]]></code>
+ </section>
+</chapter>
+
diff --git a/lib/dialyzer/doc/src/fascicules.xml b/lib/dialyzer/doc/src/fascicules.xml
new file mode 100755
index 0000000000..0678195e07
--- /dev/null
+++ b/lib/dialyzer/doc/src/fascicules.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE fascicules SYSTEM "fascicules.dtd">
+
+<fascicules>
+ <fascicule file="part" href="part_frame.html" entry="no">
+ User's Guide
+ </fascicule>
+ <fascicule file="ref_man" href="ref_man_frame.html" entry="yes">
+ Reference Manual
+ </fascicule>
+ <fascicule file="part_notes" href="part_notes_frame.html" entry="no">
+ Release Notes
+ </fascicule>
+ <fascicule file="" href="../../../../doc/print.html" entry="no">
+ Off-Print
+ </fascicule>
+</fascicules>
+
diff --git a/lib/dialyzer/doc/src/make.dep b/lib/dialyzer/doc/src/make.dep
new file mode 100755
index 0000000000..f8177cd419
--- /dev/null
+++ b/lib/dialyzer/doc/src/make.dep
@@ -0,0 +1,20 @@
+# ----------------------------------------------------
+# >>>> Do not edit this file <<<<
+# This file was automatically generated by
+# /home/otp/bin/docdepend
+# ----------------------------------------------------
+
+
+# ----------------------------------------------------
+# TeX files that the DVI file depend on
+# ----------------------------------------------------
+
+book.dvi: book.tex dialyzer.tex dialyzer_chapter.tex \
+ part.tex ref_man.tex
+
+# ----------------------------------------------------
+# Source inlined when transforming from source to LaTeX
+# ----------------------------------------------------
+
+book.tex: ref_man.xml
+
diff --git a/lib/dialyzer/doc/src/note.gif b/lib/dialyzer/doc/src/note.gif
new file mode 100644
index 0000000000..6fffe30419
--- /dev/null
+++ b/lib/dialyzer/doc/src/note.gif
Binary files differ
diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml
new file mode 100755
index 0000000000..8e8aab60b8
--- /dev/null
+++ b/lib/dialyzer/doc/src/notes.xml
@@ -0,0 +1,746 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Dialyzer Release Notes</title>
+ <prepared>otp_appnotes</prepared>
+ <docno>nil</docno>
+ <date>nil</date>
+ <rev>nil</rev>
+ <file>notes.xml</file>
+ </header>
+ <p>This document describes the changes made to the Dialyzer
+ application.</p>
+
+<section><title>Dialyzer 2.1.0</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ The documentation is now built with open source tools
+            (xsltproc and fop) that exist on most platforms. One
+ visible change is that the frames are removed.</p>
+ <p>
+ Own Id: OTP-8201</p>
+ </item>
+ <item>
+ <p>
+ Dialyzer can statically detect some kinds of data races
+ in Erlang programs. Use the new option -Wrace_conditions
+ to enable the race analysis. The technique is described
+ in a paper which is available at:
+ http://www.it.uu.se/research/group/hipe/dialyzer/publications/races.pdf</p>
+ <p>
+ Added support for packages (thanks to Maria Christakis).</p>
+ <p>
+ There has been a major change in the default mode of
+ Dialyzer. Previously, the default mode was the GUI, while
+ now it is the command line. As a result of this change,
+ it is now possible to analyze a set of files/dirs with
+ the command: </p><list> <item>dialyzer file1 ...
+ fileN</item> </list><p> In other words, the -c
+ (--command-line) option is no longer necessary, though it
+ will be retained for some time for backwards
+ compatibility. To start dialyzer's GUI use either of the
+ following commands: </p><list> <item>dialyzer --gui %%
+ for the old gs-based GUI</item> <item>dialyzer --wx %%
+ for the new wx-based GUI (where available)</item>
+ </list><p></p>
+ <p>
+ There is a new option --apps which allows the user to
+ easily refer to Erlang/OTP applications and include them
+ in the analysis or in the building of the PLT. For
+ example, we recommend building the PLT with: </p><list>
+ <item>dialyzer --build_plt --apps erts kernel stdlib
+ mnesia ...</item> </list><p></p>
+ <p>
+ The new option can also take absolute file names as well
+ as applications. Note that the application versions that
+ will be included in the PLT are those that correspond to
+ the Erlang/OTP system which is used.</p>
+ <p>
+ Dialyzer has a new wxWidgets based GUI (thanks to Elli
+ Frangaki) for platforms where the wx application is
+ available.</p>
+ <p>
+ Own Id: OTP-8300</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 2.0.0</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>There is a major addition to the capabilities of
+ dialyzer, worthy of bumping the version number. Starting
+ with this version, dialyzer not only accepts but also
+ properly processes remote types (i.e., types of the form
+ ModuleName:TypeName()). Previous dialyzer versions only
+ accepted this notation in -type and -spec declarations,
+ but effectively ignored its information by mapping remote
+ types to the type any(). In contrast, starting with this
+ version, remote types are used in the analysis and are
+ also stored in the PLTs. (This addition changes the
+ format of PLTs and requires rebuilding any PLTs created
+ by an older dialyzer version.) Note that dialyzer will
+ complain and abort the analysis of a set of modules if it
+ needs to process a remote type without a definition
+ (either because the module does not include a definition
+ of the type or the module is not included in the
+ analysis). We may relax this restriction in a future
+ version.</p>
+ <p>Fixed minor issue with dialyzer:run/1 crashing (upon
+ its return) when used for adding type information to an
+ existing PLT.</p>
+ <p>Fixed minor but quite annoying issues in dialyzer's
+ GUI.</p>
+ <p>
+ Own Id: OTP-8187</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.9.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>Fixed problem with type inference going into an
+ infinite loop when analyzing a strongly connected
+ component of functions that do not return but also
+ contain an erroneous call which makes them fail and be
+ assigned the type none() instead of the type unit().</p>
+ <p>More accurate type information for some BIFs and
+ library files.</p>
+ <p>Introduced boolean() as the `official' name for the
+          type that was so far known as bool(). The latter is still
+          accepted, as is boolean().</p>
+ <p>
+ Own Id: OTP-8037</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.9.1</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>Has better handling of opaque types.</p>
+ <p>The handling of UTF segments of bitstreams has been
+ significantly strengthened and revised. In all
+ probability, now it is correct.</p>
+ <p>
+ Own Id: OTP-7958</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+<section><title>Dialyzer 1.9.0</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>The analysis accepts opaque type declarations and
+ detects violations of opaqueness of terms of such types.
+ Starting with R13, many Erlang/OTP standard libraries
+ (array, dict, digraph, ets, gb_sets, gb_trees, queue, and
+ sets) contain opaque type declarations of their main data
+ types. Dialyzer will spit out warnings in code that
+ explicitly depends on the structure of these terms.</p>
+ <p>Added support for handling UTF segments in bitstreams
+ and for detecting obvious type errors in these segments.
+ Warning: This code is not terribly tested though since
+ there are very few Erlang programs which use
+ Unicode-based binaries - not surprising since this is a
+ new language feature of R13.</p>
+ <p>Strengthened the discrepancy identification when
+ testing for equality and matching between terms of
+ different types. This detects more bugs in code.</p>
+ <p>Added warning for M:F(...) calls where M is not a
+ module term and F is not an atom. Previously, such calls
+          were found to fail but the reason for the failure was
+ not reported.</p>
+ <p>Added a convenient shorthand for the --no_check_plt
+ option (-n).</p>
+ <p>Added the --dump_callgraph option for dumping the
+ callgraph of all files that are analyzed into a specified
+          file. The callgraph can either be dumped in raw format, in
+ .dot format, or converted to a .ps (postscript) file.
+ Note that in large callgraphs the generated postscript
+ file might not be interpretable by Ghostview. (Thanks to
+ Ilya Khlopotov for the initial version of this
+ functionality.)</p>
+ <p>
+ Own Id: OTP-7864</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.8.3</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>Added the <c>--no_check_plt</c> option that makes the
+ startup time faster when working with stable PLTs that do
+ not change.</p>
+ <p>Changed the phrasing of some warnings so that they do
+ not cause confusion to some users and correspond better
+ to reality.</p>
+ <p>
+ Own Id: OTP-7632</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.8.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Minor updates.</p>
+ <p>
+ Own Id: OTP-7522</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.8.1</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+          There is a new <c>--raw</c> option for Dialyzer to output
+          the result of the analysis as Erlang terms, to facilitate
+ later filtering and/or formatting.</p>
+ <p>
+ Own Id: OTP-7386</p>
+ </item>
+ <item>
+ <p>The return type of the Erlang interface dialyzer:run/1
+ has changed to only return a list of warnings. If
+ something goes wrong dialyzer dies with an exception.</p>
+ <p>The handling of the PLT is now more flexible. There is
+ no longer any default PLT included with OTP. Please
+ consult the manual for the changes.</p>
+ <p>
+ Own Id: OTP-7389</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.8.0</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>Dialyzer's analysis is from now on exclusively based
+ on success typings. In particular, support for options
+ <c>--old_style</c> and <c>--dataflow</c> has been
+ discontinued.</p>
+ <p>Better and more aggressive handling of type
+ information in records.</p>
+ <p>Dialyzer has a new warning option
+ <c>-Wunmatched_returns</c> which warns for function calls
+ that ignore the return value. This catches many common
+ programming errors (e.g. calling <c>file:close/1</c> and
+ not checking for the absence of errors), interface
+ discrepancies (e.g. a function returning multiple values
+ when in reality the function is void and only called for
+ its side-effects), calling the wrong function (e.g.
+ <c>io_lib:format/1</c> instead of <c>io:format/1</c>),
+ and even possible performance defects (e.g. unnecessarily
+ constructing a list using a list comprehension instead of
+ using <c>lists:foreach/2</c>). Whenever a function
+ returns a single atomic value (e.g. 'ok' or pid()), the
+ warning is suppressed. This allows for "void" functions
+ (returning a single atom like 'ok') or for calls to
+ certain builtins like <c>spawn</c>. Because not all calls
+ which ignore the return value are discrepancies, the
+ option is off by default and has to be explicitly
+ requested by the user. But we recommend it
+ nevertheless.</p>
+ <p>Some warning options (<c>-Wno_comp</c>,
+ <c>-Wno_guards</c>, <c>-Wno_unsafe_beam</c>, etc.) which
+ could be used when analyzing bytecode produced by an old
+ BEAM compiler have been removed.</p>
+ <p>
+ Own Id: OTP-7241</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.7.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ The warnings returned by the Erlang interface now
+          contain a tag describing the type of warning.</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-7134</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 1.7.1</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>Use of success typings is now default, is robust and
+ detects significantly more errors than in previous
+ versions.</p>
+ <p>Dialyzer now accepts and takes into account type
+ information in record declarations and in contracts --
+ see related paper in Erlang'07 workshop. Various OTP
+ applications (e.g. stdlib and kernel) are partially
+ annotated with appropriate contracts specifying types
+          annotated with appropriate contracts specifying type
+ <p>The type previously known as unit() has been renamed
+ to no_return(). Its use in a contract as the return type
+ of a function now silences off the "function has no local
+          of a function now silences the "function has no local
+ option.</p>
+ <p>
+ Own Id: OTP-6997</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+
+ <section>
+ <title>Dialyzer 1.7.0</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Minor Makefile changes.</p>
+ <p>Own Id: OTP-6689</p>
+ </item>
+ <item>
+ <p>Dialyzer can now use success typings to find
+ discrepancies. As a consequence significantly more
+ discrepancies are detected. The downside is that the
+ analysis takes about 2 to 2.5 times longer than in
+ previous versions, and may also result in some warnings
+ that might be harder to interpret even though they are
+          correct. This has been a major change, worthy of
+ increasing the version number.</p>
+ <p>New command-line options:</p>
+ <p>--succ_typings Use the success typings analysis.</p>
+ <p>--dataflow (default) Use the previous analysis.</p>
+ <p>The new type unit() has been introduced to handle the
+ return type of non-terminating functions such as
+ servers.</p>
+ <p>Dialyzer's code server uses a compressed
+ representation and the analysis requires significantly
+ less memory than in previous versions.</p>
+ <p>Own Id: OTP-6736</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.6.0</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Dialyzer (in the modes where either source or BEAM
+ code which contains debug_info is analyzed) spits out
+ line information for all discrepancies. This has been a
+          major change, worthy of increasing the version number.</p>
+ <p>We warn users that *considerably* more discrepancies
+ are identified by this version of Dialyzer compared with
+          previous versions. If, for some reason,
+ warnings generated by previous versions of Dialyzer are
+ preferable, the command line option --old_style can be
+ employed.</p>
+ <p>Own Id: OTP-6546</p>
+ </item>
+ <item>
+ <p>Dialyzer handles term comparison operators more
+ precisely and is able to identify more discrepancies in
+ 'if' or 'case' statements with comparisons.</p>
+ <p></p>
+ <p>Dialyzer has more precise type information for many
+ standard OTP functions.</p>
+ <p>Own Id: OTP-6547</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.5.1</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Updated the chapter "More on the Persistent Lookup Table
+ (PLT)" in Dialyzer User's Guide and added information on
+ how to use Dialyzer from Erlang to <c><![CDATA[dialyzer(3)]]></c>.
+ Also, the Dialyzer text files used by its GUI are now
+ included in the Erlang/OTP release.</p>
+ <p>Own Id: OTP-6361</p>
+ </item>
+ <item>
+ <p>New options <c><![CDATA[--check_init_plt]]></c> and
+ <c><![CDATA[--verbose]]></c>.</p>
+ <p>Improvements in the analysis (meaning that this
+ version can find more discrepancies than the previous
+ version).</p>
+ <p>Own Id: OTP-6421</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.5.0</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Dialyzer's building of PLT is based on a different
+ type inference algorithm. More specifically, Dialyzer
+ uses inference of refined success typings to infer
+ function prototypes. As a result, Dialyzer bases its
+ analysis on a significantly more powerful basis and thus
+ is able to detect more discrepancies. The downside is
+ that building the PLT is a considerably slower process.
+ We will work on improving that.</p>
+ <p>Dialyzer takes into account the BEAM compiler
+ directive <c><![CDATA[-compile({nowarn_unused_function, {F,A}}).]]></c> and then suppresses the warning that
+ function F/A will never be called.</p>
+ <p>Dialyzer's default initial PLT now also includes
+ "mnesia".</p>
+ <p>Own Id: OTP-6304</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.4.2</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Improvements in PLT management.</p>
+ <p>Own Id: OTP-6128</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.4.1</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Some minor changes.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Some minor changes.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.4.0</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Changes for Dialyzer to work with Erlang/OTP R10B-10.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Dialyzer's analysis is significantly faster as it uses a global function
+ call-graph for obtaining type signatures for all analyzed functions.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.3.1</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Small changes for Dialyzer to work with Erlang/OTP R10B-5.</p>
+ </item>
+ <item>
+ <p>Fixed a small buglet in the analysis; this affected one of HiPE's files.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Modified setup script for execution under Cygwin (patch by Vlad Dumitrescu).</p>
+ </item>
+ <item>
+ <p>Added command line option --no_warn_on_inline.</p>
+ </item>
+ <item>
+ <p>Dialyzer now explicitly warns when modules with the same name
+ but from different dirs are being analyzed (contribution by Ulf Wiger).</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.3.0</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Fixed a number of false positives that Dialyzer 1.2.0 was spitting out.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Requires the presence of an Erlang/OTP R10B-4 system.</p>
+ </item>
+ <item>
+ <p>Dialyzer is significantly (approx 40%) faster since it now uses 'ets'
+ rather than 'dets' for its PLT.</p>
+ </item>
+ <item>
+ <p>Slightly improved the precision of the analysis.</p>
+ </item>
+ <item>
+ <p>In the GUI version, Dialyzer now reports the list of modules that should
+ be included in the modules to analyze in order to possibly improve
+ the accuracy of the reported results.</p>
+ </item>
+ <item>
+ <p>Some more information is displayed when calling a function or closure
+ with arguments of the wrong type.</p>
+ </item>
+ <item>
+ <p>The record guard now allows discrepancies involving tuples that are known
+ to be records to be displayed as #rec{} rather than {'rec',_,...,_}</p>
+ </item>
+ <item>
+ <p>Added -q option which makes the command-line version of Dialyzer
+ a bit more silent.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.2.0</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Dialyzer works with the open source and commercial versions of Erlang/OTP
+ R10B-2 on all platforms (i.e., HiPE support is not a prerequisite anymore).</p>
+ </item>
+ <item>
+ <p>Whenever a .beam file contains abstract code (i.e., has been compiled
+ with the +debug_info option), the analysis starts from this code rather
+ than from BEAM bytecode -- this makes the results identical to those
+ obtained by analysis starting from source code.
+ (This is a contribution from Bjorn Gustavsson - Thanks!)</p>
+ </item>
+ <item>
+ <p>Added -pa command line option for easier handling of -include_lib()
+ directives in source code.</p>
+ </item>
+ <item>
+ <p>Includes all changes added in v1.1.1; see below. The "Add Recursively"
+ functionality is also available in the command-line mode (-r option).</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.1.1</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Fixed problems using the shell script on Solaris machines.</p>
+ </item>
+ <item>
+ <p>Fixed small inconsistencies in Dialyzer's documentation and help.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+          <p>The command-line mode of Dialyzer prints discrepancies on stdout
+ by default and returns a numeric value indicating its exit status.</p>
+ </item>
+ <item>
+ <p>Added "Add Recursively" button contributed by Martin Bjorklund (thanks!).</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.1.0</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Dialyzer works with the open source version of Erlang/OTP R10B-1 (on
+ platforms where HiPE support is available) and it does not require
+ installation of a customized Erlang/OTP system anymore.</p>
+ </item>
+ <item>
+ <p>Dialyzer comes with an extensive command-line interface, in addition
+ to an enhanced GUI.</p>
+ </item>
+ <item>
+ <p>Analysis can start from source code (through Core Erlang) as well as
+ from BEAM bytecode.</p>
+ </item>
+ <item>
+ <p>Dialyzer finds considerably more discrepancies in Erlang code than
+ previous versions.</p>
+ </item>
+ <item>
+ <p>Added ability to selectively turn on/off different warning categories.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.0.1</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Fixed major bug of v1.0.0 which caused the analysis to stop prematurely
+ (i.e., without properly reaching fixpoint) when analyzing more than one
+ module. This did not affect correctness of the results, but lost many
+ opportunities to detect discrepancies.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Performance improvements in the analysis.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>Dialyzer 1.0.0</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>Initial Dialyzer version: Analysis starts from BEAM bytecode only and
+ only a GUI version is available. Dialyzer reports discrepancies in the
+ use of the "sequential" part of Erlang. To be used, Dialyzer requires
+ the presence of a specific snapshot of Erlang/OTP which can be obtained
+ from Dialyzer's homepage.</p>
+ </item>
+ </list>
+ </section>
+ </section>
+</chapter>
+
diff --git a/lib/dialyzer/doc/src/part.xml b/lib/dialyzer/doc/src/part.xml
new file mode 100755
index 0000000000..4410840660
--- /dev/null
+++ b/lib/dialyzer/doc/src/part.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE part SYSTEM "part.dtd">
+
+<part xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Dialyzer User's Guide</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>part.xml</file>
+ </header>
+ <description>
+    <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies such as type errors, unreachable code, unnecessary tests, etc. in single Erlang modules or entire (sets of) applications.</p>
+ </description>
+ <xi:include href="dialyzer_chapter.xml"/>
+</part>
+
diff --git a/lib/dialyzer/doc/src/part_notes.xml b/lib/dialyzer/doc/src/part_notes.xml
new file mode 100755
index 0000000000..cb048d55dd
--- /dev/null
+++ b/lib/dialyzer/doc/src/part_notes.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE part SYSTEM "part.dtd">
+
+<part xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Dialyzer Release Notes</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ <file>part_notes.xml</file>
+ </header>
+ <description>
+    <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies such as type errors, unreachable code, unnecessary tests, etc. in single Erlang modules or entire (sets of) applications.</p>
+ </description>
+ <xi:include href="notes.xml"/>
+</part>
+
diff --git a/lib/dialyzer/doc/src/ref_man.xml b/lib/dialyzer/doc/src/ref_man.xml
new file mode 100755
index 0000000000..ca5410f6b8
--- /dev/null
+++ b/lib/dialyzer/doc/src/ref_man.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE application SYSTEM "application.dtd">
+
+<application xmlns:xi="http://www.w3.org/2001/XInclude">
+ <header>
+ <copyright>
+ <year>2006</year><year>2009</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Dialyzer Reference Manual</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
+ </header>
+ <description>
+    <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies such as type errors, unreachable code, unnecessary tests, etc. in single Erlang modules or entire (sets of) applications.</p>
+ </description>
+ <xi:include href="dialyzer.xml"/>
+</application>
+
diff --git a/lib/dialyzer/doc/src/warning.gif b/lib/dialyzer/doc/src/warning.gif
new file mode 100644
index 0000000000..96af52360e
--- /dev/null
+++ b/lib/dialyzer/doc/src/warning.gif
Binary files differ
diff --git a/lib/dialyzer/doc/warnings.txt b/lib/dialyzer/doc/warnings.txt
new file mode 100644
index 0000000000..fd8cc0dc60
--- /dev/null
+++ b/lib/dialyzer/doc/warnings.txt
@@ -0,0 +1,114 @@
+##----------------------------------------------------------------------------
+## File: doc/warnings.txt
+## Author(s): Tobias Lindahl <[email protected]>
+## Kostis Sagonas <[email protected]>
+##
+## $Id$
+##----------------------------------------------------------------------------
+
+
+The discrepancies currently identified by Dialyzer can be classified
+in the following categories:
+
+TYPE ERRORS
+===========
+
+* Match failure
+ - Warnings:
+ "The clause matching on X will never match; argument is of type T"
+ "The clause matching on tuple with arity N will never match; "
+ " argument is of type T!"
+ - Description:
+ The function or case clause will never match since the calling
+ argument has a different type than the expected one.
+ Note that due to pattern-matching compilation the X above may be
+ an argument enclosed in some structured term (tuple or list).
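+  - Example:
+      A hypothetical two-function sketch that triggers the warning:
+
+        f(X) when is_atom(X) -> g(X).
+        g({A, B}) -> {B, A}.   %% g/1 is only called with an atom(),
+                               %% so this clause can never match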
+
+* Function call with wrong arguments
+ - Warning:
+ "Call to function X with signature S will fail since the arguments are of type T!"
+ - Description:
+ The arguments which the function is called with are
+ not what the function implicitly expects.
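+  - Example:
+      A hypothetical sketch that triggers the warning:
+
+        f() -> g(42).
+        g(A) when is_atom(A) -> A.   %% g/1 is called with 42, but its
+                                     %% only clause accepts an atom()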
+
+* Closure of wrong type
+ - Warnings:
+ "Fun application using type T instead of a fun!"
+ "Trying to use fun with type T with arguments AT"
+ - Description:
+ The variable that is used in the fun application is either not
+ a closure (fun entry) or a closure with the wrong domain.
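+  - Example:
+      A hypothetical sketch that triggers the warning:
+
+        f() ->
+          F = fun(X) -> X end,
+          F(1, 2).   %% F is a fun of arity 1 applied to two arguments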
+
+* Improper list construction
+ - Warnings:
+ "Cons will produce a non-proper list since its 2nd arg is of type T!"
+ "Call to '++'/2 will produce a non-proper list since its 2nd arg is of type T!"
+ - Description:
+ This is a place where an improper list (i.e., a list whose last
+ element is not the empty list []) is constructed. Strictly, these
+ are not discrepancies in Erlang, but we strongly recommend that
+ you fix these; there is ABSOLUTELY NO reason to create improper lists.
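+  - Example:
+      A hypothetical sketch that triggers the warning:
+
+        f() -> [1 | 2].   %% the 2nd argument of cons is 2, not a list,
+                          %% so the result is an improper list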
+
+* Function of no return
+ - Warning:
+ "Function will never return a proper value!"
+ - Description:
+ This is a function that never returns. Strictly speaking, this
+ is not a function and the code is OK only if this is used as a
+ point where an exception is thrown when handling an error.
+
+REDUNDANT OR UNREACHABLE CODE
+=============================
+
+* Unreachable case clause
+ - Warning:
+ "Type guard X will always fail since variable is of type T!"
+ - Description:
+ The case clause is redundant since the input argument is of a
+ different type.
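+  - Example:
+      A hypothetical sketch that triggers the warning:
+
+        f(X) when is_integer(X) ->
+          case X of
+            %% X is an integer(), so this type guard always fails
+            Y when is_atom(Y) -> {atom, Y};
+            _ -> other
+          end.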
+
+* Unreachable function clause
+ - Warning:
+ "The guard X will always fail since the arguments are of type T!"
+ - Description:
+ The clause is made redundant due to one of its guards always failing.
+
+* Term comparison failure
+ - Warnings:
+ "=:= between T1 and T2 will always fail!"
+ "=/= between T1 and T2 will always fail!"
+ - Description:
+ The term comparison will always fail making the test always return
+ 'false' or, in a guard context, making the clause redundant.
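+  - Example:
+      A hypothetical sketch that triggers the warning:
+
+        f(X) when is_integer(X) ->
+          X =:= ok.   %% =:= between integer() and 'ok' always fails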
+
+* Unused function
+ - Warning:
+ "Function will never be called!"
+ - Description:
+ The function is unused; no need to have it uncommented in the code.
+
+
+CODE RELICS
+===========
+
+* Tuple used as fun
+ - Warnings:
+ "Unsafe use of tuple as a fun in call to X"
+ "Tuple used as fun will fail in native compiled code"
+ - Description:
+ A 2-tuple is used as a function closure. The modern way of
+ calling higher-order code in Erlang is by using proper funs.
+ The code should be rewritten using a proper 'fun' rather than
+      a 2-tuple since, among other things, this makes the code cleaner and
+ is safer for execution in native code.
+
+* Unsafe BEAM code
+ - Warning:
+ "Unsafe BEAM code! Please recompile with a newer BEAM compiler."
+ - Description:
+ The analysis has encountered BEAM bytecode which will fail in a
+      really bad way (even with a seg-fault) if used in an improper way.
+ Such code was produced by the BEAM compiler of R9C-0 (and prior)
+ for some record expressions. The recommended action is to
+ generate a new .beam file using a newer version of the BEAM
+ compiler.
diff --git a/lib/dialyzer/ebin/.gitignore b/lib/dialyzer/ebin/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/ebin/.gitignore
diff --git a/lib/dialyzer/info b/lib/dialyzer/info
new file mode 100755
index 0000000000..9fba4b54ad
--- /dev/null
+++ b/lib/dialyzer/info
@@ -0,0 +1,2 @@
+group: tools
+short: The DIALYZER, a DIscrepancy AnaLYZer for ERlang programs.
diff --git a/lib/dialyzer/plt/.gitignore b/lib/dialyzer/plt/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/plt/.gitignore
diff --git a/lib/dialyzer/src/Makefile b/lib/dialyzer/src/Makefile
new file mode 100644
index 0000000000..ffdc0c6dcd
--- /dev/null
+++ b/lib/dialyzer/src/Makefile
@@ -0,0 +1,159 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2006-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+#=============================================================================
+#
+# File: lib/dialyzer/src/Makefile
+# Authors: Kostis Sagonas and Tobias Lindahl
+#
+#=============================================================================
+
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+VSN=$(DIALYZER_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/dialyzer-$(VSN)
+
+# ----------------------------------------------------
+# Orientation information
+# ----------------------------------------------------
+DIALYZER_DIR = $(ERL_TOP)/lib/dialyzer
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULES = \
+ dialyzer \
+ dialyzer_analysis_callgraph \
+ dialyzer_callgraph \
+ dialyzer_cl \
+ dialyzer_cl_parse \
+ dialyzer_codeserver \
+ dialyzer_contracts \
+ dialyzer_dataflow \
+ dialyzer_dep \
+ dialyzer_explanation \
+ dialyzer_gui \
+ dialyzer_gui_wx \
+ dialyzer_options \
+ dialyzer_plt \
+ dialyzer_races \
+ dialyzer_succ_typings \
+ dialyzer_typesig \
+ dialyzer_utils
+
+HRL_FILES= dialyzer.hrl dialyzer_gui_wx.hrl
+ERL_FILES= $(MODULES:%=%.erl)
+INSTALL_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
+TARGET_FILES= $(INSTALL_FILES)
+
+APP_FILE= dialyzer.app
+APP_SRC= $(APP_FILE).src
+APP_TARGET= $(EBIN)/$(APP_FILE)
+
+APPUP_FILE= dialyzer.appup
+APPUP_SRC= $(APPUP_FILE).src
+APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+
+ifeq ($(NATIVE_LIBS_ENABLED),yes)
+ERL_COMPILE_FLAGS += +native
+endif
+ERL_COMPILE_FLAGS += +warn_exported_vars +warn_unused_import +warn_untyped_record +warn_missing_spec
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt: $(TARGET_FILES)
+
+docs:
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(EBIN)/dialyzer_cl_parse.$(EMULATOR): dialyzer_cl_parse.erl ../vsn.mk
+ erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) dialyzer_cl_parse.erl
+
+$(EBIN)/dialyzer_plt.$(EMULATOR): dialyzer_plt.erl ../vsn.mk
+ erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) dialyzer_plt.erl
+
+$(EBIN)/dialyzer_gui.$(EMULATOR): dialyzer_gui.erl ../vsn.mk
+ erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) dialyzer_gui.erl
+
+$(EBIN)/dialyzer_gui_wx.$(EMULATOR): dialyzer_gui_wx.erl ../vsn.mk
+ erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) dialyzer_gui_wx.erl
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+# ---------------------------------------------------------------------
+# dependencies -- I wish they were somehow automatically generated
+# ---------------------------------------------------------------------
+
+$(EBIN)/dialyzer.beam: dialyzer.hrl
+$(EBIN)/dialyzer_analysis_callgraph.beam: dialyzer.hrl
+$(EBIN)/dialyzer_callgraph.beam: dialyzer.hrl
+$(EBIN)/dialyzer_cl.beam: dialyzer.hrl ../../kernel/include/file.hrl
+$(EBIN)/dialyzer_cl_parse.beam: dialyzer.hrl
+$(EBIN)/dialyzer_codeserver.beam: dialyzer.hrl
+$(EBIN)/dialyzer_contracts.beam: dialyzer.hrl
+$(EBIN)/dialyzer_dataflow.beam: dialyzer.hrl
+$(EBIN)/dialyzer_dep.beam: dialyzer.hrl
+$(EBIN)/dialyzer_explanation.beam: dialyzer.hrl
+$(EBIN)/dialyzer_gui.beam: dialyzer.hrl
+$(EBIN)/dialyzer_gui_wx.beam: dialyzer.hrl dialyzer_gui_wx.hrl
+$(EBIN)/dialyzer_options.beam: dialyzer.hrl
+$(EBIN)/dialyzer_plt.beam: dialyzer.hrl
+$(EBIN)/dialyzer_races.beam: dialyzer.hrl
+$(EBIN)/dialyzer_succ_typings.beam: dialyzer.hrl
+$(EBIN)/dialyzer_typesig.beam: dialyzer.hrl
+$(EBIN)/dialyzer_utils.beam: dialyzer.hrl
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(ERL_FILES) $(HRL_FILES) $(EXTRA_FILES) \
+ $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(INSTALL_FILES) $(RELSYSDIR)/ebin
+
+release_docs_spec:
diff --git a/lib/dialyzer/src/dialyzer.app.src b/lib/dialyzer/src/dialyzer.app.src
new file mode 100644
index 0000000000..c1d109812c
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer.app.src
@@ -0,0 +1,42 @@
+%% This is an -*- erlang -*- file.
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+{application, dialyzer,
+ [{description, "DIscrepancy AnaLYZer of ERlang programs, version %VSN%"},
+ {vsn, "%VSN%"},
+ {modules, [dialyzer,
+ dialyzer_analysis_callgraph,
+ dialyzer_callgraph,
+ dialyzer_cl,
+ dialyzer_cl_parse,
+ dialyzer_codeserver,
+ dialyzer_contracts,
+ dialyzer_dataflow,
+ dialyzer_dep,
+ dialyzer_gui,
+ dialyzer_options,
+ dialyzer_plt,
+ dialyzer_races,
+ dialyzer_succ_typings,
+ dialyzer_typesig,
+ dialyzer_utils]},
+ {registered, []},
+ {applications, [compiler, gs, hipe, kernel, stdlib]},
+ {env, []}]}.
diff --git a/lib/dialyzer/src/dialyzer.appup.src b/lib/dialyzer/src/dialyzer.appup.src
new file mode 100644
index 0000000000..26d14ee8f4
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer.appup.src
@@ -0,0 +1,20 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+{"%VSN%",[],[]}.
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
new file mode 100644
index 0000000000..c1897ed892
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -0,0 +1,480 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer.erl
+%%% Authors : Tobias Lindahl <[email protected]>
+%%% Kostis Sagonas <[email protected]>
+%%% Description : This is the interface for the Dialyzer tool.
+%%%
+%%% Created : 27 Apr 2004 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+
+-module(dialyzer).
+
+%%--------------------------------------------------------------------
+%% NOTE: Only functions exported by this module are available to
+%% other applications.
+%%--------------------------------------------------------------------
+-export([plain_cl/0,
+ run/1,
+ gui/0,
+ gui/1,
+ plt_info/1,
+ format_warning/1]).
+
+-include("dialyzer.hrl").
+
+%%--------------------------------------------------------------------
+%% Interfaces:
+%% - plain_cl/0 : to be used ONLY by the dialyzer C program.
+%% - run/1: Erlang interface for a command line-like analysis
+%% - gui/0/1: Erlang interface for the gui.
+%% - format_warning/1: Get the string representation of a warning.
+%% - plt_info/1: Get information of the specified plt.
+%%--------------------------------------------------------------------
+
+-spec plain_cl() -> no_return().
+
+plain_cl() ->
+ case dialyzer_cl_parse:start() of
+ {check_init, Opts} ->
+ cl_halt(cl_check_init(Opts), Opts);
+ {plt_info, Opts} ->
+ cl_halt(cl_print_plt_info(Opts), Opts);
+ {{gui, Type}, Opts} ->
+ try check_gui_options(Opts)
+ catch throw:{dialyzer_error, Msg} -> cl_error(Msg)
+ end,
+ case Opts#options.check_plt of
+ true ->
+ case cl_check_init(Opts#options{get_warnings = false}) of
+ {ok, _} -> gui_halt(internal_gui(Type, Opts), Opts);
+ {error, _} = Error -> cl_halt(Error, Opts)
+ end;
+ false ->
+ gui_halt(internal_gui(Type, Opts), Opts)
+ end;
+ {cl, Opts} ->
+ case Opts#options.check_plt of
+ true ->
+ case cl_check_init(Opts#options{get_warnings = false}) of
+ {error, _} = Error -> cl_halt(Error, Opts);
+ {ok, _} -> cl_halt(cl(Opts), Opts)
+ end;
+ false ->
+ cl_halt(cl(Opts), Opts)
+ end;
+ {error, Msg} ->
+ cl_error(Msg)
+ end.
+
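+%% Check the consistency of the initial PLT. The PLT-manipulation modes
+%% (plt_build, plt_add, plt_remove) need no check, while the other modes
+%% trigger a plt_check run before the actual analysis.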
+cl_check_init(#options{analysis_type = AnalType} = Opts) ->
+ case AnalType of
+ plt_build -> {ok, ?RET_NOTHING_SUSPICIOUS};
+ plt_add -> {ok, ?RET_NOTHING_SUSPICIOUS};
+ plt_remove -> {ok, ?RET_NOTHING_SUSPICIOUS};
+ Other when Other =:= succ_typings; Other =:= plt_check ->
+ F = fun() ->
+ NewOpts = Opts#options{analysis_type = plt_check},
+ {Ret, _Warnings} = dialyzer_cl:start(NewOpts),
+ Ret
+ end,
+ doit(F)
+ end.
+
+cl_print_plt_info(Opts) ->
+ F = fun() ->
+ print_plt_info(Opts)
+ end,
+ doit(F).
+
+print_plt_info(#options{init_plt = PLT, output_file = OutputFile}) ->
+ String =
+ case dialyzer_plt:included_files(PLT) of
+ {ok, Files} ->
+ io_lib:format("The PLT ~s includes the following files:\n~p\n",
+ [PLT, Files]);
+ {error, read_error} ->
+ Msg = io_lib:format("Could not read the PLT file ~p\n", [PLT]),
+ throw({dialyzer_error, Msg});
+ {error, no_such_file} ->
+ Msg = io_lib:format("The PLT file ~p does not exist\n", [PLT]),
+ throw({dialyzer_error, Msg})
+ end,
+ case OutputFile =:= none of
+ true ->
+ io:format("~s", [String]),
+ ?RET_NOTHING_SUSPICIOUS;
+ false ->
+ case file:open(OutputFile, [write]) of
+ {ok, FileDesc} ->
+ io:format(FileDesc, "~s", [String]),
+ ok = file:close(FileDesc),
+ ?RET_NOTHING_SUSPICIOUS;
+ {error, Reason} ->
+ Msg1 = io_lib:format("Could not open output file ~p, Reason: ~p\n",
+ [OutputFile, Reason]),
+ throw({dialyzer_error, Msg1})
+ end
+ end.
+
+cl(Opts) ->
+ F = fun() ->
+ {Ret, _Warnings} = dialyzer_cl:start(Opts),
+ Ret
+ end,
+ doit(F).
+
+-spec run(dial_options()) -> [dial_warning()].
+
+run(Opts) ->
+ try dialyzer_options:build([{report_mode, quiet},
+ {erlang_mode, true}|Opts]) of
+ {error, Msg} ->
+ throw({dialyzer_error, Msg});
+ OptsRecord ->
+ case cl_check_init(OptsRecord) of
+ {ok, ?RET_NOTHING_SUSPICIOUS} ->
+ case dialyzer_cl:start(OptsRecord) of
+ {?RET_DISCREPANCIES, Warnings} -> Warnings;
+ {?RET_NOTHING_SUSPICIOUS, []} -> []
+ end;
+ {error, ErrorMsg1} ->
+ throw({dialyzer_error, ErrorMsg1})
+ end
+ catch
+ throw:{dialyzer_error, ErrorMsg} ->
+ erlang:error({dialyzer_error, lists:flatten(ErrorMsg)})
+ end.
+
+internal_gui(Type, OptsRecord) ->
+ F = fun() ->
+ case Type of
+ gs -> dialyzer_gui:start(OptsRecord);
+ wx -> dialyzer_gui_wx:start(OptsRecord)
+ end,
+ ?RET_NOTHING_SUSPICIOUS
+ end,
+ doit(F).
+
+-spec gui() -> 'ok'.
+
+gui() ->
+ gui([]).
+
+-spec gui(dial_options()) -> 'ok'.
+
+gui(Opts) ->
+ try dialyzer_options:build([{report_mode, quiet}|Opts]) of
+ {error, Msg} ->
+ throw({dialyzer_error, Msg});
+ OptsRecord ->
+ ok = check_gui_options(OptsRecord),
+ case cl_check_init(OptsRecord) of
+ {ok, ?RET_NOTHING_SUSPICIOUS} ->
+ F = fun() ->
+ dialyzer_gui:start(OptsRecord)
+ end,
+ case doit(F) of
+ {ok, _} -> ok;
+ {error, Msg} -> throw({dialyzer_error, Msg})
+ end;
+ {error, ErrorMsg1} ->
+ throw({dialyzer_error, ErrorMsg1})
+ end
+ catch
+ throw:{dialyzer_error, ErrorMsg} ->
+ erlang:error({dialyzer_error, lists:flatten(ErrorMsg)})
+ end.
+
+check_gui_options(#options{analysis_type = succ_typings}) ->
+ ok;
+check_gui_options(#options{analysis_type = Mode}) ->
+ Msg = io_lib:format("Analysis mode ~w is illegal in GUI mode", [Mode]),
+ throw({dialyzer_error, Msg}).
+
+-spec plt_info(file:filename()) ->
+ {'ok', [{'files', [file:filename()]}]} | {'error', atom()}.
+
+plt_info(Plt) ->
+ case dialyzer_plt:included_files(Plt) of
+ {ok, Files} -> {ok, [{files, Files}]};
+ Error -> Error
+ end.
+
+
+%%-----------
+%% Machinery
+%%-----------
+
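+%% Run the given fun, wrapping its result in {ok, Result} and converting
+%% a thrown {dialyzer_error, Msg} into an {error, FlatMsg} tuple.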
+doit(F) ->
+ try
+ {ok, F()}
+ catch
+ throw:{dialyzer_error, Msg} ->
+ {error, lists:flatten(Msg)}
+ end.
+
+cl_error(Msg) ->
+ cl_halt({error, Msg}, #options{}).
+
+gui_halt(R, Opts) ->
+ cl_halt(R, Opts#options{report_mode = quiet}).
+
+-spec cl_halt({'ok',dial_ret()} | {'error',string()}, #options{}) -> no_return().
+
+cl_halt({ok, R = ?RET_NOTHING_SUSPICIOUS}, #options{report_mode = quiet}) ->
+ halt(R);
+cl_halt({ok, R = ?RET_DISCREPANCIES}, #options{report_mode = quiet}) ->
+ halt(R);
+cl_halt({ok, R = ?RET_NOTHING_SUSPICIOUS}, #options{}) ->
+ io:put_chars("done (passed successfully)\n"),
+ halt(R);
+cl_halt({ok, R = ?RET_DISCREPANCIES}, #options{output_file = Output}) ->
+ io:put_chars("done (warnings were emitted)\n"),
+ cl_check_log(Output),
+ halt(R);
+cl_halt({error, Msg1}, #options{output_file = Output}) ->
+ %% Msg2 = "dialyzer: Internal problems were encountered in the analysis",
+ io:format("\ndialyzer: ~s\n", [Msg1]),
+ cl_check_log(Output),
+ halt(?RET_INTERNAL_ERROR).
+
+-spec cl_check_log('none' | file:filename()) -> 'ok'.
+
+cl_check_log(none) ->
+ ok;
+cl_check_log(Output) ->
+ io:format(" Check output file `~s' for details\n", [Output]).
+
+-spec format_warning(dial_warning()) -> string().
+
+format_warning({_Tag, {File, Line}, Msg}) when is_list(File),
+ is_integer(Line) ->
+ BaseName = filename:basename(File),
+ String = lists:flatten(message_to_string(Msg)),
+ lists:flatten(io_lib:format("~s:~w: ~s", [BaseName, Line, String])).
+
+
+%%-----------------------------------------------------------------------------
+%% Message classification and pretty-printing below. Messages appear in
+%% categories and in more or less alphabetical ordering within each category.
+%%-----------------------------------------------------------------------------
+
+%%----- Warnings for general discrepancies ----------------
+message_to_string({apply, [Args, ArgNs, FailReason,
+ SigArgs, SigRet, Contract]}) ->
+ io_lib:format("Fun application with arguments ~s ", [Args]) ++
+ call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
+message_to_string({app_call, [M, F, Args, Culprit, ExpectedType, FoundType]}) ->
+ io_lib:format("The call ~s:~s~s requires that ~s is of type ~s not ~s\n",
+ [M, F, Args, Culprit, ExpectedType, FoundType]);
+message_to_string({bin_construction, [Culprit, Size, Seg, Type]}) ->
+ io_lib:format("Binary construction will fail since the ~s field ~s in"
+ " segment ~s has type ~s\n", [Culprit, Size, Seg, Type]);
+message_to_string({call, [M, F, Args, ArgNs, FailReason,
+ SigArgs, SigRet, Contract]}) ->
+ io_lib:format("The call ~w:~w~s ", [M, F, Args]) ++
+ call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
+message_to_string({call_to_missing, [M, F, A]}) ->
+ io_lib:format("Call to missing or unexported function ~w:~w/~w\n", [M, F, A]);
+message_to_string({exact_eq, [Type1, Op, Type2]}) ->
+ io_lib:format("The test ~s ~s ~s can never evaluate to 'true'\n",
+ [Type1, Op, Type2]);
+message_to_string({fun_app_args, [Args, Type]}) ->
+ io_lib:format("Fun application with arguments ~s will fail"
+ " since the function has type ~s\n", [Args, Type]);
+message_to_string({fun_app_no_fun, [Op, Type, Arity]}) ->
+ io_lib:format("Fun application will fail since ~s :: ~s"
+ " is not a function of arity ~w\n", [Op, Type, Arity]);
+message_to_string({guard_fail, []}) ->
+ "Clause guard cannot succeed.\n";
+message_to_string({guard_fail, [Arg1, Infix, Arg2]}) ->
+ io_lib:format("Guard test ~s ~s ~s can never succeed\n", [Arg1, Infix, Arg2]);
+message_to_string({guard_fail, [Guard, Args]}) ->
+ io_lib:format("Guard test ~w~s can never succeed\n", [Guard, Args]);
+message_to_string({guard_fail_pat, [Pat, Type]}) ->
+ io_lib:format("Clause guard cannot succeed. The ~s was matched"
+ " against the type ~s\n", [Pat, Type]);
+message_to_string({improper_list_constr, [TlType]}) ->
+ io_lib:format("Cons will produce an improper list"
+ " since its 2nd argument is ~s\n", [TlType]);
+message_to_string({no_return, [Type|Name]}) ->
+ NameString =
+ case Name of
+ [] -> "The created fun ";
+ [F, A] -> io_lib:format("Function ~w/~w ", [F, A])
+ end,
+ case Type of
+ no_match -> NameString ++ "has no clauses that will ever match\n";
+ only_explicit -> NameString ++ "only terminates with explicit exception\n";
+ only_normal -> NameString ++ "has no local return\n";
+ both -> NameString ++ "has no local return\n"
+ end;
+message_to_string({record_constr, [Types, Name]}) ->
+ io_lib:format("Record construction ~s violates the"
+ " declared type for #~w{}\n", [Types, Name]);
+message_to_string({record_constr, [Name, Field, Type]}) ->
+ io_lib:format("Record construction violates the declared type for #~w{}"
+ " since ~s cannot be of type ~s\n", [Name, Field, Type]);
+message_to_string({record_matching, [String, Name]}) ->
+ io_lib:format("The ~s violates the"
+ " declared type for #~w{}\n", [String, Name]);
+message_to_string({pattern_match, [Pat, Type]}) ->
+ io_lib:format("The ~s can never match the type ~s\n", [Pat, Type]);
+message_to_string({pattern_match_cov, [Pat, Type]}) ->
+ io_lib:format("The ~s can never match since previous"
+ " clauses completely covered the type ~s\n",
+ [Pat, Type]);
+message_to_string({unmatched_return, [Type]}) ->
+ io_lib:format("Expression produces a value of type ~s,"
+ " but this value is unmatched\n", [Type]);
+message_to_string({unused_fun, []}) ->
+ io_lib:format("Function will never be called\n", []);
+message_to_string({unused_fun, [F, A]}) ->
+ io_lib:format("Function ~w/~w will never be called\n", [F, A]);
+%%----- Warnings for specs and contracts -------------------
+message_to_string({contract_diff, [M, F, _A, Contract, Sig]}) ->
+ io_lib:format("Type specification ~w:~w~s"
+ " is not equal to the success typing: ~w:~w~s\n",
+ [M, F, Contract, M, F, Sig]);
+message_to_string({contract_subtype, [M, F, _A, Contract, Sig]}) ->
+ io_lib:format("Type specification ~w:~w~s"
+ " is a subtype of the success typing: ~w:~w~s\n",
+ [M, F, Contract, M, F, Sig]);
+message_to_string({contract_supertype, [M, F, _A, Contract, Sig]}) ->
+ io_lib:format("Type specification ~w:~w~s"
+ " is a supertype of the success typing: ~w:~w~s\n",
+ [M, F, Contract, M, F, Sig]);
+message_to_string({invalid_contract, [M, F, A, Sig]}) ->
+ io_lib:format("Invalid type specification for function ~w:~w/~w."
+ " The success typing is ~s\n", [M, F, A, Sig]);
+message_to_string({extra_range, [M, F, A, ExtraRanges, SigRange]}) ->
+ io_lib:format("The specification for ~w:~w/~w states that the function"
+ " might also return ~s but the inferred return is ~s\n",
+ [M, F, A, ExtraRanges, SigRange]);
+message_to_string({overlapping_contract, []}) ->
+ "Overloaded contract has overlapping domains;"
+ " such contracts are currently unsupported and are simply ignored\n";
+message_to_string({spec_missing_fun, [M, F, A]}) ->
+ io_lib:format("Contract for function that does not exist: ~w:~w/~w\n",
+ [M, F, A]);
+%%----- Warnings for opaque type violations -------------------
+message_to_string({call_with_opaque, [M, F, Args, ArgNs, ExpArgs]}) ->
+ io_lib:format("The call ~w:~w~s contains ~s when ~s\n",
+ [M, F, Args, form_positions(ArgNs), form_expected(ExpArgs)]);
+message_to_string({call_without_opaque, [M, F, Args, ExpectedTriples]}) ->
+ io_lib:format("The call ~w:~w~s does not have ~s\n",
+ [M, F, Args, form_expected_without_opaque(ExpectedTriples)]);
+message_to_string({opaque_eq, [Type, _Op, OpaqueType]}) ->
+ io_lib:format("Attempt to test for equality between a term of type ~s"
+ " and a term of opaque type ~s\n", [Type, OpaqueType]);
+message_to_string({opaque_guard, [Guard, Args]}) ->
+ io_lib:format("Guard test ~w~s breaks the opaqueness of its argument\n",
+ [Guard, Args]);
+message_to_string({opaque_match, [Pat, OpaqueType, OpaqueTerm]}) ->
+ Term = if OpaqueType =:= OpaqueTerm -> "the term";
+ true -> OpaqueTerm
+ end,
+ io_lib:format("The attempt to match a term of type ~s against the ~s"
+ " breaks the opaqueness of ~s\n", [OpaqueType, Pat, Term]);
+message_to_string({opaque_neq, [Type, _Op, OpaqueType]}) ->
+ io_lib:format("Attempt to test for inequality between a term of type ~s"
+ " and a term of opaque type ~s\n", [Type, OpaqueType]);
+message_to_string({opaque_type_test, [Fun, Opaque]}) ->
+  io_lib:format("The type test ~s(~s) breaks the opaqueness of the term ~s\n",
+                [Fun, Opaque, Opaque]);

+%%----- Warnings for concurrency errors --------------------
+message_to_string({race_condition, [M, F, Args, Reason]}) ->
+ io_lib:format("The call ~w:~w~s ~s\n", [M, F, Args, Reason]).
+
+
+%%-----------------------------------------------------------------------------
+%% Auxiliary functions below
+%%-----------------------------------------------------------------------------
+
+call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
+ {IsOverloaded, Contract}) ->
+ PositionString = form_position_string(ArgNs),
+ case FailReason of
+ only_sig ->
+ case ArgNs =:= [] of
+ true ->
+ %% We do not know which argument(s) caused the failure
+ io_lib:format("will never return since the success typing arguments"
+ " are ~s\n", [SigArgs]);
+ false ->
+ io_lib:format("will never return since it differs in argument"
+ " ~s from the success typing arguments: ~s\n",
+ [PositionString, SigArgs])
+ end;
+ only_contract ->
+ case (ArgNs =:= []) orelse IsOverloaded of
+ true ->
+ %% We do not know which arguments caused the failure
+ io_lib:format("breaks the contract ~s\n", [Contract]);
+ false ->
+ io_lib:format("breaks the contract ~s in argument ~s\n",
+ [Contract, PositionString])
+ end;
+ both ->
+ io_lib:format("will never return since the success typing is ~s -> ~s"
+ " and the contract is ~s\n", [SigArgs, SigRet, Contract])
+ end.
+
+form_positions(ArgNs) ->
+ case ArgNs of
+ [_] -> "an opaque term in ";
+ [_,_|_] -> "opaque terms in "
+ end ++ form_position_string(ArgNs).
+
+%% We know which positions N are to blame;
+%% the list of triples will never be empty.
+form_expected_without_opaque([{N, T, TStr}]) ->
+ case erl_types:t_is_opaque(T) of
+ true ->
+ io_lib:format("an opaque term of type ~s in ", [TStr]);
+ false ->
+ io_lib:format("a term of type ~s (with opaque subterms) in ", [TStr])
+ end ++ form_position_string([N]);
+form_expected_without_opaque(ExpectedTriples) -> %% TODO: can do much better here
+ {ArgNs, _Ts, _TStrs} = lists:unzip3(ExpectedTriples),
+ "opaque terms in " ++ form_position_string(ArgNs).
+
+form_expected(ExpectedArgs) ->
+ case ExpectedArgs of
+ [T] ->
+ TS = erl_types:t_to_string(T),
+ case erl_types:t_is_opaque(T) of
+ true -> io_lib:format("an opaque term of type ~s is expected", [TS]);
+ false -> io_lib:format("a structured term of type ~s is expected", [TS])
+ end;
+ [_,_|_] -> "terms of different types are expected in these positions"
+ end.
+
+form_position_string(ArgNs) ->
+ case ArgNs of
+ [] -> "";
+ [N1] -> io_lib:format("position ~w", [N1]);
+ [_,_|_] ->
+ " and"++ArgString = lists:flatten([io_lib:format(" and ~w", [N])
+ || N <- ArgNs]),
+ "positions" ++ ArgString
+ end.
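As a small sketch (not part of the patch), two invented inputs showing what the position helper above renders as, once flattened:

    form_position_string([2])     %% renders as "position 2"
    form_position_string([1, 3])  %% renders as "positions 1 and 3"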
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
new file mode 100644
index 0000000000..f0f9bd25d7
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -0,0 +1,146 @@
+%%% This is an -*- Erlang -*- file.
+%%%
+%%% %CopyrightBegin%
+%%%
+%%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%%
+%%% The contents of this file are subject to the Erlang Public License,
+%%% Version 1.1, (the "License"); you may not use this file except in
+%%% compliance with the License. You should have received a copy of the
+%%% Erlang Public License along with this software. If not, it can be
+%%% retrieved online at http://www.erlang.org/.
+%%%
+%%% Software distributed under the License is distributed on an "AS IS"
+%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%% the License for the specific language governing rights and limitations
+%%% under the License.
+%%%
+%%% %CopyrightEnd%
+%%%
+%%%-------------------------------------------------------------------
+%%% File : dialyzer.hrl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Kostis Sagonas <[email protected]>
+%%% Description : Header file for Dialyzer.
+%%%
+%%% Created : 1 Oct 2004 by Kostis Sagonas <[email protected]>
+%%%-------------------------------------------------------------------
+
+-define(RET_NOTHING_SUSPICIOUS, 0).
+-define(RET_INTERNAL_ERROR, 1).
+-define(RET_DISCREPANCIES, 2).
+
+-type dial_ret() :: ?RET_NOTHING_SUSPICIOUS
+ | ?RET_INTERNAL_ERROR
+ | ?RET_DISCREPANCIES.
+
+%%--------------------------------------------------------------------
+%% Warning classification
+%%--------------------------------------------------------------------
+
+-define(WARN_RETURN_NO_RETURN, warn_return_no_exit).
+-define(WARN_RETURN_ONLY_EXIT, warn_return_only_exit).
+-define(WARN_NOT_CALLED, warn_not_called).
+-define(WARN_NON_PROPER_LIST, warn_non_proper_list).
+-define(WARN_FUN_APP, warn_fun_app).
+-define(WARN_MATCHING, warn_matching).
+-define(WARN_OPAQUE, warn_opaque).
+-define(WARN_FAILING_CALL, warn_failing_call).
+-define(WARN_BIN_CONSTRUCTION, warn_bin_construction).
+-define(WARN_CONTRACT_TYPES, warn_contract_types).
+-define(WARN_CONTRACT_SYNTAX, warn_contract_syntax).
+-define(WARN_CONTRACT_NOT_EQUAL, warn_contract_not_equal).
+-define(WARN_CONTRACT_SUBTYPE, warn_contract_subtype).
+-define(WARN_CONTRACT_SUPERTYPE, warn_contract_supertype).
+-define(WARN_CALLGRAPH, warn_callgraph).
+-define(WARN_UNMATCHED_RETURN, warn_unmatched_return).
+-define(WARN_RACE_CONDITION, warn_race_condition).
+
+%%
+%% The following type has double role:
+%% 1. It is the set of warnings that will be collected.
+%% 2. It is also the set of tags for warnings that will be returned.
+%%
+-type dial_warn_tag() :: ?WARN_RETURN_NO_RETURN | ?WARN_RETURN_ONLY_EXIT
+ | ?WARN_NOT_CALLED | ?WARN_NON_PROPER_LIST
+ | ?WARN_MATCHING | ?WARN_OPAQUE | ?WARN_FUN_APP
+ | ?WARN_FAILING_CALL | ?WARN_BIN_CONSTRUCTION
+ | ?WARN_CONTRACT_TYPES | ?WARN_CONTRACT_SYNTAX
+ | ?WARN_CONTRACT_NOT_EQUAL | ?WARN_CONTRACT_SUBTYPE
+ | ?WARN_CONTRACT_SUPERTYPE | ?WARN_CALLGRAPH
+ | ?WARN_UNMATCHED_RETURN | ?WARN_RACE_CONDITION.
+
+%%
+%% This is the representation of each warning as they will be returned
+%% to dialyzer's callers
+%%
+-type file_line() :: {file:filename(), non_neg_integer()}.
+-type dial_warning() :: {dial_warn_tag(), file_line(), {atom(), [term()]}}.
+
+%%
+%% This is the representation of dialyzer's internal errors
+%%
+-type dial_error() :: any(). %% XXX: underspecified
+
+%%--------------------------------------------------------------------
+%% THIS TYPE SHOULD ONE DAY DISAPPEAR -- IT DOES NOT BELONG HERE
+%%--------------------------------------------------------------------
+
+-type ordset(T) :: [T] . %% XXX: temporarily
+
+%%--------------------------------------------------------------------
+%% Basic types used either in the record definitions below or in other
+%% parts of the application
+%%--------------------------------------------------------------------
+
+-type anal_type() :: 'succ_typings' | 'plt_build'.
+-type anal_type1() :: anal_type() | 'plt_add' | 'plt_check' | 'plt_remove'.
+-type contr_constr() :: {'subtype', erl_types:erl_type(), erl_types:erl_type()}.
+-type contract_pair() :: {erl_types:erl_type(), [contr_constr()]}.
+-type dial_define() :: {atom(), term()}.
+-type dial_option() :: {atom(), term()}.
+-type dial_options() :: [dial_option()].
+-type label() :: non_neg_integer().
+-type rep_mode() :: 'quiet' | 'normal' | 'verbose'.
+-type start_from() :: 'byte_code' | 'src_code'.
+
+%%--------------------------------------------------------------------
+%% Record declarations used by various files
+%%--------------------------------------------------------------------
+
+-record(analysis, {analysis_pid :: pid(),
+ type = succ_typings :: anal_type(),
+ defines = [] :: [dial_define()],
+ doc_plt :: dialyzer_plt:plt(),
+ files = [] :: [file:filename()],
+ include_dirs = [] :: [file:filename()],
+ start_from = byte_code :: start_from(),
+ plt :: dialyzer_plt:plt(),
+ use_contracts = true :: boolean(),
+ race_detection = false :: boolean(),
+ callgraph_file = "" :: file:filename()}).
+
+-record(options, {files = [] :: [file:filename()],
+ files_rec = [] :: [file:filename()],
+ analysis_type = succ_typings :: anal_type1(),
+ defines = [] :: [dial_define()],
+ from = byte_code :: start_from(),
+ get_warnings = maybe :: boolean() | 'maybe',
+ init_plt = none :: 'none' | file:filename(),
+ include_dirs = [] :: [file:filename()],
+ output_plt = none :: 'none' | file:filename(),
+ legal_warnings = ordsets:new() :: ordset(dial_warn_tag()),
+ report_mode = normal :: rep_mode(),
+ erlang_mode = false :: boolean(),
+ use_contracts = true :: boolean(),
+ output_file = none :: 'none' | file:filename(),
+ output_format = formatted :: 'raw' | 'formatted',
+ callgraph_file = "" :: file:filename(),
+ check_plt = true :: boolean()
+ }).
+
+-record(contract, {contracts = [] :: [contract_pair()],
+ args = [] :: [erl_types:erl_type()],
+ forms = [] :: [{_, _}]}).
+
+%%--------------------------------------------------------------------
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
new file mode 100644
index 0000000000..97d63a1f14
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -0,0 +1,530 @@
+%% -*- erlang-indent-level: 2 -*-
+%%--------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_analysis_callgraph.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 5 Apr 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+
+-module(dialyzer_analysis_callgraph).
+
+-export([start/3]).
+
+-include("dialyzer.hrl").
+
+-record(analysis_state,
+ {
+ codeserver :: dialyzer_codeserver:codeserver(),
+ analysis_type = succ_typings :: anal_type(),
+ defines = [] :: [dial_define()],
+ doc_plt :: dialyzer_plt:plt(),
+ include_dirs = [] :: [file:filename()],
+ no_warn_unused :: set(),
+ parent :: pid(),
+ plt :: dialyzer_plt:plt(),
+ start_from = byte_code :: start_from(),
+ use_contracts = true :: boolean()
+ }).
+
+-record(server_state, {parent :: pid(), legal_warnings :: [dial_warn_tag()]}).
+
+%%--------------------------------------------------------------------
+%% Main
+%%--------------------------------------------------------------------
+
+-spec start(pid(), [dial_warn_tag()], #analysis{}) -> 'ok'.
+
+start(Parent, LegalWarnings, Analysis) ->
+ RacesOn = ordsets:is_element(?WARN_RACE_CONDITION, LegalWarnings),
+ Analysis0 = Analysis#analysis{race_detection = RacesOn},
+ Analysis1 = expand_files(Analysis0),
+ Analysis2 = run_analysis(Analysis1),
+ State = #server_state{parent = Parent, legal_warnings = LegalWarnings},
+ loop(State, Analysis2, none).
+
+run_analysis(Analysis) ->
+ Self = self(),
+ Fun = fun() -> analysis_start(Self, Analysis) end,
+ Analysis#analysis{analysis_pid = spawn_link(Fun)}.
+
+loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
+ #analysis{analysis_pid = AnalPid} = Analysis, ExtCalls) ->
+ receive
+ {AnalPid, log, LogMsg} ->
+ send_log(Parent, LogMsg),
+ loop(State, Analysis, ExtCalls);
+ {AnalPid, warnings, Warnings} ->
+ case filter_warnings(LegalWarnings, Warnings) of
+ [] -> ok;
+ SendWarnings ->
+ send_warnings(Parent, SendWarnings)
+ end,
+ loop(State, Analysis, ExtCalls);
+ {AnalPid, cserver, CServer, Plt} ->
+ send_codeserver_plt(Parent, CServer, Plt),
+ loop(State, Analysis, ExtCalls);
+ {AnalPid, done, Plt, DocPlt} ->
+ case ExtCalls =:= none of
+ true ->
+ send_analysis_done(Parent, Plt, DocPlt);
+ false ->
+ send_ext_calls(Parent, ExtCalls),
+ send_analysis_done(Parent, Plt, DocPlt)
+ end;
+ {AnalPid, ext_calls, NewExtCalls} ->
+ loop(State, Analysis, NewExtCalls);
+ {AnalPid, mod_deps, ModDeps} ->
+ send_mod_deps(Parent, ModDeps),
+ loop(State, Analysis, ExtCalls);
+ {Parent, stop} ->
+ exit(AnalPid, kill),
+ ok
+ end.
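A hedged sketch (not part of the patch) of the receiving side of the protocol implemented by loop/3 above: the message shapes are taken from the send_* helpers further down in this module, while collect/2 itself and its handling of each message are illustrative only.

    collect(BackendPid, Acc) ->
        receive
            {BackendPid, log, _LogMsg}         -> collect(BackendPid, Acc);
            {BackendPid, warnings, Warnings}   -> collect(BackendPid, Acc ++ Warnings);
            {BackendPid, ext_calls, _ExtCalls} -> collect(BackendPid, Acc);
            {BackendPid, mod_deps, _ModDeps}   -> collect(BackendPid, Acc);
            {BackendPid, cserver, _CS, _Plt}   -> collect(BackendPid, Acc);
            {BackendPid, done, _Plt, _DocPlt}  -> Acc   %% all warnings seen so far
        end.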
+
+%%--------------------------------------------------------------------
+%% The Analysis
+%%--------------------------------------------------------------------
+
+analysis_start(Parent, Analysis) ->
+ CServer = dialyzer_codeserver:new(),
+ Plt = Analysis#analysis.plt,
+ State = #analysis_state{codeserver = CServer,
+ analysis_type = Analysis#analysis.type,
+ defines = Analysis#analysis.defines,
+ doc_plt = Analysis#analysis.doc_plt,
+ include_dirs = Analysis#analysis.include_dirs,
+ plt = Plt,
+ parent = Parent,
+ start_from = Analysis#analysis.start_from,
+ use_contracts = Analysis#analysis.use_contracts
+ },
+ Files = ordsets:from_list(Analysis#analysis.files),
+ {Callgraph, NoWarn, TmpCServer0} = compile_and_store(Files, State),
+ %% Remote type postprocessing
+ NewCServer =
+ try
+ NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0),
+ OldRecords = dialyzer_plt:get_types(State#analysis_state.plt),
+ MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
+ TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
+ TmpCServer2 = dialyzer_utils:process_record_remote_types(TmpCServer1),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer2)
+ catch
+ throw:{error, _ErrorMsg} = Error -> exit(Error)
+ end,
+ NewPlt = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
+ State0 = State#analysis_state{plt = NewPlt},
+ dump_callgraph(Callgraph, State0, Analysis),
+ State1 = State0#analysis_state{codeserver = NewCServer},
+ State2 = State1#analysis_state{no_warn_unused = NoWarn},
+ %% Remove all old versions of the files being analyzed
+ AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
+ Plt1 = dialyzer_plt:delete_list(NewPlt, AllNodes),
+ Exports = dialyzer_codeserver:get_exports(NewCServer),
+ NewCallgraph =
+ case Analysis#analysis.race_detection of
+ true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
+ false -> Callgraph
+ end,
+ State3 = analyze_callgraph(NewCallgraph, State2#analysis_state{plt = Plt1}),
+ NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
+ NonExportsList = sets:to_list(NonExports),
+ Plt3 = dialyzer_plt:delete_list(State3#analysis_state.plt, NonExportsList),
+ Plt4 = dialyzer_plt:delete_contract_list(Plt3, NonExportsList),
+ send_codeserver_plt(Parent, CServer, State3#analysis_state.plt),
+ send_analysis_done(Parent, Plt4, State3#analysis_state.doc_plt).
+
+analyze_callgraph(Callgraph, State) ->
+ Plt = State#analysis_state.plt,
+ Codeserver = State#analysis_state.codeserver,
+ Parent = State#analysis_state.parent,
+ case State#analysis_state.analysis_type of
+ plt_build ->
+ Callgraph1 = dialyzer_callgraph:finalize(Callgraph),
+ NewPlt = dialyzer_succ_typings:analyze_callgraph(Callgraph1, Plt,
+ Codeserver, Parent),
+ dialyzer_callgraph:delete(Callgraph1),
+ State#analysis_state{plt = NewPlt};
+ succ_typings ->
+ NoWarn = State#analysis_state.no_warn_unused,
+ DocPlt = State#analysis_state.doc_plt,
+ Callgraph1 = dialyzer_callgraph:finalize(Callgraph),
+ {Warnings, NewPlt, NewDocPlt} =
+ dialyzer_succ_typings:get_warnings(Callgraph1, Plt, DocPlt,
+ Codeserver, NoWarn, Parent),
+ dialyzer_callgraph:delete(Callgraph1),
+ send_warnings(State#analysis_state.parent, Warnings),
+ State#analysis_state{plt = NewPlt, doc_plt = NewDocPlt}
+ end.
+
+%%--------------------------------------------------------------------
+%% Build the callgraph and fill the codeserver.
+%%--------------------------------------------------------------------
+
+compile_and_store(Files, #analysis_state{codeserver = CServer,
+ defines = Defs,
+ include_dirs = Dirs,
+ parent = Parent,
+ use_contracts = UseContracts,
+ start_from = StartFrom} = State) ->
+ send_log(Parent, "Reading files and computing callgraph... "),
+ {T1, _} = statistics(runtime),
+ Includes = [{i, D} || D <- Dirs],
+ Defines = [{d, Macro, Val} || {Macro, Val} <- Defs],
+ Callgraph = dialyzer_callgraph:new(),
+ Fun = case StartFrom of
+ src_code ->
+ fun(File, {TmpCG, TmpCServer, TmpFailed, TmpNoWarn, TmpMods}) ->
+ case compile_src(File, Includes, Defines, TmpCG,
+ TmpCServer, UseContracts) of
+ {error, Reason} ->
+ {TmpCG, TmpCServer, [{File, Reason}|TmpFailed], TmpNoWarn,
+ TmpMods};
+ {ok, NewCG, NoWarn, NewCServer, Mod} ->
+ {NewCG, NewCServer, TmpFailed, NoWarn++TmpNoWarn,
+ [Mod|TmpMods]}
+ end
+ end;
+ byte_code ->
+ fun(File, {TmpCG, TmpCServer, TmpFailed, TmpNoWarn, TmpMods}) ->
+ case compile_byte(File, TmpCG, TmpCServer, UseContracts) of
+ {error, Reason} ->
+ {TmpCG, TmpCServer, [{File, Reason}|TmpFailed], TmpNoWarn,
+ TmpMods};
+ {ok, NewCG, NoWarn, NewCServer, Mod} ->
+ {NewCG, NewCServer, TmpFailed, NoWarn++TmpNoWarn,
+ [Mod|TmpMods]}
+ end
+ end
+ end,
+ {NewCallgraph1, NewCServer, Failed, NoWarn, Modules} =
+ lists:foldl(Fun, {Callgraph, CServer, [], [], []}, Files),
+ case Failed =:= [] of
+ true ->
+ NewFiles = lists:zip(lists:reverse(Modules), Files),
+ ModDict =
+ lists:foldl(fun({Mod, F}, Dict) -> dict:append(Mod, F, Dict) end,
+ dict:new(), NewFiles),
+ check_for_duplicate_modules(ModDict);
+ false ->
+ Msg = io_lib:format("Could not scan the following file(s): ~p",
+ [lists:flatten(Failed)]),
+ exit({error, Msg})
+ end,
+ {T2, _} = statistics(runtime),
+ Msg1 = io_lib:format("done in ~.2f secs\nRemoving edges... ", [(T2-T1)/1000]),
+ send_log(Parent, Msg1),
+ NewCallgraph2 = cleanup_callgraph(State, NewCServer, NewCallgraph1, Modules),
+ {T3, _} = statistics(runtime),
+ Msg2 = io_lib:format("done in ~.2f secs\n", [(T3-T2)/1000]),
+ send_log(Parent, Msg2),
+ {NewCallgraph2, sets:from_list(NoWarn), NewCServer}.
+
+cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
+ codeserver = CodeServer},
+ CServer, Callgraph, Modules) ->
+ ModuleDeps = dialyzer_callgraph:module_deps(Callgraph),
+ send_mod_deps(Parent, ModuleDeps),
+ {Callgraph1, ExtCalls} = dialyzer_callgraph:remove_external(Callgraph),
+ ExtCalls1 = [Call || Call = {_From, To} <- ExtCalls,
+ not dialyzer_plt:contains_mfa(InitPlt, To)],
+ {BadCalls1, RealExtCalls} =
+ if ExtCalls1 =:= [] -> {[], []};
+ true ->
+ ModuleSet = sets:from_list(Modules),
+ lists:partition(fun({_From, {M, _F, _A}}) ->
+ sets:is_element(M, ModuleSet) orelse
+ dialyzer_plt:contains_module(InitPlt, M)
+ end, ExtCalls1)
+ end,
+ NonLocalCalls = dialyzer_callgraph:non_local_calls(Callgraph1),
+ BadCalls2 = [Call || Call = {_From, To} <- NonLocalCalls,
+ not dialyzer_codeserver:is_exported(To, CServer)],
+ case BadCalls1 ++ BadCalls2 of
+ [] -> ok;
+ BadCalls -> send_bad_calls(Parent, BadCalls, CodeServer)
+ end,
+ if RealExtCalls =:= [] -> ok;
+ true ->
+ send_ext_calls(Parent, lists:usort([To || {_From, To} <- RealExtCalls]))
+ end,
+ Callgraph1.
+
+compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts) ->
+ DefaultIncludes = default_includes(filename:dirname(File)),
+ SrcCompOpts = dialyzer_utils:src_compiler_opts(),
+ CompOpts = SrcCompOpts ++ Includes ++ Defines ++ DefaultIncludes,
+ case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of
+ {error, _Msg} = Error -> Error;
+ {ok, AbstrCode} ->
+ case dialyzer_utils:get_core_from_abstract_code(AbstrCode, CompOpts) of
+ error -> {error, " Could not find abstract code for: " ++ File};
+ {ok, Core} ->
+ Mod = cerl:concrete(cerl:module_name(Core)),
+ NoWarn = abs_get_nowarn(AbstrCode, Mod),
+ case dialyzer_utils:get_record_and_type_info(AbstrCode) of
+ {error, _} = Error -> Error;
+ {ok, RecInfo} ->
+ CServer2 =
+ dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
+ case UseContracts of
+ true ->
+ case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of
+ {error, _} = Error -> Error;
+ {ok, SpecInfo} ->
+ CServer3 =
+ dialyzer_codeserver:store_temp_contracts(Mod,
+ SpecInfo,
+ CServer2),
+ store_core(Mod, Core, NoWarn, Callgraph, CServer3)
+ end;
+ false ->
+ store_core(Mod, Core, NoWarn, Callgraph, CServer2)
+ end
+ end
+ end
+ end.
+
+compile_byte(File, Callgraph, CServer, UseContracts) ->
+ case dialyzer_utils:get_abstract_code_from_beam(File) of
+ error ->
+ {error, " Could not get abstract code for: " ++ File ++ "\n" ++
+ " Recompile with +debug_info or analyze starting from source code"};
+ {ok, AbstrCode} ->
+ case dialyzer_utils:get_core_from_abstract_code(AbstrCode) of
+ error -> {error, " Could not get core for: "++File};
+ {ok, Core} ->
+ Mod = cerl:concrete(cerl:module_name(Core)),
+ NoWarn = abs_get_nowarn(AbstrCode, Mod),
+ case dialyzer_utils:get_record_and_type_info(AbstrCode) of
+ {error, _} = Error -> Error;
+ {ok, RecInfo} ->
+ CServer1 =
+ dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
+ case UseContracts of
+ true ->
+ case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of
+ {error, _} = Error -> Error;
+ {ok, SpecInfo} ->
+ CServer2 =
+ dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo,
+ CServer1),
+ store_core(Mod, Core, NoWarn, Callgraph, CServer2)
+ end;
+ false ->
+ store_core(Mod, Core, NoWarn, Callgraph, CServer1)
+ end
+ end
+ end
+ end.
+
+store_core(Mod, Core, NoWarn, Callgraph, CServer) ->
+ Exp = get_exports_from_core(Core),
+ CServer1 = dialyzer_codeserver:insert_exports(Exp, CServer),
+ {LabeledCore, CServer2} = label_core(Core, CServer1),
+ store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer2, NoWarn).
+
+abs_get_nowarn(Abs, M) ->
+ [{M, F, A}
+ || {attribute, _, compile, {nowarn_unused_function, {F, A}}} <- Abs].
+
+get_exports_from_core(Core) ->
+ Tree = cerl:from_records(Core),
+ Exports1 = cerl:module_exports(Tree),
+ Exports2 = [cerl:var_name(V) || V <- Exports1],
+ M = cerl:atom_val(cerl:module_name(Tree)),
+ [{M, F, A} || {F, A} <- Exports2].
+
+label_core(Core, CServer) ->
+ NextLabel = dialyzer_codeserver:get_next_core_label(CServer),
+ CoreTree = cerl:from_records(Core),
+ {LabeledTree, NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
+ {cerl:to_records(LabeledTree),
+ dialyzer_codeserver:set_next_core_label(NewNextLabel, CServer)}.
+
+store_code_and_build_callgraph(Mod, Core, Callgraph, CServer, NoWarn) ->
+ CoreTree = cerl:from_records(Core),
+ NewCallgraph = dialyzer_callgraph:scan_core_tree(CoreTree, Callgraph),
+ CServer2 = dialyzer_codeserver:insert(Mod, CoreTree, CServer),
+ {ok, NewCallgraph, NoWarn, CServer2, Mod}.
+
+%%--------------------------------------------------------------------
+%% Utilities
+%%--------------------------------------------------------------------
+
+expand_files(Analysis = #analysis{files = Files, start_from = StartFrom}) ->
+ Ext = case StartFrom of
+ byte_code -> ".beam";
+ src_code -> ".erl"
+ end,
+ case expand_files(Files, Ext, []) of
+ [] ->
+ Msg = "No " ++ Ext ++ " files to analyze" ++
+ case StartFrom of
+ byte_code -> " (no --src specified?)";
+ src_code -> ""
+ end,
+ exit({error, Msg});
+ NewFiles ->
+ Analysis#analysis{files = NewFiles}
+ end.
+
+expand_files([File|Left], Ext, FileAcc) ->
+ case filelib:is_dir(File) of
+ true ->
+ {ok, List} = file:list_dir(File),
+ NewFiles =
+ [filename:join(File, X) || X <- List, filename:extension(X) =:= Ext],
+ expand_files(Left, Ext, NewFiles);
+ false ->
+ expand_files(Left, Ext, [File|FileAcc])
+ end;
+expand_files([], _Ext, FileAcc) ->
+ FileAcc.
+
+check_for_duplicate_modules(ModDict) ->
+ Duplicates = dict:filter(fun(_, [_]) -> false;
+ (_, _Files) -> true
+ end, ModDict),
+ case dict:size(Duplicates) =:= 0 of
+ true ->
+ ok;
+ false ->
+ Mods = [X || {_, X} <- dict:to_list(Duplicates)],
+ Msg = io_lib:format("Duplicate modules: ~p", [Mods]),
+ exit({error, Msg})
+ end.
+
+default_includes(Dir) ->
+ L1 = ["..", "../incl", "../inc", "../include"],
+ [{i, filename:join(Dir, X)} || X <- L1].
+
+%%-------------------------------------------------------------------
+%% Handle Messages
+%%-------------------------------------------------------------------
+
+send_log(Parent, Msg) ->
+ Parent ! {self(), log, Msg},
+ ok.
+
+send_warnings(_Parent, []) ->
+ ok;
+send_warnings(Parent, Warnings) ->
+ Parent ! {self(), warnings, Warnings},
+ ok.
+
+filter_warnings(LegalWarnings, Warnings) ->
+ [TIW || {Tag, _Id, _Warning} = TIW <- Warnings,
+ ordsets:is_element(Tag, LegalWarnings)].
+
+send_analysis_done(Parent, Plt, DocPlt) ->
+ Parent ! {self(), done, Plt, DocPlt},
+ ok.
+
+send_ext_calls(Parent, ExtCalls) ->
+ Parent ! {self(), ext_calls, ExtCalls},
+ ok.
+
+send_codeserver_plt(Parent, CServer, Plt) ->
+ Parent ! {self(), cserver, CServer, Plt},
+ ok.
+
+send_bad_calls(Parent, BadCalls, CodeServer) ->
+ send_warnings(Parent, format_bad_calls(BadCalls, CodeServer, [])).
+
+send_mod_deps(Parent, ModuleDeps) ->
+ Parent ! {self(), mod_deps, ModuleDeps},
+ ok.
+
+format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
+ when A =:= 0; A =:= 1 ->
+ format_bad_calls(Left, CodeServer, Acc);
+format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
+ {_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
+ Msg = {call_to_missing, [M, F, A]},
+ FileLine = find_call_file_and_line(FunCode, To),
+ NewAcc = [{?WARN_CALLGRAPH, FileLine, Msg}|Acc],
+ format_bad_calls(Left, CodeServer, NewAcc);
+format_bad_calls([], _CodeServer, Acc) ->
+ Acc.
+
+find_call_file_and_line(Tree, MFA) ->
+ Fun =
+ fun(SubTree, Acc) ->
+ case cerl:is_c_call(SubTree) of
+ true ->
+ M = cerl:call_module(SubTree),
+ F = cerl:call_name(SubTree),
+ A = cerl:call_arity(SubTree),
+ case cerl:is_c_atom(M) andalso cerl:is_c_atom(F) of
+ true ->
+ case {cerl:concrete(M), cerl:concrete(F), A} of
+ MFA ->
+ Ann = cerl:get_ann(SubTree),
+ [{get_file(Ann), get_line(Ann)}|Acc];
+ _ -> Acc
+ end;
+ false -> Acc
+ end;
+ false -> Acc
+ end
+ end,
+ hd(cerl_trees:fold(Fun, [], Tree)).
+
+get_line([Line|_]) when is_integer(Line) -> Line;
+get_line([_|Tail]) -> get_line(Tail);
+get_line([]) -> -1.
+
+get_file([{file, File}|_]) -> File;
+get_file([_|Tail]) -> get_file(Tail).
+
+-spec dump_callgraph(dialyzer_callgraph:callgraph(), #analysis_state{}, #analysis{}) ->
+ 'ok'.
+
+dump_callgraph(_CallGraph, _State, #analysis{callgraph_file = ""}) -> ok;
+dump_callgraph(CallGraph, State, #analysis{callgraph_file = File} = Analysis) ->
+ Extension = filename:extension(File),
+ Start_Msg = io_lib:format("Dumping the callgraph... ", []),
+ send_log(State#analysis_state.parent, Start_Msg),
+ {T1, _} = statistics(runtime),
+ dump_callgraph(CallGraph, State, Analysis, Extension),
+ {T2, _} = statistics(runtime),
+  Finish_Msg = io_lib:format("done in ~.2f secs\n", [(T2-T1)/1000]),
+ send_log(State#analysis_state.parent, Finish_Msg),
+ ok.
+
+dump_callgraph(CallGraph, _State, #analysis{callgraph_file = File}, ".dot") ->
+ dialyzer_callgraph:to_dot(CallGraph, File);
+dump_callgraph(CallGraph, _State, #analysis{callgraph_file = File}, ".ps") ->
+ Args = "-Gratio=compress -Gsize=\"100,100\"",
+ dialyzer_callgraph:to_ps(CallGraph, File, Args);
+dump_callgraph(CallGraph, State, #analysis{callgraph_file = File}, _Ext) ->
+ case file:open(File, [write]) of
+ {ok, Fd} ->
+ io:format(Fd, "~p", [CallGraph]),
+ ok = file:close(Fd);
+ {error, Reason} ->
+ Msg = io_lib:format("Could not open output file ~p, Reason: ~p\n",
+ [File, Reason]),
+ send_log(State#analysis_state.parent, Msg)
+ end.
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
new file mode 100644
index 0000000000..21d31df71c
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -0,0 +1,697 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_callgraph.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 30 Mar 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_callgraph).
+
+-export([all_nodes/1,
+ delete/1,
+ finalize/1,
+ is_escaping/2,
+ is_self_rec/2,
+ non_local_calls/1,
+ lookup_rec_var/2,
+ lookup_call_site/2,
+ lookup_label/2,
+ lookup_name/2,
+ modules/1,
+ module_deps/1,
+ %% module_postorder/1,
+ module_postorder_from_funs/2,
+ new/0,
+ in_neighbours/2,
+ renew_race_info/4,
+ reset_from_funs/2,
+ scan_core_tree/2,
+ strip_module_deps/2,
+ take_scc/1,
+ remove_external/1,
+ to_dot/2,
+ to_ps/3]).
+
+-export([cleanup/1, get_digraph/1, get_named_tables/1, get_public_tables/1,
+ get_race_code/1, get_race_detection/1, race_code_new/1,
+ put_race_code/2, put_race_detection/2, put_named_tables/2,
+ put_public_tables/2]).
+
+-include("dialyzer.hrl").
+
+%%----------------------------------------------------------------------
+
+-type mfa_or_funlbl() :: label() | mfa().
+-type scc() :: [mfa_or_funlbl()].
+-type mfa_calls() :: [{mfa_or_funlbl(), mfa_or_funlbl()}].
+
+%%-----------------------------------------------------------------------------
+%% A callgraph is a directed graph where the nodes are functions and a
+%% call between two functions is an edge from the caller to the callee.
+%%
+%% calls - A mapping from call site (and apply site) labels
+%% to the possible functions that can be called.
+%% digraph - A digraph representing the callgraph.
+%% Nodes are represented as MFAs or labels.
+%% esc - A set of all escaping functions as reported by dialyzer_dep.
+%% postorder - A list of strongly connected components of the callgraph
+%% sorted in a topological bottom-up order.
+%% This is produced by calling finalize/1.
+%% name_map - A mapping from label to MFA.
+%% rev_name_map - A reverse mapping of the name_map.
+%% rec_var_map - A dict mapping from letrec bound labels to function names.
+%% Only for top level functions (from module defs).
+%% self_rec - A set containing all self recursive functions.
+%% Note that this contains MFAs for named functions and labels
+%% whenever applicable.
+%%-----------------------------------------------------------------------------
+
+-record(callgraph, {digraph = digraph:new() :: digraph(),
+ esc = sets:new() :: set(),
+ name_map = dict:new() :: dict(),
+ rev_name_map = dict:new() :: dict(),
+ postorder = [] :: [scc()],
+ rec_var_map = dict:new() :: dict(),
+ self_rec = sets:new() :: set(),
+ calls = dict:new() :: dict(),
+ race_code = dict:new() :: dict(),
+ public_tables = [] :: [label()],
+ named_tables = [] :: [string()],
+ race_detection = false :: boolean()}).
+
+%% Exported Types
+
+-type callgraph() :: #callgraph{}.
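To illustrate the orientation documented in the comment block above (not part of the patch): edges run from caller to callee, so the postorder of the condensed digraph yields callees before callers, which is the bottom-up order finalize/1 relies on. The module and function names here are invented.

    toy_postorder() ->
        DG = digraph:new(),
        [digraph:add_vertex(DG, V) || V <- [{m,f,1}, {m,g,0}, {m,h,0}]],
        digraph:add_edge(DG, {m,f,1}, {m,g,0}),   % f/1 calls g/0
        digraph:add_edge(DG, {m,g,0}, {m,h,0}),   % g/0 calls h/0
        Condensed = digraph_utils:condensation(DG),
        PO = digraph_utils:postorder(Condensed),  % [[{m,h,0}],[{m,g,0}],[{m,f,1}]]
        digraph:delete(Condensed),
        digraph:delete(DG),
        PO.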
+
+%%----------------------------------------------------------------------
+
+-spec new() -> callgraph().
+
+new() ->
+ #callgraph{}.
+
+-spec delete(callgraph()) -> 'true'.
+
+delete(#callgraph{digraph = Digraph}) ->
+ digraph_delete(Digraph).
+
+-spec all_nodes(callgraph()) -> [mfa()].
+
+all_nodes(#callgraph{digraph = DG}) ->
+ digraph_vertices(DG).
+
+-spec lookup_rec_var(label(), callgraph()) -> 'error' | {'ok', mfa()}.
+
+lookup_rec_var(Label, #callgraph{rec_var_map = RecVarMap})
+ when is_integer(Label) ->
+ dict:find(Label, RecVarMap).
+
+-spec lookup_call_site(label(), callgraph()) -> 'error' | {'ok', [_]}. % XXX: refine
+
+lookup_call_site(Label, #callgraph{calls = Calls})
+ when is_integer(Label) ->
+ dict:find(Label, Calls).
+
+-spec lookup_name(label(), callgraph()) -> 'error' | {'ok', mfa()}.
+
+lookup_name(Label, #callgraph{name_map = NameMap})
+ when is_integer(Label) ->
+ dict:find(Label, NameMap).
+
+-spec lookup_label(mfa_or_funlbl(), callgraph()) -> 'error' | {'ok', integer()}.
+
+lookup_label({_,_,_} = MFA, #callgraph{rev_name_map = RevNameMap}) ->
+ dict:find(MFA, RevNameMap);
+lookup_label(Label, #callgraph{}) when is_integer(Label) ->
+ {ok, Label}.
+
+-spec in_neighbours(mfa_or_funlbl(), callgraph()) -> 'none' | [mfa_or_funlbl(),...].
+
+in_neighbours(Label, #callgraph{digraph = Digraph, name_map = NameMap})
+ when is_integer(Label) ->
+ Name = case dict:find(Label, NameMap) of
+ {ok, Val} -> Val;
+ error -> Label
+ end,
+ digraph_in_neighbours(Name, Digraph);
+in_neighbours({_, _, _} = MFA, #callgraph{digraph = Digraph}) ->
+ digraph_in_neighbours(MFA, Digraph).
+
+-spec is_self_rec(mfa_or_funlbl(), callgraph()) -> boolean().
+
+is_self_rec(MfaOrLabel, #callgraph{self_rec = SelfRecs}) ->
+ sets:is_element(MfaOrLabel, SelfRecs).
+
+-spec is_escaping(label(), callgraph()) -> boolean().
+
+is_escaping(Label, #callgraph{esc = Esc}) when is_integer(Label) ->
+ sets:is_element(Label, Esc).
+
+-type callgraph_edge() :: {mfa_or_funlbl(),mfa_or_funlbl()}.
+-spec add_edges([callgraph_edge()], callgraph()) -> callgraph().
+
+add_edges([], CG) ->
+ CG;
+add_edges(Edges, #callgraph{digraph = Callgraph} = CG) ->
+ CG#callgraph{digraph = digraph_add_edges(Edges, Callgraph)}.
+
+-spec add_edges([callgraph_edge()], [mfa_or_funlbl()], callgraph()) -> callgraph().
+
+add_edges(Edges, MFAs, #callgraph{digraph = DG} = CG) ->
+ DG1 = digraph_confirm_vertices(MFAs, DG),
+ add_edges(Edges, CG#callgraph{digraph = DG1}).
+
+-spec take_scc(callgraph()) -> 'none' | {'ok', scc(), callgraph()}.
+
+take_scc(#callgraph{postorder = [SCC|SCCs]} = CG) ->
+ {ok, SCC, CG#callgraph{postorder = SCCs}};
+take_scc(#callgraph{postorder = []}) ->
+ none.
+
+-spec remove_external(callgraph()) -> {callgraph(), [tuple()]}.
+
+remove_external(#callgraph{digraph = DG} = CG) ->
+ {NewDG, External} = digraph_remove_external(DG),
+ {CG#callgraph{digraph = NewDG}, External}.
+
+-spec non_local_calls(callgraph()) -> mfa_calls().
+
+non_local_calls(#callgraph{digraph = DG}) ->
+ Edges = digraph_edges(DG),
+ find_non_local_calls(Edges, sets:new()).
+
+-spec find_non_local_calls([{mfa_or_funlbl(), mfa_or_funlbl()}], set()) -> mfa_calls().
+
+find_non_local_calls([{{M,_,_}, {M,_,_}}|Left], Set) ->
+ find_non_local_calls(Left, Set);
+find_non_local_calls([{{M1,_,_}, {M2,_,_}} = Edge|Left], Set) when M1 =/= M2 ->
+ find_non_local_calls(Left, sets:add_element(Edge, Set));
+find_non_local_calls([{{_,_,_}, Label}|Left], Set) when is_integer(Label) ->
+ find_non_local_calls(Left, Set);
+find_non_local_calls([{Label, {_,_,_}}|Left], Set) when is_integer(Label) ->
+ find_non_local_calls(Left, Set);
+find_non_local_calls([{Label1, Label2}|Left], Set) when is_integer(Label1),
+ is_integer(Label2) ->
+ find_non_local_calls(Left, Set);
+find_non_local_calls([], Set) ->
+ sets:to_list(Set).
+
+-spec renew_race_info(callgraph(), dict(), [label()], [string()]) ->
+ callgraph().
+
+renew_race_info(CG, RaceCode, PublicTables, NamedTables) ->
+ CG#callgraph{race_code = RaceCode,
+ public_tables = PublicTables,
+ named_tables = NamedTables}.
+
+%%----------------------------------------------------------------------
+%% Handling of modules & SCCs
+%%----------------------------------------------------------------------
+
+-spec modules(callgraph()) -> [module()].
+
+modules(#callgraph{digraph = DG}) ->
+ ordsets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]).
+
+-spec module_postorder(callgraph()) -> [[module()]].
+
+module_postorder(#callgraph{digraph = DG}) ->
+ Edges = digraph_edges(DG),
+ Nodes = ordsets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
+ MDG = digraph:new(),
+ MDG1 = digraph_confirm_vertices(Nodes, MDG),
+ MDG2 = create_module_digraph(Edges, MDG1),
+ MDG3 = digraph_utils:condensation(MDG2),
+ PostOrder = digraph_utils:postorder(MDG3),
+ PostOrder1 = sort_sccs_internally(PostOrder, MDG2),
+ digraph:delete(MDG2),
+ digraph_delete(MDG3),
+ PostOrder1.
+
+%% The module deps of a module are modules that depend on the module
+-spec module_deps(callgraph()) -> dict().
+
+module_deps(#callgraph{digraph = DG}) ->
+ Edges = digraph_edges(DG),
+ Nodes = ordsets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
+ MDG = digraph:new(),
+ MDG1 = digraph_confirm_vertices(Nodes, MDG),
+ MDG2 = create_module_digraph(Edges, MDG1),
+ Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG2, N))}
+ || N <- Nodes],
+ digraph_delete(MDG2),
+ dict:from_list(Deps).
+
+-spec strip_module_deps(dict(), set()) -> dict().
+
+strip_module_deps(ModDeps, StripSet) ->
+ FilterFun1 = fun(Val) -> not sets:is_element(Val, StripSet) end,
+ MapFun = fun(_Key, ValSet) -> ordsets:filter(FilterFun1, ValSet) end,
+ ModDeps1 = dict:map(MapFun, ModDeps),
+ FilterFun2 = fun(_Key, ValSet) -> ValSet =/= [] end,
+ dict:filter(FilterFun2, ModDeps1).
+
+sort_sccs_internally(PO, MDG) ->
+ sort_sccs_internally(PO, MDG, []).
+
+sort_sccs_internally([SCC|SCCs], MDG, Acc) ->
+ case SCC of
+ [_, _, _ | _] -> % length(SCC) >= 3
+ TmpDG = digraph_utils:subgraph(MDG, SCC),
+ NewSCC = digraph_utils:postorder(TmpDG),
+ digraph_delete(TmpDG),
+ sort_sccs_internally(SCCs, MDG, [NewSCC|Acc]);
+ _ ->
+ sort_sccs_internally(SCCs, MDG, [SCC|Acc])
+ end;
+sort_sccs_internally([], _MDG, Acc) ->
+ lists:reverse(Acc).
+
+create_module_digraph([{{M, _, _}, {M, _, _}}|Left], MDG) ->
+ create_module_digraph(Left, MDG);
+create_module_digraph([{{M1, _, _}, {M2, _, _}}|Left], MDG) ->
+ create_module_digraph(Left, digraph_add_edge(M1, M2, MDG));
+create_module_digraph([{_, _}|Left], MDG) ->
+ create_module_digraph(Left, MDG);
+create_module_digraph([], MDG) ->
+ MDG.
+
+-spec finalize(callgraph()) -> callgraph().
+
+finalize(#callgraph{digraph = DG} = CG) ->
+ CG#callgraph{postorder = digraph_finalize(DG)}.
+
+-spec reset_from_funs([mfa_or_funlbl()], callgraph()) -> callgraph().
+
+reset_from_funs(Funs, #callgraph{digraph = DG} = CG) ->
+ SubGraph = digraph_reaching_subgraph(Funs, DG),
+ Postorder = digraph_finalize(SubGraph),
+ digraph_delete(SubGraph),
+ CG#callgraph{postorder = Postorder}.
+
+-spec module_postorder_from_funs([mfa_or_funlbl()], callgraph()) -> [[module()]].
+
+module_postorder_from_funs(Funs, #callgraph{digraph = DG} = CG) ->
+ SubGraph = digraph_reaching_subgraph(Funs, DG),
+ PO = module_postorder(CG#callgraph{digraph = SubGraph}),
+ digraph_delete(SubGraph),
+ PO.
+
+%%----------------------------------------------------------------------
+%% Core code
+%%----------------------------------------------------------------------
+
+%% The core tree must be labeled as by cerl_trees:label/1 (or /2).
+%% The set of labels in the tree must be disjoint from the set of
+%% labels already occurring in the callgraph.
+
+-spec scan_core_tree(cerl:c_module(), callgraph()) -> callgraph().
+
+scan_core_tree(Tree, #callgraph{calls = OldCalls,
+ esc = OldEsc,
+ name_map = OldNameMap,
+ rec_var_map = OldRecVarMap,
+ rev_name_map = OldRevNameMap,
+ self_rec = OldSelfRec} = CG) ->
+ %% Build name map and recursion variable maps.
+ {NewNameMap, NewRevNameMap, NewRecVarMap} =
+ build_maps(Tree, OldRecVarMap, OldNameMap, OldRevNameMap),
+
+ %% First find the module-local dependencies.
+ {Deps0, EscapingFuns, Calls} = dialyzer_dep:analyze(Tree),
+ NewCalls = dict:merge(fun(_Key, Val, Val) -> Val end, OldCalls, Calls),
+ NewEsc = sets:union(sets:from_list(EscapingFuns), OldEsc),
+ LabelEdges = get_edges_from_deps(Deps0),
+
+ %% Find the self recursive functions. Named functions get both the
+ %% key and their name for convenience.
+ SelfRecs0 = lists:foldl(fun({Key, Key}, Acc) ->
+ case dict:find(Key, NewNameMap) of
+ error -> [Key|Acc];
+ {ok, Name} -> [Key, Name|Acc]
+ end;
+ (_, Acc) -> Acc
+ end, [], LabelEdges),
+ SelfRecs = sets:union(sets:from_list(SelfRecs0), OldSelfRec),
+
+ NamedEdges1 = name_edges(LabelEdges, NewNameMap),
+
+ %% We need to scan for inter-module calls since these are not tracked
+ %% by dialyzer_dep. Note that the caller is always recorded as the
+ %% top level function. This is OK since the included functions are
+ %% stored as scc with the parent.
+ NamedEdges2 = scan_core_funs(Tree),
+
+ %% Confirm all nodes in the tree.
+ Names1 = lists:append([[X, Y] || {X, Y} <- NamedEdges1]),
+ Names2 = ordsets:from_list(Names1),
+
+ %% Get rid of the 'top' function from nodes and edges.
+ Names3 = ordsets:del_element(top, Names2),
+ NewNamedEdges2 =
+ [E || {From, To} = E <- NamedEdges2, From =/= top, To =/= top],
+ NewNamedEdges1 =
+ [E || {From, To} = E <- NamedEdges1, From =/= top, To =/= top],
+ NamedEdges3 = NewNamedEdges1 ++ NewNamedEdges2,
+ CG1 = add_edges(NamedEdges3, Names3, CG),
+ CG1#callgraph{calls = NewCalls,
+ esc = NewEsc,
+ name_map = NewNameMap,
+ rec_var_map = NewRecVarMap,
+ rev_name_map = NewRevNameMap,
+ self_rec = SelfRecs}.
+
+build_maps(Tree, RecVarMap, NameMap, RevNameMap) ->
+ %% We only care about the named (top level) functions. The anonymous
+ %% functions will be analysed together with their parents.
+ Defs = cerl:module_defs(Tree),
+ Mod = cerl:atom_val(cerl:module_name(Tree)),
+ lists:foldl(fun({Var, Function}, {AccNameMap, AccRevNameMap, AccRecVarMap}) ->
+ FunName = cerl:fname_id(Var),
+ Arity = cerl:fname_arity(Var),
+ MFA = {Mod, FunName, Arity},
+ {dict:store(get_label(Function), MFA, AccNameMap),
+ dict:store(MFA, get_label(Function), AccRevNameMap),
+ dict:store(get_label(Var), MFA, AccRecVarMap)}
+ end, {NameMap, RevNameMap, RecVarMap}, Defs).
+
+get_edges_from_deps(Deps) ->
+ %% Convert the dependencies as produced by dialyzer_dep to a list of
+ %% edges. Also, remove 'external' since we are not interested in
+ %% this information.
+ Edges = dict:fold(fun(external, _Set, Acc) -> Acc;
+ (Caller, Set, Acc) ->
+ [[{Caller, Callee} || Callee <- Set,
+ Callee =/= external]|Acc]
+ end, [], Deps),
+ lists:flatten(Edges).
+
+name_edges(Edges, NameMap) ->
+ %% If a label is present in the name map it is renamed. Otherwise
+ %% keep the label as the identity.
+ MapFun = fun(X) ->
+ case dict:find(X, NameMap) of
+ error -> X;
+ {ok, MFA} -> MFA
+ end
+ end,
+ name_edges(Edges, MapFun, NameMap, []).
+
+name_edges([{From, To}|Left], MapFun, NameMap, Acc) ->
+ NewFrom = MapFun(From),
+ NewTo = MapFun(To),
+ name_edges(Left, MapFun, NameMap, [{NewFrom, NewTo}|Acc]);
+name_edges([], _MapFun, _NameMap, Acc) ->
+ Acc.
+
+scan_core_funs(Tree) ->
+ Defs = cerl:module_defs(Tree),
+ Mod = cerl:atom_val(cerl:module_name(Tree)),
+ DeepEdges = lists:foldl(fun({Var, Function}, Edges) ->
+ FunName = cerl:fname_id(Var),
+ Arity = cerl:fname_arity(Var),
+ MFA = {Mod, FunName, Arity},
+ [scan_one_core_fun(Function, MFA)|Edges]
+ end, [], Defs),
+ lists:flatten(DeepEdges).
+
+scan_one_core_fun(TopTree, FunName) ->
+ FoldFun = fun(Tree, Acc) ->
+ case cerl:type(Tree) of
+ call ->
+ CalleeM = cerl:call_module(Tree),
+ CalleeF = cerl:call_name(Tree),
+ A = length(cerl:call_args(Tree)),
+ case (cerl:is_c_atom(CalleeM) andalso
+ cerl:is_c_atom(CalleeF)) of
+ true ->
+ M = cerl:atom_val(CalleeM),
+ F = cerl:atom_val(CalleeF),
+ case erl_bif_types:is_known(M, F, A) of
+ true -> Acc;
+ false -> [{FunName, {M, F, A}}|Acc]
+ end;
+ false ->
+ %% We cannot handle run-time bindings
+ Acc
+ end;
+ _ ->
+ %% Nothing that can introduce new edges in the callgraph.
+ Acc
+ end
+ end,
+ cerl_trees:fold(FoldFun, [], TopTree).
+
+get_label(T) ->
+ case cerl:get_ann(T) of
+ [{label, L} | _] when is_integer(L) -> L;
+ _ -> erlang:error({missing_label, T})
+ end.
+
+%%----------------------------------------------------------------------
+%% Digraph
+%%----------------------------------------------------------------------
+
+digraph_add_edges([{From, To}|Left], DG) ->
+ digraph_add_edges(Left, digraph_add_edge(From, To, DG));
+digraph_add_edges([], DG) ->
+ DG.
+
+digraph_add_edge(From, To, DG) ->
+ case digraph:vertex(DG, From) of
+ false -> digraph:add_vertex(DG, From);
+ {From, _} -> ok
+ end,
+ case digraph:vertex(DG, To) of
+ false -> digraph:add_vertex(DG, To);
+ {To, _} -> ok
+ end,
+ digraph:add_edge(DG, {From, To}, From, To, []),
+ DG.
+
+digraph_confirm_vertices([MFA|Left], DG) ->
+ digraph:add_vertex(DG, MFA, confirmed),
+ digraph_confirm_vertices(Left, DG);
+digraph_confirm_vertices([], DG) ->
+ DG.
+
+digraph_remove_external(DG) ->
+ Vertices = digraph:vertices(DG),
+ Unconfirmed = remove_unconfirmed(Vertices, DG),
+ {DG, Unconfirmed}.
+
+remove_unconfirmed(Vertexes, DG) ->
+ remove_unconfirmed(Vertexes, DG, []).
+
+remove_unconfirmed([V|Left], DG, Unconfirmed) ->
+ case digraph:vertex(DG, V) of
+ {V, confirmed} -> remove_unconfirmed(Left, DG, Unconfirmed);
+ {V, []} -> remove_unconfirmed(Left, DG, [V|Unconfirmed])
+ end;
+remove_unconfirmed([], DG, Unconfirmed) ->
+ BadCalls = lists:append([digraph:in_edges(DG, V) || V <- Unconfirmed]),
+ BadCallsSorted = lists:keysort(1, BadCalls),
+ digraph:del_vertices(DG, Unconfirmed),
+ BadCallsSorted.
+
+digraph_delete(DG) ->
+ digraph:delete(DG).
+
+digraph_edges(DG) ->
+ digraph:edges(DG).
+
+digraph_vertices(DG) ->
+ digraph:vertices(DG).
+
+digraph_in_neighbours(V, DG) ->
+ case digraph:in_neighbours(DG, V) of
+ [] -> none;
+ List -> List
+ end.
+
+%% Pick all the independent nodes (leaves) from one module. Then try
+%% to stay within the module until no more independent nodes can be
+%% chosen. Then pick a new module and so on.
+%%
+%% Note that an SCC that ranges over more than one module is
+%% considered to belong to all modules to make sure that we do not
+%% lose any nodes.
+
+digraph_postorder(Digraph) ->
+ %% Remove all self-edges for SCCs.
+ Edges = [digraph:edge(Digraph, E) || E <- digraph:edges(Digraph)],
+ SelfEdges = [E || {E, V, V, _} <- Edges],
+ true = digraph:del_edges(Digraph, SelfEdges),
+ %% Determine the first module outside of the loop.
+ Leaves = digraph_leaves(Digraph),
+ case Leaves =:= [] of
+ true -> [];
+ false ->
+ {Module, Taken} = take_sccs_from_fresh_module(Leaves),
+ true = digraph:del_vertices(Digraph, Taken),
+ digraph_postorder(Digraph, Module, [Taken])
+ end.
+
+digraph_postorder(Digraph, LastModule, Acc) ->
+ Leaves = digraph_leaves(Digraph),
+ case Leaves =:= [] of
+ true -> lists:append(lists:reverse(Acc));
+ false ->
+ case [SCC || SCC <- Leaves, scc_belongs_to_module(SCC, LastModule)] of
+ [] ->
+ {NewModule, NewTaken} = take_sccs_from_fresh_module(Leaves),
+ true = digraph:del_vertices(Digraph, NewTaken),
+ digraph_postorder(Digraph, NewModule, [NewTaken|Acc]);
+ NewTaken ->
+ true = digraph:del_vertices(Digraph, NewTaken),
+ digraph_postorder(Digraph, LastModule, [NewTaken|Acc])
+ end
+ end.
+
+digraph_leaves(Digraph) ->
+ [V || V <- digraph:vertices(Digraph), digraph:out_degree(Digraph, V) =:= 0].
+
+take_sccs_from_fresh_module(Leaves) ->
+ NewModule = find_module(hd(Leaves)),
+ {NewModule,
+ [SCC || SCC <- Leaves, scc_belongs_to_module(SCC, NewModule)]}.
+
+-spec scc_belongs_to_module(scc(), module()) -> boolean().
+
+scc_belongs_to_module([Label|Left], Module) when is_integer(Label) ->
+ scc_belongs_to_module(Left, Module);
+scc_belongs_to_module([{M, _, _}|Left], Module) ->
+ if M =:= Module -> true;
+ true -> scc_belongs_to_module(Left, Module)
+ end;
+scc_belongs_to_module([], _Module) ->
+ false.
+
+-spec find_module(scc()) -> module().
+
+find_module([{M, _, _}|_]) -> M;
+find_module([Label|Left]) when is_integer(Label) -> find_module(Left).
+
+digraph_finalize(DG) ->
+ DG1 = digraph_utils:condensation(DG),
+ Postorder = digraph_postorder(DG1),
+ digraph:delete(DG1),
+ Postorder.
+
+digraph_reaching_subgraph(Funs, DG) ->
+ Vertices = digraph_utils:reaching(Funs, DG),
+ digraph_utils:subgraph(DG, Vertices).
+
+%%----------------------------------------------------------------------
+%% Races
+%%----------------------------------------------------------------------
+
+-spec cleanup(callgraph()) -> callgraph().
+
+cleanup(#callgraph{name_map = NameMap,
+ rev_name_map = RevNameMap,
+ public_tables = PublicTables,
+ named_tables = NamedTables,
+ race_code = RaceCode}) ->
+ #callgraph{name_map = NameMap,
+ rev_name_map = RevNameMap,
+ public_tables = PublicTables,
+ named_tables = NamedTables,
+ race_code = RaceCode}.
+
+-spec get_digraph(callgraph()) -> digraph().
+
+get_digraph(#callgraph{digraph = Digraph}) ->
+ Digraph.
+
+-spec get_named_tables(callgraph()) -> [string()].
+
+get_named_tables(#callgraph{named_tables = NamedTables}) ->
+ NamedTables.
+
+-spec get_public_tables(callgraph()) -> [label()].
+
+get_public_tables(#callgraph{public_tables = PT}) ->
+ PT.
+
+-spec get_race_code(callgraph()) -> dict().
+
+get_race_code(#callgraph{race_code = RaceCode}) ->
+ RaceCode.
+
+-spec get_race_detection(callgraph()) -> boolean().
+
+get_race_detection(#callgraph{race_detection = RD}) ->
+ RD.
+
+-spec race_code_new(callgraph()) -> callgraph().
+
+race_code_new(Callgraph) ->
+ Callgraph#callgraph{race_code = dict:new()}.
+
+-spec put_race_code(dict(), callgraph()) -> callgraph().
+
+put_race_code(RaceCode, Callgraph) ->
+ Callgraph#callgraph{race_code = RaceCode}.
+
+-spec put_race_detection(boolean(), callgraph()) -> callgraph().
+
+put_race_detection(RaceDetection, Callgraph) ->
+ Callgraph#callgraph{race_detection = RaceDetection}.
+
+-spec put_named_tables([string()], callgraph()) -> callgraph().
+
+put_named_tables(NamedTables, Callgraph) ->
+ Callgraph#callgraph{named_tables = NamedTables}.
+
+-spec put_public_tables([label()], callgraph()) -> callgraph().
+
+put_public_tables(PublicTables, Callgraph) ->
+ Callgraph#callgraph{public_tables = PublicTables}.
+
+%%=============================================================================
+%% Utilities for 'dot'
+%%=============================================================================
+
+-spec to_dot(callgraph(), file:filename()) -> 'ok'.
+
+to_dot(#callgraph{digraph = DG, esc = Esc} = CG, File) ->
+ Fun = fun(L) ->
+ case lookup_name(L, CG) of
+ error -> L;
+ {ok, Name} -> Name
+ end
+ end,
+ Escaping = [{Fun(L), {color, red}}
+ || L <- sets:to_list(Esc), L =/= external],
+ Edges = digraph_edges(DG),
+ hipe_dot:translate_list(Edges, File, "CG", Escaping).
+
+-spec to_ps(callgraph(), file:filename(), string()) -> 'ok'.
+
+to_ps(#callgraph{} = CG, File, Args) ->
+ Dot_File = filename:rootname(File) ++ ".dot",
+ to_dot(CG, Dot_File),
+ Command = io_lib:format("dot -Tps ~s -o ~s ~s", [Args, File, Dot_File]),
+ _ = os:cmd(Command),
+ ok.
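+
+%% Illustrative usage (comments only, hypothetical file names): to_ps/3
+%% first writes a Graphviz .dot file next to the requested output and then
+%% shells out to 'dot', which therefore must be available in the PATH:
+%%
+%%   to_ps(CG, "callgraph.ps", "")
+%%   %% writes "callgraph.dot" and runs roughly
+%%   %%   dot -Tps  -o callgraph.ps callgraph.dot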
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
new file mode 100644
index 0000000000..ab56a4e6d3
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -0,0 +1,717 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_cl.erl
+%%% Authors : Tobias Lindahl <[email protected]>
+%%% Kostis Sagonas <[email protected]>
+%%% Description : The command line interface for the Dialyzer tool.
+%%%
+%%% Created : 27 Apr 2004 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+
+-module(dialyzer_cl).
+
+-export([start/1]).
+
+-include("dialyzer.hrl").
+-include_lib("kernel/include/file.hrl"). % needed for #file_info{}
+
+-record(cl_state,
+ {backend_pid :: pid(),
+ erlang_mode = false :: boolean(),
+ external_calls = [] :: [mfa()],
+ legal_warnings = ordsets:new() :: [dial_warn_tag()],
+ mod_deps = dict:new() :: dict(),
+ output = standard_io :: io:device(),
+ output_format = formatted :: 'raw' | 'formatted',
+ output_plt = none :: 'none' | file:filename(),
+ plt_info = none :: 'none' | dialyzer_plt:plt_info(),
+ report_mode = normal :: rep_mode(),
+ return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(),
+ stored_warnings = [] :: [dial_warning()]
+ }).
+
+%%--------------------------------------------------------------------
+
+-spec start(#options{}) -> {dial_ret(), [dial_warning()]}.
+
+start(#options{analysis_type = AnalysisType} = Options) ->
+ process_flag(trap_exit, true),
+ case AnalysisType of
+ plt_check -> check_plt(Options);
+ plt_build -> build_plt(Options);
+ plt_add -> add_to_plt(Options);
+ plt_remove -> remove_from_plt(Options);
+ succ_typings -> do_analysis(Options)
+ end.
+
+%%--------------------------------------------------------------------
+
+build_plt(Opts) ->
+ Opts1 = init_opts_for_build(Opts),
+ Files = get_files_from_opts(Opts1),
+ Md5 = dialyzer_plt:compute_md5_from_files(Files),
+ PltInfo = {Md5, dict:new()},
+ do_analysis(Files, Opts1, dialyzer_plt:new(), PltInfo).
+
+init_opts_for_build(Opts) ->
+ case Opts#options.output_plt =:= none of
+ true ->
+ case Opts#options.init_plt of
+ none -> Opts#options{init_plt = none, output_plt = get_default_plt()};
+ Plt -> Opts#options{init_plt = none, output_plt = Plt}
+ end;
+ false -> Opts#options{init_plt = none}
+ end.
+
+%%--------------------------------------------------------------------
+
+add_to_plt(Opts) ->
+ Opts1 = init_opts_for_add(Opts),
+ AddFiles = get_files_from_opts(Opts1),
+ plt_common(Opts1, [], AddFiles).
+
+init_opts_for_add(Opts) ->
+ case Opts#options.output_plt =:= none of
+ true ->
+ case Opts#options.init_plt of
+ none -> Opts#options{output_plt = get_default_plt(),
+ init_plt = get_default_plt()};
+ Plt -> Opts#options{output_plt = Plt}
+ end;
+ false ->
+ case Opts#options.init_plt =:= none of
+ true -> Opts#options{init_plt = get_default_plt()};
+ false -> Opts
+ end
+ end.
+
+%%--------------------------------------------------------------------
+
+check_plt(Opts) ->
+ Opts1 = init_opts_for_check(Opts),
+ report_check(Opts),
+ plt_common(Opts1, [], []).
+
+init_opts_for_check(Opts) ->
+ Plt =
+ case Opts#options.init_plt of
+ none -> get_default_plt();
+ Plt0 -> Plt0
+ end,
+ Opts#options{files = [],
+ files_rec = [],
+ analysis_type = plt_check,
+ defines = [],
+ from = byte_code,
+ init_plt = Plt,
+ include_dirs = [],
+ output_plt = Plt,
+ use_contracts = true
+ }.
+
+%%--------------------------------------------------------------------
+
+remove_from_plt(Opts) ->
+ Opts1 = init_opts_for_remove(Opts),
+ Files = get_files_from_opts(Opts1),
+ plt_common(Opts1, Files, []).
+
+init_opts_for_remove(Opts) ->
+ case Opts#options.output_plt =:= none of
+ true ->
+ case Opts#options.init_plt of
+ none -> Opts#options{output_plt = get_default_plt(),
+ init_plt = get_default_plt()};
+ Plt -> Opts#options{output_plt = Plt}
+ end;
+ false ->
+ case Opts#options.init_plt =:= none of
+ true -> Opts#options{init_plt = get_default_plt()};
+ false -> Opts
+ end
+ end.
+
+%%--------------------------------------------------------------------
+
+plt_common(Opts, RemoveFiles, AddFiles) ->
+ case check_plt(Opts, RemoveFiles, AddFiles) of
+ ok ->
+ case Opts#options.report_mode of
+ quiet -> ok;
+ _ -> io:put_chars(" yes\n")
+ end,
+ {?RET_NOTHING_SUSPICIOUS, []};
+ {old_version, Md5} ->
+ PltInfo = {Md5, dict:new()},
+ Files = [F || {F, _} <- Md5],
+ do_analysis(Files, Opts, dialyzer_plt:new(), PltInfo);
+ {differ, Md5, DiffMd5, ModDeps} ->
+ report_failed_plt_check(Opts, DiffMd5),
+ {AnalFiles, RemovedMods, ModDeps1} =
+ expand_dependent_modules(Md5, DiffMd5, ModDeps),
+ Plt = clean_plt(Opts#options.init_plt, RemovedMods),
+ case AnalFiles =:= [] of
+ true ->
+ %% Modules were only removed. Just write the PLT.
+ dialyzer_plt:to_file(Opts#options.output_plt, Plt, ModDeps,
+ {Md5, ModDeps}),
+ {?RET_NOTHING_SUSPICIOUS, []};
+ false ->
+ do_analysis(AnalFiles, Opts, Plt, {Md5, ModDeps1})
+ end;
+ {error, no_such_file} ->
+ Msg = io_lib:format("Could not find the PLT: ~s\n~s",
+ [Opts#options.init_plt, default_plt_error_msg()]),
+ error(Msg);
+ {error, not_valid} ->
+ Msg = io_lib:format("The file: ~s is not a valid PLT file\n~s",
+ [Opts#options.init_plt, default_plt_error_msg()]),
+ error(Msg);
+ {error, read_error} ->
+ Msg = io_lib:format("Could not read the PLT: ~s\n~s",
+ [Opts#options.init_plt, default_plt_error_msg()]),
+ error(Msg);
+ {error, {no_file_to_remove, F}} ->
+ Msg = io_lib:format("Could not remove the file ~s from the PLT: ~s\n",
+ [F, Opts#options.init_plt]),
+ error(Msg)
+ end.
+
+default_plt_error_msg() ->
+ "Use the options:\n"
+ " --build_plt to build a new PLT; or\n"
+ " --add_to_plt to add to an existing PLT\n"
+ "\n"
+ "For example, use a command like the following:\n"
+ " dialyzer --build_plt --apps erts kernel stdlib mnesia\n"
+ "Note that building a PLT such as the above may take 20 mins or so\n"
+ "\n"
+ "If you later need information about other applications, say crypto,\n"
+ "you can extend the PLT by the command:\n"
+ " dialyzer --add_to_plt --apps crypto\n"
+ "For applications that are not in Erlang/OTP use an absolute file name.\n".
+
+%%--------------------------------------------------------------------
+
+check_plt(Opts, RemoveFiles, AddFiles) ->
+ Plt = Opts#options.init_plt,
+ case dialyzer_plt:check_plt(Plt, RemoveFiles, AddFiles) of
+ {old_version, _MD5} = OldVersion ->
+ report_old_version(Opts),
+ OldVersion;
+ {differ, _MD5, _DiffMd5, _ModDeps} = Differ ->
+ Differ;
+ ok ->
+ ok;
+ {error, _Reason} = Error ->
+ Error
+ end.
+
+%%--------------------------------------------------------------------
+
+report_check(#options{report_mode = ReportMode, init_plt = InitPlt}) ->
+ case ReportMode of
+ quiet -> ok;
+ _ ->
+ io:format(" Checking whether the PLT ~s is up-to-date...", [InitPlt])
+ end.
+
+report_old_version(#options{report_mode = ReportMode, init_plt = InitPlt}) ->
+ case ReportMode of
+ quiet -> ok;
+ _ ->
+ io:put_chars(" no\n"),
+ io:format(" (the PLT ~s was built with an old version of Dialyzer)\n",
+ [InitPlt])
+ end.
+
+report_failed_plt_check(#options{analysis_type = AnalType,
+ report_mode = ReportMode}, DiffMd5) ->
+ case AnalType =:= plt_check of
+ true ->
+ case ReportMode of
+ quiet -> ok;
+ normal -> io:format(" no\n", []);
+ verbose -> report_md5_diff(DiffMd5)
+ end;
+ false -> ok
+ end.
+
+report_analysis_start(#options{analysis_type = Type,
+ report_mode = ReportMode,
+ init_plt = InitPlt,
+ output_plt = OutputPlt}) ->
+ case ReportMode of
+ quiet -> ok;
+ _ ->
+ io:format(" "),
+ case Type of
+ plt_add ->
+ case InitPlt =:= OutputPlt of
+ true -> io:format("Adding information to ~s...", [OutputPlt]);
+ false -> io:format("Adding information from ~s to ~s...",
+ [InitPlt, OutputPlt])
+ end;
+ plt_build ->
+ io:format("Creating PLT ~s ...", [OutputPlt]);
+ plt_check ->
+ io:format("Rebuilding the information in ~s...", [OutputPlt]);
+ plt_remove ->
+ case InitPlt =:= OutputPlt of
+ true -> io:format("Removing information from ~s...", [OutputPlt]);
+ false -> io:format("Removing information from ~s to ~s...",
+ [InitPlt, OutputPlt])
+ end;
+ succ_typings -> io:format("Proceeding with analysis...")
+ end
+ end.
+
+report_native_comp(#options{report_mode = ReportMode}) ->
+ case ReportMode of
+ quiet -> ok;
+ _ -> io:format(" Compiling some key modules to native code...")
+ end.
+
+report_elapsed_time(T1, T2, #options{report_mode = ReportMode}) ->
+ case ReportMode of
+ quiet -> ok;
+ _ ->
+ ElapsedTime = T2 - T1,
+ Mins = ElapsedTime div 60000,
+ Secs = (ElapsedTime rem 60000) / 1000,
+ io:format(" done in ~wm~.2fs\n", [Mins, Secs])
+ end.
+
+report_md5_diff(List) ->
+ io:format(" The PLT information is not up to date:\n", []),
+ case [Mod || {removed, Mod} <- List] of
+ [] -> ok;
+ RemovedMods -> io:format(" Removed modules: ~p\n", [RemovedMods])
+ end,
+ case [Mod || {differ, Mod} <- List] of
+ [] -> ok;
+ ChangedMods -> io:format(" Changed modules: ~p\n", [ChangedMods])
+ end.
+
+%%--------------------------------------------------------------------
+
+get_default_plt() ->
+ dialyzer_plt:get_default_plt().
+
+%%--------------------------------------------------------------------
+
+do_analysis(Options) ->
+ Files = get_files_from_opts(Options),
+ case Options#options.init_plt of
+ none -> do_analysis(Files, Options, dialyzer_plt:new(), none);
+ File -> do_analysis(Files, Options, dialyzer_plt:from_file(File), none)
+ end.
+
+do_analysis(Files, Options, Plt, PltInfo) ->
+ assert_writable(Options#options.output_plt),
+ hipe_compile(Files, Options),
+ report_analysis_start(Options),
+ State0 = new_state(),
+ State1 = init_output(State0, Options),
+ State2 = State1#cl_state{legal_warnings = Options#options.legal_warnings,
+ output_plt = Options#options.output_plt,
+ plt_info = PltInfo,
+ erlang_mode = Options#options.erlang_mode,
+ report_mode = Options#options.report_mode},
+ AnalysisType = convert_analysis_type(Options#options.analysis_type,
+ Options#options.get_warnings),
+ InitAnalysis = #analysis{type = AnalysisType,
+ defines = Options#options.defines,
+ include_dirs = Options#options.include_dirs,
+ files = Files,
+ start_from = Options#options.from,
+ plt = Plt,
+ use_contracts = Options#options.use_contracts,
+ callgraph_file = Options#options.callgraph_file},
+ State3 = start_analysis(State2, InitAnalysis),
+ {T1, _} = statistics(wall_clock),
+ Return = cl_loop(State3),
+ {T2, _} = statistics(wall_clock),
+ report_elapsed_time(T1, T2, Options),
+ Return.
+
+convert_analysis_type(plt_check, true) -> succ_typings;
+convert_analysis_type(plt_check, false) -> plt_build;
+convert_analysis_type(plt_add, true) -> succ_typings;
+convert_analysis_type(plt_add, false) -> plt_build;
+convert_analysis_type(plt_build, true) -> succ_typings;
+convert_analysis_type(plt_build, false) -> plt_build;
+convert_analysis_type(plt_remove, true) -> succ_typings;
+convert_analysis_type(plt_remove, false) -> plt_build;
+convert_analysis_type(succ_typings, _) -> succ_typings.
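+
+%% The second argument above is the value of the --get_warnings option:
+%% without it a PLT operation only (re)builds PLT information, with it the
+%% operation is promoted to a full success-typings analysis. For example
+%% (comments only):
+%%   convert_analysis_type(plt_add, false) -> plt_build     %% just extend the PLT
+%%   convert_analysis_type(plt_add, true)  -> succ_typings  %% also emit warnings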
+
+%%--------------------------------------------------------------------
+
+assert_writable(none) ->
+ ok;
+assert_writable(PltFile) ->
+ case check_if_writable(PltFile) of
+ true -> ok;
+ false ->
+ Msg = io_lib:format(" The PLT file ~s is not writable", [PltFile]),
+ error(Msg)
+ end.
+
+check_if_writable(PltFile) ->
+ case filelib:is_regular(PltFile) of
+ true -> is_writable_file_or_dir(PltFile);
+ false ->
+ case filelib:is_dir(PltFile) of
+ true -> false;
+ false ->
+ DirName = filename:dirname(PltFile),
+ filelib:is_dir(DirName) andalso is_writable_file_or_dir(DirName)
+ end
+ end.
+
+is_writable_file_or_dir(PltFile) ->
+ case file:read_file_info(PltFile) of
+ {ok, #file_info{access = A}} ->
+ (A =:= write) orelse (A =:= read_write);
+ {error, _} ->
+ false
+ end.
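+
+%% Illustrative behaviour of check_if_writable/1 (comments only,
+%% hypothetical paths):
+%%   check_if_writable("/some/dir")              -> false  %% a directory is never a PLT
+%%   check_if_writable("/writable/dir/new.plt")  -> true   %% the file need not exist yet
+%%   check_if_writable("/no/such/dir/new.plt")   -> false  %% the parent dir must exist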
+
+%%--------------------------------------------------------------------
+
+clean_plt(PltFile, RemovedMods) ->
+ %% Clean the plt from the removed modules.
+ Plt = dialyzer_plt:from_file(PltFile),
+ sets:fold(fun(M, AccPlt) -> dialyzer_plt:delete_module(AccPlt, M) end,
+ Plt, RemovedMods).
+
+expand_dependent_modules(Md5, DiffMd5, ModDeps) ->
+ ChangedMods = sets:from_list([M || {differ, M} <- DiffMd5]),
+ RemovedMods = sets:from_list([M || {removed, M} <- DiffMd5]),
+ BigSet = sets:union(ChangedMods, RemovedMods),
+ BigList = sets:to_list(BigSet),
+ ExpandedSet = expand_dependent_modules_1(BigList, BigSet, ModDeps),
+ NewModDeps = dialyzer_callgraph:strip_module_deps(ModDeps, BigSet),
+ AnalyzeMods = sets:subtract(ExpandedSet, RemovedMods),
+ FilterFun = fun(File) ->
+ Mod = list_to_atom(filename:basename(File, ".beam")),
+ sets:is_element(Mod, AnalyzeMods)
+ end,
+ {[F || {F, _} <- Md5, FilterFun(F)], RemovedMods, NewModDeps}.
+
+expand_dependent_modules_1([Mod|Mods], Included, ModDeps) ->
+ case dict:find(Mod, ModDeps) of
+ {ok, Deps} ->
+ NewDeps = sets:subtract(sets:from_list(Deps), Included),
+ case sets:size(NewDeps) =:= 0 of
+ true -> expand_dependent_modules_1(Mods, Included, ModDeps);
+ false ->
+ NewIncluded = sets:union(Included, NewDeps),
+ expand_dependent_modules_1(sets:to_list(NewDeps) ++ Mods,
+ NewIncluded, ModDeps)
+ end;
+ error ->
+ expand_dependent_modules_1(Mods, Included, ModDeps)
+ end;
+expand_dependent_modules_1([], Included, _ModDeps) ->
+ Included.
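+
+%% Sketch of the expansion above (comments only): the set of changed and
+%% removed modules is closed transitively under the dependency map, so if
+%% m1 changed and ModDeps records that m2 depends on m1, both m1.beam and
+%% m2.beam are re-analyzed, while removed modules are only deleted from
+%% the PLT.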
+
+-define(MIN_FILES_FOR_NATIVE_COMPILE, 20).
+
+-spec hipe_compile([file:filename()], #options{}) -> 'ok'.
+
+hipe_compile(Files, #options{erlang_mode = ErlangMode} = Options) ->
+ case (length(Files) < ?MIN_FILES_FOR_NATIVE_COMPILE) orelse ErlangMode of
+ true -> ok;
+ false ->
+ case erlang:system_info(hipe_architecture) of
+ undefined -> ok;
+ _ ->
+ Mods = [lists, dict, gb_sets, gb_trees, ordsets, sets,
+ cerl, cerl_trees, erl_types, erl_bif_types,
+ dialyzer_analysis_callgraph, dialyzer_codeserver,
+ dialyzer_dataflow, dialyzer_dep, dialyzer_plt,
+ dialyzer_succ_typings, dialyzer_typesig],
+ report_native_comp(Options),
+ {T1, _} = statistics(wall_clock),
+ native_compile(Mods),
+ {T2, _} = statistics(wall_clock),
+ report_elapsed_time(T1, T2, Options)
+ end
+ end.
+
+native_compile(Mods) ->
+ case erlang:system_info(schedulers) of
+ %% N when N > 1 ->
+ %% Parent = self(),
+ %% Pids = [spawn(fun () -> Parent ! {self(), hc(M)} end) || M <- Mods],
+ %% lists:foreach(fun (Pid) -> receive {Pid, Res} -> Res end end, Pids);
+ _ -> % 1 ->
+ lists:foreach(fun (Mod) -> hc(Mod) end, Mods)
+ end.
+
+hc(Mod) ->
+ case code:ensure_loaded(Mod) of
+ {module, Mod} -> ok;
+ {error, sticky_directory} -> ok
+ end,
+ case code:is_module_native(Mod) of
+ true -> ok;
+ false ->
+ {ok, Mod} = hipe:c(Mod),
+ ok
+ end.
+
+new_state() ->
+ #cl_state{}.
+
+init_output(State0, #options{output_file = OutFile, output_format = OutFormat}) ->
+ State = State0#cl_state{output_format = OutFormat},
+ case OutFile =:= none of
+ true ->
+ State;
+ false ->
+ case file:open(OutFile, [write]) of
+ {ok, File} ->
+ State#cl_state{output = File};
+ {error, Reason} ->
+ Msg = io_lib:format("Could not open output file ~p, Reason: ~p\n",
+ [OutFile, Reason]),
+ error(State, lists:flatten(Msg))
+ end
+ end.
+
+-spec maybe_close_output_file(#cl_state{}) -> 'ok'.
+
+maybe_close_output_file(State) ->
+ case State#cl_state.output of
+ standard_io -> ok;
+ File -> ok = file:close(File)
+ end.
+
+%% ----------------------------------------------------------------
+%%
+%% Main Loop
+%%
+
+-define(LOG_CACHE_SIZE, 10).
+
+%%-spec cl_loop(#cl_state{}) ->
+cl_loop(State) ->
+ cl_loop(State, []).
+
+cl_loop(State, LogCache) ->
+ BackendPid = State#cl_state.backend_pid,
+ receive
+ {BackendPid, log, LogMsg} ->
+ %%io:format(State#cl_state.output ,"Log: ~s\n", [LogMsg]),
+ cl_loop(State, lists:sublist([LogMsg|LogCache], ?LOG_CACHE_SIZE));
+ {BackendPid, warnings, Warnings} ->
+ NewState = store_warnings(State, Warnings),
+ cl_loop(NewState, LogCache);
+ {BackendPid, done, NewPlt, _NewDocPlt} ->
+ return_value(State, NewPlt);
+ {BackendPid, ext_calls, ExtCalls} ->
+ cl_loop(State#cl_state{external_calls = ExtCalls}, LogCache);
+ {BackendPid, mod_deps, ModDeps} ->
+ NewState = State#cl_state{mod_deps = ModDeps},
+ cl_loop(NewState, LogCache);
+ {'EXIT', BackendPid, {error, Reason}} ->
+ Msg = failed_anal_msg(Reason, LogCache),
+ error(State, Msg);
+ {'EXIT', BackendPid, Reason} when Reason =/= 'normal' ->
+ Msg = failed_anal_msg(io_lib:format("~P", [Reason, 12]), LogCache),
+ error(State, Msg);
+ _Other ->
+ %% io:format("Received ~p\n", [_Other]),
+ cl_loop(State, LogCache)
+ end.
+
+-spec failed_anal_msg(string(), [_]) -> string().
+
+failed_anal_msg(Reason, LogCache) ->
+ Msg = "Analysis failed with error: " ++ Reason ++ "\n",
+ case LogCache =:= [] of
+ true -> Msg;
+ false ->
+ Msg ++ "Last messages in the log cache:\n " ++ format_log_cache(LogCache)
+ end.
+
+%%
+%% formats the log cache (treating it as a string) for pretty-printing
+%%
+format_log_cache(LogCache) ->
+ Str = lists:append(lists:reverse(LogCache)),
+ string:join(string:tokens(Str, "\n"), "\n ").
+
+-spec store_warnings(#cl_state{}, [dial_warning()]) -> #cl_state{}.
+
+store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) ->
+ St#cl_state{stored_warnings = StoredWarnings ++ Warnings}.
+
+-spec error(string()) -> no_return().
+
+error(Msg) ->
+ throw({dialyzer_error, Msg}).
+
+-spec error(#cl_state{}, string()) -> no_return().
+
+error(State, Msg) ->
+ case State#cl_state.output of
+ standard_io -> ok;
+ Outfile -> io:format(Outfile, "\n~s\n", [Msg])
+ end,
+ maybe_close_output_file(State),
+ throw({dialyzer_error, Msg}).
+
+return_value(State = #cl_state{erlang_mode = ErlangMode,
+ mod_deps = ModDeps,
+ output_plt = OutputPlt,
+ plt_info = PltInfo,
+ stored_warnings = StoredWarnings},
+ Plt) ->
+ case OutputPlt =:= none of
+ true -> ok;
+ false -> dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
+ end,
+ RetValue =
+ case StoredWarnings =:= [] of
+ true -> ?RET_NOTHING_SUSPICIOUS;
+ false -> ?RET_DISCREPANCIES
+ end,
+ case ErlangMode of
+ false ->
+ print_warnings(State),
+ print_ext_calls(State),
+ maybe_close_output_file(State),
+ {RetValue, []};
+ true ->
+ {RetValue, process_warnings(StoredWarnings)}
+ end.
+
+print_ext_calls(#cl_state{report_mode = quiet}) ->
+ ok;
+print_ext_calls(#cl_state{output = Output,
+ external_calls = Calls,
+ stored_warnings = Warnings,
+ output_format = Format}) ->
+ case Calls =:= [] of
+ true -> ok;
+ false ->
+ case Warnings =:= [] of
+ true -> io:nl(Output); %% Need to do a newline first
+ false -> ok
+ end,
+ case Format of
+ formatted ->
+ io:put_chars(Output, "Unknown functions:\n"),
+ do_print_ext_calls(Output, Calls, " ");
+ raw ->
+ io:put_chars(Output, "%% Unknown functions:\n"),
+ do_print_ext_calls(Output, Calls, "%% ")
+ end
+ end.
+
+do_print_ext_calls(Output, [{M,F,A}|T], Before) ->
+ io:format(Output, "~s~p:~p/~p\n", [Before,M,F,A]),
+ do_print_ext_calls(Output, T, Before);
+do_print_ext_calls(_, [], _) ->
+ ok.
+
+print_warnings(#cl_state{stored_warnings = []}) ->
+ ok;
+print_warnings(#cl_state{output = Output,
+ output_format = Format,
+ stored_warnings = Warnings}) ->
+ PrWarnings = process_warnings(Warnings),
+ case PrWarnings of
+ [] -> ok;
+ [_|_] ->
+ S = case Format of
+ formatted ->
+ [dialyzer:format_warning(W) || W <- PrWarnings];
+ raw ->
+ [io_lib:format("~p. \n", [W]) || W <- PrWarnings]
+ end,
+ io:format(Output, "\n~s", [S])
+ end.
+
+-spec process_warnings([dial_warning()]) -> [dial_warning()].
+
+process_warnings(Warnings) ->
+ Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line
+ remove_duplicate_warnings(Warnings1, []).
+
+remove_duplicate_warnings([Duplicate, Duplicate|Left], Acc) ->
+ remove_duplicate_warnings([Duplicate|Left], Acc);
+remove_duplicate_warnings([NotDuplicate|Left], Acc) ->
+ remove_duplicate_warnings(Left, [NotDuplicate|Acc]);
+remove_duplicate_warnings([], Acc) ->
+ lists:reverse(Acc).
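+
+%% Example (comments only): warnings are {Tag, FileLine, Msg} tuples and
+%% keysort/2 sorts on the FileLine element, so exact duplicates become
+%% adjacent and are dropped:
+%%   W = {Tag, {"m.erl", 7}, Msg},
+%%   process_warnings([W, W]) =:= [W]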
+
+get_files_from_opts(Options) ->
+ From = Options#options.from,
+ Files1 = add_files(Options#options.files, From),
+ Files2 = add_files_rec(Options#options.files_rec, From),
+ ordsets:union(Files1, Files2).
+
+add_files_rec(Files, From) ->
+ add_files(Files, From, true).
+
+add_files(Files, From) ->
+ add_files(Files, From, false).
+
+add_files(Files, From, Rec) ->
+ Files1 = [filename:absname(F) || F <- Files],
+ Files2 = ordsets:from_list(Files1),
+ Dirs = ordsets:filter(fun(X) -> filelib:is_dir(X) end, Files2),
+ Files3 = ordsets:subtract(Files2, Dirs),
+ Extension = case From of
+ byte_code -> ".beam";
+ src_code -> ".erl"
+ end,
+ Fun = add_file_fun(Extension),
+ lists:foldl(fun(Dir, Acc) ->
+ filelib:fold_files(Dir, Extension, Rec, Fun, Acc)
+ end, Files3, Dirs).
+
+add_file_fun(Extension) ->
+ fun(File, AccFiles) ->
+ case filename:extension(File) =:= Extension of
+ true ->
+ AbsName = filename:absname(File),
+ ordsets:add_element(AbsName, AccFiles);
+ false -> AccFiles
+ end
+ end.
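+
+%% Illustrative behaviour (comments only, hypothetical paths): explicit
+%% files are taken as given (after filename:absname/1) while directories
+%% are scanned for files with the extension implied by the analysis:
+%%   add_files(["ebin", "extra.beam"], byte_code)
+%%   %% -> ordset of extra.beam plus every *.beam directly under ebin/;
+%%   %% add_files_rec/2 additionally descends into subdirectories, and
+%%   %% src_code analyses look for *.erl instead.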
+
+-spec start_analysis(#cl_state{}, #analysis{}) -> #cl_state{}.
+
+start_analysis(State, Analysis) ->
+ Self = self(),
+ LegalWarnings = State#cl_state.legal_warnings,
+ Fun = fun() ->
+ dialyzer_analysis_callgraph:start(Self, LegalWarnings, Analysis)
+ end,
+ BackendPid = spawn_link(Fun),
+ State#cl_state{backend_pid = BackendPid}.
+
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
new file mode 100644
index 0000000000..ae466e5c01
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -0,0 +1,448 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(dialyzer_cl_parse).
+
+-export([start/0]).
+-export([collect_args/1]). % used also by typer_options.erl
+
+-include("dialyzer.hrl").
+
+%%-----------------------------------------------------------------------
+
+-type dial_cl_parse_ret() :: {'check_init', #options{}}
+ | {'plt_info', #options{}}
+ | {'cl', #options{}}
+ | {{'gui', 'gs' | 'wx'}, #options{}}
+ | {'error', string()}.
+
+%%-----------------------------------------------------------------------
+
+-spec start() -> dial_cl_parse_ret().
+
+start() ->
+ init(),
+ Args = init:get_plain_arguments(),
+ try
+ cl(Args)
+ catch
+ throw:{dialyzer_cl_parse_error, Msg} -> {error, Msg};
+ _:R ->
+ Msg = io_lib:format("~p\n~p\n", [R, erlang:get_stacktrace()]),
+ {error, lists:flatten(Msg)}
+ end.
+
+cl(["--add_to_plt"|T]) ->
+ put(dialyzer_options_analysis_type, plt_add),
+ cl(T);
+cl(["--apps"|T]) ->
+ T1 = get_lib_dir(T, []),
+ {Args, T2} = collect_args(T1),
+ append_var(dialyzer_options_files_rec, Args),
+ cl(T2);
+cl(["--build_plt"|T]) ->
+ put(dialyzer_options_analysis_type, plt_build),
+ cl(T);
+cl(["--check_plt"|T]) ->
+ put(dialyzer_options_analysis_type, plt_check),
+ cl(T);
+cl(["-n"|T]) ->
+ cl(["--no_check_plt"|T]);
+cl(["--no_check_plt"|T]) ->
+ put(dialyzer_options_check_plt, false),
+ cl(T);
+cl(["--plt_info"|T]) ->
+ put(dialyzer_options_analysis_type, plt_info),
+ cl(T);
+cl(["--get_warnings"|T]) ->
+ put(dialyzer_options_get_warnings, true),
+ cl(T);
+cl(["-D"|_]) ->
+ error("No defines specified after -D");
+cl(["-D"++Define|T]) ->
+ Def = re:split(Define, "=", [{return, list}]),
+ append_defines(Def),
+ cl(T);
+cl(["-h"|_]) ->
+ help_message();
+cl(["--help"|_]) ->
+ help_message();
+cl(["-I"]) ->
+ error("no include directory specified after -I");
+cl(["-I", Dir|T]) ->
+ append_include(Dir),
+ cl(T);
+cl(["-I"++Dir|T]) ->
+ append_include(Dir),
+ cl(T);
+cl(["-c"++_|T]) ->
+ NewTail = command_line(T),
+ cl(NewTail);
+cl(["-r"++_|T0]) ->
+ {Args, T} = collect_args(T0),
+ append_var(dialyzer_options_files_rec, Args),
+ cl(T);
+cl(["--remove_from_plt"|T]) ->
+ put(dialyzer_options_analysis_type, plt_remove),
+ cl(T);
+cl(["--com"++_|T]) ->
+ NewTail = command_line(T),
+ cl(NewTail);
+cl(["--output"]) ->
+ error("No outfile specified");
+cl(["-o"]) ->
+ error("No outfile specified");
+cl(["--output",Output|T]) ->
+ put(dialyzer_output, Output),
+ cl(T);
+cl(["--output_plt"]) ->
+ error("No outfile specified for --output_plt");
+cl(["--output_plt",Output|T]) ->
+ put(dialyzer_output_plt, Output),
+ cl(T);
+cl(["-o", Output|T]) ->
+ put(dialyzer_output, Output),
+ cl(T);
+cl(["-o"++Output|T]) ->
+ put(dialyzer_output, Output),
+ cl(T);
+cl(["--raw"|T]) ->
+ put(dialyzer_output_format, raw),
+ cl(T);
+cl(["-pa", Path|T]) ->
+ case code:add_patha(Path) of
+ true -> cl(T);
+ {error, _} -> error("Bad directory for -pa: "++Path)
+ end;
+cl(["--plt", PLT|T]) ->
+ put(dialyzer_init_plt, PLT),
+ cl(T);
+cl(["--plt"]) ->
+ error("No plt specified for --plt");
+cl(["-q"|T]) ->
+ put(dialyzer_options_report_mode, quiet),
+ cl(T);
+cl(["--quiet"|T]) ->
+ put(dialyzer_options_report_mode, quiet),
+ cl(T);
+cl(["--src"|T]) ->
+ put(dialyzer_options_from, src_code),
+ cl(T);
+cl(["--no_spec"|T]) ->
+ put(dialyzer_options_use_contracts, false),
+ cl(T);
+cl(["-v"|_]) ->
+ io:format("Dialyzer version "++?VSN++"\n"),
+ erlang:halt(?RET_NOTHING_SUSPICIOUS);
+cl(["--version"|_]) ->
+ io:format("Dialyzer version "++?VSN++"\n"),
+ erlang:halt(?RET_NOTHING_SUSPICIOUS);
+cl(["--verbose"|T]) ->
+ put(dialyzer_options_report_mode, verbose),
+ cl(T);
+cl(["-W"|_]) ->
+ error("-W given without warning");
+cl(["-Whelp"|_]) ->
+ help_warnings();
+cl(["-W"++Warn|T]) ->
+ append_var(dialyzer_warnings, [list_to_atom(Warn)]),
+ cl(T);
+cl(["--dump_callgraph"]) ->
+ error("No outfile specified for --dump_callgraph");
+cl(["--dump_callgraph", File|T]) ->
+ put(dialyzer_callgraph_file, File),
+ cl(T);
+cl(["--gui"|T]) ->
+ put(dialyzer_options_mode, {gui, gs}),
+ cl(T);
+cl(["--wx"|T]) ->
+ put(dialyzer_options_mode, {gui, wx}),
+ cl(T);
+cl([H|_] = L) ->
+ case filelib:is_file(H) orelse filelib:is_dir(H) of
+ true ->
+ NewTail = command_line(L),
+ cl(NewTail);
+ false ->
+ error("Unknown option: "++H)
+ end;
+cl([]) ->
+ {RetTag, Opts} =
+ case get(dialyzer_options_analysis_type) =:= plt_info of
+ true ->
+ put(dialyzer_options_analysis_type, plt_check),
+ {plt_info, cl_options()};
+ false ->
+ case get(dialyzer_options_mode) of
+ {gui,_} = GUI -> {GUI, common_options()};
+ cl ->
+ case get(dialyzer_options_analysis_type) =:= plt_check of
+ true -> {check_init, cl_options()};
+ false -> {cl, cl_options()}
+ end
+ end
+ end,
+ case dialyzer_options:build(Opts) of
+ {error, Msg} -> error(Msg);
+ OptsRecord -> {RetTag, OptsRecord}
+ end.
+
+%%-----------------------------------------------------------------------
+
+command_line(T0) ->
+ {Args, T} = collect_args(T0),
+ append_var(dialyzer_options_files, Args),
+ %% if all files specified are ".erl" files, set the 'src' flag automatically
+ case lists:all(fun(F) -> filename:extension(F) =:= ".erl" end, Args) of
+ true -> put(dialyzer_options_from, src_code);
+ false -> ok
+ end,
+ T.
+
+error(Str) ->
+ Msg = lists:flatten(Str),
+ throw({dialyzer_cl_parse_error, Msg}).
+
+init() ->
+ put(dialyzer_options_mode, cl),
+ put(dialyzer_options_files_rec, []),
+ put(dialyzer_options_report_mode, normal),
+ put(dialyzer_warnings, []),
+ DefaultOpts = #options{},
+ put(dialyzer_include, DefaultOpts#options.include_dirs),
+ put(dialyzer_options_defines, DefaultOpts#options.defines),
+ put(dialyzer_options_files, DefaultOpts#options.files),
+ put(dialyzer_output_format, formatted),
+ put(dialyzer_options_check_plt, DefaultOpts#options.check_plt),
+ ok.
+
+append_defines([Def, Val]) ->
+ {ok, Tokens, _} = erl_scan:string(Val++"."),
+ {ok, ErlVal} = erl_parse:parse_term(Tokens),
+ append_var(dialyzer_options_defines, [{list_to_atom(Def), ErlVal}]);
+append_defines([Def]) ->
+ append_var(dialyzer_options_defines, [{list_to_atom(Def), true}]).
+
+append_include(Dir) ->
+ append_var(dialyzer_include, [Dir]).
+
+append_var(Var, List) when is_list(List) ->
+ put(Var, get(Var) ++ List),
+ ok.
+
+%%-----------------------------------------------------------------------
+
+-spec collect_args([string()]) -> {[string()], [string()]}.
+
+collect_args(List) ->
+ collect_args_1(List, []).
+
+collect_args_1(["-"++_|_] = L, Acc) ->
+ {lists:reverse(Acc), L};
+collect_args_1([Arg|T], Acc) ->
+ collect_args_1(T, [Arg|Acc]);
+collect_args_1([], Acc) ->
+ {lists:reverse(Acc), []}.
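+
+%% Example (comments only): the argument list is split at the first token
+%% that looks like an option, e.g.
+%%   collect_args(["a.beam", "ebin", "-o", "out"]) =:=
+%%     {["a.beam", "ebin"], ["-o", "out"]}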
+
+%%-----------------------------------------------------------------------
+
+cl_options() ->
+ [{files, get(dialyzer_options_files)},
+ {files_rec, get(dialyzer_options_files_rec)},
+ {output_file, get(dialyzer_output)},
+ {output_format, get(dialyzer_output_format)},
+ {analysis_type, get(dialyzer_options_analysis_type)},
+ {get_warnings, get(dialyzer_options_get_warnings)},
+ {callgraph_file, get(dialyzer_callgraph_file)}
+ |common_options()].
+
+common_options() ->
+ [{defines, get(dialyzer_options_defines)},
+ {from, get(dialyzer_options_from)},
+ {include_dirs, get(dialyzer_include)},
+ {init_plt, get(dialyzer_init_plt)},
+ {output_plt, get(dialyzer_output_plt)},
+ {report_mode, get(dialyzer_options_report_mode)},
+ {use_spec, get(dialyzer_options_use_contracts)},
+ {warnings, get(dialyzer_warnings)},
+ {check_plt, get(dialyzer_options_check_plt)}].
+
+%%-----------------------------------------------------------------------
+
+get_lib_dir([H|T], Acc) ->
+ NewElem =
+ case code:lib_dir(list_to_atom(H)) of
+ {error, bad_name} ->
+ case H =:= "erts" of % hack for including erts in an uninstalled system
+ true -> filename:join(code:root_dir(), "erts/preloaded/ebin");
+ false -> H
+ end;
+ LibDir -> LibDir ++ "/ebin"
+ end,
+ get_lib_dir(T, [NewElem|Acc]);
+get_lib_dir([], Acc) ->
+ lists:reverse(Acc).
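+
+%% Illustrative behaviour (comments only): application names given to
+%% --apps are mapped to their ebin directories, unknown names are passed
+%% through unchanged so plain paths still work, and "erts" is special-cased
+%% for non-installed source trees:
+%%   get_lib_dir(["stdlib", "/some/path"], [])
+%%   %% -> [code:lib_dir(stdlib) ++ "/ebin", "/some/path"]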
+
+%%-----------------------------------------------------------------------
+
+help_warnings() ->
+ S = warning_options_msg(),
+ io:put_chars(S),
+ erlang:halt(?RET_NOTHING_SUSPICIOUS).
+
+help_message() ->
+ S = "Usage: dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
+ [-pa dir]* [--plt plt] [-Ddefine]* [-I include_dir]*
+ [--output_plt file] [-Wwarn]* [--src] [--gui | --wx]
+ [-c applications] [-r applications] [-o outfile]
+ [--build_plt] [--add_to_plt] [--remove_from_plt]
+ [--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
+Options:
+ -c applications (or --command-line applications)
+ Use Dialyzer from the command line (no GUI) to detect defects in the
+ specified applications (directories or .erl or .beam files)
+ -r applications
+ Same as -c, except that directories are searched recursively for
+ subdirectories containing .erl or .beam files (depending on the
+ type of analysis)
+ -o outfile (or --output outfile)
+ When using Dialyzer from the command line, send the analysis
+ results to the specified \"outfile\" rather than to stdout
+ --raw
+ When using Dialyzer from the command line, output the raw analysis
+ results (Erlang terms) instead of the formatted result.
+ The raw format is easier to post-process (for instance, to filter
+ warnings or to output HTML pages)
+ --src
+ Override the default, which is to analyze BEAM files, and
+ analyze starting from Erlang source code instead
+ -Dname (or -Dname=value)
+ When analyzing from source, pass the define to Dialyzer (**)
+ -I include_dir
+ When analyzing from source, pass the include_dir to Dialyzer (**)
+ -pa dir
+ Include dir in the path for Erlang (useful when analyzing files
+ that have '-include_lib()' directives)
+ --output_plt file
+ Store the plt at the specified file after building it
+ --plt plt
+ Use the specified plt as the initial plt (if the plt was built
+ during setup the files will be checked for consistency)
+ -Wwarn
+ A family of options which selectively turn on/off warnings
+ (for help on the names of warnings use dialyzer -Whelp)
+ --shell
+ Do not disable the Erlang shell while running the GUI
+ --version (or -v)
+ Prints the Dialyzer version and some more information and exits
+ --help (or -h)
+ Prints this message and exits
+ --quiet (or -q)
+ Makes Dialyzer a bit more quiet
+ --verbose
+ Makes Dialyzer a bit more verbose
+ --build_plt
+ The analysis starts from an empty plt and creates a new one from the
+ files specified with -c and -r. Only works for beam files.
+ Use --plt or --output_plt to override the default plt location.
+ --add_to_plt
+ The plt is extended to also include the files specified with -c and -r.
+ Use --plt to specify which plt to start from, and --output_plt to
+ specify where to put the plt. Note that the analysis might include
+ files from the plt if they depend on the new files.
+ This option only works with beam files.
+ --remove_from_plt
+ The information from the files specified with -c and -r is removed
+ from the plt. Note that this may cause a re-analysis of the remaining
+ dependent files.
+ --check_plt
+ Checks the plt for consistency and rebuilds it if it is not up-to-date.
+ This option is rarely needed, as the check is performed by default.
+ --no_check_plt (or -n)
+ Skip the plt check when running Dialyzer. Useful when working with
+ installed plts that never change.
+ --plt_info
+ Makes Dialyzer print information about the plt and then quit. The plt
+ can be specified with --plt.
+ --get_warnings
+ Makes Dialyzer emit warnings even when manipulating the plt. Only
+ emits warnings for files that are actually analyzed.
+ --dump_callgraph file
+ Dump the call graph into the specified file whose format is determined
+ by the file name extension. Supported extensions are: raw, dot, and ps.
+ If the file has any other extension, the default format 'raw' is used.
+ --gui
+ Use the gs-based GUI.
+ --wx
+ Use the wx-based GUI.
+
+Note:
+ * denotes that multiple occurrences of these options are possible.
+ ** options -D and -I work both from command-line and in the Dialyzer GUI;
+ the syntax of defines and includes is the same as that used by \"erlc\".
+
+" ++ warning_options_msg() ++ "
+The exit status of the command line version is:
+ 0 - No problems were encountered during the analysis and no
+ warnings were emitted.
+ 1 - Problems were encountered during the analysis.
+ 2 - No problems were encountered, but warnings were emitted.
+",
+ io:put_chars(S),
+ erlang:halt(?RET_NOTHING_SUSPICIOUS).
+
+warning_options_msg() ->
+ "Warning options:
+ -Wno_return
+ Suppress warnings for functions that will never return a value.
+ -Wno_unused
+ Suppress warnings for unused functions.
+ -Wno_improper_lists
+ Suppress warnings for construction of improper lists.
+ -Wno_tuple_as_fun
+ Suppress warnings for using tuples instead of funs.
+ -Wno_fun_app
+ Suppress warnings for fun applications that will fail.
+ -Wno_match
+ Suppress warnings for patterns that are unused or cannot match.
+ -Wno_opaque
+ Suppress warnings for violations of opaqueness of data types.
+ -Wunmatched_returns ***
+ Include warnings for function calls which ignore a structured return
+ value or do not match against one of many possible return values.
+ -Werror_handling ***
+ Include warnings for functions that only return by means of an exception.
+ -Wrace_conditions ***
+ Include warnings for possible race conditions.
+ -Wunderspecs ***
+ Warn about underspecified functions
+ (those whose -spec is strictly more allowing than the success typing).
+
+The following options are also available but their use is not recommended:
+(they are mostly for Dialyzer developers and internal debugging)
+ -Woverspecs ***
+ Warn about overspecified functions
+ (those whose -spec is strictly less allowing than the success typing).
+ -Wspecdiffs ***
+ Warn when the -spec is different than the success typing.
+
+*** Identifies options that turn on warnings rather than turning them off.
+".
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
new file mode 100644
index 0000000000..624501fc49
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -0,0 +1,282 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_codeserver.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 4 Apr 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_codeserver).
+
+-export([delete/1,
+ finalize_contracts/2,
+ finalize_records/2,
+ get_contracts/1,
+ get_exports/1,
+ get_records/1,
+ get_next_core_label/1,
+ get_temp_contracts/1,
+ get_temp_records/1,
+ insert/3,
+ insert_exports/2,
+ is_exported/2,
+ lookup_mod_code/2,
+ lookup_mfa_code/2,
+ lookup_mod_records/2,
+ lookup_mod_contracts/2,
+ lookup_mfa_contract/2,
+ new/0,
+ set_next_core_label/2,
+ set_temp_records/2,
+ store_records/3,
+ store_temp_records/3,
+ store_contracts/3,
+ store_temp_contracts/3]).
+
+-include("dialyzer.hrl").
+
+%%--------------------------------------------------------------------
+
+-record(dialyzer_codeserver, {table_pid :: pid(),
+ exports = sets:new() :: set(), % set(mfa())
+ next_core_label = 0 :: label(),
+ records = dict:new() :: dict(),
+ temp_records = dict:new() :: dict(),
+ contracts = dict:new() :: dict(),
+ temp_contracts = dict:new() :: dict()}).
+
+-opaque codeserver() :: #dialyzer_codeserver{}.
+
+%%--------------------------------------------------------------------
+
+-spec new() -> codeserver().
+
+new() ->
+ #dialyzer_codeserver{table_pid = table__new()}.
+
+-spec delete(codeserver()) -> 'ok'.
+
+delete(#dialyzer_codeserver{table_pid = TablePid}) ->
+ table__delete(TablePid).
+
+-spec insert(module(), cerl:c_module(), codeserver()) -> codeserver().
+
+insert(Mod, ModCode, CS) ->
+ NewTablePid = table__insert(CS#dialyzer_codeserver.table_pid, Mod, ModCode),
+ CS#dialyzer_codeserver{table_pid = NewTablePid}.
+
+-spec insert_exports([mfa()], codeserver()) -> codeserver().
+
+insert_exports(List, #dialyzer_codeserver{exports = Exports} = CS) ->
+ Set = sets:from_list(List),
+ NewExports = sets:union(Exports, Set),
+ CS#dialyzer_codeserver{exports = NewExports}.
+
+-spec is_exported(mfa(), codeserver()) -> boolean().
+
+is_exported(MFA, #dialyzer_codeserver{exports = Exports}) ->
+ sets:is_element(MFA, Exports).
+
+-spec get_exports(codeserver()) -> set(). % set(mfa())
+
+get_exports(#dialyzer_codeserver{exports = Exports}) ->
+ Exports.
+
+-spec lookup_mod_code(module(), codeserver()) -> cerl:c_module().
+
+lookup_mod_code(Mod, CS) when is_atom(Mod) ->
+ table__lookup(CS#dialyzer_codeserver.table_pid, Mod).
+
+-spec lookup_mfa_code(mfa(), codeserver()) -> {cerl:c_var(), cerl:c_fun()}.
+
+lookup_mfa_code({_M, _F, _A} = MFA, CS) ->
+ table__lookup(CS#dialyzer_codeserver.table_pid, MFA).
+
+-spec get_next_core_label(codeserver()) -> label().
+
+get_next_core_label(#dialyzer_codeserver{next_core_label = NCL}) ->
+ NCL.
+
+-spec set_next_core_label(label(), codeserver()) -> codeserver().
+
+set_next_core_label(NCL, CS) ->
+ CS#dialyzer_codeserver{next_core_label = NCL}.
+
+-spec store_records(module(), dict(), codeserver()) -> codeserver().
+
+store_records(Mod, Dict, #dialyzer_codeserver{records = RecDict} = CS)
+ when is_atom(Mod) ->
+ case dict:size(Dict) =:= 0 of
+ true -> CS;
+ false -> CS#dialyzer_codeserver{records = dict:store(Mod, Dict, RecDict)}
+ end.
+
+-spec lookup_mod_records(module(), codeserver()) -> dict().
+
+lookup_mod_records(Mod, #dialyzer_codeserver{records = RecDict})
+ when is_atom(Mod) ->
+ case dict:find(Mod, RecDict) of
+ error -> dict:new();
+ {ok, Dict} -> Dict
+ end.
+
+-spec get_records(codeserver()) -> dict().
+
+get_records(#dialyzer_codeserver{records = RecDict}) ->
+ RecDict.
+
+-spec store_temp_records(module(), dict(), codeserver()) -> codeserver().
+
+store_temp_records(Mod, Dict, #dialyzer_codeserver{temp_records = TempRecDict} = CS)
+ when is_atom(Mod) ->
+ case dict:size(Dict) =:= 0 of
+ true -> CS;
+ false -> CS#dialyzer_codeserver{temp_records = dict:store(Mod, Dict, TempRecDict)}
+ end.
+
+-spec get_temp_records(codeserver()) -> dict().
+
+get_temp_records(#dialyzer_codeserver{temp_records = TempRecDict}) ->
+ TempRecDict.
+
+-spec set_temp_records(dict(), codeserver()) -> codeserver().
+
+set_temp_records(Dict, CS) ->
+ CS#dialyzer_codeserver{temp_records = Dict}.
+
+-spec finalize_records(dict(), codeserver()) -> codeserver().
+
+finalize_records(Dict, CS) ->
+ CS#dialyzer_codeserver{records = Dict, temp_records = dict:new()}.
+
+-spec store_contracts(module(), dict(), codeserver()) -> codeserver().
+
+store_contracts(Mod, Dict, #dialyzer_codeserver{contracts = C} = CS)
+ when is_atom(Mod) ->
+ case dict:size(Dict) =:= 0 of
+ true -> CS;
+ false -> CS#dialyzer_codeserver{contracts = dict:store(Mod, Dict, C)}
+ end.
+
+-spec lookup_mod_contracts(module(), codeserver()) -> dict().
+
+lookup_mod_contracts(Mod, #dialyzer_codeserver{contracts = ContDict})
+ when is_atom(Mod) ->
+ case dict:find(Mod, ContDict) of
+ error -> dict:new();
+ {ok, Dict} -> Dict
+ end.
+
+-spec lookup_mfa_contract(mfa(), codeserver()) ->
+ 'error' | {'ok', dialyzer_contracts:file_contract()}.
+
+lookup_mfa_contract({M,_F,_A} = MFA, #dialyzer_codeserver{contracts = ContDict}) ->
+ case dict:find(M, ContDict) of
+ error -> error;
+ {ok, Dict} -> dict:find(MFA, Dict)
+ end.
+
+-spec get_contracts(codeserver()) -> dict().
+
+get_contracts(#dialyzer_codeserver{contracts = ContDict}) ->
+ ContDict.
+
+-spec store_temp_contracts(module(), dict(), codeserver()) -> codeserver().
+
+store_temp_contracts(Mod, Dict, #dialyzer_codeserver{temp_contracts = C} = CS)
+ when is_atom(Mod) ->
+ case dict:size(Dict) =:= 0 of
+ true -> CS;
+ false -> CS#dialyzer_codeserver{temp_contracts = dict:store(Mod, Dict, C)}
+ end.
+
+-spec get_temp_contracts(codeserver()) -> dict().
+
+get_temp_contracts(#dialyzer_codeserver{temp_contracts = TempContDict}) ->
+ TempContDict.
+
+-spec finalize_contracts(dict(), codeserver()) -> codeserver().
+
+finalize_contracts(Dict, CS) ->
+ CS#dialyzer_codeserver{contracts = Dict, temp_contracts = dict:new()}.
+
+table__new() ->
+ spawn_link(fun() -> table__loop(none, dict:new()) end).
+
+table__delete(TablePid) ->
+ TablePid ! stop,
+ ok.
+
+table__lookup(TablePid, Key) ->
+ TablePid ! {self(), lookup, Key},
+ receive
+ {TablePid, Key, Ans} -> Ans
+ end.
+
+table__insert(TablePid, Key, Val) ->
+ TablePid ! {insert, [{Key, term_to_binary(Val, [compressed])}]},
+ TablePid.
+
+table__loop(Cached, Map) ->
+ receive
+ stop -> ok;
+ {Pid, lookup, {M, F, A} = MFA} ->
+ {NewCached, Ans} =
+ case Cached of
+ {M, Tree} ->
+ [Val] = [VarFun || {Var, _Fun} = VarFun <- cerl:module_defs(Tree),
+ cerl:fname_id(Var) =:= F,
+ cerl:fname_arity(Var) =:= A],
+ {Cached, Val};
+ _ ->
+ Tree = fetch_and_expand(M, Map),
+ [Val] = [VarFun || {Var, _Fun} = VarFun <- cerl:module_defs(Tree),
+ cerl:fname_id(Var) =:= F,
+ cerl:fname_arity(Var) =:= A],
+ {{M, Tree}, Val}
+ end,
+ Pid ! {self(), MFA, Ans},
+ table__loop(NewCached, Map);
+ {Pid, lookup, Mod} when is_atom(Mod) ->
+ Ans = case Cached of
+ {Mod, Tree} -> Tree;
+ _ -> fetch_and_expand(Mod, Map)
+ end,
+ Pid ! {self(), Mod, Ans},
+ table__loop({Mod, Ans}, Map);
+ {insert, List} ->
+ NewMap = lists:foldl(fun({Key, Val}, AccMap) ->
+ dict:store(Key, Val, AccMap)
+ end, Map, List),
+ table__loop(Cached, NewMap)
+ end.
+
+fetch_and_expand(Mod, Map) ->
+ try
+ Bin = dict:fetch(Mod, Map),
+ binary_to_term(Bin)
+ catch
+ _:_ ->
+ S = atom_to_list(Mod),
+ Msg = "found no module named '" ++ S ++ "' in the analyzed files",
+ exit({error, Msg})
+ end.
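+
+%% Sketch of the code table protocol (comments only, 'mymod' is a made-up
+%% module name): the table is a linked process that stores each module's
+%% core AST as a compressed binary and caches the most recently expanded
+%% tree:
+%%   Pid = table__new(),
+%%   Pid1 = table__insert(Pid, mymod, CoreModule),
+%%   table__lookup(Pid1, mymod),           %% the whole cerl module
+%%   table__lookup(Pid1, {mymod, f, 1})    %% the {Var, Fun} definition of f/1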
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
new file mode 100644
index 0000000000..e2680bb03d
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -0,0 +1,492 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(dialyzer_contracts).
+
+-export([check_contract/2,
+ check_contracts/3,
+ contracts_without_fun/3,
+ contract_to_string/1,
+ get_invalid_contract_warnings/3,
+ get_contract_args/1,
+ get_contract_return/1,
+ get_contract_return/2,
+ %% get_contract_signature/1,
+ is_overloaded/1,
+ process_contract_remote_types/1,
+ store_tmp_contract/5]).
+
+%%-----------------------------------------------------------------------
+
+-include("dialyzer.hrl").
+
+%%-----------------------------------------------------------------------
+%% Types used in other parts of the system below
+%%-----------------------------------------------------------------------
+
+-type file_contract() :: {file_line(), #contract{}}.
+
+-type plt_contracts() :: [{mfa(), #contract{}}]. % actually, an orddict()
+
+%%-----------------------------------------------------------------------
+%% Internal record for contracts whose components have not been processed
+%% to expand records and/or remote types that they might contain.
+%%-----------------------------------------------------------------------
+
+-type tmp_contract_fun() :: fun((dict()) -> contract_pair()).
+
+-record(tmp_contract, {contract_funs = [] :: [tmp_contract_fun()],
+ forms = [] :: [{_, _}]}).
+
+%%-----------------------------------------------------------------------
+
+%%-define(DEBUG, true).
+
+-ifdef(DEBUG).
+-define(debug(X__, Y__), io:format(X__, Y__)).
+-else.
+-define(debug(X__, Y__), ok).
+-endif.
+
+%%-----------------------------------------------------------------------
+
+-spec get_contract_return(#contract{}) -> erl_types:erl_type().
+
+get_contract_return(#contract{contracts = Cs, args = GenArgs}) ->
+ process_contracts(Cs, GenArgs).
+
+-spec get_contract_return(#contract{}, [erl_types:erl_type()]) -> erl_types:erl_type().
+
+get_contract_return(#contract{contracts = Cs}, Args) ->
+ process_contracts(Cs, Args).
+
+-spec get_contract_args(#contract{}) -> [erl_types:erl_type()].
+
+get_contract_args(#contract{args = Args}) ->
+ Args.
+
+-spec get_contract_signature(#contract{}) -> erl_types:erl_type().
+
+get_contract_signature(#contract{contracts = Cs, args = GeneralDomain}) ->
+ Range = process_contracts(Cs, GeneralDomain),
+ erl_types:t_fun(GeneralDomain, Range).
+
+-spec is_overloaded(#contract{}) -> boolean().
+
+is_overloaded(#contract{contracts = Cs}) ->
+ case Cs of
+ [_] -> false;
+ [_,_|_] -> true
+ end.
+
+-spec contract_to_string(#contract{}) -> string().
+
+contract_to_string(#contract{forms = Forms}) ->
+ contract_to_string_1(Forms).
+
+contract_to_string_1([{Contract, []}]) ->
+ strip_fun(erl_types:t_form_to_string(Contract));
+contract_to_string_1([{Contract, []}|Rest]) ->
+ strip_fun(erl_types:t_form_to_string(Contract)) ++ "\n ; "
+ ++ contract_to_string_1(Rest);
+contract_to_string_1([{Contract, Constraints}]) ->
+ strip_fun(erl_types:t_form_to_string(Contract)) ++ " when "
+ ++ constraints_to_string(Constraints);
+contract_to_string_1([{Contract, Constraints}|Rest]) ->
+ strip_fun(erl_types:t_form_to_string(Contract)) ++ " when "
+ ++ constraints_to_string(Constraints) ++ ";" ++
+ contract_to_string_1(Rest).
+
+strip_fun("fun(" ++ String) ->
+ butlast(String).
+
+butlast([]) -> [];
+butlast([_]) -> [];
+butlast([H|T]) -> [H|butlast(T)].
+
+constraints_to_string([]) ->
+ "";
+constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}]) ->
+ atom_to_list(What) ++ "(" ++
+ sequence([erl_types:t_form_to_string(T) || T <- Types], ",") ++ ")";
+constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}|Rest]) ->
+ atom_to_list(What) ++ "(" ++
+ sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
+ ++ "), " ++ constraints_to_string(Rest).
+
+sequence([], _Delimiter) -> "";
+sequence([H], _Delimiter) -> H;
+sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
+
+-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) ->
+ dialyzer_codeserver:codeserver().
+
+process_contract_remote_types(CodeServer) ->
+ TmpContractDict = dialyzer_codeserver:get_temp_contracts(CodeServer),
+ RecordDict = dialyzer_codeserver:get_records(CodeServer),
+ ContractFun =
+ fun({_M, _F, _A}, {File, #tmp_contract{contract_funs = CFuns, forms = Forms}}) ->
+ NewCs = [CFun(RecordDict) || CFun <- CFuns],
+ Args = general_domain(NewCs),
+ {File, #contract{contracts = NewCs, args = Args, forms = Forms}}
+ end,
+ ModuleFun =
+ fun(_ModuleName, ContractDict) ->
+ dict:map(ContractFun, ContractDict)
+ end,
+ NewContractDict = dict:map(ModuleFun, TmpContractDict),
+ dialyzer_codeserver:finalize_contracts(NewContractDict, CodeServer).
+
+-spec check_contracts([{mfa(), file_contract()}],
+ dialyzer_callgraph:callgraph(), dict()) -> plt_contracts().
+
+check_contracts(Contracts, Callgraph, FunTypes) ->
+ FoldFun =
+ fun(Label, Type, NewContracts) ->
+ {ok, {M,F,A} = MFA} = dialyzer_callgraph:lookup_name(Label, Callgraph),
+ case orddict:find(MFA, Contracts) of
+ {ok, {_FileLine, Contract}} ->
+ case check_contract(Contract, Type) of
+ ok ->
+ case erl_bif_types:is_known(M, F, A) of
+ true ->
+ %% Disregard the contracts since
+ %% this is a known function.
+ NewContracts;
+ false ->
+ [{MFA, Contract}|NewContracts]
+ end;
+ {error, _Error} -> NewContracts
+ end;
+ error -> NewContracts
+ end
+ end,
+ dict:fold(FoldFun, [], FunTypes).
+
+%% Checks all components of a contract
+-spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}.
+
+check_contract(#contract{contracts = Contracts}, SuccType) ->
+ try
+ Contracts1 = [{Contract, insert_constraints(Constraints, dict:new())}
+ || {Contract, Constraints} <- Contracts],
+ Contracts2 = [erl_types:t_subst(Contract, Dict)
+ || {Contract, Dict} <- Contracts1],
+ GenDomains = [erl_types:t_fun_args(C) || C <- Contracts2],
+ case check_domains(GenDomains) of
+ error ->
+ {error, {overlapping_contract, []}};
+ ok ->
+ InfList = [erl_types:t_inf(Contract, SuccType, opaque)
+ || Contract <- Contracts2],
+ check_contract_inf_list(InfList, SuccType)
+ end
+ catch throw:{error, _} = Error -> Error
+ end.
+
+check_domains([_]) -> ok;
+check_domains([Dom|Doms]) ->
+ Fun = fun(D) ->
+ erl_types:any_none_or_unit(erl_types:t_inf_lists(Dom, D, opaque))
+ end,
+ case lists:all(Fun, Doms) of
+ true -> check_domains(Doms);
+ false -> error
+ end.
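+
+%% Illustrative consequence of check_domains/1 (comments only): the
+%% argument domains of an overloaded contract's clauses must be disjoint:
+%%   -spec f(atom()) -> ok; (integer()) -> ok.        %% accepted
+%%   -spec g(atom()) -> ok; (atom() | pid()) -> ok.   %% rejected:
+%%   %% check_contract/2 returns {error, {overlapping_contract, []}}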
+
+%% Allow a contract if one of the overloaded contracts is possible.
+%% We used to be more strict, e.g., all overloaded contracts had to be
+%% possible.
+check_contract_inf_list([FunType|Left], SuccType) ->
+ FunArgs = erl_types:t_fun_args(FunType),
+ case lists:any(fun erl_types:t_is_none_or_unit/1, FunArgs) of
+ true -> check_contract_inf_list(Left, SuccType);
+ false ->
+ STRange = erl_types:t_fun_range(SuccType),
+ case erl_types:t_is_none_or_unit(STRange) of
+ true -> ok;
+ false ->
+ Range = erl_types:t_fun_range(FunType),
+ case erl_types:t_is_none(erl_types:t_inf(STRange, Range, opaque)) of
+ true -> check_contract_inf_list(Left, SuccType);
+ false -> ok
+ end
+ end
+ end;
+check_contract_inf_list([], _SuccType) ->
+ {error, invalid_contract}.
+
+%% This is the heart of the "range function"
+-spec process_contracts([contract_pair()], [erl_types:erl_type()]) -> erl_types:erl_type().
+
+process_contracts(OverContracts, Args) ->
+ process_contracts(OverContracts, Args, erl_types:t_none()).
+
+process_contracts([OverContract|Left], Args, AccRange) ->
+ NewAccRange =
+ case process_contract(OverContract, Args) of
+ error -> AccRange;
+ {ok, Range} -> erl_types:t_sup(AccRange, Range)
+ end,
+ process_contracts(Left, Args, NewAccRange);
+process_contracts([], _Args, AccRange) ->
+ AccRange.
+
+-spec process_contract(contract_pair(), [erl_types:erl_type()]) -> 'error' | {'ok', erl_types:erl_type()}.
+
+process_contract({Contract, Constraints}, CallTypes0) ->
+ CallTypesFun = erl_types:t_fun(CallTypes0, erl_types:t_any()),
+ ContArgsFun = erl_types:t_fun(erl_types:t_fun_args(Contract),
+ erl_types:t_any()),
+ ?debug("Instance: Contract: ~s\n Arguments: ~s\n",
+ [erl_types:t_to_string(ContArgsFun),
+ erl_types:t_to_string(CallTypesFun)]),
+ case solve_constraints(ContArgsFun, CallTypesFun, Constraints) of
+ {ok, VarDict} ->
+ {ok, erl_types:t_subst(erl_types:t_fun_range(Contract), VarDict)};
+ error -> error
+ end.
+
+solve_constraints(Contract, Call, Constraints) ->
+ %% First make sure the call follows the constraints
+ CDict = insert_constraints(Constraints, dict:new()),
+ Contract1 = erl_types:t_subst(Contract, CDict),
+ %% Just a safe over-approximation.
+ %% TODO: Find the types for type variables properly
+ ContrArgs = erl_types:t_fun_args(Contract1),
+ CallArgs = erl_types:t_fun_args(Call),
+ InfList = erl_types:t_inf_lists(ContrArgs, CallArgs),
+ case erl_types:any_none_or_unit(InfList) of
+ true -> error;
+ false -> {ok, CDict}
+ end.
+ %%Inf = erl_types:t_inf(Contract1, Call),
+ %% Then unify with the constrained call type.
+ %% ?debug("Call: ~s\n", [erl_types:t_to_string(Call)]),
+ %% ?debug("Contract: ~s\n", [erl_types:t_to_string(Contract)]),
+ %% ?debug("Contract1: ~s\n", [erl_types:t_to_string(Contract1)]),
+ %% ?debug("Inf: ~s\n", [erl_types:t_to_string(Inf)]),
+ %% erl_types:t_assign_variables_to_subtype(Contract, Inf).
+
+%% Checks the contracts for functions that are not implemented
+-spec contracts_without_fun(dict(), [_], dialyzer_callgraph:callgraph()) -> [dial_warning()].
+
+contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
+ AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
+ || {Label, Arity} <- AllFuns0],
+ AllFuns2 = [{M, F, A} || {{ok, {M, F, _}}, A} <- AllFuns1],
+ AllContractMFAs = dict:fetch_keys(Contracts),
+ ErrorContractMFAs = AllContractMFAs -- AllFuns2,
+ [warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
+
+warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
+ {FileLine, _Contract} = dict:fetch(MFA, Contracts),
+ {?WARN_CONTRACT_SYNTAX, FileLine, {spec_missing_fun, [M, F, A]}}.
+
+%% This treats the "when" constraints. It will be extended, we hope.
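+%% For example, a constraint coming from a spec like
+%%   -spec i(X) -> X when is_subtype(X, atom()).
+%% reaches this function as {subtype, Var, atom()} and is stored in the
+%% dict as 'X' -> atom(); further constraints on the same variable are
+%% combined with t_inf/2.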
+insert_constraints([{subtype, Type1, Type2}|Left], Dict) ->
+ case erl_types:t_is_var(Type1) of
+ true ->
+ Name = erl_types:t_var_name(Type1),
+ Dict1 = case dict:find(Name, Dict) of
+ error ->
+ dict:store(Name, Type2, Dict);
+ {ok, VarType} ->
+ dict:store(Name, erl_types:t_inf(VarType, Type2), Dict)
+ end,
+ insert_constraints(Left, Dict1);
+ false ->
+ %% A lot of things should change to add supertypes
+ throw({error, io_lib:format("First argument of is_subtype constraint "
+ "must be a type variable\n", [])})
+ end;
+insert_constraints([], Dict) -> Dict.
+
+-spec store_tmp_contract(mfa(), file_line(), [_], dict(), dict()) -> dict().
+
+store_tmp_contract(MFA, FileLine, TypeSpec, SpecDict, RecordsDict) ->
+ %% io:format("contract from form: ~p\n", [TypeSpec]),
+ TmpContract = contract_from_form(TypeSpec, RecordsDict),
+ %% io:format("contract: ~p\n", [Contract]),
+ dict:store(MFA, {FileLine, TmpContract}, SpecDict).
+
+contract_from_form(Forms, RecDict) ->
+ {CFuns, Forms1} = contract_from_form(Forms, RecDict, [], []),
+ #tmp_contract{contract_funs = CFuns, forms = Forms1}.
+
+contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
+ TypeAcc, FormAcc) ->
+ TypeFun =
+ fun(AllRecords) ->
+ Type = erl_types:t_from_form(Form, RecDict),
+ NewType = erl_types:t_solve_remote(Type, AllRecords),
+ {NewType, []}
+ end,
+ NewTypeAcc = [TypeFun | TypeAcc],
+ NewFormAcc = [{Form, []} | FormAcc],
+ contract_from_form(Left, RecDict, NewTypeAcc, NewFormAcc);
+contract_from_form([{type, _L1, bounded_fun,
+ [{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
+ RecDict, TypeAcc, FormAcc) ->
+ TypeFun =
+ fun(AllRecords) ->
+ Constr1 = [constraint_from_form(C, RecDict, AllRecords) || C <- Constr],
+ VarDict = insert_constraints(Constr1, dict:new()),
+ Type = erl_types:t_from_form(Form, RecDict, VarDict),
+ NewType = erl_types:t_solve_remote(Type, AllRecords),
+ {NewType, Constr1}
+ end,
+ NewTypeAcc = [TypeFun | TypeAcc],
+ NewFormAcc = [{Form, Constr} | FormAcc],
+ contract_from_form(Left, RecDict, NewTypeAcc, NewFormAcc);
+contract_from_form([], _RecDict, TypeAcc, FormAcc) ->
+ {lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
+
+constraint_from_form({type, _, constraint, [{atom, _, is_subtype},
+ [Type1, Type2]]}, RecDict, AllRecords) ->
+ T1 = erl_types:t_from_form(Type1, RecDict),
+ T2 = erl_types:t_from_form(Type2, RecDict),
+ T3 = erl_types:t_solve_remote(T1, AllRecords),
+ T4 = erl_types:t_solve_remote(T2, AllRecords),
+ {subtype, T3, T4};
+constraint_from_form({type, _, constraint, [{atom,_,Name}, List]}, _RecDict, _) ->
+ N = length(List),
+ throw({error, io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])}).
+
+%% Gets the most general domain of a list of domains of all
+%% the overloaded contracts
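+%% Sketch: for overloads with domains [atom()] and [integer()] the result
+%% is [atom() | integer()]; constrained type variables are substituted
+%% first and any remaining variables are widened to any().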
+
+general_domain(List) ->
+ general_domain(List, erl_types:t_none()).
+
+general_domain([{Sig, Constraints}|Left], AccSig) ->
+ Dict = insert_constraints(Constraints, dict:new()),
+ Sig1 = erl_types:t_subst(Sig, Dict),
+ general_domain(Left, erl_types:t_sup(AccSig, Sig1));
+general_domain([], AccSig) ->
+ %% Get rid of all variables in the domain.
+ AccSig1 = erl_types:subst_all_vars_to_any(AccSig),
+ erl_types:t_fun_args(AccSig1).
+
+-spec get_invalid_contract_warnings([module()], dialyzer_codeserver:codeserver(), dialyzer_plt:plt()) -> [dial_warning()].
+
+get_invalid_contract_warnings(Modules, CodeServer, Plt) ->
+ get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, []).
+
+get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, Acc) ->
+ Contracts1 = dialyzer_codeserver:lookup_mod_contracts(Mod, CodeServer),
+ Contracts2 = dict:to_list(Contracts1),
+ Records = dialyzer_codeserver:lookup_mod_records(Mod, CodeServer),
+ NewAcc = get_invalid_contract_warnings_funs(Contracts2, Plt, Records, Acc),
+ get_invalid_contract_warnings_modules(Mods, CodeServer, Plt, NewAcc);
+get_invalid_contract_warnings_modules([], _CodeServer, _Plt, Acc) ->
+ Acc.
+
+get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left],
+ Plt, RecDict, Acc) ->
+ case dialyzer_plt:lookup(Plt, MFA) of
+ none ->
+ %% This must be a contract for a non-available function. Just accept it.
+ get_invalid_contract_warnings_funs(Left, Plt, RecDict, Acc);
+ {value, {Ret, Args}} ->
+ Sig = erl_types:t_fun(Args, Ret),
+ NewAcc =
+ case check_contract(Contract, Sig) of
+ {error, invalid_contract} ->
+ [invalid_contract_warning(MFA, FileLine, Sig, RecDict)|Acc];
+ {error, Msg} ->
+ [{?WARN_CONTRACT_SYNTAX, FileLine, Msg}|Acc];
+ ok ->
+ {M, F, A} = MFA,
+ CSig0 = get_contract_signature(Contract),
+ CSig = erl_types:subst_all_vars_to_any(CSig0),
+ case erl_bif_types:is_known(M, F, A) of
+ true ->
+ %% This is strictly for contracts of functions also in
+ %% erl_bif_types
+ BifArgs = erl_bif_types:arg_types(M, F, A),
+ BifRet = erl_bif_types:type(M, F, A),
+ BifSig = erl_types:t_fun(BifArgs, BifRet),
+ case check_contract(Contract, BifSig) of
+ {error, _} ->
+ [invalid_contract_warning(MFA, FileLine, BifSig, RecDict)
+ |Acc];
+ ok ->
+ picky_contract_check(CSig, BifSig, MFA, FileLine,
+ Contract, RecDict, Acc)
+ end;
+ false ->
+ picky_contract_check(CSig, Sig, MFA, FileLine, Contract,
+ RecDict, Acc)
+ end
+ end,
+ get_invalid_contract_warnings_funs(Left, Plt, RecDict, NewAcc)
+ end;
+get_invalid_contract_warnings_funs([], _Plt, _RecDict, Acc) ->
+ Acc.
+
+invalid_contract_warning({M, F, A}, FileLine, Type, RecDict) ->
+ {?WARN_CONTRACT_TYPES, FileLine,
+ {invalid_contract, [M, F, A, dialyzer_utils:format_sig(Type, RecDict)]}}.
+
+picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
+ CSig = erl_types:t_abstract_records(CSig0, RecDict),
+ Sig = erl_types:t_abstract_records(Sig0, RecDict),
+ case erl_types:t_is_equal(CSig, Sig) of
+ true -> Acc;
+ false ->
+ case (erl_types:t_is_none(erl_types:t_fun_range(Sig)) andalso
+ erl_types:t_is_unit(erl_types:t_fun_range(CSig))) of
+ true -> Acc;
+ false ->
+ case extra_contract_warning(MFA, FileLine, Contract,
+ CSig, Sig, RecDict) of
+ no_warning -> Acc;
+ {warning, Warning} -> [Warning|Acc]
+ end
+ end
+ end.
+
+extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
+ SigString = lists:flatten(dialyzer_utils:format_sig(Sig, RecDict)),
+ ContractString0 = lists:flatten(dialyzer_utils:format_sig(CSig, RecDict)),
+ case SigString =:= ContractString0 of
+ true ->
+ %% The only difference is in record fields containing 'undefined' or not.
+ no_warning;
+ false ->
+ ContractString = contract_to_string(Contract),
+ {Tag, Msg} =
+ case erl_types:t_is_subtype(CSig, Sig) of
+ true ->
+ {?WARN_CONTRACT_SUBTYPE,
+ {contract_subtype, [M, F, A, ContractString, SigString]}};
+ false ->
+ case erl_types:t_is_subtype(Sig, CSig) of
+ true ->
+ {?WARN_CONTRACT_SUPERTYPE,
+ {contract_supertype, [M, F, A, ContractString, SigString]}};
+ false ->
+ {?WARN_CONTRACT_NOT_EQUAL,
+ {contract_diff, [M, F, A, ContractString, SigString]}}
+ end
+ end,
+ {warning, {Tag, FileLine, Msg}}
+ end.
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
new file mode 100644
index 0000000000..178321ea18
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -0,0 +1,3468 @@
+%% -*- erlang-indent-level: 2 -*-
+%%--------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_dataflow.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 19 Apr 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+
+-module(dialyzer_dataflow).
+
+-export([get_fun_types/4, get_warnings/5, format_args/3]).
+
+-export([state__add_warning/2, state__cleanup/1,
+ state__get_callgraph/1, state__get_races/1,
+ state__get_records/1, state__put_callgraph/2,
+ state__put_races/2, state__records_only/1]).
+
+%% Debug and test interfaces.
+-export([get_top_level_signatures/2, pp/1]).
+
+-include("dialyzer.hrl").
+
+-import(erl_types,
+ [any_none/1, t_any/0, t_atom/0, t_atom/1, t_atom_vals/1,
+ t_binary/0, t_boolean/0,
+ t_bitstr/0, t_bitstr/2, t_bitstr_concat/1, t_bitstr_match/2,
+ t_cons/0, t_cons/2, t_cons_hd/1, t_cons_tl/1, t_contains_opaque/1,
+ t_find_opaque_mismatch/2, t_float/0, t_from_range/2, t_from_term/1,
+ t_fun/0, t_fun/2, t_fun_args/1, t_fun_range/1,
+ t_inf/2, t_inf/3, t_inf_lists/2, t_inf_lists/3,
+ t_integer/0, t_integers/1,
+ t_is_any/1, t_is_atom/1, t_is_atom/2, t_is_boolean/1, t_is_equal/2,
+ t_is_integer/1, t_is_nil/1, t_is_none/1, t_is_none_or_unit/1,
+ t_is_number/1, t_is_reference/1, t_is_pid/1, t_is_port/1,
+ t_is_subtype/2, t_is_unit/1,
+ t_limit/2, t_list/0, t_maybe_improper_list/0, t_module/0,
+ t_none/0, t_non_neg_integer/0, t_number/0, t_number_vals/1,
+ t_opaque_match_atom/2, t_opaque_match_record/2,
+ t_opaque_matching_structure/2,
+ t_pid/0, t_port/0, t_product/1, t_reference/0,
+ t_sup/1, t_sup/2, t_subtract/2, t_to_string/2, t_to_tlist/1,
+ t_tuple/0, t_tuple/1, t_tuple_args/1, t_tuple_subtypes/1,
+ t_unit/0, t_unopaque/1]).
+
+%%-define(DEBUG, true).
+%%-define(DEBUG_PP, true).
+%%-define(DEBUG_TIME, true).
+%%-define(DOT, true).
+
+-ifdef(DEBUG).
+-import(erl_types, [t_to_string/1]).
+-define(debug(S_, L_), io:format(S_, L_)).
+-else.
+-define(debug(S_, L_), ok).
+-endif.
+
+%%-define(debug1(S_, L_), io:format(S_, L_)).
+%%-define(debug1(S_, L_), ok).
+
+%%--------------------------------------------------------------------
+
+-define(no_arg, no_arg).
+
+-define(TYPE_LIMIT, 3).
+
+-record(state, {callgraph :: dialyzer_callgraph:callgraph(),
+ envs :: dict(),
+ fun_tab :: dict(),
+ plt :: dialyzer_plt:plt(),
+ opaques :: [erl_types:erl_type()],
+ races :: dialyzer_races:races(),
+ records :: dict(),
+ tree_map :: dict(),
+ warning_mode = false :: boolean(),
+ warnings = [] :: [dial_warning()],
+ work :: {[_], [_], set()}}).
+
+%% Exported Types
+
+-type state() :: #state{}.
+
+%%--------------------------------------------------------------------
+
+-spec get_warnings(cerl:c_module(), dialyzer_plt:plt(),
+ dialyzer_callgraph:callgraph(), dict(), set()) ->
+ {[dial_warning()], dict(), dict(), [label()], [string()]}.
+
+get_warnings(Tree, Plt, Callgraph, Records, NoWarnUnused) ->
+ State1 = analyze_module(Tree, Plt, Callgraph, Records, true),
+ State2 = find_mismatched_record_patterns(Tree, State1),
+ State3 =
+ state__renew_warnings(state__get_warnings(State2, NoWarnUnused), State2),
+ State4 = state__get_race_warnings(State3),
+ Callgraph1 = State2#state.callgraph,
+ {State4#state.warnings, state__all_fun_types(State4),
+ dialyzer_callgraph:get_race_code(Callgraph1),
+ dialyzer_callgraph:get_public_tables(Callgraph1),
+ dialyzer_callgraph:get_named_tables(Callgraph1)}.
+
+-spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(),
+ dialyzer_callgraph:callgraph(), dict()) ->
+ {dict(), dict(), [label()], [string()]}.
+
+get_fun_types(Tree, Plt, Callgraph, Records) ->
+ State = analyze_module(Tree, Plt, Callgraph, Records, false),
+ Callgraph1 = State#state.callgraph,
+ {state__all_fun_types(State),
+ dialyzer_callgraph:get_race_code(Callgraph1),
+ dialyzer_callgraph:get_public_tables(Callgraph1),
+ dialyzer_callgraph:get_named_tables(Callgraph1)}.
+
+%%--------------------------------------------------------------------
+
+-spec pp(file:filename()) -> 'ok'.
+
+pp(File) ->
+ {ok, Code} = dialyzer_utils:get_core_from_src(File, [no_copt]),
+ Plt = get_def_plt(),
+ AnnTree = annotate_module(Code, Plt),
+ io:put_chars(cerl_prettypr:format(AnnTree, [{hook, cerl_typean:pp_hook()}])),
+ io:nl().
+
+%%--------------------------------------------------------------------
+%% This is used in the testsuite.
+
+-spec get_top_level_signatures(cerl:c_module(), dict()) ->
+ [{{atom(), arity()}, erl_types:erl_type()}].
+
+get_top_level_signatures(Code, Records) ->
+ {Tree, _} = cerl_trees:label(cerl:from_records(Code)),
+ Callgraph0 = dialyzer_callgraph:new(),
+ Callgraph1 = dialyzer_callgraph:scan_core_tree(Tree, Callgraph0),
+ {Callgraph2, _} = dialyzer_callgraph:remove_external(Callgraph1),
+ Callgraph = dialyzer_callgraph:finalize(Callgraph2),
+ to_dot(Callgraph),
+ Plt = get_def_plt(),
+ FunTypes = get_fun_types(Tree, Plt, Callgraph, Records),
+ FunTypes1 = lists:foldl(fun({V, F}, Acc) ->
+ Label = get_label(F),
+ case dict:find(Label, Acc) of
+ error ->
+ Arity = cerl:fname_arity(V),
+ Type = t_fun(lists:duplicate(Arity,
+ t_none()),
+ t_none()),
+ dict:store(Label, Type, Acc);
+ {ok, _} -> Acc
+ end
+ end, FunTypes, cerl:module_defs(Tree)),
+ dialyzer_callgraph:delete(Callgraph),
+ Sigs = [{{cerl:fname_id(V), cerl:fname_arity(V)},
+ dict:fetch(get_label(F), FunTypes1)}
+ || {V, F} <- cerl:module_defs(Tree)],
+ ordsets:from_list(Sigs).
+
+get_def_plt() ->
+ try
+ dialyzer_plt:from_file(dialyzer_plt:get_default_plt())
+ catch
+ throw:{dialyzer_error, _} -> dialyzer_plt:new()
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Annotate all top level funs.
+%%%
+%%% ===========================================================================
+
+annotate_module(Code, Plt) ->
+ {Tree, _} = cerl_trees:label(cerl:from_records(Code)),
+ Callgraph0 = dialyzer_callgraph:new(),
+ Callgraph1 = dialyzer_callgraph:scan_core_tree(Tree, Callgraph0),
+ {Callgraph2, _} = dialyzer_callgraph:remove_external(Callgraph1),
+ Callgraph = dialyzer_callgraph:finalize(Callgraph2),
+ State = analyze_module(Tree, Plt, Callgraph),
+ Res = annotate(Tree, State),
+ dialyzer_callgraph:delete(Callgraph),
+ Res.
+
+annotate(Tree, State) ->
+ case cerl:subtrees(Tree) of
+ [] -> set_type(Tree, State);
+ List ->
+ NewSubTrees = [[annotate(Subtree, State) || Subtree <- Group]
+ || Group <- List],
+ NewTree = cerl:update_tree(Tree, NewSubTrees),
+ set_type(NewTree, State)
+ end.
+
+set_type(Tree, State) ->
+ case cerl:type(Tree) of
+ 'fun' ->
+ Type = state__fun_type(Tree, State),
+ case t_is_any(Type) of
+ true ->
+ cerl:set_ann(Tree, delete_ann(typesig, cerl:get_ann(Tree)));
+ false ->
+ cerl:set_ann(Tree, append_ann(typesig, Type, cerl:get_ann(Tree)))
+ end;
+ apply ->
+ case state__find_apply_return(Tree, State) of
+ unknown -> Tree;
+ ReturnType ->
+ case t_is_any(ReturnType) of
+ true ->
+ cerl:set_ann(Tree, delete_ann(type, cerl:get_ann(Tree)));
+ false ->
+ cerl:set_ann(Tree, append_ann(type, ReturnType,
+ cerl:get_ann(Tree)))
+ end
+ end;
+ _ ->
+ Tree
+ end.
+
+append_ann(Tag, Val, [X | Xs]) ->
+ if tuple_size(X) >= 1, element(1, X) =:= Tag ->
+ append_ann(Tag, Val, Xs);
+ true ->
+ [X | append_ann(Tag, Val, Xs)]
+ end;
+append_ann(Tag, Val, []) ->
+ [{Tag, Val}].
+
+delete_ann(Tag, [X | Xs]) ->
+ if tuple_size(X) >= 1, element(1, X) =:= Tag ->
+ delete_ann(Tag, Xs);
+ true ->
+ [X | delete_ann(Tag, Xs)]
+ end;
+delete_ann(_, []) ->
+ [].
+
+%%% ===========================================================================
+%%%
+%%% The analysis.
+%%%
+%%% ===========================================================================
+
+analyze_module(Tree, Plt, Callgraph) ->
+ analyze_module(Tree, Plt, Callgraph, dict:new(), false).
+
+analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
+ debug_pp(Tree, false),
+ Module = cerl:atom_val(cerl:module_name(Tree)),
+ TopFun = cerl:ann_c_fun([{label, top}], [], Tree),
+ State =
+ state__new(dialyzer_callgraph:race_code_new(Callgraph),
+ TopFun, Plt, Module, Records),
+ State1 = state__race_analysis(not GetWarnings, State),
+ State2 = analyze_loop(State1),
+ RaceCode = dialyzer_callgraph:get_race_code(Callgraph),
+ Callgraph1 = State2#state.callgraph,
+ RaceCode1 = dialyzer_callgraph:get_race_code(Callgraph1),
+ case GetWarnings of
+ true ->
+ State3 = state__set_warning_mode(State2),
+ State4 = analyze_loop(State3),
+ State5 = state__restore_race_code(RaceCode, State4),
+ dialyzer_races:race(State5);
+ false ->
+ state__restore_race_code(
+ dict:merge(fun (_K, V1, _V2) -> V1 end,
+ RaceCode, RaceCode1), State2)
+ end.
+
+analyze_loop(#state{callgraph = Callgraph, races = Races} = State) ->
+ case state__get_work(State) of
+ none -> state__clean_not_called(State);
+ {Fun, NewState} ->
+ ArgTypes = state__get_args(Fun, NewState),
+ case any_none(ArgTypes) of
+ true ->
+ ?debug("Not handling1 ~w: ~s\n",
+ [state__lookup_name(get_label(Fun), State),
+ t_to_string(t_product(ArgTypes))]),
+ analyze_loop(NewState);
+ false ->
+ case state__fun_env(Fun, NewState) of
+ none ->
+ ?debug("Not handling2 ~w: ~s\n",
+ [state__lookup_name(get_label(Fun), State),
+ t_to_string(t_product(ArgTypes))]),
+ analyze_loop(NewState);
+ Map ->
+ ?debug("Handling fun ~p: ~s\n",
+ [state__lookup_name(get_label(Fun), State),
+ t_to_string(state__fun_type(Fun, NewState))]),
+ NewState1 = state__mark_fun_as_handled(NewState, Fun),
+ Vars = cerl:fun_vars(Fun),
+ Map1 = enter_type_lists(Vars, ArgTypes, Map),
+ Body = cerl:fun_body(Fun),
+ FunLabel = get_label(Fun),
+ RaceDetection = dialyzer_callgraph:get_race_detection(Callgraph),
+ RaceAnalysis = dialyzer_races:get_race_analysis(Races),
+ NewState3 =
+ case RaceDetection andalso RaceAnalysis of
+ true ->
+ NewState2 = state__renew_curr_fun(
+ state__lookup_name(FunLabel, NewState1), FunLabel,
+ NewState1),
+ state__renew_race_list([], 0, NewState2);
+ false -> NewState1
+ end,
+ {NewState4, _Map2, BodyType} =
+ traverse(Body, Map1, NewState3),
+ ?debug("Done analyzing: ~w:~s\n",
+ [state__lookup_name(get_label(Fun), State),
+ t_to_string(t_fun(ArgTypes, BodyType))]),
+ NewState5 =
+ case RaceDetection andalso RaceAnalysis of
+ true ->
+ Races1 = NewState4#state.races,
+ Code = lists:reverse(dialyzer_races:get_race_list(Races1)),
+ Callgraph1 =
+ renew_code(dialyzer_races:get_curr_fun(Races1),
+ dialyzer_races:get_curr_fun_args(Races1),
+ Code,
+ state__warning_mode(NewState4),
+ NewState4#state.callgraph),
+ NewState4#state{callgraph = Callgraph1};
+ false -> NewState4
+ end,
+ NewState6 =
+ state__update_fun_entry(Fun, ArgTypes, BodyType, NewState5),
+ ?debug("done adding stuff for ~w\n",
+ [state__lookup_name(get_label(Fun), State)]),
+ analyze_loop(NewState6)
+ end
+ end
+ end.
+
+traverse(Tree, Map, State) ->
+ ?debug("Handling ~p\n", [cerl:type(Tree)]),
+ %%debug_pp_map(Map),
+ case cerl:type(Tree) of
+ alias ->
+ %% This only happens when checking for illegal record patterns
+ %% so the handling is a bit rudimentary.
+ traverse(cerl:alias_pat(Tree), Map, State);
+ apply ->
+ handle_apply(Tree, Map, State);
+ binary ->
+ Segs = cerl:binary_segments(Tree),
+ {State1, Map1, SegTypes} = traverse_list(Segs, Map, State),
+ {State1, Map1, t_bitstr_concat(SegTypes)};
+ bitstr ->
+ handle_bitstr(Tree, Map, State);
+ call ->
+ handle_call(Tree, Map, State);
+ 'case' ->
+ handle_case(Tree, Map, State);
+ 'catch' ->
+ {State1, _Map1, _} = traverse(cerl:catch_body(Tree), Map, State),
+ {State1, Map, t_any()};
+ cons ->
+ handle_cons(Tree, Map, State);
+ 'fun' ->
+ Type = state__fun_type(Tree, State),
+ case state__warning_mode(State) of
+ true -> {State, Map, Type};
+ false ->
+ State2 = state__add_work(get_label(Tree), State),
+ State3 = state__update_fun_env(Tree, Map, State2),
+ {State3, Map, Type}
+ end;
+ 'let' ->
+ handle_let(Tree, Map, State);
+ letrec ->
+ Defs = cerl:letrec_defs(Tree),
+ Body = cerl:letrec_body(Tree),
+      %% By not including the variables in scope we can ensure that we
+      %% get the current function type when using the variables.
+ FoldFun = fun({Var, Fun}, {AccState, AccMap}) ->
+ {NewAccState, NewAccMap0, FunType} =
+ traverse(Fun, AccMap, AccState),
+ NewAccMap = enter_type(Var, FunType, NewAccMap0),
+ {NewAccState, NewAccMap}
+ end,
+ {State1, Map1} = lists:foldl(FoldFun, {State, Map}, Defs),
+ traverse(Body, Map1, State1);
+ literal ->
+ %% This is needed for finding records
+ case cerl:unfold_literal(Tree) of
+ Tree ->
+ Type = literal_type(Tree),
+ NewType =
+ case erl_types:t_opaque_match_atom(Type, State#state.opaques) of
+ [Opaque] -> Opaque;
+ _ -> Type
+ end,
+ {State, Map, NewType};
+ NewTree -> traverse(NewTree, Map, State)
+ end;
+ module ->
+ handle_module(Tree, Map, State);
+ primop ->
+ Type =
+ case cerl:atom_val(cerl:primop_name(Tree)) of
+ match_fail -> t_none();
+ raise -> t_none();
+ bs_init_writable -> t_from_term(<<>>);
+ Other -> erlang:error({'Unsupported primop', Other})
+ end,
+ {State, Map, Type};
+ 'receive' ->
+ handle_receive(Tree, Map, State);
+ seq ->
+ Arg = cerl:seq_arg(Tree),
+ Body = cerl:seq_body(Tree),
+ {State1, Map1, ArgType} = SMA = traverse(Arg, Map, State),
+ case t_is_none_or_unit(ArgType) of
+ true ->
+ SMA;
+ false ->
+ State2 =
+ case (t_is_any(ArgType) orelse t_is_simple(ArgType)
+ orelse is_call_to_send(Arg)) of
+ true -> % do not warn in these cases
+ State1;
+ false ->
+ state__add_warning(State1, ?WARN_UNMATCHED_RETURN, Arg,
+ {unmatched_return,
+ [format_type(ArgType, State1)]})
+ end,
+ traverse(Body, Map1, State2)
+ end;
+ 'try' ->
+ handle_try(Tree, Map, State);
+ tuple ->
+ handle_tuple(Tree, Map, State);
+ values ->
+ Elements = cerl:values_es(Tree),
+ {State1, Map1, EsType} = traverse_list(Elements, Map, State),
+ Type = t_product(EsType),
+ {State1, Map1, Type};
+ var ->
+ ?debug("Looking up unknown variable: ~p\n", [Tree]),
+ case state__lookup_type_for_rec_var(Tree, State) of
+ error ->
+ LType = lookup_type(Tree, Map),
+ Opaques = State#state.opaques,
+ case t_opaque_match_record(LType, Opaques) of
+ [Opaque] -> {State, Map, Opaque};
+ _ ->
+ case t_opaque_match_atom(LType, Opaques) of
+ [Opaque] -> {State, Map, Opaque};
+ _ -> {State, Map, LType}
+ end
+ end;
+ {ok, Type} -> {State, Map, Type}
+ end;
+ Other ->
+ erlang:error({'Unsupported type', Other})
+ end.
+
+traverse_list(Trees, Map, State) ->
+ traverse_list(Trees, Map, State, []).
+
+traverse_list([Tree|Tail], Map, State, Acc) ->
+ {State1, Map1, Type} = traverse(Tree, Map, State),
+ traverse_list(Tail, Map1, State1, [Type|Acc]);
+traverse_list([], Map, State, Acc) ->
+ {State, Map, lists:reverse(Acc)}.
+
+%%________________________________________
+%%
+%% Special instructions
+%%
+
+handle_apply(Tree, Map, State) ->
+ Args = cerl:apply_args(Tree),
+ Op = cerl:apply_op(Tree),
+ {State1, Map1, ArgTypes} = traverse_list(Args, Map, State),
+ {State2, Map2, OpType} = traverse(Op, Map1, State1),
+ case any_none(ArgTypes) of
+ true ->
+ {State2, Map2, t_none()};
+ false ->
+ {CallSitesKnown, FunList} =
+ case state__lookup_call_site(Tree, State2) of
+ error -> {false, []};
+ {ok, [external]} -> {false, {}};
+ {ok, List} -> {true, List}
+ end,
+ case CallSitesKnown of
+ false ->
+ Arity = length(Args),
+ OpType1 = t_inf(OpType, t_fun(Arity, t_any())),
+ case t_is_none(OpType1) of
+ true ->
+ Msg = {fun_app_no_fun,
+ [format_cerl(Op), format_type(OpType, State2), Arity]},
+ State3 = state__add_warning(State2, ?WARN_FAILING_CALL,
+ Tree, Msg),
+ {State3, Map2, t_none()};
+ false ->
+ NewArgs = t_inf_lists(ArgTypes, t_fun_args(OpType1)),
+ case any_none(NewArgs) of
+ true ->
+ Msg = {fun_app_args,
+ [format_args(Args, ArgTypes, State),
+ format_type(OpType, State)]},
+ State3 = state__add_warning(State2, ?WARN_FAILING_CALL,
+ Tree, Msg),
+ {State3, enter_type(Op, OpType1, Map2), t_none()};
+ false ->
+ Map3 = enter_type_lists(Args, NewArgs, Map2),
+ {State2, enter_type(Op, OpType1, Map3), t_fun_range(OpType1)}
+ end
+ end;
+ true ->
+ FunInfoList = [{local, state__fun_info(Fun, State)}
+ || Fun <- FunList],
+ handle_apply_or_call(FunInfoList, Args, ArgTypes, Map2, Tree, State1)
+ end
+ end.
+
+handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State) ->
+ None = t_none(),
+ handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State,
+ [None || _ <- ArgTypes], None).
+
+handle_apply_or_call([{local, external}|Left], Args, ArgTypes, Map, Tree, State,
+ _AccArgTypes, _AccRet) ->
+ handle_apply_or_call(Left, Args, ArgTypes, Map, Tree, State,
+ ArgTypes, t_any());
+handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
+ Args, ArgTypes, Map, Tree,
+ #state{callgraph = Callgraph, races = Races,
+ opaques = Opaques} = State,
+ AccArgTypes, AccRet) ->
+ Any = t_any(),
+ AnyArgs = [Any || _ <- Args],
+ GenSig = {AnyArgs, fun(_) -> t_any() end},
+ {CArgs, CRange} =
+ case Contr of
+ {value, #contract{args = As} = C} ->
+ {As, fun(FunArgs) ->
+ dialyzer_contracts:get_contract_return(C, FunArgs)
+ end};
+ none -> GenSig
+ end,
+ {BifArgs, BifRange} =
+ case TypeOfApply of
+ remote ->
+ {M, F, A} = Fun,
+ case erl_bif_types:is_known(M, F, A) of
+ true ->
+ BArgs = erl_bif_types:arg_types(M, F, A),
+ BRange =
+ fun(FunArgs) ->
+ ArgPos = erl_bif_types:structure_inspecting_args(M, F, A),
+ NewFunArgs =
+ case ArgPos =:= [] of
+ true -> FunArgs;
+ false -> % some positions need to be un-opaqued
+ N = length(FunArgs),
+ PFs = lists:zip(lists:seq(1, N), FunArgs),
+ [case ordsets:is_element(P, ArgPos) of
+ true -> erl_types:t_unopaque(FArg, Opaques);
+ false -> FArg
+ end || {P, FArg} <- PFs]
+ end,
+ erl_bif_types:type(M, F, A, NewFunArgs)
+ end,
+ {BArgs, BRange};
+ false -> GenSig
+ end;
+ local -> GenSig
+ end,
+ {SigArgs, SigRange} =
+ %% if there is hard-coded or contract information with opaque types,
+ %% the checking for possible type violations needs to take place w.r.t.
+ %% this information and not w.r.t. the structure-based success typing.
+ case prefer_opaque_types(CArgs, BifArgs) of
+ true -> {AnyArgs, t_any()}; % effectively forgets the success typing
+ false ->
+ case Sig of
+ {value, {SR, SA}} -> {SA, SR};
+ none -> {AnyArgs, t_any()}
+ end
+ end,
+ NewArgsSig = t_inf_lists(SigArgs, ArgTypes),
+ NewArgsContract = t_inf_lists(CArgs, ArgTypes),
+ NewArgsBif = t_inf_lists(BifArgs, ArgTypes),
+ NewArgTypes0 = t_inf_lists(NewArgsSig, NewArgsContract),
+ NewArgTypes = t_inf_lists(NewArgTypes0, NewArgsBif),
+ BifRet = BifRange(NewArgTypes),
+ ContrRet = CRange(NewArgTypes),
+ Mode = case t_contains_opaque(ContrRet) orelse t_contains_opaque(BifRet) of
+ true -> opaque;
+ false -> structured
+ end,
+ RetWithoutLocal = t_inf(t_inf(ContrRet, BifRet, Mode), SigRange, Mode),
+ ?debug("--------------------------------------------------------\n", []),
+ ?debug("Fun: ~p\n", [Fun]),
+ ?debug("Args: ~s\n", [erl_types:t_to_string(t_product(ArgTypes))]),
+ ?debug("NewArgsSig: ~s\n", [erl_types:t_to_string(t_product(NewArgsSig))]),
+ ?debug("NewArgsContract: ~s\n",
+ [erl_types:t_to_string(t_product(NewArgsContract))]),
+ ?debug("NewArgsBif: ~s\n", [erl_types:t_to_string(t_product(NewArgsBif))]),
+ ?debug("NewArgTypes: ~s\n", [erl_types:t_to_string(t_product(NewArgTypes))]),
+ ?debug("RetWithoutLocal: ~s\n", [erl_types:t_to_string(RetWithoutLocal)]),
+ ?debug("BifRet: ~s\n", [erl_types:t_to_string(BifRange(NewArgTypes))]),
+ ?debug("ContrRet: ~s\n", [erl_types:t_to_string(CRange(NewArgTypes))]),
+ ?debug("SigRet: ~s\n", [erl_types:t_to_string(SigRange)]),
+ State1 =
+ case dialyzer_callgraph:get_race_detection(Callgraph) andalso
+ dialyzer_races:get_race_analysis(Races) of
+ true ->
+ Ann = cerl:get_ann(Tree),
+ File = get_file(Ann),
+ Line = abs(get_line(Ann)),
+ dialyzer_races:store_race_call(Fun, ArgTypes, Args, {File, Line},
+ State);
+ false -> State
+ end,
+ FailedConj = any_none([RetWithoutLocal|NewArgTypes]),
+ IsFailBif = t_is_none(BifRange(BifArgs)),
+ IsFailSig = t_is_none(SigRange),
+ State2 =
+ case FailedConj andalso not (IsFailBif orelse IsFailSig) of
+ true ->
+ FailedSig = any_none(NewArgsSig),
+ FailedContract = any_none([CRange(NewArgsContract)|NewArgsContract]),
+ FailedBif = any_none([BifRange(NewArgsBif)|NewArgsBif]),
+ InfSig = t_inf(t_fun(SigArgs, SigRange),
+ t_fun(BifArgs, BifRange(BifArgs))),
+ FailReason = apply_fail_reason(FailedSig, FailedBif, FailedContract),
+ Msg = get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes, InfSig,
+ Contr, CArgs, State1, FailReason),
+ WarnType = case Msg of
+ {call, _} -> ?WARN_FAILING_CALL;
+ {apply, _} -> ?WARN_FAILING_CALL;
+ {call_with_opaque, _} -> ?WARN_OPAQUE;
+ {call_without_opaque, _} -> ?WARN_OPAQUE;
+ {opaque_type_test, _} -> ?WARN_OPAQUE
+ end,
+ state__add_warning(State1, WarnType, Tree, Msg);
+ false -> State1
+ end,
+ State3 =
+ case TypeOfApply of
+ local ->
+ case state__is_escaping(Fun, State2) of
+ true -> State2;
+ false ->
+ ForwardArgs = [t_limit(X, ?TYPE_LIMIT) || X <- ArgTypes],
+ forward_args(Fun, ForwardArgs, State2)
+ end;
+ remote ->
+ add_bif_warnings(Fun, NewArgTypes, Tree, State2)
+ end,
+ NewAccArgTypes =
+ case FailedConj of
+ true -> AccArgTypes;
+ false -> [t_sup(X, Y) || {X, Y} <- lists:zip(NewArgTypes, AccArgTypes)]
+ end,
+ NewAccRet = t_sup(AccRet, t_inf(RetWithoutLocal, LocalRet, opaque)),
+ handle_apply_or_call(Left, Args, ArgTypes, Map, Tree,
+ State3, NewAccArgTypes, NewAccRet);
+handle_apply_or_call([], Args, _ArgTypes, Map, _Tree, State,
+ AccArgTypes, AccRet) ->
+ NewMap = enter_type_lists(Args, AccArgTypes, Map),
+ {State, NewMap, AccRet}.
+
+apply_fail_reason(FailedSig, FailedBif, FailedContract) ->
+ if
+ (FailedSig orelse FailedBif) andalso (not FailedContract) -> only_sig;
+ FailedContract andalso (not (FailedSig orelse FailedBif)) -> only_contract;
+ true -> both
+ end.
+
+get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
+ Sig, Contract, ContrArgs, State, FailReason) ->
+ ArgStrings = format_args(Args, ArgTypes, State),
+ ContractInfo =
+ case Contract of
+ {value, #contract{} = C} ->
+ {dialyzer_contracts:is_overloaded(C),
+ dialyzer_contracts:contract_to_string(C)};
+ none -> {false, none}
+ end,
+ EnumArgTypes =
+ case NewArgTypes of
+ [] -> [];
+ _ -> lists:zip(lists:seq(1, length(NewArgTypes)), NewArgTypes)
+ end,
+ ArgNs = [Arg || {Arg, Type} <- EnumArgTypes, t_is_none(Type)],
+ case state__lookup_name(Fun, State) of
+ {M, F, _A} ->
+ case is_opaque_type_test_problem(Fun, NewArgTypes, State) of
+ true ->
+ [Opaque] = NewArgTypes,
+ {opaque_type_test, [atom_to_list(F), erl_types:t_to_string(Opaque)]};
+ false ->
+ SigArgs = t_fun_args(Sig),
+ case is_opaque_related_problem(ArgNs, ArgTypes) of
+ true -> %% an opaque term is used where a structured term is expected
+ ExpectedArgs =
+ case FailReason of
+ only_sig -> SigArgs;
+ _ -> ContrArgs
+ end,
+ {call_with_opaque, [M, F, ArgStrings, ArgNs, ExpectedArgs]};
+ false ->
+ case is_opaque_related_problem(ArgNs, SigArgs) orelse
+ is_opaque_related_problem(ArgNs, ContrArgs) of
+ true -> %% a structured term is used where an opaque is expected
+ ExpectedTriples =
+ case FailReason of
+ only_sig -> expected_arg_triples(ArgNs, SigArgs, State);
+ _ -> expected_arg_triples(ArgNs, ContrArgs, State)
+ end,
+ {call_without_opaque, [M, F, ArgStrings, ExpectedTriples]};
+ false -> %% there is a structured term clash in some argument
+ {call, [M, F, ArgStrings,
+ ArgNs, FailReason,
+ format_sig_args(Sig, State),
+ format_type(t_fun_range(Sig), State),
+ ContractInfo]}
+ end
+ end
+ end;
+ Label when is_integer(Label) ->
+ {apply, [ArgStrings,
+ ArgNs, FailReason,
+ format_sig_args(Sig, State),
+ format_type(t_fun_range(Sig), State),
+ ContractInfo]}
+ end.
+
+%% returns 'true' if we are running with opaque types on (not checked yet)
+%% and there is either a contract or hard-coded type information involving
+%% opaque types
+%% TODO: check that we are running with opaque types
+%% TODO: check the return type also
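+%% Sketch (hypothetical opaque type): if a contract argument is opaque,
+%% e.g. some my_db:handle(), this returns 'true' and the caller then
+%% effectively forgets the structure-based success typing, checking the
+%% arguments against the contract/BIF information instead.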
+prefer_opaque_types(CArgs, BifArgs) ->
+ t_contains_opaque(t_product(CArgs))
+ orelse t_contains_opaque(t_product(BifArgs)).
+
+is_opaque_related_problem(ArgNs, ArgTypes) ->
+ Fun = fun (N) -> erl_types:t_contains_opaque(lists:nth(N, ArgTypes)) end,
+ ArgNs =/= [] andalso lists:all(Fun, ArgNs).
+
+is_opaque_type_test_problem(Fun, ArgTypes, State) ->
+ case Fun of
+ {erlang, FN, 1} when FN =:= is_atom; FN =:= is_boolean;
+ FN =:= is_binary; FN =:= is_bitstring;
+ FN =:= is_float; FN =:= is_function;
+ FN =:= is_integer; FN =:= is_list;
+ FN =:= is_number; FN =:= is_pid; FN =:= is_port;
+ FN =:= is_reference; FN =:= is_tuple ->
+ [Type] = ArgTypes,
+ erl_types:t_is_opaque(Type) andalso
+ not lists:member(Type, State#state.opaques);
+ _ -> false
+ end.
+
+expected_arg_triples(ArgNs, ArgTypes, State) ->
+ [begin
+ Arg = lists:nth(N, ArgTypes),
+ {N, Arg, format_type(Arg, State)}
+ end || N <- ArgNs].
+
+add_bif_warnings({erlang, Op, 2}, [T1, T2] = Ts, Tree, State)
+ when Op =:= '=:='; Op =:= '==' ->
+ Inf = t_inf(T1, T2),
+ case t_is_none(Inf) andalso (not any_none(Ts))
+ andalso (not is_int_float_eq_comp(T1, Op, T2)) of
+ true ->
+ Args = case erl_types:t_is_opaque(T1) of
+ true -> [format_type(T2, State), Op, format_type(T1, State)];
+ false -> [format_type(T1, State), Op, format_type(T2, State)]
+ end,
+ case any_opaque(Ts) of
+ true ->
+ state__add_warning(State, ?WARN_OPAQUE, Tree, {opaque_eq, Args});
+ false ->
+ state__add_warning(State, ?WARN_MATCHING, Tree, {exact_eq, Args})
+ end;
+ false ->
+ State
+ end;
+add_bif_warnings({erlang, Op, 2}, [T1, T2] = Ts, Tree, State)
+ when Op =:= '=/='; Op =:= '/=' ->
+ Inf = t_inf(T1, T2),
+ case t_is_none(Inf) andalso (not any_none(Ts))
+ andalso (not is_int_float_eq_comp(T1, Op, T2)) andalso any_opaque(Ts) of
+ true ->
+ Args = case erl_types:t_is_opaque(T1) of
+ true -> [format_type(T2, State), Op, format_type(T1, State)];
+ false -> [format_type(T1, State), Op, format_type(T2, State)]
+ end,
+ state__add_warning(State, ?WARN_OPAQUE, Tree, {opaque_neq, Args});
+ false ->
+ State
+ end;
+add_bif_warnings(_, _, _, State) ->
+ State.
+
+is_int_float_eq_comp(T1, Op, T2) ->
+ (Op =:= '==' orelse Op =:= '/=') andalso
+ ((erl_types:t_is_float(T1) andalso erl_types:t_is_integer(T2)) orelse
+ (erl_types:t_is_integer(T1) andalso erl_types:t_is_float(T2))).
+
+%%----------------------------------------
+
+handle_bitstr(Tree, Map, State) ->
+ %% Construction of binaries.
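+  %% Sketch: for a segment such as <<X:4/integer-unit:2>> with a single
+  %% known size value, the segment type is t_bitstr(0, 4*2), i.e. exactly
+  %% 8 bits; with an unknown size only the unit and a minimum size are
+  %% kept.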
+ Size = cerl:bitstr_size(Tree),
+ Val = cerl:bitstr_val(Tree),
+ BitstrType = cerl:concrete(cerl:bitstr_type(Tree)),
+ {State1, Map1, SizeType0} = traverse(Size, Map, State),
+ {State2, Map2, ValType0} = traverse(Val, Map1, State1),
+ case cerl:bitstr_bitsize(Tree) of
+ BitSz when BitSz =:= all orelse BitSz =:= utf ->
+ ValType =
+ case BitSz of
+ all ->
+ true = (BitstrType =:= binary),
+ t_inf(ValType0, t_bitstr());
+ utf ->
+ true = lists:member(BitstrType, [utf8, utf16, utf32]),
+ t_inf(ValType0, t_integer())
+ end,
+ Map3 = enter_type(Val, ValType, Map2),
+ case t_is_none(ValType) of
+ true ->
+ Msg = {bin_construction, ["value",
+ format_cerl(Val), format_cerl(Tree),
+ format_type(ValType0, State2)]},
+ State3 = state__add_warning(State2, ?WARN_BIN_CONSTRUCTION, Val, Msg),
+ {State3, Map3, t_none()};
+ false ->
+ {State2, Map3, t_bitstr()}
+ end;
+ BitSz when is_integer(BitSz) orelse BitSz =:= any ->
+ SizeType = t_inf(SizeType0, t_non_neg_integer()),
+ ValType =
+ case BitstrType of
+ binary -> t_inf(ValType0, t_bitstr());
+ float -> t_inf(ValType0, t_number());
+ integer -> t_inf(ValType0, t_integer())
+ end,
+ case any_none([SizeType, ValType]) of
+ true ->
+ {Msg, Offending} =
+ case t_is_none(SizeType) of
+ true ->
+ {{bin_construction,
+ ["size", format_cerl(Size), format_cerl(Tree),
+ format_type(SizeType0, State2)]},
+ Size};
+ false ->
+ {{bin_construction,
+ ["value", format_cerl(Val), format_cerl(Tree),
+ format_type(ValType0, State2)]},
+ Val}
+ end,
+ State3 = state__add_warning(State2, ?WARN_BIN_CONSTRUCTION,
+ Offending, Msg),
+ {State3, Map2, t_none()};
+ false ->
+ UnitVal = cerl:concrete(cerl:bitstr_unit(Tree)),
+ Type =
+ case t_number_vals(SizeType) of
+ [OneSize] -> t_bitstr(0, OneSize * UnitVal);
+ _ ->
+ MinSize = erl_types:number_min(SizeType),
+ t_bitstr(UnitVal, UnitVal * MinSize)
+ end,
+ Map3 = enter_type_lists([Val, Size, Tree],
+ [ValType, SizeType, Type], Map2),
+ {State2, Map3, Type}
+ end
+ end.
+
+%%----------------------------------------
+
+handle_call(Tree, Map, State) ->
+ M = cerl:call_module(Tree),
+ F = cerl:call_name(Tree),
+ Args = cerl:call_args(Tree),
+ MFAList = [M, F|Args],
+ {State1, Map1, [MType0, FType0|As]} = traverse_list(MFAList, Map, State),
+ %% Module and function names should be treated as *atoms* even if
+ %% they happen to be identical to an atom which is also involved in
+ %% the definition of an opaque data type
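+  %% (Sketch: if traversing M yielded an opaque type whose structure is
+  %% an atom, t_unopaque/1 recovers the atom so that the inf with
+  %% module() below does not become none() for a perfectly valid call.)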
+ MType = t_inf(t_module(), t_unopaque(MType0)),
+ FType = t_inf(t_atom(), t_unopaque(FType0)),
+ Map2 = enter_type_lists([M, F], [MType, FType], Map1),
+ case any_none([MType, FType|As]) of
+ true ->
+ State2 =
+ case t_is_none(MType) andalso (not t_is_none(MType0)) of
+ true -> % This is a problem we just detected; not a known one
+ MS = format_cerl(M),
+ Msg = {app_call, [MS, format_cerl(F),
+ format_args(Args, As, State1),
+ MS, format_type(t_module(), State1),
+ format_type(MType0, State1)]},
+ state__add_warning(State1, ?WARN_FAILING_CALL, Tree, Msg);
+ false ->
+ case t_is_none(FType) andalso (not t_is_none(FType0)) of
+ true ->
+ FS = format_cerl(F),
+ Msg = {app_call, [format_cerl(M), FS,
+ format_args(Args, As, State1),
+ FS, format_type(t_atom(), State1),
+ format_type(FType0, State1)]},
+ state__add_warning(State1, ?WARN_FAILING_CALL, Tree, Msg);
+ false -> State1
+ end
+ end,
+ {State2, Map2, t_none()};
+ false ->
+ %% XXX: Consider doing this for all combinations of MF
+ case {t_atom_vals(MType), t_atom_vals(FType)} of
+ {[MAtom], [FAtom]} ->
+ FunInfo = [{remote, state__fun_info({MAtom, FAtom, length(Args)},
+ State1)}],
+ handle_apply_or_call(FunInfo, Args, As, Map2, Tree, State1);
+ {_MAtoms, _FAtoms} ->
+ {State1, Map2, t_any()}
+ end
+ end.
+
+%%----------------------------------------
+
+handle_case(Tree, Map, #state{callgraph = Callgraph} = State) ->
+ Arg = cerl:case_arg(Tree),
+ Clauses = filter_match_fail(cerl:case_clauses(Tree)),
+ {State1, Map1, ArgType} = SMA = traverse(Arg, Map, State),
+ case t_is_none_or_unit(ArgType) of
+ true -> SMA;
+ false ->
+ Races = State1#state.races,
+ State2 =
+ case dialyzer_callgraph:get_race_detection(Callgraph) andalso
+ dialyzer_races:get_race_analysis(Races) of
+ true ->
+ RaceList = dialyzer_races:get_race_list(Races),
+ RaceListSize = dialyzer_races:get_race_list_size(Races),
+ state__renew_race_list([beg_case|RaceList],
+ RaceListSize + 1, State1);
+ false -> State1
+ end,
+ {MapList, State3, Type} =
+ handle_clauses(Clauses, Arg, ArgType, ArgType, State2,
+ [], Map1, [], []),
+ Map2 = join_maps(MapList, Map1),
+ debug_pp_map(Map2),
+ {State3, Map2, Type}
+ end.
+
+%%----------------------------------------
+
+handle_cons(Tree, Map, State) ->
+ Hd = cerl:cons_hd(Tree),
+ Tl = cerl:cons_tl(Tree),
+ {State1, Map1, HdType} = traverse(Hd, Map, State),
+ {State2, Map2, TlType} = traverse(Tl, Map1, State1),
+ State3 =
+ case t_is_none(t_inf(TlType, t_list())) of
+ true ->
+ Msg = {improper_list_constr, [format_type(TlType, State2)]},
+ state__add_warning(State2, ?WARN_NON_PROPER_LIST, Tree, Msg);
+ false ->
+ State2
+ end,
+ Type = t_cons(HdType, TlType),
+ {State3, Map2, Type}.
+
+%%----------------------------------------
+
+handle_let(Tree, Map, #state{callgraph = Callgraph, races = Races} = State) ->
+ RaceAnalysis = dialyzer_races:get_race_analysis(Races),
+ RaceDetection = dialyzer_callgraph:get_race_detection(Callgraph),
+ Arg = cerl:let_arg(Tree),
+ Vars = cerl:let_vars(Tree),
+ {Map0, State0} =
+ case cerl:is_c_var(Arg) of
+ true ->
+ [Var] = Vars,
+ {enter_subst(Var, Arg, Map),
+ case RaceDetection andalso RaceAnalysis of
+ true ->
+ RaceList = dialyzer_races:get_race_list(Races),
+ RaceListSize = dialyzer_races:get_race_list_size(Races),
+ state__renew_race_list(
+ [dialyzer_races:let_tag_new(Var, Arg)|RaceList],
+ RaceListSize + 1, State);
+ false -> State
+ end};
+ false -> {Map, State}
+ end,
+ Body = cerl:let_body(Tree),
+ {State1, Map1, ArgTypes} = SMA = traverse(Arg, Map0, State0),
+ Callgraph1 = State1#state.callgraph,
+ Callgraph2 =
+ case RaceDetection andalso RaceAnalysis andalso cerl:is_c_call(Arg) of
+ true ->
+ Mod = cerl:call_module(Arg),
+ Name = cerl:call_name(Arg),
+ case cerl:is_literal(Mod) andalso
+ cerl:concrete(Mod) =:= ets andalso
+ cerl:is_literal(Name) andalso
+ cerl:concrete(Name) =:= new of
+ true ->
+ NewTable = dialyzer_races:get_new_table(State1#state.races),
+ renew_public_tables(Vars, NewTable,
+ state__warning_mode(State1),
+ Callgraph1);
+ false -> Callgraph1
+ end;
+ false -> Callgraph1
+ end,
+ State2 = State1#state{callgraph = Callgraph2},
+ case t_is_none_or_unit(ArgTypes) of
+ true -> SMA;
+ false ->
+ Map2 = enter_type_lists(Vars, t_to_tlist(ArgTypes), Map1),
+ traverse(Body, Map2, State2)
+ end.
+
+%%----------------------------------------
+
+handle_module(Tree, Map, State) ->
+  %% By not including the variables in scope we can ensure that we
+  %% get the current function type when using the variables.
+ Defs = cerl:module_defs(Tree),
+ PartFun = fun({_Var, Fun}) ->
+ state__is_escaping(get_label(Fun), State)
+ end,
+ {Defs1, Defs2} = lists:partition(PartFun, Defs),
+ Letrec = cerl:c_letrec(Defs1, cerl:c_int(42)),
+ {State1, Map1, _FunTypes} = traverse(Letrec, Map, State),
+ %% Also add environments for the other top-level functions.
+ VarTypes = [{Var, state__fun_type(Fun, State1)} || {Var, Fun} <- Defs],
+ EnvMap = enter_type_list(VarTypes, Map),
+ FoldFun = fun({_Var, Fun}, AccState) ->
+ state__update_fun_env(Fun, EnvMap, AccState)
+ end,
+ State2 = lists:foldl(FoldFun, State1, Defs2),
+ {State2, Map1, t_any()}.
+
+%%----------------------------------------
+
+handle_receive(Tree, Map,
+ #state{callgraph = Callgraph, races = Races} = State) ->
+ Clauses = filter_match_fail(cerl:receive_clauses(Tree)),
+ Timeout = cerl:receive_timeout(Tree),
+ State1 =
+ case dialyzer_callgraph:get_race_detection(Callgraph) andalso
+ dialyzer_races:get_race_analysis(Races) of
+ true ->
+ RaceList = dialyzer_races:get_race_list(Races),
+ RaceListSize = dialyzer_races:get_race_list_size(Races),
+ state__renew_race_list([beg_case|RaceList],
+ RaceListSize + 1, State);
+ false -> State
+ end,
+ {MapList, State2, ReceiveType} =
+ handle_clauses(Clauses, ?no_arg, t_any(), t_any(), State1, [], Map,
+ [], []),
+ Map1 = join_maps(MapList, Map),
+ {State3, Map2, TimeoutType} = traverse(Timeout, Map1, State2),
+ case (t_is_atom(TimeoutType) andalso
+ (t_atom_vals(TimeoutType) =:= ['infinity'])) of
+ true ->
+ {State3, Map2, ReceiveType};
+ false ->
+ Action = cerl:receive_action(Tree),
+ {State4, Map3, ActionType} = traverse(Action, Map, State3),
+ Map4 = join_maps([Map3, Map1], Map),
+ Type = t_sup(ReceiveType, ActionType),
+ {State4, Map4, Type}
+ end.
+
+%%----------------------------------------
+
+handle_try(Tree, Map, State) ->
+ Arg = cerl:try_arg(Tree),
+ EVars = cerl:try_evars(Tree),
+ Vars = cerl:try_vars(Tree),
+ Body = cerl:try_body(Tree),
+ Handler = cerl:try_handler(Tree),
+ {State1, Map1, ArgType} = traverse(Arg, Map, State),
+ Map2 = mark_as_fresh(Vars, Map1),
+ {SuccState, SuccMap, SuccType} =
+ case bind_pat_vars(Vars, t_to_tlist(ArgType), [], Map2, State1) of
+ {error, _, _, _, _} ->
+ {State1, map__new(), t_none()};
+ {SuccMap1, VarTypes} ->
+ %% Try to bind the argument. Will only succeed if
+ %% it is a simple structured term.
+ SuccMap2 =
+ case bind_pat_vars_reverse([Arg], [t_product(VarTypes)], [],
+ SuccMap1, State1) of
+ {error, _, _, _, _} -> SuccMap1;
+ {SM, _} -> SM
+ end,
+ traverse(Body, SuccMap2, State1)
+ end,
+ ExcMap1 = mark_as_fresh(EVars, Map),
+ {State2, ExcMap2, HandlerType} = traverse(Handler, ExcMap1, SuccState),
+ TryType = t_sup(SuccType, HandlerType),
+ {State2, join_maps([ExcMap2, SuccMap], Map1), TryType}.
+
+%%----------------------------------------
+
+handle_tuple(Tree, Map, State) ->
+ Elements = cerl:tuple_es(Tree),
+ {State1, Map1, EsType} = traverse_list(Elements, Map, State),
+ TupleType = t_tuple(EsType),
+ case t_is_none(TupleType) of
+ true ->
+ {State1, Map1, t_none()};
+ false ->
+ %% Let's find out if this is a record or opaque construction.
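+      %% (Sketch: a construction {foo, X, Y}, where a record #foo{} with
+      %% two fields is known, is checked against the record prototype via
+      %% t_inf/2 and bind_pat_vars/5.)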
+ case Elements of
+ [Tag|Left] ->
+ case cerl:is_c_atom(Tag) of
+ true ->
+ TagVal = cerl:atom_val(Tag),
+ case t_opaque_match_record(TupleType, State1#state.opaques) of
+ [Opaque] ->
+ RecStruct = t_opaque_matching_structure(TupleType, Opaque),
+ RecFields = t_tuple_args(RecStruct),
+ case bind_pat_vars(Elements, RecFields, [], Map1, State1) of
+ {error, _, ErrorPat, ErrorType, _} ->
+ Msg = {record_constr,
+ [TagVal, format_patterns(ErrorPat),
+ format_type(ErrorType, State1)]},
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
+ Tree, Msg),
+ {State2, Map1, t_none()};
+ {Map2, _ETypes} ->
+ {State1, Map2, Opaque}
+ end;
+ _ ->
+ case state__lookup_record(TagVal, length(Left), State1) of
+ error -> {State1, Map1, TupleType};
+ {ok, Prototype} ->
+ %% io:format("In handle_tuple:\n Prototype = ~p\n", [Prototype]),
+ InfTupleType = t_inf(Prototype, TupleType),
+ %% io:format(" TupleType = ~p,\n Inf = ~p\n", [TupleType, InfTupleType]),
+ case t_is_none(InfTupleType) of
+ true ->
+ Msg = {record_constr,
+ [format_type(TupleType, State1), TagVal]},
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
+ Tree, Msg),
+ {State2, Map1, t_none()};
+ false ->
+ case bind_pat_vars(Elements, t_tuple_args(Prototype),
+ [], Map1, State1) of
+ {error, bind, ErrorPat, ErrorType, _} ->
+ %% io:format("error\n", []),
+ Msg = {record_constr,
+ [TagVal, format_patterns(ErrorPat),
+ format_type(ErrorType, State1)]},
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
+ Tree, Msg),
+ {State2, Map1, t_none()};
+ {Map2, ETypes} ->
+ {State1, Map2, t_tuple(ETypes)}
+ end
+ end
+ end
+ end;
+ false ->
+ {State1, Map1, t_tuple(EsType)}
+ end;
+ [] ->
+ {State1, Map1, t_tuple([])}
+ end
+ end.
+
+%%----------------------------------------
+%% Clauses
+%%
+handle_clauses([C|Left], Arg, ArgType, OrigArgType,
+ #state{callgraph = Callgraph, races = Races} = State,
+ CaseTypes, MapIn, Acc, ClauseAcc) ->
+ RaceDetection = dialyzer_callgraph:get_race_detection(Callgraph),
+ RaceAnalysis = dialyzer_races:get_race_analysis(Races),
+ State1 =
+ case RaceDetection andalso RaceAnalysis of
+ true ->
+ RaceList = dialyzer_races:get_race_list(Races),
+ RaceListSize = dialyzer_races:get_race_list_size(Races),
+ state__renew_race_list(
+ [dialyzer_races:beg_clause_new(Arg, cerl:clause_pats(C),
+ cerl:clause_guard(C))|
+ RaceList], RaceListSize + 1,
+ State);
+ false -> State
+ end,
+ {State2, ClauseMap, BodyType, NewArgType} =
+ do_clause(C, Arg, ArgType, OrigArgType, MapIn, State1),
+ {NewClauseAcc, State3} =
+ case RaceDetection andalso RaceAnalysis of
+ true ->
+ Races1 = State2#state.races,
+ RaceList1 = dialyzer_races:get_race_list(Races1),
+ RaceListSize1 = dialyzer_races:get_race_list_size(Races1),
+ EndClause = dialyzer_races:end_clause_new(Arg, cerl:clause_pats(C),
+ cerl:clause_guard(C)),
+ {[EndClause|ClauseAcc],
+ state__renew_race_list([EndClause|RaceList1],
+ RaceListSize1 + 1, State2)};
+ false -> {ClauseAcc, State2}
+ end,
+ {NewCaseTypes, NewAcc} =
+ case t_is_none(BodyType) of
+ true -> {CaseTypes, Acc};
+ false -> {[BodyType|CaseTypes], [ClauseMap|Acc]}
+ end,
+ handle_clauses(Left, Arg, NewArgType, OrigArgType, State3,
+ NewCaseTypes, MapIn, NewAcc, NewClauseAcc);
+handle_clauses([], _Arg, _ArgType, _OrigArgType,
+ #state{callgraph = Callgraph, races = Races} = State,
+ CaseTypes, _MapIn, Acc, ClauseAcc) ->
+ State1 =
+ case dialyzer_callgraph:get_race_detection(Callgraph) andalso
+ dialyzer_races:get_race_analysis(Races) of
+ true ->
+ state__renew_race_list(
+ [dialyzer_races:end_case_new(ClauseAcc)|
+ dialyzer_races:get_race_list(Races)],
+ dialyzer_races:get_race_list_size(Races) + 1, State);
+ false -> State
+ end,
+ {lists:reverse(Acc), State1, t_sup(CaseTypes)}.
+
+do_clause(C, Arg, ArgType0, OrigArgType, Map,
+ #state{callgraph = Callgraph, races = Races} = State) ->
+ Pats = cerl:clause_pats(C),
+ Guard = cerl:clause_guard(C),
+ Body = cerl:clause_body(C),
+ RaceDetection = dialyzer_callgraph:get_race_detection(Callgraph),
+ RaceAnalysis = dialyzer_races:get_race_analysis(Races),
+ State1 =
+ case RaceDetection andalso RaceAnalysis of
+ true ->
+ state__renew_fun_args(Pats, State);
+ false -> State
+ end,
+ Map0 = mark_as_fresh(Pats, Map),
+ Map1 = if Arg =:= ?no_arg -> Map0;
+ true -> bind_subst(Arg, Pats, Map0)
+ end,
+ BindRes =
+ case t_is_none(ArgType0) of
+ true ->
+ {error, bind, Pats, ArgType0, ArgType0};
+ false ->
+ ArgTypes =
+ case t_is_any(ArgType0) of
+ true -> [ArgType0 || _ <- Pats];
+ false -> t_to_tlist(ArgType0)
+ end,
+ bind_pat_vars(Pats, ArgTypes, [], Map1, State1)
+ end,
+ case BindRes of
+ {error, BindOrOpaque, NewPats, Type, OpaqueTerm} ->
+ ?debug("Failed binding pattern: ~s\nto ~s\n",
+ [cerl_prettypr:format(C), format_type(ArgType0, State1)]),
+ case state__warning_mode(State1) of
+ false ->
+ {State1, Map, t_none(), ArgType0};
+ true ->
+ PatString =
+ case BindOrOpaque of
+ bind -> format_patterns(Pats);
+ opaque -> format_patterns(NewPats)
+ end,
+ {Msg, Force} =
+ case t_is_none(ArgType0) of
+ true ->
+ PatTypes = [PatString, format_type(OrigArgType, State1)],
+ %% See if this is covered by an earlier clause or if it
+ %% simply cannot match
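+                %% (For instance, if the clauses before this one already
+                %% match everything the argument can be, binding against
+                %% the original argument type still succeeds and the
+                %% warning becomes pattern_match_cov rather than
+                %% pattern_match.)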
+ OrigArgTypes =
+ case t_is_any(OrigArgType) of
+ true -> Any = t_any(), [Any || _ <- Pats];
+ false -> t_to_tlist(OrigArgType)
+ end,
+ case bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1) of
+ {error, bind, _, _, _} -> {{pattern_match, PatTypes}, false};
+ {_, _} -> {{pattern_match_cov, PatTypes}, false}
+ end;
+ false ->
+                %% Try to find out if this is a default clause in a list
+                %% comprehension and suppress the warning. A real Hack(tm)
+ Force0 =
+ case is_compiler_generated(cerl:get_ann(C)) of
+ true ->
+ case Pats of
+ [Pat] ->
+ case cerl:is_c_cons(Pat) of
+ true ->
+ not (cerl:is_c_var(cerl:cons_hd(Pat)) andalso
+ cerl:is_c_var(cerl:cons_tl(Pat)) andalso
+ cerl:is_literal(Guard) andalso
+ (cerl:concrete(Guard) =:= true));
+ false ->
+ true
+ end;
+ _ -> true
+ end;
+ false ->
+ true
+ end,
+ PatTypes = case BindOrOpaque of
+ bind -> [PatString, format_type(ArgType0, State1)];
+ opaque -> [PatString, format_type(Type, State1),
+ format_type(OpaqueTerm, State1)]
+ end,
+ FailedMsg = case BindOrOpaque of
+ bind -> {pattern_match, PatTypes};
+ opaque -> {opaque_match, PatTypes}
+ end,
+ {FailedMsg, Force0}
+ end,
+ WarnType = case Msg of
+ {opaque_match, _} -> ?WARN_OPAQUE;
+ {pattern_match, _} -> ?WARN_MATCHING;
+ {pattern_match_cov, _} -> ?WARN_MATCHING
+ end,
+ {state__add_warning(State1, WarnType, C, Msg, Force),
+ Map, t_none(), ArgType0}
+ end;
+ {Map2, PatTypes} ->
+ Map3 =
+ case Arg =:= ?no_arg of
+ true -> Map2;
+ false ->
+ %% Try to bind the argument. Will only succeed if
+ %% it is a simple structured term.
+ case bind_pat_vars_reverse([Arg], [t_product(PatTypes)],
+ [], Map2, State1) of
+ {error, _, _, _, _} -> Map2;
+ {NewMap, _} -> NewMap
+ end
+ end,
+ NewArgType =
+ case Arg =:= ?no_arg of
+ true -> ArgType0;
+ false ->
+ GenType = dialyzer_typesig:get_safe_underapprox(Pats, Guard),
+ t_subtract(t_product(t_to_tlist(ArgType0)), GenType)
+ end,
+ case bind_guard(Guard, Map3, State1) of
+ {error, Reason} ->
+ ?debug("Failed guard: ~s\n",
+ [cerl_prettypr:format(C, [{hook, cerl_typean:pp_hook()}])]),
+ PatString = format_patterns(Pats),
+ DefaultMsg =
+ case Pats =:= [] of
+ true -> {guard_fail, []};
+ false ->
+ {guard_fail_pat, [PatString, format_type(ArgType0, State1)]}
+ end,
+ State2 =
+ case Reason of
+ none -> state__add_warning(State1, ?WARN_MATCHING, C, DefaultMsg);
+ {FailGuard, Msg} ->
+ case is_compiler_generated(cerl:get_ann(FailGuard)) of
+ false ->
+ WarnType = case Msg of
+ {guard_fail, _} -> ?WARN_MATCHING;
+ {opaque_guard, _} -> ?WARN_OPAQUE
+ end,
+ state__add_warning(State1, WarnType, FailGuard, Msg);
+ true ->
+ state__add_warning(State1, ?WARN_MATCHING, C, Msg)
+ end
+ end,
+ {State2, Map, t_none(), NewArgType};
+ Map4 ->
+ {RetState, RetMap, BodyType} = traverse(Body, Map4, State1),
+ {RetState, RetMap, BodyType, NewArgType}
+ end
+ end.
+
+bind_subst(Arg, Pats, Map) ->
+ case cerl:type(Arg) of
+ values ->
+ bind_subst_list(cerl:values_es(Arg), Pats, Map);
+ var ->
+ [Pat] = Pats,
+ enter_subst(Arg, Pat, Map);
+ _ ->
+ Map
+ end.
+
+bind_subst_list([Arg|ArgLeft], [Pat|PatLeft], Map) ->
+ NewMap =
+ case {cerl:type(Arg), cerl:type(Pat)} of
+ {var, var} -> enter_subst(Arg, Pat, Map);
+ {var, alias} -> enter_subst(Arg, cerl:alias_pat(Pat), Map);
+ {literal, literal} -> Map;
+ {T, T} -> bind_subst_list(lists:flatten(cerl:subtrees(Arg)),
+ lists:flatten(cerl:subtrees(Pat)),
+ Map);
+ _ -> Map
+ end,
+ bind_subst_list(ArgLeft, PatLeft, NewMap);
+bind_subst_list([], [], Map) ->
+ Map.
+
+%%----------------------------------------
+%% Patterns
+%%
+
+bind_pat_vars(Pats, Types, Acc, Map, State) ->
+ try
+ bind_pat_vars(Pats, Types, Acc, Map, State, false)
+ catch
+ throw:Error -> Error % Error = {error, bind | opaque, ErrorPats, ErrorType}
+ end.
+
+bind_pat_vars_reverse(Pats, Types, Acc, Map, State) ->
+ try
+ bind_pat_vars(Pats, Types, Acc, Map, State, true)
+ catch
+ throw:Error -> Error % Error = {error, bind | opaque, ErrorPats, ErrorType}
+ end.
+
+bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
+ ?debug("Binding pat: ~w to ~s\n", [cerl:type(Pat), format_type(Type, State)]),
+ {NewMap, TypeOut} =
+ case cerl:type(Pat) of
+ alias ->
+ AliasPat = cerl:alias_pat(Pat),
+ Var = cerl:alias_var(Pat),
+ Map1 = enter_subst(Var, AliasPat, Map),
+ {Map2, [PatType]} = bind_pat_vars([AliasPat], [Type], [],
+ Map1, State, Rev),
+ {enter_type(Var, PatType, Map2), PatType};
+ binary ->
+ %% Cannot bind the binary if we are in reverse match since
+ %% binary patterns and binary construction are not symmetric.
+ case Rev of
+ true -> {Map, t_bitstr()};
+ false ->
+ BinType = t_inf(t_bitstr(), Type),
+ case t_is_none(BinType) of
+ true -> bind_error([Pat], Type, t_none(), bind);
+ false ->
+ Segs = cerl:binary_segments(Pat),
+ {Map1, SegTypes} = bind_bin_segs(Segs, BinType, Map, State),
+ {Map1, t_bitstr_concat(SegTypes)}
+ end
+ end;
+ cons ->
+ Cons = t_inf(Type, t_cons()),
+ case t_is_none(Cons) of
+ true ->
+ case t_find_opaque_mismatch(t_cons(), Type) of
+ {ok, T1, T2} -> bind_error([Pat], T1, T2, opaque);
+ error -> bind_error([Pat], Type, t_none(), bind)
+ end;
+ false ->
+ {Map1, [HdType, TlType]} =
+ bind_pat_vars([cerl:cons_hd(Pat), cerl:cons_tl(Pat)],
+ [t_cons_hd(Cons), t_cons_tl(Cons)],
+ [], Map, State, Rev),
+ {Map1, t_cons(HdType, TlType)}
+ end;
+ literal ->
+ Literal = literal_type(Pat),
+ LiteralOrOpaque =
+ case t_opaque_match_atom(Literal, State#state.opaques) of
+ [Opaque] -> Opaque;
+ _ -> Literal
+ end,
+ case t_is_none(t_inf(LiteralOrOpaque, Type)) of
+ true ->
+ case t_find_opaque_mismatch(Literal, Type) of
+ {ok, T1, T2} ->
+ case lists:member(T2, State#state.opaques) of
+ true ->
+ NewType = erl_types:t_struct_from_opaque(Type, T2),
+ {Map1, _} =
+ bind_pat_vars([Pat], [NewType], [], Map, State, Rev),
+ {Map1, T2};
+ false -> bind_error([Pat], T1, T2, opaque)
+ end;
+ error -> bind_error([Pat], Type, t_none(), bind)
+ end;
+ false -> {Map, LiteralOrOpaque}
+ end;
+ tuple ->
+ Es = cerl:tuple_es(Pat),
+ Prototype =
+ case Es of
+ [] -> t_tuple([]);
+ [Tag|Left] ->
+ case cerl:is_c_atom(Tag) of
+ true ->
+ TagAtom = cerl:atom_val(Tag),
+ case state__lookup_record(TagAtom, length(Left), State) of
+ error -> t_tuple(length(Es));
+ {ok, Record} -> Record
+ end;
+ false -> t_tuple(length(Es))
+ end
+ end,
+ Tuple = t_inf(Prototype, Type),
+ case t_is_none(Tuple) of
+ true ->
+ case t_find_opaque_mismatch(Prototype, Type) of
+ {ok, T1, T2} ->
+ case lists:member(T2, State#state.opaques) of
+ true ->
+ NewType = erl_types:t_struct_from_opaque(Type, T2),
+ {Map1, _} =
+ bind_pat_vars([Pat], [NewType], [], Map, State, Rev),
+ {Map1, T2};
+ false -> bind_error([Pat], T1, T2, opaque)
+ end;
+ error -> bind_error([Pat], Type, t_none(), bind)
+ end;
+ false ->
+ SubTuples = t_tuple_subtypes(Tuple),
+ %% Need to call the top function to get the try-catch wrapper
+ Results =
+ case Rev of
+ true ->
+ [bind_pat_vars_reverse(Es, t_tuple_args(SubTuple), [],
+ Map, State)
+ || SubTuple <- SubTuples];
+ false ->
+ [bind_pat_vars(Es, t_tuple_args(SubTuple), [], Map, State)
+ || SubTuple <- SubTuples]
+ end,
+ case lists:keyfind(opaque, 2, Results) of
+ {error, opaque, _PatList, _Type, Opaque} ->
+ bind_error([Pat], Tuple, Opaque, opaque);
+ false ->
+ case [M || {M, _} <- Results, M =/= error] of
+ [] -> bind_error([Pat], Tuple, t_none(), bind);
+ Maps ->
+ Map1 = join_maps(Maps, Map),
+ TupleType = t_sup([t_tuple(EsTypes)
+ || {M, EsTypes} <- Results, M =/= error]),
+ {Map1, TupleType}
+ end
+ end
+ end;
+ values ->
+ Es = cerl:values_es(Pat),
+ {Map1, EsTypes} =
+ bind_pat_vars(Es, t_to_tlist(Type), [], Map, State, Rev),
+ {Map1, t_product(EsTypes)};
+ var ->
+ Opaques = State#state.opaques,
+ VarType1 =
+ case state__lookup_type_for_rec_var(Pat, State) of
+ error ->
+ LType = lookup_type(Pat, Map),
+ case t_opaque_match_record(LType, Opaques) of
+ [Opaque] -> Opaque;
+ _ ->
+ case t_opaque_match_atom(LType, Opaques) of
+ [Opaque] -> Opaque;
+ _ -> LType
+ end
+ end;
+ {ok, RecType} -> RecType
+ end,
+ %% Must do inf when binding args to pats. Vars in pats are fresh.
+ VarType2 = t_inf(VarType1, Type),
+ VarType3 =
+ case Opaques =/= [] of
+ true ->
+ case t_opaque_match_record(VarType2, Opaques) of
+ [OpaqueRec] -> OpaqueRec;
+ _ ->
+ case t_opaque_match_atom(VarType2, Opaques) of
+ [OpaqueAtom] -> OpaqueAtom;
+ _ -> VarType2
+ end
+ end;
+ false -> VarType2
+ end,
+ case t_is_none(VarType3) of
+ true ->
+ case t_find_opaque_mismatch(VarType1, Type) of
+ {ok, T1, T2} ->
+ bind_error([Pat], T1, T2, opaque);
+ error ->
+ bind_error([Pat], Type, t_none(), bind)
+ end;
+ false ->
+ Map1 = enter_type(Pat, VarType3, Map),
+ {Map1, VarType3}
+ end;
+ _Other ->
+ %% Catch all is needed when binding args to pats
+ ?debug("Failed match for ~p\n", [_Other]),
+ bind_error([Pat], Type, t_none(), bind)
+ end,
+ bind_pat_vars(PatLeft, TypeLeft, [TypeOut|Acc], NewMap, State, Rev);
+bind_pat_vars([], [], Acc, Map, _State, _Rev) ->
+ {Map, lists:reverse(Acc)}.
+
+bind_bin_segs(BinSegs, BinType, Map, State) ->
+ bind_bin_segs(BinSegs, BinType, [], Map, State).
+
+bind_bin_segs([Seg|Segs], BinType, Acc, Map, State) ->
+ Val = cerl:bitstr_val(Seg),
+ SegType = cerl:concrete(cerl:bitstr_type(Seg)),
+ UnitVal = cerl:concrete(cerl:bitstr_unit(Seg)),
+ case cerl:bitstr_bitsize(Seg) of
+ all ->
+ binary = SegType, [] = Segs, %% just an assert
+ T = t_inf(t_bitstr(UnitVal, 0), BinType),
+ {Map1, [Type]} = bind_pat_vars([Val], [T], [], Map, State, false),
+ bind_bin_segs(Segs, t_bitstr(0, 0), [Type|Acc], Map1, State);
+ utf -> % XXX: possibly can be strengthened
+ true = lists:member(SegType, [utf8, utf16, utf32]),
+ {Map1, [_]} = bind_pat_vars([Val], [t_integer()], [], Map, State, false),
+ Type = t_binary(),
+ bind_bin_segs(Segs, BinType, [Type|Acc], Map1, State);
+ BitSz when is_integer(BitSz) orelse BitSz =:= any ->
+ Size = cerl:bitstr_size(Seg),
+ {Map1, [SizeType]} =
+ bind_pat_vars([Size], [t_non_neg_integer()], [], Map, State, false),
+ Type =
+ case t_number_vals(SizeType) of
+ [OneSize] -> t_bitstr(0, UnitVal * OneSize);
+ _ ->
+ MinSize = erl_types:number_min(SizeType),
+ t_bitstr(UnitVal, UnitVal * MinSize)
+ end,
+ ValConstr =
+ case SegType of
+ binary -> Type; %% The same constraints as for the whole bitstr
+ float -> t_float();
+ integer ->
+ case t_number_vals(SizeType) of
+ unknown -> t_integer();
+ List ->
+ SizeVal = lists:max(List),
+ Flags = cerl:concrete(cerl:bitstr_flags(Seg)),
+ N = SizeVal * UnitVal,
+ case lists:member(signed, Flags) of
+ true -> t_from_range(-(1 bsl (N - 1)), 1 bsl (N - 1) - 1);
+ false -> t_from_range(0, 1 bsl N - 1)
+ end
+ end
+ end,
+ {Map2, [_]} = bind_pat_vars([Val], [ValConstr], [], Map1, State, false),
+ NewBinType = t_bitstr_match(Type, BinType),
+ case t_is_none(NewBinType) of
+ true -> bind_error([Seg], BinType, t_none(), bind);
+ false -> bind_bin_segs(Segs, NewBinType, [Type|Acc], Map2, State)
+ end
+ end;
+bind_bin_segs([], _BinType, Acc, Map, _State) ->
+ {Map, lists:reverse(Acc)}.
+
+bind_error(Pats, Type, OpaqueType, Error) ->
+ throw({error, Error, Pats, Type, OpaqueType}).
+
+%%----------------------------------------
+%% Guards
+%%
+
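+%% A guard is evaluated in one of three modes: 'pos' (the guard is
+%% required to evaluate to true), 'neg' (required to evaluate to
+%% false) and 'dont_know' (either outcome is acceptable). A guard that
+%% cannot succeed throws {fail, Warning} or {fatal_fail, Warning};
+%% bind_guard/3 below turns either throw into {error, Warning}.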
+bind_guard(Guard, Map, State) ->
+ try bind_guard(Guard, Map, dict:new(), pos, State) of
+ {Map1, _Type} -> Map1
+ catch
+ throw:{fail, Warning} -> {error, Warning};
+ throw:{fatal_fail, Warning} -> {error, Warning}
+ end.
+
+bind_guard(Guard, Map, Env, Eval, State) ->
+ ?debug("Handling ~w guard: ~s\n",
+ [Eval, cerl_prettypr:format(Guard, [{noann, true}])]),
+ case cerl:type(Guard) of
+ binary ->
+ {Map, t_binary()};
+ 'case' ->
+ Arg = cerl:case_arg(Guard),
+ Clauses = cerl:case_clauses(Guard),
+ bind_guard_case_clauses(Arg, Clauses, Map, Env, Eval, State);
+ cons ->
+ Hd = cerl:cons_hd(Guard),
+ Tl = cerl:cons_tl(Guard),
+ {Map1, HdType} = bind_guard(Hd, Map, Env, dont_know, State),
+ {Map2, TlType} = bind_guard(Tl, Map1, Env, dont_know, State),
+ {Map2, t_cons(HdType, TlType)};
+ literal ->
+ {Map, literal_type(Guard)};
+ 'try' ->
+ Arg = cerl:try_arg(Guard),
+ [Var] = cerl:try_vars(Guard),
+ %%?debug("Storing: ~w\n", [Var]),
+ NewEnv = dict:store(get_label(Var), Arg, Env),
+ bind_guard(cerl:try_body(Guard), Map, NewEnv, Eval, State);
+ tuple ->
+ Es0 = cerl:tuple_es(Guard),
+ {Map1, Es} = bind_guard_list(Es0, Map, Env, dont_know, State),
+ {Map1, t_tuple(Es)};
+ 'let' ->
+ Arg = cerl:let_arg(Guard),
+ [Var] = cerl:let_vars(Guard),
+ %%?debug("Storing: ~w\n", [Var]),
+ NewEnv = dict:store(get_label(Var), Arg, Env),
+ bind_guard(cerl:let_body(Guard), Map, NewEnv, Eval, State);
+ values ->
+ Es = cerl:values_es(Guard),
+ List = [bind_guard(V, Map, Env, dont_know, State) || V <- Es],
+ Type = t_product([T || {_, T} <- List]),
+ {Map, Type};
+ var ->
+ ?debug("Looking for var(~w)...", [cerl_trees:get_label(Guard)]),
+ case dict:find(get_label(Guard), Env) of
+ error ->
+ ?debug("Did not find it\n", []),
+ Type = lookup_type(Guard, Map),
+ Constr =
+ case Eval of
+ pos -> t_atom(true);
+ neg -> t_atom(false);
+ dont_know -> Type
+ end,
+ Inf = t_inf(Constr, Type),
+ {enter_type(Guard, Inf, Map), Inf};
+ {ok, Tree} ->
+ ?debug("Found it\n", []),
+ {Map1, Type} = bind_guard(Tree, Map, Env, Eval, State),
+ {enter_type(Guard, Type, Map1), Type}
+ end;
+ call ->
+ handle_guard_call(Guard, Map, Env, Eval, State)
+ end.
+
+handle_guard_call(Guard, Map, Env, Eval, State) ->
+ MFA = {cerl:atom_val(cerl:call_module(Guard)),
+ cerl:atom_val(cerl:call_name(Guard)),
+ cerl:call_arity(Guard)},
+ case MFA of
+ {erlang, F, 1} when F =:= is_atom; F =:= is_boolean;
+ F =:= is_binary; F =:= is_bitstring;
+ F =:= is_float; F =:= is_function;
+ F =:= is_integer; F =:= is_list;
+ F =:= is_number; F =:= is_pid; F =:= is_port;
+ F =:= is_reference; F =:= is_tuple ->
+ handle_guard_type_test(Guard, F, Map, Env, Eval, State);
+ {erlang, is_function, 2} ->
+ handle_guard_is_function(Guard, Map, Env, Eval, State);
+ MFA when (MFA =:= {erlang, internal_is_record, 3}) or
+ (MFA =:= {erlang, is_record, 3}) ->
+ handle_guard_is_record(Guard, Map, Env, Eval, State);
+ {erlang, '=:=', 2} ->
+ handle_guard_eqeq(Guard, Map, Env, Eval, State);
+ {erlang, '==', 2} ->
+ handle_guard_eq(Guard, Map, Env, Eval, State);
+ {erlang, 'and', 2} ->
+ handle_guard_and(Guard, Map, Env, Eval, State);
+ {erlang, 'or', 2} ->
+ handle_guard_or(Guard, Map, Env, Eval, State);
+ {erlang, 'not', 1} ->
+ handle_guard_not(Guard, Map, Env, Eval, State);
+ {erlang, Comp, 2} when Comp =:= '<'; Comp =:= '=<';
+ Comp =:= '>'; Comp =:= '>=' ->
+ handle_guard_comp(Guard, Comp, Map, Env, Eval, State);
+ _ ->
+ handle_guard_gen_fun(MFA, Guard, Map, Env, Eval, State)
+ end.
+
+handle_guard_gen_fun({M, F, A}, Guard, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ {Map1, As0} = bind_guard_list(Args, Map, Env, dont_know, State),
+ MapFun = fun(Type) ->
+ case lists:member(Type, State#state.opaques) of
+ true -> erl_types:t_opaque_structure(Type);
+ false -> Type
+ end
+ end,
+ As = lists:map(MapFun, As0),
+ Mode = case As =:= As0 of
+ true -> structured;
+ false -> opaque
+ end,
+ BifRet = erl_bif_types:type(M, F, A, As),
+ case t_is_none(BifRet) of
+ true ->
+ %% Is this an error-bif?
+ case t_is_none(erl_bif_types:type(M, F, A)) of
+ true -> signal_guard_fail(Guard, As, State);
+ false -> signal_guard_fatal_fail(Guard, As, State)
+ end;
+ false ->
+ BifArgs = case erl_bif_types:arg_types(M, F, A) of
+ unknown -> lists:duplicate(A, t_any());
+ List -> List
+ end,
+ Map2 = enter_type_lists(Args, t_inf_lists(BifArgs, As0, Mode), Map1),
+ Ret =
+ case Eval of
+ pos -> t_inf(t_atom(true), BifRet);
+ neg -> t_inf(t_atom(false), BifRet);
+ dont_know -> BifRet
+ end,
+ case t_is_none(Ret) of
+ true ->
+ case Eval =:= pos of
+ true -> signal_guard_fail(Guard, As, State);
+ false -> throw({fail, none})
+ end;
+ false -> {Map2, Ret}
+ end
+ end.
+
+handle_guard_type_test(Guard, F, Map, Env, Eval, State) ->
+ [Arg] = cerl:call_args(Guard),
+ {Map1, ArgType} = bind_guard(Arg, Map, Env, dont_know, State),
+ case bind_type_test(Eval, F, ArgType, State) of
+ error ->
+ ?debug("Type test: ~w failed\n", [F]),
+ signal_guard_fail(Guard, [ArgType], State);
+ {ok, NewArgType, Ret} ->
+ ?debug("Type test: ~w succeeded, NewType: ~s, Ret: ~s\n",
+ [F, t_to_string(NewArgType), t_to_string(Ret)]),
+ {enter_type(Arg, NewArgType, Map1), Ret}
+ end.
+
+bind_type_test(Eval, TypeTest, ArgType, State) ->
+ Type = case TypeTest of
+ is_atom -> t_atom();
+ is_boolean -> t_boolean();
+ is_binary -> t_binary();
+ is_bitstring -> t_bitstr();
+ is_float -> t_float();
+ is_function -> t_fun();
+ is_integer -> t_integer();
+ is_list -> t_maybe_improper_list();
+ is_number -> t_number();
+ is_pid -> t_pid();
+ is_port -> t_port();
+ is_reference -> t_reference();
+ is_tuple -> t_tuple()
+ end,
+ Mode = determine_mode(ArgType, State#state.opaques),
+ case Eval of
+ pos ->
+ Inf = t_inf(Type, ArgType, Mode),
+ case t_is_none(Inf) of
+ true -> error;
+ false -> {ok, Inf, t_atom(true)}
+ end;
+ neg ->
+ case Mode of
+ opaque ->
+ Struct = erl_types:t_opaque_structure(ArgType),
+ case t_is_none(t_subtract(Struct, Type)) of
+ true -> error;
+ false -> {ok, ArgType, t_atom(false)}
+ end;
+ structured ->
+ Sub = t_subtract(ArgType, Type),
+ case t_is_none(Sub) of
+ true -> error;
+ false -> {ok, Sub, t_atom(false)}
+ end
+ end;
+ dont_know ->
+ {ok, ArgType, t_boolean()}
+ end.
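+
+%% For example, in 'pos' mode is_integer(X) with X :: integer() | atom()
+%% narrows X to integer(); in 'neg' mode the same test narrows X to
+%% atom(); in 'dont_know' mode the argument type is left as it is and
+%% the type of the test itself is boolean().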
+
+handle_guard_comp(Guard, Comp, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ [Arg1, Arg2] = Args,
+ {Map1, ArgTypes} = bind_guard_list(Args, Map, Env, dont_know, State),
+ [Type1, Type2] = ArgTypes,
+ IsInt1 = t_is_integer(Type1),
+ IsInt2 = t_is_integer(Type2),
+ case {cerl:type(Arg1), cerl:type(Arg2)} of
+ {literal, literal} ->
+ case erlang:Comp(cerl:concrete(Arg1), cerl:concrete(Arg2)) of
+ true when Eval =:= pos -> {Map, t_atom(true)};
+ true when Eval =:= dont_know -> {Map, t_atom(true)};
+ true when Eval =:= neg -> {Map, t_atom(true)};
+ false when Eval =:= pos -> signal_guard_fail(Guard, ArgTypes, State);
+ false when Eval =:= dont_know -> {Map, t_atom(false)};
+ false when Eval =:= neg -> {Map, t_atom(false)}
+ end;
+ {literal, var} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
+ case bind_comp_literal_var(Arg1, Arg2, Type2, Comp, Map1) of
+ error -> signal_guard_fail(Guard, ArgTypes, State);
+ {ok, NewMap} -> {NewMap, t_atom(true)}
+ end;
+ {var, literal} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
+ case bind_comp_literal_var(Arg2, Arg1, Type1, invert_comp(Comp), Map1) of
+ error -> signal_guard_fail(Guard, ArgTypes, State);
+ {ok, NewMap} -> {NewMap, t_atom(true)}
+ end;
+ {_, _} ->
+ handle_guard_gen_fun({erlang, Comp, 2}, Guard, Map, Env, Eval, State)
+ end.
+
+invert_comp('=<') -> '>=';
+invert_comp('<') -> '>';
+invert_comp('>=') -> '=<';
+invert_comp('>') -> '<'.
+
+bind_comp_literal_var(Lit, Var, VarType, CompOp, Map) ->
+ LitVal = cerl:concrete(Lit),
+ NewVarType =
+ case t_number_vals(VarType) of
+ unknown ->
+ Range =
+ case CompOp of
+ '=<' -> t_from_range(LitVal, pos_inf);
+ '<' -> t_from_range(LitVal + 1, pos_inf);
+ '>=' -> t_from_range(neg_inf, LitVal);
+ '>' -> t_from_range(neg_inf, LitVal - 1)
+ end,
+ t_inf(Range, VarType);
+ NumberVals ->
+ NewNumberVals = [X || X <- NumberVals, erlang:CompOp(LitVal, X)],
+ t_integers(NewNumberVals)
+ end,
+ case t_is_none(NewVarType) of
+ true -> error;
+ false -> {ok, enter_type(Var, NewVarType, Map)}
+ end.
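+
+%% For example, a guard 5 < X with X known to be an integer() (but with
+%% no known set of values) narrows X to the range 6..'+inf', while
+%% 5 >= X narrows it to '-inf'..5.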
+
+handle_guard_is_function(Guard, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ {Map1, ArgTypes0} = bind_guard_list(Args, Map, Env, dont_know, State),
+ [FunType0, ArityType0] = ArgTypes0,
+ ArityType = t_inf(ArityType0, t_integer()),
+ case t_is_none(ArityType) of
+ true -> signal_guard_fail(Guard, ArgTypes0, State);
+ false ->
+ FunTypeConstr =
+ case t_number_vals(ArityType) of
+ unknown -> t_fun();
+ Vals ->
+ t_sup([t_fun(lists:duplicate(X, t_any()), t_any()) || X <- Vals])
+ end,
+ FunType = t_inf(FunType0, FunTypeConstr),
+ case t_is_none(FunType) of
+ true ->
+ case Eval of
+ pos -> signal_guard_fail(Guard, ArgTypes0, State);
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_atom(false)}
+ end;
+ false ->
+ case Eval of
+ pos -> {enter_type_lists(Args, [FunType, ArityType], Map1),
+ t_atom(true)};
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_boolean()}
+ end
+ end
+ end.
+
+handle_guard_is_record(Guard, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ [Rec, Tag0, Arity0] = Args,
+ Tag = cerl:atom_val(Tag0),
+ Arity = cerl:int_val(Arity0),
+ {Map1, RecType} = bind_guard(Rec, Map, Env, dont_know, State),
+ ArityMin1 = Arity - 1,
+ TupleType =
+ case state__lookup_record(Tag, ArityMin1, State) of
+ error -> t_tuple([t_atom(Tag)|lists:duplicate(ArityMin1, t_any())]);
+ {ok, Prototype} -> Prototype
+ end,
+ Mode = determine_mode(RecType, State#state.opaques),
+ NewTupleType =
+ case t_opaque_match_record(TupleType, State#state.opaques) of
+ [Opaque] -> Opaque;
+ _ -> TupleType
+ end,
+ Type = t_inf(NewTupleType, RecType, Mode),
+ case t_is_none(Type) of
+ true ->
+ case Eval of
+ pos -> signal_guard_fail(Guard,
+ [RecType, t_from_term(Tag),
+ t_from_term(Arity)],
+ State);
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_atom(false)}
+ end;
+ false ->
+ case Eval of
+ pos -> {enter_type(Rec, Type, Map1), t_atom(true)};
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_boolean()}
+ end
+ end.
+
+handle_guard_eq(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ case {cerl:type(Arg1), cerl:type(Arg2)} of
+ {literal, literal} ->
+ case cerl:concrete(Arg1) =:= cerl:concrete(Arg2) of
+ true ->
+ if
+ Eval =:= pos -> {Map, t_atom(true)};
+ Eval =:= neg -> throw({fail, none});
+ Eval =:= dont_know -> {Map, t_atom(true)}
+ end;
+ false ->
+ if
+ Eval =:= neg -> {Map, t_atom(false)};
+ Eval =:= dont_know -> {Map, t_atom(false)};
+ Eval =:= pos ->
+ ArgTypes = [t_from_term(cerl:concrete(Arg1)),
+ t_from_term(cerl:concrete(Arg2))],
+ signal_guard_fail(Guard, ArgTypes, State)
+ end
+ end;
+ {literal, _} when Eval =:= pos ->
+ case cerl:concrete(Arg1) of
+ Atom when is_atom(Atom) ->
+ bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State);
+ [] ->
+ bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State);
+ _ ->
+ bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ end;
+ {_, literal} when Eval =:= pos ->
+ case cerl:concrete(Arg2) of
+ Atom when is_atom(Atom) ->
+ bind_eqeq_guard_lit_other(Guard, Arg2, Arg1, Map, Env, State);
+ [] ->
+ bind_eqeq_guard_lit_other(Guard, Arg2, Arg1, Map, Env, State);
+ _ ->
+ bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ end;
+ {_, _} ->
+ bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ end.
+
+bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, dont_know, State),
+ case (t_is_nil(Type1) orelse t_is_nil(Type2) orelse
+ t_is_atom(Type1) orelse t_is_atom(Type2)) of
+ true -> bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State);
+ false ->
+ case Eval of
+ pos -> {Map2, t_atom(true)};
+ neg -> {Map2, t_atom(false)};
+ dont_know -> {Map2, t_boolean()}
+ end
+ end.
+
+handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ case {cerl:type(Arg1), cerl:type(Arg2)} of
+ {literal, literal} ->
+ case cerl:concrete(Arg1) =:= cerl:concrete(Arg2) of
+ true ->
+ if Eval =:= neg -> throw({fail, none});
+ Eval =:= pos -> {Map, t_atom(true)};
+ Eval =:= dont_know -> {Map, t_atom(true)}
+ end;
+ false ->
+ if Eval =:= neg -> {Map, t_atom(false)};
+ Eval =:= dont_know -> {Map, t_atom(false)};
+ Eval =:= pos ->
+ ArgTypes = [t_from_term(cerl:concrete(Arg1)),
+ t_from_term(cerl:concrete(Arg2))],
+ signal_guard_fail(Guard, ArgTypes, State)
+ end
+ end;
+ {literal, _} when Eval =:= pos ->
+ bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State);
+ {_, literal} when Eval =:= pos ->
+ bind_eqeq_guard_lit_other(Guard, Arg2, Arg1, Map, Env, State);
+ {_, _} ->
+ bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ end.
+
+bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, dont_know, State),
+ ?debug("Types are:~s =:= ~s\n", [t_to_string(Type1),
+ t_to_string(Type2)]),
+ Inf = t_inf(Type1, Type2),
+ case t_is_none(Inf) of
+ true ->
+ case Eval of
+ neg -> {Map2, t_atom(false)};
+ dont_know -> {Map2, t_atom(false)};
+ pos -> signal_guard_fail(Guard, [Type1, Type2], State)
+ end;
+ false ->
+ case Eval of
+ pos ->
+ case {cerl:type(Arg1), cerl:type(Arg2)} of
+ {var, var} ->
+ Map3 = enter_subst(Arg1, Arg2, Map2),
+ Map4 = enter_type(Arg2, Inf, Map3),
+ {Map4, t_atom(true)};
+ {var, _} ->
+ Map3 = enter_type(Arg1, Inf, Map2),
+ {Map3, t_atom(true)};
+ {_, var} ->
+ Map3 = enter_type(Arg2, Inf, Map2),
+ {Map3, t_atom(true)};
+ {_, _} ->
+ {Map2, t_atom(true)}
+ end;
+ neg ->
+ {Map2, t_atom(false)};
+ dont_know ->
+ {Map2, t_boolean()}
+ end
+ end.
+
+bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State) ->
+ %% Assumes positive evaluation
+ case cerl:concrete(Arg1) of
+ true ->
+ {_, Type} = MT = bind_guard(Arg2, Map, Env, pos, State),
+ case t_is_atom(true, Type) of
+ true -> MT;
+ false ->
+ {_, Type0} = bind_guard(Arg2, Map, Env, dont_know, State),
+ signal_guard_fail(Guard, [Type0, t_atom(true)], State)
+ end;
+ false ->
+ {Map1, Type} = bind_guard(Arg2, Map, Env, neg, State),
+ case t_is_atom(false, Type) of
+ true -> {Map1, t_atom(true)};
+ false ->
+ {_, Type0} = bind_guard(Arg2, Map, Env, dont_know, State),
+ signal_guard_fail(Guard, [Type0, t_atom(true)], State)
+ end;
+ Term ->
+ LitType = t_from_term(Term),
+ {Map1, Type} = bind_guard(Arg2, Map, Env, dont_know, State),
+ case t_is_subtype(LitType, Type) of
+ false -> signal_guard_fail(Guard, [Type, LitType], State);
+ true ->
+ case cerl:is_c_var(Arg2) of
+ true -> {enter_type(Arg2, LitType, Map1), t_atom(true)};
+ false -> {Map1, t_atom(true)}
+ end
+ end
+ end.
+
+handle_guard_and(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ case Eval of
+ pos ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, Eval, State),
+ case t_is_atom(true, Type1) of
+ false -> throw({fail, none});
+ true ->
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, Eval, State),
+ case t_is_atom(true, Type2) of
+ false -> throw({fail, none});
+ true -> {Map2, t_atom(true)}
+ end
+ end;
+ neg ->
+ {Map1, Type1} =
+ try bind_guard(Arg1, Map, Env, neg, State)
+ catch throw:{fail, _} -> bind_guard(Arg2, Map, Env, pos, State)
+ end,
+ {Map2, Type2} =
+        try bind_guard(Arg2, Map, Env, neg, State)
+        catch throw:{fail, _} -> bind_guard(Arg1, Map, Env, pos, State)
+ end,
+ case t_is_atom(false, Type1) orelse t_is_atom(false, Type2) of
+ true -> {join_maps([Map1, Map2], Map), t_atom(false)};
+ false -> throw({fail, none})
+ end;
+ dont_know ->
+ True = t_atom(true),
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ case t_is_none(t_inf(Type1, t_boolean())) of
+ true -> throw({fail, none});
+ false ->
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, Eval, State),
+ case t_is_none(t_inf(Type2, t_boolean())) of
+ true -> throw({fail, none});
+ false -> {Map2, True}
+ end
+ end
+ end.
+
+handle_guard_or(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ case Eval of
+ pos ->
+ {Map1, Bool1} =
+ try bind_guard(Arg1, Map, Env, pos, State)
+ catch
+ throw:{fail,_} -> bind_guard(Arg1, Map, Env, dont_know, State)
+ end,
+ {Map2, Bool2} =
+ try bind_guard(Arg2, Map, Env, pos, State)
+ catch
+ throw:{fail,_} -> bind_guard(Arg2, Map, Env, dont_know, State)
+ end,
+ case ((t_is_atom(true, Bool1) andalso t_is_boolean(Bool2))
+ orelse
+ (t_is_atom(true, Bool2) andalso t_is_boolean(Bool1))) of
+ true -> {join_maps([Map1, Map2], Map), t_atom(true)};
+ false -> throw({fail, none})
+ end;
+ neg ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, neg, State),
+ case t_is_atom(true, Type1) of
+ false -> throw({fail, none});
+ true ->
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, neg, State),
+ case t_is_atom(true, Type2) of
+ false -> throw({fail, none});
+ true -> {Map2, t_atom(false)}
+ end
+ end;
+ dont_know ->
+ {Map1, Bool1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ {Map2, Bool2} = bind_guard(Arg2, Map, Env, dont_know, State),
+ case t_is_boolean(Bool1) andalso t_is_boolean(Bool2) of
+ true -> {join_maps([Map1, Map2], Map), t_sup(Bool1, Bool2)};
+ false -> throw({fail, none})
+ end
+ end.
+
+handle_guard_not(Guard, Map, Env, Eval, State) ->
+ [Arg] = cerl:call_args(Guard),
+ case Eval of
+ neg ->
+ {Map1, Type} = bind_guard(Arg, Map, Env, pos, State),
+ case t_is_atom(true, Type) of
+ true -> {Map1, t_atom(false)};
+ false -> throw({fail, none})
+ end;
+ pos ->
+ {Map1, Type} = bind_guard(Arg, Map, Env, neg, State),
+ case t_is_atom(false, Type) of
+ true -> {Map1, t_atom(true)};
+ false -> throw({fail, none})
+ end;
+ dont_know ->
+ {Map1, Type} = bind_guard(Arg, Map, Env, dont_know, State),
+ Bool = t_inf(Type, t_boolean()),
+ case t_is_none(Bool) of
+ true -> throw({fatal_fail, none});
+ false ->
+ case t_atom_vals(Bool) of
+ ['true'] -> {Map1, t_atom(false)};
+ ['false'] -> {Map1, t_atom(true)};
+ [_, _] -> {Map1, Bool}
+ end
+ end
+ end.
+
+bind_guard_list(Guards, Map, Env, Eval, State) ->
+ bind_guard_list(Guards, Map, Env, Eval, State, []).
+
+bind_guard_list([G|Gs], Map, Env, Eval, State, Acc) ->
+ {Map1, T} = bind_guard(G, Map, Env, Eval, State),
+ bind_guard_list(Gs, Map1, Env, Eval, State, [T|Acc]);
+bind_guard_list([], Map, _Env, _Eval, _State, Acc) ->
+ {Map, lists:reverse(Acc)}.
+
+-spec signal_guard_fail(cerl:c_call(), [erl_types:erl_type()], #state{}) ->
+ no_return().
+
+signal_guard_fail(Guard, ArgTypes, State) ->
+ Args = cerl:call_args(Guard),
+ F = cerl:atom_val(cerl:call_name(Guard)),
+ MFA = {cerl:atom_val(cerl:call_module(Guard)), F, length(Args)},
+ Msg =
+ case is_infix_op(MFA) of
+ true ->
+ [ArgType1, ArgType2] = ArgTypes,
+ [Arg1, Arg2] = Args,
+ {guard_fail, [format_args_1([Arg1], [ArgType1], State),
+ atom_to_list(F),
+ format_args_1([Arg2], [ArgType2], State)]};
+ false ->
+ mk_guard_msg(F, Args, ArgTypes, State)
+ end,
+ throw({fail, {Guard, Msg}}).
+
+is_infix_op({erlang, '=:=', 2}) -> true;
+is_infix_op({erlang, '==', 2}) -> true;
+is_infix_op({erlang, '=/=', 2}) -> true;
+is_infix_op({erlang, '/=', 2}) -> true;
+is_infix_op({erlang, '<', 2}) -> true;
+is_infix_op({erlang, '=<', 2}) -> true;
+is_infix_op({erlang, '>', 2}) -> true;
+is_infix_op({erlang, '>=', 2}) -> true;
+is_infix_op({M, F, A}) when is_atom(M), is_atom(F),
+ is_integer(A), 0 =< A, A =< 255 -> false.
+
+-spec signal_guard_fatal_fail(cerl:c_call(), [erl_types:erl_type()], #state{}) ->
+ no_return().
+
+signal_guard_fatal_fail(Guard, ArgTypes, State) ->
+ Args = cerl:call_args(Guard),
+ F = cerl:atom_val(cerl:call_name(Guard)),
+ Msg = mk_guard_msg(F, Args, ArgTypes, State),
+ throw({fatal_fail, {Guard, Msg}}).
+
+mk_guard_msg(F, Args, ArgTypes, State) ->
+ FArgs = [F, format_args(Args, ArgTypes, State)],
+ case any_has_opaque_subtype(ArgTypes) of
+ true -> {opaque_guard, FArgs};
+ false -> {guard_fail, FArgs}
+ end.
+
+bind_guard_case_clauses(Arg, Clauses, Map, Env, Eval, State) ->
+ Clauses1 = filter_fail_clauses(Clauses),
+ {GenMap, GenArgType} = bind_guard(Arg, Map, Env, dont_know, State),
+ bind_guard_case_clauses(GenArgType, GenMap, Arg, Clauses1, Map, Env, Eval,
+ t_none(), [], State).
+
+filter_fail_clauses([Clause|Left]) ->
+ case (cerl:clause_pats(Clause) =:= []) of
+ true ->
+ Body = cerl:clause_body(Clause),
+ case cerl:is_literal(Body) andalso (cerl:concrete(Body) =:= fail) of
+ true -> filter_fail_clauses(Left);
+ false -> [Clause|filter_fail_clauses(Left)]
+ end;
+ false ->
+ [Clause|filter_fail_clauses(Left)]
+ end;
+filter_fail_clauses([]) ->
+ [].
+
+bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
+ Map, Env, Eval, AccType, AccMaps, State) ->
+ Pats = cerl:clause_pats(Clause),
+ {NewMap0, ArgType} =
+ case Pats of
+ [Pat] ->
+ case cerl:is_literal(Pat) of
+ true ->
+ try
+ case cerl:concrete(Pat) of
+ true -> bind_guard(ArgExpr, Map, Env, pos, State);
+ false -> bind_guard(ArgExpr, Map, Env, neg, State);
+ _ -> {GenMap, GenArgType}
+ end
+ catch
+ throw:{fail, _} -> {none, GenArgType}
+ end;
+ false ->
+ {GenMap, GenArgType}
+ end;
+ _ -> {GenMap, GenArgType}
+ end,
+ NewMap1 =
+ case Pats =:= [] of
+ true -> NewMap0;
+ false ->
+ case t_is_none(ArgType) of
+ true -> none;
+ false ->
+ ArgTypes = case t_is_any(ArgType) of
+ true -> Any = t_any(), [Any || _ <- Pats];
+ false -> t_to_tlist(ArgType)
+ end,
+ case bind_pat_vars(Pats, ArgTypes, [], NewMap0, State) of
+ {error, _, _, _, _} -> none;
+ {PatMap, _PatTypes} -> PatMap
+ end
+ end
+ end,
+ Guard = cerl:clause_guard(Clause),
+ GenPatType = dialyzer_typesig:get_safe_underapprox(Pats, Guard),
+ NewGenArgType = t_subtract(GenArgType, GenPatType),
+ case (NewMap1 =:= none) orelse t_is_none(GenArgType) of
+ true ->
+ bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
+ Eval, AccType, AccMaps, State);
+ false ->
+ {NewAccType, NewAccMaps} =
+ try
+ {NewMap2, GuardType} = bind_guard(Guard, NewMap1, Env, pos, State),
+ case t_is_none(t_inf(t_atom(true), GuardType)) of
+ true -> throw({fail, none});
+ false -> ok
+ end,
+ {NewMap3, CType} = bind_guard(cerl:clause_body(Clause), NewMap2,
+ Env, Eval, State),
+ case Eval of
+ pos ->
+ case t_is_atom(true, CType) of
+ true -> ok;
+ false -> throw({fail, none})
+ end;
+ neg ->
+ case t_is_atom(false, CType) of
+ true -> ok;
+ false -> throw({fail, none})
+ end;
+ dont_know ->
+ ok
+ end,
+ {t_sup(AccType, CType), [NewMap3|AccMaps]}
+ catch
+ throw:{fail, _What} -> {AccType, AccMaps}
+ end,
+ bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
+ Eval, NewAccType, NewAccMaps, State)
+ end;
+bind_guard_case_clauses(_GenArgType, _GenMap, _ArgExpr, [], Map, _Env, _Eval,
+ AccType, AccMaps, _State) ->
+ case t_is_none(AccType) of
+ true -> throw({fail, none});
+ false -> {join_maps(AccMaps, Map), AccType}
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Maps and types.
+%%%
+%%% ===========================================================================
+
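+%% A map here is a pair {Types, Subst}: Types maps the label of a Core
+%% Erlang tree to its currently inferred erl_type(), while Subst maps
+%% labels to labels and records variable aliases (introduced by alias
+%% patterns and by '=:=' between variables in guards); lookups follow
+%% Subst before consulting Types.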
+map__new() ->
+ {dict:new(), dict:new()}.
+
+join_maps(Maps, MapOut) ->
+ {Map, Subst} = MapOut,
+ Keys = ordsets:from_list(dict:fetch_keys(Map) ++ dict:fetch_keys(Subst)),
+ join_maps(Keys, Maps, MapOut).
+
+join_maps([Key|Left], Maps, MapOut) ->
+ Type = join_maps_one_key(Maps, Key, t_none()),
+ case t_is_equal(lookup_type(Key, MapOut), Type) of
+ true -> join_maps(Left, Maps, MapOut);
+ false -> join_maps(Left, Maps, enter_type(Key, Type, MapOut))
+ end;
+join_maps([], _Maps, MapOut) ->
+ MapOut.
+
+join_maps_one_key([Map|Left], Key, AccType) ->
+ case t_is_any(AccType) of
+ true ->
+ %% We can stop here
+ AccType;
+ false ->
+ join_maps_one_key(Left, Key, t_sup(lookup_type(Key, Map), AccType))
+ end;
+join_maps_one_key([], _Key, AccType) ->
+ AccType.
+
+enter_type_lists([Key|KeyTail], [Val|ValTail], Map) ->
+ Map1 = enter_type(Key, Val, Map),
+ enter_type_lists(KeyTail, ValTail, Map1);
+enter_type_lists([], [], Map) ->
+ Map.
+
+enter_type_list([{Key, Val}|Left], Map) ->
+ Map1 = enter_type(Key, Val, Map),
+ enter_type_list(Left, Map1);
+enter_type_list([], Map) ->
+ Map.
+
+enter_type(Key, Val, {Map, Subst} = MS) ->
+ case cerl:is_literal(Key) of
+ true -> MS;
+ false ->
+ case cerl:is_c_values(Key) of
+ true ->
+ Keys = cerl:values_es(Key),
+ case t_is_any(Val) orelse t_is_none(Val) of
+ true ->
+ enter_type_lists(Keys, [Val || _ <- Keys], MS);
+ false ->
+ enter_type_lists(cerl:values_es(Key), t_to_tlist(Val), MS)
+ end;
+ false ->
+ KeyLabel = get_label(Key),
+ case dict:find(KeyLabel, Subst) of
+ {ok, NewKey} ->
+ ?debug("Binding ~p to ~p\n", [KeyLabel, NewKey]),
+ enter_type(NewKey, Val, MS);
+ error ->
+ ?debug("Entering ~p :: ~s\n", [KeyLabel, t_to_string(Val)]),
+ case dict:find(KeyLabel, Map) of
+ {ok, Val} -> MS;
+ {ok, _OldVal} -> {dict:store(KeyLabel, Val, Map), Subst};
+ error -> {dict:store(KeyLabel, Val, Map), Subst}
+ end
+ end
+ end
+ end.
+
+enter_subst(Key, Val, {Map, Subst} = MS) ->
+ KeyLabel = get_label(Key),
+ case cerl:is_literal(Val) of
+ true ->
+ NewMap = dict:store(KeyLabel, literal_type(Val), Map),
+ {NewMap, Subst};
+ false ->
+ case cerl:is_c_var(Val) of
+ false -> MS;
+ true ->
+ ValLabel = get_label(Val),
+ case dict:find(ValLabel, Subst) of
+ {ok, NewVal} ->
+ enter_subst(Key, NewVal, MS);
+ error ->
+ if KeyLabel =:= ValLabel -> MS;
+ true ->
+ ?debug("Subst: storing ~p = ~p\n", [KeyLabel, ValLabel]),
+ NewSubst = dict:store(KeyLabel, ValLabel, Subst),
+ {Map, NewSubst}
+ end
+ end
+ end
+ end.
+
+lookup_type(Key, {Map, Subst}) ->
+ lookup(Key, Map, Subst, t_none()).
+
+lookup(Key, Map, Subst, AnyNone) ->
+ case cerl:is_literal(Key) of
+ true -> literal_type(Key);
+ false ->
+ Label = get_label(Key),
+ case dict:find(Label, Subst) of
+ {ok, NewKey} -> lookup(NewKey, Map, Subst, AnyNone);
+ error ->
+ case dict:find(Label, Map) of
+ {ok, Val} -> Val;
+ error -> AnyNone
+ end
+ end
+ end.
+
+lookup_fun_sig(Fun, Callgraph, Plt) ->
+ MFAorLabel =
+ case dialyzer_callgraph:lookup_name(Fun, Callgraph) of
+ error -> Fun;
+ {ok, MFA} -> MFA
+ end,
+ dialyzer_plt:lookup(Plt, MFAorLabel).
+
+literal_type(Lit) ->
+ t_from_term(cerl:concrete(Lit)).
+
+mark_as_fresh([Tree|Left], Map) ->
+ SubTrees1 = lists:append(cerl:subtrees(Tree)),
+ {SubTrees2, Map1} =
+ case cerl:type(Tree) of
+ bitstr ->
+ %% The Size field is not fresh.
+ {SubTrees1 -- [cerl:bitstr_size(Tree)], Map};
+ var ->
+ {SubTrees1, enter_type(Tree, t_any(), Map)};
+ _ ->
+ {SubTrees1, Map}
+ end,
+ mark_as_fresh(SubTrees2 ++ Left, Map1);
+mark_as_fresh([], Map) ->
+ Map.
+
+-ifdef(DEBUG).
+debug_pp_map(Map = {Map0, _Subst}) ->
+ Keys = dict:fetch_keys(Map0),
+ io:format("Map:\n", []),
+ lists:foreach(fun (Key) ->
+ io:format("\t~w :: ~s\n",
+ [Key, t_to_string(lookup_type(Key, Map))])
+ end, Keys),
+ ok.
+-else.
+debug_pp_map(_Map) -> ok.
+-endif.
+
+%%% ===========================================================================
+%%%
+%%% Utilities
+%%%
+%%% ===========================================================================
+
+get_label(L) when is_integer(L) ->
+ L;
+get_label(T) ->
+ cerl_trees:get_label(T).
+
+t_is_simple(ArgType) ->
+ t_is_atom(ArgType) orelse t_is_number(ArgType) orelse t_is_port(ArgType)
+ orelse t_is_pid(ArgType) orelse t_is_reference(ArgType)
+ orelse t_is_nil(ArgType).
+
+%% t_is_structured(ArgType) ->
+%% case t_is_nil(ArgType) of
+%% true -> false;
+%% false ->
+%% SType = t_inf(t_sup([t_list(), t_tuple(), t_binary()]), ArgType),
+%% t_is_equal(ArgType, SType)
+%% end.
+
+is_call_to_send(Tree) ->
+ case cerl:is_c_call(Tree) of
+ false -> false;
+ true ->
+ Mod = cerl:call_module(Tree),
+ Name = cerl:call_name(Tree),
+ Arity = cerl:call_arity(Tree),
+ cerl:is_c_atom(Mod)
+ andalso cerl:is_c_atom(Name)
+ andalso (cerl:atom_val(Name) =:= '!')
+ andalso (cerl:atom_val(Mod) =:= erlang)
+ andalso (Arity =:= 2)
+ end.
+
+any_opaque(Ts) ->
+ lists:any(fun erl_types:t_is_opaque/1, Ts).
+
+any_has_opaque_subtype(Ts) ->
+ lists:any(fun erl_types:t_has_opaque_subtype/1, Ts).
+
+filter_match_fail([Clause] = Cls) ->
+ Body = cerl:clause_body(Clause),
+ case cerl:type(Body) of
+ primop ->
+ case cerl:atom_val(cerl:primop_name(Body)) of
+ match_fail -> [];
+ raise -> [];
+ _ -> Cls
+ end;
+ _ -> Cls
+ end;
+filter_match_fail([H|T]) ->
+ [H|filter_match_fail(T)];
+filter_match_fail([]) ->
+ %% This can actually happen, for example in
+ %% receive after 1 -> ok end
+ [].
+
+determine_mode(Type, Opaques) ->
+ case lists:member(Type, Opaques) of
+ true -> opaque;
+ false -> structured
+ end.
+
+%%% ===========================================================================
+%%%
+%%% The State.
+%%%
+%%% ===========================================================================
+
+state__new(Callgraph, Tree, Plt, Module, Records) ->
+ TreeMap = build_tree_map(Tree),
+ Funs = dict:fetch_keys(TreeMap),
+ FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
+ Work = init_work([get_label(Tree)]),
+ Env = dict:store(top, map__new(), dict:new()),
+ Opaques = erl_types:module_builtin_opaques(Module) ++
+ erl_types:t_opaque_from_records(Records),
+ #state{callgraph = Callgraph, envs = Env, fun_tab = FunTab, opaques = Opaques,
+ plt = Plt, races = dialyzer_races:new(), records = Records,
+ warning_mode = false, warnings = [], work = Work, tree_map = TreeMap}.
+
+state__mark_fun_as_handled(#state{fun_tab = FunTab} = State, Fun0) ->
+ Fun = get_label(Fun0),
+ case dict:find(Fun, FunTab) of
+ {ok, {not_handled, Entry}} ->
+ State#state{fun_tab = dict:store(Fun, Entry, FunTab)};
+ {ok, {_, _}} ->
+ State
+ end.
+
+state__warning_mode(#state{warning_mode = WM}) ->
+ WM.
+
+state__set_warning_mode(#state{tree_map = TreeMap, fun_tab = FunTab,
+ races = Races} = State) ->
+ ?debug("Starting warning pass\n", []),
+ Funs = dict:fetch_keys(TreeMap),
+ State#state{work = init_work([top|Funs--[top]]),
+ fun_tab = FunTab, warning_mode = true,
+ races = dialyzer_races:put_race_analysis(true, Races)}.
+
+state__restore_race_code(RaceCode, #state{callgraph = Callgraph} = State) ->
+ State#state{callgraph = dialyzer_callgraph:put_race_code(RaceCode,
+ Callgraph)}.
+
+state__race_analysis(Analysis, #state{races = Races} = State) ->
+ State#state{races = dialyzer_races:put_race_analysis(Analysis, Races)}.
+
+state__renew_curr_fun(CurrFun, CurrFunLabel,
+ #state{races = Races} = State) ->
+ State#state{races = dialyzer_races:put_curr_fun(CurrFun, CurrFunLabel,
+ Races)}.
+
+state__renew_fun_args(Args, #state{races = Races} = State) ->
+ case state__warning_mode(State) of
+ true -> State;
+ false ->
+ State#state{races = dialyzer_races:put_fun_args(Args, Races)}
+ end.
+
+state__renew_race_list(RaceList, RaceListSize,
+ #state{races = Races} = State) ->
+ State#state{races = dialyzer_races:put_race_list(RaceList, RaceListSize,
+ Races)}.
+
+state__renew_warnings(Warnings, State) ->
+ State#state{warnings = Warnings}.
+
+-spec state__add_warning(dial_warning(), state()) -> state().
+
+state__add_warning(Warn, #state{warnings = Warnings} = State) ->
+ State#state{warnings = [Warn|Warnings]}.
+
+state__add_warning(State, Tag, Tree, Msg) ->
+ state__add_warning(State, Tag, Tree, Msg, false).
+
+state__add_warning(#state{warning_mode = false} = State, _, _, _, _) ->
+ State;
+state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
+ Tag, Tree, Msg, Force) ->
+ Ann = cerl:get_ann(Tree),
+ case Force of
+ true ->
+ Warn = {Tag, {get_file(Ann), abs(get_line(Ann))}, Msg},
+ State#state{warnings = [Warn|Warnings]};
+ false ->
+ case is_compiler_generated(Ann) of
+ true -> State;
+ false ->
+ Warn = {Tag, {get_file(Ann), get_line(Ann)}, Msg},
+ State#state{warnings = [Warn|Warnings]}
+ end
+ end.
+
+state__get_race_warnings(#state{races = Races} = State) ->
+ {Races1, State1} = dialyzer_races:get_race_warnings(Races, State),
+ State1#state{races = Races1}.
+
+state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
+ callgraph = Callgraph, plt = Plt} = State,
+ NoWarnUnused) ->
+ FoldFun =
+ fun({top, _}, AccState) -> AccState;
+ ({FunLbl, Fun}, AccState) ->
+ {NotCalled, Ret} =
+ case dict:fetch(get_label(Fun), FunTab) of
+ {not_handled, {_Args0, Ret0}} -> {true, Ret0};
+ {Args0, Ret0} -> {any_none(Args0), Ret0}
+ end,
+ case NotCalled of
+ true ->
+ {Warn, Msg} =
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> {true, {unused_fun, []}};
+                {ok, {_M, F, A} = MFA} ->
+ {not sets:is_element(MFA, NoWarnUnused),
+ {unused_fun, [F, A]}}
+ end,
+ case Warn of
+ true -> state__add_warning(AccState, ?WARN_NOT_CALLED, Fun, Msg);
+ false -> AccState
+ end;
+ false ->
+ {Name, Contract} =
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> {[], none};
+ {ok, {_M, F, A} = MFA} ->
+ {[F, A], dialyzer_plt:lookup_contract(Plt, MFA)}
+ end,
+ case t_is_none(Ret) of
+ true ->
+ %% Check if the function has a contract that allows this.
+ Warn =
+ case Contract of
+ none -> true;
+ {value, C} ->
+ GenRet = dialyzer_contracts:get_contract_return(C),
+ not t_is_unit(GenRet)
+ end,
+ case Warn of
+ true ->
+ case classify_returns(Fun) of
+ no_match ->
+ Msg = {no_return, [no_match|Name]},
+ state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ Fun, Msg);
+ only_explicit ->
+ Msg = {no_return, [only_explicit|Name]},
+ state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT,
+ Fun, Msg);
+ only_normal ->
+ Msg = {no_return, [only_normal|Name]},
+ state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ Fun, Msg);
+ both ->
+ Msg = {no_return, [both|Name]},
+ state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ Fun, Msg)
+ end;
+ false ->
+ AccState
+ end;
+ false ->
+ AccState
+ end
+ end
+ end,
+ #state{warnings = Warn} = lists:foldl(FoldFun, State, dict:to_list(TreeMap)),
+ Warn.
+
+state__is_escaping(Fun, #state{callgraph = Callgraph}) ->
+ dialyzer_callgraph:is_escaping(Fun, Callgraph).
+
+state__lookup_type_for_rec_var(Var, #state{callgraph = Callgraph} = State) ->
+ Label = get_label(Var),
+ case dialyzer_callgraph:lookup_rec_var(Label, Callgraph) of
+ error -> error;
+ {ok, MFA} ->
+ {ok, FunLabel} = dialyzer_callgraph:lookup_label(MFA, Callgraph),
+ {ok, state__fun_type(FunLabel, State)}
+ end.
+
+state__lookup_name({_, _, _} = MFA, #state{}) ->
+ MFA;
+state__lookup_name(top, #state{}) ->
+ top;
+state__lookup_name(Fun, #state{callgraph = Callgraph}) ->
+ case dialyzer_callgraph:lookup_name(Fun, Callgraph) of
+ {ok, MFA} -> MFA;
+ error -> Fun
+ end.
+
+state__lookup_record(Tag, Arity, #state{records = Records}) ->
+ case erl_types:lookup_record(Tag, Arity, Records) of
+ {ok, Fields} ->
+ {ok, t_tuple([t_atom(Tag)|
+ [FieldType || {_FieldName, FieldType} <- Fields]])};
+ error ->
+ error
+ end.
+
+state__get_args(Tree, #state{fun_tab = FunTab}) ->
+ Fun = get_label(Tree),
+ case dict:find(Fun, FunTab) of
+ {ok, {not_handled, {ArgTypes, _}}} -> ArgTypes;
+ {ok, {ArgTypes, _}} -> ArgTypes
+ end.
+
+build_tree_map(Tree) ->
+ Fun =
+ fun(T, Dict) ->
+ case cerl:is_c_fun(T) of
+ true ->
+ dict:store(get_label(T), T, Dict);
+ false ->
+ Dict
+ end
+ end,
+ cerl_trees:fold(Fun, dict:new(), Tree).
+
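+%% Escaping functions start out with any() argument types, since they
+%% can be called from outside the analysed code; non-escaping functions
+%% start with none() arguments, meaning "not called so far", and have
+%% their argument types widened by forward_args/3 as calls to them are
+%% discovered.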
+init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) ->
+ NewDict = dict:store(top, {not_handled, {[], t_none()}}, Dict),
+ init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt);
+init_fun_tab([Fun|Left], Dict, TreeMap, Callgraph, Plt) ->
+ Arity = cerl:fun_arity(dict:fetch(Fun, TreeMap)),
+ FunEntry =
+ case dialyzer_callgraph:is_escaping(Fun, Callgraph) of
+ true ->
+ Args = lists:duplicate(Arity, t_any()),
+ case lookup_fun_sig(Fun, Callgraph, Plt) of
+ none -> {Args, t_unit()};
+ {value, {RetType, _}} ->
+ case t_is_none(RetType) of
+ true -> {Args, t_none()};
+ false -> {Args, t_unit()}
+ end
+ end;
+ false -> {lists:duplicate(Arity, t_none()), t_unit()}
+ end,
+ NewDict = dict:store(Fun, {not_handled, FunEntry}, Dict),
+ init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt);
+init_fun_tab([], Dict, _TreeMap, _Callgraph, _Plt) ->
+ Dict.
+
+state__update_fun_env(Tree, Map, #state{envs = Envs} = State) ->
+ NewEnvs = dict:store(get_label(Tree), Map, Envs),
+ State#state{envs = NewEnvs}.
+
+state__fun_env(Tree, #state{envs = Envs}) ->
+ Fun = get_label(Tree),
+ case dict:find(Fun, Envs) of
+ error -> none;
+ {ok, Map} -> Map
+ end.
+
+state__clean_not_called(#state{fun_tab = FunTab} = State) ->
+ NewFunTab =
+ dict:map(fun(top, Entry) -> Entry;
+ (_Fun, {not_handled, {Args, _}}) -> {Args, t_none()};
+ (_Fun, Entry) -> Entry
+ end, FunTab),
+ State#state{fun_tab = NewFunTab}.
+
+state__all_fun_types(#state{fun_tab = FunTab}) ->
+ Tab1 = dict:erase(top, FunTab),
+  dict:map(fun(_Fun, {Args, Ret}) -> t_fun(Args, Ret) end, Tab1).
+
+state__fun_type(Fun, #state{fun_tab = FunTab}) ->
+ Label =
+ if is_integer(Fun) -> Fun;
+ true -> get_label(Fun)
+ end,
+ case dict:find(Label, FunTab) of
+ {ok, {not_handled, {A, R}}} ->
+ t_fun(A, R);
+ {ok, {A, R}} ->
+ t_fun(A, R)
+ end.
+
+state__update_fun_entry(Tree, ArgTypes, Out0,
+ #state{fun_tab=FunTab, callgraph=CG, plt=Plt} = State)->
+ Fun = get_label(Tree),
+ Out1 =
+ if Fun =:= top -> Out0;
+ true ->
+ case lookup_fun_sig(Fun, CG, Plt) of
+ {value, {SigRet, _}} -> t_inf(SigRet, Out0, opaque);
+ none -> Out0
+ end
+ end,
+ Out = t_limit(Out1, ?TYPE_LIMIT),
+ case dict:find(Fun, FunTab) of
+ {ok, {ArgTypes, OldOut}} ->
+ case t_is_equal(OldOut, Out) of
+ true ->
+ ?debug("Fixpoint for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_fun(ArgTypes, Out))]),
+ State;
+ false ->
+ NewEntry = {ArgTypes, Out},
+ ?debug("New Entry for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_fun(ArgTypes, Out))]),
+ NewFunTab = dict:store(Fun, NewEntry, FunTab),
+ State1 = State#state{fun_tab = NewFunTab},
+ state__add_work_from_fun(Tree, State1)
+ end;
+ {ok, {NewArgTypes, _OldOut}} ->
+ %% Can only happen in self-recursive functions. Only update the out type.
+ NewEntry = {NewArgTypes, Out},
+ ?debug("New Entry for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_fun(NewArgTypes, Out))]),
+ NewFunTab = dict:store(Fun, NewEntry, FunTab),
+ State1 = State#state{fun_tab = NewFunTab},
+ state__add_work_from_fun(Tree, State1)
+ end.
+
+state__add_work_from_fun(_Tree, #state{warning_mode = true} = State) ->
+ State;
+state__add_work_from_fun(Tree, #state{callgraph = Callgraph,
+ tree_map = TreeMap} = State) ->
+ case get_label(Tree) of
+ top -> State;
+ Label when is_integer(Label) ->
+ case dialyzer_callgraph:in_neighbours(Label, Callgraph) of
+ none -> State;
+ MFAList ->
+ LabelList = [dialyzer_callgraph:lookup_label(MFA, Callgraph)
+ || MFA <- MFAList],
+ %% Must filter the result for results in this module.
+ FilteredList = [L || {ok, L} <- LabelList, dict:is_key(L, TreeMap)],
+ ?debug("~w: Will try to add:~w\n",
+ [state__lookup_name(get_label(Tree), State), MFAList]),
+ lists:foldl(fun(L, AccState) ->
+ state__add_work(L, AccState)
+ end, State, FilteredList)
+ end
+ end.
+
+state__add_work(external, State) ->
+ State;
+state__add_work(top, State) ->
+ State;
+state__add_work(Fun, #state{work = Work} = State) ->
+ NewWork = add_work(Fun, Work),
+ State#state{work = NewWork}.
+
+state__get_work(#state{work = Work, tree_map = TreeMap} = State) ->
+ case get_work(Work) of
+ none -> none;
+ {Fun, NewWork} ->
+ {dict:fetch(Fun, TreeMap), State#state{work = NewWork}}
+ end.
+
+state__lookup_call_site(Tree, #state{callgraph = Callgraph}) ->
+ Label = get_label(Tree),
+ dialyzer_callgraph:lookup_call_site(Label, Callgraph).
+
+state__fun_info(external, #state{}) ->
+ external;
+state__fun_info({_, _, _} = MFA, #state{plt = PLT}) ->
+ {MFA,
+ dialyzer_plt:lookup(PLT, MFA),
+ dialyzer_plt:lookup_contract(PLT, MFA),
+ t_any()};
+state__fun_info(Fun, #state{callgraph = CG, fun_tab = FunTab, plt = PLT}) ->
+ {Sig, Contract} =
+ case dialyzer_callgraph:lookup_name(Fun, CG) of
+ error ->
+ {dialyzer_plt:lookup(PLT, Fun), none};
+ {ok, MFA} ->
+ {dialyzer_plt:lookup(PLT, MFA), dialyzer_plt:lookup_contract(PLT, MFA)}
+ end,
+ LocalRet =
+ case dict:fetch(Fun, FunTab) of
+ {not_handled, {_Args, Ret}} -> Ret;
+ {_Args, Ret} -> Ret
+ end,
+ {Fun, Sig, Contract, LocalRet}.
+
+state__find_apply_return(Tree, #state{callgraph = Callgraph} = State) ->
+ Apply = get_label(Tree),
+ case dialyzer_callgraph:lookup_call_site(Apply, Callgraph) of
+ error ->
+ unknown;
+ {ok, List} ->
+ case lists:member(external, List) of
+ true -> t_any();
+ false ->
+ FunTypes = [state__fun_type(F, State) || F <- List],
+ Returns = [t_fun_range(F) || F <- FunTypes],
+ t_sup(Returns)
+ end
+ end.
+
+forward_args(Fun, ArgTypes, #state{work = Work, fun_tab = FunTab} = State) ->
+ {OldArgTypes, OldOut, Fixpoint} =
+ case dict:find(Fun, FunTab) of
+ {ok, {not_handled, {OldArgTypes0, OldOut0}}} ->
+ {OldArgTypes0, OldOut0, false};
+ {ok, {OldArgTypes0, OldOut0}} ->
+ {OldArgTypes0, OldOut0,
+ t_is_subtype(t_product(ArgTypes), t_product(OldArgTypes0))}
+ end,
+ case Fixpoint of
+ true -> State;
+ false ->
+ NewArgTypes = [t_sup(X, Y) || {X, Y} <- lists:zip(ArgTypes, OldArgTypes)],
+ NewWork = add_work(Fun, Work),
+ ?debug("~w: forwarding args ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_product(NewArgTypes))]),
+ NewFunTab = dict:store(Fun, {NewArgTypes, OldOut}, FunTab),
+ State#state{work = NewWork, fun_tab = NewFunTab}
+ end.
+
+-spec state__cleanup(state()) -> state().
+
+state__cleanup(#state{callgraph = Callgraph,
+ races = Races,
+ records = Records}) ->
+ #state{callgraph = dialyzer_callgraph:cleanup(Callgraph),
+ races = dialyzer_races:cleanup(Races),
+ records = Records}.
+
+-spec state__get_callgraph(state()) -> dialyzer_callgraph:callgraph().
+
+state__get_callgraph(#state{callgraph = Callgraph}) ->
+ Callgraph.
+
+-spec state__get_races(state()) -> dialyzer_races:races().
+
+state__get_races(#state{races = Races}) ->
+ Races.
+
+-spec state__get_records(state()) -> dict().
+
+state__get_records(#state{records = Records}) ->
+ Records.
+
+-spec state__put_callgraph(dialyzer_callgraph:callgraph(), state()) ->
+ state().
+
+state__put_callgraph(Callgraph, State) ->
+ State#state{callgraph = Callgraph}.
+
+-spec state__put_races(dialyzer_races:races(), state()) -> state().
+
+state__put_races(Races, State) ->
+ State#state{races = Races}.
+
+-spec state__records_only(state()) -> state().
+
+state__records_only(#state{records = Records}) ->
+ #state{records = Records}.
+
+%%% ===========================================================================
+%%%
+%%% Races
+%%%
+%%% ===========================================================================
+
+renew_code(Fun, FunArgs, Code, WarningMode, Callgraph) ->
+ case WarningMode of
+ true -> Callgraph;
+ false ->
+ RaceCode = dialyzer_callgraph:get_race_code(Callgraph),
+ dialyzer_callgraph:put_race_code(
+ dict:store(Fun, [FunArgs, Code], RaceCode), Callgraph)
+ end.
+
+renew_public_tables([Var], Table, WarningMode, Callgraph) ->
+ case WarningMode of
+ true -> Callgraph;
+ false ->
+ case Table of
+ no_t -> Callgraph;
+ _Other ->
+ VarLabel = get_label(Var),
+ PTables = dialyzer_callgraph:get_public_tables(Callgraph),
+ dialyzer_callgraph:put_public_tables(
+ lists:usort([VarLabel|PTables]), Callgraph)
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Worklist
+%%%
+%%% ===========================================================================
+
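+%% The worklist is a FIFO queue {Front, ReversedBack, Set}: items are
+%% taken from Front, new work is pushed onto ReversedBack, and Set
+%% holds the currently queued items so that add_work/2 never enqueues
+%% the same function twice before it has been processed.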
+init_work(List) ->
+ {List, [], sets:from_list(List)}.
+
+get_work({[], [], _Set}) ->
+ none;
+get_work({[H|T], Rev, Set}) ->
+ {H, {T, Rev, sets:del_element(H, Set)}};
+get_work({[], Rev, Set}) ->
+ get_work({lists:reverse(Rev), [], Set}).
+
+add_work(New, {List, Rev, Set} = Work) ->
+ case sets:is_element(New, Set) of
+ true -> Work;
+ false -> {List, [New|Rev], sets:add_element(New, Set)}
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Utilities.
+%%%
+%%% ===========================================================================
+
+get_line([Line|_]) when is_integer(Line) -> Line;
+get_line([_|Tail]) -> get_line(Tail);
+get_line([]) -> -1.
+
+get_file([]) -> [];
+get_file([{file, File}|_]) -> File;
+get_file([_|Tail]) -> get_file(Tail).
+
+is_compiler_generated(Ann) ->
+ lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
+
+-spec format_args([term()], [erl_types:erl_type()], #state{}) ->
+ nonempty_string().
+
+format_args([], [], _State) ->
+ "()";
+format_args(ArgList, TypeList, State) ->
+ "(" ++ format_args_1(ArgList, TypeList, State) ++ ")".
+
+-spec format_args_1([term(),...], [erl_types:erl_type(),...], #state{}) ->
+ string().
+
+format_args_1([Arg], [Type], State) ->
+ format_arg(Arg) ++ format_type(Type, State);
+format_args_1([Arg|Args], [Type|Types], State) ->
+ String =
+ case cerl:is_literal(Arg) of
+ true -> format_cerl(Arg);
+ false -> format_arg(Arg) ++ format_type(Type, State)
+ end,
+ String ++ "," ++ format_args_1(Args, Types, State).
+
+format_arg(Arg) ->
+ Default = "",
+ case cerl:is_c_var(Arg) of
+ true ->
+ case cerl:var_name(Arg) of
+ Atom when is_atom(Atom) ->
+ case atom_to_list(Atom) of
+ "cor"++_ -> Default;
+ "rec"++_ -> Default;
+ Name -> Name ++ "::"
+ end;
+ _What -> Default
+ end;
+ false ->
+ Default
+ end.
+
+-spec format_type(erl_types:erl_type(), #state{}) -> string().
+
+format_type(Type, #state{records = R}) ->
+ t_to_string(Type, R).
+
+-spec format_sig_args(erl_types:erl_type(), #state{}) -> string().
+
+format_sig_args(Type, #state{records = R}) ->
+ SigArgs = t_fun_args(Type),
+ case SigArgs of
+ [] -> "()";
+ [SArg|SArgs] ->
+ lists:flatten("(" ++ t_to_string(SArg, R)
+ ++ ["," ++ t_to_string(T, R) || T <- SArgs] ++ ")")
+ end.
+
+format_cerl(Tree) ->
+ cerl_prettypr:format(cerl:set_ann(Tree, []),
+ [{hook, dialyzer_utils:pp_hook()},
+ {noann, true},
+ {paper, 100000}, %% These guys strip
+ {ribbon, 100000} %% newlines.
+ ]).
+
+format_patterns(Pats) ->
+ NewPats = map_pats(cerl:c_values(Pats)),
+ String = format_cerl(NewPats),
+ case Pats of
+ [PosVar] ->
+ case cerl:is_c_var(PosVar) andalso (cerl:var_name(PosVar) =/= '') of
+ true -> "variable "++String;
+ false -> "pattern "++String
+ end;
+ _ ->
+ "pattern "++String
+ end.
+
+map_pats(Pats) ->
+ Fun = fun(Tree) ->
+ case cerl:is_c_var(Tree) of
+ true ->
+ case cerl:var_name(Tree) of
+ Atom when is_atom(Atom) ->
+ case atom_to_list(Atom) of
+ "cor"++_ -> cerl:c_var('');
+ "rec"++_ -> cerl:c_var('');
+ _ -> cerl:set_ann(Tree, [])
+ end;
+ _What -> cerl:c_var('')
+ end;
+ false ->
+ cerl:set_ann(Tree, [])
+ end
+ end,
+ cerl_trees:map(Fun, Pats).
+
+classify_returns(Tree) ->
+ case find_terminals(cerl:fun_body(Tree)) of
+ {false, false} -> no_match;
+ {true, false} -> only_explicit;
+ {false, true} -> only_normal;
+ {true, true} -> both
+ end.
+
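+%% find_terminals/1 returns {Explicit, Normal}: Explicit is true if the
+%% expression can terminate in a call whose return type is none(), i.e.
+%% an explicit exit such as erlang:error/1 or exit/1; Normal is true if
+%% it can terminate by producing an ordinary value.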
+find_terminals(Tree) ->
+ case cerl:type(Tree) of
+ apply -> {false, true};
+ binary -> {false, true};
+ bitstr -> {false, true};
+ call ->
+ M0 = cerl:call_module(Tree),
+ F0 = cerl:call_name(Tree),
+ A = length(cerl:call_args(Tree)),
+ case cerl:is_literal(M0) andalso cerl:is_literal(F0) of
+ false ->
+ %% We cannot make assumptions. Say that both are true.
+ {true, true};
+ true ->
+ M = cerl:concrete(M0),
+ F = cerl:concrete(F0),
+ case (erl_bif_types:is_known(M, F, A)
+ andalso t_is_none(erl_bif_types:type(M, F, A))) of
+ true -> {true, false};
+ false -> {false, true}
+ end
+ end;
+ 'case' -> find_terminals_list(cerl:case_clauses(Tree));
+ 'catch' -> find_terminals(cerl:catch_body(Tree));
+ clause -> find_terminals(cerl:clause_body(Tree));
+ cons -> {false, true};
+ 'fun' -> {false, true};
+ 'let' -> find_terminals(cerl:let_body(Tree));
+ letrec -> find_terminals(cerl:letrec_body(Tree));
+ literal -> {false, true};
+ primop -> {false, false}; %% match_fail, etc. are not explicit exits.
+ 'receive' ->
+ Timeout = cerl:receive_timeout(Tree),
+ Clauses = cerl:receive_clauses(Tree),
+ case (cerl:is_literal(Timeout) andalso
+ (cerl:concrete(Timeout) =:= infinity)) of
+ true ->
+ if Clauses =:= [] -> {false, true}; %% A never ending receive.
+ true -> find_terminals_list(Clauses)
+ end;
+ false -> find_terminals_list([cerl:receive_action(Tree)|Clauses])
+ end;
+ seq -> find_terminals(cerl:seq_body(Tree));
+ 'try' ->
+ find_terminals_list([cerl:try_handler(Tree), cerl:try_body(Tree)]);
+ tuple -> {false, true};
+ values -> {false, true};
+ var -> {false, true}
+ end.
+
+find_terminals_list(List) ->
+ find_terminals_list(List, false, false).
+
+find_terminals_list([Tree|Left], Explicit1, Normal1) ->
+ {Explicit2, Normal2} = find_terminals(Tree),
+ case {Explicit1 or Explicit2, Normal1 or Normal2} of
+ {true, true} = Ans -> Ans;
+ {NewExplicit, NewNormal} ->
+ find_terminals_list(Left, NewExplicit, NewNormal)
+ end;
+find_terminals_list([], Explicit, Normal) ->
+ {Explicit, Normal}.
+
+%%----------------------------------------------------------------------------
+
+%% If a record pattern in a match violates the record's definition, it
+%% can never match. However, the warning is lost in the regular
+%% analysis; this after-pass catches it.
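+%%
+%% For example, given a (hypothetical) record declared as
+%%   -record(r, {count :: integer()}),
+%% a clause head such as f(#r{count = none}) -> ... can never match,
+%% and the mismatch is reported by this pass.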
+
+find_mismatched_record_patterns(Tree, State) ->
+ cerl_trees:fold(
+ fun(SubTree, AccState) ->
+ case cerl:is_c_clause(SubTree) of
+ true -> lists:foldl(fun(P, AccState1) ->
+ find_rec_warnings(P, AccState1)
+ end, AccState, cerl:clause_pats(SubTree));
+ false -> AccState
+ end
+ end, State, Tree).
+
+find_rec_warnings(Tree, State) ->
+ cerl_trees:fold(
+ fun(SubTree, AccState) ->
+ case cerl:is_c_tuple(SubTree) of
+ true -> find_rec_warnings_tuple(SubTree, AccState);
+ false -> AccState
+ end
+ end, State, Tree).
+
+find_rec_warnings_tuple(Tree, State) ->
+ Elements = cerl:tuple_es(Tree),
+ {_, _, EsType} = traverse_list(Elements, map__new(), State),
+ TupleType = t_tuple(EsType),
+ case t_is_none(TupleType) of
+ true -> State;
+ false ->
+ %% Let's find out if this is a record construction.
+ case Elements of
+ [Tag|Left] ->
+ case cerl:is_c_atom(Tag) of
+ true ->
+ TagVal = cerl:atom_val(Tag),
+ case state__lookup_record(TagVal, length(Left), State) of
+ error -> State;
+ {ok, Prototype} ->
+ InfTupleType = t_inf(Prototype, TupleType),
+ case t_is_none(InfTupleType) of
+ true ->
+ Msg = {record_matching,
+ [format_patterns([Tree]), TagVal]},
+ state__add_warning(State, ?WARN_MATCHING, Tree, Msg);
+ false ->
+ State
+ end
+ end;
+ false ->
+ State
+ end;
+ _ ->
+ State
+ end
+ end.
+
+%%----------------------------------------------------------------------------
+
+-ifdef(DEBUG_PP).
+debug_pp(Tree, true) ->
+ io:put_chars(cerl_prettypr:format(Tree, [{hook, cerl_typean:pp_hook()}])),
+ io:nl(),
+ ok;
+debug_pp(Tree, false) ->
+ io:put_chars(cerl_prettypr:format(strip_annotations(Tree))),
+ io:nl(),
+ ok.
+
+strip_annotations(Tree) ->
+ Fun = fun(T) ->
+ case cerl:type(T) of
+ var ->
+ cerl:set_ann(T, [{label, cerl_trees:get_label(T)}]);
+ 'fun' ->
+ cerl:set_ann(T, [{label, cerl_trees:get_label(T)}]);
+ _ ->
+ cerl:set_ann(T, [])
+ end
+ end,
+ cerl_trees:map(Fun, Tree).
+
+-else.
+
+debug_pp(_Tree, _UseHook) ->
+ ok.
+-endif.
+
+%%----------------------------------------------------------------------------
+
+-spec to_dot(dialyzer_callgraph:callgraph()) -> 'ok'.
+
+-ifdef(DOT).
+to_dot(CG) ->
+ dialyzer_callgraph:to_dot(CG).
+-else.
+to_dot(_CG) ->
+ ok.
+-endif.
+
+%%----------------------------------------------------------------------------
diff --git a/lib/dialyzer/src/dialyzer_dep.erl b/lib/dialyzer/src/dialyzer_dep.erl
new file mode 100644
index 0000000000..670433f003
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_dep.erl
@@ -0,0 +1,580 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_dep.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%%
+%%% Description: A pretty limited but efficient escape/dependency
+%%% analysis of Core Erlang.
+%%%
+%%% Created : 28 Oct 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_dep).
+
+-export([analyze/1]).
+-define(NO_UNUSED, true).
+-ifndef(NO_UNUSED).
+-export([test/1]).
+-endif.
+
+-include("dialyzer.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% analyze(CoreTree) -> {Deps, Esc, Calls}.
+%%
+%% Deps = a dict mapping the label of each function to an ordset of
+%%        the functions it calls.
+%%
+%% Esc = an ordset of the labels of escaping functions. A function is
+%%       considered to escape as soon as control over it leaves the
+%%       function in which it occurs (e.g., it is exported, returned,
+%%       or passed to unknown code), i.e., the analysis is
+%%       function-local rather than module-local.
+%%
+%% Calls = a dict mapping apply operations to an ordset of the function
+%%         labels to which the operation can refer. If 'external' is
+%%         part of the set, the operation may be externally defined.
+%%
+
+-spec analyze(cerl:c_module()) -> {dict(), ordset('external' | label()), dict()}.
+
+analyze(Tree) ->
+ %% io:format("Handling ~w\n", [cerl:atom_val(cerl:module_name(Tree))]),
+ {_, State} = traverse(Tree, map__new(), state__new(Tree), top),
+ Esc = state__esc(State),
+  %% Add a dependency from 'external' to every escaping function
+ State1 = state__add_deps(external, output(Esc), State),
+ Deps = state__deps(State1),
+ Calls = state__calls(State1),
+ {map__finalize(Deps), set__to_ordsets(Esc), map__finalize(Calls)}.
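+
+%% A minimal usage sketch (illustration only, not part of this module;
+%% it assumes a source file "m.erl" and mirrors test/1 below):
+%%
+%%   {ok, _, Core} = compile:file("m.erl", [to_core, binary]),
+%%   {LabeledTree, _} = cerl_trees:label(cerl:from_records(Core)),
+%%   {Deps, Esc, Calls} = dialyzer_dep:analyze(LabeledTree),
+%%
+%% where Deps maps a label (or 'top'/'external') to the ordset of
+%% labels it may call, Esc is the ordset of escaping labels, and Calls
+%% maps apply sites to their possible targets.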
+
+traverse(Tree, Out, State, CurrentFun) ->
+ %% io:format("Type: ~w\n", [cerl:type(Tree)]),
+ case cerl:type(Tree) of
+ apply ->
+ Op = cerl:apply_op(Tree),
+ Args = cerl:apply_args(Tree),
+ %% Op is always a variable and should not be marked as escaping
+ %% based on its use.
+ case var =:= cerl:type(Op) of
+ false -> erlang:error({apply_op_not_a_variable, cerl:type(Op)});
+ true -> ok
+ end,
+ OpFuns = case map__lookup(cerl_trees:get_label(Op), Out) of
+ none -> output(none);
+ {value, OF} -> OF
+ end,
+ {ArgFuns, State2} = traverse_list(Args, Out, State, CurrentFun),
+ State3 = state__add_esc(merge_outs(ArgFuns), State2),
+ State4 = state__add_deps(CurrentFun, OpFuns, State3),
+ State5 = state__store_callsite(cerl_trees:get_label(Tree),
+ OpFuns, length(Args), State4),
+ {output(set__singleton(external)), State5};
+ binary ->
+ {output(none), State};
+ 'case' ->
+ Arg = cerl:case_arg(Tree),
+ {Funs, NewState} = traverse(Arg, Out, State, CurrentFun),
+ Clauses = cerl:case_clauses(Tree),
+ traverse_clauses(Clauses, Funs, Out, NewState, CurrentFun);
+ call ->
+ Args = cerl:call_args(Tree),
+ {ArgFuns, State1} = traverse_list(Args, Out, State, CurrentFun),
+ remote_call(Tree, merge_outs(ArgFuns), State1);
+ 'catch' ->
+ traverse(cerl:catch_body(Tree), Out, State, CurrentFun);
+ cons ->
+ {HdFuns, State1} = traverse(cerl:cons_hd(Tree), Out, State, CurrentFun),
+ {TlFuns, State2} = traverse(cerl:cons_tl(Tree), Out, State1, CurrentFun),
+ {merge_outs([HdFuns, TlFuns]), State2};
+ 'fun' ->
+ %% io:format("Entering fun: ~w\n", [cerl_trees:get_label(Tree)]),
+ Body = cerl:fun_body(Tree),
+ Label = cerl_trees:get_label(Tree),
+ State1 =
+ if CurrentFun =:= top ->
+ state__add_deps(top, output(set__singleton(Label)), State);
+ true ->
+ O1 = output(set__singleton(CurrentFun)),
+ O2 = output(set__singleton(Label)),
+ TmpState = state__add_deps(Label, O1, State),
+ state__add_deps(CurrentFun, O2,TmpState)
+ end,
+ {BodyFuns, State2} = traverse(Body, Out, State1,
+ cerl_trees:get_label(Tree)),
+ {output(set__singleton(Label)), state__add_esc(BodyFuns, State2)};
+ 'let' ->
+ Vars = cerl:let_vars(Tree),
+ Arg = cerl:let_arg(Tree),
+ Body = cerl:let_body(Tree),
+ {ArgFuns, State1} = traverse(Arg, Out, State, CurrentFun),
+ Out1 = bind_list(Vars, ArgFuns, Out),
+ traverse(Body, Out1, State1, CurrentFun);
+ letrec ->
+ Defs = cerl:letrec_defs(Tree),
+ Body = cerl:letrec_body(Tree),
+ Out1 = bind_defs(Defs, Out),
+ State1 = traverse_defs(Defs, Out1, State, CurrentFun),
+ traverse(Body, Out1, State1, CurrentFun);
+ literal ->
+ {output(none), State};
+ module ->
+ Defs = cerl:module_defs(Tree),
+ Out1 = bind_defs(Defs, Out),
+ State1 = traverse_defs(Defs, Out1, State, CurrentFun),
+ {output(none), State1};
+ primop ->
+ Args = cerl:primop_args(Tree),
+ {ArgFuns, State1} = traverse_list(Args, Out, State, CurrentFun),
+ primop(Tree, merge_outs(ArgFuns), State1);
+ 'receive' ->
+ Clauses = cerl:receive_clauses(Tree),
+ TimeOut = cerl:receive_timeout(Tree),
+ Action = cerl:receive_action(Tree),
+ {ClauseFuns, State1} =
+ traverse_clauses(Clauses, output(none), Out, State, CurrentFun),
+ {_, State2} = traverse(TimeOut, Out, State1, CurrentFun),
+ {ActionFuns, State3} = traverse(Action, Out, State2, CurrentFun),
+ {merge_outs([ClauseFuns, ActionFuns]), State3};
+ seq ->
+ {_, State1} = traverse(cerl:seq_arg(Tree), Out, State, CurrentFun),
+ traverse(cerl:seq_body(Tree), Out, State1, CurrentFun);
+ 'try' ->
+ Arg = cerl:try_arg(Tree),
+ Body = cerl:try_body(Tree),
+ Vars = cerl:try_vars(Tree),
+ EVars = cerl:try_evars(Tree),
+ Handler = cerl:try_handler(Tree),
+ {ArgFuns, State1} = traverse(Arg, Out, State, CurrentFun),
+ Out1 = bind_list(Vars, ArgFuns, Out),
+ {BodyFuns, State2} = traverse(Body, Out1, State1, CurrentFun),
+ Out2 = bind_single(EVars, output(set__singleton(external)), Out),
+ {HandlerFuns, State3} = traverse(Handler, Out2, State2, CurrentFun),
+ {merge_outs([BodyFuns, HandlerFuns]), State3};
+ tuple ->
+ Args = cerl:tuple_es(Tree),
+ {List, State1} = traverse_list(Args, Out, State, CurrentFun),
+ {merge_outs(List), State1};
+ values ->
+ traverse_list(cerl:values_es(Tree), Out, State, CurrentFun);
+ var ->
+ case map__lookup(cerl_trees:get_label(Tree), Out) of
+ none -> {output(none), State};
+ {value, Val} ->
+ case is_only_external(Val) of
+ true ->
+ %% Do nothing
+ {Val, State};
+ false ->
+ %% If this is used in a function this means a dependency.
+ {Val, state__add_deps(CurrentFun, Val, State)}
+ end
+ end
+ end.
+
+traverse_list(Trees, Out, State, CurrentFun) ->
+ traverse_list(Trees, Out, State, CurrentFun, []).
+
+traverse_list([Tree|Left], Out, State, CurrentFun, Acc) ->
+ {X, State1} = traverse(Tree, Out, State, CurrentFun),
+ traverse_list(Left, Out, State1, CurrentFun, [X|Acc]);
+traverse_list([], _Out, State, _CurrentFun, Acc) ->
+ {output(lists:reverse(Acc)), State}.
+
+traverse_defs([{_, Fun}|Left], Out, State, CurrentFun) ->
+ {_, State1} = traverse(Fun, Out, State, CurrentFun),
+ traverse_defs(Left, Out, State1, CurrentFun);
+traverse_defs([], _Out, State, _CurrentFun) ->
+ State.
+
+traverse_clauses(Clauses, ArgFuns, Out, State, CurrentFun) ->
+ case filter_match_fail(Clauses) of
+ [] ->
+      %% Can happen, for example, with receives used as timeouts.
+ {output(none), State};
+ Clauses1 ->
+ traverse_clauses(Clauses1, ArgFuns, Out, State, CurrentFun, [])
+ end.
+
+traverse_clauses([Clause|Left], ArgFuns, Out, State, CurrentFun, Acc) ->
+ Pats = cerl:clause_pats(Clause),
+ Guard = cerl:clause_guard(Clause),
+ Body = cerl:clause_body(Clause),
+ Out1 = bind_pats_list(Pats, ArgFuns, Out),
+ {_, State2} = traverse(Guard, Out1, State, CurrentFun),
+ {BodyFuns, State3} = traverse(Body, Out1, State2, CurrentFun),
+ traverse_clauses(Left, ArgFuns, Out, State3, CurrentFun, [BodyFuns|Acc]);
+traverse_clauses([], _ArgFuns, _Out, State, _CurrentFun, Acc) ->
+ {merge_outs(Acc), State}.
+
+filter_match_fail([Clause]) ->
+ Body = cerl:clause_body(Clause),
+ case cerl:type(Body) of
+ primop ->
+ case cerl:atom_val(cerl:primop_name(Body)) of
+ match_fail -> [];
+ raise -> [];
+ _ -> [Clause]
+ end;
+ _ -> [Clause]
+ end;
+filter_match_fail([H|T]) ->
+ [H|filter_match_fail(T)];
+filter_match_fail([]) ->
+ %% This can actually happen, for example in
+ %% receive after 1 -> ok end
+ [].
+
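+%% Calls to remote functions: if the module or function name is not a
+%% literal atom, the call is treated as completely unknown (the
+%% argument closures escape and the result may be 'external').
+%% Otherwise the result is approximated with erl_bifs:is_pure/3,
+%% cerl_closurean:is_literal_op/3 (can the result contain closures?)
+%% and cerl_closurean:is_escape_op/3 (do the arguments escape?).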
+remote_call(Tree, ArgFuns, State) ->
+ M = cerl:call_module(Tree),
+ F = cerl:call_name(Tree),
+ A = length(cerl:call_args(Tree)),
+ case cerl:is_c_atom(M) andalso cerl:is_c_atom(F) of
+ false ->
+ %% Unknown function.
+ {output(set__singleton(external)), state__add_esc(ArgFuns, State)};
+ true ->
+ M1 = cerl:atom_val(M),
+ F1 = cerl:atom_val(F),
+ Literal = cerl_closurean:is_literal_op(M1, F1, A),
+ case erl_bifs:is_pure(M1, F1, A) of
+ true ->
+ case Literal of
+ true ->
+ {output(none), State};
+ false ->
+ {output(set__singleton(external)), state__add_esc(ArgFuns, State)}
+ end;
+ false ->
+ State1 = case cerl_closurean:is_escape_op(M1, F1, A) of
+ true -> state__add_esc(ArgFuns, State);
+ false -> State
+ end,
+ case Literal of
+ true -> {output(none), State1};
+ false -> {add_external(ArgFuns), State1}
+ end
+ end
+ end.
+
+primop(Tree, ArgFuns, State) ->
+ F = cerl:atom_val(cerl:primop_name(Tree)),
+ A = length(cerl:primop_args(Tree)),
+ State1 = case cerl_closurean:is_escape_op(F, A) of
+ true -> state__add_esc(ArgFuns, State);
+ false -> State
+ end,
+ case cerl_closurean:is_literal_op(F, A) of
+ true -> {output(none), State1};
+ false -> {ArgFuns, State1}
+ end.
+
+%%------------------------------------------------------------
+%% Set
+%%
+
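+%% A thin wrapper around the standard 'sets' module; the atom 'none'
+%% stands for the empty set so that unions and membership tests can
+%% short-circuit.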
+-record(set, {set :: set()}).
+
+set__singleton(Val) ->
+ #set{set = sets:add_element(Val, sets:new())}.
+
+set__from_list(List) ->
+ #set{set = sets:from_list(List)}.
+
+set__is_element(_El, none) ->
+ false;
+set__is_element(El, #set{set = Set}) ->
+ sets:is_element(El, Set).
+
+set__union(none, Set) -> Set;
+set__union(Set, none) -> Set;
+set__union(#set{set = S1}, #set{set = S2}) -> #set{set = sets:union(S1, S2)}.
+
+set__to_ordsets(none) -> [];
+set__to_ordsets(#set{set = Set}) -> ordsets:from_list(sets:to_list(Set)).
+
+set__size(none) -> 0;
+set__size(#set{set = Set}) -> sets:size(Set).
+
+set__filter(#set{set = Set}, Fun) ->
+ NewSet = sets:filter(Fun, Set),
+ case sets:size(NewSet) =:= 0 of
+ true -> none;
+ false -> #set{set = NewSet}
+ end.
+
+%%------------------------------------------------------------
+%% Outputs
+%%
+
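+%% An output is either a 'single' set of function labels (the possible
+%% closures of one value) or a 'list' of outputs, one per value, used
+%% for multi-value constructs such as 'values' and argument lists.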
+-record(output, {type :: 'single' | 'list',
+ content :: 'none' | #set{} | [{output,_,_}]}).
+
+output(none) -> #output{type = single, content = none};
+output(S = #set{}) -> #output{type = single, content = S};
+output(List) when is_list(List) -> #output{type = list, content = List}.
+
+merge_outs([H|T]) ->
+ merge_outs(T, H);
+merge_outs(#output{type = list, content = [H|T]}) ->
+ merge_outs(T, H);
+merge_outs(#output{type = list, content = []}) ->
+ output(none).
+
+merge_outs([#output{content = none}|Left], O) ->
+ merge_outs(Left, O);
+merge_outs([O|Left], #output{content = none}) ->
+ merge_outs(Left, O);
+merge_outs([#output{type = single, content = S1}|Left],
+ #output{type = single, content = S2}) ->
+ merge_outs(Left, output(set__union(S1, S2)));
+merge_outs([#output{type = list, content = L1}|Left],
+ #output{type = list, content = L2}) ->
+ NewList = [merge_outs([X, Y]) || {X, Y} <- lists:zip(L1, L2)],
+ merge_outs(Left, output(NewList));
+merge_outs([], Res) ->
+ Res.
+
+filter_outs(#output{type = single, content = S}, Fun) ->
+ output(set__filter(S, Fun)).
+
+add_external(#output{type = single, content = Set}) ->
+ output(set__union(Set, set__singleton(external)));
+add_external(#output{type = list, content = List}) ->
+ output([add_external(O) || O <- List]).
+
+is_only_external(#output{type = single, content = Set}) ->
+ set__is_element(external, Set) andalso (set__size(Set) =:= 1).
+
+%%------------------------------------------------------------
+%% Map
+%%
+
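+%% A dict keyed by label; map__add/3 unions with any previously stored
+%% set, and map__finalize/1 converts the stored sets (or single
+%% outputs) to ordsets before they are returned from analyze/1.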
+map__new() ->
+ dict:new().
+
+map__add(_Label, none, Map) ->
+ Map;
+map__add(Label, Set, Map) ->
+ case map__lookup(Label, Map) of
+ {value, OldSet} ->
+ NewSet = set__union(OldSet, Set),
+ map__store(Label, NewSet, Map);
+ none ->
+ map__store(Label, Set, Map)
+ end.
+
+map__store(Label, Val, Map) ->
+ dict:store(Label, Val, Map).
+
+map__lookup(Label, Map) ->
+ case dict:find(Label, Map) of
+ {ok, Val} -> {value, Val};
+ error -> none
+ end.
+
+map__finalize(Map) ->
+ dict:map(fun (_Key, #set{} = Set) -> set__to_ordsets(Set);
+ (_Key, #output{type = single, content = Set}) ->
+ set__to_ordsets(Set)
+ end, Map).
+
+%%------------------------------------------------------------
+%% Binding outs in the map
+%%
+
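+%% The bind_* functions record, for each variable label, the output
+%% that may flow into it; a pattern binds every variable it contains,
+%% as collected by all_vars/1.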
+bind_pats_list(_Pats, #output{content = none}, Map) ->
+ Map;
+bind_pats_list([Pat], #output{type = single} = O, Map) ->
+ bind_single(all_vars(Pat), O, Map);
+bind_pats_list(Pats, #output{type = list, content = List}, Map) ->
+ bind_pats_list(Pats, List, Map);
+bind_pats_list([Pat|PatLeft],
+ [#output{type = single} = O|SetLeft], Map)->
+ Map1 = bind_single(all_vars(Pat), O, Map),
+ bind_pats_list(PatLeft, SetLeft, Map1);
+bind_pats_list([Pat|PatLeft],
+ [#output{type = list, content = List}|SetLeft], Map) ->
+ Map1 = case cerl:is_c_values(Pat) of
+ true -> bind_pats_list(cerl:values_es(Pat), List, Map);
+ false -> bind_single(all_vars(Pat), merge_outs(List), Map)
+ end,
+ bind_pats_list(PatLeft, SetLeft, Map1);
+bind_pats_list([], [], Map) ->
+ Map.
+
+bind_single([Var|Left], O, Map) ->
+ bind_single(Left, O, map__store(cerl_trees:get_label(Var), O, Map));
+bind_single([], _O, Map) ->
+ Map.
+
+bind_list(List, #output{type = single} = O, Map) ->
+ bind_single(List, O, Map);
+bind_list(List1, #output{type = list, content = List2}, Map) ->
+ bind_list1(List1, List2, Map).
+
+bind_list1([Var|VarLeft], [O|OLeft], Map) ->
+ bind_list1(VarLeft, OLeft, map__store(cerl_trees:get_label(Var), O, Map));
+bind_list1([], [], Map) ->
+ Map.
+
+bind_defs([{Var, Fun}|Left], Map) ->
+ O = output(set__singleton(cerl_trees:get_label(Fun))),
+ Map1 = map__store(cerl_trees:get_label(Var), O, Map),
+ bind_defs(Left, Map1);
+bind_defs([], Map) ->
+ Map.
+
+all_vars(Tree) ->
+ all_vars(Tree, []).
+
+all_vars(Tree, AccIn) ->
+ cerl_trees:fold(fun(SubTree, Acc) ->
+ case cerl:is_c_var(SubTree) of
+ true -> [SubTree|Acc];
+ false -> Acc
+ end
+ end, AccIn, Tree).
+
+%%------------------------------------------------------------
+%% The state
+%%
+
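+%% The analysis state: 'deps' and 'call' are label-to-output maps,
+%% 'esc' is the set of escaping labels (seeded with the labels of all
+%% exported functions), and 'arities' maps every fun label to its
+%% arity so that call sites can later be filtered on arity.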
+-type local_set() :: 'none' | #set{}.
+
+-record(state, {deps :: dict(),
+ esc :: local_set(),
+ call :: dict(),
+ arities :: dict()}).
+
+state__new(Tree) ->
+ Exports = set__from_list([X || X <- cerl:module_exports(Tree)]),
+ InitEsc = set__from_list([cerl_trees:get_label(Fun)
+ || {Var, Fun} <- cerl:module_defs(Tree),
+ set__is_element(Var, Exports)]),
+ Arities = cerl_trees:fold(fun find_arities/2, dict:new(), Tree),
+ #state{deps = map__new(), esc = InitEsc, call = map__new(), arities = Arities}.
+
+find_arities(Tree, AccMap) ->
+ case cerl:is_c_fun(Tree) of
+ true ->
+ Label = cerl_trees:get_label(Tree),
+ Arity = cerl:fun_arity(Tree),
+ dict:store(Label, Arity, AccMap);
+ false ->
+ AccMap
+ end.
+
+state__add_deps(_From, #output{content = none}, State) ->
+ State;
+state__add_deps(From, #output{type = single, content=To},
+ #state{deps = Map} = State) ->
+ %% io:format("Adding deps from ~w to ~w\n", [From, set__to_ordsets(To)]),
+ State#state{deps = map__add(From, To, Map)}.
+
+state__deps(#state{deps = Deps}) ->
+ Deps.
+
+state__add_esc(#output{content = none}, State) ->
+ State;
+state__add_esc(#output{type = single, content = Set},
+ #state{esc = Esc} = State) ->
+ State#state{esc = set__union(Set, Esc)}.
+
+state__esc(#state{esc = Esc}) ->
+ Esc.
+
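+%% Record the possible targets of an apply site, keeping 'external'
+%% and only those functions whose arity matches the number of
+%% arguments at the call site.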
+state__store_callsite(_From, #output{content = none}, _CallArity, State) ->
+ State;
+state__store_callsite(From, To, CallArity,
+ #state{call = Calls, arities = Arities} = State) ->
+ Filter = fun(external) -> true;
+ (Fun) -> CallArity =:= dict:fetch(Fun, Arities)
+ end,
+ case filter_outs(To, Filter) of
+ #output{content = none} -> State;
+ To1 -> State#state{call = map__store(From, To1, Calls)}
+ end.
+
+state__calls(#state{call = Calls}) ->
+ Calls.
+
+%%------------------------------------------------------------
+%% A test function. Not part of the intended interface.
+%%
+
+-ifndef(NO_UNUSED).
+
+test(Mod) ->
+ {ok, _, Code} = compile:file(Mod, [to_core, binary]),
+ Tree = cerl:from_records(Code),
+ {LabeledTree, _} = cerl_trees:label(Tree),
+ {Deps, Esc, Calls} = analyze(LabeledTree),
+ Edges0 = dict:fold(fun(Caller, Set, Acc) ->
+ [[{Caller, Callee} || Callee <- Set]|Acc]
+ end, [], Deps),
+ Edges1 = lists:flatten(Edges0),
+ Edges = [Edge || {X,_Y} = Edge <- Edges1, X =/= top],
+ Fun = fun(SubTree, Acc) ->
+ case cerl:type(SubTree) of
+ 'fun' ->
+ case lists:keyfind(id, 1, cerl:get_ann(SubTree)) of
+ false -> Acc;
+ {id, ID} ->
+ dict:store(cerl_trees:get_label(SubTree), ID, Acc)
+ end;
+ module ->
+ Defs = cerl:module_defs(SubTree),
+ lists:foldl(fun({Var, Fun}, Acc1) ->
+ dict:store(cerl_trees:get_label(Fun),
+ {cerl:fname_id(Var),
+ cerl:fname_arity(Var)},
+ Acc1)
+ end, Acc, Defs);
+ letrec ->
+ Defs = cerl:letrec_defs(SubTree),
+ lists:foldl(fun({Var, Fun}, Acc1) ->
+ dict:store(cerl_trees:get_label(Fun),
+ {cerl:fname_id(Var),
+ cerl:fname_arity(Var)},
+ Acc1)
+ end, Acc, Defs);
+ _ -> Acc
+ end
+ end,
+ NameMap1 = cerl_trees:fold(Fun, dict:new(), LabeledTree),
+ NameMap = dict:store(external, external, NameMap1),
+ NamedEdges = [{dict:fetch(X, NameMap), dict:fetch(Y, NameMap)}
+ || {X, Y} <- Edges],
+ NamedEsc = [dict:fetch(X, NameMap) || X <- Esc],
+ %% Color the edges
+ ColorEsc = [{X, {color, red}} || X <- NamedEsc],
+ CallEdges0 = dict:fold(fun(Caller, Set, Acc) ->
+ [[{Caller, Callee} || Callee <- Set]|Acc]
+ end, [], Calls),
+ CallEdges = lists:flatten(CallEdges0),
+ NamedCallEdges = [{X, dict:fetch(Y, NameMap)} || {X, Y} <- CallEdges],
+ AllNamedEdges = NamedEdges ++ NamedCallEdges,
+ hipe_dot:translate_list(AllNamedEdges, "/tmp/cg.dot", "CG", ColorEsc),
+ os:cmd("dot -T ps -o /tmp/cg.ps /tmp/cg.dot"),
+ ok.
+
+-endif.
diff --git a/lib/dialyzer/src/dialyzer_explanation.erl b/lib/dialyzer/src/dialyzer_explanation.erl
new file mode 100644
index 0000000000..afc2c1965f
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_explanation.erl
@@ -0,0 +1,52 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_explanation.erl
+%%% Author : Elli Fragkaki <[email protected]>
+%%% Description : Server loop answering explanation requests about warnings.
+%%%-------------------------------------------------------------------
+
+-module(dialyzer_explanation).
+
+-export([expl_loop/3]).
+
+-include("dialyzer.hrl").
+
+-spec expl_loop(pid(), dialyzer_codeserver:codeserver(), dialyzer_plt:plt()) ->
+ no_return().
+
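+%% Currently a stub: every warning request is answered with the
+%% explanation 'none', and every request for further explanations with
+%% {self(), further, none}.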
+expl_loop(Parent, CServer, Plt) ->
+ receive
+ {Parent, warning, _Warning} ->
+ send_explanation(Parent, none),
+ expl_loop(Parent, CServer, Plt);
+ {Parent, further, _Explanation} ->
+ Parent ! {self(), further, none},
+ expl_loop(Parent, CServer, Plt);
+ Other ->
+ io:format("Unknown message: ~p\n", [Other]),
+ expl_loop(Parent, CServer, Plt)
+ end.
+
+send_explanation(Parent, Expl) ->
+ Parent ! {self(), explanation, Expl},
+ ok.
+
diff --git a/lib/dialyzer/src/dialyzer_gui.erl b/lib/dialyzer/src/dialyzer_gui.erl
new file mode 100644
index 0000000000..f353638cdf
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_gui.erl
@@ -0,0 +1,1349 @@
+%% -*- erlang-indent-level: 2 -*-
+%%------------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-----------------------------------------------------------------------
+%%% File : dialyzer_gui.erl
+%%% Authors : Tobias Lindahl <[email protected]>
+%%% Kostis Sagonas <[email protected]>
+%%% Description : The graphical user interface for the Dialyzer tool.
+%%%
+%%% Created : 27 Apr 2004 by Tobias Lindahl <[email protected]>
+%%%-----------------------------------------------------------------------
+
+-module(dialyzer_gui).
+
+-export([start/1]).
+
+-include("dialyzer.hrl").
+
+%%------------------------------------------------------------------------
+
+-define(DIALYZER_ERROR_TITLE, "Dialyzer Error").
+-define(DIALYZER_MESSAGE_TITLE, "Dialyzer Message").
+
+%%------------------------------------------------------------------------
+
+-type gs_object() :: any(). %% XXX: should be imported from gs
+
+-record(mode, {start_byte_code :: gs_object(),
+ start_src_code :: gs_object()}).
+
+-record(menu, {file_save_log :: gs_object(),
+ file_save_warn :: gs_object(),
+ file_quit :: gs_object(),
+ help_about :: gs_object(),
+ help_manual :: gs_object(),
+ help_warnings :: gs_object(),
+ opts_macros :: gs_object(),
+ opts_includes :: gs_object(),
+ plt_empty :: gs_object(),
+ plt_search_doc :: gs_object(),
+ plt_show_doc :: gs_object(),
+ warnings :: gs_object()}).
+
+-record(gui_state, {add_all :: gs_object(),
+ add_file :: gs_object(),
+ add_rec :: gs_object(),
+ chosen_box :: gs_object(),
+ analysis_pid :: pid(),
+ del_file :: gs_object(),
+ doc_plt :: dialyzer_plt:plt(),
+ clear_chosen :: gs_object(),
+ clear_log :: gs_object(),
+ clear_warn :: gs_object(),
+ init_plt :: dialyzer_plt:plt(),
+ dir_entry :: gs_object(),
+ file_box :: gs_object(),
+ file_wd :: gs_object(),
+ gs :: gs_object(),
+ log :: gs_object(),
+ menu :: #menu{},
+ mode :: #mode{},
+ options :: #options{},
+ packer :: gs_object(),
+ run :: gs_object(),
+ stop :: gs_object(),
+ top :: gs_object(),
+ warnings_box :: gs_object(),
+ backend_pid :: pid()}).
+
+%%------------------------------------------------------------------------
+
+-spec start(#options{}) -> ?RET_NOTHING_SUSPICIOUS.
+
+start(DialyzerOptions = #options{from = From, init_plt = InitPltFile,
+ legal_warnings = LegalWarnings}) ->
+ process_flag(trap_exit, true),
+
+ GS = gs:start(),
+ code:add_pathsa(["."]),
+ WH = [{width, 1000}, {height, 550}],
+ EmptySpace = {stretch, 1},
+
+ {ok, Host} = inet:gethostname(),
+ %% --------- Top Window --------------
+ TopWin = gs:window(GS, [{title, "Dialyzer " ++ ?VSN ++ " @ " ++ Host},
+ {configure, true},
+ {default, listbox, {bg, white}},
+ {default, editor, {bg, white}},
+ {default, entry, {bg, white}},
+ {default, button, {font, {helvetica, bold, 12}}},
+ {default, label, {font, {helvetica, bold, 12}}}
+ |WH]),
+ Packer = gs:frame(TopWin, [{packer_x, [{stretch, 3},{fixed, 200},
+ {stretch, 7}]},
+ {packer_y, [{fixed, 25}, {fixed, 20},
+ {stretch, 1, 50},
+ {fixed, 25}, {fixed, 20},
+ {stretch, 1, 50},
+ {fixed, 25}]}]),
+
+ %% --------- Chosen box --------------
+ gs:label(Packer, [{label, {text, "Directories or modules to analyze"}},
+ {height, 20}, {pack_xy, {1, 2}}]),
+ ChosenBox = gs:listbox(Packer, [{pack_xy, {1, 3}}, {vscroll, right},
+ {selectmode, multiple}]),
+
+ %% --------- File box --------------
+ gs:label(Packer, [{label, {text, "File"}}, {height, 20}, {pack_xy, {1,5}}]),
+ FilePacker = gs:frame(Packer, [{packer_x, [{fixed, 30}, {stretch, 1, 100}]},
+ {packer_y, [{fixed, 25}, {stretch, 1, 25}]},
+ {pack_xy, {1, 6}}]),
+ gs:label(FilePacker, [{label, {text, "Dir:"}}, {pack_xy, {1, 1}}]),
+ DirEntry = gs:entry(FilePacker, [{height, 30}, {pack_xy, {2, 1}},
+ {keypress, true}]),
+ File = gs:listbox(FilePacker, [{pack_x, {1,2}}, {pack_y, 2},
+ {selectmode, multiple}, {doubleclick, true},
+ {vscroll, right}]),
+
+ %% --------- Options --------------
+ gs:label(Packer, [{label, {text, "Analysis Options"}},
+ {height, 20}, {pack_xy, {2, 2}}]),
+ ModePacker = gs:frame(Packer, [{packer_x, [{fixed, 75}, {fixed, 120}]},
+ {packer_y, [{fixed, 20}, {fixed, 20},
+ {fixed, 20},
+ %% EmptySpace,
+ {fixed, 20}, {fixed, 20},
+ {fixed, 20}, EmptySpace]},
+ {bw, 10}, {relief, flat},
+ {default, {radiobutton, {align, w}}},
+ {default, {label, {align, w}}},
+ {pack_xy, {2, 3}}]),
+
+ %% Bytecode vs. Source code
+ gs:label(ModePacker, [{label, {text, "File Type:"}},
+ {height, 20}, {pack_xy, {1,1}}]),
+ {ByteSel, SrcSel} = case From of
+ byte_code -> {[{select, true}], []};
+ src_code -> {[], [{select, true}]}
+ end,
+ ModeByteCode = gs:radiobutton(ModePacker,
+ ByteSel ++ [{group, start_from},
+ {label, {text,"BeamFiles"}},
+ {pack_xy, {2,1}}]),
+ ModeSrcCode = gs:radiobutton(ModePacker,
+ SrcSel ++ [{group, start_from},
+ {label, {text,"SourceFiles"}},
+ {pack_xy, {2,2}}]),
+ Mode = #mode{start_byte_code = ModeByteCode,
+ start_src_code = ModeSrcCode},
+
+ %% --------- Log box --------------
+ gs:label(Packer, [{label, {text, "Log"}}, {height, 20}, {pack_xy, {3,2}}]),
+ Log = gs:editor(Packer, [{pack_x, 3}, {pack_y, 3}, {enable, false},
+ {font, {courier, 12}}, {vscroll, right},
+ {wrap, word}]),
+
+ %% --------- Warnings box --------------
+ gs:label(Packer, [{label, {text, "Warnings"}},{height, 20},{pack_xy, {3,5}}]),
+ WarningsBox = gs:editor(Packer, [{pack_x, {2,3}}, {pack_y, 6},
+ {enable, false},
+ {font, {courier, 12}}, {vscroll, right},
+ {wrap, word}]),
+
+ %% --------- Buttons --------------
+ ButtonPackerHighLeft =
+ gs:frame(Packer, [{packer_x, [{fixed, 50}, {fixed, 65}, EmptySpace]},
+ {pack_xy, {1,4}}]),
+ ButtonPackerHighRight =
+ gs:frame(Packer, [{packer_x, [{fixed, 70}, {fixed, 70}, EmptySpace]},
+ {pack_xy, {3,4}}]),
+ ButtonPackerLowLeft =
+ gs:frame(Packer, [{packer_x, [{fixed, 50},
+ {fixed, 60},
+ {fixed, 110},
+ EmptySpace]},
+ {pack_xy, {1,7}}]),
+ ButtonPackerLowRight =
+ gs:frame(Packer, [{packer_x, [{fixed, 100},
+ {fixed, 70},
+ EmptySpace,
+ {fixed, 70},
+ {fixed, 70}]},
+ {pack_x, {2,3}}, {pack_y, 7}]),
+
+ WHButton = [{width, 60}, {height, 20}],
+ AddFile = gs:button(ButtonPackerLowLeft, [{pack_xy, {1, 1}},
+ {label, {text,"Add"}}|WHButton]),
+ AddAll = gs:button(ButtonPackerLowLeft, [{pack_xy, {2, 1}},
+ {label, {text,"Add All"}}|WHButton]),
+ AddRec = gs:button(ButtonPackerLowLeft, [{pack_xy, {3, 1}},
+ {label, {text,"Add Recursively"}}
+ |WHButton]),
+ DelFile = gs:button(ButtonPackerHighLeft, [{pack_xy, {1, 1}},
+ {label, {text,"Delete"}}|WHButton]),
+ ClearChosen = gs:button(ButtonPackerHighLeft, [{pack_xy, {2, 1}},
+ {label, {text,"Delete All"}}
+ |WHButton]),
+ ClearLog = gs:button(ButtonPackerHighRight, [{pack_xy, {1, 1}},
+ {label, {text,"Clear Log"}}
+ |WHButton]),
+ ClearWarn = gs:button(ButtonPackerLowRight, [{pack_xy, {1, 1}},
+ {label, {text,"Clear Warnings"}}
+ |WHButton]),
+
+ Run = gs:button(ButtonPackerLowRight, [{pack_xy, {4, 1}},
+ {label, {text,"Run"}}|WHButton]),
+ Stop = gs:button(ButtonPackerLowRight, [{pack_xy, {5, 1}}, {enable, false},
+ {label, {text,"Stop"}}|WHButton]),
+
+ %% --------- Menu --------------
+ MenuBar = gs:menubar(TopWin, []),
+
+ %% File Menu
+ MenuBarFile = gs:menubutton(MenuBar, [{label, {text, "File"}}]),
+ MenuFile = gs:menu(MenuBarFile, []),
+ MenuFileSaveWarn = gs:menuitem(MenuFile, [{label, {text, "Save Warnings"}}]),
+ MenuFileSaveLog = gs:menuitem(MenuFile, [{label, {text, "Save Log"}}]),
+ MenuFileQuit = gs:menuitem(MenuFile, [{label, {text, "Quit"}}]),
+
+ %% Warnings Menu
+ MenuBarWarn = gs:menubutton(MenuBar, [{label, {text, "Warnings"}}]),
+ MenuWarn = gs:menu(MenuBarWarn, []),
+ MenuWarnMatch = gs:menuitem(MenuWarn, [{label, {text, "Match failures"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnFailingCall = gs:menuitem(MenuWarn,
+ [{label, {text, "Failing function calls"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnFunApp = gs:menuitem(MenuWarn, [{label,
+ {text, "Bad fun applications"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnOpaque = gs:menuitem(MenuWarn, [{label,
+ {text, "Opaqueness violations"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnLists = gs:menuitem(MenuWarn,
+ [{label, {text, "Improper list constructions"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnNotCalled = gs:menuitem(MenuWarn,
+ [{label, {text, "Unused functions"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnReturnOnlyExit = gs:menuitem(MenuWarn,
+ [{label,
+ {text, "Error handling functions"}},
+ {itemtype, check}, {select, false}]),
+ MenuWarnReturnNoReturn = gs:menuitem(MenuWarn,
+ [{label,
+ {text, "Functions of no return"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnCallNonExported = gs:menuitem(MenuWarn,
+ [{label,
+ {text, "Call to unexported function"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnRaceCondition = gs:menuitem(MenuWarn,
+ [{label,
+ {text,"Possible race conditions"}},
+ {itemtype, check}, {select, false}]),
+ MenuWarnContractTypes = gs:menuitem(MenuWarn,
+ [{label, {text, "Wrong contracts"}},
+ {itemtype, check}, {select, true}]),
+ MenuWarnContractSyntax = gs:menuitem(MenuWarn,
+ [{label,
+ {text, "Wrong contract syntax"}},
+ {itemtype, check}, {select, true}]),
+
+ %% PLT Menu
+ MenuBarPLT = gs:menubutton(MenuBar, [{label, {text,"PLT"}}]),
+ MenuPLT = gs:menu(MenuBarPLT, []),
+ MenuPLTEmpty = gs:menuitem(MenuPLT, [{label, {text, "Init with empty PLT"}},
+ {itemtype, check}, {select, false}]),
+ MenuPLTShow = gs:menuitem(MenuPLT, [{label, {text, "Show contents"}}]),
+ MenuPLTSearch = gs:menuitem(MenuPLT, [{label, {text, "Search contents"}}]),
+
+ %% Options Menu
+ MenuBarOpts = gs:menubutton(MenuBar, [{label,{text,"Options"}}]),
+ MenuOpts = gs:menu(MenuBarOpts, []),
+ MenuOptsMacros = gs:menuitem(MenuOpts,
+ [{label, {text, "Manage Macro Definitions"}}]),
+ MenuOptsIncludes = gs:menuitem(MenuOpts,
+ [{label, {text, "Manage Include Directories"}}]),
+
+ %% Help
+ MenuBarHelp = gs:menubutton(MenuBar, [{label, {text, "Help"}}, {side, right}]),
+ MenuHelp = gs:menu(MenuBarHelp, []),
+ MenuHelpManual = gs:menuitem(MenuHelp, [{label, {text, "Manual"}}]),
+ MenuHelpWarnings = gs:menuitem(MenuHelp, [{label, {text, "Warning Options"}}]),
+ MenuHelpAbout = gs:menuitem(MenuHelp, [{label, {text, "About"}}]),
+
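+  %% Pair each warning tag with its menu item so that init_warnings/2
+  %% can set the initial check marks from the legal warnings in the
+  %% options.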
+ Warnings = [{?WARN_RETURN_NO_RETURN, MenuWarnReturnNoReturn},
+ {?WARN_RETURN_ONLY_EXIT, MenuWarnReturnOnlyExit},
+ {?WARN_NOT_CALLED, MenuWarnNotCalled},
+ {?WARN_NON_PROPER_LIST, MenuWarnLists},
+ {?WARN_FUN_APP, MenuWarnFunApp},
+ {?WARN_MATCHING, MenuWarnMatch},
+ {?WARN_OPAQUE, MenuWarnOpaque},
+ {?WARN_FAILING_CALL, MenuWarnFailingCall},
+ {?WARN_CALLGRAPH, MenuWarnCallNonExported},
+ {?WARN_RACE_CONDITION, MenuWarnRaceCondition},
+ %% For contracts.
+ {?WARN_CONTRACT_TYPES, MenuWarnContractTypes},
+ {?WARN_CONTRACT_SYNTAX, MenuWarnContractSyntax}
+ ],
+
+ init_warnings(Warnings, LegalWarnings),
+
+ Menu = #menu{file_quit = MenuFileQuit,
+ plt_empty = MenuPLTEmpty,
+ help_manual = MenuHelpManual,
+ help_about = MenuHelpAbout,
+ help_warnings = MenuHelpWarnings,
+ opts_macros = MenuOptsMacros,
+ opts_includes = MenuOptsIncludes,
+ plt_search_doc = MenuPLTSearch,
+ plt_show_doc = MenuPLTShow,
+ file_save_log = MenuFileSaveLog,
+ file_save_warn = MenuFileSaveWarn,
+ warnings = Warnings},
+
+ %% --------- Init --------------
+ gs:config(TopWin, [{map, true}]),
+ gs:config(Packer, WH),
+ {ok, CWD} = file:get_cwd(),
+
+ InitPlt = try dialyzer_plt:from_file(InitPltFile)
+ catch throw:{dialyzer_error, _} -> dialyzer_plt:new()
+ end,
+
+ State = #gui_state{add_all = AddAll,
+ add_file = AddFile,
+ add_rec = AddRec,
+ chosen_box = ChosenBox,
+ clear_chosen = ClearChosen,
+ clear_log = ClearLog,
+ clear_warn = ClearWarn,
+ del_file = DelFile,
+ doc_plt = dialyzer_plt:new(),
+ dir_entry = DirEntry,
+ file_box = File,
+ file_wd = CWD,
+ gs = GS,
+ init_plt = InitPlt,
+ log = Log,
+ menu = Menu,
+ mode = Mode,
+ options = DialyzerOptions,
+ packer = Packer,
+ run = Run,
+ stop = Stop,
+ top = TopWin,
+ warnings_box = WarningsBox},
+ NewState = change_dir_or_add_file(State, "."),
+ gui_loop(NewState).
+
+%% ----------------------------------------------------------------
+%%
+%% Main GUI Loop
+%%
+
+-spec gui_loop(#gui_state{}) -> ?RET_NOTHING_SUSPICIOUS.
+
+gui_loop(#gui_state{add_all = AddAll, add_file = AddFile, add_rec = AddRec,
+ backend_pid = BackendPid, chosen_box = ChosenBox,
+ clear_chosen = ClearChosen, clear_log = ClearLog,
+ clear_warn = ClearWarn, del_file = DelFile,
+ dir_entry = DirEntry, file_box = File, log = Log,
+ menu = Menu, packer = Packer, run = Run, stop = Stop,
+ top = TopWin, warnings_box = Warn} = State) ->
+ %% --- Menu ---
+ Quit = Menu#menu.file_quit,
+ Manual = Menu#menu.help_manual,
+ Warnings = Menu#menu.help_warnings,
+ About = Menu#menu.help_about,
+ SaveLog = Menu#menu.file_save_log,
+ SaveWarn = Menu#menu.file_save_warn,
+ SearchPlt = Menu#menu.plt_search_doc,
+ ShowPlt = Menu#menu.plt_show_doc,
+ Macros = Menu#menu.opts_macros,
+ Includes = Menu#menu.opts_includes,
+
+ receive
+ {gs, TopWin, configure, _Data, [W, H|_]} ->
+ gs:config(Packer, [{width, W}, {height, H}]),
+ gui_loop(State);
+ {gs, TopWin, destroy, _Data, _Args} ->
+ ?RET_NOTHING_SUSPICIOUS;
+ {gs, File, doubleclick, _, [_Id, Text|_]} ->
+ NewState = change_dir_or_add_file(State, Text),
+ gui_loop(NewState);
+ {gs, DirEntry, keypress, _, ['Return'|_]} ->
+ gs:config(TopWin, [{setfocus, true}]),
+ NewState = change_dir_absolute(State, gs:read(DirEntry, text)),
+ gui_loop(NewState);
+ {gs, DirEntry, keypress, _, _} ->
+ gui_loop(State);
+ %% ----- Buttons -----
+ {gs, AddFile, click, _, _} ->
+ handle_add_files(State),
+ gui_loop(State);
+ {gs, AddAll, click, _, _} ->
+ handle_add_all_click(State),
+ gui_loop(State);
+ {gs, AddRec, click, _, _} ->
+ handle_add_rec_click(State),
+ gui_loop(State);
+ {gs, DelFile, click, _, _} ->
+ handle_file_delete(State),
+ gui_loop(State);
+ {gs, ClearChosen, click, _, _} ->
+ gs:config(ChosenBox, [clear]),
+ gui_loop(State);
+ {gs, ClearLog, click, _, _} ->
+ Log = State#gui_state.log,
+ gs:config(Log, [{enable, true}]),
+ gs:config(Log, [clear]),
+ gs:config(Log, [{enable, false}]),
+ gui_loop(State);
+ {gs, ClearWarn, click, _, _} ->
+ Warn = State#gui_state.warnings_box,
+ gs:config(Warn, [{enable, true}]),
+ gs:config(Warn, [clear]),
+ gs:config(Warn, [{enable, false}]),
+ gui_loop(State);
+ {gs, Run, click, _, _} ->
+ NewState = start_analysis(State),
+ gui_loop(NewState);
+ {gs, Stop, click, _, _} ->
+ config_gui_stop(State),
+ BackendPid ! {self(), stop},
+ update_editor(Log, "\n***** Analysis stopped ****\n"),
+ gui_loop(State);
+ %% ----- Menu -----
+ {gs, Quit, click, _, _} ->
+ case maybe_quit(State) of
+ true -> ?RET_NOTHING_SUSPICIOUS;
+ false -> gui_loop(State)
+ end;
+ {gs, Manual, click, _, _} ->
+ spawn_link(fun() -> manual(State) end),
+ gui_loop(State);
+ {gs, Warnings, click, _, _} ->
+ spawn_link(fun() -> warnings(State) end),
+ gui_loop(State);
+ {gs, About, click, _, _} ->
+ spawn_link(fun() -> about(State) end),
+ gui_loop(State);
+ {gs, SaveLog, click, _, _} ->
+ save_log(State),
+ gui_loop(State);
+ {gs, SaveWarn, click, _, _} ->
+ save_warn(State),
+ gui_loop(State);
+ {gs, SearchPlt, click, _, _} ->
+ spawn_link(fun() -> search_doc_plt(State) end),
+ gui_loop(State);
+ {gs, ShowPlt, click, _, _} ->
+ spawn_link(fun() -> show_doc_plt(State) end),
+ gui_loop(State);
+ {gs, Macros, click, _, _} ->
+ Self = self(),
+ spawn_link(fun() -> macro_dialog(State, Self) end),
+ gui_loop(State);
+ {gs, Includes, click, _, _} ->
+ Self = self(),
+ spawn_link(fun() -> include_dialog(State, Self) end),
+ gui_loop(State);
+ {new_options, NewOptions} ->
+ NewState = State#gui_state{options = NewOptions},
+ gui_loop(NewState);
+ %% ----- Analysis -----
+ {BackendPid, ext_calls, ExtCalls} ->
+ Msg = io_lib:format("The following functions are called "
+ "but type information about them is not available.\n"
+ "The analysis might get more precise by including "
+ "the modules containing these functions:\n\n\t~p\n",
+ [ExtCalls]),
+ free_editor(State, "Analysis done", Msg),
+ gui_loop(State);
+ {BackendPid, log, LogMsg} ->
+ update_editor(Log, LogMsg),
+ gui_loop(State);
+ {BackendPid, warnings, Warns} ->
+ SortedWarns = lists:keysort(2, Warns), %% Sort on file/line
+ WarnList = [dialyzer:format_warning(W) || W <- SortedWarns],
+ update_editor(Warn, lists:flatten(WarnList)),
+ gui_loop(State);
+ {BackendPid, done, _NewPlt, NewDocPlt} ->
+ message(State, "Analysis done"),
+ config_gui_stop(State),
+ gui_loop(State#gui_state{doc_plt = NewDocPlt});
+ {'EXIT', BackendPid, {error, Reason}} ->
+ free_editor(State, ?DIALYZER_ERROR_TITLE, Reason),
+ config_gui_stop(State),
+ gui_loop(State);
+ {'EXIT', BackendPid, Reason} when Reason =/= 'normal' ->
+ free_editor(State, ?DIALYZER_ERROR_TITLE, io_lib:format("~p", [Reason])),
+ config_gui_stop(State),
+ gui_loop(State);
+ _Other ->
+ %% io:format("Received ~p\n", [Other]),
+ gui_loop(State)
+ end.
+
+%% ----------------------------------------------------------------
+%%
+%% Main window actions
+%%
+
+%% ---- Adding and deleting files ----
+
+handle_add_all_click(#gui_state{chosen_box = ChosenBox, file_box = File,
+ file_wd = FWD, mode = Mode}) ->
+ case gs:read(File, items) of
+ [] ->
+ ok;
+ Add0 ->
+ gs:config(File, [{selection, clear}]),
+ Add1 = ordsets:subtract(Add0, [".."]),
+ Add = ordsets:from_list([filename:join(FWD, X) || X <- Add1]),
+ case gs:read(Mode#mode.start_byte_code, select) of
+ true ->
+ add_files(filter_mods(Add, ".beam"), ChosenBox, byte_code);
+ false ->
+ add_files(filter_mods(Add, ".erl"), ChosenBox, src_code)
+ end
+ end.
+
+all_subdirs(Dirs) ->
+ all_subdirs(Dirs, []).
+
+all_subdirs([Dir|T], Acc) ->
+ {ok, Files} = file:list_dir(Dir),
+ SubDirs = lists:zf(fun(F) ->
+ SubDir = filename:join(Dir, F),
+ case filelib:is_dir(SubDir) of
+ true -> {true, SubDir};
+ false -> false
+ end
+ end, Files),
+ NewAcc = ordsets:union(ordsets:from_list(SubDirs), Acc),
+ all_subdirs(T ++ SubDirs, NewAcc);
+all_subdirs([], Acc) ->
+ Acc.
+
+handle_add_rec_click(#gui_state{chosen_box = ChosenBox, file_box = File,
+ file_wd = FWD, mode = Mode}) ->
+ case gs:read(File, selection) of
+ [] ->
+ ok;
+ List ->
+ gs:config(File, [{selection, clear}]),
+ Dirs1 = [gs:read(File, {get, X}) || X <- List],
+ Dirs2 = ordsets:from_list([filename:join(FWD, X) || X <- Dirs1]),
+ Dirs3 = ordsets:filter(fun(X) -> filelib:is_dir(X) end, Dirs2),
+ TargetDirs = ordsets:union(Dirs3, all_subdirs(Dirs3)),
+ {Code, Ext} = case gs:read(Mode#mode.start_byte_code, select) of
+ true -> {byte_code, ".beam"};
+ false -> {src_code, ".erl"}
+ end,
+ add_files(filter_mods(TargetDirs, Ext), ChosenBox, Code)
+ end.
+
+handle_add_files(#gui_state{chosen_box = ChosenBox, file_box = File,
+ file_wd = FWD, mode = Mode}) ->
+ case gs:read(File, selection) of
+ [] ->
+ ok;
+ List ->
+ gs:config(File, [{selection, clear}]),
+ Add0 = [gs:read(File, {get, X}) || X <- List],
+ Add = ordsets:from_list([filename:join(FWD, X) || X <- Add0]),
+ case gs:read(Mode#mode.start_byte_code, select) of
+ true ->
+ add_files(filter_mods(Add, ".beam"), ChosenBox, byte_code);
+ false ->
+ add_files(filter_mods(Add, ".erl"), ChosenBox, src_code)
+ end
+ end.
+
+filter_mods(Mods, Extension) ->
+ Fun = fun(X) ->
+ filename:extension(X) =:= Extension
+ orelse
+ (filelib:is_dir(X) andalso
+ contains_files(X, Extension))
+ end,
+ ordsets:filter(Fun, Mods).
+
+contains_files(Dir, Extension) ->
+ {ok, Files} = file:list_dir(Dir),
+ lists:any(fun(X) -> filename:extension(X) =:= Extension end, Files).
+
+add_files(Add, ChosenBox, Type) ->
+ Set = gs:read(ChosenBox, items),
+ Set1 =
+ case Type of
+ byte_code -> filter_mods(Set, ".beam");
+ src_code -> filter_mods(Set, ".erl")
+ end,
+ Files = ordsets:union(Add, Set1),
+ gs:config(ChosenBox, [{items, Files}]),
+ ok.
+
+handle_file_delete(#gui_state{chosen_box = ChosenBox}) ->
+ List = gs:read(ChosenBox, selection),
+ lists:foreach(fun(X) -> gs:config(ChosenBox, [{del, X}]) end,
+ lists:reverse(lists:sort(List))).
+
+%% ---- Other ----
+
+change_dir_or_add_file(#gui_state{file_wd = FWD, mode = Mode, dir_entry = Dir,
+ chosen_box = CBox, file_box = File} = State,
+ Text) ->
+ NewWDorFile =
+ case Text of
+ ".." -> filename:join(butlast(filename:split(FWD)));
+ "." -> FWD;
+ _ -> filename:join(FWD, Text)
+ end,
+ case filelib:is_dir(NewWDorFile) of
+ true ->
+ gs:config(Dir, [{text, NewWDorFile}]),
+ {ok, List} = file:list_dir(NewWDorFile),
+ gs:config(File, [{items, [".."|lists:sort(List)]}]),
+ State#gui_state{file_wd = NewWDorFile};
+ false ->
+ case gs:read(Mode#mode.start_byte_code, select) of
+ true ->
+ case filter_mods([NewWDorFile], ".beam") of
+ [] -> ok;
+ RealFiles -> add_files(RealFiles, CBox, byte_code)
+ end;
+ false ->
+ case filter_mods([NewWDorFile], ".erl") of
+ [] -> ok;
+ RealFiles -> add_files(RealFiles, CBox, src_code)
+ end
+ end,
+ State
+ end.
+
+butlast([H1, H2 | T]) ->
+ [H1 | butlast([H2|T])];
+butlast([_]) ->
+ [];
+butlast([]) ->
+ ["/"].
+
+change_dir_absolute(#gui_state{file_wd = FWD, dir_entry = Dir,
+ file_box = File} = State,
+ Text) ->
+ case filelib:is_dir(Text) of
+ true ->
+ WD = filename:join(FWD, Text),
+ gs:config(Dir, [{text, WD}]),
+ {ok, List} = file:list_dir(WD),
+ gs:config(File, [{items, [".."|lists:sort(List)]}]),
+ State#gui_state{file_wd = WD};
+ false ->
+ State
+ end.
+
+init_warnings([{Tag, GSItem}|Left], LegalWarnings) ->
+ Select = ordsets:is_element(Tag, LegalWarnings),
+ gs:config(GSItem, [{select, Select}]),
+ init_warnings(Left, LegalWarnings);
+init_warnings([], _LegalWarnings) ->
+ ok.
+
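+%% Disable every control except Stop while an analysis is running;
+%% config_gui_stop/1 restores the idle state (and disables Stop) when
+%% the analysis finishes or is aborted.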
+config_gui_start(State) ->
+ Enabled = [{enable, true}],
+ Disabled = [{enable, false}],
+ gs:config(State#gui_state.stop, Enabled),
+ gs:config(State#gui_state.run, Disabled),
+ gs:config(State#gui_state.del_file, Disabled),
+ gs:config(State#gui_state.clear_chosen, Disabled),
+ gs:config(State#gui_state.add_file, Disabled),
+ gs:config(State#gui_state.add_all, Disabled),
+ gs:config(State#gui_state.add_rec, Disabled),
+ gs:config(State#gui_state.clear_warn, Disabled),
+ gs:config(State#gui_state.clear_log, Disabled),
+ Menu = State#gui_state.menu,
+ gs:config(Menu#menu.file_save_warn, Disabled),
+ gs:config(Menu#menu.file_save_log, Disabled),
+ gs:config(Menu#menu.opts_macros, Disabled),
+ gs:config(Menu#menu.opts_includes, Disabled),
+ gs:config(Menu#menu.plt_empty, Disabled),
+ gs:config(Menu#menu.plt_search_doc, Disabled),
+ gs:config(Menu#menu.plt_show_doc, Disabled),
+ Mode = State#gui_state.mode,
+ gs:config(Mode#mode.start_byte_code, Disabled),
+ gs:config(Mode#mode.start_src_code, Disabled).
+
+config_gui_stop(State) ->
+ Enabled = [{enable, true}],
+ Disabled = [{enable, false}],
+ gs:config(State#gui_state.stop, Disabled),
+ gs:config(State#gui_state.run, Enabled),
+ gs:config(State#gui_state.del_file, Enabled),
+ gs:config(State#gui_state.clear_chosen, Enabled),
+ gs:config(State#gui_state.add_file, Enabled),
+ gs:config(State#gui_state.add_all, Enabled),
+ gs:config(State#gui_state.add_rec, Enabled),
+ gs:config(State#gui_state.clear_warn, Enabled),
+ gs:config(State#gui_state.clear_log, Enabled),
+ Menu = State#gui_state.menu,
+ gs:config(Menu#menu.file_save_warn, Enabled),
+ gs:config(Menu#menu.file_save_log, Enabled),
+ gs:config(Menu#menu.opts_macros, Enabled),
+ gs:config(Menu#menu.opts_includes, Enabled),
+ gs:config(Menu#menu.plt_empty, Enabled),
+ gs:config(Menu#menu.plt_search_doc, Enabled),
+ gs:config(Menu#menu.plt_show_doc, Enabled),
+ Mode = State#gui_state.mode,
+ gs:config(Mode#mode.start_byte_code, Enabled),
+ gs:config(Mode#mode.start_src_code, Enabled).
+
+%% ----------------------------------------------------------------
+%%
+%% Messages
+%%
+
+message(State, Message) ->
+ output_sms(State, ?DIALYZER_MESSAGE_TITLE, Message).
+
+error_sms(State, Message) ->
+ output_sms(State, ?DIALYZER_ERROR_TITLE, Message).
+
+%%
+%% Use this function *only* for small messages: lines are not wrapped
+%% and the created window has a limited area for text. For bigger
+%% messages, use free_editor/3 instead.
+%%
+output_sms(#gui_state{gs = GS, top = TopWin}, Title, Message) ->
+ %% Lines = string:words(Message, $\n),
+ %% io:format("The message has ~w lines\n", [Lines]),
+ WH = [{width, 400}, {height, 100}],
+ MessageWin = gs:window(GS, [{title, Title},
+ {default, button, {font, {helvetica, bold, 12}}}
+ |WH]),
+ MessagePacker = gs:frame(MessageWin, [{packer_y, [{fixed, 75}, {fixed, 25}]},
+ {packer_x, [{fixed, 175},{fixed, 50},
+ {fixed, 175}]}]),
+ gs:label(MessagePacker, [{pack_x, {1, 3}}, {pack_y, 1},
+ {label, {text, Message}}]),
+ OK = gs:button(MessagePacker, [{label, {text, "OK"}}, {pack_xy, {2, 2}}]),
+ gs:config(MessageWin, [{map, true}]),
+ gs:config(MessagePacker, WH),
+ message_loop(OK, MessageWin, TopWin).
+
+message_loop(Ok, Win, TopWin) ->
+ receive
+ {gs, Ok, click, _, _} ->
+ gs:destroy(Win);
+ {gs, Win, destroy, _, _} ->
+ ok;
+ {gs, TopWin, destroy, _, _} ->
+ exit(normal);
+ {gs, _, _, _, _} ->
+ message_loop(Ok, Win, TopWin)
+ end.
+
+dialog(#gui_state{gs = GS, top = TopWin}, Message, OkLabel, CancelLabel) ->
+ WH = [{width, 400}, {height, 100}],
+ WHButton = [{width, 70}, {height, 20}],
+ DialogWin = gs:window(GS, [{title, "Dialyzer Message"},
+ {default, button, {font, {helvetica, bold, 12}}}
+ |WH]),
+ DialogPacker = gs:frame(DialogWin, [{packer_y, [{fixed, 75}, {fixed, 25}]},
+ {packer_x, [{fixed, 150}, {fixed, 50},
+ {fixed, 50}, {fixed, 150}]}]),
+ gs:label(DialogPacker, [{pack_x, {1,4}}, {pack_y, 1},
+ {label, {text, Message}}]),
+ Ok = gs:button(DialogPacker, [{label, {text, OkLabel}},
+ {pack_xy, {2,2}}|WHButton]),
+ Cancel = gs:button(DialogPacker, [{label, {text, CancelLabel}},
+ {pack_xy, {3,2}}|WHButton]),
+ gs:config(DialogWin, [{map, true}]),
+ gs:config(DialogPacker, WH),
+ dialog_loop(Ok, Cancel, DialogWin, TopWin).
+
+dialog_loop(Ok, Cancel, Win, TopWin) ->
+ receive
+ {gs, Ok, click, _, _} ->
+ gs:destroy(Win),
+ true;
+ {gs, Cancel, click, _, _} ->
+ gs:destroy(Win),
+ false;
+ {gs, Win, destroy, _, _} ->
+ false;
+ {gs, TopWin, destroy, _, _} ->
+ exit(normal);
+ {gs, _, _, _, _} ->
+ dialog_loop(Ok, Cancel, Win, TopWin)
+ end.
+
+maybe_quit(#gui_state{top = TopWin} = State) ->
+ case dialog(State, "Do you really want to quit?", "Yes", "No") of
+ true ->
+ flush(),
+ gs:destroy(TopWin),
+ gs:stop(),
+ true;
+ false ->
+ false
+ end.
+
+
+%% ----------------------------------------------------------------
+%%
+%% Menu actions
+%%
+
+%% ---- Help Menu ----
+
+manual(State) ->
+ help_menu_common(State, "Dialyzer Manual", 500, "manual.txt", white).
+
+warnings(State) ->
+ help_menu_common(State, "Dialyzer Warnings", 500, "warnings.txt", white).
+
+about(State) ->
+ help_menu_common(State, "About Dialyzer", 160, "about.txt", yellow).
+
+help_menu_common(#gui_state{gs = GS, top = TopWin} = State,
+ Title, Height, TxtFileName, BackGroundColor) ->
+ WH = [{width, 600}, {height, Height}],
+ Win = gs:window(GS, [{title, Title}, {configure, true},
+ {default, editor, {bg, BackGroundColor}} | WH]),
+ EmptySpace = {stretch, 1},
+ Frame = gs:frame(Win, [{packer_x, [EmptySpace, {fixed, 60}, EmptySpace]},
+ {packer_y, [EmptySpace, {fixed, 30}]} | WH]),
+ Editor = gs:editor(Frame, [{pack_x, {1, 3}}, {pack_y, 1},
+ {font, {courier, 12}}, {vscroll, right},
+ {wrap, word}]),
+ Button = gs:button(Frame, [{label, {text, "Ok"}}, {pack_xy, {2, 2}}]),
+ gs:config(Win, [{map, true}]),
+ gs:config(Frame, WH),
+ AboutFile = filename:join([code:lib_dir(dialyzer), "doc", TxtFileName]),
+ case gs:config(Editor, {load, AboutFile}) of
+ {error, Reason} ->
+ gs:destroy(Win),
+ error_sms(State,
+ io_lib:format("Could not find doc/~s file!\n\n ~p",
+ [TxtFileName, Reason]));
+ ok ->
+ gs:config(Editor, [{enable, false}]),
+ show_info_loop(TopWin, Win, Frame, Button)
+ end.
+
+%% ---- File Menu ----
+
+save_log(#gui_state{file_wd = CWD, log = Log} = State) ->
+ {Win, Entry, OkButton, CancelButton} = file_box(State, "Save Log", CWD),
+ save_loop(State, OkButton, CancelButton, Entry, Win, Log).
+
+save_warn(#gui_state{file_wd = CWD, warnings_box = WBox} = State) ->
+ {Win, Entry, OkButton, CancelButton} = file_box(State, "Save Warnings", CWD),
+ save_loop(State, OkButton, CancelButton, Entry, Win, WBox).
+
+file_box(#gui_state{gs = GS}, Title, Default) ->
+ WH = [{width, 400}, {height, 75}],
+ Win = gs:window(GS, [{title, Title}|WH]),
+ Fix25 = {fixed, 27}, Fix75 = {fixed, 75},
+ WinPacker = gs:frame(Win, [{packer_y, [Fix25, Fix25, Fix25]},
+ {packer_x, [Fix75, Fix75, Fix75, {fixed, 175}]}]),
+ gs:label(WinPacker, [{pack_xy, {1,2}}, {label, {text, "Enter file:"}}]),
+ Entry = gs:entry(WinPacker, [{pack_x, {2,4}}, {pack_y, 2}, {keypress, true}]),
+ OkButton = gs:button(WinPacker, [{label, {text, "Ok"}}, {pack_xy, {2,3}}]),
+ CancelButton = gs:button(WinPacker, [{label, {text, "Cancel"}},
+ {pack_xy, {3,3}}]),
+ gs:config(Entry, [{text, Default}]),
+ gs:config(Win, [{map, true}]),
+ gs:config(WinPacker, WH),
+ {Win, Entry, OkButton, CancelButton}.
+
+save_loop(#gui_state{top = TopWin} = State,
+ OkButton, CancelButton, Entry, Save, Editor) ->
+ receive
+ {gs, OkButton, click, _, _} ->
+ File = gs:read(Entry, text),
+ case gs:config(Editor, [{save, File}]) of
+ {error, _} ->
+ error_sms(State, "Could not write to file:\n" ++ File),
+ save_loop(State, OkButton, CancelButton, Entry, Save, Editor);
+ _ ->
+ gs:destroy(Save)
+ end;
+ {gs, Entry, keypress, _, ['Return'|_]} ->
+ File = gs:read(Entry, text),
+ case gs:config(Editor, [{save, File}]) of
+ {error, _} ->
+ error_sms(State, "Could not write to file:\n" ++ File),
+ save_loop(State, OkButton, CancelButton, Entry, Save, Editor);
+ _ ->
+ gs:destroy(Save)
+ end;
+ {gs, Entry, keypress, _, _} ->
+ save_loop(State, OkButton, CancelButton, Entry, Save, Editor);
+ {gs, CancelButton, click, _, _} ->
+ gs:destroy(Save);
+ {gs, TopWin, destroy, _, _} ->
+ exit(normal);
+ {gs, Save, destroy, _, _} ->
+ ok;
+ {gs, _, _, _, _} ->
+ save_loop(State, OkButton, CancelButton, Entry, Save, Editor)
+ end.
+
+%% ---- Plt Menu ----
+
+search_doc_plt(#gui_state{gs = GS, top = TopWin} = State) ->
+ WH = [{width, 400}, {height, 100}],
+ WHB = [{width, 120}, {height, 30}],
+ Title = io_lib:format("Search the PLT", []),
+ Win = gs:window(GS, [{title, Title}, {configure, true},
+ {default, editor, {bg, white}} | WH]),
+ EmptySpace = {stretch, 1},
+ Frame = gs:frame(Win, [{packer_x, [EmptySpace, EmptySpace, EmptySpace]},
+ {packer_y, [{fixed, 30}, {fixed, 30},
+ EmptySpace, {fixed, 30}]} | WH]),
+ gs:label(Frame, [{pack_xy, {1,1}}, {label, {text, "Module"}}]),
+ ModEntry = gs:entry(Frame, [{pack_xy, {1,2}}]),
+ gs:label(Frame, [{pack_xy, {2,1}}, {label, {text, "Function"}}]),
+ FunEntry = gs:entry(Frame, [{pack_xy, {2,2}}]),
+ gs:label(Frame, [{pack_xy, {3,1}}, {label, {text, "Arity"}}]),
+ ArityEntry = gs:entry(Frame, [{pack_xy, {3,2}}]),
+ ButtonPacker = gs:frame(Frame, [{pack_xy, {2,4}},
+ {packer_x, [{fixed, 60}, {fixed, 60}]},
+ {packer_y, {fixed, 30}}]),
+ SearchButton = gs:button(ButtonPacker, [{label, {text, "Search"}},
+ {pack_xy, {1,1}}]),
+ CancelButton = gs:button(ButtonPacker, [{label, {text, "Cancel"}},
+ {pack_xy, {2,1}}]),
+ gs:config(Win, [{map, true}]),
+ gs:config(Frame, WH),
+ gs:config(ButtonPacker, WHB),
+ search_doc_plt_loop(State, CancelButton, SearchButton, ModEntry,
+ FunEntry, ArityEntry, Win, TopWin).
+
+search_doc_plt_loop(State, CancelButton, SearchButton, ModEntry,
+ FunEntry, ArityEntry, Win, TopWin) ->
+ receive
+ {gs, CancelButton, click, _, _} ->
+ gs:destroy(Win),
+ ok;
+ {gs, TopWin, destroy, _, _} ->
+ exit(normal);
+ {gs, SearchButton, click, _, _} ->
+ M = format_search(gs:read(ModEntry, text)),
+ F = format_search(gs:read(FunEntry, text)),
+ A = format_search(gs:read(ArityEntry, text)),
+ case dialyzer_plt:get_specs(State#gui_state.doc_plt, M, F, A) of
+ "" ->
+ error_sms(State, "No such function"),
+ search_doc_plt_loop(State, CancelButton, SearchButton, ModEntry,
+ FunEntry, ArityEntry, Win, TopWin);
+ NonEmptyString ->
+ gs:destroy(Win),
+ free_editor(State, "Content of PLT", NonEmptyString)
+ end
+ end.
+
+format_search([]) ->
+ '_';
+format_search(String) ->
+ try list_to_integer(String)
+ catch error:_ -> list_to_atom(String)
+ end.
+
+show_doc_plt(#gui_state{doc_plt = DocPLT} = State) ->
+ case dialyzer_plt:get_specs(DocPLT) of
+ "" -> error_sms(State, "No analysis has been made yet!\n");
+ NonEmptyString -> free_editor(State, "Content of PLT", NonEmptyString)
+ end.
+
+free_editor(#gui_state{gs = GS, top = TopWin}, Title, Contents0) ->
+ Contents = lists:flatten(Contents0),
+ Tokens = string:tokens(Contents, "\n"),
+ NofLines = length(Tokens),
+ LongestLine = lists:max([length(X) || X <- Tokens]),
+ Height0 = NofLines * 25 + 80,
+ Height = if Height0 > 500 -> 500; true -> Height0 end,
+ Width0 = LongestLine * 7 + 60,
+ Width = if Width0 > 800 -> 800; true -> Width0 end,
+ WH = [{width, Width}, {height, Height}],
+ Win = gs:window(GS, [{title, Title}, {configure, true},
+ {default, editor, {bg, white}} | WH]),
+ EmptySpace = {stretch, 1},
+ Frame = gs:frame(Win, [{packer_x, [EmptySpace, {fixed, 60}, EmptySpace]},
+ {packer_y, [EmptySpace, {fixed, 30}]}
+ | WH]),
+ Editor = gs:editor(Frame, [{pack_x, {1,3}}, {pack_y, 1},
+ {font, {courier, 12}}, {vscroll, right},
+ {wrap, word}, {enable, true}]),
+ Button = gs:button(Frame, [{label, {text, "Ok"}}, {pack_xy, {2,2}}]),
+ gs:config(Editor, [{insert, {insert, Contents}}]),
+ gs:config(Editor, [{enable, false}]),
+ gs:config(Win, [{map, true}]),
+ gs:config(Frame, WH),
+ show_info_loop(TopWin, Win, Frame, Button).
+
+%% ---- Common ----
+
+show_info_loop(TopWin, Win, Frame, Button) ->
+ receive
+ {gs, Button, click, _, _} ->
+ gs:destroy(Win);
+ {gs, TopWin, destroy, _, _} ->
+ exit(normal);
+ {gs, Win, destroy, _, _} ->
+ ok;
+ {gs, Win, configure, _Data, [W, H|_]} ->
+ gs:config(Frame, [{width, W}, {height, H}]),
+ show_info_loop(TopWin, Win, Frame, Button)
+ end.
+
+include_dialog(#gui_state{gs = GS, options = Options}, Parent) ->
+ WH = [{width, 300}, {height, 400}],
+ Title = io_lib:format("Include Directories", []),
+ Win = gs:window(GS, [{title, Title}, {configure, true},
+ {default, entry, {bg, white}}| WH]),
+ EmptySpace = {stretch, 1},
+ Frame = gs:frame(Win, [{packer_x, [EmptySpace]},
+ {packer_y, [{fixed, 30}, {fixed, 30}, {fixed, 30},
+ EmptySpace, {fixed, 30}, {fixed, 30}]}
+ | WH]),
+ gs:label(Frame, [{pack_xy, {1,1}}, {label, {text, "Directory"}}]),
+ DirEntry = gs:entry(Frame, [{pack_xy, {1,2}}]),
+ ButtonPacker1 = gs:frame(Frame, [{pack_xy, {1,3}},
+ {packer_x, [{fixed, 70}, {fixed, 70},
+ EmptySpace]},
+ {packer_y, {fixed, 30}}]),
+ AddButton = gs:button(ButtonPacker1, [{label, {text, "Add"}},
+ {pack_xy, {1,1}}]),
+ Dirs = [io_lib:format("~s", [X]) || X <- Options#options.include_dirs],
+ DirBox = gs:listbox(Frame, [{pack_xy, {1,4}}, {vscroll, right},
+ {bg, white}, {configure, true},
+ {selectmode, multiple}, {items, Dirs}]),
+ ButtonPacker2 = gs:frame(Frame, [{pack_xy, {1,5}},
+ {packer_x, [{fixed, 60}, {fixed, 70},
+ EmptySpace]},
+ {packer_y, {fixed, 30}}]),
+ DeleteButton = gs:button(ButtonPacker2, [{label, {text, "Delete"}},
+ {pack_xy, {1,1}}]),
+ DeleteAllButton = gs:button(ButtonPacker2, [{label, {text, "Delete All"}},
+ {pack_xy, {2,1}}]),
+ ButtonPacker3 = gs:frame(Frame, [{pack_xy, {1,6}},
+ {packer_x, [EmptySpace,
+ {fixed, 60}, {fixed, 60}]},
+ {packer_y, {fixed, 30}}]),
+ OkButton = gs:button(ButtonPacker3, [{label, {text, "Ok"}},
+ {pack_xy, {2,1}}]),
+ CancelButton = gs:button(ButtonPacker3, [{label, {text, "Cancel"}},
+ {pack_xy, {3,1}}]),
+ gs:config(Win, [{map, true}]),
+ gs:config(Frame, WH),
+ include_loop(Parent, Options, Frame, AddButton, DeleteAllButton, DeleteButton,
+ DirBox, DirEntry, OkButton, CancelButton, Win).
+
+include_loop(Parent, Options, Frame, AddButton, DeleteAllButton, DeleteButton,
+ DirBox, DirEntry, OkButton, CancelButton, Win) ->
+ receive
+ {gs, CancelButton, click, _, _} ->
+ gs:destroy(Win),
+ ok;
+ {gs, OkButton, click, _, _} ->
+ gs:destroy(Win),
+ Parent ! {new_options, Options},
+ ok;
+ {gs, Win, configure, _Data, [W, H|_]} ->
+ gs:config(Frame, [{width, W}, {height, H}]),
+ include_loop(Parent, Options, Frame, AddButton, DeleteAllButton,
+ DeleteButton, DirBox, DirEntry, OkButton, CancelButton, Win);
+ {gs, AddButton, click, _, _} ->
+ Dirs = Options#options.include_dirs,
+ NewDirs =
+ case gs:read(DirEntry, text) of
+ [] -> Dirs;
+ Add -> [Add|Dirs]
+ end,
+ NewOptions = Options#options{include_dirs = NewDirs},
+ gs:config(DirBox, [{items, NewDirs}]),
+ include_loop(Parent, NewOptions, Frame, AddButton, DeleteAllButton,
+ DeleteButton, DirBox, DirEntry, OkButton, CancelButton, Win);
+ {gs, DeleteAllButton, click, _, _} ->
+ gs:config(DirBox, [clear]),
+ NewOptions = Options#options{include_dirs = []},
+ include_loop(Parent, NewOptions, Frame, AddButton, DeleteAllButton,
+ DeleteButton, DirBox, DirEntry, OkButton, CancelButton, Win);
+ {gs, DeleteButton, click, _, _} ->
+ NewOptions =
+ case gs:read(DirBox, selection) of
+ [] ->
+ Options;
+ List ->
+ lists:foreach(fun(X) -> gs:config(DirBox, [{del, X}]) end,
+ lists:sort(List)),
+ NewDirs = gs:read(DirBox, items),
+ Options#options{include_dirs = NewDirs}
+ end,
+ include_loop(Parent, NewOptions, Frame, AddButton, DeleteAllButton,
+ DeleteButton, DirBox, DirEntry, OkButton, CancelButton, Win);
+ {gs, Win, destroy, _, _} ->
+ ok
+ end.
+
+macro_dialog(#gui_state{gs = GS, options = Options}, Parent) ->
+ WH = [{width, 300}, {height, 400}],
+ Title = io_lib:format("Macro Definitions", []),
+ Win = gs:window(GS, [{title, Title}, {configure, true},
+ {default, entry, {bg, white}}| WH]),
+ EmptySpace = {stretch, 1},
+ Frame = gs:frame(Win, [{packer_x, [EmptySpace, EmptySpace]},
+ {packer_y, [{fixed, 30}, {fixed, 30}, {fixed, 30},
+ EmptySpace, {fixed, 30}, {fixed, 30}]}
+ | WH]),
+ gs:label(Frame, [{pack_xy, {1,1}}, {label, {text, "Macro"}}]),
+ MacroEntry = gs:entry(Frame, [{pack_xy, {1,2}}]),
+ gs:label(Frame, [{pack_xy, {2,1}}, {label, {text, "Term"}}]),
+ TermEntry = gs:entry(Frame, [{pack_xy, {2,2}}]),
+ ButtonPacker1 = gs:frame(Frame, [{pack_x, {1,2}}, {pack_y, 3},
+ {packer_x, [{fixed, 70},{fixed, 70},
+ EmptySpace]},
+ {packer_y, {fixed, 30}}]),
+ AddButton = gs:button(ButtonPacker1, [{label, {text, "Add"}},
+ {pack_xy, {1,1}}]),
+ Macros = [io_lib:format("~p = ~p",[X,Y]) || {X,Y} <- Options#options.defines],
+ MacroBox = gs:listbox(Frame, [{pack_x, {1,2}}, {pack_y, 4}, {vscroll, right},
+ {bg, white}, {configure, true},
+ {selectmode, multiple},
+ {items, Macros}]),
+ ButtonPacker2 = gs:frame(Frame, [{pack_x, {1,2}}, {pack_y, 5},
+ {packer_x, [{fixed, 60}, {fixed, 70},
+ EmptySpace]},
+ {packer_y, {fixed, 30}}]),
+ DeleteButton = gs:button(ButtonPacker2, [{label, {text, "Delete"}},
+ {pack_xy, {1,1}}]),
+ DeleteAllButton = gs:button(ButtonPacker2, [{label, {text, "Delete All"}},
+ {pack_xy, {2,1}}]),
+ ButtonPacker3 = gs:frame(Frame, [{pack_x, {1,2}}, {pack_y, 6},
+ {packer_x, [EmptySpace,
+ {fixed, 60}, {fixed, 60}]},
+ {packer_y, {fixed, 30}}]),
+ OkButton = gs:button(ButtonPacker3, [{label, {text, "Ok"}},
+ {pack_xy, {2,1}}]),
+ CancelButton = gs:button(ButtonPacker3, [{label, {text, "Cancel"}},
+ {pack_xy, {3,1}}]),
+ gs:config(Win, [{map, true}]),
+ gs:config(Frame, WH),
+ macro_loop(Parent, Options, Frame, AddButton, DeleteAllButton, DeleteButton,
+ MacroBox, MacroEntry, TermEntry, OkButton, CancelButton, Win).
+
+macro_loop(Parent, Options, Frame, AddButton, DeleteAllButton, DeleteButton,
+ MacroBox, MacroEntry, TermEntry, OkButton, CancelButton, Win) ->
+ receive
+ {gs, CancelButton, click, _, _} ->
+ gs:destroy(Win),
+ ok;
+ {gs, OkButton, click, _, _} ->
+ gs:destroy(Win),
+ Parent ! {new_options, Options},
+ ok;
+ {gs, Win, configure, _Data, [W, H|_]} ->
+ gs:config(Frame, [{width, W}, {height, H}]),
+ macro_loop(Parent, Options, Frame, AddButton, DeleteAllButton,
+ DeleteButton, MacroBox, MacroEntry, TermEntry, OkButton,
+ CancelButton, Win);
+ {gs, AddButton, click, _, _} ->
+ Defines = Options#options.defines,
+ NewDefines =
+ case gs:read(MacroEntry, text) of
+ "" -> Defines;
+ Macro ->
+ Empty = [{text, ""}],
+ case gs:read(TermEntry, text) of
+ "" ->
+ gs:config(MacroEntry, Empty),
+ orddict:store(list_to_atom(Macro), true, Defines);
+ String ->
+ case parse(String) of
+ {ok, Term} ->
+ gs:config(MacroEntry, Empty),
+ gs:config(TermEntry, Empty),
+ orddict:store(list_to_atom(Macro), Term, Defines);
+ {error, _Reason} ->
+ Defines
+ end
+ end
+ end,
+ NewOptions = Options#options{defines = NewDefines},
+ NewEntries = [io_lib:format("~p = ~p", [X, Y]) || {X, Y} <- NewDefines],
+ gs:config(MacroBox, [{items, NewEntries}]),
+ macro_loop(Parent, NewOptions, Frame, AddButton, DeleteAllButton,
+ DeleteButton, MacroBox, MacroEntry, TermEntry, OkButton,
+ CancelButton, Win);
+ {gs, DeleteAllButton, click, _, _} ->
+ gs:config(MacroBox, [clear]),
+ NewOptions = Options#options{defines = []},
+ macro_loop(Parent, NewOptions, Frame, AddButton, DeleteAllButton,
+ DeleteButton, MacroBox, MacroEntry, TermEntry, OkButton,
+ CancelButton, Win);
+ {gs, DeleteButton, click, _, _} ->
+ NewOptions =
+ case gs:read(MacroBox, selection) of
+ [] ->
+ Options;
+ List ->
+ gs:config(MacroBox, [{selection, clear}]),
+ Fun =
+ fun(X) ->
+ Val = gs:read(MacroBox, {get, X}),
+ [MacroName|_] = re:split(Val, " ", [{return, list}]),
+ list_to_atom(MacroName)
+ end,
+ Delete = [Fun(X) || X <- List],
+ lists:foreach(fun(X) -> gs:config(MacroBox, [{del, X}]) end,
+ lists:reverse(lists:sort(List))),
+ Defines = Options#options.defines,
+ NewDefines = lists:foldl(fun(X, Acc) ->
+ orddict:erase(X, Acc)
+ end,
+ Defines, Delete),
+ Options#options{defines = NewDefines}
+ end,
+ macro_loop(Parent, NewOptions, Frame, AddButton, DeleteAllButton,
+ DeleteButton, MacroBox, MacroEntry, TermEntry, OkButton,
+ CancelButton, Win);
+ {gs, Win, destroy, _, _} ->
+ ok
+ end.
+
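+%% Parse a string as a single Erlang term; returns {ok, Term} or {error, Reason}.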
+parse(String) ->
+ case erl_scan:string(String ++ ".", 1) of
+ {ok, Ts, _} ->
+ case erl_parse:parse_exprs(Ts) of
+ {ok, [Expr]} ->
+ try {ok, erl_parse:normalise(Expr)}
+ catch error:Reason -> {error, Reason}
+ end;
+ {error, E} ->
+ parse_error(E)
+ end;
+ {error, E, _} ->
+ parse_error(E)
+ end.
+
+parse_error(E) ->
+ S = io_lib:fwrite("Error parsing expression: ~P.", [E,15]),
+ {error, S}.
+
+%% ----------------------------------------------------------------
+%%
+%% Run the analysis
+%%
+
+start_analysis(State) ->
+ Analysis = build_analysis_record(State),
+ case get_anal_files(State, Analysis#analysis.start_from) of
+ error ->
+ Msg = "You must choose one or more files or dirs\n"
+ "before starting the analysis!",
+ error_sms(State, Msg),
+ config_gui_stop(State),
+ State;
+ {ok, Files} ->
+ Msg = "\n========== Starting Analysis ==========\n\n",
+ update_editor(State#gui_state.log, Msg),
+ NewAnalysis = Analysis#analysis{files = Files},
+ run_analysis(State, NewAnalysis)
+ end.
+
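+%% Collect the current GUI settings (start mode, initial PLT, macro
+%% definitions and include directories) into an #analysis{} record.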
+build_analysis_record(#gui_state{mode = Mode, menu = Menu, options = Options,
+ init_plt = InitPlt0}) ->
+ StartFrom =
+ case gs:read(Mode#mode.start_byte_code, select) of
+ true -> byte_code;
+ false -> src_code
+ end,
+ InitPlt =
+ case gs:read(Menu#menu.plt_empty, select) of
+ true -> dialyzer_plt:new();
+ false -> InitPlt0
+ end,
+ #analysis{defines = Options#options.defines,
+ include_dirs = Options#options.include_dirs,
+ plt = InitPlt,
+ start_from = StartFrom}.
+
+get_anal_files(#gui_state{chosen_box = ChosenBox}, StartFrom) ->
+ Files = gs:read(ChosenBox, items),
+ FilteredMods =
+ case StartFrom of
+ src_code -> filter_mods(Files, ".erl");
+ byte_code -> filter_mods(Files, ".beam")
+ end,
+ FilteredDirs = [X || X <- Files, filelib:is_dir(X)],
+ case ordsets:union(FilteredMods, FilteredDirs) of
+ [] -> error;
+ Set -> {ok, Set}
+ end.
+
+run_analysis(State, Analysis) ->
+ config_gui_start(State),
+ Self = self(),
+ NewAnalysis = Analysis#analysis{doc_plt = dialyzer_plt:new()},
+ LegalWarnings = find_legal_warnings(State),
+ Fun =
+ fun() ->
+ dialyzer_analysis_callgraph:start(Self, LegalWarnings, NewAnalysis)
+ end,
+ BackendPid = spawn_link(Fun),
+ State#gui_state{backend_pid = BackendPid}.
+
+find_legal_warnings(#gui_state{menu = #menu{warnings = Warnings}}) ->
+ ordsets:from_list([Tag || {Tag, GSItem} <- Warnings,
+ gs:read(GSItem, select) =:= true]).
+
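+%% Discard any messages that are left in the process mailbox.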
+flush() ->
+ receive
+ _ -> flush()
+ after
+ 0 -> ok
+ end.
+
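+%% Temporarily enable the read-only editor, append Msg at the end,
+%% scroll down to show it, and disable the editor again.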
+update_editor(Editor, Msg) ->
+ gs:config(Editor, [{enable, true}]),
+ NofRows = gs:read(Editor, size),
+ gs:config(Editor, [{insertpos, 'end'}]),
+ gs:config(Editor, [{insert, {insert, Msg}}]),
+ NewNofRows = gs:read(Editor, size),
+ ScrollPos = gs:read(Editor, vscrollpos),
+ gs:config(Editor, [{vscrollpos, ScrollPos + NewNofRows - NofRows}]),
+ gs:config(Editor, [{enable, false}]).
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
new file mode 100644
index 0000000000..2d97f88680
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -0,0 +1,1243 @@
+%% -*- erlang-indent-level: 2 -*-
+%%------------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-----------------------------------------------------------------------
+%%% File : dialyzer_gui_wx.erl
+%%% Authors : Elli Fragkaki <[email protected]>
+%%% Description : The wx-based graphical user interface of dialyzer.
+%%%
+%%% Created : 07 Oct 2009 by Elli Fragkaki <[email protected]>
+%%%-----------------------------------------------------------------------
+
+-module(dialyzer_gui_wx).
+
+-export([start/1]).
+
+-include("dialyzer.hrl").
+-include("dialyzer_gui_wx.hrl").
+
+%%------------------------------------------------------------------------
+
+-define(DIALYZER_ERROR_TITLE, "Dialyzer Error").
+-define(DIALYZER_MESSAGE_TITLE, "Dialyzer Message").
+
+%%------------------------------------------------------------------------
+
+-type wx_object() :: any(). %% XXX: should be imported from wx
+
+-record(menu, {file :: wx_object(),
+ warnings :: wx_object(),
+ plt :: wx_object(),
+ options :: wx_object(),
+ help :: wx_object()}).
+
+-record(gui_state, {add :: wx_object(),
+ add_dir :: wx_object(),
+ add_rec :: wx_object(),
+ chosen_box :: wx_object(),
+ analysis_pid :: pid(),
+ del_file :: wx_object(),
+ doc_plt :: dialyzer_plt:plt(),
+ clear_chosen :: wx_object(),
+ clear_log :: wx_object(),
+ explain_warn :: wx_object(),
+ clear_warn :: wx_object(),
+ init_plt :: dialyzer_plt:plt(),
+ dir_entry :: wx_object(),
+ file_box :: wx_object(),
+ files_to_analyze :: ordset(string()),
+ gui :: wx_object(),
+ log :: wx_object(),
+ menu :: #menu{},
+ mode :: wx_object(),
+ options :: #options{},
+ run :: wx_object(),
+ stop :: wx_object(),
+ frame :: wx_object(),
+ warnings_box :: wx_object(),
+ explanation_box :: wx_object(),
+ wantedWarnings :: list(),
+ rawWarnings :: list(),
+ backend_pid :: pid(),
+ expl_pid :: pid()}).
+
+%%------------------------------------------------------------------------
+
+-spec start(#options{}) -> ?RET_NOTHING_SUSPICIOUS.
+
+start(DialyzerOptions) ->
+ process_flag(trap_exit, true),
+ Wx = wx:new(),
+ State = wx:batch(fun() -> create_window(Wx, DialyzerOptions) end),
+ gui_loop(State).
+
+create_window(Wx, DialyzerOptions) ->
+ {ok, Host} = inet:gethostname(),
+
+ %%---------- initializing frame ---------
+ Frame = wxFrame:new(Wx, -1, "Dialyzer " ++ ?VSN ++ " @ " ++ Host),
+ wxFrame:connect(Frame, close_window),
+ FileMenu = createFileMenu(),
+ WarningsMenu = createWarningsMenu(),
+ PltMenu = createPltMenu(),
+ OptionsMenu = createOptionsMenu(),
+ HelpMenu = createHelpMenu(),
+
+ MenuBar = wxMenuBar:new(),
+ wxMenuBar:append(MenuBar, FileMenu, "File"),
+ wxMenuBar:append(MenuBar, WarningsMenu, "Warnings"),
+ wxMenuBar:append(MenuBar, PltMenu, "Plt"),
+ wxMenuBar:append(MenuBar, OptionsMenu, "Options"),
+ wxMenuBar:append(MenuBar, HelpMenu, "Help"),
+ wxFrame:setMenuBar(Frame, MenuBar),
+ ok = wxFrame:connect(Frame, command_menu_selected),
+
+ %%----------- Set Labels -------------
+ Lab1 = wxStaticText:new(Frame, ?LABEL1, "Directories or modules to analyze"),
+ OptionsLabel = wxStaticText:new(Frame, ?LABEL2, "Analysis Options"),
+ LogLabel = wxStaticText:new(Frame, ?LABEL3, "Log"),
+ FileLabel = wxStaticText:new(Frame, ?LABEL4, "File: "),
+ DirLabel = wxStaticText:new(Frame, ?LABEL5, "Dir: "),
+ WarningsLabel = wxStaticText:new(Frame, ?LABEL6, "Warnings"),
+
+ %%---------- Set TextBoxes -----------
+ ChosenBox = wxListBox:new(Frame, ?ChosenBox,
+ [{size, {250,200}},
+ {style, ?wxLB_EXTENDED bor ?wxLB_HSCROLL
+ bor ?wxLB_NEEDED_SB}]),
+ LogBox = wxTextCtrl:new(Frame, ?LogBox,
+ [{size, {530,200}},
+ {style, ?wxTE_MULTILINE
+ bor ?wxTE_READONLY bor ?wxHSCROLL}]),
+ DefaultPath = code:root_dir(),
+
+ FilePicker = wxFilePickerCtrl:new(Frame, ?FilePicker,
+ [{path, DefaultPath},
+ {message, "Choose File to Analyse"},
+ {style,?wxFLP_FILE_MUST_EXIST bor ?wxFLP_USE_TEXTCTRL}]),
+ wxPickerBase:setTextCtrlProportion(FilePicker,3),
+ wxPickerBase:setPickerCtrlProportion(FilePicker,2),
+ DirPicker = wxDirPickerCtrl:new(Frame, ?DirPicker,
+ [{path, DefaultPath},
+ {message, "Choose Directory to Analyze"},
+ {style,?wxDIRP_DIR_MUST_EXIST bor ?wxDIRP_USE_TEXTCTRL}]),
+ WarningsBox = wxListBox:new(Frame, ?WarningsBox,
+ [{size, {700,200}},
+ {style, ?wxLB_HSCROLL
+ bor ?wxLB_NEEDED_SB}]),
+
+ %%--------- Set Buttons --------------
+ DeleteButton = wxButton:new(Frame, ?Del_Button, [{label, "Delete"}]),
+ DeleteAllButton = wxButton:new(Frame, ?DelAll_Button, [{label, "Delete All"}]),
+ FileType = wxRadioBox:new(Frame, ?RADIOBOX, " File Type: " , {1,1}, {150,90},
+ [["BeamFiles"],["SourceFiles"]]),
+ ClearLogButton = wxButton:new(Frame, ?ClearLog_Button, [{label, "Clear Log"}]),
+ AddButton = wxButton:new(Frame, ?Add_Button, [{label, "Add"}]),
+ AddDirButton = wxButton:new(Frame, ?AddDir_Button, [{label, "Add Dir"}]),
+ AddRecButton = wxButton:new(Frame, ?AddRec_Button, [{label, "Add Recursively"}]),
+ ExplainWarnButton = wxButton:new(Frame, ?ExplWarn_Button, [{label, "Explain Warning"}]),
+ ClearWarningsButton = wxButton:new(Frame, ?ClearWarn_Button, [{label, "Clear Warnings"}]),
+ RunButton = wxButton:new(Frame, ?Run_Button, [{label, "Run"}]),
+ StopButton = wxButton:new(Frame, ?Stop_Button, [{label, "Stop"}]),
+ wxWindow:disable(StopButton),
+ %%--------- Connect Buttons -----------
+ wxButton:connect(DeleteButton, command_button_clicked),
+ wxButton:connect(DeleteAllButton, command_button_clicked),
+ wxButton:connect(ClearLogButton, command_button_clicked),
+ wxButton:connect(AddButton, command_button_clicked),
+ wxButton:connect(AddDirButton, command_button_clicked),
+ wxButton:connect(AddRecButton, command_button_clicked),
+ wxButton:connect(ExplainWarnButton, command_button_clicked),
+ wxButton:connect(ClearWarningsButton, command_button_clicked),
+ wxButton:connect(RunButton, command_button_clicked),
+ wxButton:connect(StopButton, command_button_clicked),
+
+ %%------------Set Layout ------------
+ All = wxBoxSizer:new(?wxVERTICAL),
+ Top = wxBoxSizer:new(?wxHORIZONTAL),
+ Left = wxBoxSizer:new(?wxVERTICAL),
+ Right = wxBoxSizer:new(?wxVERTICAL),
+ RightUp = wxBoxSizer:new(?wxHORIZONTAL),
+
+ Opts = [{flag,?wxEXPAND bor ?wxALL}, {proportion,1}, {border, 1}],
+ Opts3 = [{flag,?wxEXPAND bor ?wxALL}, {proportion,3}, {border, 1}],
+ Center = [{flag, ?wxALIGN_CENTER_HORIZONTAL}],
+
+ ChooseItem = wxBoxSizer:new(?wxVERTICAL),
+ FileTypeItem = wxBoxSizer:new(?wxVERTICAL),
+ LogItem = wxBoxSizer:new(?wxVERTICAL),
+ FileDirItem = wxBoxSizer:new(?wxVERTICAL),
+ FileItem = wxBoxSizer:new(?wxHORIZONTAL),
+ DirItem = wxBoxSizer:new(?wxHORIZONTAL),
+ AddDirButtons = wxBoxSizer:new(?wxHORIZONTAL),
+ WarningsItem = wxBoxSizer:new(?wxVERTICAL),
+ ChooseButtons = wxBoxSizer:new(?wxHORIZONTAL),
+ WarnButtons = wxBoxSizer:new(?wxHORIZONTAL),
+ RunButtons = wxBoxSizer:new(?wxHORIZONTAL),
+ Buttons = wxFlexGridSizer:new(3),
+
+ wxSizer:add(ChooseButtons, DeleteButton, ?BorderOpt),
+ wxSizer:add(ChooseButtons, DeleteAllButton, ?BorderOpt),
+ wxSizer:add(ChooseItem, Lab1, Center),
+ wxSizer:add(ChooseItem, ChosenBox, Opts),
+ wxSizer:add(ChooseItem, ChooseButtons, ?BorderOpt),
+ wxSizer:add(FileTypeItem, OptionsLabel),
+ wxSizer:add(FileTypeItem, FileType, [{border, 5}, {flag, ?wxALL}]),
+ wxSizer:add(LogItem, LogLabel, Center),
+ wxSizer:add(LogItem, LogBox, Opts3),
+ wxSizer:add(LogItem, ClearLogButton, ?BorderOpt),
+ wxSizer:add(FileItem, FileLabel),
+ wxSizer:add(FileItem, FilePicker),
+ wxSizer:add(DirItem, DirLabel),
+ wxSizer:add(DirItem, DirPicker),
+ wxSizer:add(AddDirButtons, AddDirButton, ?BorderOpt),
+ wxSizer:add(AddDirButtons, AddRecButton, ?BorderOpt),
+ wxSizer:add(FileDirItem, FileItem),
+ wxSizer:add(FileDirItem, AddButton, ?BorderOpt),
+ wxSizer:add(FileDirItem, DirItem, ?BorderOpt),
+ wxSizer:add(FileDirItem, AddDirButtons, ?BorderOpt),
+ wxSizer:add(WarnButtons, ExplainWarnButton, ?BorderOpt),
+ wxSizer:add(WarnButtons, ClearWarningsButton, ?BorderOpt),
+ wxSizer:add(RunButtons, RunButton, ?BorderOpt),
+ wxSizer:add(RunButtons, StopButton, ?BorderOpt),
+ wxSizer:add(Buttons, WarnButtons),
+ wxSizer:add(Buttons, wxStaticText:new(Frame, ?LABEL7, ""), [{flag, ?wxEXPAND}]),
+ wxSizer:add(Buttons, RunButtons),
+ wxFlexGridSizer:addGrowableCol(Buttons, 1),
+ wxSizer:add(WarningsItem, WarningsLabel, Center),
+ wxSizer:add(WarningsItem, WarningsBox, Opts3),
+ wxSizer:add(WarningsItem, Buttons, [{flag, ?wxEXPAND bor ?wxALL},?Border]),
+
+ wxSizer:add(Left, ChooseItem, Opts),
+ wxSizer:add(Left, FileDirItem, [{proportion, 1}, {border, 60}, {flag, ?wxTOP}]),
+ wxSizer:add(RightUp, FileTypeItem, ?BorderOpt),
+ wxSizer:add(RightUp, LogItem, Opts3),
+ wxSizer:add(Right, RightUp, Opts3),
+ wxSizer:add(Right, WarningsItem, Opts3),
+ wxSizer:add(Top, Left, Opts),
+ wxSizer:add(Top, Right, Opts3),
+
+ wxSizer:add(All, Top, Opts),
+ wxWindow:setSizer(Frame, All),
+ wxWindow:setSizeHints(Frame, {1150,600}),
+ wxWindow:show(Frame),
+
+ Warnings = [{?WARN_RETURN_NO_RETURN, ?menuID_WARN_NO_RETURN_FUN},
+ {?WARN_RETURN_ONLY_EXIT, ?menuID_WARN_ERROR_HANDLING_FUN},
+ {?WARN_NOT_CALLED, ?menuID_WARN_UNUSED_FUN},
+ {?WARN_NON_PROPER_LIST, ?menuID_WARN_LIST_CONSTR},
+ {?WARN_FUN_APP, ?menuID_WARN_BAD_FUN},
+ {?WARN_MATCHING, ?menuID_WARN_MATCH_FAILURES},
+ {?WARN_OPAQUE, ?menuID_WARN_OPAQUE},
+ {?WARN_FAILING_CALL, ?menuID_WARN_FAIL_FUN_CALLS},
+ {?WARN_CALLGRAPH, ?menuID_WARN_UNEXPORTED_FUN},
+ {?WARN_RACE_CONDITION, ?menuID_WARN_RACE_CONDITIONS},
+ %% For contracts.
+ {?WARN_CONTRACT_TYPES,?menuID_WARN_WRONG_CONTRACTS},
+ {?WARN_CONTRACT_SYNTAX, ?menuID_WARN_CONTRACT_SYNTAX}
+ ],
+ Menu = #menu{file = FileMenu,
+ warnings = WarningsMenu,
+ plt = PltMenu,
+ options = OptionsMenu,
+ help = HelpMenu},
+
+ InitPlt = try dialyzer_plt:from_file(DialyzerOptions#options.init_plt)
+ catch throw:{dialyzer_error, _} -> dialyzer_plt:new()
+ end,
+
+ #gui_state{add = AddButton,
+ add_dir = AddDirButton,
+ add_rec = AddRecButton,
+ chosen_box = ChosenBox,
+ clear_chosen = DeleteAllButton,
+ clear_log = ClearLogButton,
+ explain_warn = ExplainWarnButton,
+ clear_warn = ClearWarningsButton,
+ del_file = DeleteButton,
+ doc_plt = dialyzer_plt:new(),
+ dir_entry = DirPicker,
+ file_box = FilePicker,
+ files_to_analyze = ordsets:new(),
+ gui = Wx,
+ init_plt = InitPlt,
+ log = LogBox,
+ menu = Menu,
+ mode = FileType,
+ options = DialyzerOptions,
+ run = RunButton,
+ stop = StopButton,
+ frame = Frame,
+ warnings_box = WarningsBox,
+ wantedWarnings = Warnings,
+ rawWarnings = []}.
+
+createFileMenu() ->
+ FileMenu = wxMenu:new(),
+ wxMenu:append(FileMenu, wxMenuItem:new([{id, ?menuID_FILE_SAVE_WARNINGS},
+ {text, "Save &Warnings"}])),
+ wxMenu:append(FileMenu, wxMenuItem:new([{id, ?menuID_FILE_SAVE_LOG},
+ {text, "Save &Log"}])),
+ wxMenu:append(FileMenu, wxMenuItem:new([{id, ?menuID_FILE_QUIT},
+ {text, "E&xit\tAlt-X"}])),
+ FileMenu.
+
+createWarningsMenu() ->
+ WarningsMenu = wxMenu:new(),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_MATCH_FAILURES,
+ "Match failures"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_MATCH_FAILURES, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_FAIL_FUN_CALLS,
+ "Failing function calls"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_FAIL_FUN_CALLS, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_BAD_FUN,
+ "Bad fun applications"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_BAD_FUN, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_OPAQUE,
+ "Opaqueness violations"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_OPAQUE, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_LIST_CONSTR,
+ "Improper list constructions"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_LIST_CONSTR, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_UNUSED_FUN,
+ "Unused functions"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_UNUSED_FUN, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_ERROR_HANDLING_FUN,
+ "Error handling functions"),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_NO_RETURN_FUN,
+ "Functions of no return"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_NO_RETURN_FUN, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_UNEXPORTED_FUN,
+ "Call to unexported function"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_UNEXPORTED_FUN, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_RACE_CONDITIONS,
+ "Possible race conditions"),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_WRONG_CONTRACTS,
+ "Wrong contracts"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_WRONG_CONTRACTS, true),
+ wxMenu:appendCheckItem(WarningsMenu,
+ ?menuID_WARN_CONTRACT_SYNTAX,
+ "Wrong contract syntax"),
+ wxMenu:check(WarningsMenu, ?menuID_WARN_CONTRACT_SYNTAX, true),
+ WarningsMenu.
+
+createPltMenu() ->
+ PltMenu = wxMenu:new(),
+ wxMenu:appendCheckItem(PltMenu,
+ ?menuID_PLT_INIT_EMPTY,
+ "Init with empty PLT"),
+ wxMenu:append(PltMenu, wxMenuItem:new([{id, ?menuID_PLT_SHOW_CONTENTS},
+ {text, "Show contents"}])),
+ wxMenu:append(PltMenu, wxMenuItem:new([{id, ?menuID_PLT_SEARCH_CONTENTS},
+ {text, "Search contents"}])),
+ PltMenu.
+
+createOptionsMenu() ->
+ OptsMenu = wxMenu:new(),
+ wxMenu:append(OptsMenu, wxMenuItem:new([{id, ?menuID_OPTIONS_MACRO},
+ {text, "Manage Macro Definitions"}])),
+ wxMenu:append(OptsMenu, wxMenuItem:new([{id, ?menuID_OPTIONS_INCLUDE_DIR},
+ {text, "Manage Include Directories"}])),
+ OptsMenu.
+
+createHelpMenu() ->
+ HelpMenu = wxMenu:new(),
+ wxMenu:append(HelpMenu, wxMenuItem:new([{id, ?menuID_HELP_MANUAL},
+ {text, "Manual"}])),
+ wxMenu:append(HelpMenu, wxMenuItem:new([{id, ?menuID_HELP_WARNING_OPTIONS},
+ {text, "Warning Options"}])),
+ wxMenu:append(HelpMenu, wxMenuItem:new([{id, ?menuID_HELP_ABOUT},
+ {text, "About"}])),
+ HelpMenu.
+
+%% ----------------------------------------------------------------
+%%
+%% Main GUI Loop
+%%
+
+-spec gui_loop(#gui_state{}) -> ?RET_NOTHING_SUSPICIOUS.
+
+gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
+ log = Log, frame = Frame,
+ warnings_box = WarningsBox} = State) ->
+ receive
+ #wx{event = #wxClose{}} ->
+ io:format("~p Closing window ~n", [self()]),
+ ok = wxFrame:setStatusText(Frame, "Closing...",[]),
+ wxWindow:destroy(Frame),
+ ?RET_NOTHING_SUSPICIOUS;
+ %% ----- Menu -----
+ #wx{id = ?menuID_FILE_SAVE_LOG, obj = Frame,
+ event = #wxCommand{type = command_menu_selected}} ->
+ save_file(State, log),
+ gui_loop(State);
+ #wx{id=?menuID_FILE_SAVE_WARNINGS, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ save_file(State, warnings),
+ gui_loop(State);
+ #wx{id=?menuID_FILE_QUIT, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ case maybe_quit(State) of
+ true -> ?RET_NOTHING_SUSPICIOUS;
+ false -> gui_loop(State)
+ end;
+ #wx{id=?menuID_PLT_SHOW_CONTENTS, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ show_doc_plt(State),
+ gui_loop(State);
+ #wx{id=?menuID_PLT_SEARCH_CONTENTS, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ case dialyzer_plt:get_specs(DocPlt) of
+ "" -> error_sms(State, "No analysis has been made yet!\n");
+ _ -> search_doc_plt(State)
+ end,
+ gui_loop(State);
+ #wx{id=?menuID_OPTIONS_INCLUDE_DIR, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ NewOptions = include_dialog(State),
+ NewState = State#gui_state{options = NewOptions},
+ gui_loop(NewState);
+ #wx{id=?menuID_OPTIONS_MACRO, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ NewOptions = macro_dialog(State),
+ NewState = State#gui_state{options = NewOptions},
+ gui_loop(NewState);
+ #wx{id=?menuID_HELP_MANUAL, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ handle_help(State, "Dialyzer Manual", "manual.txt"),
+ gui_loop(State);
+ #wx{id=?menuID_HELP_WARNING_OPTIONS, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ handle_help(State, "Dialyzer Warnings", "warnings.txt"),
+ gui_loop(State);
+ #wx{id=?menuID_HELP_ABOUT, obj=Frame,
+ event=#wxCommand{type=command_menu_selected}} ->
+ Message = " This is DIALYZER version " ++ ?VSN ++ " \n"++
+ "DIALYZER is a DIscrepany AnaLYZer for ERlang programs.\n\n"++
+ " Copyright (C) Tobias Lindahl <[email protected]>\n"++
+ " Kostis Sagonas <[email protected]>\n\n",
+ output_sms(State, "About Dialyzer", Message, info),
+ gui_loop(State);
+ %% ------ Buttons ---------
+ #wx{id=?Add_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ State1 = handle_add_files(State),
+ gui_loop(State1);
+ #wx{id=?AddDir_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ State1 = handle_add_dir(State),
+ gui_loop(State1);
+ #wx{id=?AddRec_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ State1 = handle_add_rec(State),
+ gui_loop(State1);
+ #wx{id=?Del_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ State1 = handle_file_delete(State),
+ gui_loop(State1);
+ #wx{id=?DelAll_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ State1 = handle_file_delete_all(State),
+ gui_loop(State1);
+ #wx{id=?ClearLog_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ wxTextCtrl:clear(State#gui_state.log),
+ gui_loop(State);
+ #wx{id=?ExplWarn_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ handle_explanation(State),
+ gui_loop(State);
+ #wx{id=?ClearWarn_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ wxListBox:clear(WarningsBox),
+ NewState = State#gui_state{rawWarnings = []},
+ gui_loop(NewState);
+ #wx{id=?Run_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ NewState = start_analysis(State),
+ gui_loop(NewState);
+ #wx{id=?Stop_Button,
+ event=#wxCommand{type=command_button_clicked}} ->
+ BackendPid ! {self(), stop},
+ config_gui_stop(State),
+ update_editor(Log, "\n***** Analysis stopped ****\n"),
+ gui_loop(State);
+ %% ----- Analysis -----
+ {BackendPid, ext_calls, ExtCalls} ->
+ Msg = io_lib:format("The following functions are called "
+ "but type information about them is not available.\n"
+ "The analysis might get more precise by including "
+ "the modules containing these functions:\n\n\t~p\n",
+ [ExtCalls]),
+ free_editor(State,"Analysis Done", Msg),
+ gui_loop(State);
+ {BackendPid, log, LogMsg} ->
+ update_editor(Log, LogMsg),
+ gui_loop(State);
+ {BackendPid, warnings, Warns} ->
+ SortedWarns = lists:keysort(2, Warns), %% Sort on file/line
+ NewState = add_warnings(State, SortedWarns),
+ gui_loop(NewState);
+ {BackendPid, cserver, CServer, Plt} ->
+ Self = self(),
+ Fun =
+ fun() ->
+ dialyzer_explanation:expl_loop(Self, CServer, Plt)
+ end,
+ ExplanationPid = spawn_link(Fun),
+ gui_loop(State#gui_state{expl_pid = ExplanationPid});
+ {BackendPid, done, _NewPlt, NewDocPlt} ->
+ message(State, "Analysis done"),
+ config_gui_stop(State),
+ gui_loop(State#gui_state{doc_plt = NewDocPlt});
+ {'EXIT', BackendPid, {error, Reason}} ->
+ free_editor(State, ?DIALYZER_ERROR_TITLE, Reason),
+ config_gui_stop(State),
+ gui_loop(State);
+ {'EXIT', BackendPid, Reason} when Reason =/= 'normal' ->
+ free_editor(State, ?DIALYZER_ERROR_TITLE, io_lib:format("~p", [Reason])),
+ config_gui_stop(State),
+ gui_loop(State)
+ end.
+
+maybe_quit(#gui_state{frame = Frame} = State) ->
+ case dialog(State, "Do you really want to quit?", ?DIALYZER_MESSAGE_TITLE) of
+ true ->
+ wxWindow:destroy(Frame),
+ true;
+ false ->
+ false
+ end.
+
+%% ------------ Yes/No Question ------------
+dialog(#gui_state{frame = Frame}, Message, Title) ->
+ MessageWin = wxMessageDialog:new(Frame,Message,[{caption, Title},{style, ?wxYES_NO bor ?wxICON_QUESTION bor ?wxNO_DEFAULT}]),
+ case wxDialog:showModal(MessageWin) of
+ ?wxID_YES ->
+ true;
+ ?wxID_NO ->
+ false;
+ ?wxID_CANCEL ->
+ false
+ end.
+
+search_doc_plt(#gui_state{gui = Wx} = State) ->
+ Dialog = wxFrame:new(Wx, ?SearchPltDialog, "Search the PLT",[{size,{400,100}},{style, ?wxSTAY_ON_TOP}]),
+ Size = {size,{120,30}},
+ ModLabel = wxStaticText:new(Dialog, ?ModLabel, "Module"),
+ ModText = wxTextCtrl:new(Dialog, ?ModText,[Size]),
+ FunLabel = wxStaticText:new(Dialog, ?FunLabel, "Function"),
+ FunText = wxTextCtrl:new(Dialog, ?FunText,[Size]),
+ ArLabel = wxStaticText:new(Dialog, ?ArLabel, "Arity"),
+ ArText = wxTextCtrl:new(Dialog, ?ArText,[Size]),
+ SearchButton = wxButton:new(Dialog, ?SearchButton, [{label, "Search"}]),
+ wxButton:connect(SearchButton, command_button_clicked),
+ Cancel = wxButton:new(Dialog, ?Search_Cancel, [{label, "Cancel"}]),
+ wxButton:connect(Cancel, command_button_clicked),
+
+ Layout = wxBoxSizer:new(?wxVERTICAL),
+ Top = wxBoxSizer:new(?wxHORIZONTAL),
+ ModLayout = wxBoxSizer:new(?wxVERTICAL),
+ FunLayout = wxBoxSizer:new(?wxVERTICAL),
+ ArLayout = wxBoxSizer:new(?wxVERTICAL),
+ Buttons = wxBoxSizer:new(?wxHORIZONTAL),
+
+ wxSizer:add(ModLayout, ModLabel, ?BorderOpt),
+ wxSizer:add(ModLayout,ModText, ?BorderOpt),
+ wxSizer:add(FunLayout, FunLabel, ?BorderOpt),
+ wxSizer:add(FunLayout,FunText, ?BorderOpt),
+ wxSizer:add(ArLayout, ArLabel, ?BorderOpt),
+ wxSizer:add(ArLayout,ArText, ?BorderOpt),
+ wxSizer:add(Buttons, SearchButton, ?BorderOpt),
+ wxSizer:add(Buttons,Cancel, ?BorderOpt),
+
+ wxSizer:add(Top, ModLayout),
+ wxSizer:add(Top, FunLayout),
+ wxSizer:add(Top, ArLayout),
+ wxSizer:add(Layout, Top,[{flag, ?wxALIGN_CENTER}]),
+ wxSizer:add(Layout, Buttons,[{flag, ?wxALIGN_CENTER bor ?wxBOTTOM}]),
+ wxFrame:connect(Dialog, close_window),
+ wxWindow:setSizer(Dialog, Layout),
+ wxFrame:show(Dialog),
+ search_plt_loop(State, Dialog, ModText, FunText, ArText, SearchButton, Cancel).
+
+search_plt_loop(State= #gui_state{doc_plt = DocPlt, frame = Frame}, Win, ModText, FunText, ArText, Search, Cancel) ->
+ receive
+ #wx{id = ?Search_Cancel,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win);
+ #wx{id = ?SearchPltDialog, event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win);
+ #wx{event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win),
+ wxWindow:destroy(Frame);
+ #wx{id = ?SearchButton,
+ event = #wxCommand{type = command_button_clicked}} ->
+ M = format_search(wxTextCtrl:getValue(ModText)),
+ F = format_search(wxTextCtrl:getValue(FunText)),
+ A = format_search(wxTextCtrl:getValue(ArText)),
+
+ if
+ (M == '_') or (F == '_') or (A == '_') ->
+ error_sms(State, "Please give:\n Module (atom)\n Function (atom)\n Arity (integer)\n"),
+ search_plt_loop(State, Win, ModText, FunText, ArText, Search, Cancel);
+ true ->
+ case dialyzer_plt:get_specs(DocPlt, M, F, A) of
+ none ->
+ error_sms(State, "No such function"),
+ search_plt_loop(State, Win, ModText, FunText, ArText, Search, Cancel);
+ NonEmptyString ->
+ wxWindow:destroy(Win),
+ free_editor(State, "Content of PLT", NonEmptyString)
+ end
+ end
+ end.
+
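+%% An empty search field acts as a wildcard; otherwise the input is
+%% interpreted as an integer (arity) or an atom (module/function name).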
+format_search([]) ->
+ '_';
+format_search(String) ->
+ try list_to_integer(String)
+ catch error:_ -> list_to_atom(String)
+ end.
+
+show_doc_plt(#gui_state{doc_plt = DocPLT} = State) ->
+ case dialyzer_plt:get_specs(DocPLT) of
+ "" -> error_sms(State, "No analysis has been made yet!\n");
+ NonEmptyString -> free_editor(State, "Content of PLT", NonEmptyString)
+ end.
+
+message(State, Message) ->
+ output_sms(State, ?DIALYZER_MESSAGE_TITLE, Message, info).
+
+error_sms(State, Message) ->
+ output_sms(State, ?DIALYZER_ERROR_TITLE, Message, error).
+
+output_sms(#gui_state{frame = Frame}, Title, Message, Type) ->
+ Style =
+ case Type of
+ error -> ?wxOK bor ?wxICON_ERROR;
+ info -> ?wxOK bor ?wxICON_INFORMATION
+ end,
+ MessageWin = wxMessageDialog:new(Frame, Message,
+ [{caption, Title}, {style, Style}]),
+ wxWindow:setSizeHints(MessageWin, {350,100}),
+ wxDialog:showModal(MessageWin).
+
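+%% Show Contents0 in a read-only text window whose size is derived
+%% from the text itself (capped at 800x500 pixels).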
+free_editor(#gui_state{gui = Wx, frame = Frame}, Title, Contents0) ->
+ Contents = lists:flatten(Contents0),
+ Tokens = string:tokens(Contents, "\n"),
+ NofLines = length(Tokens),
+ LongestLine = lists:max([length(X) || X <- Tokens]),
+ Height0 = NofLines * 25 + 80,
+ Height = if Height0 > 500 -> 500; true -> Height0 end,
+ Width0 = LongestLine * 7 + 60,
+ Width = if Width0 > 800 -> 800; true -> Width0 end,
+ Size = {size,{Width, Height}},
+ Win = wxFrame:new(Wx, ?Message, Title, [{size,{Width+4, Height+50}}]),
+
+ Editor = wxTextCtrl:new(Win, ?Message_Info,
+ [Size,
+ {style, ?wxTE_MULTILINE
+ bor ?wxTE_READONLY bor ?wxVSCROLL bor ?wxEXPAND}]),
+ wxTextCtrl:appendText(Editor, Contents),
+ wxFrame:connect(Win, close_window),
+ Ok = wxButton:new(Win, ?Message_Ok, [{label, "OK"}]),
+ wxButton:connect(Ok, command_button_clicked),
+ Layout = wxBoxSizer:new(?wxVERTICAL),
+
+ wxSizer:add(Layout, Editor, ?BorderOpt),
+ wxSizer:add(Layout, Ok, [{flag, ?wxALIGN_CENTER bor ?wxBOTTOM bor ?wxALL}, ?Border]),
+ wxWindow:setSizer(Win, Layout),
+ wxWindow:show(Win),
+ show_info_loop(Frame, Win).
+
+show_info_loop(Frame, Win) ->
+ receive
+ #wx{id = ?Message_Ok, event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win);
+ #wx{id = ?Message, event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win);
+ #wx{event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Frame)
+ end.
+
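+%% Add the file chosen in the file picker to the set of files to
+%% analyze, filtered by the selected file type (.beam or .erl).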
+handle_add_files(#gui_state{chosen_box = ChosenBox, file_box = FileBox,
+ files_to_analyze = FileList,
+ mode = Mode} = State) ->
+ case wxFilePickerCtrl:getPath(FileBox) of
+ "" ->
+ State;
+ File ->
+ NewFile = ordsets:new(),
+ NewFile1 = ordsets:add_element(File,NewFile),
+ Ext =
+ case wxRadioBox:getSelection(Mode) of
+ 0 -> ".beam";
+ 1-> ".erl"
+ end,
+ State#gui_state{files_to_analyze = add_files(filter_mods(NewFile1, Ext), FileList, ChosenBox, Ext)}
+ end.
+
+handle_add_dir(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox,
+ files_to_analyze = FileList,
+ mode = Mode} = State) ->
+ case wxDirPickerCtrl:getPath(DirBox) of
+ "" ->
+ State;
+ Dir ->
+ NewDir = ordsets:new(),
+ NewDir1 = ordsets:add_element(Dir,NewDir),
+ Ext = case wxRadioBox:getSelection(Mode) of
+ 0 -> ".beam";
+ 1-> ".erl"
+ end,
+ State#gui_state{files_to_analyze = add_files(filter_mods(NewDir1,Ext), FileList, ChosenBox, Ext)}
+ end.
+
+handle_add_rec(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox, files_to_analyze = FileList,
+ mode = Mode} = State) ->
+ case wxDirPickerCtrl:getPath(DirBox) of
+ "" ->
+ State;
+ Dir ->
+ NewDir = ordsets:new(),
+ NewDir1 = ordsets:add_element(Dir,NewDir),
+ TargetDirs = ordsets:union(NewDir1, all_subdirs(NewDir1)),
+ Ext =
+ case wxRadioBox:getSelection(Mode) of
+ 0 -> ".beam";
+ 1 -> ".erl"
+ end,
+ State#gui_state{files_to_analyze = add_files(filter_mods(TargetDirs,Ext), FileList, ChosenBox, Ext)}
+ end.
+
+handle_file_delete(#gui_state{chosen_box = ChosenBox,
+ files_to_analyze = FileList} = State) ->
+ {_, List} = wxListBox:getSelections(ChosenBox),
+ Set = ordsets:from_list([wxControlWithItems:getString(ChosenBox, X) || X <- List]),
+ FileList1 = ordsets:subtract(FileList,Set),
+ lists:foreach(fun (X) -> wxListBox:delete(ChosenBox, X) end, List),
+ State#gui_state{files_to_analyze = FileList1}.
+
+handle_file_delete_all(#gui_state{chosen_box = ChosenBox} = State) ->
+ wxListBox:clear(ChosenBox),
+ State#gui_state{files_to_analyze = ordsets:new()}.
+
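+%% Merge the newly chosen entries into the existing set and refresh
+%% the list box with the result.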
+add_files(File, FileList, ChosenBox, Ext) ->
+ Set = filter_mods(FileList, Ext),
+ Files = ordsets:union(File, Set),
+ Files1 = ordsets:to_list(Files),
+ wxListBox:set(ChosenBox, Files1),
+ Files.
+
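+%% Keep entries that have the given extension, as well as directories
+%% that contain at least one file with that extension.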
+filter_mods(Mods, Extension) ->
+ Fun = fun(X) ->
+ filename:extension(X) =:= Extension
+ orelse
+ (filelib:is_dir(X) andalso
+ contains_files(X, Extension))
+ end,
+ ordsets:filter(Fun, Mods).
+
+contains_files(Dir, Extension) ->
+ {ok, Files} = file:list_dir(Dir),
+ lists:any(fun(X) -> filename:extension(X) =:= Extension end, Files).
+
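+%% Collect all subdirectories of the given directories, recursively.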
+all_subdirs(Dirs) ->
+ all_subdirs(Dirs, []).
+
+all_subdirs([Dir|T], Acc) ->
+ {ok, Files} = file:list_dir(Dir),
+ SubDirs = lists:zf(fun(F) ->
+ SubDir = filename:join(Dir, F),
+ case filelib:is_dir(SubDir) of
+ true -> {true, SubDir};
+ false -> false
+ end
+ end, Files),
+ NewAcc = ordsets:union(ordsets:from_list(SubDirs), Acc),
+ all_subdirs(T ++ SubDirs, NewAcc);
+all_subdirs([], Acc) ->
+ Acc.
+
+start_analysis(State) ->
+ Analysis = build_analysis_record(State),
+ case get_anal_files(State, Analysis#analysis.start_from) of
+ error ->
+ Msg = "You must choose one or more files or dirs\n"
+ "before starting the analysis!",
+ error_sms(State, Msg),
+ config_gui_stop(State),
+ State;
+ {ok, Files} ->
+ Msg = "\n========== Starting Analysis ==========\n\n",
+ update_editor(State#gui_state.log, Msg),
+ NewAnalysis = Analysis#analysis{files = Files},
+ run_analysis(State, NewAnalysis)
+ end.
+
+build_analysis_record(#gui_state{mode = Mode, menu = Menu, options = Options,
+ init_plt = InitPlt0}) ->
+ StartFrom =
+ case wxRadioBox:getSelection(Mode) of
+ 0 -> byte_code;
+ 1 -> src_code
+ end,
+ InitPlt =
+ case wxMenu:isChecked(Menu#menu.plt,?menuID_PLT_INIT_EMPTY) of
+ true -> dialyzer_plt:new();
+ false -> InitPlt0
+ end,
+ #analysis{defines = Options#options.defines,
+ include_dirs = Options#options.include_dirs,
+ plt = InitPlt,
+ start_from = StartFrom}.
+
+get_anal_files(#gui_state{files_to_analyze = Files}, StartFrom) ->
+ FilteredMods =
+ case StartFrom of
+ src_code -> filter_mods(Files, ".erl");
+ byte_code -> filter_mods(Files, ".beam")
+ end,
+ FilteredDirs = [X || X <- Files, filelib:is_dir(X)],
+ case ordsets:union(FilteredMods, FilteredDirs) of
+ [] -> error;
+ Set -> {ok, Set}
+ end.
+
+run_analysis(State, Analysis) ->
+ config_gui_start(State),
+ Self = self(),
+ NewAnalysis = Analysis#analysis{doc_plt = dialyzer_plt:new()},
+ LegalWarnings = find_legal_warnings(State),
+ Fun =
+ fun() ->
+ dialyzer_analysis_callgraph:start(Self, LegalWarnings, NewAnalysis)
+ end,
+ BackendPid = spawn_link(Fun),
+ State#gui_state{backend_pid = BackendPid}.
+
+find_legal_warnings(#gui_state{menu = #menu{warnings = MenuWarnings},
+ wantedWarnings = Warnings }) ->
+ ordsets:from_list([Tag || {Tag, MenuItem} <- Warnings,
+ wxMenu:isChecked(MenuWarnings, MenuItem)]).
+
+update_editor(Editor, Msg) ->
+ wxTextCtrl:appendText(Editor,Msg).
+
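+%% Once an analysis has stopped, re-enable the controls disabled by
+%% config_gui_start/1 and disable the Stop button.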
+config_gui_stop(State) ->
+ wxWindow:disable(State#gui_state.stop),
+ wxWindow:enable(State#gui_state.run),
+ wxWindow:enable(State#gui_state.del_file),
+ wxWindow:enable(State#gui_state.clear_chosen),
+ wxWindow:enable(State#gui_state.add),
+ wxWindow:enable(State#gui_state.add_dir),
+ wxWindow:enable(State#gui_state.add_rec),
+ wxWindow:enable(State#gui_state.clear_warn),
+ wxWindow:enable(State#gui_state.clear_log),
+ Menu = State#gui_state.menu,
+ wxMenu:enable(Menu#menu.file,?menuID_FILE_SAVE_WARNINGS,true),
+ wxMenu:enable(Menu#menu.file,?menuID_FILE_SAVE_LOG,true),
+ wxMenu:enable(Menu#menu.options,?menuID_OPTIONS_MACRO,true),
+ wxMenu:enable(Menu#menu.options,?menuID_OPTIONS_INCLUDE_DIR,true),
+ wxMenu:enable(Menu#menu.plt,?menuID_PLT_INIT_EMPTY,true),
+ wxMenu:enable(Menu#menu.plt,?menuID_PLT_SHOW_CONTENTS,true),
+ wxMenu:enable(Menu#menu.plt,?menuID_PLT_SEARCH_CONTENTS,true),
+ wxRadioBox:enable(State#gui_state.mode).
+
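+%% While an analysis is running, disable the controls that could change
+%% the analysis set-up and enable the Stop button.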
+config_gui_start(State) ->
+ wxWindow:enable(State#gui_state.stop),
+ wxWindow:disable(State#gui_state.run),
+ wxWindow:disable(State#gui_state.del_file),
+ wxWindow:disable(State#gui_state.clear_chosen),
+ wxWindow:disable(State#gui_state.add),
+ wxWindow:disable(State#gui_state.add_dir),
+ wxWindow:disable(State#gui_state.add_rec),
+ wxWindow:disable(State#gui_state.clear_warn),
+ wxWindow:disable(State#gui_state.clear_log),
+ Menu = State#gui_state.menu,
+ wxMenu:enable(Menu#menu.file,?menuID_FILE_SAVE_WARNINGS, false),
+ wxMenu:enable(Menu#menu.file,?menuID_FILE_SAVE_LOG, false),
+ wxMenu:enable(Menu#menu.options,?menuID_OPTIONS_MACRO, false),
+ wxMenu:enable(Menu#menu.options,?menuID_OPTIONS_INCLUDE_DIR, false),
+ wxMenu:enable(Menu#menu.plt,?menuID_PLT_INIT_EMPTY, false),
+ wxMenu:enable(Menu#menu.plt,?menuID_PLT_SHOW_CONTENTS, false),
+ wxMenu:enable(Menu#menu.plt,?menuID_PLT_SEARCH_CONTENTS, false),
+ wxRadioBox:disable(State#gui_state.mode).
+
+save_file(#gui_state{frame = Frame, warnings_box = WBox, log = Log} = State, Type) ->
+ {Message, Box} =
+ case Type of
+ warnings -> {"Save Warnings", WBox};
+ log -> {"Save Log", Log}
+ end,
+ case wxTextCtrl:getValue(Box) of
+ "" -> error_sms(State,"There is nothing to save...\n");
+ _ ->
+ DefaultPath = code:root_dir(),
+ FileDialog = wxFileDialog:new(Frame,
+ [{defaultDir, DefaultPath},
+ {message, Message},
+ {style,?wxFD_SAVE bor ?wxFD_OVERWRITE_PROMPT}]),
+ case wxFileDialog:showModal(FileDialog) of
+ ?wxID_OK -> Path = wxFileDialog:getPath(FileDialog),
+ case wxTextCtrl:saveFile(Box,[{file,Path}]) of
+ true -> ok;
+ false -> error_sms(State,"Could not write to file:\n" ++ Path)
+ end;
+ ?wxID_CANCEL -> wxWindow:destroy(FileDialog);
+ _ -> error_sms(State,"Could not write to file:\n")
+ end
+ end.
+
+include_dialog(#gui_state{gui = Wx, frame = Frame, options = Options}) ->
+ Size = {size,{300,480}},
+ Dialog = wxFrame:new(Wx, ?IncludeDir, "Include Directories",[Size]),
+ DirLabel = wxStaticText:new(Dialog, ?InclLabel, "Directory: "),
+ DefaultPath = code:root_dir(),
+ DirPicker = wxDirPickerCtrl:new(Dialog, ?InclPicker,
+ [{path, DefaultPath},
+ {message, "Choose Directory to Include"},
+ {style,?wxDIRP_DIR_MUST_EXIST bor ?wxDIRP_USE_TEXTCTRL}]),
+ Box = wxListBox:new(Dialog, ?InclBox,
+ [{size, {200,300}},
+ {style, ?wxLB_EXTENDED bor ?wxLB_HSCROLL
+ bor ?wxLB_NEEDED_SB}]),
+ AddButton = wxButton:new(Dialog, ?InclAdd, [{label, "Add"}]),
+ DeleteButton = wxButton:new(Dialog, ?InclDel, [{label, "Delete"}]),
+ DeleteAllButton = wxButton:new(Dialog, ?InclDelAll, [{label, "Delete All"}]),
+ Ok = wxButton:new(Dialog, ?InclOk, [{label, "OK"}]),
+ Cancel = wxButton:new(Dialog, ?InclCancel, [{label, "Cancel"}]),
+ wxButton:connect(AddButton, command_button_clicked),
+ wxButton:connect(DeleteButton, command_button_clicked),
+ wxButton:connect(DeleteAllButton, command_button_clicked),
+ wxButton:connect(Ok, command_button_clicked),
+ wxButton:connect(Cancel, command_button_clicked),
+ Dirs = [io_lib:format("~s", [X])
+ || X <- Options#options.include_dirs],
+ wxListBox:set(Box, Dirs),
+ Layout = wxBoxSizer:new(?wxVERTICAL),
+ Buttons = wxBoxSizer:new(?wxHORIZONTAL),
+ Buttons1 = wxBoxSizer:new(?wxHORIZONTAL),
+
+ wxSizer:add(Layout, DirLabel, [{flag, ?wxALIGN_CENTER_HORIZONTAL}]),
+ wxSizer:add(Layout, DirPicker, [{flag, ?wxALIGN_CENTER_HORIZONTAL}]),
+ wxSizer:add(Layout,AddButton, [{flag, ?wxALIGN_CENTER_HORIZONTAL bor ?wxALL}, ?Border]),
+ wxSizer:add(Layout,Box, [{flag, ?wxALIGN_CENTER_HORIZONTAL bor ?wxALL}, ?Border]),
+ wxSizer:add(Buttons, DeleteButton, ?BorderOpt),
+ wxSizer:add(Buttons, DeleteAllButton, ?BorderOpt),
+ wxSizer:add(Layout,Buttons, [{flag, ?wxALIGN_CENTER_HORIZONTAL}]),
+ wxSizer:add(Buttons1, Ok, ?BorderOpt),
+ wxSizer:add(Buttons1,Cancel, ?BorderOpt),
+ wxSizer:add(Layout,Buttons1,[{flag, ?wxALIGN_RIGHT bor ?wxBOTTOM}]),
+
+ wxFrame:connect(Dialog, close_window),
+ wxWindow:setSizer(Dialog, Layout),
+ wxFrame:show(Dialog),
+ include_loop(Options, Dialog, Box, DirPicker, Frame).
+
+include_loop(Options, Win, Box, DirPicker, Frame) ->
+ receive
+ #wx{id = ?InclCancel,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win),
+ Options;
+ #wx{id = ?IncludeDir, event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win),
+ Options;
+ #wx{event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win),
+ wxWindow:destroy(Frame);
+ #wx{id = ?InclOk,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win),
+ Options;
+ #wx{id = ?InclAdd,
+ event = #wxCommand{type = command_button_clicked}} ->
+ Dirs = Options#options.include_dirs,
+ NewDirs =
+ case wxDirPickerCtrl:getPath(DirPicker) of
+ "" -> Dirs;
+ Add -> [Add|Dirs]
+ end,
+ NewOptions = Options#options{include_dirs = NewDirs},
+ wxListBox:set(Box, NewDirs),
+ include_loop(NewOptions, Win, Box, DirPicker, Frame);
+ #wx{id = ?InclDel,
+ event = #wxCommand{type = command_button_clicked}} ->
+ NewOptions =
+ case wxListBox:getSelections(Box) of
+ {0,_} -> Options;
+ {_,List} ->
+ DelList = [wxControlWithItems:getString(Box,X) || X <- List],
+ NewDirs = Options#options.include_dirs -- DelList,
+ lists:foreach(fun (X) -> wxListBox:delete(Box, X) end, List),
+ Options#options{include_dirs = NewDirs}
+ end,
+ include_loop(NewOptions, Win, Box, DirPicker, Frame);
+ #wx{id = ?InclDelAll,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxListBox:clear(Box),
+ NewOptions = Options#options{include_dirs = []},
+ include_loop(NewOptions, Win, Box, DirPicker, Frame)
+ end.
+
+macro_dialog(#gui_state{gui = Wx, frame = Frame, options = Options}) ->
+ Size = {size,{300,480}},
+ Size1 = {size,{120,30}},
+ Dialog = wxFrame:new(Wx, ?MacroDir, "Macro Definitions",[Size]),
+ MacroLabel = wxStaticText:new(Dialog, ?MacroLabel, "Macro"),
+ TermLabel = wxStaticText:new(Dialog, ?TermLabel, "Term"),
+ MacroText = wxTextCtrl:new(Dialog, ?MacroText, [Size1]),
+ TermText = wxTextCtrl:new(Dialog, ?TermText, [Size1]),
+ Box = wxListBox:new(Dialog, ?MacroBox,
+ [{size, {250,300}},
+ {style, ?wxLB_EXTENDED bor ?wxLB_HSCROLL
+ bor ?wxLB_NEEDED_SB}]),
+
+ AddButton = wxButton:new(Dialog, ?MacroAdd, [{label, "Add"}]),
+ DeleteButton = wxButton:new(Dialog, ?MacroDel, [{label, "Delete"}]),
+ DeleteAllButton = wxButton:new(Dialog, ?MacroDelAll, [{label, "Delete All"}]),
+ Ok = wxButton:new(Dialog, ?MacroOk, [{label, "OK"}]),
+ Cancel = wxButton:new(Dialog, ?MacroCancel, [{label, "Cancel"}]),
+ wxButton:connect(AddButton, command_button_clicked),
+ wxButton:connect(DeleteButton, command_button_clicked),
+ wxButton:connect(DeleteAllButton, command_button_clicked),
+ wxButton:connect(Ok, command_button_clicked),
+ wxButton:connect(Cancel, command_button_clicked),
+
+ Macros = [io_lib:format("~p = ~p", [X, Y])
+ || {X,Y} <- Options#options.defines],
+
+ wxListBox:set(Box, Macros),
+ Layout = wxBoxSizer:new(?wxVERTICAL),
+ Item = wxBoxSizer:new(?wxHORIZONTAL),
+ MacroItem = wxBoxSizer:new(?wxVERTICAL),
+ TermItem = wxBoxSizer:new(?wxVERTICAL),
+ Buttons = wxBoxSizer:new(?wxHORIZONTAL),
+ Buttons1 = wxBoxSizer:new(?wxHORIZONTAL),
+
+ wxSizer:add(MacroItem, MacroLabel, ?BorderOpt),
+ wxSizer:add(MacroItem, MacroText, ?BorderOpt),
+ wxSizer:add(TermItem, TermLabel, ?BorderOpt),
+ wxSizer:add(TermItem, TermText, ?BorderOpt),
+ wxSizer:add(Item, MacroItem),
+ wxSizer:add(Item, TermItem),
+ wxSizer:add(Layout, Item, [{flag, ?wxALIGN_CENTER_HORIZONTAL}]),
+ wxSizer:add(Layout, AddButton, [{flag, ?wxALIGN_CENTER_HORIZONTAL bor ?wxALL}, ?Border]),
+ wxSizer:add(Layout, Box, [{flag, ?wxALIGN_CENTER_HORIZONTAL bor ?wxALL}, ?Border]),
+ wxSizer:add(Buttons, DeleteButton, ?BorderOpt),
+ wxSizer:add(Buttons, DeleteAllButton, ?BorderOpt),
+ wxSizer:add(Layout, Buttons, [{flag, ?wxALIGN_CENTER_HORIZONTAL}]),
+ wxSizer:add(Buttons1, Ok, ?BorderOpt),
+ wxSizer:add(Buttons1, Cancel, ?BorderOpt),
+ wxSizer:add(Layout, Buttons1, [{flag, ?wxALIGN_RIGHT bor ?wxBOTTOM}]),
+
+ wxFrame:connect(Dialog, close_window),
+ wxWindow:setSizer(Dialog, Layout),
+ wxFrame:show(Dialog),
+ macro_loop(Options, Dialog, Box, MacroText, TermText, Frame).
+
+macro_loop(Options, Win, Box, MacroText, TermText, Frame) ->
+ receive
+ #wx{id = ?MacroCancel,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win),
+ Options;
+ #wx{id = ?MacroDir, event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win),
+ Options;
+ #wx{event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win),
+ wxWindow:destroy(Frame);
+ #wx{id = ?MacroOk,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win),
+ Options;
+ #wx{id = ?MacroAdd,
+ event = #wxCommand{type = command_button_clicked}} ->
+ Defines = Options#options.defines,
+ NewDefines =
+ case wxTextCtrl:getValue(MacroText) of
+ "" -> Defines;
+ Macro ->
+ case wxTextCtrl:getValue(TermText) of
+ "" ->
+ orddict:store(list_to_atom(Macro), true, Defines);
+ String ->
+ orddict:store(list_to_atom(Macro), String, Defines)
+ end
+ end,
+ NewOptions = Options#options{defines = NewDefines},
+ NewEntries = [io_lib:format("~p = ~p", [X, Y]) || {X, Y} <- NewDefines],
+ wxListBox:set(Box, NewEntries),
+ macro_loop(NewOptions, Win, Box, MacroText, TermText, Frame);
+ #wx{id = ?MacroDel,
+ event = #wxCommand{type = command_button_clicked}} ->
+ NewOptions =
+ case wxListBox:getSelections(Box) of
+ {0, _} -> Options;
+ {_, List} ->
+ Fun =
+ fun(X) ->
+ Val = wxControlWithItems:getString(Box,X),
+ [MacroName|_] = re:split(Val, " ", [{return, list}]),
+ list_to_atom(MacroName)
+ end,
+ Delete = [Fun(X) || X <- List],
+ lists:foreach(fun (X) -> wxListBox:delete(Box, X) end, List),
+ Defines = Options#options.defines,
+ NewDefines = lists:foldl(fun(X, Acc) ->
+ orddict:erase(X, Acc)
+ end,
+ Defines, Delete),
+ Options#options{defines = NewDefines}
+ end,
+ macro_loop(NewOptions, Win, Box, MacroText, TermText, Frame);
+ #wx{id = ?MacroDelAll,
+ event = #wxCommand{type = command_button_clicked}} ->
+ wxListBox:clear(Box),
+ NewOptions = Options#options{defines = []},
+ macro_loop(NewOptions, Win, Box, MacroText, TermText, Frame)
+ end.
+
+handle_help(State, Title, Txt) ->
+ FileName = filename:join([code:lib_dir(dialyzer), "doc", Txt]),
+ case file:open(FileName, [read]) of
+ {error, Reason} ->
+ error_sms(State,
+ io_lib:format("Could not find doc/~s file!\n\n ~p",
+ [Txt, Reason]));
+ {ok, Handle} ->
+ ok = file:close(Handle),
+ case file:read_file(FileName) of
+ {error, Reason} ->
+ error_sms(State,
+ io_lib:format("Could not read doc/~s file!\n\n ~p",
+ [Txt, Reason]));
+ {ok, Binary} ->
+ Contents = binary_to_list(Binary),
+ free_editor(State, Title, Contents)
+ end
+ end.
+
+add_warnings(#gui_state{warnings_box = WarnBox,
+ rawWarnings = RawWarns} = State, Warnings) ->
+ NewRawWarns = RawWarns ++ Warnings,
+ WarnList = [dialyzer:format_warning(W) -- "\n" || W <- NewRawWarns],
+ wxListBox:set(WarnBox, WarnList),
+ State#gui_state{rawWarnings = NewRawWarns}.
+
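+%% Ask the explanation process for an explanation of the currently
+%% selected warning and wait for its reply.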
+handle_explanation(#gui_state{rawWarnings = RawWarns,
+ warnings_box = WarnBox,
+ expl_pid = ExplPid} = State) ->
+ case wxListBox:isEmpty(WarnBox) of
+ true -> error_sms(State, "\nThere are no warnings.\nRun the dialyzer first.");
+ false ->
+ case wxListBox:getSelections(WarnBox) of
+ {0, []} ->
+ error_sms(State,"\nYou must choose a warning to be explained\n");
+ {_, [WarnNumber]} ->
+ Warn = lists:nth(WarnNumber+1,RawWarns),
+ Self = self(),
+ ExplPid ! {Self, warning, Warn},
+ explanation_loop(State)
+ end
+ end.
+
+explanation_loop(#gui_state{expl_pid = ExplPid} = State) ->
+ receive
+ {ExplPid, explanation, Explanation} ->
+ show_explanation(State, Explanation);
+ _ -> io:format("Unknown message\n"),
+ explanation_loop(State)
+ end.
+
+show_explanation(#gui_state{gui = Wx} = State, Explanation) ->
+ case Explanation of
+ none ->
+ output_sms(State, ?DIALYZER_MESSAGE_TITLE,
+ "There is not any explanation for this error!\n", info);
+ Expl ->
+ ExplString = format_explanation(Expl),
+ Size = {size,{700, 300}},
+ Win = wxFrame:new(Wx, ?ExplWin, "Dialyzer Explanation", [{size,{740, 350}}]),
+
+ Editor = wxTextCtrl:new(Win, ?ExplText,
+ [Size,
+ {style, ?wxTE_MULTILINE
+ bor ?wxTE_READONLY bor ?wxVSCROLL bor ?wxEXPAND}]),
+ wxTextCtrl:appendText(Editor, ExplString),
+ wxFrame:connect(Win, close_window),
+ ExplButton = wxButton:new(Win, ?ExplButton, [{label, "Further Explain"}]),
+ wxButton:connect(ExplButton, command_button_clicked),
+ Ok = wxButton:new(Win, ?ExplOk, [{label, "OK"}]),
+ wxButton:connect(Ok, command_button_clicked),
+ Layout = wxBoxSizer:new(?wxVERTICAL),
+ Buttons = wxBoxSizer:new(?wxHORIZONTAL),
+ wxSizer:add(Buttons, ExplButton, ?BorderOpt),
+ wxSizer:add(Buttons, Ok, ?BorderOpt),
+ wxSizer:add(Layout, Editor,[{flag, ?wxALIGN_CENTER_HORIZONTAL bor ?wxALL}, ?Border]),
+ wxSizer:add(Layout, Buttons,[{flag, ?wxALIGN_CENTER_HORIZONTAL}]),
+ wxWindow:setSizer(Win, Layout),
+ wxWindow:show(Win),
+ show_explanation_loop(State#gui_state{explanation_box = Editor}, Win, Explanation)
+ end.
+
+show_explanation_loop(#gui_state{frame = Frame, expl_pid = ExplPid} = State, Win, Explanation) ->
+ receive
+ {ExplPid, none, _} ->
+ output_sms(State, ?DIALYZER_MESSAGE_TITLE,
+ "There is not any other explanation for this error!\n", info),
+ show_explanation_loop(State, Win, Explanation);
+ {ExplPid, further, NewExplanation} ->
+ update_explanation(State, NewExplanation),
+ show_explanation_loop(State, Win, NewExplanation);
+ #wx{id = ?ExplButton, event = #wxCommand{type = command_button_clicked}} ->
+ ExplPid ! {self(), further, Explanation},
+ show_explanation_loop(State, Win, Explanation);
+ #wx{id = ?ExplOk, event = #wxCommand{type = command_button_clicked}} ->
+ wxWindow:destroy(Win);
+ #wx{id = ?ExplWin, event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Win);
+ #wx{event = #wxClose{type = close_window}} ->
+ wxWindow:destroy(Frame)
+ end.
+
+update_explanation(#gui_state{explanation_box = Box}, Explanation) ->
+ ExplString = format_explanation(Explanation),
+ wxTextCtrl:appendText(Box, "\n --------------------------- \n"),
+ wxTextCtrl:appendText(Box, ExplString).
+
+format_explanation({function_return, {M, F, A}, NewList}) ->
+ io_lib:format("The function ~p: ~p/~p returns ~p\n",
+ [M, F, A, erl_types:t_to_string(NewList)]);
+format_explanation(Explanation) ->
+ io_lib:format("~p\n", [Explanation]).
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.hrl b/lib/dialyzer/src/dialyzer_gui_wx.hrl
new file mode 100644
index 0000000000..e81eeb1ab5
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_gui_wx.hrl
@@ -0,0 +1,112 @@
+-include_lib("wx/include/wx.hrl").
+
+
+-define(TEXTSIZE, 9).
+
+-define(Border, {border, 2}).
+-define(BorderOpt,[{flag,?wxALL}, ?Border]).
+
+-define(menuID_FILE_QUIT, 102).
+-define(menuID_FILE_SAVE_WARNINGS, 100).
+-define(menuID_FILE_SAVE_LOG, 101).
+
+-define(menuID_WARN_MATCH_FAILURES, 200).
+-define(menuID_WARN_FAIL_FUN_CALLS, 201).
+-define(menuID_WARN_BAD_FUN, 202).
+-define(menuID_WARN_OPAQUE, 203).
+-define(menuID_WARN_LIST_CONSTR, 204).
+-define(menuID_WARN_UNUSED_FUN, 205).
+-define(menuID_WARN_ERROR_HANDLING_FUN, 206).
+-define(menuID_WARN_NO_RETURN_FUN, 207).
+-define(menuID_WARN_UNEXPORTED_FUN, 208).
+-define(menuID_WARN_RACE_CONDITIONS, 209).
+-define(menuID_WARN_WRONG_CONTRACTS, 210).
+-define(menuID_WARN_CONTRACT_SYNTAX, 211).
+
+-define(menuID_PLT_INIT_EMPTY, 300).
+-define(menuID_PLT_SHOW_CONTENTS, 301).
+-define(menuID_PLT_SEARCH_CONTENTS, 302).
+
+-define(menuID_OPTIONS_MACRO, 400).
+-define(menuID_OPTIONS_INCLUDE_DIR, 401).
+
+-define(menuID_HELP_MANUAL, 500).
+-define(menuID_HELP_WARNING_OPTIONS, 501).
+-define(menuID_HELP_ABOUT, 499).
+
+-define(LABEL1,502).
+-define(LABEL2,503).
+-define(LABEL3,504).
+-define(LABEL4,505).
+-define(LABEL5,505).
+-define(LABEL6,506).
+-define(LABEL7,507).
+-define(LABEL8,508).
+-define(LABEL9,509).
+
+-define(ChosenBox,510).
+-define(LogBox,511).
+-define(FilePicker,512).
+-define(DirPicker,513).
+-define(WarningsBox,521).
+
+-define(Del_Button,514).
+-define(DelAll_Button,515).
+-define(ClearLog_Button,516).
+-define(Add_Button,517).
+-define(AddDir_Button,532).
+-define(AddRec_Button,518).
+-define(ClearWarn_Button,519).
+-define(Run_Button,520).
+-define(Stop_Button,522).
+-define(ExplWarn_Button,523).
+-define(RADIOBOX, 524).
+
+-define(Dialog, 525).
+-define(Dialog_Ok, 526).
+-define(Dialog_Cancel, 527).
+-define(Dialog_Mes, 528).
+
+-define(MESSAGE, 529).
+-define(Message_Info, 530).
+-define(Message_Ok, 531).
+
+
+-define(Message, 534).
+-define(SaveWarn, 533).
+-define(SearchPltDialog, 535).
+-define(ModLabel, 536).
+-define(FunLabel, 537).
+-define(ArLabel, 538).
+-define(ModText, 539).
+-define(FunText, 540).
+-define(ArText, 541).
+-define(SearchButton, 542).
+-define(Search_Cancel, 543).
+
+-define(IncludeDir, 544).
+-define(InclLabel, 545).
+-define(InclPicker, 546).
+-define(InclBox, 547).
+-define(InclAdd, 548).
+-define(InclDel, 549).
+-define(InclDelAll, 550).
+-define(InclOk, 551).
+-define(InclCancel, 552).
+
+-define(MacroDir, 553).
+-define(MacroLabel, 554).
+-define(MacroText, 555).
+-define(TermLabel, 556).
+-define(TermText, 557).
+-define(MacroBox, 558).
+-define(MacroAdd, 559).
+-define(MacroDel, 560).
+-define(MacroDelAll, 561).
+-define(MacroOk, 562).
+-define(MacroCancel, 563).
+
+-define(ExplWin, 564).
+-define(ExplText, 565).
+-define(ExplButton, 566).
+-define(ExplOk, 567).
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
new file mode 100644
index 0000000000..6531073072
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -0,0 +1,269 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%----------------------------------------------------------------------
+%%% File : dialyzer_options.erl
+%%% Authors : Richard Carlsson <[email protected]>
+%%% Description : Builds and validates the options that control a
+%%%               Dialyzer run, e.g. when Dialyzer is started from a script.
+%%%
+%%% Created : 17 Oct 2004 by Richard Carlsson <[email protected]>
+%%%----------------------------------------------------------------------
+
+-module(dialyzer_options).
+
+-export([build/1]).
+
+-include("dialyzer.hrl").
+
+%%-----------------------------------------------------------------------
+
+-spec build(dial_options()) -> #options{} | {'error', string()}.
+
+build(Opts) ->
+ DefaultWarns = [?WARN_RETURN_NO_RETURN,
+ ?WARN_NOT_CALLED,
+ ?WARN_NON_PROPER_LIST,
+ ?WARN_FUN_APP,
+ ?WARN_MATCHING,
+ ?WARN_OPAQUE,
+ ?WARN_CALLGRAPH,
+ ?WARN_FAILING_CALL,
+ ?WARN_BIN_CONSTRUCTION,
+ ?WARN_CONTRACT_TYPES,
+ ?WARN_CONTRACT_SYNTAX],
+ DefaultWarns1 = ordsets:from_list(DefaultWarns),
+ InitPlt = dialyzer_plt:get_default_plt(),
+ DefaultOpts = #options{},
+ DefaultOpts1 = DefaultOpts#options{legal_warnings = DefaultWarns1,
+ init_plt = InitPlt},
+ try
+ NewOpts = build_options(Opts, DefaultOpts1),
+ postprocess_opts(NewOpts)
+ catch
+ throw:{dialyzer_options_error, Msg} -> {error, Msg}
+ end.
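As a quick orientation, a minimal sketch of how build/1 above is typically driven; nothing below is part of the commit: the module name, the "ebin" path and the chosen options are invented, and dialyzer.hrl is assumed to be on the include path so that #options{} is visible.

    -module(build_opts_sketch).           %% hypothetical helper module
    -export([opts/0]).
    -include("dialyzer.hrl").             %% for the #options{} record

    opts() ->
      %% Recursive byte-code analysis under ./ebin, with the
      %% unmatched_returns warning enabled in addition to the defaults.
      case dialyzer_options:build([{files_rec, ["ebin"]},
                                   {from, byte_code},
                                   {warnings, [unmatched_returns]}]) of
        {error, Msg}      -> erlang:error(Msg);
        #options{} = Opts -> Opts
      end.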
+
+postprocess_opts(Opts = #options{}) ->
+ Opts1 = check_output_plt(Opts),
+ adapt_get_warnings(Opts1).
+
+check_output_plt(Opts = #options{analysis_type = Mode, from = From,
+ output_plt = OutPLT}) ->
+ case is_plt_mode(Mode) of
+ true ->
+ case From =:= byte_code of
+ true -> Opts;
+ false ->
+ Msg = "Byte code compiled with debug_info is needed to build the PLT",
+ throw({dialyzer_error, Msg})
+ end;
+ false ->
+ case OutPLT =:= none of
+ true -> Opts;
+ false ->
+ Msg = io_lib:format("Output PLT cannot be specified "
+ "in analysis mode ~w", [Mode]),
+ throw({dialyzer_error, lists:flatten(Msg)})
+ end
+ end.
+
+adapt_get_warnings(Opts = #options{analysis_type = Mode,
+ get_warnings = Warns}) ->
+ %% Warnings are off by default in PLT mode and on by default in
+ %% success typings mode. A user-defined warning mode overrides the
+ %% default.
+ case is_plt_mode(Mode) of
+ true ->
+ case Warns =:= maybe of
+ true -> Opts#options{get_warnings = false};
+ false -> Opts
+ end;
+ false ->
+ case Warns =:= maybe of
+ true -> Opts#options{get_warnings = true};
+ false -> Opts
+ end
+ end.
+
+-spec bad_option(string(), term()) -> no_return().
+
+bad_option(String, Term) ->
+ Msg = io_lib:format("~s: ~P", [String, Term, 25]),
+ throw({dialyzer_options_error, lists:flatten(Msg)}).
+
+build_options([{OptName, undefined}|Rest], Options) when is_atom(OptName) ->
+ build_options(Rest, Options);
+build_options([{OptionName, Value} = Term|Rest], Options) ->
+ case OptionName of
+ files ->
+ assert_filenames(Term, Value),
+ build_options(Rest, Options#options{files = Value});
+ files_rec ->
+ assert_filenames(Term, Value),
+ build_options(Rest, Options#options{files_rec = Value});
+ analysis_type ->
+ NewOptions =
+ case Value of
+ succ_typings -> Options#options{analysis_type = Value};
+ plt_add -> Options#options{analysis_type = Value};
+ plt_build -> Options#options{analysis_type = Value};
+ plt_check -> Options#options{analysis_type = Value};
+ plt_remove -> Options#options{analysis_type = Value};
+ dataflow -> bad_option("Analysis type is no longer supported", Term);
+ old_style -> bad_option("Analysis type is no longer supported", Term);
+ Other -> bad_option("Unknown analysis type", Other)
+ end,
+ assert_plt_op(Options, NewOptions),
+ build_options(Rest, NewOptions);
+ check_plt when is_boolean(Value) ->
+ build_options(Rest, Options#options{check_plt = Value});
+ defines ->
+ assert_defines(Term, Value),
+ OldVal = Options#options.defines,
+ NewVal = ordsets:union(ordsets:from_list(Value), OldVal),
+ build_options(Rest, Options#options{defines = NewVal});
+ from when Value =:= byte_code; Value =:= src_code ->
+ build_options(Rest, Options#options{from = Value});
+ get_warnings ->
+ build_options(Rest, Options#options{get_warnings = Value});
+ init_plt ->
+ assert_filenames([Term], [Value]),
+ build_options(Rest, Options#options{init_plt = Value});
+ include_dirs ->
+ assert_filenames(Term, Value),
+ OldVal = Options#options.include_dirs,
+ NewVal = ordsets:union(ordsets:from_list(Value), OldVal),
+ build_options(Rest, Options#options{include_dirs = NewVal});
+ use_spec ->
+ build_options(Rest, Options#options{use_contracts = Value});
+ old_style ->
+ bad_option("Analysis type is no longer supported", old_style);
+ output_file ->
+ assert_filename(Value),
+ build_options(Rest, Options#options{output_file = Value});
+ output_format ->
+ assert_output_format(Value),
+ build_options(Rest, Options#options{output_format = Value});
+ output_plt ->
+ assert_filename(Value),
+ build_options(Rest, Options#options{output_plt = Value});
+ report_mode ->
+ build_options(Rest, Options#options{report_mode = Value});
+ erlang_mode ->
+ build_options(Rest, Options#options{erlang_mode = true});
+ warnings ->
+ NewWarnings = build_warnings(Value, Options#options.legal_warnings),
+ build_options(Rest, Options#options{legal_warnings = NewWarnings});
+ callgraph_file ->
+ assert_filename(Value),
+ build_options(Rest, Options#options{callgraph_file = Value});
+ _ ->
+ bad_option("Unknown dialyzer command line option", Term)
+ end;
+build_options([], Options) ->
+ Options.
+
+assert_filenames(Term, [FileName|Left]) when length(FileName) >= 0 ->
+ case filelib:is_file(FileName) orelse filelib:is_dir(FileName) of
+ true -> ok;
+ false -> bad_option("No such file or directory", FileName)
+ end,
+ assert_filenames(Term, Left);
+assert_filenames(_Term, []) ->
+ ok;
+assert_filenames(Term, [_|_]) ->
+ bad_option("Malformed or non-existing filename", Term).
+
+assert_filename(FileName) when length(FileName) >= 0 ->
+ ok;
+assert_filename(FileName) ->
+ bad_option("Malformed or non-existing filename", FileName).
+
+assert_defines(Term, [{Macro, _Value}|Defs]) when is_atom(Macro) ->
+ assert_defines(Term, Defs);
+assert_defines(_Term, []) ->
+ ok;
+assert_defines(Term, [_|_]) ->
+ bad_option("Malformed define", Term).
+
+assert_output_format(raw) ->
+ ok;
+assert_output_format(formatted) ->
+ ok;
+assert_output_format(Term) ->
+ bad_option("Illegal value for output_format", Term).
+
+assert_plt_op(#options{analysis_type = OldVal},
+ #options{analysis_type = NewVal}) ->
+ case is_plt_mode(OldVal) andalso is_plt_mode(NewVal) of
+ true -> bad_option("Options cannot be combined", [OldVal, NewVal]);
+ false -> ok
+ end.
+
+is_plt_mode(plt_add) -> true;
+is_plt_mode(plt_build) -> true;
+is_plt_mode(plt_remove) -> true;
+is_plt_mode(plt_check) -> true;
+is_plt_mode(succ_typings) -> false.
+
+-spec build_warnings([atom()], [dial_warning()]) -> [dial_warning()].
+
+build_warnings([Opt|Opts], Warnings) ->
+ NewWarnings =
+ case Opt of
+ no_return ->
+ ordsets:del_element(?WARN_RETURN_NO_RETURN, Warnings);
+ no_unused ->
+ ordsets:del_element(?WARN_NOT_CALLED, Warnings);
+ no_improper_lists ->
+ ordsets:del_element(?WARN_NON_PROPER_LIST, Warnings);
+ no_fun_app ->
+ ordsets:del_element(?WARN_FUN_APP, Warnings);
+ no_match ->
+ ordsets:del_element(?WARN_MATCHING, Warnings);
+ no_opaque ->
+ ordsets:del_element(?WARN_OPAQUE, Warnings);
+ no_fail_call ->
+ ordsets:del_element(?WARN_FAILING_CALL, Warnings);
+ no_contracts ->
+ Warnings1 = ordsets:del_element(?WARN_CONTRACT_SYNTAX, Warnings),
+ ordsets:del_element(?WARN_CONTRACT_TYPES, Warnings1);
+ unmatched_returns ->
+ ordsets:add_element(?WARN_UNMATCHED_RETURN, Warnings);
+ error_handling ->
+ ordsets:add_element(?WARN_RETURN_ONLY_EXIT, Warnings);
+ race_conditions ->
+ ordsets:add_element(?WARN_RACE_CONDITION, Warnings);
+ specdiffs ->
+ S = ordsets:from_list([?WARN_CONTRACT_SUBTYPE,
+ ?WARN_CONTRACT_SUPERTYPE,
+ ?WARN_CONTRACT_NOT_EQUAL]),
+ ordsets:union(S, Warnings);
+ overspecs ->
+ ordsets:add_element(?WARN_CONTRACT_SUBTYPE, Warnings);
+ underspecs ->
+ ordsets:add_element(?WARN_CONTRACT_SUPERTYPE, Warnings);
+ OtherAtom ->
+ bad_option("Unknown dialyzer warning option", OtherAtom)
+ end,
+ build_warnings(Opts, NewWarnings);
+build_warnings([], Warnings) ->
+ Warnings.
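To see the mapping above end to end through the exported API, a hedged fragment; it assumes dialyzer.hrl is included so the ?WARN_* macros and #options{} are visible, and it belongs inside some function body:

    %% no_unused removes ?WARN_NOT_CALLED from the defaults;
    %% race_conditions adds ?WARN_RACE_CONDITION on top of them.
    #options{legal_warnings = Ws} =
      dialyzer_options:build([{warnings, [no_unused, race_conditions]}]),
    false = ordsets:is_element(?WARN_NOT_CALLED, Ws),
    true  = ordsets:is_element(?WARN_RACE_CONDITION, Ws).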
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
new file mode 100644
index 0000000000..f2e0fe1e97
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -0,0 +1,576 @@
+%% -*- erlang-indent-level: 2 -*-
+%%----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_plt.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Interface to the persistent lookup table (PLT):
+%%%               creating, reading, writing, checking and displaying
+%%%               its contents.
+%%%
+%%% Created : 23 Jul 2004 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_plt).
+
+-export([check_plt/3,
+ compute_md5_from_files/1,
+ contains_mfa/2,
+ contains_module/2,
+ delete_contract_list/2,
+ delete_list/2,
+ delete_module/2,
+ included_files/1,
+ from_file/1,
+ get_default_plt/0,
+ get_types/1,
+ %% insert/3,
+ insert_list/2,
+ insert_contract_list/2,
+ insert_types/2,
+ lookup/2,
+ lookup_contract/2,
+ lookup_module/2,
+ merge_plts/1,
+ new/0,
+ plt_and_info_from_file/1,
+ get_specs/1,
+ get_specs/4,
+ to_file/4
+ ]).
+
+%% Debug utilities
+-export([pp_non_returning/0, pp_mod/1]).
+
+%%----------------------------------------------------------------------
+
+-type mod_deps() :: dict().
+
+%% The following are used for searching the PLT when using the GUI
+%% (e.g. in show or search PLT contents). The user might be searching
+%% with a partial specification, in which case the missing items
+%% default to '_'.
+-type arity_patt() :: '_' | arity().
+-type mfa_patt() :: {module(), atom(), arity_patt()}.
+
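For concreteness, two patterns of this shape; the module and function are arbitrary examples, not taken from the commit:

    {lists, map, 2}      %% fully specified MFA
    {lists, map, '_'}    %% same search with the arity left unspecified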
+%%----------------------------------------------------------------------
+
+-record(dialyzer_plt, {info = table_new() :: dict(),
+ types = table_new() :: dict(),
+ contracts = table_new() :: dict()}).
+-opaque plt() :: #dialyzer_plt{}.
+
+-include("dialyzer.hrl").
+
+-type file_md5() :: {file:filename(), binary()}.
+-type plt_info() :: {[file_md5()], dict()}.
+
+-record(dialyzer_file_plt, {version = "" :: string(),
+ file_md5_list = [] :: [file_md5()],
+ info = dict:new() :: dict(),
+ contracts = dict:new() :: dict(),
+ types = dict:new() :: dict(),
+ mod_deps :: mod_deps(),
+ implementation_md5 = [] :: [file_md5()]
+ }).
+
+%%----------------------------------------------------------------------
+
+-spec new() -> plt().
+
+new() ->
+ #dialyzer_plt{}.
+
+-spec delete_module(plt(), module()) -> plt().
+
+delete_module(#dialyzer_plt{info = Info, types = Types, contracts = Contracts},
+ Mod) ->
+ #dialyzer_plt{info = table_delete_module(Info, Mod),
+ types = table_delete_module2(Types, Mod),
+ contracts = table_delete_module(Contracts, Mod)}.
+
+-spec delete_list(plt(), [mfa() | integer()]) -> plt().
+
+delete_list(#dialyzer_plt{info = Info, types = Types, contracts = Contracts},
+ List) ->
+ #dialyzer_plt{info = table_delete_list(Info, List),
+ types = Types,
+ contracts = table_delete_list(Contracts, List)}.
+
+-spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt().
+
+insert_contract_list(#dialyzer_plt{contracts = Contracts} = PLT, List) ->
+ PLT#dialyzer_plt{contracts = table_insert_list(Contracts, List)}.
+
+-spec lookup_contract(plt(), mfa_patt()) -> 'none' | {'value', #contract{}}.
+
+lookup_contract(#dialyzer_plt{contracts = Contracts},
+ {M, F, _} = MFA) when is_atom(M), is_atom(F) ->
+ table_lookup(Contracts, MFA).
+
+-spec delete_contract_list(plt(), [mfa()]) -> plt().
+
+delete_contract_list(#dialyzer_plt{contracts = Contracts} = PLT, List) ->
+ PLT#dialyzer_plt{contracts = table_delete_list(Contracts, List)}.
+
+%% -spec insert(plt(), mfa() | integer(), {_, _}) -> plt().
+%%
+%% insert(#dialyzer_plt{info = Info} = PLT, Id, Types) ->
+%% PLT#dialyzer_plt{info = table_insert(Info, Id, Types)}.
+
+-type ret_args_types() :: {erl_types:erl_type(), [erl_types:erl_type()]}.
+
+-spec insert_list(plt(), [{mfa() | integer(), ret_args_types()}]) -> plt().
+
+insert_list(#dialyzer_plt{info = Info} = PLT, List) ->
+ PLT#dialyzer_plt{info = table_insert_list(Info, List)}.
+
+-spec lookup(plt(), integer() | mfa_patt()) ->
+ 'none' | {'value', ret_args_types()}.
+
+lookup(#dialyzer_plt{info = Info}, {M, F, _} = MFA) when is_atom(M), is_atom(F) ->
+ table_lookup(Info, MFA);
+lookup(#dialyzer_plt{info = Info}, Label) when is_integer(Label) ->
+ table_lookup(Info, Label).
+
+-spec insert_types(plt(), dict()) -> plt().
+
+insert_types(PLT, Rec) ->
+ PLT#dialyzer_plt{types = Rec}.
+
+-spec get_types(plt()) -> dict().
+
+get_types(#dialyzer_plt{types = Types}) ->
+ Types.
+
+-type mfa_types() :: {mfa(), erl_types:erl_type(), [erl_types:erl_type()]}.
+
+-spec lookup_module(plt(), module()) -> 'none' | {'value', [mfa_types()]}.
+
+lookup_module(#dialyzer_plt{info = Info}, M) when is_atom(M) ->
+ table_lookup_module(Info, M).
+
+-spec contains_module(plt(), module()) -> boolean().
+
+contains_module(#dialyzer_plt{info = Info, contracts = Cs}, M) when is_atom(M) ->
+ table_contains_module(Info, M) orelse table_contains_module(Cs, M).
+
+-spec contains_mfa(plt(), mfa()) -> boolean().
+
+contains_mfa(#dialyzer_plt{info = Info, contracts = Contracts}, MFA) ->
+ (table_lookup(Info, MFA) =/= none)
+ orelse (table_lookup(Contracts, MFA) =/= none).
+
+-spec get_default_plt() -> file:filename().
+
+get_default_plt() ->
+ case os:getenv("DIALYZER_PLT") of
+ false ->
+ case os:getenv("HOME") of
+ false ->
+ error("The HOME environment variable needs to be set " ++
+ "so that Dialyzer knows where to find the default PLT");
+ HomeDir -> filename:join(HomeDir, ".dialyzer_plt")
+ end;
+ UserSpecPlt -> UserSpecPlt
+ end.
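A small usage sketch of the lookup order above; the path is made up and the shell session is illustrative only (with DIALYZER_PLT unset, the result is $HOME/.dialyzer_plt instead):

    1> os:putenv("DIALYZER_PLT", "/tmp/my.plt").
    true
    2> dialyzer_plt:get_default_plt().
    "/tmp/my.plt"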
+
+-spec plt_and_info_from_file(file:filename()) -> {plt(), plt_info()}.
+
+plt_and_info_from_file(FileName) ->
+ from_file(FileName, true).
+
+-spec from_file(file:filename()) -> plt().
+
+from_file(FileName) ->
+ from_file(FileName, false).
+
+from_file(FileName, ReturnInfo) ->
+ case get_record_from_file(FileName) of
+ {ok, Rec} ->
+ case check_version(Rec) of
+ error ->
+ Msg = io_lib:format("Old PLT file ~s\n", [FileName]),
+ error(Msg);
+ ok ->
+ Plt = #dialyzer_plt{info = Rec#dialyzer_file_plt.info,
+ types = Rec#dialyzer_file_plt.types,
+ contracts = Rec#dialyzer_file_plt.contracts},
+ case ReturnInfo of
+ false -> Plt;
+ true ->
+ PltInfo = {Rec#dialyzer_file_plt.file_md5_list,
+ Rec#dialyzer_file_plt.mod_deps},
+ {Plt, PltInfo}
+ end
+ end;
+ {error, Reason} ->
+ error(io_lib:format("Could not read PLT file ~s: ~p\n",
+ [FileName, Reason]))
+ end.
+
+-type inc_file_err_rsn() :: 'no_such_file' | 'read_error'.
+-spec included_files(file:filename()) -> {'ok', [file:filename()]}
+ | {'error', inc_file_err_rsn()}.
+
+included_files(FileName) ->
+ case get_record_from_file(FileName) of
+ {ok, #dialyzer_file_plt{file_md5_list = Md5}} ->
+ {ok, [File || {File, _} <- Md5]};
+ {error, _What} = Error ->
+ Error
+ end.
+
+check_version(#dialyzer_file_plt{version=?VSN, implementation_md5=ImplMd5}) ->
+ case compute_new_md5(ImplMd5, [], []) of
+ ok -> ok;
+ {differ, _, _} -> error;
+ {error, _} -> error
+ end;
+check_version(#dialyzer_file_plt{}) -> error.
+
+get_record_from_file(FileName) ->
+ case file:read_file(FileName) of
+ {ok, Bin} ->
+ try binary_to_term(Bin) of
+ #dialyzer_file_plt{} = FilePLT -> {ok, FilePLT};
+ _ -> {error, not_valid}
+ catch
+ _:_ -> {error, not_valid}
+ end;
+ {error, enoent} ->
+ {error, no_such_file};
+ {error, _} ->
+ {error, read_error}
+ end.
+
+-spec merge_plts([plt()]) -> plt().
+
+merge_plts(List) ->
+ InfoList = [Info || #dialyzer_plt{info = Info} <- List],
+ TypesList = [Types || #dialyzer_plt{types = Types} <- List],
+ ContractsList = [Contracts || #dialyzer_plt{contracts = Contracts} <- List],
+ #dialyzer_plt{info = table_merge(InfoList),
+ types = table_merge(TypesList),
+ contracts = table_merge(ContractsList)}.
+
+-spec to_file(file:filename(), plt(), mod_deps(), {[file_md5()], mod_deps()}) -> 'ok'.
+
+to_file(FileName,
+ #dialyzer_plt{info = Info, types = Types, contracts = Contracts},
+ ModDeps, {MD5, OldModDeps}) ->
+ NewModDeps = dict:merge(fun(_Key, OldVal, NewVal) ->
+ ordsets:union(OldVal, NewVal)
+ end,
+ OldModDeps, ModDeps),
+ ImplMd5 = compute_implementation_md5(),
+ Record = #dialyzer_file_plt{version = ?VSN,
+ file_md5_list = MD5,
+ info = Info,
+ contracts = Contracts,
+ types = Types,
+ mod_deps = NewModDeps,
+ implementation_md5 = ImplMd5},
+ Bin = term_to_binary(Record, [compressed]),
+ case file:write_file(FileName, Bin) of
+ ok -> ok;
+ {error, Reason} ->
+ Msg = io_lib:format("Could not write PLT file ~s: ~w\n",
+ [FileName, Reason]),
+ throw({dialyzer_error, Msg})
+ end.
+
+-type md5_diff() :: [{'differ', atom()} | {'removed', atom()}].
+-type check_error() :: 'not_valid' | 'no_such_file' | 'read_error'
+ | {'no_file_to_remove', file:filename()}.
+
+-spec check_plt(file:filename(), [file:filename()], [file:filename()]) ->
+ 'ok'
+ | {'error', check_error()}
+ | {'differ', [file_md5()], md5_diff(), mod_deps()}
+ | {'old_version', [file_md5()]}.
+
+check_plt(FileName, RemoveFiles, AddFiles) ->
+ case get_record_from_file(FileName) of
+ {ok, #dialyzer_file_plt{file_md5_list = Md5, mod_deps = ModDeps} = Rec} ->
+ case check_version(Rec) of
+ ok ->
+ case compute_new_md5(Md5, RemoveFiles, AddFiles) of
+ ok -> ok;
+ {differ, NewMd5, DiffMd5} -> {differ, NewMd5, DiffMd5, ModDeps};
+ {error, _What} = Err -> Err
+ end;
+ error ->
+ case compute_new_md5(Md5, RemoveFiles, AddFiles) of
+ ok -> {old_version, Md5};
+ {differ, NewMd5, _DiffMd5} -> {old_version, NewMd5};
+ {error, _What} = Err -> Err
+ end
+ end;
+ Error -> Error
+ end.
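A hedged sketch of dispatching on the result of check_plt/3, following its spec above; the path and the atoms on the right-hand sides are invented:

    case dialyzer_plt:check_plt("/home/user/.dialyzer_plt", [], []) of
      ok                              -> up_to_date;
      {old_version, _Md5}             -> full_rebuild_needed;
      {differ, _Md5, _DiffMd5, _Deps} -> incremental_update_possible;
      {error, Reason}                 -> {cannot_check, Reason}
    end.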
+
+compute_new_md5(Md5, [], []) ->
+ compute_new_md5_1(Md5, [], []);
+compute_new_md5(Md5, RemoveFiles0, AddFiles0) ->
+ %% Assume that files are first removed and then added. Files that
+ %% are both removed and added will be checked for consistency in the
+ %% normal way. If they have moved, we assume that they differ.
+ RemoveFiles = RemoveFiles0 -- AddFiles0,
+ AddFiles = AddFiles0 -- RemoveFiles0,
+ InitDiffList = init_diff_list(RemoveFiles, AddFiles),
+ case init_md5_list(Md5, RemoveFiles, AddFiles) of
+ {ok, NewMd5} -> compute_new_md5_1(NewMd5, [], InitDiffList);
+ {error, _What} = Error -> Error
+ end.
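A tiny worked example of the list arithmetic above, with invented file names:

    %% RemoveFiles0 = ["a.beam", "b.beam"],  AddFiles0 = ["b.beam", "c.beam"]
    %% RemoveFiles  = ["a.beam"]   ("b.beam" is in both lists, so it is kept
    %%                              and md5-checked in the normal way)
    %% AddFiles     = ["c.beam"]   (genuinely new; init_md5_list/3 enters it
    %%                              into the md5 list with an empty checksum)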
+
+compute_new_md5_1([{File, Md5} = Entry|Entries], NewList, Diff) ->
+ case compute_md5_from_file(File) of
+ Md5 -> compute_new_md5_1(Entries, [Entry|NewList], Diff);
+ NewMd5 ->
+ ModName = beam_file_to_module(File),
+ compute_new_md5_1(Entries, [{File, NewMd5}|NewList], [{differ, ModName}|Diff])
+ end;
+compute_new_md5_1([], _NewList, []) ->
+ ok;
+compute_new_md5_1([], NewList, Diff) ->
+ {differ, lists:keysort(1, NewList), Diff}.
+
+-spec compute_implementation_md5() -> [file_md5()].
+
+compute_implementation_md5() ->
+ Dir = code:lib_dir(hipe),
+ Files1 = ["erl_bif_types.beam", "erl_types.beam"],
+ Files2 = [filename:join([Dir, "ebin", F]) || F <- Files1],
+ compute_md5_from_files(Files2).
+
+-spec compute_md5_from_files([file:filename()]) -> [file_md5()].
+
+compute_md5_from_files(Files) ->
+ lists:keysort(1, [{F, compute_md5_from_file(F)} || F <- Files]).
+
+compute_md5_from_file(File) ->
+ case filelib:is_regular(File) of
+ false ->
+ Msg = io_lib:format("Not a regular file: ~s\n", [File]),
+ throw({dialyzer_error, Msg});
+ true ->
+ case dialyzer_utils:get_abstract_code_from_beam(File) of
+ error ->
+ Msg = io_lib:format("Could not get abstract code for file: ~s (please recompile it with +debug_info)\n", [File]),
+ throw({dialyzer_error, Msg});
+ {ok, Abs} ->
+ erlang:md5(term_to_binary(Abs))
+ end
+ end.
+
+init_diff_list(RemoveFiles, AddFiles) ->
+ RemoveSet0 = sets:from_list([beam_file_to_module(F) || F <- RemoveFiles]),
+ AddSet0 = sets:from_list([beam_file_to_module(F) || F <- AddFiles]),
+ DiffSet = sets:intersection(AddSet0, RemoveSet0),
+ RemoveSet = sets:subtract(RemoveSet0, DiffSet),
+ %% Added files and diff files will appear as diff files from the md5 check.
+ [{removed, F} || F <- sets:to_list(RemoveSet)].
+
+init_md5_list(Md5, RemoveFiles, AddFiles) ->
+ Files = [{remove, F} || F <- RemoveFiles] ++ [{add, F} || F <- AddFiles],
+ DiffFiles = lists:keysort(2, Files),
+ Md5Sorted = lists:keysort(1, Md5),
+ init_md5_list_1(Md5Sorted, DiffFiles, []).
+
+init_md5_list_1([{File, _Md5}|Md5Left], [{remove, File}|DiffLeft], Acc) ->
+ init_md5_list_1(Md5Left, DiffLeft, Acc);
+init_md5_list_1([{File, _Md5} = Entry|Md5Left], [{add, File}|DiffLeft], Acc) ->
+ init_md5_list_1(Md5Left, DiffLeft, [Entry|Acc]);
+init_md5_list_1([{File1, _Md5} = Entry|Md5Left] = Md5List,
+ [{Tag, File2}|DiffLeft] = DiffList, Acc) ->
+ case File1 < File2 of
+ true -> init_md5_list_1(Md5Left, DiffList, [Entry|Acc]);
+ false ->
+ %% Just an assert.
+ true = File1 > File2,
+ case Tag of
+ add -> init_md5_list_1(Md5List, DiffLeft, [{File2, <<>>}|Acc]);
+ remove -> {error, {no_file_to_remove, File2}}
+ end
+ end;
+init_md5_list_1([], DiffList, Acc) ->
+ AddFiles = [{F, <<>>} || {add, F} <- DiffList],
+ {ok, lists:reverse(Acc, AddFiles)};
+init_md5_list_1(Md5List, [], Acc) ->
+ {ok, lists:reverse(Acc, Md5List)}.
+
+%%---------------------------------------------------------------------------
+%% Edoc
+
+-spec get_specs(plt()) -> string().
+
+get_specs(#dialyzer_plt{info = Info}) ->
+ %% TODO: Should print contracts as well.
+ List =
+ lists:sort([{MFA, Val} || {MFA = {_,_,_}, Val} <- table_to_list(Info)]),
+ lists:flatten(create_specs(List, [])).
+
+beam_file_to_module(Filename) ->
+ list_to_atom(filename:basename(Filename, ".beam")).
+
+-spec get_specs(plt(), module(), atom(), arity_patt()) -> 'none' | string().
+
+get_specs(#dialyzer_plt{info = Info}, M, F, A) when is_atom(M), is_atom(F) ->
+ MFA = {M, F, A},
+ case table_lookup(Info, MFA) of
+ none -> none;
+ {value, Val} -> lists:flatten(create_specs([{MFA, Val}], []))
+ end.
+
+create_specs([{{M, F, _A}, {Ret, Args}}|Left], M) ->
+ [io_lib:format("-spec ~w(~s) -> ~s\n",
+ [F, expand_args(Args), erl_types:t_to_string(Ret)])
+ | create_specs(Left, M)];
+create_specs(List = [{{M, _F, _A}, {_Ret, _Args}}| _], _M) ->
+ [io_lib:format("\n\n%% ------- Module: ~w -------\n\n", [M])
+ | create_specs(List, M)];
+create_specs([], _) ->
+ [].
+
+expand_args([]) ->
+ [];
+expand_args([ArgType]) ->
+ case erl_types:t_is_any(ArgType) of
+ true -> ["_"];
+ false -> [erl_types:t_to_string(ArgType)]
+ end;
+expand_args([ArgType|Left]) ->
+ [case erl_types:t_is_any(ArgType) of
+ true -> "_";
+ false -> erl_types:t_to_string(ArgType)
+ end ++
+ ","|expand_args(Left)].
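For reference, the shape of the text produced by create_specs/2 and expand_args/1 above, with made-up module, function and type names; an any() argument is rendered as "_", arguments are joined with a bare comma, and there is no trailing period:

    %% ------- Module: foo -------

    -spec bar(integer(),_) -> atom()
    -spec baz(_) -> 'ok'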
+
+error(Msg) ->
+ throw({dialyzer_error, lists:flatten(Msg)}).
+
+%%---------------------------------------------------------------------------
+%% Ets table
+
+table_new() ->
+ dict:new().
+
+table_to_list(Plt) ->
+ dict:to_list(Plt).
+
+table_delete_module(Plt, Mod) ->
+ dict:filter(fun({M, _F, _A}, _Val) -> M =/= Mod;
+ (_, _) -> true
+ end, Plt).
+
+table_delete_module2(Plt, Mod) ->
+ dict:filter(fun(M, _Val) -> M =/= Mod end, Plt).
+
+table_delete_list(Plt, [H|T]) ->
+ table_delete_list(dict:erase(H, Plt), T);
+table_delete_list(Plt, []) ->
+ Plt.
+
+table_insert_list(Plt, [{Key, Val}|Left]) ->
+ table_insert_list(table_insert(Plt, Key, Val), Left);
+table_insert_list(Plt, []) ->
+ Plt.
+
+table_insert(Plt, Key, {_Ret, _Arg} = Obj) ->
+ dict:store(Key, Obj, Plt);
+table_insert(Plt, Key, #contract{} = C) ->
+ dict:store(Key, C, Plt).
+
+table_lookup(Plt, Obj) ->
+ case dict:find(Obj, Plt) of
+ error -> none;
+ {ok, Val} -> {value, Val}
+ end.
+
+table_lookup_module(Plt, Mod) ->
+ List = dict:fold(fun(Key, Val, Acc) ->
+ case Key of
+ {Mod, _F, _A} -> [{Key, element(1, Val),
+ element(2, Val)}|Acc];
+ _ -> Acc
+ end
+ end, [], Plt),
+ case List =:= [] of
+ true -> none;
+ false -> {value, List}
+ end.
+
+table_contains_module(Plt, Mod) ->
+ dict:fold(fun({M, _F, _A}, _Val, _Acc) when M =:= Mod -> true;
+ (_, _, Acc) -> Acc
+ end, false, Plt).
+
+table_merge([H|T]) ->
+ table_merge(T, H).
+
+table_merge([], Acc) ->
+ Acc;
+table_merge([Plt|Left], Acc) ->
+ NewAcc = dict:merge(fun(_Key, Val, Val) -> Val end, Plt, Acc),
+ table_merge(Left, NewAcc).
+
+%%---------------------------------------------------------------------------
+%% Debug utilities.
+
+-spec pp_non_returning() -> 'ok'.
+
+pp_non_returning() ->
+ PltFile = get_default_plt(),
+ Plt = from_file(PltFile),
+ List = table_to_list(Plt#dialyzer_plt.info),
+ Unit = [{MFA, erl_types:t_fun(Args, Ret)} || {MFA, {Ret, Args}} <- List,
+ erl_types:t_is_unit(Ret)],
+ None = [{MFA, erl_types:t_fun(Args, Ret)} || {MFA, {Ret, Args}} <- List,
+ erl_types:t_is_none(Ret)],
+ io:format("=========================================\n"),
+ io:format("= Loops =\n"),
+ io:format("=========================================\n\n"),
+ lists:foreach(fun({{M, F, _}, Type}) ->
+ io:format("~w:~w~s.\n",
+ [M, F, dialyzer_utils:format_sig(Type)])
+ end, lists:sort(Unit)),
+ io:format("\n"),
+ io:format("=========================================\n"),
+ io:format("= Errors =\n"),
+ io:format("=========================================\n\n"),
+ lists:foreach(fun({{M, F, _}, Type}) ->
+ io:format("~w:~w~s.\n",
+ [M, F, dialyzer_utils:format_sig(Type)])
+ end, lists:sort(None)).
+
+-spec pp_mod(module()) -> 'ok'.
+
+pp_mod(Mod) when is_atom(Mod) ->
+ PltFile = get_default_plt(),
+ Plt = from_file(PltFile),
+ case lookup_module(Plt, Mod) of
+ {value, List} ->
+ lists:foreach(fun({{_, F, _}, Ret, Args}) ->
+ T = erl_types:t_fun(Args, Ret),
+ S = dialyzer_utils:format_sig(T),
+ io:format("-spec ~w~s.\n", [F, S])
+ end, lists:sort(List));
+ none ->
+ io:format("dialyzer: Found no module named '~s' in the PLT\n", [Mod])
+ end.
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
new file mode 100644
index 0000000000..5857f7a03d
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -0,0 +1,2426 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%----------------------------------------------------------------------
+%%% File : dialyzer_races.erl
+%%% Author : Maria Christakis <[email protected]>
+%%% Description : Utility functions for race condition detection
+%%%
+%%% Created : 21 Nov 2008 by Maria Christakis <[email protected]>
+%%%----------------------------------------------------------------------
+-module(dialyzer_races).
+
+%% Race Analysis
+
+-export([store_race_call/5, race/1, get_race_warnings/2, format_args/4]).
+
+%% Record Interfaces
+
+-export([beg_clause_new/3, cleanup/1, end_case_new/1, end_clause_new/3,
+ get_curr_fun/1, get_curr_fun_args/1, get_new_table/1,
+ get_race_analysis/1, get_race_list/1, get_race_list_size/1,
+ let_tag_new/2, new/0, put_curr_fun/3, put_fun_args/2,
+ put_race_analysis/2, put_race_list/3]).
+
+-include("dialyzer.hrl").
+
+%%% ===========================================================================
+%%%
+%%% Definitions
+%%%
+%%% ===========================================================================
+
+-define(local, 5).
+-define(no_arg, no_arg).
+-define(no_label, no_label).
+
+-define(WARN_WHEREIS_REGISTER, warn_whereis_register).
+-define(WARN_ETS_LOOKUP_INSERT, warn_ets_lookup_insert).
+-define(WARN_MNESIA_DIRTY_READ_WRITE, warn_mnesia_dirty_read_write).
+-define(WARN_NO_WARN, warn_no_warn).
+
+%%% ===========================================================================
+%%%
+%%% Local Types
+%%%
+%%% ===========================================================================
+
+-type mfa_or_funlbl() :: label() | mfa().
+
+-type label_type() :: label() | [label()] | {label()} | ?no_label.
+-type args() :: [label_type() | [string()]].
+-type core_vars() :: cerl:cerl() | ?no_arg.
+-type var_to_map() :: core_vars() | [cerl:cerl()].
+-type core_args() :: [core_vars()] | 'empty'.
+-type op() :: 'bind' | 'unbind'.
+
+-type dep_calls() :: 'whereis' | 'ets_lookup' | 'mnesia_dirty_read'.
+-type warn_calls() :: 'register' | 'ets_insert' | 'mnesia_dirty_write'.
+-type call() :: 'whereis' | 'register' | 'ets_new' | 'ets_lookup'
+ | 'ets_insert' | 'mnesia_dirty_read1'
+ | 'mnesia_dirty_read2' | 'mnesia_dirty_write1'
+ | 'mnesia_dirty_write2' | 'function_call'.
+-type race_tag() :: 'whereis_register' | 'ets_lookup_insert'
+ | 'mnesia_dirty_read_write'.
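The three race_tag() values correspond to the dependency/warning call pairs handled in store_race_call/5 and race/1 below: whereis/1 followed by register/2, ets:lookup/2 followed by ets:insert/2, and mnesia:dirty_read followed by mnesia:dirty_write. As an illustration only (ordinary user code, not part of this module), the first pattern typically looks like this:

    maybe_register(Name, Pid) ->
      case whereis(Name) of
        undefined -> register(Name, Pid);   %% Name may have been taken in between
        _OtherPid -> already_registered
      end.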
+
+-record(beg_clause, {arg :: var_to_map(),
+ pats :: var_to_map(),
+ guard :: cerl:cerl()}).
+-record(end_clause, {arg :: var_to_map(),
+ pats :: var_to_map(),
+ guard :: cerl:cerl()}).
+-record(end_case, {clauses :: [#end_clause{}]}).
+-record(curr_fun, {status :: 'in' | 'out',
+ mfa :: mfa_or_funlbl(),
+ label :: label(),
+ def_vars :: [core_vars()],
+ arg_types :: [erl_types:erl_type()],
+ call_vars :: [core_vars()],
+ var_map :: dict()}).
+-record(dep_call, {call_name :: dep_calls(),
+ args :: args(),
+ arg_types :: [erl_types:erl_type()],
+ vars :: [core_vars()],
+ state :: _,
+ file_line :: file_line(),
+ var_map :: dict()}).
+-record(fun_call, {caller :: mfa_or_funlbl(),
+ callee :: mfa_or_funlbl(),
+ arg_types :: [erl_types:erl_type()],
+ vars :: [core_vars()]}).
+-record(let_tag, {var :: var_to_map(),
+ arg :: var_to_map()}).
+-record(warn_call, {call_name :: warn_calls(),
+ args :: args(),
+ var_map :: dict()}).
+
+-type case_tags() :: 'beg_case' | #beg_clause{} | #end_clause{} | #end_case{}.
+-type code() :: [#dep_call{} | #warn_call{} | #fun_call{} |
+ #curr_fun{} | #let_tag{} | case_tags() | race_tag()].
+
+-type table_var() :: label() | ?no_label.
+-type table() :: {'named', table_var(), [string()]} | 'other' | 'no_t'.
+
+-record(race_fun, {mfa :: mfa(),
+ args :: args(),
+ arg_types :: [erl_types:erl_type()],
+ vars :: [core_vars()],
+ file_line :: file_line(),
+ index :: non_neg_integer(),
+ fun_mfa :: mfa_or_funlbl(),
+ fun_label :: label()}).
+
+-record(races, {curr_fun :: mfa_or_funlbl(),
+ curr_fun_label :: label(),
+ curr_fun_args = 'empty' :: core_args(),
+ new_table = 'no_t' :: table(),
+ race_list = [] :: code(),
+ race_list_size = 0 :: non_neg_integer(),
+ race_tags = [] :: [#race_fun{}],
+ %% true for fun types and warning mode
+ race_analysis = false :: boolean(),
+ race_warnings = [] :: [dial_warning()]}).
+
+%%% ===========================================================================
+%%%
+%%% Exported Types
+%%%
+%%% ===========================================================================
+
+-opaque races() :: #races{}.
+
+%%% ===========================================================================
+%%%
+%%% Race Analysis
+%%%
+%%% ===========================================================================
+
+-spec store_race_call(mfa_or_funlbl(), [erl_types:erl_type()], [core_vars()],
+ file_line(), dialyzer_dataflow:state()) ->
+ dialyzer_dataflow:state().
+
+store_race_call(Fun, ArgTypes, Args, FileLine, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ CurrFun = Races#races.curr_fun,
+ CurrFunLabel = Races#races.curr_fun_label,
+ RaceTags = Races#races.race_tags,
+ CleanState = dialyzer_dataflow:state__records_only(State),
+ {NewRaceList, NewRaceListSize, NewRaceTags, NewTable} =
+ case CurrFun of
+ {_Module, module_info, A} when A =:= 0 orelse A =:= 1 ->
+ {[], 0, RaceTags, no_t};
+ _Thing ->
+ RaceList = Races#races.race_list,
+ RaceListSize = Races#races.race_list_size,
+ case Fun of
+ {erlang, get_module_info, A} when A =:= 1 orelse A =:= 2 ->
+ {[], 0, RaceTags, no_t};
+ {erlang, register, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, register),
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = register, args = VarArgs}|
+ RaceList], RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ {erlang, whereis, 1} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, whereis),
+ {[#dep_call{call_name = whereis, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ state = CleanState, file_line = FileLine}|
+ RaceList], RaceListSize + 1, RaceTags, no_t};
+ {ets, insert, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, ets_insert),
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = ets_insert, args = VarArgs}|
+ RaceList], RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ {ets, lookup, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, ets_lookup),
+ {[#dep_call{call_name = ets_lookup, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ state = CleanState, file_line = FileLine}|
+ RaceList], RaceListSize + 1, RaceTags, no_t};
+ {ets, new, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, ets_new),
+ [VarArgs1, VarArgs2, _, Options] = VarArgs,
+ NewTable1 =
+ case lists:member("'public'", Options) of
+ true ->
+ case lists:member("'named_table'", Options) of
+ true ->
+ {named, VarArgs1, VarArgs2};
+ false -> other
+ end;
+ false -> no_t
+ end,
+ {RaceList, RaceListSize, RaceTags, NewTable1};
+ {mnesia, dirty_read, A} when A =:= 1 orelse A =:= 2 ->
+ VarArgs =
+ case A of
+ 1 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_read1);
+ 2 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_read2)
+ end,
+ {[#dep_call{call_name = mnesia_dirty_read, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ state = CleanState, file_line = FileLine}|RaceList],
+ RaceListSize + 1, RaceTags, no_t};
+ {mnesia, dirty_write, A} when A =:= 1 orelse A =:= 2 ->
+ VarArgs =
+ case A of
+ 1 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_write1);
+ 2 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_write2)
+ end,
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = mnesia_dirty_write,
+ args = VarArgs}|RaceList],
+ RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ Int when is_integer(Int) ->
+ {[#fun_call{caller = CurrFun, callee = Int, arg_types = ArgTypes,
+ vars = Args}|RaceList],
+ RaceListSize + 1, RaceTags, no_t};
+ _Other ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ case digraph:vertex(dialyzer_callgraph:get_digraph(Callgraph),
+ Fun) of
+ {Fun, confirmed} ->
+ {[#fun_call{caller = CurrFun, callee = Fun,
+ arg_types = ArgTypes, vars = Args}|RaceList],
+ RaceListSize + 1, RaceTags, no_t};
+ false ->
+ {RaceList, RaceListSize, RaceTags, no_t}
+ end
+ end
+ end,
+ state__renew_info(NewRaceList, NewRaceListSize, NewRaceTags, NewTable, State).
+
+-spec race(dialyzer_dataflow:state()) -> dialyzer_dataflow:state().
+
+race(State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ RaceTags = Races#races.race_tags,
+ RetState =
+ case RaceTags of
+ [] -> State;
+ [#race_fun{mfa = Fun,
+ args = VarArgs, arg_types = ArgTypes,
+ vars = Args, file_line = FileLine,
+ index = Index, fun_mfa = CurrFun,
+ fun_label = CurrFunLabel}|T] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ {ok, [_Args, Code]} =
+ dict:find(CurrFun, dialyzer_callgraph:get_race_code(Callgraph)),
+ RaceList = lists:reverse(Code),
+ RaceWarnTag =
+ case Fun of
+ {erlang, register, 2} -> ?WARN_WHEREIS_REGISTER;
+ {ets, insert, 2} -> ?WARN_ETS_LOOKUP_INSERT;
+ {mnesia, dirty_write, _A} -> ?WARN_MNESIA_DIRTY_READ_WRITE
+ end,
+ State1 =
+ state__renew_curr_fun(CurrFun,
+ state__renew_curr_fun_label(CurrFunLabel,
+ state__renew_race_list(lists:nthtail(length(RaceList) - Index,
+ RaceList), State))),
+ DepList = fixup_race_list(RaceWarnTag, VarArgs, State1),
+ {State2, RaceWarn} =
+ get_race_warn(Fun, Args, ArgTypes, DepList, State),
+ race(
+ state__add_race_warning(
+ state__renew_race_tags(T, State2), RaceWarn, RaceWarnTag,
+ FileLine))
+ end,
+ state__renew_race_tags([], RetState).
+
+fixup_race_list(RaceWarnTag, WarnVarArgs, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ CurrFun = Races#races.curr_fun,
+ CurrFunLabel = Races#races.curr_fun_label,
+ RaceList = Races#races.race_list,
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ Digraph = dialyzer_callgraph:get_digraph(Callgraph),
+ Calls = digraph:edges(Digraph),
+ RaceTag =
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER -> whereis_register;
+ ?WARN_ETS_LOOKUP_INSERT -> ets_lookup_insert;
+ ?WARN_MNESIA_DIRTY_READ_WRITE -> mnesia_dirty_read_write
+ end,
+ NewRaceList = [RaceTag|RaceList],
+ CleanState = dialyzer_dataflow:state__cleanup(State),
+ NewState = state__renew_race_list(NewRaceList, CleanState),
+ DepList1 =
+ fixup_race_forward_pullout(CurrFun, CurrFunLabel, Calls,
+ lists:reverse(NewRaceList), [], CurrFun,
+ WarnVarArgs, RaceWarnTag, dict:new(),
+ [], [], [], 2 * ?local, NewState),
+ Parents =
+ fixup_race_backward(CurrFun, Calls, Calls, [], ?local),
+ UParents = lists:usort(Parents),
+ Filtered =
+ filter_parents(UParents, UParents, Digraph),
+ NewParents =
+ case lists:member(CurrFun, Filtered) of
+ true -> Filtered;
+ false -> [CurrFun|Filtered]
+ end,
+ DepList2 =
+ fixup_race_list_helper(NewParents, Calls, CurrFun, WarnVarArgs,
+ RaceWarnTag, NewState),
+ lists:usort(cleanup_dep_calls(DepList1 ++ DepList2)).
+
+fixup_race_list_helper(Parents, Calls, CurrFun, WarnVarArgs, RaceWarnTag,
+ State) ->
+ case Parents of
+ [] -> [];
+ [Head|Tail] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ Code =
+ case dict:find(Head, dialyzer_callgraph:get_race_code(Callgraph)) of
+ error -> [];
+ {ok, [_A, C]} -> C
+ end,
+ {ok, FunLabel} = dialyzer_callgraph:lookup_label(Head, Callgraph),
+ DepList1 =
+ fixup_race_forward_pullout(Head, FunLabel, Calls, Code, [], CurrFun,
+ WarnVarArgs, RaceWarnTag, dict:new(),
+ [], [], [], 2 * ?local, State),
+ DepList2 =
+ fixup_race_list_helper(Tail, Calls, CurrFun, WarnVarArgs,
+ RaceWarnTag, State),
+ DepList1 ++ DepList2
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Forward Analysis
+%%%
+%%% ===========================================================================
+
+fixup_race_forward_pullout(CurrFun, CurrFunLabel, Calls, Code, RaceList,
+ InitFun, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel,
+ State) ->
+ {DepList, NewCurrFun, NewCurrFunLabel, NewCalls,
+ NewCode, NewRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, NewNestingLevel} =
+ fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
+ InitFun, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel,
+ cleanup_race_code(State)),
+ case NewCode of
+ [] -> DepList;
+ [#fun_call{caller = NewCurrFun, callee = Call, arg_types = FunTypes,
+ vars = FunArgs}|Tail] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ OkCall = {ok, Call},
+ {Name, Label} =
+ case is_integer(Call) of
+ true ->
+ case dialyzer_callgraph:lookup_name(Call, Callgraph) of
+ error -> {OkCall, OkCall};
+ N -> {N, OkCall}
+ end;
+ false ->
+ {OkCall, dialyzer_callgraph:lookup_label(Call, Callgraph)}
+ end,
+ {NewCurrFun1, NewCurrFunLabel1, NewCalls1, NewCode1, NewRaceList1,
+ NewRaceVarMap1, NewFunDefVars1, NewFunCallVars1, NewFunArgTypes1,
+ NewNestingLevel1} =
+ case Label =:= error of
+ true ->
+ {NewCurrFun, NewCurrFunLabel, NewCalls, Tail, NewRaceList,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel};
+ false ->
+ {ok, Fun} = Name,
+ {ok, Int} = Label,
+ case dict:find(Fun,
+ dialyzer_callgraph:get_race_code(Callgraph)) of
+ error ->
+ {NewCurrFun, NewCurrFunLabel, NewCalls, Tail, NewRaceList,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel};
+ {ok, [Args, CodeB]} ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ {RetCurrFun, RetCurrFunLabel, RetCalls, RetCode,
+ RetRaceList, RetRaceVarMap, RetFunDefVars, RetFunCallVars,
+ RetFunArgTypes, RetNestingLevel} =
+ fixup_race_forward_helper(NewCurrFun,
+ NewCurrFunLabel, Fun, Int, NewCalls, NewCalls,
+ [#curr_fun{status = out, mfa = NewCurrFun,
+ label = NewCurrFunLabel,
+ var_map = NewRaceVarMap,
+ def_vars = NewFunDefVars,
+ call_vars = NewFunCallVars,
+ arg_types = NewFunArgTypes}|
+ Tail],
+ NewRaceList, InitFun, FunArgs, FunTypes, RaceWarnTag,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars,
+ NewFunArgTypes, NewNestingLevel, Args, CodeB,
+ Races#races.race_list),
+ case RetCode of
+ [#curr_fun{}|_CodeTail] ->
+ {NewCurrFun, NewCurrFunLabel, RetCalls, RetCode,
+ RetRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, RetNestingLevel};
+ _Else ->
+ {RetCurrFun, RetCurrFunLabel, RetCalls, RetCode,
+ RetRaceList, RetRaceVarMap, RetFunDefVars,
+ RetFunCallVars, RetFunArgTypes, RetNestingLevel}
+ end
+ end
+ end,
+ DepList ++
+ fixup_race_forward_pullout(NewCurrFun1, NewCurrFunLabel1, NewCalls1,
+ NewCode1, NewRaceList1, InitFun, WarnVarArgs,
+ RaceWarnTag, NewRaceVarMap1, NewFunDefVars1,
+ NewFunCallVars1, NewFunArgTypes1,
+ NewNestingLevel1, State)
+ end.
+
+fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
+ InitFun, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel,
+ State) ->
+ case Code of
+ [] ->
+ {[], CurrFun, CurrFunLabel, Calls, Code, RaceList, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel};
+ [Head|Tail] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ {NewRL, DepList, NewNL, Return} =
+ case Head of
+ #dep_call{call_name = whereis} ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ {[Head#dep_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #dep_call{call_name = ets_lookup, args = DepCallArgs} ->
+ case RaceWarnTag of
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [Tab, Names, _, _] = DepCallArgs,
+ case compare_var_list(Tab,
+ dialyzer_callgraph:get_public_tables(Callgraph),
+ RaceVarMap)
+ orelse
+ length(Names --
+ dialyzer_callgraph:get_named_tables(Callgraph)) <
+ length(Names) of
+ true ->
+ {[Head#dep_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ false ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #dep_call{call_name = mnesia_dirty_read} ->
+ case RaceWarnTag of
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ {[Head#dep_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #warn_call{call_name = register} ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ {[Head#warn_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #warn_call{call_name = ets_insert, args = WarnCallArgs} ->
+ case RaceWarnTag of
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [Tab, Names, _, _] = WarnCallArgs,
+ case compare_var_list(Tab,
+ dialyzer_callgraph:get_public_tables(Callgraph),
+ RaceVarMap)
+ orelse
+ length(Names --
+ dialyzer_callgraph:get_named_tables(Callgraph)) <
+ length(Names) of
+ true ->
+ {[Head#warn_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ false ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #warn_call{call_name = mnesia_dirty_write} ->
+ case RaceWarnTag of
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ {[Head#warn_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #fun_call{caller = CurrFun, callee = InitFun} ->
+ {RaceList, [], NestingLevel, false};
+ #fun_call{caller = CurrFun} ->
+ {RaceList, [], NestingLevel - 1, false};
+ beg_case ->
+ {[Head|RaceList], [], NestingLevel, false};
+ #beg_clause{} ->
+ {[#beg_clause{}|RaceList], [], NestingLevel, false};
+ #end_clause{} ->
+ {[#end_clause{}|RaceList], [], NestingLevel, false};
+ #end_case{} ->
+ {[Head|RaceList], [], NestingLevel, false};
+ #let_tag{} ->
+ {RaceList, [], NestingLevel, false};
+ #curr_fun{status = in, mfa = InitFun,
+ label = _InitFunLabel, var_map = _NewRVM,
+ def_vars = NewFDV, call_vars = NewFCV,
+ arg_types = _NewFAT} ->
+ {[#curr_fun{status = out, var_map = RaceVarMap,
+ def_vars = NewFDV, call_vars = NewFCV}|
+ RaceList], [], NestingLevel - 1, false};
+ #curr_fun{status = in, def_vars = NewFDV,
+ call_vars = NewFCV} ->
+ {[#curr_fun{status = out, var_map = RaceVarMap,
+ def_vars = NewFDV, call_vars = NewFCV}|
+ RaceList],
+ [], NestingLevel - 1, false};
+ #curr_fun{status = out} ->
+ {[#curr_fun{status = in, var_map = RaceVarMap}|RaceList], [],
+ NestingLevel + 1, false};
+ RaceTag ->
+ PublicTables = dialyzer_callgraph:get_public_tables(Callgraph),
+ NamedTables = dialyzer_callgraph:get_named_tables(Callgraph),
+ WarnVarArgs1 =
+ var_type_analysis(FunDefVars, FunArgTypes, WarnVarArgs,
+ RaceWarnTag, RaceVarMap,
+ dialyzer_dataflow:state__records_only(State)),
+ {NewDepList, IsPublic, _Return} =
+ get_deplist_paths(RaceList, WarnVarArgs1, RaceWarnTag,
+ RaceVarMap, 0, PublicTables, NamedTables),
+ {NewHead, NewDepList1} =
+ case RaceTag of
+ whereis_register ->
+ {[#warn_call{call_name = register, args = WarnVarArgs,
+ var_map = RaceVarMap}],
+ NewDepList};
+ ets_lookup_insert ->
+ NewWarnCall =
+ [#warn_call{call_name = ets_insert, args = WarnVarArgs,
+ var_map = RaceVarMap}],
+ [Tab, Names, _, _] = WarnVarArgs,
+ case IsPublic orelse
+ compare_var_list(Tab, PublicTables, RaceVarMap)
+ orelse
+ length(Names -- NamedTables) < length(Names) of
+ true ->
+ {NewWarnCall, NewDepList};
+ false -> {NewWarnCall, []}
+ end;
+ mnesia_dirty_read_write ->
+ {[#warn_call{call_name = mnesia_dirty_write,
+ args = WarnVarArgs, var_map = RaceVarMap}],
+ NewDepList}
+ end,
+ {NewHead ++ RaceList, NewDepList1, NestingLevel,
+ is_last_race(RaceTag, InitFun, Tail, Callgraph)}
+ end,
+ {NewCurrFun, NewCurrFunLabel, NewCode, NewRaceList, NewRaceVarMap,
+ NewFunDefVars, NewFunCallVars, NewFunArgTypes, NewNestingLevel,
+ PullOut} =
+ case Head of
+ #fun_call{caller = CurrFun} ->
+ case NewNL =:= 0 of
+ true ->
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false};
+ false ->
+ {CurrFun, CurrFunLabel, Code, NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, true}
+ end;
+ #beg_clause{arg = Arg, pats = Pats, guard = Guard} ->
+ {RaceVarMap1, RemoveClause} =
+ race_var_map_guard(Arg, Pats, Guard, RaceVarMap, bind),
+ case RemoveClause of
+ true ->
+ {RaceList2,
+ #curr_fun{mfa = CurrFun2, label = CurrFunLabel2,
+ var_map = RaceVarMap2, def_vars = FunDefVars2,
+ call_vars = FunCallVars2, arg_types = FunArgTypes2},
+ Code2, NestingLevel2} =
+ remove_clause(NewRL,
+ #curr_fun{mfa = CurrFun, label = CurrFunLabel,
+ var_map = RaceVarMap1,
+ def_vars = FunDefVars,
+ call_vars = FunCallVars,
+ arg_types = FunArgTypes},
+ Tail, NewNL),
+ {CurrFun2, CurrFunLabel2, Code2, RaceList2,
+ RaceVarMap2, FunDefVars2, FunCallVars2, FunArgTypes2,
+ NestingLevel2, false};
+ false ->
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false}
+ end;
+ #end_clause{arg = Arg, pats = Pats, guard = Guard} ->
+ {RaceVarMap1, _RemoveClause} =
+ race_var_map_guard(Arg, Pats, Guard, RaceVarMap, unbind),
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL,
+ false};
+ #end_case{clauses = Clauses} ->
+ RaceVarMap1 =
+ race_var_map_clauses(Clauses, RaceVarMap),
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL,
+ false};
+ #let_tag{var = Var, arg = Arg} ->
+ {CurrFun, CurrFunLabel, Tail, NewRL,
+ race_var_map(Var, Arg, RaceVarMap, bind), FunDefVars,
+ FunCallVars, FunArgTypes, NewNL, false};
+ #curr_fun{mfa = CurrFun1, label = CurrFunLabel1,
+ var_map = RaceVarMap1, def_vars = FunDefVars1,
+ call_vars = FunCallVars1, arg_types = FunArgTypes1} ->
+ case NewNL =:= 0 of
+ true ->
+ {CurrFun, CurrFunLabel,
+ remove_nonlocal_functions(Tail, 1), NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false};
+ false ->
+ {CurrFun1, CurrFunLabel1, Tail, NewRL, RaceVarMap1,
+ FunDefVars1, FunCallVars1, FunArgTypes1, NewNL, false}
+ end;
+ _Thing ->
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false}
+ end,
+ case Return of
+ true ->
+ {DepList, NewCurrFun, NewCurrFunLabel, Calls,
+ [], NewRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, NewNestingLevel};
+ false ->
+ NewNestingLevel1 =
+ case NewNestingLevel =:= 0 of
+ true -> NewNestingLevel + 1;
+ false -> NewNestingLevel
+ end,
+ case PullOut of
+ true ->
+ {DepList, NewCurrFun, NewCurrFunLabel, Calls,
+ NewCode, NewRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, NewNestingLevel1};
+ false ->
+ {RetDepList, NewCurrFun1, NewCurrFunLabel1, NewCalls1,
+ NewCode1, NewRaceList1, NewRaceVarMap1, NewFunDefVars1,
+ NewFunCallVars1, NewFunArgTypes1, NewNestingLevel2} =
+ fixup_race_forward(NewCurrFun, NewCurrFunLabel, Calls,
+ NewCode, NewRaceList, InitFun, WarnVarArgs,
+ RaceWarnTag, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel1, State),
+ {DepList ++ RetDepList, NewCurrFun1, NewCurrFunLabel1,
+ NewCalls1, NewCode1, NewRaceList1, NewRaceVarMap1,
+ NewFunDefVars1, NewFunCallVars1, NewFunArgTypes1,
+ NewNestingLevel2}
+ end
+ end
+ end.
+
+get_deplist_paths(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables) ->
+ case RaceList of
+ [] -> {[], false, true};
+ [Head|Tail] ->
+ case Head of
+ #end_case{} ->
+ {RaceList1, DepList1, IsPublic1, Continue1} =
+ handle_case(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables),
+ case Continue1 of
+ true ->
+ {DepList2, IsPublic2, Continue2} =
+ get_deplist_paths(RaceList1, WarnVarArgs, RaceWarnTag,
+ RaceVarMap, CurrLevel, PublicTables,
+ NamedTables),
+ {DepList1 ++ DepList2, IsPublic1 orelse IsPublic2, Continue2};
+ false -> {DepList1, IsPublic1, false}
+ end;
+ #beg_clause{} ->
+ get_deplist_paths(fixup_before_case_path(Tail), WarnVarArgs,
+ RaceWarnTag, RaceVarMap, CurrLevel, PublicTables,
+ NamedTables);
+ #curr_fun{status = in, var_map = RaceVarMap1} ->
+ {DepList, IsPublic, Continue} =
+ get_deplist_paths(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel + 1, PublicTables, NamedTables),
+ IsPublic1 =
+ case RaceWarnTag of
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [Tabs, Names, _, _] = WarnVarArgs,
+ IsPublic orelse
+ lists:any(
+ fun (T) ->
+ compare_var_list(T, PublicTables, RaceVarMap1)
+ end, Tabs)
+ orelse
+ length(Names -- NamedTables) < length(Names);
+ _ -> true
+ end,
+ {DepList, IsPublic1, Continue};
+ #curr_fun{status = out, var_map = RaceVarMap1, def_vars = FunDefVars,
+ call_vars = FunCallVars} ->
+ WarnVarArgs1 =
+ var_analysis([format_arg(DefVar) || DefVar <- FunDefVars],
+ [format_arg(CallVar) || CallVar <- FunCallVars],
+ WarnVarArgs, RaceWarnTag),
+ {WarnVarArgs2, Stop} =
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs1,
+ Vars =
+ lists:flatten(
+ [find_all_bound_vars(V, RaceVarMap1) || V <- WVA1]),
+ case {Vars, CurrLevel} of
+ {[], 0} ->
+ {WarnVarArgs, true};
+ {[], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars, WVA2, WVA3, WVA4], false}
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs1,
+ Vars1 =
+ lists:flatten(
+ [find_all_bound_vars(V1, RaceVarMap1) || V1 <- WVA1]),
+ Vars2 =
+ lists:flatten(
+ [find_all_bound_vars(V2, RaceVarMap1) || V2 <- WVA3]),
+ case {Vars1, Vars2, CurrLevel} of
+ {[], _, 0} ->
+ {WarnVarArgs, true};
+ {[], _, _} ->
+ {WarnVarArgs, false};
+ {_, [], 0} ->
+ {WarnVarArgs, true};
+ {_, [], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars1, WVA2, Vars2, WVA4], false}
+ end;
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [WVA1, WVA2|T] = WarnVarArgs1,
+ Vars =
+ lists:flatten(
+ [find_all_bound_vars(V, RaceVarMap1) || V <- WVA1]),
+ case {Vars, CurrLevel} of
+ {[], 0} ->
+ {WarnVarArgs, true};
+ {[], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars, WVA2|T], false}
+ end
+ end,
+ case Stop of
+ true -> {[], false, false};
+ false ->
+ CurrLevel1 =
+ case CurrLevel of
+ 0 -> CurrLevel;
+ _ -> CurrLevel - 1
+ end,
+ get_deplist_paths(Tail, WarnVarArgs2, RaceWarnTag, RaceVarMap1,
+ CurrLevel1, PublicTables, NamedTables)
+ end;
+ #warn_call{call_name = register, args = WarnVarArgs1,
+ var_map = RaceVarMap1} ->
+ case compare_first_arg(WarnVarArgs, WarnVarArgs1, RaceVarMap1) of
+ true -> {[], false, false};
+ NewWarnVarArgs ->
+ get_deplist_paths(Tail, NewWarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables)
+ end;
+ #warn_call{call_name = ets_insert, args = WarnVarArgs1,
+ var_map = RaceVarMap1} ->
+ case compare_ets_insert(WarnVarArgs, WarnVarArgs1, RaceVarMap1) of
+ true -> {[], false, false};
+ NewWarnVarArgs ->
+ get_deplist_paths(Tail, NewWarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables)
+ end;
+ #warn_call{call_name = mnesia_dirty_write, args = WarnVarArgs1,
+ var_map = RaceVarMap1} ->
+ case compare_first_arg(WarnVarArgs, WarnVarArgs1, RaceVarMap1) of
+ true -> {[], false, false};
+ NewWarnVarArgs ->
+ get_deplist_paths(Tail, NewWarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables)
+ end;
+ #dep_call{var_map = RaceVarMap1} ->
+ {DepList, IsPublic, Continue} =
+ get_deplist_paths(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables),
+ {refine_race(Head, WarnVarArgs, RaceWarnTag, DepList, RaceVarMap1),
+ IsPublic, Continue}
+ end
+ end.
+
+handle_case(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables) ->
+ case RaceList of
+ [] -> {[], [], false, true};
+ [Head|Tail] ->
+ case Head of
+ #end_clause{} ->
+ {RestRaceList, DepList1, IsPublic1, Continue1} =
+ do_clause(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables),
+ {RetRaceList, DepList2, IsPublic2, Continue2} =
+ handle_case(RestRaceList, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables),
+ {RetRaceList, DepList1 ++ DepList2, IsPublic1 orelse IsPublic2,
+ Continue1 orelse Continue2};
+ beg_case -> {Tail, [], false, false}
+ end
+ end.
+
+do_clause(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables) ->
+ {DepList, IsPublic, Continue} =
+ get_deplist_paths(fixup_case_path(RaceList, 0), WarnVarArgs,
+ RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables),
+ {fixup_case_rest_paths(RaceList, 0), DepList, IsPublic, Continue}.
+
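+%% Returns the part of the race list up to the #beg_clause{} tag of the
+%% current clause, taking nested case constructs into account.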
+fixup_case_path(RaceList, NestingLevel) ->
+ case RaceList of
+ [] -> [];
+ [Head|Tail] ->
+ {NewNestingLevel, Return} =
+ case Head of
+ beg_case -> {NestingLevel - 1, false};
+ #end_case{} -> {NestingLevel + 1, false};
+ #beg_clause{} ->
+ case NestingLevel =:= 0 of
+ true -> {NestingLevel, true};
+ false -> {NestingLevel, false}
+ end;
+ _Other -> {NestingLevel, false}
+ end,
+ case Return of
+ true -> [];
+ false -> [Head|fixup_case_path(Tail, NewNestingLevel)]
+ end
+ end.
+
+%% Gets the race list before a case clause.
+fixup_before_case_path(RaceList) ->
+ case RaceList of
+ [] -> [];
+ [Head|Tail] ->
+ case Head of
+ #end_clause{} ->
+ fixup_before_case_path(fixup_case_rest_paths(Tail, 0));
+ beg_case -> Tail
+ end
+ end.
+
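+%% Like fixup_case_path/2, but returns the race list that remains after
+%% the #beg_clause{} tag of the current clause.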
+fixup_case_rest_paths(RaceList, NestingLevel) ->
+ case RaceList of
+ [] -> [];
+ [Head|Tail] ->
+ {NewNestingLevel, Return} =
+ case Head of
+ beg_case -> {NestingLevel - 1, false};
+ #end_case{} -> {NestingLevel + 1, false};
+ #beg_clause{} ->
+ case NestingLevel =:= 0 of
+ true -> {NestingLevel, true};
+ false -> {NestingLevel, false}
+ end;
+ _Other -> {NestingLevel, false}
+ end,
+ case Return of
+ true -> Tail;
+ false -> fixup_case_rest_paths(Tail, NewNestingLevel)
+ end
+ end.
+
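+%% Handles a call met during the forward traversal: when the
+%% {Caller, Callee} pair is found in Calls, formals are bound to actuals
+%% in the variable map and the callee's code is spliced in via
+%% fixup_all_calls/7; when no matching pair remains, the current clause
+%% is removed (remove_clause/4).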
+fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
+ Calls, CallsToAnalyze, Code, RaceList,
+ InitFun, NewFunArgs, NewFunTypes,
+ RaceWarnTag, RaceVarMap, FunDefVars,
+ FunCallVars, FunArgTypes, NestingLevel,
+ Args, CodeB, StateRaceList) ->
+ case Calls of
+ [] ->
+ {NewRaceList,
+ #curr_fun{mfa = NewCurrFun, label = NewCurrFunLabel,
+ var_map = NewRaceVarMap, def_vars = NewFunDefVars,
+ call_vars = NewFunCallVars, arg_types = NewFunArgTypes},
+ NewCode, NewNestingLevel} =
+ remove_clause(RaceList,
+ #curr_fun{mfa = CurrFun, label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes},
+ Code, NestingLevel),
+ {NewCurrFun, NewCurrFunLabel, CallsToAnalyze, NewCode, NewRaceList,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel};
+ [Head|Tail] ->
+ case Head of
+ {InitFun, InitFun} when CurrFun =:= InitFun, Fun =:= InitFun ->
+ NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
+ NewRaceVarMap =
+ race_var_map(Args, NewFunArgs, RaceVarMap, bind),
+ RetC =
+ fixup_all_calls(InitFun, InitFun, FunLabel, Args,
+ CodeB ++
+ [#curr_fun{status = out, mfa = InitFun,
+ label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes}],
+ Code, RaceVarMap),
+ NewCode =
+ fixup_all_calls(InitFun, InitFun, FunLabel, Args,
+ CodeB ++
+ [#curr_fun{status = out, mfa = InitFun,
+ label = CurrFunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}],
+ [#curr_fun{status = in, mfa = Fun,
+ label = FunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}|
+ lists:reverse(StateRaceList)] ++
+ RetC, NewRaceVarMap),
+ {InitFun, FunLabel, NewCallsToAnalyze, NewCode, RaceList,
+ NewRaceVarMap, Args, NewFunArgs, NewFunTypes, NestingLevel};
+ {CurrFun, Fun} ->
+ NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
+ NewRaceVarMap =
+ race_var_map(Args, NewFunArgs, RaceVarMap, bind),
+ RetC =
+ case Fun of
+ InitFun ->
+ fixup_all_calls(CurrFun, Fun, FunLabel, Args,
+ lists:reverse(StateRaceList) ++
+ [#curr_fun{status = out, mfa = CurrFun,
+ label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes}],
+ Code, RaceVarMap);
+ _Other1 ->
+ fixup_all_calls(CurrFun, Fun, FunLabel, Args,
+ CodeB ++
+ [#curr_fun{status = out, mfa = CurrFun,
+ label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes}],
+ Code, RaceVarMap)
+ end,
+ NewCode =
+ case Fun of
+ InitFun ->
+ [#curr_fun{status = in, mfa = Fun,
+ label = FunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}|
+ lists:reverse(StateRaceList)] ++
+ RetC;
+ _ ->
+ [#curr_fun{status = in, mfa = Fun,
+ label = FunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}|CodeB] ++
+ RetC
+ end,
+ {Fun, FunLabel, NewCallsToAnalyze, NewCode, RaceList, NewRaceVarMap,
+ Args, NewFunArgs, NewFunTypes, NestingLevel};
+ {_TupleA, _TupleB} ->
+ fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
+ Tail, CallsToAnalyze, Code, RaceList, InitFun, NewFunArgs,
+ NewFunTypes, RaceWarnTag, RaceVarMap, FunDefVars, FunCallVars,
+ FunArgTypes, NestingLevel, Args, CodeB, StateRaceList)
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Backward Analysis
+%%%
+%%% ===========================================================================
+
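+%% Walks the call pairs backwards to collect, up to Height levels, the
+%% callers from which CurrFun can be reached (its parents).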
+fixup_race_backward(CurrFun, Calls, CallsToAnalyze, Parents, Height) ->
+ case Height =:= 0 of
+ true -> Parents;
+ false ->
+ case Calls of
+ [] ->
+ case is_integer(CurrFun) orelse lists:member(CurrFun, Parents) of
+ true -> Parents;
+ false -> [CurrFun|Parents]
+ end;
+ [Head|Tail] ->
+ MorePaths =
+ case Head of
+ {Parent, CurrFun} -> true;
+ {Parent, _TupleB} -> false
+ end,
+ case MorePaths of
+ true ->
+ NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
+ NewParents =
+ fixup_race_backward(Parent, NewCallsToAnalyze,
+ NewCallsToAnalyze, Parents, Height - 1),
+ fixup_race_backward(CurrFun, Tail, NewCallsToAnalyze, NewParents,
+ Height);
+ false ->
+ fixup_race_backward(CurrFun, Tail, CallsToAnalyze, Parents,
+ Height)
+ end
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Utilities
+%%%
+%%% ===========================================================================
+
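+%% Returns true if the variable labeled Label2 is (transitively) bound
+%% to Label1 in RaceVarMap.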
+are_bound_labels(Label1, Label2, RaceVarMap) ->
+ case dict:find(Label1, RaceVarMap) of
+ error -> false;
+ {ok, Labels} ->
+ lists:member(Label2, Labels) orelse
+ are_bound_labels_helper(Labels, Label1, Label2, RaceVarMap)
+ end.
+
+are_bound_labels_helper(Labels, OldLabel, CompLabel, RaceVarMap) ->
+ case dict:size(RaceVarMap) of
+ 0 -> false;
+ _ ->
+ case Labels of
+ [] -> false;
+ [Head|Tail] ->
+ NewRaceVarMap = dict:erase(OldLabel, RaceVarMap),
+ are_bound_labels(Head, CompLabel, NewRaceVarMap) orelse
+ are_bound_labels_helper(Tail, Head, CompLabel, NewRaceVarMap)
+ end
+ end.
+
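+%% Structurally compares two Core Erlang terms (variables, tuples,
+%% conses, aliases, values) and returns true if the corresponding
+%% variables are bound to each other in RaceVarMap.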
+are_bound_vars(Vars1, Vars2, RaceVarMap) ->
+ case is_list(Vars1) andalso is_list(Vars2) of
+ true ->
+ case Vars1 of
+ [] -> false;
+ [AHead|ATail] ->
+ case Vars2 of
+ [] -> false;
+ [PHead|PTail] ->
+ are_bound_vars(AHead, PHead, RaceVarMap) andalso
+ are_bound_vars(ATail, PTail, RaceVarMap)
+ end
+ end;
+ false ->
+ {NewVars1, NewVars2, IsList} =
+ case is_list(Vars1) of
+ true ->
+ case Vars1 of
+ [Var1] -> {Var1, Vars2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false ->
+ case is_list(Vars2) of
+ true ->
+ case Vars2 of
+ [Var2] -> {Vars1, Var2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false -> {Vars1, Vars2, true}
+ end
+ end,
+ case IsList of
+ true ->
+ case cerl:type(NewVars1) of
+ var ->
+ case cerl:type(NewVars2) of
+ var ->
+ ALabel = cerl_trees:get_label(NewVars1),
+ PLabel = cerl_trees:get_label(NewVars2),
+ are_bound_labels(ALabel, PLabel, RaceVarMap) orelse
+ are_bound_labels(PLabel, ALabel, RaceVarMap);
+ alias ->
+ are_bound_vars(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap);
+ values ->
+ are_bound_vars(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap);
+ _Other -> false
+ end;
+ tuple ->
+ case cerl:type(NewVars2) of
+ tuple ->
+ are_bound_vars(cerl:tuple_es(NewVars1),
+ cerl:tuple_es(NewVars2), RaceVarMap);
+ alias ->
+ are_bound_vars(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap);
+ values ->
+ are_bound_vars(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap);
+ _Other -> false
+ end;
+ cons ->
+ case cerl:type(NewVars2) of
+ cons ->
+ are_bound_vars(cerl:cons_hd(NewVars1),
+ cerl:cons_hd(NewVars2), RaceVarMap)
+ andalso
+ are_bound_vars(cerl:cons_tl(NewVars1),
+ cerl:cons_tl(NewVars2), RaceVarMap);
+ alias ->
+ are_bound_vars(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap);
+ values ->
+ are_bound_vars(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap);
+ _Other -> false
+ end;
+ alias ->
+ case cerl:type(NewVars2) of
+ alias ->
+ are_bound_vars(cerl:alias_var(NewVars1),
+ cerl:alias_var(NewVars2), RaceVarMap);
+ _Other ->
+ are_bound_vars(cerl:alias_var(NewVars1),
+ NewVars2, RaceVarMap)
+ end;
+ values ->
+ case cerl:type(NewVars2) of
+ values ->
+ are_bound_vars(cerl:values_es(NewVars1),
+ cerl:values_es(NewVars2), RaceVarMap);
+ _Other ->
+ are_bound_vars(cerl:values_es(NewVars1),
+ NewVars2, RaceVarMap)
+ end;
+ _Other -> false
+ end;
+ false -> false
+ end
+ end.
+
+callgraph__renew_tables(Table, Callgraph) ->
+ case Table of
+ {named, NameLabel, Names} ->
+ PTablesToAdd =
+ case NameLabel of
+ ?no_label -> [];
+ _Other -> [NameLabel]
+ end,
+ NamesToAdd = filter_named_tables(Names),
+ PTables = dialyzer_callgraph:get_public_tables(Callgraph),
+ NTables = dialyzer_callgraph:get_named_tables(Callgraph),
+ dialyzer_callgraph:put_public_tables(
+ lists:usort(PTablesToAdd ++ PTables),
+ dialyzer_callgraph:put_named_tables(
+ NamesToAdd ++ NTables, Callgraph));
+ _Other ->
+ Callgraph
+ end.
+
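+%% Skips the code up to the #end_clause{} tag of the current clause,
+%% tracking nested case constructs and function entries/exits, and
+%% returns the current-function tuple in effect together with the code
+%% that follows.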
+cleanup_clause_code(#curr_fun{mfa = CurrFun} = CurrTuple, Code,
+ NestingLevel, LocalNestingLevel) ->
+ case Code of
+ [] -> {CurrTuple, []};
+ [Head|Tail] ->
+ {NewLocalNestingLevel, NewNestingLevel, NewCurrTuple, Return} =
+ case Head of
+ beg_case ->
+ {LocalNestingLevel, NestingLevel + 1, CurrTuple, false};
+ #end_case{} ->
+ {LocalNestingLevel, NestingLevel - 1, CurrTuple, false};
+ #end_clause{} ->
+ case NestingLevel =:= 0 of
+ true ->
+ {LocalNestingLevel, NestingLevel, CurrTuple, true};
+ false ->
+ {LocalNestingLevel, NestingLevel, CurrTuple, false}
+ end;
+ #fun_call{caller = CurrFun} ->
+ {LocalNestingLevel - 1, NestingLevel, CurrTuple, false};
+ #curr_fun{status = in} ->
+ {LocalNestingLevel - 1, NestingLevel, Head, false};
+ #curr_fun{status = out} ->
+ {LocalNestingLevel + 1, NestingLevel, Head, false};
+ Other when Other =/= #fun_call{} ->
+ {LocalNestingLevel, NestingLevel, CurrTuple, false}
+ end,
+ case Return of
+ true -> {NewCurrTuple, Tail};
+ false ->
+ cleanup_clause_code(NewCurrTuple, Tail, NewNestingLevel,
+ NewLocalNestingLevel)
+ end
+ end.
+
+cleanup_dep_calls(DepList) ->
+ case DepList of
+ [] -> [];
+ [#dep_call{call_name = CallName, arg_types = ArgTypes,
+ vars = Vars, state = State, file_line = FileLine}|T] ->
+ [#dep_call{call_name = CallName, arg_types = ArgTypes,
+ vars = Vars, state = State, file_line = FileLine}|
+ cleanup_dep_calls(T)]
+ end.
+
+cleanup_race_code(State) ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ dialyzer_dataflow:state__put_callgraph(
+ dialyzer_callgraph:race_code_new(Callgraph), State).
+
+filter_named_tables(NamesList) ->
+ case NamesList of
+ [] -> [];
+ [Head|Tail] ->
+ NewHead =
+ case string:rstr(Head, "()") of
+ 0 -> [Head];
+ _Other -> []
+ end,
+ NewHead ++ filter_named_tables(Tail)
+ end.
+
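+%% Filters the list of candidate parents: when one parent can reach
+%% another in the callgraph digraph, the reachable one is dropped.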
+filter_parents(Parents, NewParents, Digraph) ->
+ case Parents of
+ [] -> NewParents;
+ [Head|Tail] ->
+ NewParents1 = filter_parents_helper1(Head, Tail, NewParents, Digraph),
+ filter_parents(Tail, NewParents1, Digraph)
+ end.
+
+filter_parents_helper1(First, Rest, NewParents, Digraph) ->
+ case Rest of
+ [] -> NewParents;
+ [Head|Tail] ->
+ NewParents1 = filter_parents_helper2(First, Head, NewParents, Digraph),
+ filter_parents_helper1(First, Tail, NewParents1, Digraph)
+ end.
+
+filter_parents_helper2(Parent1, Parent2, NewParents, Digraph) ->
+ case digraph:get_path(Digraph, Parent1, Parent2) of
+ false ->
+ case digraph:get_path(Digraph, Parent2, Parent1) of
+ false -> NewParents;
+ _Vertices -> NewParents -- [Parent1]
+ end;
+ _Vertices -> NewParents -- [Parent2]
+ end.
+
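+%% Returns Label together with all labels transitively bound to it in
+%% RaceVarMap.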
+find_all_bound_vars(Label, RaceVarMap) ->
+ case dict:find(Label, RaceVarMap) of
+ error -> [Label];
+ {ok, Labels} ->
+ lists:usort(Labels ++
+ find_all_bound_vars_helper(Labels, Label, RaceVarMap))
+ end.
+
+find_all_bound_vars_helper(Labels, Label, RaceVarMap) ->
+ case dict:size(RaceVarMap) of
+ 0 -> [];
+ _ ->
+ case Labels of
+ [] -> [];
+ [Head|Tail] ->
+ NewRaceVarMap = dict:erase(Label, RaceVarMap),
+ find_all_bound_vars(Head, NewRaceVarMap) ++
+ find_all_bound_vars_helper(Tail, Head, NewRaceVarMap)
+ end
+ end.
+
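+%% Replaces every #fun_call{} from CurrFun to NextFun (matched by MFA or
+%% label) in Code with a #curr_fun{status = in} tag followed by
+%% CodeToReplace, binding the callee's formals to the actual arguments.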
+fixup_all_calls(CurrFun, NextFun, NextFunLabel, Args, CodeToReplace,
+ Code, RaceVarMap) ->
+ case Code of
+ [] -> [];
+ [Head|Tail] ->
+ NewCode =
+ case Head of
+ #fun_call{caller = CurrFun, callee = Callee,
+ arg_types = FunArgTypes, vars = FunArgs}
+ when Callee =:= NextFun orelse Callee =:= NextFunLabel ->
+ RaceVarMap1 = race_var_map(Args, FunArgs, RaceVarMap, bind),
+ [#curr_fun{status = in, mfa = NextFun, label = NextFunLabel,
+ var_map = RaceVarMap1, def_vars = Args,
+ call_vars = FunArgs, arg_types = FunArgTypes}|
+ CodeToReplace];
+ _Other -> [Head]
+ end,
+ RetCode =
+ fixup_all_calls(CurrFun, NextFun, NextFunLabel, Args, CodeToReplace,
+ Tail, RaceVarMap),
+ NewCode ++ RetCode
+ end.
+
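+%% Returns true if the remaining code contains neither another
+%% occurrence of RaceTag nor a call that can reach InitFun through the
+%% callgraph, i.e. this is the last race point to consider.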
+is_last_race(RaceTag, InitFun, Code, Callgraph) ->
+ case Code of
+ [] -> true;
+ [Head|Tail] ->
+ case Head of
+ RaceTag -> false;
+ #fun_call{callee = Fun} ->
+ FunName =
+ case is_integer(Fun) of
+ true ->
+ case dialyzer_callgraph:lookup_name(Fun, Callgraph) of
+ error -> Fun;
+ {ok, Name} -> Name
+ end;
+ false -> Fun
+ end,
+ Digraph = dialyzer_callgraph:get_digraph(Callgraph),
+ case FunName =:= InitFun orelse
+ digraph:get_path(Digraph, FunName, InitFun) of
+ false -> is_last_race(RaceTag, InitFun, Tail, Callgraph);
+ _Vertices -> false
+ end;
+ _Other -> is_last_race(RaceTag, InitFun, Tail, Callgraph)
+ end
+ end.
+
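+%% Returns the 1-based position of Member in List, or 0 if it is not
+%% present, e.g. lists_key_member(b, [a, b, c], 0) -> 2.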
+lists_key_member(Member, List, N) when is_integer(Member) ->
+ case List of
+ [] -> 0;
+ [Head|Tail] ->
+ NewN = N + 1,
+ case Head of
+ Member -> NewN;
+ _Other -> lists_key_member(Member, Tail, NewN)
+ end
+ end;
+lists_key_member(_M, _L, _N) ->
+ 0.
+
+lists_key_member_lists(MemberList, List) ->
+ case MemberList of
+ [] -> 0;
+ [Head|Tail] ->
+ case lists_key_member(Head, List, 0) of
+ 0 -> lists_key_member_lists(Tail, List);
+ Other -> Other
+ end
+ end.
+
+lists_key_members_lists(MemberList, List) ->
+ case MemberList of
+ [] -> [];
+ [Head|Tail] ->
+ lists:usort(
+ lists_key_members_lists_helper(Head, List, 1) ++
+ lists_key_members_lists(Tail, List))
+ end.
+
+lists_key_members_lists_helper(Elem, List, N) when is_integer(Elem) ->
+ case List of
+ [] -> [];
+ [Head|Tail] ->
+ NewHead =
+ case Head =:= Elem of
+ true -> [N];
+ false -> []
+ end,
+ NewHead ++ lists_key_members_lists_helper(Elem, Tail, N + 1)
+ end;
+lists_key_members_lists_helper(_Elem, _List, _N) ->
+ [0].
+
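+%% Replaces the N-th element of List (1-based),
+%% e.g. lists_key_replace(2, [a, b, c], x) -> [a, x, c].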
+lists_key_replace(N, List, NewMember) ->
+ {Before, [_|After]} = lists:split(N - 1, List),
+ Before ++ [NewMember|After].
+
+lists_get(0, _List) -> ?no_label;
+lists_get(N, List) -> lists:nth(N, List).
+
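+%% Adds RaceCall to the dependency list only when it is the kind of
+%% dependent call that matches RaceWarnTag and its arguments may refer
+%% to the same name, table or record as the warning's arguments
+%% (compare_types/4).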
+refine_race(RaceCall, WarnVarArgs, RaceWarnTag, DependencyList, RaceVarMap) ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ case RaceCall of
+ #dep_call{call_name = ets_lookup} ->
+ DependencyList;
+ #dep_call{call_name = mnesia_dirty_read} ->
+ DependencyList;
+ #dep_call{call_name = whereis, args = VarArgs} ->
+ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag,
+ DependencyList, RaceVarMap)
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ case RaceCall of
+ #dep_call{call_name = whereis} ->
+ DependencyList;
+ #dep_call{call_name = mnesia_dirty_read} ->
+ DependencyList;
+ #dep_call{call_name = ets_lookup, args = VarArgs} ->
+ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag,
+ DependencyList, RaceVarMap)
+ end;
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ case RaceCall of
+ #dep_call{call_name = whereis} ->
+ DependencyList;
+ #dep_call{call_name = ets_lookup} ->
+ DependencyList;
+ #dep_call{call_name = mnesia_dirty_read, args = VarArgs} ->
+ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag,
+ DependencyList, RaceVarMap)
+ end
+ end.
+
+refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag, DependencyList,
+ RaceVarMap) ->
+ case compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) of
+ true -> [RaceCall|DependencyList];
+ false -> DependencyList
+ end.
+
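+%% Drops the current clause from both the race list and the remaining
+%% code, unwinding the enclosing case when its boundary is reached.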
+remove_clause(RaceList, CurrTuple, Code, NestingLevel) ->
+ NewRaceList = fixup_case_rest_paths(RaceList, 0),
+ {NewCurrTuple, NewCode} =
+ cleanup_clause_code(CurrTuple, Code, 0, NestingLevel),
+ ReturnTuple = {NewRaceList, NewCurrTuple, NewCode, NestingLevel},
+ case NewRaceList of
+ [beg_case|RTail] ->
+ case NewCode of
+ [#end_case{}|CTail] ->
+ remove_clause(RTail, NewCurrTuple, CTail, NestingLevel);
+ _Other -> ReturnTuple
+ end;
+ _Else -> ReturnTuple
+ end.
+
+remove_nonlocal_functions(Code, NestingLevel) ->
+ case Code of
+ [] -> [];
+ [H|T] ->
+ NewNL =
+ case H of
+ #curr_fun{status = in} ->
+ NestingLevel + 1;
+ #curr_fun{status = out} ->
+ NestingLevel - 1;
+ _Other ->
+ NestingLevel
+ end,
+ case NewNL =:= 0 of
+ true -> T;
+ false -> remove_nonlocal_functions(T, NewNL)
+ end
+ end.
+
+renew_curr_fun(CurrFun, Races) ->
+ Races#races{curr_fun = CurrFun}.
+
+renew_curr_fun_label(CurrFunLabel, Races) ->
+ Races#races{curr_fun_label = CurrFunLabel}.
+
+renew_race_list(RaceList, Races) ->
+ Races#races{race_list = RaceList}.
+
+renew_race_list_size(RaceListSize, Races) ->
+ Races#races{race_list_size = RaceListSize}.
+
+renew_race_tags(RaceTags, Races) ->
+ Races#races{race_tags = RaceTags}.
+
+renew_table(Table, Races) ->
+ Races#races{new_table = Table}.
+
+state__renew_curr_fun(CurrFun, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(renew_curr_fun(CurrFun, Races), State).
+
+state__renew_curr_fun_label(CurrFunLabel, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(
+ renew_curr_fun_label(CurrFunLabel, Races), State).
+
+state__renew_race_list(RaceList, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(renew_race_list(RaceList, Races), State).
+
+state__renew_race_tags(RaceTags, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(renew_race_tags(RaceTags, Races), State).
+
+state__renew_info(RaceList, RaceListSize, RaceTags, Table, State) ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_callgraph(
+ callgraph__renew_tables(Table, Callgraph),
+ dialyzer_dataflow:state__put_races(
+ renew_table(Table,
+ renew_race_list(RaceList,
+ renew_race_list_size(RaceListSize,
+ renew_race_tags(RaceTags, Races)))), State)).
+
+%%% ===========================================================================
+%%%
+%%% Variable and Type Utilities
+%%%
+%%% ===========================================================================
+
+any_args(StrList) ->
+ case StrList of
+ [] -> false;
+ [Head|Tail] ->
+ case string:rstr(Head, "()") of
+ 0 -> any_args(Tail);
+ _Other -> true
+ end
+ end.
+
+-spec bind_dict_vars(label(), label(), dict()) -> dict().
+
+bind_dict_vars(Key, Label, RaceVarMap) ->
+ case Key =:= Label of
+ true -> RaceVarMap;
+ false ->
+ case dict:find(Key, RaceVarMap) of
+ error -> dict:store(Key, [Label], RaceVarMap);
+ {ok, Labels} ->
+ case lists:member(Label, Labels) of
+ true -> RaceVarMap;
+ false -> dict:store(Key, [Label|Labels], RaceVarMap)
+ end
+ end
+ end.
+
+bind_dict_vars_list(Key, Labels, RaceVarMap) ->
+ case Labels of
+ [] -> RaceVarMap;
+ [Head|Tail] ->
+ bind_dict_vars_list(Key, Tail, bind_dict_vars(Key, Head, RaceVarMap))
+ end.
+
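+%% Compares the warning's arguments with those of an intervening
+%% ets:insert call: returns true when the insert refers to the same
+%% table and objects (so this path cannot race), otherwise the possibly
+%% updated warning arguments.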
+compare_ets_insert(OldWarnVarArgs, NewWarnVarArgs, RaceVarMap) ->
+ [Old1, Old2, Old3, Old4] = OldWarnVarArgs,
+ [New1, New2, New3, New4] = NewWarnVarArgs,
+ Bool =
+ case any_args(Old2) of
+ true -> compare_var_list(New1, Old1, RaceVarMap);
+ false ->
+ case any_args(New2) of
+ true -> compare_var_list(New1, Old1, RaceVarMap);
+ false -> compare_var_list(New1, Old1, RaceVarMap)
+ orelse (Old2 =:= New2)
+ end
+ end,
+ case Bool of
+ true ->
+ case any_args(Old4) of
+ true ->
+ case compare_list_vars(Old3, ets_list_args(New3), [], RaceVarMap) of
+ true -> true;
+ Args3 -> lists_key_replace(3, OldWarnVarArgs, Args3)
+ end;
+ false ->
+ case any_args(New4) of
+ true ->
+ case compare_list_vars(Old3, ets_list_args(New3), [],
+ RaceVarMap) of
+ true -> true;
+ Args3 -> lists_key_replace(3, OldWarnVarArgs, Args3)
+ end;
+ false ->
+ case compare_list_vars(Old3, ets_list_args(New3), [],
+ RaceVarMap) of
+ true -> true;
+ Args3 ->
+ lists_key_replace(4,
+ lists_key_replace(3, OldWarnVarArgs, Args3), Old4 -- New4)
+ end
+ end
+ end;
+ false -> OldWarnVarArgs
+ end.
+
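+%% Compares the first argument (and its types) of the warning with that
+%% of an intervening register or mnesia:dirty_write call: returns true
+%% when both refer to the same name or record, otherwise the possibly
+%% updated warning arguments.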
+compare_first_arg(OldWarnVarArgs, NewWarnVarArgs, RaceVarMap) ->
+ [Old1, Old2|_OldT] = OldWarnVarArgs,
+ [New1, New2|_NewT] = NewWarnVarArgs,
+ case any_args(Old2) of
+ true ->
+ case compare_var_list(New1, Old1, RaceVarMap) of
+ true -> true;
+ false -> OldWarnVarArgs
+ end;
+ false ->
+ case any_args(New2) of
+ true ->
+ case compare_var_list(New1, Old1, RaceVarMap) of
+ true -> true;
+ false -> OldWarnVarArgs
+ end;
+ false ->
+ case compare_var_list(New1, Old1, RaceVarMap) of
+ true -> true;
+ false -> lists_key_replace(2, OldWarnVarArgs, Old2 -- New2)
+ end
+ end
+ end.
+
+compare_argtypes(ArgTypes, WarnArgTypes) ->
+ lists:any(fun (X) -> lists:member(X, WarnArgTypes) end, ArgTypes).
+
+%% Compares the argument types of the two suspicious calls.
+compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [VA1, VA2] = VarArgs,
+ [WVA1, WVA2, _, _] = WarnVarArgs,
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [VA1, VA2, VA3, VA4] = VarArgs,
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ Bool =
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end,
+ Bool andalso
+ (case any_args(VA4) of
+ true ->
+ compare_var_list(VA3, WVA3, RaceVarMap);
+ false ->
+ case any_args(WVA4) of
+ true ->
+ compare_var_list(VA3, WVA3, RaceVarMap);
+ false ->
+ compare_var_list(VA3, WVA3, RaceVarMap)
+ orelse compare_argtypes(VA4, WVA4)
+ end
+ end);
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [VA1, VA2|_] = VarArgs, %% Two or four elements
+ [WVA1, WVA2|_] = WarnVarArgs,
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end
+ end.
+
+compare_list_vars(VarList1, VarList2, NewVarList1, RaceVarMap) ->
+ case VarList1 of
+ [] ->
+ case NewVarList1 of
+ [] -> true;
+ _Other -> NewVarList1
+ end;
+ [Head|Tail] ->
+ NewHead =
+ case compare_var_list(Head, VarList2, RaceVarMap) of
+ true -> [];
+ false -> [Head]
+ end,
+ compare_list_vars(Tail, VarList2, NewHead ++ NewVarList1, RaceVarMap)
+ end.
+
+compare_vars(Var1, Var2, RaceVarMap) when is_integer(Var1), is_integer(Var2) ->
+ Var1 =:= Var2 orelse
+ are_bound_labels(Var1, Var2, RaceVarMap) orelse
+ are_bound_labels(Var2, Var1, RaceVarMap);
+compare_vars(_Var1, _Var2, _RaceVarMap) ->
+ false.
+
+-spec compare_var_list(label_type(), [label_type()], dict()) -> boolean().
+
+compare_var_list(Var, VarList, RaceVarMap) ->
+ lists:any(fun (V) -> compare_vars(Var, V, RaceVarMap) end, VarList).
+
+ets_list_args(MaybeList) ->
+ case is_list(MaybeList) of
+ true -> [ets_tuple_args(T) || T <- MaybeList];
+ false -> [ets_tuple_args(MaybeList)]
+ end.
+
+ets_list_argtypes(ListStr) ->
+ ListStr1 = string:strip(ListStr, left, $[),
+ ListStr2 = string:strip(ListStr1, right, $]),
+ ListStr3 = string:strip(ListStr2, right, $.),
+ string:strip(ListStr3, right, $,).
+
+ets_tuple_args(MaybeTuple) ->
+ case is_tuple(MaybeTuple) of
+ true -> element(1, MaybeTuple);
+ false -> ?no_label
+ end.
+
+ets_tuple_argtypes2(TupleList, ElemList) ->
+ case TupleList of
+ [] -> ElemList;
+ [H|T] ->
+ ets_tuple_argtypes2(T,
+ ets_tuple_argtypes2_helper(H, [], 0) ++ ElemList)
+ end.
+
+ets_tuple_argtypes2_helper(TupleStr, ElemStr, NestingLevel) ->
+ case TupleStr of
+ [] -> [];
+ [H|T] ->
+ {NewElemStr, NewNestingLevel, Return} =
+ case H of
+ ${ when NestingLevel =:= 0 ->
+ {ElemStr, NestingLevel + 1, false};
+ ${ ->
+ {[H|ElemStr], NestingLevel + 1, false};
+ $[ ->
+ {[H|ElemStr], NestingLevel + 1, false};
+ $( ->
+ {[H|ElemStr], NestingLevel + 1, false};
+ $} ->
+ {[H|ElemStr], NestingLevel - 1, false};
+ $] ->
+ {[H|ElemStr], NestingLevel - 1, false};
+ $) ->
+ {[H|ElemStr], NestingLevel - 1, false};
+ $, when NestingLevel =:= 1 ->
+ {lists:reverse(ElemStr), NestingLevel, true};
+ _Other ->
+ {[H|ElemStr], NestingLevel, false}
+ end,
+ case Return of
+ true -> string:tokens(NewElemStr, " |");
+ false ->
+ ets_tuple_argtypes2_helper(T, NewElemStr, NewNestingLevel)
+ end
+ end.
+
+ets_tuple_argtypes1(Str, Tuple, TupleList, NestingLevel) ->
+ case Str of
+ [] -> TupleList;
+ [H|T] ->
+ {NewTuple, NewNestingLevel, Add} =
+ case H of
+ ${ ->
+ {[H|Tuple], NestingLevel + 1, false};
+ $} ->
+ case NestingLevel of
+ 1 ->
+ {[H|Tuple], NestingLevel - 1, true};
+ _Else ->
+ {[H|Tuple], NestingLevel - 1, false}
+ end;
+ _Other1 when NestingLevel =:= 0 ->
+ {Tuple, NestingLevel, false};
+ _Other2 ->
+ {[H|Tuple], NestingLevel, false}
+ end,
+ case Add of
+ true ->
+ ets_tuple_argtypes1(T, [],
+ [lists:reverse(NewTuple)|TupleList],
+ NewNestingLevel);
+ false ->
+ ets_tuple_argtypes1(T, NewTuple, TupleList, NewNestingLevel)
+ end
+ end.
+
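+%% Returns the internal representation of a call argument: variables are
+%% replaced by their labels, tuples and conses are traversed recursively,
+%% nil stays [], and anything else becomes ?no_label.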
+format_arg(Arg) ->
+ case cerl:type(Arg) of
+ var -> cerl_trees:get_label(Arg);
+ tuple -> list_to_tuple([format_arg(A) || A <- cerl:tuple_es(Arg)]);
+ cons -> [format_arg(cerl:cons_hd(Arg))|format_arg(cerl:cons_tl(Arg))];
+ alias -> format_arg(cerl:alias_var(Arg));
+ literal ->
+ case cerl:is_c_nil(Arg) of
+ true -> [];
+ false -> ?no_label
+ end;
+ _Other -> ?no_label
+ end.
+
+-spec format_args([core_vars()], [erl_types:erl_type()],
+ dialyzer_dataflow:state(), call()) ->
+ args().
+
+format_args([], [], _State, _Call) ->
+ [];
+format_args(ArgList, TypeList, CleanState, Call) ->
+ format_args_2(format_args_1(ArgList, TypeList, CleanState), Call).
+
+format_args_1([Arg], [Type], CleanState) ->
+ [format_arg(Arg), format_type(Type, CleanState)];
+format_args_1([Arg|Args], [Type|Types], CleanState) ->
+ List =
+ case cerl:is_literal(Arg) of
+ true -> [?no_label, format_cerl(Arg)];
+ false -> [format_arg(Arg), format_type(Type, CleanState)]
+ end,
+ List ++ format_args_1(Args, Types, CleanState).
+
+format_args_2(StrArgList, Call) ->
+ case Call of
+ whereis ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ register ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ ets_new ->
+ StrArgList1 = lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |")),
+ lists_key_replace(4, StrArgList1,
+ string:tokens(ets_list_argtypes(lists:nth(4, StrArgList1)), " |"));
+ ets_lookup ->
+ StrArgList1 = lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |")),
+ lists_key_replace(4, StrArgList1,
+ string:tokens(lists:nth(4, StrArgList1), " |"));
+ ets_insert ->
+ StrArgList1 = lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |")),
+ lists_key_replace(4, StrArgList1,
+ ets_tuple_argtypes2(
+ ets_tuple_argtypes1(lists:nth(4, StrArgList1), [], [], 0),
+ []));
+ mnesia_dirty_read1 ->
+ lists_key_replace(2, StrArgList,
+ [mnesia_tuple_argtypes(T) || T <- string:tokens(
+ lists:nth(2, StrArgList), " |")]);
+ mnesia_dirty_read2 ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ mnesia_dirty_write1 ->
+ lists_key_replace(2, StrArgList,
+ [mnesia_record_tab(R) || R <- string:tokens(
+ lists:nth(2, StrArgList), " |")]);
+ mnesia_dirty_write2 ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ function_call -> StrArgList
+ end.
+
+format_cerl(Tree) ->
+ cerl_prettypr:format(cerl:set_ann(Tree, []),
+ [{hook, dialyzer_utils:pp_hook()},
+ {noann, true},
+ {paper, 100000},
+ {ribbon, 100000}
+ ]).
+
+format_type(Type, State) ->
+ R = dialyzer_dataflow:state__get_records(State),
+ erl_types:t_to_string(Type, R).
+
+mnesia_record_tab(RecordStr) ->
+ case string:str(RecordStr, "#") =:= 1 of
+ true ->
+ "'" ++
+ string:sub_string(RecordStr, 2, string:str(RecordStr, "{") - 1) ++
+ "'";
+ false -> RecordStr
+ end.
+
+mnesia_tuple_argtypes(TupleStr) ->
+ TupleStr1 = string:strip(TupleStr, left, ${),
+ [TupleStr2|_T] = string:tokens(TupleStr1, " ,"),
+ lists:flatten(string:tokens(TupleStr2, " |")).
+
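+%% Binds (Op = bind) or unbinds (Op = unbind) the labels of the
+%% corresponding variables in the two Core Erlang terms, descending into
+%% tuples, conses, aliases and values.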
+-spec race_var_map(var_to_map(), cerl:cerl() | [cerl:cerl()], dict(), op()) -> dict().
+
+race_var_map(Vars1, Vars2, RaceVarMap, Op) ->
+ case Vars1 =:= ?no_arg of
+ true -> RaceVarMap;
+ false ->
+ case is_list(Vars1) andalso is_list(Vars2) of
+ true ->
+ case Vars1 of
+ [] -> RaceVarMap;
+ [AHead|ATail] ->
+ case Vars2 of
+ [] -> RaceVarMap;
+ [PHead|PTail] ->
+ NewRaceVarMap = race_var_map(AHead, PHead, RaceVarMap, Op),
+ race_var_map(ATail, PTail, NewRaceVarMap, Op)
+ end
+ end;
+ false ->
+ {NewVars1, NewVars2, Bool} =
+ case is_list(Vars1) of
+ true ->
+ case Vars1 of
+ [Var1] -> {Var1, Vars2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false ->
+ case is_list(Vars2) of
+ true ->
+ case Vars2 of
+ [Var2] -> {Vars1, Var2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false -> {Vars1, Vars2, true}
+ end
+ end,
+ case Bool of
+ true ->
+ case cerl:type(NewVars1) of
+ var ->
+ case cerl:type(NewVars2) of
+ var ->
+ ALabel = cerl_trees:get_label(NewVars1),
+ PLabel = cerl_trees:get_label(NewVars2),
+ case Op of
+ bind ->
+ TempRaceVarMap =
+ bind_dict_vars(ALabel, PLabel, RaceVarMap),
+ bind_dict_vars(PLabel, ALabel, TempRaceVarMap);
+ unbind ->
+ TempRaceVarMap =
+ unbind_dict_vars(ALabel, PLabel, RaceVarMap),
+ unbind_dict_vars(PLabel, ALabel, TempRaceVarMap)
+ end;
+ alias ->
+ race_var_map(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap, Op);
+ values ->
+ race_var_map(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap, Op);
+ _Other -> RaceVarMap
+ end;
+ tuple ->
+ case cerl:type(NewVars2) of
+ tuple ->
+ race_var_map(cerl:tuple_es(NewVars1),
+ cerl:tuple_es(NewVars2), RaceVarMap, Op);
+ alias ->
+ race_var_map(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap, Op);
+ values ->
+ race_var_map(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap, Op);
+ _Other -> RaceVarMap
+ end;
+ cons ->
+ case cerl:type(NewVars2) of
+ cons ->
+ NewRaceVarMap = race_var_map(cerl:cons_hd(NewVars1),
+ cerl:cons_hd(NewVars2), RaceVarMap, Op),
+ race_var_map(cerl:cons_tl(NewVars1),
+ cerl:cons_tl(NewVars2), NewRaceVarMap, Op);
+ alias ->
+ race_var_map(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap, Op);
+ values ->
+ race_var_map(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap, Op);
+ _Other -> RaceVarMap
+ end;
+ alias ->
+ case cerl:type(NewVars2) of
+ alias ->
+ race_var_map(cerl:alias_var(NewVars1),
+ cerl:alias_var(NewVars2), RaceVarMap, Op);
+ _Other ->
+ race_var_map(cerl:alias_var(NewVars1),
+ NewVars2, RaceVarMap, Op)
+ end;
+ values ->
+ case cerl:type(NewVars2) of
+ values ->
+ race_var_map(cerl:values_es(NewVars1),
+ cerl:values_es(NewVars2), RaceVarMap, Op);
+ _Other ->
+ race_var_map(cerl:values_es(NewVars1),
+ NewVars2, RaceVarMap, Op)
+ end;
+ _Other -> RaceVarMap
+ end;
+ false -> RaceVarMap
+ end
+ end
+ end.
+
+race_var_map_clauses(Clauses, RaceVarMap) ->
+ case Clauses of
+ [] -> RaceVarMap;
+ [#end_clause{arg = Arg, pats = Pats, guard = Guard}|T] ->
+ {RaceVarMap1, _RemoveClause} =
+ race_var_map_guard(Arg, Pats, Guard, RaceVarMap, bind),
+ race_var_map_clauses(T, RaceVarMap1)
+ end.
+
+race_var_map_guard(Arg, Pats, Guard, RaceVarMap, Op) ->
+ {NewRaceVarMap, RemoveClause} =
+ case cerl:type(Guard) of
+ call ->
+ CallName = cerl:call_name(Guard),
+ case cerl:is_literal(CallName) of
+ true ->
+ case cerl:concrete(CallName) of
+ '=:=' ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ {race_var_map(Arg1, Arg2, RaceVarMap, Op), false};
+ '==' ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ {race_var_map(Arg1, Arg2, RaceVarMap, Op), false};
+ '=/=' ->
+ case Op of
+ bind ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ {RaceVarMap, are_bound_vars(Arg1, Arg2, RaceVarMap)};
+ unbind -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end;
+ false -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end,
+ {RaceVarMap1, RemoveClause1} =
+ race_var_map_guard_helper1(Arg, Pats,
+ race_var_map(Arg, Pats, NewRaceVarMap, Op), Op),
+ {RaceVarMap1, RemoveClause orelse RemoveClause1}.
+
+race_var_map_guard_helper1(Arg, Pats, RaceVarMap, Op) ->
+ case Arg =:= ?no_arg of
+ true -> {RaceVarMap, false};
+ false ->
+ case cerl:type(Arg) of
+ call ->
+ case Pats of
+ [NewPat] ->
+ ModName = cerl:call_module(Arg),
+ CallName = cerl:call_name(Arg),
+ case cerl:is_literal(ModName) andalso
+ cerl:is_literal(CallName) of
+ true ->
+ case {cerl:concrete(ModName),
+ cerl:concrete(CallName)} of
+ {erlang, '=:='} ->
+ race_var_map_guard_helper2(Arg, NewPat, true,
+ RaceVarMap, Op);
+ {erlang, '=='} ->
+ race_var_map_guard_helper2(Arg, NewPat, true,
+ RaceVarMap, Op);
+ {erlang, '=/='} ->
+ race_var_map_guard_helper2(Arg, NewPat, false,
+ RaceVarMap, Op);
+ _Else -> {RaceVarMap, false}
+ end;
+ false -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end
+ end.
+
+race_var_map_guard_helper2(Arg, Pat, Bool, RaceVarMap, Op) ->
+ case cerl:type(Pat) of
+ literal ->
+ [Arg1, Arg2] = cerl:call_args(Arg),
+ case cerl:concrete(Pat) of
+ Bool ->
+ {race_var_map(Arg1, Arg2, RaceVarMap, Op), false};
+ _Else ->
+ case Op of
+ bind ->
+ {RaceVarMap, are_bound_vars(Arg1, Arg2, RaceVarMap)};
+ unbind -> {RaceVarMap, false}
+ end
+ end;
+ _Else -> {RaceVarMap, false}
+ end.
+
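+%% Removes Var2, and any label bound to Var2, from the bindings of Var1
+%% in the variable map.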
+unbind_dict_vars(Var, Var, RaceVarMap) ->
+ RaceVarMap;
+unbind_dict_vars(Var1, Var2, RaceVarMap) ->
+ case dict:find(Var1, RaceVarMap) of
+ error -> RaceVarMap;
+ {ok, Labels} ->
+ case Labels of
+ [] -> dict:erase(Var1, RaceVarMap);
+ _Else ->
+ case lists:member(Var2, Labels) of
+ true ->
+ unbind_dict_vars(Var1, Var2,
+ bind_dict_vars_list(Var1, Labels -- [Var2],
+ dict:erase(Var1, RaceVarMap)));
+ false ->
+ unbind_dict_vars_helper(Labels, Var1, Var2, RaceVarMap)
+ end
+ end
+ end.
+
+unbind_dict_vars_helper(Labels, Key, CompLabel, RaceVarMap) ->
+ case dict:size(RaceVarMap) of
+ 0 -> RaceVarMap;
+ _ ->
+ case Labels of
+ [] -> RaceVarMap;
+ [Head|Tail] ->
+ NewRaceVarMap =
+ case are_bound_labels(Head, CompLabel, RaceVarMap) orelse
+ are_bound_labels(CompLabel, Head, RaceVarMap) of
+ true ->
+ bind_dict_vars_list(Key, Labels -- [Head],
+ dict:erase(Key, RaceVarMap));
+ false -> RaceVarMap
+ end,
+ unbind_dict_vars_helper(Tail, Key, CompLabel, NewRaceVarMap)
+ end
+ end.
+
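+%% Maps the warning's argument labels through the function boundary:
+%% each label that occurs among FunDefArgs is replaced by the label at
+%% the same position in FunCallArgs.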
+var_analysis(FunDefArgs, FunCallArgs, WarnVarArgs, RaceWarnTag) ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ ArgNos = lists_key_members_lists(WVA1, FunDefArgs),
+ [[lists_get(N, FunCallArgs) || N <- ArgNos], WVA2, WVA3, WVA4];
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ ArgNos1 = lists_key_members_lists(WVA1, FunDefArgs),
+ ArgNos2 = lists_key_members_lists(WVA3, FunDefArgs),
+ [[lists_get(N1, FunCallArgs) || N1 <- ArgNos1], WVA2,
+ [lists_get(N2, FunCallArgs) || N2 <- ArgNos2], WVA4];
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [WVA1, WVA2|T] = WarnVarArgs,
+ ArgNos = lists_key_members_lists(WVA1, FunDefArgs),
+ [[lists_get(N, FunCallArgs) || N <- ArgNos], WVA2|T]
+ end.
+
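+%% Like var_analysis/4, but uses the types of the actual call: the
+%% warning's variable labels are replaced by all labels bound to them
+%% and the recorded argument types by the types of the matching actual
+%% arguments.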
+var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
+ RaceVarMap, CleanState) ->
+ FunVarArgs = format_args(FunDefArgs, FunCallTypes, CleanState,
+ function_call),
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ Vars = find_all_bound_vars(WVA1, RaceVarMap),
+ case lists_key_member_lists(Vars, FunVarArgs) of
+ 0 -> [Vars, WVA2, WVA3, WVA4];
+ N when is_integer(N) ->
+ NewWVA2 = string:tokens(lists:nth(N + 1, FunVarArgs), " |"),
+ [Vars, NewWVA2, WVA3, WVA4]
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ Vars1 = find_all_bound_vars(WVA1, RaceVarMap),
+ FirstVarArg =
+ case lists_key_member_lists(Vars1, FunVarArgs) of
+ 0 -> [Vars1, WVA2];
+ N1 when is_integer(N1) ->
+ NewWVA2 = string:tokens(lists:nth(N1 + 1, FunVarArgs), " |"),
+ [Vars1, NewWVA2]
+ end,
+ Vars2 =
+ lists:flatten(
+ [find_all_bound_vars(A, RaceVarMap) || A <- ets_list_args(WVA3)]),
+ case lists_key_member_lists(Vars2, FunVarArgs) of
+ 0 -> FirstVarArg ++ [Vars2, WVA4];
+ N2 when is_integer(N2) ->
+ NewWVA4 =
+ ets_tuple_argtypes2(
+ ets_tuple_argtypes1(lists:nth(N2 + 1, FunVarArgs), [], [], 0),
+ []),
+ FirstVarArg ++ [Vars2, NewWVA4]
+ end;
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [WVA1, WVA2|T] = WarnVarArgs,
+ Arity =
+ case T of
+ [] -> 1;
+ _Else -> 2
+ end,
+ Vars = find_all_bound_vars(WVA1, RaceVarMap),
+ case lists_key_member_lists(Vars, FunVarArgs) of
+ 0 -> [Vars, WVA2|T];
+ N when is_integer(N) ->
+ NewWVA2 =
+ case Arity of
+ 1 ->
+ [mnesia_record_tab(R) || R <- string:tokens(
+ lists:nth(2, FunVarArgs), " |")];
+ 2 ->
+ string:tokens(lists:nth(N + 1, FunVarArgs), " |")
+ end,
+ [Vars, NewWVA2|T]
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Warning Format Utilities
+%%%
+%%% ===========================================================================
+
+add_race_warning(Warn, #races{race_warnings = Warns} = Races) ->
+ Races#races{race_warnings = [Warn|Warns]}.
+
+get_race_warn(Fun, Args, ArgTypes, DepList, State) ->
+ {M, F, _A} = Fun,
+ case DepList of
+ [] -> {State, no_race};
+ _Other ->
+ {State, {race_condition, [M, F, Args, ArgTypes, State, DepList]}}
+ end.
+
+-spec get_race_warnings(races(), dialyzer_dataflow:state()) ->
+ {races(), dialyzer_dataflow:state()}.
+
+get_race_warnings(#races{race_warnings = RaceWarnings}, State) ->
+ get_race_warnings_helper(RaceWarnings, State).
+
+get_race_warnings_helper(Warnings, State) ->
+ case Warnings of
+ [] ->
+ {dialyzer_dataflow:state__get_races(State), State};
+ [H|T] ->
+ {RaceWarnTag, FileLine, {race_condition, [M, F, A, AT, S, DepList]}} = H,
+ Reason =
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ get_reason(lists:keysort(7, DepList),
+ "might fail due to a possible race condition "
+ "caused by its combination with ");
+ ?WARN_ETS_LOOKUP_INSERT ->
+ get_reason(lists:keysort(7, DepList),
+ "might have an unintended effect due to " ++
+ "a possible race condition " ++
+ "caused by its combination with ");
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ get_reason(lists:keysort(7, DepList),
+ "might have an unintended effect due to " ++
+ "a possible race condition " ++
+ "caused by its combination with ")
+ end,
+ W =
+ {?WARN_RACE_CONDITION, FileLine,
+ {race_condition,
+ [M, F, dialyzer_dataflow:format_args(A, AT, S), Reason]}},
+ get_race_warnings_helper(T,
+ dialyzer_dataflow:state__add_warning(W, State))
+ end.
+
+get_reason(DependencyList, Reason) ->
+ case DependencyList of
+ [] -> "";
+ [#dep_call{call_name = Call, arg_types = ArgTypes, vars = Args,
+ state = State, file_line = {File, Line}}|T] ->
+ R =
+ Reason ++
+ case Call of
+ whereis -> "the erlang:whereis";
+ ets_lookup -> "the ets:lookup";
+ mnesia_dirty_read -> "the mnesia:dirty_read"
+ end ++
+ dialyzer_dataflow:format_args(Args, ArgTypes, State) ++
+ " call in " ++
+ filename:basename(File) ++
+ " on line " ++
+ lists:flatten(io_lib:write(Line)),
+ case T of
+ [] -> R;
+ _ -> get_reason(T, R ++ ", ")
+ end
+ end.
+
+state__add_race_warning(State, RaceWarn, RaceWarnTag, FileLine) ->
+ case RaceWarn of
+ no_race -> State;
+ _Else ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ Warn = {RaceWarnTag, FileLine, RaceWarn},
+ dialyzer_dataflow:state__put_races(add_race_warning(Warn, Races), State)
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Record Interfaces
+%%%
+%%% ===========================================================================
+
+-spec beg_clause_new(var_to_map(), var_to_map(), cerl:cerl()) ->
+ #beg_clause{}.
+
+beg_clause_new(Arg, Pats, Guard) ->
+ #beg_clause{arg = Arg, pats = Pats, guard = Guard}.
+
+-spec cleanup(races()) -> races().
+
+cleanup(#races{race_list = RaceList}) ->
+ #races{race_list = RaceList}.
+
+-spec end_case_new([#end_clause{}]) -> #end_case{}.
+
+end_case_new(Clauses) ->
+ #end_case{clauses = Clauses}.
+
+-spec end_clause_new(var_to_map(), var_to_map(), cerl:cerl()) ->
+ #end_clause{}.
+
+end_clause_new(Arg, Pats, Guard) ->
+ #end_clause{arg = Arg, pats = Pats, guard = Guard}.
+
+-spec get_curr_fun(races()) -> mfa_or_funlbl().
+
+get_curr_fun(#races{curr_fun = CurrFun}) ->
+ CurrFun.
+
+-spec get_curr_fun_args(races()) -> core_args().
+
+get_curr_fun_args(#races{curr_fun_args = CurrFunArgs}) ->
+ CurrFunArgs.
+
+-spec get_new_table(races()) -> table().
+
+get_new_table(#races{new_table = Table}) ->
+ Table.
+
+-spec get_race_analysis(races()) -> boolean().
+
+get_race_analysis(#races{race_analysis = RaceAnalysis}) ->
+ RaceAnalysis.
+
+-spec get_race_list(races()) -> code().
+
+get_race_list(#races{race_list = RaceList}) ->
+ RaceList.
+
+-spec get_race_list_size(races()) -> non_neg_integer().
+
+get_race_list_size(#races{race_list_size = RaceListSize}) ->
+ RaceListSize.
+
+-spec let_tag_new(var_to_map(), var_to_map()) -> #let_tag{}.
+
+let_tag_new(Var, Arg) ->
+ #let_tag{var = Var, arg = Arg}.
+
+-spec new() -> races().
+
+new() -> #races{}.
+
+-spec put_curr_fun(mfa_or_funlbl(), label(), races()) ->
+ races().
+
+put_curr_fun(CurrFun, CurrFunLabel, Races) ->
+ Races#races{curr_fun = CurrFun,
+ curr_fun_label = CurrFunLabel,
+ curr_fun_args = empty}.
+
+-spec put_fun_args(core_args(), races()) -> races().
+
+put_fun_args(Args, #races{curr_fun_args = CurrFunArgs} = Races) ->
+ case CurrFunArgs of
+ empty -> Races#races{curr_fun_args = Args};
+ _Other -> Races
+ end.
+
+-spec put_race_analysis(boolean(), races()) ->
+ races().
+
+put_race_analysis(Analysis, Races) ->
+ Races#races{race_analysis = Analysis}.
+
+-spec put_race_list(code(), non_neg_integer(), races()) ->
+ races().
+
+put_race_list(RaceList, RaceListSize, Races) ->
+ Races#races{race_list = RaceList,
+ race_list_size = RaceListSize}.
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
new file mode 100644
index 0000000000..dd8480f1f2
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -0,0 +1,540 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_succ_typings.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 11 Sep 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_succ_typings).
+
+-export([analyze_callgraph/3,
+ analyze_callgraph/4,
+ get_warnings/6]).
+
+%% These are only intended as debug functions.
+-export([doit/1,
+ get_top_level_signatures/3]).
+
+%%-define(DEBUG, true).
+%%-define(DEBUG_PP, true).
+
+-ifdef(DEBUG).
+-define(debug(X__, Y__), io:format(X__, Y__)).
+-else.
+-define(debug(X__, Y__), ok).
+-endif.
+
+-define(TYPE_LIMIT, 4).
+
+%%--------------------------------------------------------------------
+
+-include("dialyzer.hrl").
+
+%%--------------------------------------------------------------------
+%% State record -- local to this module
+
+-type parent() :: 'none' | pid().
+
+-record(st, {callgraph :: dialyzer_callgraph:callgraph(),
+ codeserver :: dialyzer_codeserver:codeserver(),
+ no_warn_unused :: set(),
+ parent = none :: parent(),
+ plt :: dialyzer_plt:plt()}).
+
+%%--------------------------------------------------------------------
+
+-spec analyze_callgraph(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
+ dialyzer_codeserver:codeserver()) ->
+ dialyzer_plt:plt().
+
+analyze_callgraph(Callgraph, Plt, Codeserver) ->
+ analyze_callgraph(Callgraph, Plt, Codeserver, none).
+
+-spec analyze_callgraph(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
+ dialyzer_codeserver:codeserver(), parent()) ->
+ dialyzer_plt:plt().
+
+analyze_callgraph(Callgraph, Plt, Codeserver, Parent) ->
+ State = #st{callgraph = Callgraph, plt = Plt,
+ codeserver = Codeserver, parent = Parent},
+ NewState = get_refined_success_typings(State),
+ NewState#st.plt.
+
+%%--------------------------------------------------------------------
+
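+%% Alternates between computing success typings (find_succ_typings/1)
+%% and refining them with the dataflow analysis (refine_succ_typings/2)
+%% until a fixpoint is reached.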
+get_refined_success_typings(State) ->
+ case find_succ_typings(State) of
+ {fixpoint, State1} -> State1;
+ {not_fixpoint, NotFixpoint1, State1} ->
+ Callgraph = State1#st.callgraph,
+ NotFixpoint2 = [lookup_name(F, Callgraph) || F <- NotFixpoint1],
+ ModulePostorder =
+ dialyzer_callgraph:module_postorder_from_funs(NotFixpoint2, Callgraph),
+ case refine_succ_typings(ModulePostorder, State1) of
+ {fixpoint, State2} ->
+ State2;
+ {not_fixpoint, NotFixpoint3, State2} ->
+ Callgraph1 = State2#st.callgraph,
+ %% Need to reset the callgraph.
+ NotFixpoint4 = [lookup_name(F, Callgraph1) || F <- NotFixpoint3],
+ Callgraph2 = dialyzer_callgraph:reset_from_funs(NotFixpoint4,
+ Callgraph1),
+ get_refined_success_typings(State2#st{callgraph = Callgraph2})
+ end
+ end.
+
+-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
+-spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
+ doc_plt(), dialyzer_codeserver:codeserver(), set(),
+ pid()) ->
+ {[dial_warning()], dialyzer_plt:plt(), doc_plt()}.
+
+get_warnings(Callgraph, Plt, DocPlt, Codeserver, NoWarnUnused, Parent) ->
+ InitState = #st{callgraph = Callgraph, codeserver = Codeserver,
+ no_warn_unused = NoWarnUnused, parent = Parent, plt = Plt},
+ NewState = get_refined_success_typings(InitState),
+ Mods = dialyzer_callgraph:modules(NewState#st.callgraph),
+ CWarns = dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver,
+ NewState#st.plt),
+ get_warnings_from_modules(Mods, NewState, DocPlt, CWarns).
+
+get_warnings_from_modules([M|Ms], State, DocPlt, Acc) when is_atom(M) ->
+ send_log(State#st.parent, io_lib:format("Getting warnings for ~w\n", [M])),
+ #st{callgraph = Callgraph, codeserver = Codeserver,
+ no_warn_unused = NoWarnUnused, plt = Plt} = State,
+ ModCode = dialyzer_codeserver:lookup_mod_code(M, Codeserver),
+ Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver),
+ Contracts = dialyzer_codeserver:lookup_mod_contracts(M, Codeserver),
+ AllFuns = collect_fun_info([ModCode]),
+ %% Check if there are contracts for functions that do not exist
+ Warnings1 =
+ dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
+ {Warnings2, FunTypes, RaceCode, PublicTables, NamedTables} =
+ dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Records, NoWarnUnused),
+ NewDocPlt = insert_into_doc_plt(FunTypes, Callgraph, DocPlt),
+ NewCallgraph =
+ dialyzer_callgraph:renew_race_info(Callgraph, RaceCode, PublicTables,
+ NamedTables),
+ State1 = st__renew_state_calls(NewCallgraph, State),
+ get_warnings_from_modules(Ms, State1, NewDocPlt, [Warnings1,Warnings2|Acc]);
+get_warnings_from_modules([], #st{plt = Plt}, DocPlt, Acc) ->
+ {lists:flatten(Acc), Plt, DocPlt}.
+
+refine_succ_typings(ModulePostorder, State) ->
+ ?debug("Module postorder: ~p\n", [ModulePostorder]),
+ refine_succ_typings(ModulePostorder, State, []).
+
+refine_succ_typings([SCC|SCCs], State, Fixpoint) ->
+ Msg = io_lib:format("Dataflow of one SCC: ~w\n", [SCC]),
+ send_log(State#st.parent, Msg),
+ ?debug("~s\n", [Msg]),
+ {NewState, FixpointFromScc} =
+ case SCC of
+ [M] -> refine_one_module(M, State);
+ [_|_] -> refine_one_scc(SCC, State)
+ end,
+ NewFixpoint = ordsets:union(Fixpoint, FixpointFromScc),
+ refine_succ_typings(SCCs, NewState, NewFixpoint);
+refine_succ_typings([], State, Fixpoint) ->
+ case Fixpoint =:= [] of
+ true -> {fixpoint, State};
+ false -> {not_fixpoint, Fixpoint, State}
+ end.
+
+-spec refine_one_module(module(), #st{}) -> {#st{}, [label()]}. % ordset
+
+refine_one_module(M, State) ->
+ #st{callgraph = Callgraph, codeserver = CodeServer, plt = PLT} = State,
+ ModCode = dialyzer_codeserver:lookup_mod_code(M, CodeServer),
+ AllFuns = collect_fun_info([ModCode]),
+ FunTypes = get_fun_types_from_plt(AllFuns, State),
+ Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer),
+ {NewFunTypes, RaceCode, PublicTables, NamedTables} =
+ dialyzer_dataflow:get_fun_types(ModCode, PLT, Callgraph, Records),
+ NewCallgraph =
+ dialyzer_callgraph:renew_race_info(Callgraph, RaceCode, PublicTables,
+ NamedTables),
+ case reached_fixpoint(FunTypes, NewFunTypes) of
+ true ->
+ State1 = st__renew_state_calls(NewCallgraph, State),
+ {State1, ordsets:new()};
+ {false, NotFixpoint} ->
+ ?debug("Not fixpoint\n", []),
+ NewState = insert_into_plt(dict:from_list(NotFixpoint), State),
+ NewState1 = st__renew_state_calls(NewCallgraph, NewState),
+ {NewState1, ordsets:from_list([FunLbl || {FunLbl,_Type} <- NotFixpoint])}
+ end.
+
+st__renew_state_calls(Callgraph, State) ->
+ State#st{callgraph = Callgraph}.
+
+refine_one_scc(SCC, State) ->
+ refine_one_scc(SCC, State, []).
+
+refine_one_scc(SCC, State, AccFixpoint) ->
+ {NewState, FixpointFromScc} = refine_mods_in_scc(SCC, State, []),
+ case FixpointFromScc =:= [] of
+ true -> {NewState, AccFixpoint};
+ false ->
+ NewAccFixpoint = ordsets:union(AccFixpoint, FixpointFromScc),
+ refine_one_scc(SCC, NewState, NewAccFixpoint)
+ end.
+
+refine_mods_in_scc([Mod|Mods], State, Fixpoint) ->
+ {NewState, FixpointFromModule} = refine_one_module(Mod, State),
+ NewFixpoint = ordsets:union(FixpointFromModule, Fixpoint),
+ refine_mods_in_scc(Mods, NewState, NewFixpoint);
+refine_mods_in_scc([], State, Fixpoint) ->
+ {State, Fixpoint}.
+
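+%% Compares two dicts of fun types after limiting their depth to
+%% ?TYPE_LIMIT; in non-strict mode a function is at fixpoint when its
+%% old type is a subtype of the new one, in strict mode the types must
+%% be equal.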
+reached_fixpoint(OldTypes, NewTypes) ->
+ reached_fixpoint(OldTypes, NewTypes, false).
+
+reached_fixpoint_strict(OldTypes, NewTypes) ->
+ case reached_fixpoint(OldTypes, NewTypes, true) of
+ true -> true;
+ {false, _} -> false
+ end.
+
+reached_fixpoint(OldTypes0, NewTypes0, Strict) ->
+ MapFun = fun(_Key, Type) ->
+ case is_failed_or_not_called_fun(Type) of
+ true -> failed_fun;
+ false -> erl_types:t_limit(Type, ?TYPE_LIMIT)
+ end
+ end,
+ OldTypes = dict:map(MapFun, OldTypes0),
+ NewTypes = dict:map(MapFun, NewTypes0),
+ compare_types(OldTypes, NewTypes, Strict).
+
+is_failed_or_not_called_fun(Type) ->
+ erl_types:any_none([erl_types:t_fun_range(Type)|erl_types:t_fun_args(Type)]).
+
+compare_types(Dict1, Dict2, Strict) ->
+ List1 = lists:keysort(1, dict:to_list(Dict1)),
+ List2 = lists:keysort(1, dict:to_list(Dict2)),
+ compare_types_1(List1, List2, Strict, []).
+
+compare_types_1([{X, _Type1}|Left1], [{X, failed_fun}|Left2],
+ Strict, NotFixpoint) ->
+ compare_types_1(Left1, Left2, Strict, NotFixpoint);
+compare_types_1([{X, failed_fun}|Left1], [{X, _Type2}|Left2],
+ Strict, NotFixpoint) ->
+ compare_types_1(Left1, Left2, Strict, NotFixpoint);
+compare_types_1([{X, Type1}|Left1], [{X, Type2}|Left2], Strict, NotFixpoint) ->
+ Res = case Strict of
+ true -> erl_types:t_is_equal(Type1, Type2);
+ false -> erl_types:t_is_subtype(Type1, Type2)
+ end,
+ case Res of
+ true -> compare_types_1(Left1, Left2, Strict, NotFixpoint);
+ false ->
+ ?debug("Failed fixpoint for ~w: ~s =/= ~s\n",
+ [X, erl_types:t_to_string(Type1), erl_types:t_to_string(Type2)]),
+ compare_types_1(Left1, Left2, Strict, [{X, Type2}|NotFixpoint])
+ end;
+compare_types_1([_|Left1], List2, Strict, NotFixpoint) ->
+ %% If the function was not called.
+ compare_types_1(Left1, List2, Strict, NotFixpoint);
+compare_types_1([], [], _Strict, NotFixpoint) ->
+ case NotFixpoint =:= [] of
+ true -> true;
+ false -> {false, NotFixpoint}
+ end.
+
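+%% Takes the strongly connected components of the callgraph one at a
+%% time, runs the typesig analysis on each, and collects the functions
+%% that have not yet reached a fixpoint.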
+find_succ_typings(State) ->
+ find_succ_typings(State, []).
+
+find_succ_typings(#st{callgraph = Callgraph, parent = Parent} = State,
+ NotFixpoint) ->
+ case dialyzer_callgraph:take_scc(Callgraph) of
+ {ok, SCC, NewCallgraph} ->
+ Msg = io_lib:format("Typesig analysis for SCC: ~w\n", [format_scc(SCC)]),
+ ?debug("~s", [Msg]),
+ send_log(Parent, Msg),
+ {NewState, NewNotFixpoint1} =
+ analyze_scc(SCC, State#st{callgraph = NewCallgraph}),
+ NewNotFixpoint2 = ordsets:union(NewNotFixpoint1, NotFixpoint),
+ find_succ_typings(NewState, NewNotFixpoint2);
+ none ->
+ ?debug("==================== Typesig done ====================\n\n", []),
+ case NotFixpoint =:= [] of
+ true -> {fixpoint, State};
+ false -> {not_fixpoint, NotFixpoint, State}
+ end
+ end.
+
+analyze_scc(SCC, #st{codeserver = Codeserver} = State) ->
+ SCC_Info = [{MFA,
+ dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ dialyzer_codeserver:lookup_mod_records(M, Codeserver)}
+ || {M, _, _} = MFA <- SCC],
+ Contracts1 = [{MFA, dialyzer_codeserver:lookup_mfa_contract(MFA, Codeserver)}
+ || {_, _, _} = MFA <- SCC],
+ Contracts2 = [{MFA, Contract} || {MFA, {ok, Contract}} <- Contracts1],
+ Contracts3 = orddict:from_list(Contracts2),
+ {SuccTypes, PltContracts, NotFixpoint} =
+ find_succ_types_for_scc(SCC_Info, Contracts3, State),
+ State1 = insert_into_plt(SuccTypes, State),
+ ContrPlt = dialyzer_plt:insert_contract_list(State1#st.plt, PltContracts),
+ {State1#st{plt = ContrPlt}, NotFixpoint}.
+
+find_succ_types_for_scc(SCC_Info, Contracts,
+ #st{codeserver = Codeserver,
+ callgraph = Callgraph, plt = Plt} = State) ->
+ %% Assume that the PLT contains the current propagated types
+ AllFuns = collect_fun_info([Fun || {_MFA, {_Var, Fun}, _Rec} <- SCC_Info]),
+ PropTypes = get_fun_types_from_plt(AllFuns, State),
+ MFAs = [MFA || {MFA, {_Var, _Fun}, _Rec} <- SCC_Info],
+ NextLabel = dialyzer_codeserver:get_next_core_label(Codeserver),
+ Plt1 = dialyzer_plt:delete_contract_list(Plt, MFAs),
+ FunTypes = dialyzer_typesig:analyze_scc(SCC_Info, NextLabel,
+ Callgraph, Plt1, PropTypes),
+ AllFunSet = sets:from_list([X || {X, _} <- AllFuns]),
+ FilteredFunTypes = dict:filter(fun(X, _) ->
+ sets:is_element(X, AllFunSet)
+ end, FunTypes),
+ %% Check contracts
+ PltContracts = dialyzer_contracts:check_contracts(Contracts, Callgraph,
+ FilteredFunTypes),
+ ContractFixpoint =
+ lists:all(fun({MFA, _C}) ->
+ %% Check the non-deleted PLT
+ case dialyzer_plt:lookup_contract(Plt, MFA) of
+ none -> false;
+ {value, _} -> true
+ end
+ end, PltContracts),
+ case (ContractFixpoint andalso
+ reached_fixpoint_strict(PropTypes, FilteredFunTypes)) of
+ true ->
+ {FilteredFunTypes, PltContracts, []};
+ false ->
+ ?debug("Not fixpoint for: ~w\n", [AllFuns]),
+ {FilteredFunTypes, PltContracts,
+ ordsets:from_list([Fun || {Fun, _Arity} <- AllFuns])}
+ end.
+
+get_fun_types_from_plt(FunList, State) ->
+ get_fun_types_from_plt(FunList, State, dict:new()).
+
+get_fun_types_from_plt([{FunLabel, Arity}|Left], State, Map) ->
+ Type = lookup_fun_type(FunLabel, Arity, State),
+ get_fun_types_from_plt(Left, State, dict:store(FunLabel, Type, Map));
+get_fun_types_from_plt([], _State, Map) ->
+ Map.
+
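+%% Returns the {label, arity} pairs of all fun expressions in the given
+%% Core Erlang trees.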
+collect_fun_info(Trees) ->
+ collect_fun_info(Trees, []).
+
+collect_fun_info([Tree|Trees], List) ->
+ Fun = fun(SubTree, Acc) ->
+ case cerl:is_c_fun(SubTree) of
+ true ->
+ [{cerl_trees:get_label(SubTree), cerl:fun_arity(SubTree)}|Acc];
+ false -> Acc
+ end
+ end,
+ collect_fun_info(Trees, cerl_trees:fold(Fun, List, Tree));
+collect_fun_info([], List) ->
+ List.
+
+lookup_fun_type(Label, Arity, #st{callgraph = Callgraph, plt = Plt}) ->
+ ID = lookup_name(Label, Callgraph),
+ case dialyzer_plt:lookup(Plt, ID) of
+ none -> erl_types:t_fun(Arity, erl_types:t_any());
+ {value, {RetT, ArgT}} -> erl_types:t_fun(ArgT, RetT)
+ end.
+
+insert_into_doc_plt(_FunTypes, _Callgraph, undefined) ->
+ undefined;
+insert_into_doc_plt(FunTypes, Callgraph, DocPlt) ->
+ SuccTypes = format_succ_types(FunTypes, Callgraph),
+ dialyzer_plt:insert_list(DocPlt, SuccTypes).
+
+insert_into_plt(SuccTypes0, #st{callgraph = Callgraph, plt = Plt} = State) ->
+ SuccTypes = format_succ_types(SuccTypes0, Callgraph),
+ debug_pp_succ_typings(SuccTypes),
+ State#st{plt = dialyzer_plt:insert_list(Plt, SuccTypes)}.
+
+format_succ_types(SuccTypes, Callgraph) ->
+ format_succ_types(dict:to_list(SuccTypes), Callgraph, []).
+
+format_succ_types([{Label, Type0}|Left], Callgraph, Acc) ->
+ Type = erl_types:t_limit(Type0, ?TYPE_LIMIT+1),
+ Id = lookup_name(Label, Callgraph),
+ NewTuple = {Id, {erl_types:t_fun_range(Type), erl_types:t_fun_args(Type)}},
+ format_succ_types(Left, Callgraph, [NewTuple|Acc]);
+format_succ_types([], _Callgraph, Acc) ->
+ Acc.
+
+-ifdef(DEBUG).
+debug_pp_succ_typings(SuccTypes) ->
+ ?debug("Succ typings:\n", []),
+ [?debug(" ~w :: ~s\n",
+ [MFA, erl_types:t_to_string(erl_types:t_fun(ArgT, RetT))])
+ || {MFA, {RetT, ArgT}} <- SuccTypes],
+ ?debug("Contracts:\n", []),
+ [?debug(" ~w :: ~s\n",
+ [MFA, erl_types:t_to_string(erl_types:t_fun(ArgT, RetFun(ArgT)))])
+ || {MFA, {contract, RetFun, ArgT}} <- SuccTypes],
+ ?debug("\n", []),
+ ok.
+-else.
+debug_pp_succ_typings(_) ->
+ ok.
+-endif.
+
+lookup_name(F, CG) ->
+ case dialyzer_callgraph:lookup_name(F, CG) of
+ error -> F;
+ {ok, Name} -> Name
+ end.
+
+send_log(none, _Msg) ->
+ ok;
+send_log(Parent, Msg) ->
+ Parent ! {self(), log, lists:flatten(Msg)},
+ ok.
+
+format_scc(SCC) ->
+ [MFA || {_M, _F, _A} = MFA <- SCC].
+
+%% ============================================================================
+%%
+%% Debug interface.
+%%
+%% ============================================================================
+
+-spec doit(module() | string()) -> 'ok'.
+
+doit(Module) ->
+ {ok, AbstrCode} = dialyzer_utils:get_abstract_code_from_src(Module),
+ {ok, Code} = dialyzer_utils:get_core_from_abstract_code(AbstrCode),
+ {ok, Records} = dialyzer_utils:get_record_and_type_info(AbstrCode),
+ %% contract typing info in dictionary format
+ {ok, Contracts} =
+ dialyzer_utils:get_spec_info(cerl:concrete(cerl:module_name(Code)),
+ AbstrCode, Records),
+ Sigs0 = get_top_level_signatures(Code, Records, Contracts),
+ M = if is_atom(Module) ->
+ list_to_atom(filename:basename(atom_to_list(Module)));
+ is_list(Module) ->
+ list_to_atom(filename:basename(Module))
+ end,
+ Sigs1 = [{{M, F, A}, Type} || {{F, A}, Type} <- Sigs0],
+ Sigs = ordsets:from_list(Sigs1),
+ io:format("==================== Final result ====================\n\n", []),
+ pp_signatures(Sigs, Records),
+ ok.
+
+-spec get_top_level_signatures(cerl:c_module(), dict(), dict()) ->
+ [{{atom(), arity()}, erl_types:erl_type()}].
+
+get_top_level_signatures(Code, Records, Contracts) ->
+ Tree = cerl:from_records(Code),
+ {LabeledTree, NextLabel} = cerl_trees:label(Tree),
+ Plt = get_def_plt(),
+ ModuleName = cerl:atom_val(cerl:module_name(LabeledTree)),
+ Plt1 = dialyzer_plt:delete_module(Plt, ModuleName),
+ Plt2 = analyze_module(LabeledTree, NextLabel, Plt1, Records, Contracts),
+ M = cerl:concrete(cerl:module_name(Tree)),
+ Functions = [{M, cerl:fname_id(V), cerl:fname_arity(V)}
+ || {V, _F} <- cerl:module_defs(LabeledTree)],
+ %% First contracts check
+ AllContracts = dict:fetch_keys(Contracts),
+ ErrorContracts = AllContracts -- Functions,
+ lists:foreach(fun(C) ->
+ io:format("Contract for non-existing function: ~w\n",[C])
+ end, ErrorContracts),
+ Types = [{MFA, dialyzer_plt:lookup(Plt2, MFA)} || MFA <- Functions],
+ Sigs = [{{F, A}, erl_types:t_fun(ArgT, RetT)}
+ || {{_M, F, A}, {value, {RetT, ArgT}}} <- Types],
+ ordsets:from_list(Sigs).
+
+get_def_plt() ->
+ try
+ dialyzer_plt:from_file(dialyzer_plt:get_default_plt())
+ catch
+ error:no_such_file -> dialyzer_plt:new();
+ throw:{dialyzer_error, _} -> dialyzer_plt:new()
+ end.
+
+pp_signatures([{{_, module_info, 0}, _}|Left], Records) ->
+ pp_signatures(Left, Records);
+pp_signatures([{{_, module_info, 1}, _}|Left], Records) ->
+ pp_signatures(Left, Records);
+pp_signatures([{{M, F, _A}, Type}|Left], Records) ->
+ TypeString =
+ case cerl:is_literal(Type) of
+%% Commented out so that dialyzer does not complain
+%% false ->
+%% "fun(" ++ String = erl_types:t_to_string(Type, Records),
+%% string:substr(String, 1, length(String)-1);
+ true ->
+ io_lib:format("~w", [cerl:concrete(Type)])
+ end,
+ io:format("~w:~w~s\n", [M, F, TypeString]),
+ pp_signatures(Left, Records);
+pp_signatures([], _Records) ->
+ ok.
+
+-ifdef(DEBUG_PP).
+debug_pp(Tree, _Map) ->
+ Tree1 = strip_annotations(Tree),
+ io:put_chars(cerl_prettypr:format(Tree1)),
+ io:nl().
+
+strip_annotations(Tree) ->
+ cerl_trees:map(fun(T) ->
+ case cerl:is_literal(T) orelse cerl:is_c_values(T) of
+ true -> cerl:set_ann(T, []);
+ false ->
+ Label = cerl_trees:get_label(T),
+ cerl:set_ann(T, [{'label', Label}])
+ end
+ end, Tree).
+-else.
+debug_pp(_Tree, _Map) ->
+ ok.
+-endif. % DEBUG_PP
+
+%%
+%% Analysis of a single module
+%%
+analyze_module(LabeledTree, NextLbl, Plt, Records, Contracts) ->
+ debug_pp(LabeledTree, dict:new()),
+ CallGraph1 = dialyzer_callgraph:new(),
+ CallGraph2 = dialyzer_callgraph:scan_core_tree(LabeledTree, CallGraph1),
+ {CallGraph3, _Ext} = dialyzer_callgraph:remove_external(CallGraph2),
+ CallGraph4 = dialyzer_callgraph:finalize(CallGraph3),
+ CodeServer1 = dialyzer_codeserver:new(),
+ Mod = cerl:concrete(cerl:module_name(LabeledTree)),
+ CodeServer2 = dialyzer_codeserver:insert(Mod, LabeledTree, CodeServer1),
+ CodeServer3 = dialyzer_codeserver:set_next_core_label(NextLbl, CodeServer2),
+ CodeServer4 = dialyzer_codeserver:store_records(Mod, Records, CodeServer3),
+ CodeServer5 = dialyzer_codeserver:store_contracts(Mod, Contracts, CodeServer4),
+ Res = analyze_callgraph(CallGraph4, Plt, CodeServer5),
+ dialyzer_callgraph:delete(CallGraph4),
+ dialyzer_codeserver:delete(CodeServer5),
+ Res.
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
new file mode 100644
index 0000000000..aeb20d4fae
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -0,0 +1,2756 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_typesig.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Constraint generation and solving for success typings.
+%%%
+%%% Created : 25 Apr 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+
+-module(dialyzer_typesig).
+
+-export([analyze_scc/5]).
+-export([get_safe_underapprox/2]).
+
+-import(erl_types,
+ [t_any/0, t_atom/0, t_atom_vals/1,
+ t_binary/0, t_bitstr/0, t_bitstr/2, t_bitstr_concat/1, t_boolean/0,
+ t_collect_vars/1, t_cons/2, t_cons_hd/1, t_cons_tl/1,
+ t_float/0, t_from_range/2, t_from_term/1,
+ t_fun/0, t_fun/2, t_fun_args/1, t_fun_range/1,
+ t_has_var/1,
+ t_inf/2, t_inf/3, t_integer/0,
+ t_is_any/1, t_is_atom/1, t_is_atom/2, t_is_cons/1, t_is_equal/2,
+ t_is_float/1, t_is_fun/1,
+ t_is_integer/1, t_non_neg_integer/0,
+ t_is_list/1, t_is_nil/1, t_is_none/1, t_is_number/1,
+ t_is_subtype/2, t_limit/2, t_list/0, t_list/1,
+ t_list_elements/1, t_nonempty_list/1, t_maybe_improper_list/0,
+ t_module/0, t_number/0, t_number_vals/1,
+ t_opaque_match_record/2, t_opaque_matching_structure/2,
+ t_opaque_from_records/1,
+ t_pid/0, t_port/0, t_product/1, t_reference/0,
+ t_subst/2, t_subtract/2, t_subtract_list/2, t_sup/1, t_sup/2,
+ t_timeout/0, t_tuple/0, t_tuple/1,
+ t_unify/2, t_var/1, t_var_name/1,
+ t_none/0, t_unit/0]).
+
+-include("dialyzer.hrl").
+
+%%-----------------------------------------------------------------------------
+
+-type dep() :: integer(). %% type variable names used as constraint ids
+-type type_var() :: erl_types:erl_type(). %% actually: {'c','var',_,_}
+
+-record(fun_var, {'fun' :: fun((_) -> erl_types:erl_type()), deps :: [dep()]}).
+
+-type constr_op() :: 'eq' | 'sub'.
+-type fvar_or_type() :: #fun_var{} | erl_types:erl_type().
+
+-record(constraint, {lhs :: erl_types:erl_type(),
+ op :: constr_op(),
+ rhs :: fvar_or_type(),
+ deps :: [dep()]}).
+
+-record(constraint_list, {type :: 'conj' | 'disj',
+ list :: [_], % [constr()] but it needs recursion :-(
+ deps :: [dep()],
+ id :: {'list', dep()}}).
+
+-record(constraint_ref, {id :: type_var(), deps :: [dep()]}).
+
+-type constr() :: #constraint{} | #constraint_list{} | #constraint_ref{}.
+
+-type typesig_scc() :: [{mfa(), {cerl:c_var(), cerl:c_fun()}, dict()}].
+-type typesig_funmap() :: [{type_var(), type_var()}]. %% Orddict
+
+-record(state, {callgraph :: dialyzer_callgraph:callgraph(),
+ cs = [] :: [constr()],
+ cmap = dict:new() :: dict(),
+ fun_map = [] :: typesig_funmap(),
+ fun_arities = dict:new() :: dict(),
+ in_match = false :: boolean(),
+ in_guard = false :: boolean(),
+ name_map = dict:new() :: dict(),
+ next_label :: label(),
+ non_self_recs = [] :: [label()],
+ plt :: dialyzer_plt:plt(),
+ prop_types = dict:new() :: dict(),
+ records = dict:new() :: dict(),
+ opaques = [] :: [erl_types:erl_type()],
+ scc = [] :: [type_var()]}).
+
+%%-----------------------------------------------------------------------------
+
+-define(TYPE_LIMIT, 4).
+-define(INTERNAL_TYPE_LIMIT, 5).
+
+%%-define(DEBUG, true).
+%%-define(DEBUG_CONSTRAINTS, true).
+-ifdef(DEBUG).
+-define(DEBUG_NAME_MAP, true).
+-endif.
+%%-define(DEBUG_NAME_MAP, true).
+
+-ifdef(DEBUG).
+-define(debug(__String, __Args), io:format(__String, __Args)).
+-else.
+-define(debug(__String, __Args), ok).
+-endif.
+
+%% ============================================================================
+%%
+%% The analysis.
+%%
+%% ============================================================================
+
+%%-----------------------------------------------------------------------------
+%% Analysis of strongly connected components.
+%%
+%% analyze_scc(SCC, NextLabel, CallGraph, PLT, PropTypes) -> FunTypes
+%%
+%% SCC - [{MFA, Def, Records}]
+%% where Def = {Var, Fun} as in the Core Erlang module definitions.
+%% Records = dict(RecName, {Arity, [{FieldName, FieldType}]})
+%% NextLabel - An integer that is higher than any label in the code.
+%% CallGraph - A callgraph as produced by dialyzer_callgraph.erl
+%% Note: The callgraph must have been built with all the
+%% code that the SCC is a part of.
+%% PLT - A dialyzer PLT. This PLT should contain available information
+%% about functions that can be called by this SCC.
+%% PropTypes - A dictionary mapping function labels to the types propagated
+%% so far for these functions (as taken from the PLT).
+%% FunTypes - A dictionary mapping function labels to the success types
+%% inferred for the functions in the SCC.
+%%-----------------------------------------------------------------------------
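+%%
+%% For orientation, a minimal sketch of how this function is driven from
+%% dialyzer_succ_typings (cf. find_succ_types_for_scc/3 earlier in this
+%% patch); all variable names below are illustrative:
+%%
+%%   SCC_Info = [{MFA,
+%%                dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+%%                dialyzer_codeserver:lookup_mod_records(M, Codeserver)}
+%%               || {M, _, _} = MFA <- SCC],
+%%   NextLabel = dialyzer_codeserver:get_next_core_label(Codeserver),
+%%   FunTypes = dialyzer_typesig:analyze_scc(SCC_Info, NextLabel,
+%%                                           Callgraph, Plt, PropTypes),
+%%
+%% The caller then restricts FunTypes to the functions found in the SCC's
+%% code and compares the result with PropTypes to decide whether a fixpoint
+%% has been reached.
+%%-----------------------------------------------------------------------------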
+
+-spec analyze_scc(typesig_scc(), label(),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_plt:plt(), dict()) -> dict().
+
+analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes) ->
+ assert_format_of_scc(SCC),
+ State1 = new_state(SCC, NextLabel, CallGraph, Plt, PropTypes),
+ DefSet = add_def_list([Var || {_MFA, {Var, _Fun}, _Rec} <- SCC], sets:new()),
+ State2 = traverse_scc(SCC, DefSet, State1),
+ State3 = state__finalize(State2),
+ Funs = state__scc(State3),
+ pp_constrs_scc(Funs, State3),
+ constraints_to_dot_scc(Funs, State3),
+ solve(Funs, State3).
+
+assert_format_of_scc([{_MFA, {_Var, _Fun}, _Records}|Left]) ->
+ assert_format_of_scc(Left);
+assert_format_of_scc([]) ->
+ ok.
+
+%% ============================================================================
+%%
+%% Gets the constraints by traversing the code.
+%%
+%% ============================================================================
+
+traverse_scc([{MFA, Def, Rec}|Left], DefSet, AccState) ->
+ TmpState1 = state__set_rec_dict(AccState, Rec),
+ TmpState2 = state__set_opaques(TmpState1, MFA),
+ DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)),
+ {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2),
+ traverse_scc(Left, DefSet, NewAccState);
+traverse_scc([], _DefSet, AccState) ->
+ AccState.
+
+traverse(Tree, DefinedVars, State) ->
+ ?debug("Handling ~p\n", [cerl:type(Tree)]),
+ case cerl:type(Tree) of
+ alias ->
+ Var = cerl:alias_var(Tree),
+ Pat = cerl:alias_pat(Tree),
+ DefinedVars1 = add_def(Var, DefinedVars),
+ {State1, PatVar} = traverse(Pat, DefinedVars1, State),
+ State2 = state__store_conj(mk_var(Var), eq, PatVar, State1),
+ {State2, PatVar};
+ apply ->
+ Args = cerl:apply_args(Tree),
+ Arity = length(Args),
+ Op = cerl:apply_op(Tree),
+ {State0, ArgTypes} = traverse_list(Args, DefinedVars, State),
+ {State1, OpType} = traverse(Op, DefinedVars, State0),
+ {State2, FunType} = state__get_fun_prototype(OpType, Arity, State1),
+ State3 = state__store_conj(FunType, eq, OpType, State2),
+ State4 = state__store_conj(mk_var(Tree), sub, t_fun_range(FunType),
+ State3),
+ State5 = state__store_conj_lists(ArgTypes, sub, t_fun_args(FunType),
+ State4),
+ case state__lookup_apply(Tree, State) of
+ unknown ->
+ {State5, mk_var(Tree)};
+ FunLabels ->
+ case get_apply_constr(FunLabels, mk_var(Tree), ArgTypes, State5) of
+ error -> {State5, mk_var(Tree)};
+ {ok, State6} -> {State6, mk_var(Tree)}
+ end
+ end;
+ binary ->
+ {State1, SegTypes} = traverse_list(cerl:binary_segments(Tree),
+ DefinedVars, State),
+ Type = mk_fun_var(fun(Map) ->
+ TmpSegTypes = lookup_type_list(SegTypes, Map),
+ t_bitstr_concat(TmpSegTypes)
+ end, SegTypes),
+ {state__store_conj(mk_var(Tree), sub, Type, State1), mk_var(Tree)};
+ bitstr ->
+ Size = cerl:bitstr_size(Tree),
+ UnitVal = cerl:int_val(cerl:bitstr_unit(Tree)),
+ Val = cerl:bitstr_val(Tree),
+ {State1, [SizeType, ValType]} =
+ traverse_list([Size, Val], DefinedVars, State),
+ {State2, TypeConstr} =
+ case cerl:bitstr_bitsize(Tree) of
+ all -> {State1, t_bitstr(UnitVal, 0)};
+ utf -> {State1, t_binary()}; % contains an integer number of bytes
+ N when is_integer(N) -> {State1, t_bitstr(0, N)};
+ any -> % Size is not a literal
+ {state__store_conj(SizeType, sub, t_non_neg_integer(), State1),
+ mk_fun_var(bitstr_constr(SizeType, UnitVal), [SizeType])}
+ end,
+ ValTypeConstr =
+ case cerl:concrete(cerl:bitstr_type(Tree)) of
+ binary -> TypeConstr;
+ float ->
+ case state__is_in_match(State1) of
+ true -> t_float();
+ false -> t_number()
+ end;
+ integer ->
+ case state__is_in_match(State1) of
+ true ->
+ Flags = cerl:concrete(cerl:bitstr_flags(Tree)),
+ mk_fun_var(bitstr_val_constr(SizeType, UnitVal, Flags),
+ [SizeType]);
+ false -> t_integer()
+ end;
+ utf8 -> t_integer();
+ utf16 -> t_integer();
+ utf32 -> t_integer()
+ end,
+ State3 = state__store_conj(ValType, sub, ValTypeConstr, State2),
+ State4 = state__store_conj(mk_var(Tree), sub, TypeConstr, State3),
+ {State4, mk_var(Tree)};
+ 'case' ->
+ Arg = cerl:case_arg(Tree),
+ Clauses = filter_match_fail(cerl:case_clauses(Tree)),
+ {State1, ArgVar} = traverse(Arg, DefinedVars, State),
+ handle_clauses(Clauses, mk_var(Tree), ArgVar, DefinedVars, State1);
+ call ->
+ handle_call(Tree, DefinedVars, State);
+ 'catch' ->
+ %% XXX: Perhaps there is something to say about this.
+ {State, mk_var(Tree)};
+ cons ->
+ Hd = cerl:cons_hd(Tree),
+ Tl = cerl:cons_tl(Tree),
+ {State1, [HdVar, TlVar]} = traverse_list([Hd, Tl], DefinedVars, State),
+ case cerl:is_literal(cerl:fold_literal(Tree)) of
+ true ->
+ %% We do not need to do anything more here.
+ {State, t_cons(HdVar, TlVar)};
+ false ->
+ ConsVar = mk_var(Tree),
+ ConsType = mk_fun_var(fun(Map) ->
+ t_cons(lookup_type(HdVar, Map),
+ lookup_type(TlVar, Map))
+ end, [HdVar, TlVar]),
+ HdType = mk_fun_var(fun(Map) ->
+ Cons = lookup_type(ConsVar, Map),
+ case t_is_cons(Cons) of
+ false -> t_any();
+ true -> t_cons_hd(Cons)
+ end
+ end, [ConsVar]),
+ TlType = mk_fun_var(fun(Map) ->
+ Cons = lookup_type(ConsVar, Map),
+ case t_is_cons(Cons) of
+ false -> t_any();
+ true -> t_cons_tl(Cons)
+ end
+ end, [ConsVar]),
+ State2 = state__store_conj_lists([HdVar, TlVar, ConsVar], sub,
+ [HdType, TlType, ConsType],
+ State1),
+ {State2, ConsVar}
+ end;
+ 'fun' ->
+ Body = cerl:fun_body(Tree),
+ Vars = cerl:fun_vars(Tree),
+ DefinedVars1 = add_def_list(Vars, DefinedVars),
+ State0 = state__new_constraint_context(State),
+ FunFailType =
+ case state__prop_domain(cerl_trees:get_label(Tree), State0) of
+ error -> t_fun(length(Vars), t_none());
+ {ok, Dom} -> t_fun(Dom, t_none())
+ end,
+ State2 =
+ try
+ State1 = case state__add_prop_constrs(Tree, State0) of
+ not_called -> State0;
+ PropState -> PropState
+ end,
+ {BodyState, BodyVar} = traverse(Body, DefinedVars1, State1),
+ state__store_conj(mk_var(Tree), eq,
+ t_fun(mk_var_list(Vars), BodyVar), BodyState)
+ catch
+ throw:error ->
+ state__store_conj(mk_var(Tree), eq, FunFailType, State0)
+ end,
+ Cs = state__cs(State2),
+ State3 = state__store_constrs(mk_var(Tree), Cs, State2),
+ Ref = mk_constraint_ref(mk_var(Tree), get_deps(Cs)),
+ OldCs = state__cs(State),
+ State4 = state__new_constraint_context(State3),
+ State5 = state__store_conj_list([OldCs, Ref], State4),
+ State6 = state__store_fun_arity(Tree, State5),
+ {State6, mk_var(Tree)};
+ 'let' ->
+ Vars = cerl:let_vars(Tree),
+ Arg = cerl:let_arg(Tree),
+ Body = cerl:let_body(Tree),
+ {State1, ArgVars} = traverse(Arg, DefinedVars, State),
+ State2 = state__store_conj(t_product(mk_var_list(Vars)), eq,
+ ArgVars, State1),
+ DefinedVars1 = add_def_list(Vars, DefinedVars),
+ traverse(Body, DefinedVars1, State2);
+ letrec ->
+ Defs = cerl:letrec_defs(Tree),
+ Body = cerl:letrec_body(Tree),
+ Funs = [Fun || {_Var, Fun} <- Defs],
+ Vars = [Var || {Var, _Fun} <- Defs],
+ State1 = state__store_funs(Vars, Funs, State),
+ DefinedVars1 = add_def_list(Vars, DefinedVars),
+ {State2, _} = traverse_list(Funs, DefinedVars1, State1),
+ traverse(Body, DefinedVars1, State2);
+ literal ->
+ %% This is needed for finding records
+ case cerl:unfold_literal(Tree) of
+ Tree ->
+ Type = t_from_term(cerl:concrete(Tree)),
+ NewType =
+ case erl_types:t_opaque_match_atom(Type, State#state.opaques) of
+ [Opaque] -> Opaque;
+ _ -> Type
+ end,
+ {State, NewType};
+ NewTree -> traverse(NewTree, DefinedVars, State)
+ end;
+ module ->
+ Defs = cerl:module_defs(Tree),
+ Funs = [Fun || {_Var, Fun} <- Defs],
+ Vars = [Var || {Var, _Fun} <- Defs],
+ DefinedVars1 = add_def_list(Vars, DefinedVars),
+ State1 = state__store_funs(Vars, Funs, State),
+ FoldFun = fun(Fun, AccState) ->
+ {S, _} = traverse(Fun, DefinedVars1,
+ state__new_constraint_context(AccState)),
+ S
+ end,
+ lists:foldl(FoldFun, State1, Funs);
+ primop ->
+ case cerl:atom_val(cerl:primop_name(Tree)) of
+ match_fail -> throw(error);
+ raise -> throw(error);
+ bs_init_writable -> {State, t_from_term(<<>>)};
+ Other -> erlang:error({'Unsupported primop', Other})
+ end;
+ 'receive' ->
+ Clauses = filter_match_fail(cerl:receive_clauses(Tree)),
+ Timeout = cerl:receive_timeout(Tree),
+ case (cerl:is_c_atom(Timeout) andalso
+ (cerl:atom_val(Timeout) =:= infinity)) of
+ true ->
+ handle_clauses(Clauses, mk_var(Tree), [], DefinedVars, State);
+ false ->
+ Action = cerl:receive_action(Tree),
+ {State1, TimeoutVar} = traverse(Timeout, DefinedVars, State),
+ State2 = state__store_conj(TimeoutVar, sub, t_timeout(), State1),
+ handle_clauses(Clauses, mk_var(Tree), [], Action, DefinedVars, State2)
+ end;
+ seq ->
+ Body = cerl:seq_body(Tree),
+ Arg = cerl:seq_arg(Tree),
+ {State1, _} = traverse(Arg, DefinedVars, State),
+ traverse(Body, DefinedVars, State1);
+ 'try' ->
+ handle_try(Tree, DefinedVars, State);
+ tuple ->
+ Elements = cerl:tuple_es(Tree),
+ {State1, EVars} = traverse_list(Elements, DefinedVars, State),
+ {State2, TupleType} =
+ case cerl:is_literal(cerl:fold_literal(Tree)) of
+ true ->
+ %% We do not need to do anything more here.
+ {State, t_tuple(EVars)};
+ false ->
+ %% We have the same basic problem as in products, but we want to
+ %% make sure that everything that can be used as tags for the
+ %% disjoint unions stays in the tuple.
+ Fun = fun(Var, AccState) ->
+ case t_has_var(Var) of
+ true ->
+ {AccState1, NewVar} = state__mk_var(AccState),
+ {NewVar,
+ state__store_conj(Var, eq, NewVar, AccState1)};
+ false ->
+ {Var, AccState}
+ end
+ end,
+ {NewEvars, TmpState} = lists:mapfoldl(Fun, State1, EVars),
+ {TmpState, t_tuple(NewEvars)}
+ end,
+ case Elements of
+ [Tag|Fields] ->
+ case cerl:is_c_atom(Tag) of
+ true ->
+ %% Check if an opaque term is constructed.
+ case t_opaque_match_record(TupleType, State#state.opaques) of
+ [Opaque] ->
+ OpStruct = t_opaque_matching_structure(TupleType, Opaque),
+ State3 = state__store_conj(TupleType, sub, OpStruct, State2),
+ {State3, Opaque};
+ %% Check if a record is constructed.
+ _ ->
+ Arity = length(Fields),
+ case state__lookup_record(State2, cerl:atom_val(Tag), Arity) of
+ error -> {State2, TupleType};
+ {ok, RecType} ->
+ State3 = state__store_conj(TupleType, sub, RecType, State2),
+ {State3, TupleType}
+ end
+ end;
+ false -> {State2, TupleType}
+ end;
+ [] -> {State2, TupleType}
+ end;
+ values ->
+ %% We can get into trouble when unifying products that have the
+ %% same element appearing several times. Handle these cases by
+ %% introducing fresh variables and constraining them to be equal
+ %% to the original ones. This is similar to what happens in
+ %% pattern matching where the matching is done on fresh
+ %% variables and guards assert that the matching is correct.
+ Elements = cerl:values_es(Tree),
+ {State1, EVars} = traverse_list(Elements, DefinedVars, State),
+ Arity = length(EVars),
+ Unique = length(ordsets:from_list(EVars)),
+ case Arity =:= Unique of
+ true -> {State1, t_product(EVars)};
+ false ->
+ {State2, Vars} = state__mk_vars(Arity, State1),
+ State3 = state__store_conj_lists(Vars, eq, EVars, State2),
+ {State3, t_product(Vars)}
+ end;
+ var ->
+ case is_def(Tree, DefinedVars) of
+ true -> {State, mk_var(Tree)};
+ false ->
+ %% If we are analyzing SCCs this can be a function variable.
+ case state__lookup_undef_var(Tree, State) of
+ error -> erlang:error({'Undefined variable', Tree});
+ {ok, Type} ->
+ {State1, NewVar} = state__mk_var(State),
+ {state__store_conj(NewVar, sub, Type, State1), NewVar}
+ end
+ end;
+ Other ->
+ erlang:error({'Unsupported type', Other})
+ end.
+
+traverse_list(Trees, DefinedVars, State) ->
+ traverse_list(Trees, DefinedVars, State, []).
+
+traverse_list([Tree|Tail], DefinedVars, State, Acc) ->
+ {State1, Var} = traverse(Tree, DefinedVars, State),
+ traverse_list(Tail, DefinedVars, State1, [Var|Acc]);
+traverse_list([], _DefinedVars, State, Acc) ->
+ {State, lists:reverse(Acc)}.
+
+add_def(Var, Set) ->
+ sets:add_element(cerl_trees:get_label(Var), Set).
+
+add_def_list([H|T], Set) ->
+ add_def_list(T, add_def(H, Set));
+add_def_list([], Set) ->
+ Set.
+
+add_def_from_tree(T, DefinedVars) ->
+ Vars = cerl_trees:fold(fun(X, Acc) ->
+ case cerl:is_c_var(X) of
+ true -> [X|Acc];
+ false -> Acc
+ end
+ end, [], T),
+ add_def_list(Vars, DefinedVars).
+
+add_def_from_tree_list([H|T], DefinedVars) ->
+ add_def_from_tree_list(T, add_def_from_tree(H, DefinedVars));
+add_def_from_tree_list([], DefinedVars) ->
+ DefinedVars.
+
+is_def(Var, Set) ->
+ sets:is_element(cerl_trees:get_label(Var), Set).
+
+%%----------------------------------------
+%% Try
+%%
+
+handle_try(Tree, DefinedVars, State) ->
+ Arg = cerl:try_arg(Tree),
+ Vars = cerl:try_vars(Tree),
+ EVars = cerl:try_evars(Tree),
+ Body = cerl:try_body(Tree),
+ Handler = cerl:try_handler(Tree),
+ State1 = state__new_constraint_context(State),
+ {ArgBodyState, BodyVar} =
+ try
+ {State2, ArgVar} = traverse(Arg, DefinedVars, State1),
+ DefinedVars1 = add_def_list(Vars, DefinedVars),
+ {State3, BodyVar1} = traverse(Body, DefinedVars1, State2),
+ State4 = state__store_conj(t_product(mk_var_list(Vars)), eq, ArgVar,
+ State3),
+ {State4, BodyVar1}
+ catch
+ throw:error ->
+ {State1, t_none()}
+ end,
+ State6 = state__new_constraint_context(ArgBodyState),
+ {HandlerState, HandlerVar} =
+ try
+ DefinedVars2 = add_def_list([X || X <- EVars, cerl:is_c_var(X)],
+ DefinedVars),
+ traverse(Handler, DefinedVars2, State6)
+ catch
+ throw:error ->
+ {State6, t_none()}
+ end,
+ ArgBodyCs = state__cs(ArgBodyState),
+ HandlerCs = state__cs(HandlerState),
+ TreeVar = mk_var(Tree),
+ OldCs = state__cs(State),
+ case state__is_in_guard(State) of
+ true ->
+ Conj1 = mk_conj_constraint_list([ArgBodyCs,
+ mk_constraint(BodyVar, eq, TreeVar)]),
+ Disj = mk_disj_constraint_list([Conj1,
+ mk_constraint(HandlerVar, eq, TreeVar)]),
+ NewState1 = state__new_constraint_context(HandlerState),
+ Conj2 = mk_conj_constraint_list([OldCs, Disj]),
+ NewState2 = state__store_conj(Conj2, NewState1),
+ {NewState2, TreeVar};
+ false ->
+ {NewCs, ReturnVar} =
+ case {t_is_none(BodyVar), t_is_none(HandlerVar)} of
+ {false, false} ->
+ Conj1 =
+ mk_conj_constraint_list([ArgBodyCs,
+ mk_constraint(TreeVar, eq, BodyVar)]),
+ Conj2 =
+ mk_conj_constraint_list([HandlerCs,
+ mk_constraint(TreeVar, eq, HandlerVar)]),
+ Disj = mk_disj_constraint_list([Conj1, Conj2]),
+ {Disj, mk_var(Tree)};
+ {false, true} ->
+ {mk_conj_constraint_list([ArgBodyCs,
+ mk_constraint(TreeVar, eq, BodyVar)]),
+ BodyVar};
+ {true, false} ->
+ {mk_conj_constraint_list([HandlerCs,
+ mk_constraint(TreeVar, eq, HandlerVar)]),
+ HandlerVar};
+ {true, true} ->
+ ?debug("Throw failed\n", []),
+ throw(error)
+ end,
+ Conj = mk_conj_constraint_list([OldCs, NewCs]),
+ NewState1 = state__new_constraint_context(HandlerState),
+ NewState2 = state__store_conj(Conj, NewState1),
+ {NewState2, ReturnVar}
+ end.
+
+%%----------------------------------------
+%% Call
+%%
+
+handle_call(Call, DefinedVars, State) ->
+ Args = cerl:call_args(Call),
+ Mod = cerl:call_module(Call),
+ Fun = cerl:call_name(Call),
+ Dst = mk_var(Call),
+ case cerl:is_c_atom(Mod) andalso cerl:is_c_atom(Fun) of
+ true ->
+ M = cerl:atom_val(Mod),
+ F = cerl:atom_val(Fun),
+ A = length(Args),
+ MFA = {M, F, A},
+ {State1, ArgVars} = traverse_list(Args, DefinedVars, State),
+ case state__lookup_rec_var_in_scope(MFA, State) of
+ error ->
+ case get_bif_constr(MFA, Dst, ArgVars, State1) of
+ none ->
+ {get_plt_constr(MFA, Dst, ArgVars, State1), Dst};
+ C ->
+ {state__store_conj(C, State1), Dst}
+ end;
+ {ok, Var} ->
+ %% This is part of the SCC currently analyzed.
+ %% Intercept and change this to an apply instead.
+ ?debug("Found the call to ~w\n", [MFA]),
+ Label = cerl_trees:get_label(Call),
+ Apply = cerl:ann_c_apply([{label, Label}], Var, Args),
+ traverse(Apply, DefinedVars, State)
+ end;
+ false ->
+ {State1, MF} = traverse_list([Mod, Fun], DefinedVars, State),
+ {state__store_conj_lists(MF, sub, [t_module(), t_atom()], State1), Dst}
+ end.
+
+get_plt_constr(MFA, Dst, ArgVars, State) ->
+ Plt = state__plt(State),
+ PltRes = dialyzer_plt:lookup(Plt, MFA),
+ case dialyzer_plt:lookup_contract(Plt, MFA) of
+ none ->
+ case PltRes of
+ none -> State;
+ {value, {PltRetType, PltArgTypes}} ->
+ state__store_conj_lists([Dst|ArgVars], sub,
+ [PltRetType|PltArgTypes], State)
+ end;
+ {value, #contract{args = GenArgs} = C} ->
+ {RetType, ArgCs} =
+ case PltRes of
+ none ->
+ {mk_fun_var(fun(Map) ->
+ ArgTypes = lookup_type_list(ArgVars, Map),
+ dialyzer_contracts:get_contract_return(C, ArgTypes)
+ end, ArgVars), GenArgs};
+ {value, {PltRetType, PltArgTypes}} ->
+ %% Need to combine the contract with the success typing.
+ {mk_fun_var(
+ fun(Map) ->
+ ArgTypes = lookup_type_list(ArgVars, Map),
+ CRet = dialyzer_contracts:get_contract_return(C, ArgTypes),
+ t_inf(CRet, PltRetType, opaque)
+ end, ArgVars),
+ [t_inf(X, Y, opaque) || {X, Y} <- lists:zip(GenArgs, PltArgTypes)]}
+ end,
+ state__store_conj_lists([Dst|ArgVars], sub, [RetType|ArgCs], State)
+ end.
+
+filter_match_fail([Clause] = Cls) ->
+ Body = cerl:clause_body(Clause),
+ case cerl:type(Body) of
+ primop ->
+ case cerl:atom_val(cerl:primop_name(Body)) of
+ match_fail -> [];
+ raise -> [];
+ _ -> Cls
+ end;
+ _ -> Cls
+ end;
+filter_match_fail([H|T]) ->
+ [H|filter_match_fail(T)];
+filter_match_fail([]) ->
+ %% This can actually happen, for example in
+ %% receive after 1 -> ok end
+ [].
+
+%% If there is a significant number of clauses, we cannot apply the
+%% list subtraction scheme since it causes the analysis to be too
+%% slow. Typically, this only affects automatically generated files.
+%% The dataflow analysis does not suffer from this restriction, so we
+%% will still get some information for such clauses.
+-define(MAX_NOF_CLAUSES, 15).
+
+handle_clauses(Clauses, TopVar, Arg, DefinedVars, State) ->
+ handle_clauses(Clauses, TopVar, Arg, none, DefinedVars, State).
+
+handle_clauses([], _, _, Action, DefinedVars, State) when Action =/= none ->
+ %% Can happen when a receive has no clauses, see filter_match_fail.
+ traverse(Action, DefinedVars, State);
+handle_clauses(Clauses, TopVar, Arg, Action, DefinedVars, State) ->
+ SubtrTypeList =
+ if length(Clauses) > ?MAX_NOF_CLAUSES -> overflow;
+ true -> []
+ end,
+ {State1, CList} = handle_clauses_1(Clauses, TopVar, Arg, DefinedVars,
+ State, SubtrTypeList, []),
+ {NewCs, NewState} =
+ case Action of
+ none ->
+ if CList =:= [] -> throw(error);
+ true -> {CList, State1}
+ end;
+ _ ->
+ try
+ {State2, ActionVar} = traverse(Action, DefinedVars, State1),
+ TmpC = mk_constraint(TopVar, eq, ActionVar),
+ ActionCs = mk_conj_constraint_list([state__cs(State2),TmpC]),
+ {[ActionCs|CList], State2}
+ catch
+ throw:error ->
+ if CList =:= [] -> throw(error);
+ true -> {CList, State1}
+ end
+ end
+ end,
+ OldCs = state__cs(State),
+ NewCList = mk_disj_constraint_list(NewCs),
+ FinalState = state__new_constraint_context(NewState),
+ {state__store_conj_list([OldCs, NewCList], FinalState), TopVar}.
+
+handle_clauses_1([Clause|Tail], TopVar, Arg, DefinedVars,
+ State, SubtrTypes, Acc) ->
+ State0 = state__new_constraint_context(State),
+ Pats = cerl:clause_pats(Clause),
+ Guard = cerl:clause_guard(Clause),
+ Body = cerl:clause_body(Clause),
+ NewSubtrTypes =
+ case SubtrTypes =:= overflow of
+ true -> overflow;
+ false ->
+ ordsets:add_element(get_safe_underapprox(Pats, Guard), SubtrTypes)
+ end,
+ try
+ DefinedVars1 = add_def_from_tree_list(Pats, DefinedVars),
+ State1 = state__set_in_match(State0, true),
+ {State2, PatVars} = traverse_list(Pats, DefinedVars1, State1),
+ State3 =
+ case Arg =:= [] of
+ true -> State2;
+ false ->
+ S = state__store_conj(Arg, eq, t_product(PatVars), State2),
+ case SubtrTypes =:= overflow of
+ true -> S;
+ false ->
+ SubtrPatVar = mk_fun_var(fun(Map) ->
+ TmpType = lookup_type(Arg, Map),
+ t_subtract_list(TmpType, SubtrTypes)
+ end, [Arg]),
+ state__store_conj(Arg, sub, SubtrPatVar, S)
+ end
+ end,
+ State4 = handle_guard(Guard, DefinedVars1, State3),
+ {State5, BodyVar} = traverse(Body, DefinedVars1,
+ state__set_in_match(State4, false)),
+ State6 = state__store_conj(TopVar, eq, BodyVar, State5),
+ Cs = state__cs(State6),
+ handle_clauses_1(Tail, TopVar, Arg, DefinedVars, State6,
+ NewSubtrTypes, [Cs|Acc])
+ catch
+ throw:error ->
+ handle_clauses_1(Tail, TopVar, Arg, DefinedVars,
+ State, NewSubtrTypes, Acc)
+ end;
+handle_clauses_1([], _TopVar, _Arg, _DefinedVars, State, _SubtrType, Acc) ->
+ {state__new_constraint_context(State), Acc}.
+
+-spec get_safe_underapprox([cerl:c_values()], cerl:cerl()) -> erl_types:erl_type().
+
+get_safe_underapprox(Pats, Guard) ->
+ try
+ Map1 = cerl_trees:fold(fun(X, Acc) ->
+ case cerl:is_c_var(X) of
+ true ->
+ dict:store(cerl_trees:get_label(X), t_any(),
+ Acc);
+ false -> Acc
+ end
+ end, dict:new(), cerl:c_values(Pats)),
+ {Type, Map2} = get_underapprox_from_guard(Guard, Map1),
+ Map3 = case t_is_none(t_inf(t_from_term(true), Type)) of
+ true -> throw(dont_know);
+ false ->
+ case cerl:is_c_var(Guard) of
+ false -> Map2;
+ true ->
+ dict:store(cerl_trees:get_label(Guard),
+ t_from_term(true), Map2)
+ end
+ end,
+ {Ts, _Map4} = get_safe_underapprox_1(Pats, [], Map3),
+ t_product(Ts)
+ catch
+ throw:dont_know -> t_none()
+ end.
+
+get_underapprox_from_guard(Tree, Map) ->
+ True = t_from_term(true),
+ case cerl:type(Tree) of
+ call ->
+ case {cerl:concrete(cerl:call_module(Tree)),
+ cerl:concrete(cerl:call_name(Tree)),
+ length(cerl:call_args(Tree))} of
+ {erlang, is_function, 2} ->
+ [Fun, Arity] = cerl:call_args(Tree),
+ case cerl:is_c_int(Arity) of
+ false -> throw(dont_know);
+ true ->
+ {FunType, Map1} = get_underapprox_from_guard(Fun, Map),
+ Inf = t_inf(FunType, t_fun(cerl:int_val(Arity), t_any())),
+ case t_is_none(Inf) of
+ true -> throw(dont_know);
+ false ->
+ {True, dict:store(cerl_trees:get_label(Fun), Inf, Map1)}
+ end
+ end;
+ MFA ->
+ case get_type_test(MFA) of
+ {ok, Type} ->
+ [Arg] = cerl:call_args(Tree),
+ {ArgType, Map1} = get_underapprox_from_guard(Arg, Map),
+ Inf = t_inf(Type, ArgType),
+ case t_is_none(Inf) of
+ true -> throw(dont_know);
+ false ->
+ case cerl:is_literal(Arg) of
+ true -> {True, Map1};
+ false ->
+ {True, dict:store(cerl_trees:get_label(Arg), Inf, Map1)}
+ end
+ end;
+ error ->
+ case MFA of
+ {erlang, '=:=', 2} -> throw(dont_know);
+ {erlang, '==', 2} -> throw(dont_know);
+ {erlang, 'and', 2} ->
+ [Arg1, Arg2] = cerl:call_args(Tree),
+ case ((cerl:is_c_var(Arg1) orelse cerl:is_literal(Arg1))
+ andalso
+ (cerl:is_c_var(Arg2) orelse cerl:is_literal(Arg2))) of
+ true ->
+		      {Arg1Type, _} = get_underapprox_from_guard(Arg1, Map),
+		      {Arg2Type, _} = get_underapprox_from_guard(Arg2, Map),
+ case (t_is_equal(True, Arg1Type) andalso
+ t_is_equal(True, Arg2Type)) of
+ true -> {True, Map};
+ false -> throw(dont_know)
+ end;
+ false ->
+ throw(dont_know)
+ end;
+ {erlang, 'or', 2} -> throw(dont_know);
+ _ -> throw(dont_know)
+ end
+ end
+ end;
+ var ->
+ Type =
+ case dict:find(cerl_trees:get_label(Tree), Map) of
+ error -> throw(dont_know);
+ {ok, T} -> T
+ end,
+ {Type, Map};
+ literal ->
+ case cerl:unfold_literal(Tree) of
+ Tree ->
+ Type =
+ case cerl:concrete(Tree) of
+ Int when is_integer(Int) -> t_from_term(Int);
+ Atom when is_atom(Atom) -> t_from_term(Atom);
+ _Other -> throw(dont_know)
+ end,
+ {Type, Map};
+ OtherTree ->
+ get_underapprox_from_guard(OtherTree, Map)
+ end;
+ _ ->
+ throw(dont_know)
+ end.
+
+%%
+%% The guard test {erlang, is_function, 2} is handled specially by the
+%% function get_underapprox_from_guard/2
+%%
+get_type_test({erlang, is_atom, 1}) -> {ok, t_atom()};
+get_type_test({erlang, is_boolean, 1}) -> {ok, t_boolean()};
+get_type_test({erlang, is_binary, 1}) -> {ok, t_binary()};
+get_type_test({erlang, is_bitstring, 1}) -> {ok, t_bitstr()};
+get_type_test({erlang, is_float, 1}) -> {ok, t_float()};
+get_type_test({erlang, is_function, 1}) -> {ok, t_fun()};
+get_type_test({erlang, is_integer, 1}) -> {ok, t_integer()};
+get_type_test({erlang, is_list, 1}) -> {ok, t_list()};
+get_type_test({erlang, is_number, 1}) -> {ok, t_number()};
+get_type_test({erlang, is_pid, 1}) -> {ok, t_pid()};
+get_type_test({erlang, is_port, 1}) -> {ok, t_port()};
+%% get_type_test({erlang, is_record, 2}) -> {ok, t_tuple()};
+%% get_type_test({erlang, is_record, 3}) -> {ok, t_tuple()};
+get_type_test({erlang, is_reference, 1}) -> {ok, t_reference()};
+get_type_test({erlang, is_tuple, 1}) -> {ok, t_tuple()};
+get_type_test({M, F, A}) when is_atom(M), is_atom(F), is_integer(A) -> error.
+
+bitstr_constr(SizeType, UnitVal) ->
+ fun(Map) ->
+ TmpSizeType = lookup_type(SizeType, Map),
+ case t_is_subtype(TmpSizeType, t_non_neg_integer()) of
+ true ->
+ case t_number_vals(TmpSizeType) of
+ [OneSize] -> t_bitstr(0, OneSize * UnitVal);
+ _ ->
+ MinSize = erl_types:number_min(TmpSizeType),
+ t_bitstr(UnitVal, MinSize * UnitVal)
+ end;
+ false ->
+ t_bitstr(UnitVal, 0)
+ end
+ end.
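+
+%% A worked example (with illustrative values only): for UnitVal = 8, a size
+%% type that is known to be exactly 4 gives
+%%   t_bitstr(0, 4 * 8)    %% a bitstring of exactly 32 bits
+%% whereas a size type whose minimum is 2 gives
+%%   t_bitstr(8, 2 * 8)    %% at least 16 bits, growing in steps of 8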
+
+bitstr_val_constr(SizeType, UnitVal, Flags) ->
+ fun(Map) ->
+ TmpSizeType = lookup_type(SizeType, Map),
+ case t_is_subtype(TmpSizeType, t_non_neg_integer()) of
+ true ->
+ case erl_types:number_max(TmpSizeType) of
+ N when is_integer(N), N < 128 -> %% Avoid illegal arithmetic
+ TotalSizeVal = N * UnitVal,
+ {RangeMin, RangeMax} =
+ case lists:member(signed, Flags) of
+ true -> {-(1 bsl (TotalSizeVal - 1)),
+ 1 bsl (TotalSizeVal - 1) - 1};
+ false -> {0, 1 bsl TotalSizeVal - 1}
+ end,
+ t_from_range(RangeMin, RangeMax);
+ _ ->
+ t_integer()
+ end;
+ false ->
+ t_integer()
+ end
+ end.
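+
+%% A worked example (with illustrative values only): for N = 1 and
+%% UnitVal = 8 the segment is 8 bits wide, so a signed segment is given the
+%% range
+%%   t_from_range(-(1 bsl 7), (1 bsl 7) - 1)    %% -128..127
+%% and an unsigned one
+%%   t_from_range(0, (1 bsl 8) - 1)             %% 0..255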
+
+get_safe_underapprox_1([Pat|Left], Acc, Map) ->
+ case cerl:type(Pat) of
+ alias ->
+ APat = cerl:alias_pat(Pat),
+ AVar = cerl:alias_var(Pat),
+ {[VarType], Map1} = get_safe_underapprox_1([AVar], [], Map),
+ {[PatType], Map2} = get_safe_underapprox_1([APat], [], Map1),
+ Inf = t_inf(VarType, PatType),
+ case t_is_none(Inf) of
+ true -> throw(dont_know);
+ false ->
+ Map3 = dict:store(cerl_trees:get_label(AVar), Inf, Map2),
+ get_safe_underapprox_1(Left, [Inf|Acc], Map3)
+ end;
+ binary ->
+ %% TODO: Can maybe do something here
+ throw(dont_know);
+ cons ->
+ {[Hd, Tl], Map1} =
+ get_safe_underapprox_1([cerl:cons_hd(Pat), cerl:cons_tl(Pat)], [], Map),
+ case t_is_any(Tl) of
+ true -> get_safe_underapprox_1(Left, [t_nonempty_list(Hd)|Acc], Map1);
+ false -> throw(dont_know)
+ end;
+ literal ->
+ case cerl:unfold_literal(Pat) of
+ Pat ->
+ Type =
+ case cerl:concrete(Pat) of
+ Int when is_integer(Int) -> t_from_term(Int);
+ Atom when is_atom(Atom) -> t_from_term(Atom);
+ [] -> t_from_term([]);
+ _Other -> throw(dont_know)
+ end,
+ get_safe_underapprox_1(Left, [Type|Acc], Map);
+ OtherPat ->
+ get_safe_underapprox_1([OtherPat|Left], Acc, Map)
+ end;
+ tuple ->
+ Es = cerl:tuple_es(Pat),
+ {Ts, Map1} = get_safe_underapprox_1(Es, [], Map),
+ Type = t_tuple(Ts),
+ get_safe_underapprox_1(Left, [Type|Acc], Map1);
+ values ->
+ Es = cerl:values_es(Pat),
+ {Ts, Map1} = get_safe_underapprox_1(Es, [], Map),
+ Type = t_product(Ts),
+ get_safe_underapprox_1(Left, [Type|Acc], Map1);
+ var ->
+ case dict:find(cerl_trees:get_label(Pat), Map) of
+ error -> throw(dont_know);
+ {ok, VarType} -> get_safe_underapprox_1(Left, [VarType|Acc], Map)
+ end
+ end;
+get_safe_underapprox_1([], Acc, Map) ->
+ {lists:reverse(Acc), Map}.
+
+%%----------------------------------------
+%% Guards
+%%
+
+handle_guard(Guard, DefinedVars, State) ->
+ True = t_from_term(true),
+ State1 = state__set_in_guard(State, true),
+ State2 = state__new_constraint_context(State1),
+ {State3, Return} = traverse(Guard, DefinedVars, State2),
+ State4 = state__store_conj(Return, eq, True, State3),
+ Cs = state__cs(State4),
+ NewCs = mk_disj_norm_form(Cs),
+ OldCs = state__cs(State),
+ State5 = state__set_in_guard(State4, state__is_in_guard(State)),
+ State6 = state__new_constraint_context(State5),
+ state__store_conj(mk_conj_constraint_list([OldCs, NewCs]), State6).
+
+%%=============================================================================
+%%
+%% BIF constraints
+%%
+%%=============================================================================
+
+get_bif_constr({erlang, Op, 2}, Dst, Args = [Arg1, Arg2], _State)
+ when Op =:= '+'; Op =:= '-'; Op =:= '*' ->
+ ReturnType = mk_fun_var(fun(Map) ->
+ TmpArgTypes = lookup_type_list(Args, Map),
+ erl_bif_types:type(erlang, Op, 2, TmpArgTypes)
+ end, Args),
+ ArgFun =
+ fun(A, Pos) ->
+ F =
+ fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ AType = lookup_type(A, Map),
+ case t_is_integer(DstType) of
+ true ->
+ case t_is_integer(AType) of
+ true ->
+ eval_inv_arith(Op, Pos, DstType, AType);
+ false ->
+ %% This must be temporary.
+ t_integer()
+ end;
+ false ->
+ case t_is_float(DstType) of
+ true ->
+ case t_is_integer(AType) of
+ true -> t_float();
+ false -> t_number()
+ end;
+ false ->
+ t_number()
+ end
+ end
+ end,
+ mk_fun_var(F, [Dst, A])
+ end,
+ Arg1FunVar = ArgFun(Arg2, 2),
+ Arg2FunVar = ArgFun(Arg1, 1),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType),
+ mk_constraint(Arg1, sub, Arg1FunVar),
+ mk_constraint(Arg2, sub, Arg2FunVar)]);
+get_bif_constr({erlang, Op, 2}, Dst, [Arg1, Arg2] = Args, _State)
+ when Op =:= '<'; Op =:= '=<'; Op =:= '>'; Op =:= '>=' ->
+ ArgFun =
+ fun(LocalArg1, LocalArg2, LocalOp) ->
+ fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ IsTrue = t_is_atom(true, DstType),
+ IsFalse = t_is_atom(false, DstType),
+ case IsTrue orelse IsFalse of
+ true ->
+ Arg1Type = lookup_type(LocalArg1, Map),
+ Arg2Type = lookup_type(LocalArg2, Map),
+ case t_is_integer(Arg1Type) andalso t_is_integer(Arg2Type) of
+ true ->
+ Max1 = erl_types:number_max(Arg1Type),
+ Min1 = erl_types:number_min(Arg1Type),
+ Max2 = erl_types:number_max(Arg2Type),
+ Min2 = erl_types:number_min(Arg2Type),
+ case LocalOp of
+ '=<' ->
+ if IsTrue -> t_from_range(Min1, Max2);
+ IsFalse -> t_from_range(range_inc(Min2), Max1)
+ end;
+ '<' ->
+ if IsTrue -> t_from_range(Min1, range_dec(Max2));
+ IsFalse -> t_from_range(Min2, Max1)
+ end;
+ '>=' ->
+ if IsTrue -> t_from_range(Min2, Max1);
+ IsFalse -> t_from_range(Min1, range_dec(Max2))
+ end;
+ '>' ->
+ if IsTrue -> t_from_range(range_inc(Min2), Max1);
+ IsFalse -> t_from_range(Min1, Max2)
+ end
+ end;
+ false -> t_any()
+ end;
+ false -> t_any()
+ end
+ end
+ end,
+ {Arg1Fun, Arg2Fun} =
+ case Op of
+ '<' -> {ArgFun(Arg1, Arg2, '<'), ArgFun(Arg2, Arg1, '>=')};
+ '=<' -> {ArgFun(Arg1, Arg2, '=<'), ArgFun(Arg2, Arg1, '>=')};
+ '>' -> {ArgFun(Arg1, Arg2, '>'), ArgFun(Arg2, Arg1, '<')};
+ '>=' -> {ArgFun(Arg1, Arg2, '>='), ArgFun(Arg2, Arg1, '=<')}
+ end,
+ DstArgs = [Dst, Arg1, Arg2],
+ Arg1Var = mk_fun_var(Arg1Fun, DstArgs),
+ Arg2Var = mk_fun_var(Arg2Fun, DstArgs),
+ DstVar = mk_fun_var(fun(Map) ->
+ TmpArgTypes = lookup_type_list(Args, Map),
+ erl_bif_types:type(erlang, Op, 2, TmpArgTypes)
+ end, Args),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstVar),
+ mk_constraint(Arg1, sub, Arg1Var),
+ mk_constraint(Arg2, sub, Arg2Var)]);
+get_bif_constr({erlang, '++', 2}, Dst, [Hd, Tl] = Args, _State) ->
+ HdFun = fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ case t_is_cons(DstType) of
+ true -> t_list(t_cons_hd(DstType));
+ false ->
+ case t_is_list(DstType) of
+ true ->
+ case t_is_nil(DstType) of
+ true -> DstType;
+ false -> t_list(t_list_elements(DstType))
+ end;
+ false -> t_list()
+ end
+ end
+ end,
+ TlFun = fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ case t_is_cons(DstType) of
+ true -> t_sup(t_cons_tl(DstType), DstType);
+ false ->
+ case t_is_list(DstType) of
+ true ->
+ case t_is_nil(DstType) of
+ true -> DstType;
+ false -> t_list(t_list_elements(DstType))
+ end;
+ false -> t_any()
+ end
+ end
+ end,
+ DstL = [Dst],
+ HdVar = mk_fun_var(HdFun, DstL),
+ TlVar = mk_fun_var(TlFun, DstL),
+ ArgTypes = erl_bif_types:arg_types(erlang, '++', 2),
+ ReturnType = mk_fun_var(fun(Map) ->
+ TmpArgTypes = lookup_type_list(Args, Map),
+ erl_bif_types:type(erlang, '++', 2, TmpArgTypes)
+ end, Args),
+ Cs = mk_constraints(Args, sub, ArgTypes),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType),
+ mk_constraint(Hd, sub, HdVar),
+ mk_constraint(Tl, sub, TlVar)
+ |Cs]);
+get_bif_constr({erlang, is_atom, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_atom(), State);
+get_bif_constr({erlang, is_binary, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_binary(), State);
+get_bif_constr({erlang, is_bitstring, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_bitstr(), State);
+get_bif_constr({erlang, is_boolean, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_boolean(), State);
+get_bif_constr({erlang, is_float, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_float(), State);
+get_bif_constr({erlang, is_function, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_fun(), State);
+get_bif_constr({erlang, is_function, 2}, Dst, [Fun, Arity], _State) ->
+ ArgFun = fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ case t_is_atom(true, DstType) of
+ true ->
+ ArityType = lookup_type(Arity, Map),
+ case t_number_vals(ArityType) of
+ unknown -> t_fun();
+ Vals -> t_sup([t_fun(X, t_any()) || X <- Vals])
+ end;
+ false -> t_any()
+ end
+ end,
+ ArgV = mk_fun_var(ArgFun, [Dst, Arity]),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, t_boolean()),
+ mk_constraint(Arity, sub, t_integer()),
+ mk_constraint(Fun, sub, ArgV)]);
+get_bif_constr({erlang, is_integer, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_integer(), State);
+get_bif_constr({erlang, is_list, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_maybe_improper_list(), State);
+get_bif_constr({erlang, is_number, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_number(), State);
+get_bif_constr({erlang, is_pid, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_pid(), State);
+get_bif_constr({erlang, is_port, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_port(), State);
+get_bif_constr({erlang, is_reference, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_reference(), State);
+get_bif_constr({erlang, is_record, 2}, Dst, [Var, Tag] = Args, _State) ->
+ ArgFun = fun(Map) ->
+ case t_is_atom(true, lookup_type(Dst, Map)) of
+ true -> t_tuple();
+ false -> t_any()
+ end
+ end,
+ ArgV = mk_fun_var(ArgFun, [Dst]),
+ DstFun = fun(Map) ->
+ TmpArgTypes = lookup_type_list(Args, Map),
+ erl_bif_types:type(erlang, is_record, 2, TmpArgTypes)
+ end,
+ DstV = mk_fun_var(DstFun, Args),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Tag, sub, t_atom()),
+ mk_constraint(Var, sub, ArgV)]);
+get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
+ %% TODO: Revise this to make it precise for Tag and Arity.
+ ArgFun =
+ fun(Map) ->
+ case t_is_atom(true, lookup_type(Dst, Map)) of
+ true ->
+ ArityType = lookup_type(Arity, Map),
+ case t_is_integer(ArityType) of
+ true ->
+ case t_number_vals(ArityType) of
+ [ArityVal] ->
+ TagType = lookup_type(Tag, Map),
+ case t_is_atom(TagType) of
+ true ->
+ AnyElems = lists:duplicate(ArityVal-1, t_any()),
+ GenRecord = t_tuple([TagType|AnyElems]),
+ case t_atom_vals(TagType) of
+ [TagVal] ->
+ case state__lookup_record(State, TagVal,
+ ArityVal - 1) of
+ {ok, Type} ->
+ AllOpaques = State#state.opaques,
+ case t_opaque_match_record(Type, AllOpaques) of
+ [Opaque] -> Opaque;
+ _ -> Type
+ end;
+ error -> GenRecord
+ end;
+ _ -> GenRecord
+ end;
+ false -> t_tuple(ArityVal)
+ end;
+ _ -> t_tuple()
+ end;
+ false -> t_tuple()
+ end;
+ false -> t_any()
+ end
+ end,
+ ArgV = mk_fun_var(ArgFun, [Tag, Arity, Dst]),
+ DstFun = fun(Map) ->
+ [TmpVar, TmpTag, TmpArity] = TmpArgTypes = lookup_type_list(Args, Map),
+ TmpArgTypes2 =
+ case lists:member(TmpVar, State#state.opaques) of
+ true ->
+ case t_is_integer(TmpArity) of
+ true ->
+ case t_number_vals(TmpArity) of
+ [TmpArityVal] ->
+ case t_is_atom(TmpTag) of
+ true ->
+ case t_atom_vals(TmpTag) of
+ [TmpTagVal] ->
+ case state__lookup_record(State, TmpTagVal, TmpArityVal - 1) of
+ {ok, TmpType} ->
+ case t_is_none(t_inf(TmpType, TmpVar, opaque)) of
+ true -> TmpArgTypes;
+ false -> [TmpType, TmpTag, TmpArity]
+ end;
+ error -> TmpArgTypes
+ end;
+ _ -> TmpArgTypes
+ end;
+ false -> TmpArgTypes
+ end;
+ _ -> TmpArgTypes
+ end;
+ false -> TmpArgTypes
+ end;
+ false -> TmpArgTypes
+ end,
+ erl_bif_types:type(erlang, is_record, 3, TmpArgTypes2)
+ end,
+ DstV = mk_fun_var(DstFun, Args),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Arity, sub, t_integer()),
+ mk_constraint(Tag, sub, t_atom()),
+ mk_constraint(Var, sub, ArgV)]);
+get_bif_constr({erlang, is_tuple, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_tuple(), State);
+get_bif_constr({erlang, 'and', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
+ True = t_from_term(true),
+ False = t_from_term(false),
+ ArgFun = fun(Var) ->
+ fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ case t_is_atom(true, DstType) of
+ true -> True;
+ false ->
+ case t_is_atom(false, DstType) of
+ true ->
+ case t_is_atom(true, lookup_type(Var, Map)) of
+ true -> False;
+ false -> t_boolean()
+ end;
+ false ->
+ t_boolean()
+ end
+ end
+ end
+ end,
+ DstFun = fun(Map) ->
+ Arg1Type = lookup_type(Arg1, Map),
+ case t_is_atom(false, Arg1Type) of
+ true -> False;
+ false ->
+ Arg2Type = lookup_type(Arg2, Map),
+ case t_is_atom(false, Arg2Type) of
+ true -> False;
+ false ->
+ case (t_is_atom(true, Arg1Type)
+ andalso t_is_atom(true, Arg2Type)) of
+ true -> True;
+ false -> t_boolean()
+ end
+ end
+ end
+ end,
+ ArgV1 = mk_fun_var(ArgFun(Arg2), [Arg2, Dst]),
+ ArgV2 = mk_fun_var(ArgFun(Arg1), [Arg1, Dst]),
+ DstV = mk_fun_var(DstFun, Args),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Arg1, sub, ArgV1),
+ mk_constraint(Arg2, sub, ArgV2)]);
+get_bif_constr({erlang, 'or', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
+ True = t_from_term(true),
+ False = t_from_term(false),
+ ArgFun = fun(Var) ->
+ fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ case t_is_atom(false, DstType) of
+ true -> False;
+ false ->
+ case t_is_atom(true, DstType) of
+ true ->
+ case t_is_atom(false, lookup_type(Var, Map)) of
+ true -> True;
+ false -> t_boolean()
+ end;
+ false ->
+ t_boolean()
+ end
+ end
+ end
+ end,
+ DstFun = fun(Map) ->
+ Arg1Type = lookup_type(Arg1, Map),
+ case t_is_atom(true, Arg1Type) of
+ true -> True;
+ false ->
+ Arg2Type = lookup_type(Arg2, Map),
+ case t_is_atom(true, Arg2Type) of
+ true -> True;
+ false ->
+ case (t_is_atom(false, Arg1Type)
+ andalso t_is_atom(false, Arg2Type)) of
+ true -> False;
+ false -> t_boolean()
+ end
+ end
+ end
+ end,
+ ArgV1 = mk_fun_var(ArgFun(Arg2), [Arg2, Dst]),
+ ArgV2 = mk_fun_var(ArgFun(Arg1), [Arg1, Dst]),
+ DstV = mk_fun_var(DstFun, Args),
+ Disj = mk_disj_constraint_list([mk_constraint(Arg1, sub, True),
+ mk_constraint(Arg2, sub, True),
+ mk_constraint(Dst, sub, False)]),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Arg1, sub, ArgV1),
+ mk_constraint(Arg2, sub, ArgV2),
+ Disj]);
+get_bif_constr({erlang, 'not', 1}, Dst, [Arg] = Args, _State) ->
+ True = t_from_term(true),
+ False = t_from_term(false),
+ Fun = fun(Var) ->
+ fun(Map) ->
+ Type = lookup_type(Var, Map),
+ case t_is_atom(true, Type) of
+ true -> False;
+ false ->
+ case t_is_atom(false, Type) of
+ true -> True;
+ false -> t_boolean()
+ end
+ end
+ end
+ end,
+ ArgV = mk_fun_var(Fun(Dst), [Dst]),
+ DstV = mk_fun_var(Fun(Arg), Args),
+ mk_conj_constraint_list([mk_constraint(Arg, sub, ArgV),
+ mk_constraint(Dst, sub, DstV)]);
+get_bif_constr({erlang, '=:=', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
+ ArgFun =
+ fun(Self, OtherVar) ->
+ fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ OtherVarType = lookup_type(OtherVar, Map),
+ case t_is_atom(true, DstType) of
+ true -> OtherVarType;
+ false ->
+ case t_is_atom(false, DstType) of
+ true ->
+ case is_singleton_type(OtherVarType) of
+ true -> t_subtract(lookup_type(Self, Map), OtherVarType);
+ false -> t_any()
+ end;
+ false ->
+ t_any()
+ end
+ end
+ end
+ end,
+ DstFun = fun(Map) ->
+ ArgType1 = lookup_type(Arg1, Map),
+ ArgType2 = lookup_type(Arg2, Map),
+ case t_is_none(t_inf(ArgType1, ArgType2)) of
+ true -> t_from_term(false);
+ false -> t_boolean()
+ end
+ end,
+ DstArgs = [Dst, Arg1, Arg2],
+ ArgV1 = mk_fun_var(ArgFun(Arg1, Arg2), DstArgs),
+ ArgV2 = mk_fun_var(ArgFun(Arg2, Arg1), DstArgs),
+ DstV = mk_fun_var(DstFun, Args),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Arg1, sub, ArgV1),
+ mk_constraint(Arg2, sub, ArgV2)]);
+get_bif_constr({erlang, '==', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
+ DstFun = fun(Map) ->
+ TmpArgTypes = lookup_type_list(Args, Map),
+ erl_bif_types:type(erlang, '==', 2, TmpArgTypes)
+ end,
+ ArgFun =
+ fun(Var, Self) ->
+ fun(Map) ->
+ VarType = lookup_type(Var, Map),
+ DstType = lookup_type(Dst, Map),
+ case is_singleton_non_number_type(VarType) of
+ true ->
+ case t_is_atom(true, DstType) of
+ true -> VarType;
+ false ->
+ case t_is_atom(false, DstType) of
+ true -> t_subtract(lookup_type(Self, Map), VarType);
+ false -> t_any()
+ end
+ end;
+ false ->
+ case t_is_atom(true, DstType) of
+ true ->
+ case t_is_number(VarType) of
+ true -> t_number();
+ false ->
+ case t_is_atom(VarType) of
+ true -> VarType;
+ false -> t_any()
+ end
+ end;
+ false ->
+ t_any()
+ end
+ end
+ end
+ end,
+ DstV = mk_fun_var(DstFun, Args),
+ ArgL = [Arg1, Arg2, Dst],
+ ArgV1 = mk_fun_var(ArgFun(Arg2, Arg1), ArgL),
+ ArgV2 = mk_fun_var(ArgFun(Arg1, Arg2), ArgL),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Arg1, sub, ArgV1),
+ mk_constraint(Arg2, sub, ArgV2)]);
+get_bif_constr({erlang, element, 2} = _BIF, Dst, Args, State) ->
+ GenType = erl_bif_types:type(erlang, element, 2),
+ case t_is_none(GenType) of
+ true -> ?debug("Bif: ~w failed\n", [_BIF]), throw(error);
+ false ->
+ Fun = fun(Map) ->
+ [I, T] = ATs = lookup_type_list(Args, Map),
+ ATs2 = case lists:member(T, State#state.opaques) of
+ true -> [I, erl_types:t_opaque_structure(T)];
+ false -> ATs
+ end,
+ erl_bif_types:type(erlang, element, 2, ATs2)
+ end,
+ ReturnType = mk_fun_var(Fun, Args),
+ ArgTypes = erl_bif_types:arg_types(erlang, element, 2),
+ Cs = mk_constraints(Args, sub, ArgTypes),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType)|Cs])
+ end;
+get_bif_constr({M, F, A} = _BIF, Dst, Args, _State) ->
+ GenType = erl_bif_types:type(M, F, A),
+ case t_is_none(GenType) of
+ true -> ?debug("Bif: ~w failed\n", [_BIF]), throw(error);
+ false ->
+ ReturnType = mk_fun_var(fun(Map) ->
+ TmpArgTypes = lookup_type_list(Args, Map),
+ erl_bif_types:type(M, F, A, TmpArgTypes)
+ end, Args),
+ case erl_bif_types:is_known(M, F, A) of
+ false ->
+ case t_is_any(GenType) of
+ true ->
+ none;
+ false ->
+ mk_constraint(Dst, sub, ReturnType)
+ end;
+ true ->
+ ArgTypes = erl_bif_types:arg_types(M, F, A),
+ Cs = mk_constraints(Args, sub, ArgTypes),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType)|Cs])
+ end
+ end.
+
+eval_inv_arith('+', _Pos, Dst, Arg) ->
+ erl_bif_types:type(erlang, '-', 2, [Dst, Arg]);
+eval_inv_arith('*', _Pos, Dst, Arg) ->
+ case t_number_vals(Arg) of
+ [0] -> t_integer();
+ _ ->
+ TmpRet = erl_bif_types:type(erlang, 'div', 2, [Dst, Arg]),
+ Zero = t_from_term(0),
+ %% If 0 is not part of the result, it cannot be part of the argument.
+ case t_is_subtype(Zero, Dst) of
+ false -> t_subtract(TmpRet, Zero);
+ true -> TmpRet
+ end
+ end;
+eval_inv_arith('-', 1, Dst, Arg) ->
+ erl_bif_types:type(erlang, '-', 2, [Arg, Dst]);
+eval_inv_arith('-', 2, Dst, Arg) ->
+ erl_bif_types:type(erlang, '+', 2, [Arg, Dst]).
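+
+%% A worked example (with illustrative types only): if Dst is known to lie
+%% in the range 10..20 and the other operand of '+' in 1..2, the remaining
+%% operand is constrained by the type of erlang:'-'(Dst, Arg), here roughly
+%% the integer range 8..19.  For '*', the subtraction of 0 above removes 0
+%% from the operand whenever 0 cannot occur in the result.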
+
+range_inc(neg_inf) -> neg_inf;
+range_inc(pos_inf) -> pos_inf;
+range_inc(Int) when is_integer(Int) -> Int + 1.
+
+range_dec(neg_inf) -> neg_inf;
+range_dec(pos_inf) -> pos_inf;
+range_dec(Int) when is_integer(Int) -> Int - 1.
+
+get_bif_test_constr(Dst, Arg, Type, State) ->
+ ArgFun = fun(Map) ->
+ DstType = lookup_type(Dst, Map),
+ case t_is_atom(true, DstType) of
+ true -> Type;
+ false -> t_any()
+ end
+ end,
+ ArgV = mk_fun_var(ArgFun, [Dst]),
+ DstFun = fun(Map) ->
+ ArgType = lookup_type(Arg, Map),
+ case t_is_none(t_inf(ArgType, Type)) of
+ true ->
+ case lists:member(ArgType, State#state.opaques) of
+ true ->
+ OpaqueStruct = erl_types:t_opaque_structure(ArgType),
+ case t_is_none(t_inf(OpaqueStruct, Type)) of
+ true -> t_from_term(false);
+ false ->
+ case t_is_subtype(ArgType, Type) of
+ true -> t_from_term(true);
+ false -> t_boolean()
+ end
+ end;
+ false -> t_from_term(false)
+ end;
+ false ->
+ case t_is_subtype(ArgType, Type) of
+ true -> t_from_term(true);
+ false -> t_boolean()
+ end
+ end
+ end,
+ DstV = mk_fun_var(DstFun, [Arg]),
+ mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
+ mk_constraint(Arg, sub, ArgV)]).
+
+%%=============================================================================
+%%
+%% Constraint solver.
+%%
+%%=============================================================================
+
+solve([Fun], State) ->
+ ?debug("============ Analyzing Fun: ~w ===========\n",
+ [debug_lookup_name(Fun)]),
+ solve_fun(Fun, dict:new(), State);
+solve([_|_] = SCC, State) ->
+ ?debug("============ Analyzing SCC: ~w ===========\n",
+ [[debug_lookup_name(F) || F <- SCC]]),
+ solve_scc(SCC, dict:new(), State, false).
+
+solve_fun(Fun, FunMap, State) ->
+ Cs = state__get_cs(Fun, State),
+ Deps = get_deps(Cs),
+ Ref = mk_constraint_ref(Fun, Deps),
+ %% Note that functions are always considered to succeed.
+ {ok, _MapDict, NewMap} = solve_ref_or_list(Ref, FunMap, dict:new(), State),
+ NewType = lookup_type(Fun, NewMap),
+ NewFunMap1 = case state__get_rec_var(Fun, State) of
+ error -> FunMap;
+ {ok, Var} -> enter_type(Var, NewType, FunMap)
+ end,
+ enter_type(Fun, NewType, NewFunMap1).
+
+solve_scc(SCC, Map, State, TryingUnit) ->
+ State1 = state__mark_as_non_self_rec(SCC, State),
+ Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
+ Vars = [Var || {_, {ok, Var}} <- Vars0],
+ Funs = [Fun || {Fun, {ok, _}} <- Vars0],
+ Types = unsafe_lookup_type_list(Funs, Map),
+ RecTypes = [t_limit(Type, ?TYPE_LIMIT) || Type <- Types],
+ CleanMap = lists:foldl(fun(Fun, AccFunMap) ->
+ dict:erase(t_var_name(Fun), AccFunMap)
+ end, Map, SCC),
+ Map1 = enter_type_lists(Vars, RecTypes, CleanMap),
+ ?debug("Checking SCC: ~w\n", [[debug_lookup_name(F) || F <- SCC]]),
+ SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State1) end,
+ Map2 = lists:foldl(SolveFun, Map1, SCC),
+ FunSet = ordsets:from_list([t_var_name(F) || F <- SCC]),
+ case maps_are_equal(Map2, Map, FunSet) of
+ true ->
+ ?debug("SCC ~w reached fixpoint\n", [SCC]),
+ NewTypes = unsafe_lookup_type_list(Funs, Map2),
+ case lists:all(fun(T) -> t_is_none(t_fun_range(T)) end, NewTypes)
+ andalso TryingUnit =:= false of
+ true ->
+ UnitTypes = [t_fun(state__fun_arity(F, State), t_unit())
+ || F <- Funs],
+ Map3 = enter_type_lists(Funs, UnitTypes, Map2),
+ solve_scc(SCC, Map3, State, true);
+ false ->
+ Map2
+ end;
+ false ->
+ ?debug("SCC ~w did not reach fixpoint\n", [SCC]),
+ solve_scc(SCC, Map2, State, TryingUnit)
+ end.
+
+scc_fold_fun(F, FunMap, State) ->
+ Deps = get_deps(state__get_cs(F, State)),
+ Cs = mk_constraint_ref(F, Deps),
+ %% Note that functions are always considered to succeed.
+ {ok, _NewMapDict, Map} = solve_ref_or_list(Cs, FunMap, dict:new(), State),
+ NewType0 = unsafe_lookup_type(F, Map),
+ NewType = t_limit(NewType0, ?TYPE_LIMIT),
+ NewFunMap = case state__get_rec_var(F, State) of
+ {ok, R} ->
+ enter_type(R, NewType, enter_type(F, NewType, FunMap));
+ error ->
+ enter_type(F, NewType, FunMap)
+ end,
+ ?debug("Done solving for function ~w :: ~s\n", [debug_lookup_name(F),
+ format_type(NewType)]),
+ NewFunMap.
+
+solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
+ Map, MapDict, State) ->
+ {OldLocalMap, Check} =
+ case dict:find(Id, MapDict) of
+ error -> {dict:new(), false};
+ {ok, M} -> {M, true}
+ end,
+ ?debug("Checking ref to fun: ~w\n", [debug_lookup_name(Id)]),
+ CheckDeps = ordsets:del_element(t_var_name(Id), Deps),
+ case Check andalso maps_are_equal(OldLocalMap, Map, CheckDeps) of
+ true ->
+ ?debug("Equal\n", []),
+ {ok, MapDict, Map};
+ false ->
+ ?debug("Not equal. Solving\n", []),
+ Cs = state__get_cs(Id, State),
+ Res =
+ case state__is_self_rec(Id, State) of
+ true -> solve_self_recursive(Cs, Map, MapDict, Id, t_none(), State);
+ false -> solve_ref_or_list(Cs, Map, MapDict, State)
+ end,
+ case Res of
+ {error, NewMapDict} ->
+ ?debug("Error solving for function ~p\n", [debug_lookup_name(Id)]),
+ Arity = state__fun_arity(Id, State),
+ FunType =
+ case state__prop_domain(t_var_name(Id), State) of
+ error -> t_fun(Arity, t_none());
+ {ok, Dom} -> t_fun(Dom, t_none())
+ end,
+ NewMap1 = enter_type(Id, FunType, Map),
+ NewMap2 =
+ case state__get_rec_var(Id, State) of
+ {ok, Var} -> enter_type(Var, FunType, NewMap1);
+ error -> NewMap1
+ end,
+ {ok, dict:store(Id, NewMap2, NewMapDict), NewMap2};
+ {ok, NewMapDict, NewMap} ->
+ ?debug("Done solving fun: ~p\n", [debug_lookup_name(Id)]),
+ FunType = lookup_type(Id, NewMap),
+ NewMap1 = enter_type(Id, FunType, Map),
+ NewMap2 =
+ case state__get_rec_var(Id, State) of
+ {ok, Var} -> enter_type(Var, FunType, NewMap1);
+ error -> NewMap1
+ end,
+ {ok, dict:store(Id, NewMap2, NewMapDict), NewMap2}
+ end
+ end;
+solve_ref_or_list(#constraint_list{type=Type, list = Cs, deps = Deps, id = Id},
+ Map, MapDict, State) ->
+ {OldLocalMap, Check} =
+ case dict:find(Id, MapDict) of
+ error -> {dict:new(), false};
+ {ok, M} -> {M, true}
+ end,
+ ?debug("Checking ref to list: ~w\n", [Id]),
+ case Check andalso maps_are_equal(OldLocalMap, Map, Deps) of
+ true ->
+ ?debug("~w equal ~w\n", [Type, Id]),
+ {ok, MapDict, Map};
+ false ->
+ ?debug("~w not equal: ~w. Solving\n", [Type, Id]),
+ solve_clist(Cs, Type, Id, Deps, MapDict, Map, State)
+ end.
+
+solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
+ ?debug("Solving self recursive ~w\n", [debug_lookup_name(Id)]),
+ {ok, RecVar} = state__get_rec_var(Id, State),
+ ?debug("OldRecType ~s\n", [format_type(RecType0)]),
+ RecType = t_limit(RecType0, ?TYPE_LIMIT),
+ Map1 = enter_type(RecVar, RecType, dict:erase(t_var_name(Id), Map)),
+ ?debug("\tMap in: ~p\n",[[{X, format_type(Y)}||{X, Y}<-dict:to_list(Map1)]]),
+ case solve_ref_or_list(Cs, Map1, MapDict, State) of
+ {error, _} = Error ->
+ case t_is_none(RecType0) of
+ true ->
+ %% Try again and assume that this is a non-terminating function.
+ Arity = state__fun_arity(Id, State),
+ NewRecType = t_fun(lists:duplicate(Arity, t_any()), t_unit()),
+ solve_self_recursive(Cs, Map, MapDict, Id, NewRecType, State);
+ false ->
+ Error
+ end;
+ {ok, NewMapDict, NewMap} ->
+ ?debug("\tMap: ~p\n",
+ [[{X, format_type(Y)} || {X, Y} <- dict:to_list(NewMap)]]),
+ NewRecType = unsafe_lookup_type(Id, NewMap),
+ case t_is_equal(NewRecType, RecType0) of
+ true ->
+ {ok, NewMapDict, enter_type(RecVar, NewRecType, NewMap)};
+ false ->
+ solve_self_recursive(Cs, Map, MapDict, Id, NewRecType, State)
+ end
+ end.
+
+solve_clist(Cs, conj, Id, Deps, MapDict, Map, State) ->
+ case solve_cs(Cs, Map, MapDict, State) of
+ {error, _} = Error -> Error;
+ {ok, NewMapDict, NewMap} = Ret ->
+ case Cs of
+ [_] ->
+ %% Just a special case for one conjunctive constraint.
+ Ret;
+ _ ->
+ case maps_are_equal(Map, NewMap, Deps) of
+ true -> {ok, dict:store(Id, NewMap, NewMapDict), NewMap};
+ false -> solve_clist(Cs, conj, Id, Deps, NewMapDict, NewMap, State)
+ end
+ end
+ end;
+solve_clist(Cs, disj, Id, _Deps, MapDict, Map, State) ->
+ Fun = fun(C, Dict) ->
+ case solve_ref_or_list(C, Map, Dict, State) of
+ {ok, NewDict, NewMap} -> {{ok, NewMap}, NewDict};
+ {error, _NewDict} = Error -> Error
+ end
+ end,
+ {Maps, NewMapDict} = lists:mapfoldl(Fun, MapDict, Cs),
+ case [X || {ok, X} <- Maps] of
+ [] -> {error, NewMapDict};
+ MapList ->
+ NewMap = join_maps(MapList),
+ {ok, dict:store(Id, NewMap, NewMapDict), NewMap}
+ end.
+
+solve_cs([#constraint_ref{} = C|Tail], Map, MapDict, State) ->
+ case solve_ref_or_list(C, Map, MapDict, State) of
+ {ok, NewMapDict, Map1} -> solve_cs(Tail, Map1, NewMapDict, State);
+ {error, _NewMapDict} = Error -> Error
+ end;
+solve_cs([#constraint_list{} = C|Tail], Map, MapDict, State) ->
+ case solve_ref_or_list(C, Map, MapDict, State) of
+ {ok, NewMapDict, Map1} -> solve_cs(Tail, Map1, NewMapDict, State);
+ {error, _NewMapDict} = Error -> Error
+ end;
+solve_cs([#constraint{} = C|Tail], Map, MapDict, State) ->
+ case solve_one_c(C, Map) of
+ error ->
+ ?debug("+++++++++++\nFailed: ~s :: ~s ~w ~s :: ~s\n+++++++++++\n",
+ [format_type(C#constraint.lhs),
+ format_type(lookup_type(C#constraint.lhs, Map)),
+ C#constraint.op,
+ format_type(C#constraint.rhs),
+ format_type(lookup_type(C#constraint.rhs, Map))]),
+ {error, MapDict};
+ {ok, NewMap} ->
+ solve_cs(Tail, NewMap, MapDict, State)
+ end;
+solve_cs([], Map, MapDict, _State) ->
+ {ok, MapDict, Map}.
+
+solve_one_c(#constraint{lhs = Lhs, rhs = Rhs, op = Op}, Map) ->
+ LhsType = lookup_type(Lhs, Map),
+ RhsType = lookup_type(Rhs, Map),
+ Inf = t_inf(LhsType, RhsType, opaque),
+ ?debug("Solving: ~s :: ~s ~w ~s :: ~s\n\tInf: ~s\n",
+ [format_type(Lhs), format_type(LhsType), Op,
+ format_type(Rhs), format_type(RhsType), format_type(Inf)]),
+ case t_is_none(Inf) of
+ true -> error;
+ false ->
+ case Op of
+ sub -> solve_subtype(Lhs, Inf, Map);
+ eq ->
+ case solve_subtype(Lhs, Inf, Map) of
+ error -> error;
+ {ok, Map1} -> solve_subtype(Rhs, Inf, Map1)
+ end
+ end
+ end.
+
+solve_subtype(Type, Inf, Map) ->
+ %% case cerl:is_literal(Type) of
+ %% true ->
+ %% case t_is_subtype(t_from_term(cerl:concrete(Type)), Inf) of
+ %% true -> {ok, Map};
+ %% false -> error
+ %% end;
+ %% false ->
+ try t_unify(Type, Inf) of
+ {_, List} -> {ok, enter_type_list(List, Map)}
+ catch
+ throw:{mismatch, _T1, _T2} ->
+ ?debug("Mismatch between ~s and ~s\n",
+ [format_type(_T1), format_type(_T2)]),
+ error
+ end.
+ %% end.
+
+%% ============================================================================
+%%
+%% Maps and types.
+%%
+%% ============================================================================
+
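+%% join_maps(Maps) keeps only the keys that are present in all of the maps
+%% and maps each such key to the supremum of its types. Keys missing from
+%% some map are dropped, which amounts to treating them as any().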
+join_maps(Maps) ->
+ Keys = lists:foldl(fun(TmpMap, AccKeys) ->
+ [Key || Key <- AccKeys, dict:is_key(Key, TmpMap)]
+ end,
+ dict:fetch_keys(hd(Maps)), tl(Maps)),
+ join_maps(Keys, Maps, dict:new()).
+
+join_maps([Key|Left], Maps = [Map|MapsLeft], AccMap) ->
+ NewType = join_one_key(Key, MapsLeft, lookup_type(Key, Map)),
+ NewAccMap = enter_type(Key, NewType, AccMap),
+ join_maps(Left, Maps, NewAccMap);
+join_maps([], _Maps, AccMap) ->
+ AccMap.
+
+join_one_key(Key, [Map|Maps], Type) ->
+ case t_is_any(Type) of
+ true -> Type;
+ false ->
+ NewType = lookup_type(Key, Map),
+ case t_is_equal(NewType, Type) of
+ true -> join_one_key(Key, Maps, Type);
+ false -> join_one_key(Key, Maps, t_sup(NewType, Type))
+ end
+ end;
+join_one_key(_Key, [], Type) ->
+ Type.
+
+maps_are_equal(Map1, Map2, Deps) ->
+ NewDeps = prune_keys(Map1, Map2, Deps),
+ maps_are_equal_1(Map1, Map2, NewDeps).
+
+maps_are_equal_1(Map1, Map2, [H|Tail]) ->
+ T1 = lookup_type(H, Map1),
+ T2 = lookup_type(H, Map2),
+ case t_is_equal(T1, T2) of
+ true -> maps_are_equal_1(Map1, Map2, Tail);
+ false ->
+ ?debug("~w: ~s =/= ~s\n", [H, format_type(T1), format_type(T2)]),
+ false
+ end;
+maps_are_equal_1(_Map1, _Map2, []) ->
+ true.
+
+-define(PRUNE_LIMIT, 100).
+
+prune_keys(Map1, Map2, Deps) ->
+ %% This is only worthwhile if the number of deps is reasonably large,
+ %% and also bigger than the number of elements in the maps.
+ NofDeps = length(Deps),
+ case NofDeps > ?PRUNE_LIMIT of
+ true ->
+ Keys1 = dict:fetch_keys(Map1),
+ case length(Keys1) > NofDeps of
+ true ->
+ Set1 = lists:sort(Keys1),
+ Set2 = lists:sort(dict:fetch_keys(Map2)),
+ ordsets:intersection(ordsets:union(Set1, Set2), Deps);
+ false ->
+ Deps
+ end;
+ false ->
+ Deps
+ end.
+
+enter_type(Key, Val, Map) when is_integer(Key) ->
+ ?debug("Entering ~s :: ~s\n", [format_type(t_var(Key)), format_type(Val)]),
+ case t_is_any(Val) of
+ true ->
+ dict:erase(Key, Map);
+ false ->
+ LimitedVal = t_limit(Val, ?INTERNAL_TYPE_LIMIT),
+ case dict:find(Key, Map) of
+ {ok, LimitedVal} -> Map;
+ {ok, _} -> dict:store(Key, LimitedVal, Map);
+ error -> dict:store(Key, LimitedVal, Map)
+ end
+ end;
+enter_type(Key, Val, Map) ->
+ ?debug("Entering ~s :: ~s\n", [format_type(Key), format_type(Val)]),
+ KeyName = t_var_name(Key),
+ case t_is_any(Val) of
+ true ->
+ dict:erase(KeyName, Map);
+ false ->
+ LimitedVal = t_limit(Val, ?INTERNAL_TYPE_LIMIT),
+ case dict:find(KeyName, Map) of
+ {ok, LimitedVal} -> Map;
+ {ok, _} -> dict:store(KeyName, LimitedVal, Map);
+ error -> dict:store(KeyName, LimitedVal, Map)
+ end
+ end.
+
+enter_type_lists([Key|KeyTail], [Val|ValTail], Map) ->
+ Map1 = enter_type(Key, Val, Map),
+ enter_type_lists(KeyTail, ValTail, Map1);
+enter_type_lists([], [], Map) ->
+ Map.
+
+enter_type_list([{Key, Val}|Tail], Map) ->
+ Map1 = enter_type(Key, Val, Map),
+ enter_type_list(Tail, Map1);
+enter_type_list([], Map) ->
+ Map.
+
+lookup_type_list(List, Map) ->
+ [lookup_type(X, Map) || X <- List].
+
+unsafe_lookup_type(Key, Map) ->
+ case dict:find(t_var_name(Key), Map) of
+ {ok, Type} -> Type;
+ error -> t_none()
+ end.
+
+unsafe_lookup_type_list(List, Map) ->
+ [unsafe_lookup_type(X, Map) || X <- List].
+
+lookup_type(Key, Map) when is_integer(Key) ->
+ case dict:find(Key, Map) of
+ error -> t_any();
+ {ok, Val} -> Val
+ end;
+lookup_type(#fun_var{'fun' = Fun}, Map) ->
+ Fun(Map);
+lookup_type(Key, Map) ->
+ %% Seems unused and dialyzer complains about it -- commented out.
+ %% case cerl:is_literal(Key) of
+ %% true -> t_from_term(cerl:concrete(Key));
+ %% false ->
+ Subst = t_subst(Key, Map),
+ t_sup(Subst, Subst).
+ %% end.
+
+mk_var(Var) ->
+ case cerl:is_literal(Var) of
+ true -> Var;
+ false ->
+ case cerl:is_c_values(Var) of
+ true -> t_product(mk_var_no_lit_list(cerl:values_es(Var)));
+ false -> t_var(cerl_trees:get_label(Var))
+ end
+ end.
+
+mk_var_list(List) ->
+ [mk_var(X) || X <- List].
+
+mk_var_no_lit(Var) ->
+ case cerl:is_literal(Var) of
+ true -> t_from_term(cerl:concrete(Var));
+ false -> mk_var(Var)
+ end.
+
+mk_var_no_lit_list(List) ->
+ [mk_var_no_lit(X) || X <- List].
+
+%% ============================================================================
+%%
+%% The State.
+%%
+%% ============================================================================
+
+new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes) ->
+ NameMap = dict:from_list([{MFA, Var} || {MFA, {Var, _Fun}, _Rec} <- SCC0]),
+ SCC = [mk_var(Fun) || {_MFA, {_Var, Fun}, _Rec} <- SCC0],
+ #state{callgraph = CallGraph, name_map = NameMap, next_label = NextLabel,
+ prop_types = PropTypes, plt = Plt, scc = SCC}.
+
+state__set_rec_dict(State, RecDict) ->
+ State#state{records = RecDict}.
+
+state__set_opaques(#state{records = RecDict} = State, {M, _F, _A}) ->
+ Opaques =
+ erl_types:module_builtin_opaques(M) ++ t_opaque_from_records(RecDict),
+ State#state{opaques = Opaques}.
+
+state__lookup_record(#state{records = Records}, Tag, Arity) ->
+ case erl_types:lookup_record(Tag, Arity, Records) of
+ {ok, Fields} ->
+ {ok, t_tuple([t_from_term(Tag)|
+ [FieldType || {_FieldName, FieldType} <- Fields]])};
+ error ->
+ error
+ end.
+
+state__set_in_match(State, Bool) ->
+ State#state{in_match = Bool}.
+
+state__is_in_match(#state{in_match = Bool}) ->
+ Bool.
+
+state__set_in_guard(State, Bool) ->
+ State#state{in_guard = Bool}.
+
+state__is_in_guard(#state{in_guard = Bool}) ->
+ Bool.
+
+state__get_fun_prototype(Op, Arity, State) ->
+ case t_is_fun(Op) of
+ true -> {State, Op};
+ false ->
+ {State1, [Ret|Args]} = state__mk_vars(Arity+1, State),
+ Fun = t_fun(Args, Ret),
+ {State1, Fun}
+ end.
+
+state__lookup_rec_var_in_scope(MFA, #state{name_map = NameMap}) ->
+ dict:find(MFA, NameMap).
+
+state__store_fun_arity(Tree, #state{fun_arities = Map} = State) ->
+ Arity = length(cerl:fun_vars(Tree)),
+ Id = mk_var(Tree),
+ State#state{fun_arities = dict:store(Id, Arity, Map)}.
+
+state__fun_arity(Id, #state{fun_arities = Map}) ->
+ dict:fetch(Id, Map).
+
+state__lookup_undef_var(Tree, #state{callgraph = CG, plt = Plt}) ->
+ Label = cerl_trees:get_label(Tree),
+ case dialyzer_callgraph:lookup_rec_var(Label, CG) of
+ error -> error;
+ {ok, MFA} ->
+ case dialyzer_plt:lookup(Plt, MFA) of
+ none -> error;
+ {value, {RetType, ArgTypes}} -> {ok, t_fun(ArgTypes, RetType)}
+ end
+ end.
+
+state__lookup_apply(Tree, #state{callgraph = Callgraph}) ->
+ Apply = cerl_trees:get_label(Tree),
+ case dialyzer_callgraph:lookup_call_site(Apply, Callgraph) of
+ error ->
+ unknown;
+ {ok, List} ->
+ case lists:member(external, List) of
+ true -> unknown;
+ false -> List
+ end
+ end.
+
+get_apply_constr(FunLabels, Dst, ArgTypes, #state{callgraph = CG} = State) ->
+ MFAs = [dialyzer_callgraph:lookup_name(Label, CG) || Label <- FunLabels],
+ case lists:member(error, MFAs) of
+ true -> error;
+ false ->
+ Constrs = [begin
+ State1 = state__new_constraint_context(State),
+ State2 = get_plt_constr(MFA, Dst, ArgTypes, State1),
+ state__cs(State2)
+ end || {ok, MFA} <- MFAs],
+ ApplyConstr = mk_disj_constraint_list(Constrs),
+ {ok, state__store_conj(ApplyConstr, State)}
+ end.
+
+state__scc(#state{scc = SCC}) ->
+ SCC.
+
+state__plt(#state{plt = PLT}) ->
+ PLT.
+
+state__new_constraint_context(State) ->
+ State#state{cs = []}.
+
+state__prop_domain(FunLabel, #state{prop_types = PropTypes}) ->
+ case dict:find(FunLabel, PropTypes) of
+ error -> error;
+ {ok, {_Range_Fun, Dom}} -> {ok, Dom};
+ {ok, FunType} -> {ok, t_fun_args(FunType)}
+ end.
+
+state__add_prop_constrs(Tree, #state{prop_types = PropTypes} = State) ->
+ Label = cerl_trees:get_label(Tree),
+ case dict:find(Label, PropTypes) of
+ error -> State;
+ {ok, FunType} ->
+ case t_fun_args(FunType) of
+ unknown -> State;
+ ArgTypes ->
+ case erl_types:any_none(ArgTypes) of
+ true -> not_called;
+ false ->
+ ?debug("Adding propagated constr: ~s for function ~w\n",
+ [format_type(FunType), debug_lookup_name(mk_var(Tree))]),
+ FunVar = mk_var(Tree),
+ state__store_conj(FunVar, sub, FunType, State)
+ end
+ end
+ end.
+
+state__cs(#state{cs = Cs}) ->
+ mk_conj_constraint_list(Cs).
+
+state__store_conj(C, #state{cs = Cs} = State) ->
+ State#state{cs = [C|Cs]}.
+
+state__store_conj_list([H|T], State) ->
+ State1 = state__store_conj(H, State),
+ state__store_conj_list(T, State1);
+state__store_conj_list([], State) ->
+ State.
+
+state__store_conj(Lhs, Op, Rhs, #state{cs = Cs} = State) ->
+ State#state{cs = [mk_constraint(Lhs, Op, Rhs)|Cs]}.
+
+state__store_conj_lists(List1, Op, List2, State) ->
+ {NewList1, NewList2} = strip_of_any_constrs(List1, List2),
+ state__store_conj_lists_1(NewList1, Op, NewList2, State).
+
+strip_of_any_constrs(List1, List2) ->
+ strip_of_any_constrs(List1, List2, [], []).
+
+strip_of_any_constrs([T1|Left1], [T2|Left2], Acc1, Acc2) ->
+ case t_is_any(T1) orelse constraint_opnd_is_any(T2) of
+ true -> strip_of_any_constrs(Left1, Left2, Acc1, Acc2);
+ false -> strip_of_any_constrs(Left1, Left2, [T1|Acc1], [T2|Acc2])
+ end;
+strip_of_any_constrs([], [], Acc1, Acc2) ->
+ {Acc1, Acc2}.
+
+state__store_conj_lists_1([Arg1|Arg1Tail], Op, [Arg2|Arg2Tail], State) ->
+ State1 = state__store_conj(Arg1, Op, Arg2, State),
+ state__store_conj_lists_1(Arg1Tail, Op, Arg2Tail, State1);
+state__store_conj_lists_1([], _Op, [], State) ->
+ State.
+
+state__mk_var(#state{next_label = NL} = State) ->
+ {State#state{next_label = NL+1}, t_var(NL)}.
+
+state__mk_vars(N, #state{next_label = NL} = State) ->
+ NewLabel = NL + N,
+ Vars = [t_var(X) || X <- lists:seq(NL, NewLabel-1)],
+ {State#state{next_label = NewLabel}, Vars}.
+
+state__store_constrs(Id, Cs, #state{cmap = Dict} = State) ->
+ NewDict = dict:store(Id, Cs, Dict),
+ State#state{cmap = NewDict}.
+
+state__get_cs(Var, #state{cmap = Dict}) ->
+ dict:fetch(Var, Dict).
+
+%% The functions given here will not be treated as self-recursive.
+%% Any self-recursion among them has to be handled manually.
+state__mark_as_non_self_rec(SCC, #state{non_self_recs = NS} = State) ->
+ State#state{non_self_recs = ordsets:union(NS, ordsets:from_list(SCC))}.
+
+state__is_self_rec(Fun, #state{callgraph = CallGraph, non_self_recs = NS}) ->
+ case ordsets:is_element(Fun, NS) of
+ true -> false;
+ false -> dialyzer_callgraph:is_self_rec(t_var_name(Fun), CallGraph)
+ end.
+
+state__store_funs(Vars0, Funs0, #state{fun_map = Map} = State) ->
+ debug_make_name_map(Vars0, Funs0),
+ Vars = mk_var_list(Vars0),
+ Funs = mk_var_list(Funs0),
+ NewMap = lists:foldl(fun({Var, Fun}, MP) -> orddict:store(Var, Fun, MP) end,
+ Map, lists:zip(Vars, Funs)),
+ State#state{fun_map = NewMap}.
+
+state__get_rec_var(Fun, #state{fun_map = Map}) ->
+ case [V || {V, FV} <- Map, FV =:= Fun] of
+ [Var] -> {ok, Var};
+ [] -> error
+ end.
+
+state__finalize(State) ->
+ State1 = enumerate_constraints(State),
+ order_fun_constraints(State1).
+
+%% ============================================================================
+%%
+%% Constraints
+%%
+%% ============================================================================
+
+-spec mk_constraint(erl_types:erl_type(), constr_op(), fvar_or_type()) -> #constraint{}.
+
+mk_constraint(Lhs, Op, Rhs) ->
+ case t_is_any(Lhs) orelse constraint_opnd_is_any(Rhs) of
+ false ->
+ Deps = find_constraint_deps([Lhs, Rhs]),
+ C0 = mk_constraint_1(Lhs, Op, Rhs),
+ C = C0#constraint{deps = Deps},
+ case Deps =:= [] of
+ true ->
+ %% This constraint is constant. Solve it immediately.
+ case solve_one_c(C, dict:new()) of
+ error -> throw(error);
+ _ ->
+              %% This is always true; keep the constraint anyway for
+              %% bookkeeping reasons.
+ C
+ end;
+ false ->
+ C
+ end;
+ true ->
+ C = mk_constraint_1(t_any(), Op, t_any()),
+ C#constraint{deps = []}
+ end.
+
+%% The following function is used so that we do not call
+%% erl_types:t_is_any/1 with a term other than an erl_type().
+-spec constraint_opnd_is_any(fvar_or_type()) -> boolean().
+
+constraint_opnd_is_any(#fun_var{}) -> false;
+constraint_opnd_is_any(Type) -> t_is_any(Type).
+
+-spec mk_fun_var(fun((_) -> erl_types:erl_type()), [erl_types:erl_type()]) -> #fun_var{}.
+
+mk_fun_var(Fun, Types) ->
+ Deps = [t_var_name(Var) || Var <- t_collect_vars(t_product(Types))],
+ #fun_var{'fun' = Fun, deps = ordsets:from_list(Deps)}.
+
+-spec get_deps(constr()) -> [dep()].
+
+get_deps(#constraint{deps = D}) -> D;
+get_deps(#constraint_list{deps = D}) -> D;
+get_deps(#constraint_ref{deps = D}) -> D.
+
+-spec find_constraint_deps([fvar_or_type()]) -> [dep()].
+
+find_constraint_deps(List) ->
+ ordsets:from_list(find_constraint_deps(List, [])).
+
+find_constraint_deps([#fun_var{deps = Deps}|Tail], Acc) ->
+ find_constraint_deps(Tail, [Deps|Acc]);
+find_constraint_deps([Type|Tail], Acc) ->
+ NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc],
+ find_constraint_deps(Tail, NewAcc);
+find_constraint_deps([], Acc) ->
+ lists:flatten(Acc).
+
+mk_constraint_1(Lhs, eq, Rhs) when Lhs < Rhs ->
+ #constraint{lhs = Lhs, op = eq, rhs = Rhs};
+mk_constraint_1(Lhs, eq, Rhs) ->
+ #constraint{lhs = Rhs, op = eq, rhs = Lhs};
+mk_constraint_1(Lhs, Op, Rhs) ->
+ #constraint{lhs = Lhs, op = Op, rhs = Rhs}.
+
+mk_constraints([Lhs|LhsTail], Op, [Rhs|RhsTail]) ->
+ [mk_constraint(Lhs, Op, Rhs)|mk_constraints(LhsTail, Op, RhsTail)];
+mk_constraints([], _Op, []) ->
+ [].
+
+mk_constraint_ref(Id, Deps) ->
+ #constraint_ref{id = Id, deps = Deps}.
+
+mk_constraint_list(Type, List) ->
+ List1 = ordsets:from_list(lift_lists(Type, List)),
+ List2 = ordsets:filter(fun(X) -> get_deps(X) =/= [] end, List1),
+ Deps = calculate_deps(List2),
+ case Deps =:= [] of
+ true -> #constraint_list{type = conj,
+ list = [mk_constraint(t_any(), eq, t_any())],
+ deps = []};
+ false -> #constraint_list{type = Type, list = List2, deps = Deps}
+ end.
+
+lift_lists(Type, List) ->
+ lift_lists(Type, List, []).
+
+lift_lists(Type, [#constraint_list{type = Type, list = List}|Tail], Acc) ->
+ lift_lists(Type, Tail, List++Acc);
+lift_lists(Type, [C|Tail], Acc) ->
+ lift_lists(Type, Tail, [C|Acc]);
+lift_lists(_Type, [], Acc) ->
+ Acc.
+
+update_constraint_list(CL, List) ->
+ CL#constraint_list{list = List}.
+
+%% We expand guard constraints into disjunctive normal form to gain
+%% precision in simple guards. However, because of the exponential
+%% growth of this expansion in the presence of disjunctions, we can
+%% get into trouble while expanding.
+%%
+%% To limit this we only expand when the number of disjunctions is
+%% below a certain limit. The limit is currently set based on the
+%% behaviour of boolean 'or':
+%%
+%%         V1 = V2 or V3
+%%
+%% gives us, in simplified form, the constraints
+%%
+%%         <Some cs> * ((V1 = true) + (V2 = true) + (V1 = false))
+%%
+%% and thus a three-way disjunction. If we want to allow two levels of
+%% disjunction we need 3^2 = 9 disjunctions; for three levels we need
+%% 3^3 = 27. More than that seems unnecessary and tends to blow up.
+%%
+%% Note that by not expanding we lose some precision, but we get a
+%% safe over-approximation.
+
+-define(DISJ_NORM_FORM_LIMIT, 28).
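+%% Illustration of the limit: each boolean 'or' contributes a three-way
+%% disjunction, so expanding K nested levels yields 3^K conjunctions;
+%% 3^3 = 27 fits under the limit of 28, while a fourth level (81) does not.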
+
+mk_disj_norm_form(#constraint_list{} = CL) ->
+ try
+ List1 = expand_to_conjunctions(CL),
+ mk_disj_constraint_list(List1)
+ catch
+ throw:too_many_disj -> CL
+ end.
+
+expand_to_conjunctions(#constraint_list{type = conj, list = List}) ->
+ List1 = [C || C <- List, is_simple_constraint(C)],
+ List2 = [expand_to_conjunctions(C) || #constraint_list{} = C <- List],
+ case List2 =:= [] of
+ true -> [mk_conj_constraint_list(List1)];
+ false ->
+ case List2 of
+ [JustOneList] ->
+ [mk_conj_constraint_list([L|List1]) || L <- JustOneList];
+ _ ->
+ combine_conj_lists(List2, List1)
+ end
+ end;
+expand_to_conjunctions(#constraint_list{type = disj, list = List}) ->
+ if length(List) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
+ true -> ok
+ end,
+ List1 = [C || C <- List, is_simple_constraint(C)],
+ %% Just an assert.
+ [] = [C || #constraint{} = C <- List1],
+ Expanded = lists:flatten([expand_to_conjunctions(C)
+ || #constraint_list{} = C <- List]),
+ ReturnList = Expanded ++ List1,
+ if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
+ true -> ReturnList
+ end.
+
+is_simple_constraint(#constraint{}) -> true;
+is_simple_constraint(#constraint_ref{}) -> true;
+is_simple_constraint(#constraint_list{}) -> false.
+
+combine_conj_lists([List1, List2|Left], Prefix) ->
+ NewList = [mk_conj_constraint_list([L1, L2]) || L1 <- List1, L2 <- List2],
+ if length(NewList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
+ true -> ok
+ end,
+ combine_conj_lists([NewList|Left], Prefix);
+combine_conj_lists([List], Prefix) ->
+ [mk_conj_constraint_list([mk_conj_constraint_list(Prefix), L]) || L <- List].
+
+calculate_deps(List) ->
+ calculate_deps(List, []).
+
+calculate_deps([H|Tail], Acc) ->
+ Deps = get_deps(H),
+ calculate_deps(Tail, [Deps|Acc]);
+calculate_deps([], Acc) ->
+ ordsets:from_list(lists:flatten(Acc)).
+
+mk_conj_constraint_list(List) ->
+ mk_constraint_list(conj, List).
+
+mk_disj_constraint_list([NotReallyAList]) ->
+ NotReallyAList;
+mk_disj_constraint_list(List) ->
+ %% Make sure each element in the list is either a conjunction or a
+ %% ref. Wrap single constraints into conjunctions.
+ List1 = [wrap_simple_constr(C) || C <- List],
+ mk_constraint_list(disj, List1).
+
+wrap_simple_constr(#constraint{} = C) -> mk_conj_constraint_list([C]);
+wrap_simple_constr(#constraint_list{} = C) -> C;
+wrap_simple_constr(#constraint_ref{} = C) -> C.
+
+enumerate_constraints(State) ->
+ Cs = [mk_constraint_ref(Id, get_deps(state__get_cs(Id, State)))
+ || Id <- state__scc(State)],
+ {_, _, NewState} = enumerate_constraints(Cs, 0, [], State),
+ NewState.
+
+enumerate_constraints([#constraint_ref{id = Id} = C|Tail], N, Acc, State) ->
+ Cs = state__get_cs(Id, State),
+ {[NewCs], NewN, NewState1} = enumerate_constraints([Cs], N, [], State),
+ NewState2 = state__store_constrs(Id, NewCs, NewState1),
+ enumerate_constraints(Tail, NewN+1, [C|Acc], NewState2);
+enumerate_constraints([#constraint_list{type = conj, list = List} = C|Tail],
+ N, Acc, State) ->
+  %% Separate the flat constraints from the deep ones to make a
+  %% separate fixpoint iteration over the flat ones for speed.
+ {Flat, Deep} = lists:splitwith(fun(#constraint{}) -> true;
+ (#constraint_list{}) -> false;
+ (#constraint_ref{}) -> false
+ end, List),
+ {NewFlat, N1, State1} = enumerate_constraints(Flat, N, [], State),
+ {NewDeep, N2, State2} = enumerate_constraints(Deep, N1, [], State1),
+ {NewList, N3} =
+ case shorter_than_two(NewFlat) orelse (NewDeep =:= []) of
+ true -> {NewFlat ++ NewDeep, N2};
+ false ->
+ {NewCLists, TmpN} = group_constraints_in_components(NewFlat, N2),
+ {NewCLists ++ NewDeep, TmpN}
+ end,
+ NewAcc = [C#constraint_list{list = NewList, id = {list, N3}}|Acc],
+ enumerate_constraints(Tail, N3+1, NewAcc, State2);
+enumerate_constraints([#constraint_list{list = List, type = disj} = C|Tail],
+ N, Acc, State) ->
+ {NewList, NewN, NewState} = enumerate_constraints(List, N, [], State),
+ NewAcc = [C#constraint_list{list = NewList, id = {list, NewN}}|Acc],
+ enumerate_constraints(Tail, NewN+1, NewAcc, NewState);
+enumerate_constraints([#constraint{} = C|Tail], N, Acc, State) ->
+ enumerate_constraints(Tail, N, [C|Acc], State);
+enumerate_constraints([], N, Acc, State) ->
+ {lists:reverse(Acc), N, State}.
+
+shorter_than_two([]) -> true;
+shorter_than_two([_]) -> true;
+shorter_than_two([_|_]) -> false.
+
+group_constraints_in_components(Cs, N) ->
+ DepList = [Deps || #constraint{deps = Deps} <- Cs],
+ case find_dep_components(DepList, []) of
+ [_] -> {Cs, N};
+ [_|_] = Components ->
+ ConstrComp = [[C || #constraint{deps = D} = C <- Cs,
+ ordsets:is_subset(D, Comp)]
+ || Comp <- Components],
+ lists:mapfoldl(fun(CComp, TmpN) ->
+ TmpCList = mk_conj_constraint_list(CComp),
+ {TmpCList#constraint_list{id = {list, TmpN}},
+ TmpN + 1}
+ end, N, ConstrComp)
+ end.
+
+find_dep_components([Set|Left], AccComponents) ->
+ {Component, Ungrouped} = find_dep_components(Left, Set, []),
+ case Component =:= Set of
+ true -> find_dep_components(Ungrouped, [Component|AccComponents]);
+ false -> find_dep_components([Component|Ungrouped], AccComponents)
+ end;
+find_dep_components([], AccComponents) ->
+ AccComponents.
+
+find_dep_components([Set|Left], AccSet, Ungrouped) ->
+ case ordsets:intersection(Set, AccSet) of
+ [] -> find_dep_components(Left, AccSet, [Set|Ungrouped]);
+ [_|_] -> find_dep_components(Left, ordsets:union(Set, AccSet), Ungrouped)
+ end;
+find_dep_components([], AccSet, Ungrouped) ->
+ {AccSet, Ungrouped}.
+
+%% Put the fun ref constraints last in any conjunction since we need
+%% to separate the environment from the interior of the function.
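+%% In practice this means that within each conjunction the plain constraints
+%% (the enclosing environment) come first and any #constraint_ref{}s to
+%% nested funs are moved to the end; see order_fun_constraints/4 below.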
+order_fun_constraints(State) ->
+ Cs = [mk_constraint_ref(Id, get_deps(state__get_cs(Id, State)))
+ || Id <- state__scc(State)],
+ order_fun_constraints(Cs, State).
+
+order_fun_constraints([#constraint_ref{id = Id}|Tail], State) ->
+ Cs = state__get_cs(Id, State),
+ {[NewCs], State1} = order_fun_constraints([Cs], [], [], State),
+ NewState = state__store_constrs(Id, NewCs, State1),
+ order_fun_constraints(Tail, NewState);
+order_fun_constraints([], State) ->
+ State.
+
+order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) ->
+ order_fun_constraints(Tail, [C|Funs], Acc, State);
+order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail],
+ Funs, Acc, State) ->
+ {NewList, NewState} =
+ case Type of
+ conj -> order_fun_constraints(List, [], [], State);
+ disj ->
+ FoldFun = fun(X, AccState) ->
+ {[NewX], NewAccState} =
+ order_fun_constraints([X], [], [], AccState),
+ {NewX, NewAccState}
+ end,
+ lists:mapfoldl(FoldFun, State, List)
+ end,
+ NewAcc = [update_constraint_list(C, NewList)|Acc],
+ order_fun_constraints(Tail, Funs, NewAcc, NewState);
+order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) ->
+ order_fun_constraints(Tail, Funs, [C|Acc], State);
+order_fun_constraints([], Funs, Acc, State) ->
+ NewState = order_fun_constraints(Funs, State),
+ {lists:reverse(Acc)++Funs, NewState}.
+
+%% ============================================================================
+%%
+%% Utilities.
+%%
+%% ============================================================================
+
+is_singleton_non_number_type(Type) ->
+ case t_is_number(Type) of
+ true -> false;
+ false -> is_singleton_type(Type)
+ end.
+
+is_singleton_type(Type) ->
+ case t_is_atom(Type) of
+ true ->
+ case t_atom_vals(Type) of
+ unknown -> false;
+ [_] -> true;
+ [_|_] -> false
+ end;
+ false ->
+ case t_is_integer(Type) of
+ true ->
+ case t_number_vals(Type) of
+ unknown -> false;
+ [_] -> true;
+ [_|_] -> false
+ end;
+ false ->
+ t_is_nil(Type)
+ end
+ end.
+
+%% ============================================================================
+%%
+%% Pretty printer and debug facilities.
+%%
+%% ============================================================================
+
+-ifdef(DEBUG_CONSTRAINTS).
+-ifndef(DEBUG).
+-define(DEBUG, true).
+-endif.
+-endif.
+
+-ifdef(DEBUG).
+format_type(#fun_var{deps = Deps}) ->
+ io_lib:format("Fun(~s)", [lists:flatten([format_type(t_var(X))||X<-Deps])]);
+format_type(Type) ->
+ case cerl:is_literal(Type) of
+ true -> io_lib:format("~w", [cerl:concrete(Type)]);
+ false -> erl_types:t_to_string(Type)
+ end.
+-endif.
+
+-ifdef(DEBUG_NAME_MAP).
+debug_make_name_map(Vars, Funs) ->
+ Map = get(dialyzer_typesig_map),
+ NewMap =
+ if Map =:= undefined -> debug_make_name_map(Vars, Funs, dict:new());
+ true -> debug_make_name_map(Vars, Funs, Map)
+ end,
+ put(dialyzer_typesig_map, NewMap).
+
+debug_make_name_map([Var|VarLeft], [Fun|FunLeft], Map) ->
+ Name = {cerl:fname_id(Var), cerl:fname_arity(Var)},
+ FunLabel = cerl_trees:get_label(Fun),
+ debug_make_name_map(VarLeft, FunLeft, dict:store(FunLabel, Name, Map));
+debug_make_name_map([], [], Map) ->
+ Map.
+
+debug_lookup_name(Var) ->
+ case dict:find(t_var_name(Var), get(dialyzer_typesig_map)) of
+ error -> Var;
+ {ok, Name} -> Name
+ end.
+
+-else.
+debug_make_name_map(_Vars, _Funs) ->
+ ok.
+-endif.
+
+-ifdef(DEBUG_CONSTRAINTS).
+pp_constrs_scc(SCC, State) ->
+ [pp_constrs(Fun, state__get_cs(Fun, State), State) || Fun <- SCC].
+
+pp_constrs(Fun, Cs, State) ->
+ io:format("Constraints for fun: ~w\n", [debug_lookup_name(Fun)]),
+ MaxDepth = pp_constraints(Cs, State),
+ io:format("Depth: ~w\n", [MaxDepth]).
+
+pp_constraints(Cs, State) ->
+ Res = pp_constraints([Cs], none, 0, 0, State),
+ io:nl(),
+ Res.
+
+pp_constraints([List|Tail], Separator, Level, MaxDepth,
+ State) when is_list(List) ->
+ pp_constraints(List++Tail, Separator, Level, MaxDepth, State);
+pp_constraints([#constraint_ref{id = Id}|Left], Separator,
+ Level, MaxDepth, State) ->
+ Cs = state__get_cs(Id, State),
+ io:format("%Ref ~w%", [t_var_name(Id)]),
+ pp_constraints([Cs|Left], Separator, Level, MaxDepth, State);
+pp_constraints([#constraint{lhs = Lhs, op = Op, rhs = Rhs}], _Separator,
+ Level, MaxDepth, _State) ->
+ io:format("~s ~w ~s", [format_type(Lhs), Op, format_type(Rhs)]),
+ erlang:max(Level, MaxDepth);
+pp_constraints([#constraint{lhs = Lhs, op = Op, rhs = Rhs}|Tail], Separator,
+ Level, MaxDepth, State) ->
+ io:format("~s ~w ~s ~s ", [format_type(Lhs), Op, format_type(Rhs),Separator]),
+ pp_constraints(Tail, Separator, Level, MaxDepth, State);
+pp_constraints([#constraint_list{type = Type, list = List, id = Id}],
+ _Separator, Level, MaxDepth, State) ->
+ io:format("%List ~w(", [Id]),
+ NewSeparator = case Type of
+ conj -> "*";
+ disj -> "+"
+ end,
+ NewMaxDepth = pp_constraints(List, NewSeparator, Level + 1, MaxDepth, State),
+ io:format(")", []),
+ NewMaxDepth;
+pp_constraints([#constraint_list{type = Type, list = List, id = Id}|Tail],
+ Separator, Level, MaxDepth, State) ->
+ io:format("List ~w(", [Id]),
+ NewSeparator = case Type of
+ conj -> "*";
+ disj -> "+"
+ end,
+ NewMaxDepth = pp_constraints(List, NewSeparator, Level+1, MaxDepth, State),
+ io:format(") ~s\n~s ", [Separator, Separator]),
+ pp_constraints(Tail, Separator, Level, NewMaxDepth, State).
+-else.
+pp_constrs_scc(_SCC, _State) ->
+ ok.
+-endif.
+
+-ifdef(TO_DOT).
+
+constraints_to_dot_scc(SCC, State) ->
+ io:format("SCC: ~p\n", [SCC]),
+ Name = lists:flatten([io_lib:format("'~w'", [debug_lookup_name(Fun)])
+ || Fun <- SCC]),
+ Cs = [state__get_cs(Fun, State) || Fun <- SCC],
+ constraints_to_dot(Cs, Name, State).
+
+constraints_to_dot(Cs0, Name, State) ->
+ NofCs = length(Cs0),
+ Cs = lists:zip(lists:seq(1, NofCs), Cs0),
+ {Graph, Opts, _N} = constraints_to_nodes(Cs, NofCs + 1, 1, [], [], State),
+ hipe_dot:translate_list(Graph, "/tmp/cs.dot", "foo", Opts),
+ Res = os:cmd("dot -o /tmp/"++ Name ++ ".ps -T ps /tmp/cs.dot"),
+ io:format("Res: ~p~n", [Res]),
+ ok.
+
+constraints_to_nodes([{Name, #constraint_list{type = Type, list = List, id=Id}}
+ |Left], N, Level, Graph, Opts, State) ->
+ N1 = N + length(List),
+ NewList = lists:zip(lists:seq(N, N1 - 1), List),
+ Names = [SubName || {SubName, _C} <- NewList],
+ Edges = [{Name, SubName} || SubName <- Names],
+ ThisNode = [{Name, Opt} || Opt <- [{label,
+ lists:flatten(io_lib:format("~w", [Id]))},
+ {shape, get_shape(Type)},
+ {level, Level}]],
+ {NewGraph, NewOpts, N2} = constraints_to_nodes(NewList, N1, Level+1,
+ [Edges|Graph],
+ [ThisNode|Opts], State),
+ constraints_to_nodes(Left, N2, Level, NewGraph, NewOpts, State);
+constraints_to_nodes([{Name, #constraint{lhs = Lhs, op = Op, rhs = Rhs}}|Left],
+ N, Level, Graph, Opts, State) ->
+ Label = lists:flatten(io_lib:format("~s ~w ~s",
+ [format_type(Lhs), Op,
+ format_type(Rhs)])),
+ ThisNode = [{Name, Opt} || Opt <- [{label, Label}, {level, Level}]],
+ NewOpts = [ThisNode|Opts],
+ constraints_to_nodes(Left, N, Level, Graph, NewOpts, State);
+constraints_to_nodes([{Name, #constraint_ref{id = Id0}}|Left],
+ N, Level, Graph, Opts, State) ->
+ Id = debug_lookup_name(Id0),
+ CList = state__get_cs(Id0, State),
+ ThisNode = [{Name, Opt} || Opt <- [{label,
+ lists:flatten(io_lib:format("~w", [Id]))},
+ {shape, ellipse},
+ {level, Level}]],
+ NewList = [{N, CList}],
+ {NewGraph, NewOpts, N1} = constraints_to_nodes(NewList, N + 1, Level + 1,
+ [{Name, N}|Graph],
+ [ThisNode|Opts], State),
+ constraints_to_nodes(Left, N1, Level, NewGraph, NewOpts, State);
+constraints_to_nodes([], N, _Level, Graph, Opts, _State) ->
+ {lists:flatten(Graph), lists:flatten(Opts), N}.
+
+get_shape(conj) -> box;
+get_shape(disj) -> diamond.
+
+-else.
+constraints_to_dot_scc(_SCC, _State) ->
+ ok.
+-endif.
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
new file mode 100644
index 0000000000..fa9ad2eae2
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -0,0 +1,458 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_utils.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 5 Dec 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_utils).
+
+-export([
+ format_sig/1,
+ format_sig/2,
+ get_abstract_code_from_beam/1,
+ get_abstract_code_from_src/1,
+ get_abstract_code_from_src/2,
+ get_core_from_abstract_code/1,
+ get_core_from_abstract_code/2,
+ get_core_from_src/1,
+ get_core_from_src/2,
+ get_record_and_type_info/1,
+ get_spec_info/3,
+ merge_records/2,
+ pp_hook/0,
+ process_record_remote_types/1,
+ src_compiler_opts/0
+ ]).
+
+-include("dialyzer.hrl").
+
+%%
+%% Types that need to be imported from somewhere else
+%%
+
+-type abstract_code() :: [tuple()]. %% XXX: refine
+-type comp_options()  :: [atom()]. %% XXX: only a restricted set of options is used
+
+%% ============================================================================
+%%
+%% Compilation utils
+%%
+%% ============================================================================
+
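+%% These helpers take an Erlang source file (or a .beam file with debug
+%% info) to abstract code and further to Core Erlang: the *_from_src
+%% functions call compile:file/2 with the 'to_pp' option, and
+%% get_core_from_abstract_code/2 then runs compile:forms/2 with the
+%% options from src_compiler_opts() (which include 'to_core').
+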
+-spec get_abstract_code_from_src(atom() | file:filename()) ->
+ {'ok', abstract_code()} | {'error', [string()]}.
+
+get_abstract_code_from_src(File) ->
+ get_abstract_code_from_src(File, src_compiler_opts()).
+
+-spec get_abstract_code_from_src(atom() | file:filename(), comp_options()) ->
+ {'ok', abstract_code()} | {'error', [string()]}.
+
+get_abstract_code_from_src(File, Opts) ->
+ case compile:file(File, [to_pp, binary|Opts]) of
+ error -> {error, []};
+ {error, Errors, _} -> {error, format_errors(Errors)};
+ {ok, _, AbstrCode} -> {ok, AbstrCode}
+ end.
+
+-type get_core_from_src_ret() :: {'ok', cerl:c_module()} | {'error', string()}.
+
+-spec get_core_from_src(file:filename()) -> get_core_from_src_ret().
+
+get_core_from_src(File) ->
+ get_core_from_src(File, []).
+
+-spec get_core_from_src(file:filename(), comp_options()) -> get_core_from_src_ret().
+
+get_core_from_src(File, Opts) ->
+ case get_abstract_code_from_src(File, Opts) of
+ {error, _} = Error -> Error;
+ {ok, AbstrCode} ->
+ case get_core_from_abstract_code(AbstrCode, Opts) of
+ error -> {error, " Could not get Core Erlang code from abstract code"};
+ {ok, _Core} = C -> C
+ end
+ end.
+
+-spec get_abstract_code_from_beam(file:filename()) -> 'error' | {'ok', abstract_code()}.
+
+get_abstract_code_from_beam(File) ->
+ case beam_lib:chunks(File, [abstract_code]) of
+ {ok, {_, List}} ->
+ case lists:keyfind(abstract_code, 1, List) of
+ {abstract_code, {raw_abstract_v1, Abstr}} -> {ok, Abstr};
+ _ -> error
+ end;
+ _ ->
+ %% No or unsuitable abstract code.
+ error
+ end.
+
+-type get_core_from_abs_ret() :: {'ok', cerl:c_module()} | 'error'.
+
+-spec get_core_from_abstract_code(abstract_code()) -> get_core_from_abs_ret().
+
+get_core_from_abstract_code(AbstrCode) ->
+ get_core_from_abstract_code(AbstrCode, []).
+
+-spec get_core_from_abstract_code(abstract_code(), comp_options()) -> get_core_from_abs_ret().
+
+get_core_from_abstract_code(AbstrCode, Opts) ->
+ %% We do not want the parse_transforms around since we already
+ %% performed them. In some cases we end up in trouble when
+ %% performing them again.
+ AbstrCode1 = cleanup_parse_transforms(AbstrCode),
+ try compile:forms(AbstrCode1, Opts ++ src_compiler_opts()) of
+ {ok, _, Core} -> {ok, Core};
+ _What -> error
+ catch
+ error:_ -> error
+ end.
+
+%% ============================================================================
+%%
+%% Typed Records
+%%
+%% ============================================================================
+
+-spec get_record_and_type_info(abstract_code()) ->
+ {'ok', dict()} | {'error', string()}.
+
+get_record_and_type_info(AbstractCode) ->
+ Module = get_module(AbstractCode),
+ get_record_and_type_info(AbstractCode, Module, dict:new()).
+
+-spec get_record_and_type_info(abstract_code(), atom(), dict()) ->
+ {'ok', dict()} | {'error', string()}.
+
+get_record_and_type_info([{attribute, _, record, {Name, Fields0}}|Left],
+ Module, RecDict) ->
+ case get_record_fields(Fields0, RecDict) of
+ {ok, Fields} ->
+ Arity = length(Fields),
+ Fun = fun(OldOrdDict) -> orddict:store(Arity, Fields, OldOrdDict) end,
+ NewRecDict = dict:update({record, Name}, Fun, [{Arity, Fields}], RecDict),
+ get_record_and_type_info(Left, Module, NewRecDict);
+ {error, Error} ->
+ {error, lists:flatten(io_lib:format(" Error while parsing #~w{}: ~s\n",
+ [Name, Error]))}
+ end;
+get_record_and_type_info([{attribute, _, type, {{record, Name}, Fields0, []}}
+ |Left], Module, RecDict) ->
+ %% This overrides the original record declaration.
+ case get_record_fields(Fields0, RecDict) of
+ {ok, Fields} ->
+ Arity = length(Fields),
+ Fun = fun(OldOrdDict) -> orddict:store(Arity, Fields, OldOrdDict) end,
+ NewRecDict = dict:update({record, Name}, Fun, [{Arity, Fields}], RecDict),
+ get_record_and_type_info(Left, Module, NewRecDict);
+ {error, Error} ->
+ {error, lists:flatten(io_lib:format(" Error while parsing #~w{}: ~s\n",
+ [Name, Error]))}
+ end;
+get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left],
+ Module, RecDict) when Attr =:= 'type'; Attr =:= 'opaque' ->
+ try
+ NewRecDict = add_new_type(Attr, Name, TypeForm, [], Module, RecDict),
+ get_record_and_type_info(Left, Module, NewRecDict)
+ catch
+ throw:{error, _} = Error -> Error
+ end;
+get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left],
+ Module, RecDict) when Attr =:= 'type'; Attr =:= 'opaque' ->
+ try
+ NewRecDict = add_new_type(Attr, Name, TypeForm, Args, Module, RecDict),
+ get_record_and_type_info(Left, Module, NewRecDict)
+ catch
+ throw:{error, _} = Error -> Error
+ end;
+get_record_and_type_info([_Other|Left], Module, RecDict) ->
+ get_record_and_type_info(Left, Module, RecDict);
+get_record_and_type_info([], _Module, RecDict) ->
+ {ok, RecDict}.
+
+add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) ->
+ case erl_types:type_is_defined(TypeOrOpaque, Name, RecDict) of
+ true ->
+ throw({error, io_lib:format("Type already defined: ~w\n", [Name])});
+ false ->
+ ArgTypes = [erl_types:t_from_form(X) || X <- ArgForms],
+ _Type = erl_types:t_from_form(TypeForm, RecDict),
+ case lists:all(fun erl_types:t_is_var/1, ArgTypes) of
+ true ->
+ ArgNames = [erl_types:t_var_name(X) || X <- ArgTypes],
+ dict:store({TypeOrOpaque, Name}, {Module, TypeForm, ArgNames}, RecDict);
+ false ->
+ throw({error, io_lib:format("Type declaration for ~w does not "
+ "have variables as parameters", [Name])})
+ end
+ end.
+
+get_record_fields(Fields, RecDict) ->
+ get_record_fields(Fields, RecDict, []).
+
+get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
+ RecDict, Acc) ->
+ Name =
+ case OrdRecField of
+ {record_field, _Line, Name0} -> erl_parse:normalise(Name0);
+ {record_field, _Line, Name0, _Init} -> erl_parse:normalise(Name0)
+ end,
+ try
+ Type = erl_types:t_from_form(TypeForm, RecDict),
+ get_record_fields(Left, RecDict, [{Name, Type}|Acc])
+ catch
+ throw:{error, _} = Error -> Error
+ end;
+get_record_fields([{record_field, _Line, Name}|Left], RecDict, Acc) ->
+ NewAcc = [{erl_parse:normalise(Name), erl_types:t_any()}|Acc],
+ get_record_fields(Left, RecDict, NewAcc);
+get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) ->
+ NewAcc = [{erl_parse:normalise(Name), erl_types:t_any()}|Acc],
+ get_record_fields(Left, RecDict, NewAcc);
+get_record_fields([], _RecDict, Acc) ->
+ {ok, lists:reverse(Acc)}.
+
+-spec process_record_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver().
+
+process_record_remote_types(CServer) ->
+ TempRecords = dialyzer_codeserver:get_temp_records(CServer),
+ RecordFun =
+ fun(Key, Value) ->
+ case Key of
+ {record, _Name} ->
+ FieldFun =
+ fun(_Arity, Fields) ->
+ [{Name, erl_types:t_solve_remote(Field, TempRecords)} || {Name, Field} <- Fields]
+ end,
+ orddict:map(FieldFun, Value);
+ _Other -> Value
+ end
+ end,
+ ModuleFun =
+ fun(_Module, Record) ->
+ dict:map(RecordFun, Record)
+ end,
+ NewRecords = dict:map(ModuleFun, TempRecords),
+ dialyzer_codeserver:finalize_records(NewRecords, CServer).
+
+-spec merge_records(dict(), dict()) -> dict().
+
+merge_records(NewRecords, OldRecords) ->
+ dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords).
+
+%% ============================================================================
+%%
+%% Spec info
+%%
+%% ============================================================================
+
+-spec get_spec_info(module(), abstract_code(), dict()) ->
+ {'ok', dict()} | {'error', string()}.
+
+get_spec_info(ModName, AbstractCode, RecordsDict) ->
+ get_spec_info(AbstractCode, dict:new(), RecordsDict, ModName, "nofile").
+
+%% TypeSpec is a list of conditional contracts for a function.
+%% Each contract is of the form {[Argument], Range, [Constraint]} where
+%% - Argument and Range are in erl_types:erl_type() format and
+%% - Constraint is of the form {subtype, T1, T2} where T1 and T2
+%% are erl_types:erl_type()
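+%%
+%% For example (illustrative only), a declaration such as
+%%   -spec foo(X) -> boolean() when is_subtype(X, atom()).
+%% gives rise to one conditional contract whose argument list is [X],
+%% whose range is boolean(), and whose constraint list is
+%% [{subtype, X, atom()}].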
+
+get_spec_info([{attribute, Ln, spec, {Id, TypeSpec}}|Left],
+ SpecDict, RecordsDict, ModName, File) when is_list(TypeSpec) ->
+ MFA = case Id of
+ {_, _, _} = T -> T;
+ {F, A} -> {ModName, F, A}
+ end,
+ try dict:find(MFA, SpecDict) of
+ error ->
+ NewSpecDict =
+ dialyzer_contracts:store_tmp_contract(MFA, {File, Ln}, TypeSpec,
+ SpecDict, RecordsDict),
+ get_spec_info(Left, NewSpecDict, RecordsDict, ModName, File);
+ {ok, {{OtherFile, L},_C}} ->
+ {Mod, Fun, Arity} = MFA,
+ Msg = io_lib:format(" Contract for function ~w:~w/~w "
+ "already defined in ~s:~w\n",
+ [Mod, Fun, Arity, OtherFile, L]),
+ throw({error, Msg})
+ catch
+ throw:{error, Error} ->
+ {error, lists:flatten(io_lib:format(" Error while parsing contract "
+ "in line ~w: ~s\n", [Ln, Error]))}
+ end;
+get_spec_info([{attribute, _, file, {IncludeFile, _}}|Left],
+ SpecDict, RecordsDict, ModName, _File) ->
+ get_spec_info(Left, SpecDict, RecordsDict, ModName, IncludeFile);
+get_spec_info([_Other|Left], SpecDict, RecordsDict, ModName, File) ->
+ get_spec_info(Left, SpecDict, RecordsDict, ModName, File);
+get_spec_info([], SpecDict, _RecordsDict, _ModName, _File) ->
+ {ok, SpecDict}.
+
+%% ============================================================================
+%%
+%% Util utils
+%%
+%% ============================================================================
+
+-spec src_compiler_opts() -> comp_options().
+
+src_compiler_opts() ->
+ [no_copt, to_core, binary, return_errors,
+ no_inline, strict_record_tests, strict_record_updates].
+
+-spec get_module(abstract_code()) -> module().
+
+get_module([{attribute, _, module, {M, _As}} | _]) -> M;
+get_module([{attribute, _, module, M} | _]) -> M;
+get_module([_ | Rest]) -> get_module(Rest).
+
+-spec cleanup_parse_transforms(abstract_code()) -> abstract_code().
+
+cleanup_parse_transforms([{attribute, _, compile, {parse_transform, _}}|Left]) ->
+ cleanup_parse_transforms(Left);
+cleanup_parse_transforms([Other|Left]) ->
+ [Other|cleanup_parse_transforms(Left)];
+cleanup_parse_transforms([]) ->
+ [].
+
+-spec format_errors([{module(), string()}]) -> [string()].
+
+format_errors([{Mod, Errors}|Left]) ->
+  FormattedError =
+    [io_lib:format("~s:~w: ~s\n", [Mod, Line, M:format_error(Desc)])
+     || {Line, M, Desc} <- Errors],
+  [lists:flatten(FormattedError) | format_errors(Left)];
+format_errors([]) ->
+ [].
+
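+%% Renders a function type as just its argument and range part; for a type
+%% printed by erl_types as "fun((integer()) -> atom())" the result is, for
+%% example, "(integer()) -> atom()".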
+-spec format_sig(erl_types:erl_type()) -> string().
+
+format_sig(Type) ->
+ format_sig(Type, dict:new()).
+
+-spec format_sig(erl_types:erl_type(), dict()) -> string().
+
+format_sig(Type, RecDict) ->
+ "fun(" ++ Sig = lists:flatten(erl_types:t_to_string(Type, RecDict)),
+ ")" ++ RevSig = lists:reverse(Sig),
+ lists:reverse(RevSig).
+
+%%-------------------------------------------------------------------
+%% Author : Per Gustafsson <[email protected]>
+%% Description : Provides better printing of binaries.
+%% Created : 5 March 2007
+%%-------------------------------------------------------------------
+
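+%% The returned fun is intended to be used as a pretty-printing hook for
+%% cerl_prettypr, e.g. (illustrative) cerl_prettypr:format(Tree,
+%% [{hook, dialyzer_utils:pp_hook()}]), so that binaries and bit-syntax
+%% segments are printed in Erlang-like <<...>> notation.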
+-spec pp_hook() -> fun((cerl:cerl(), _, _) -> term()).
+
+pp_hook() ->
+  fun pp_hook/3.
+
+pp_hook(Node, Ctxt, Cont) ->
+ case cerl:type(Node) of
+ binary ->
+ pp_binary(Node, Ctxt, Cont);
+ bitstr ->
+ pp_segment(Node, Ctxt, Cont);
+ _ ->
+ Cont(Node, Ctxt)
+ end.
+
+pp_binary(Node, Ctxt, Cont) ->
+ prettypr:beside(prettypr:text("<<"),
+ prettypr:beside(pp_segments(cerl:binary_segments(Node),
+ Ctxt, Cont),
+ prettypr:text(">>"))).
+
+pp_segments([Seg], Ctxt, Cont) ->
+ pp_segment(Seg, Ctxt, Cont);
+pp_segments([], _Ctxt, _Cont) ->
+ prettypr:text("");
+pp_segments([Seg|Rest], Ctxt, Cont) ->
+ prettypr:beside(pp_segment(Seg, Ctxt, Cont),
+ prettypr:beside(prettypr:text(","),
+ pp_segments(Rest, Ctxt, Cont))).
+
+pp_segment(Node, Ctxt, Cont) ->
+ Val = cerl:bitstr_val(Node),
+ Size = cerl:bitstr_size(Node),
+ Unit = cerl:bitstr_unit(Node),
+ Type = cerl:bitstr_type(Node),
+ Flags = cerl:bitstr_flags(Node),
+ prettypr:beside(Cont(Val, Ctxt),
+ prettypr:beside(pp_size(Size, Ctxt, Cont),
+ prettypr:beside(pp_opts(Type, Flags),
+ pp_unit(Unit, Ctxt, Cont)))).
+
+pp_size(Size, Ctxt, Cont) ->
+ case cerl:is_c_atom(Size) of
+ true ->
+ prettypr:text("");
+ false ->
+ prettypr:beside(prettypr:text(":"), Cont(Size, Ctxt))
+ end.
+
+pp_opts(Type, Flags) ->
+ FinalFlags =
+ case cerl:atom_val(Type) of
+ binary -> [];
+ float -> keep_endian(cerl:concrete(Flags));
+ integer -> keep_all(cerl:concrete(Flags));
+ utf8 -> [];
+ utf16 -> [];
+ utf32 -> []
+ end,
+ prettypr:beside(prettypr:text("/"),
+ prettypr:beside(pp_atom(Type),
+ pp_flags(FinalFlags))).
+
+pp_flags([]) ->
+ prettypr:text("");
+pp_flags([Flag|Flags]) ->
+ prettypr:beside(prettypr:text("-"),
+ prettypr:beside(pp_atom(Flag),
+ pp_flags(Flags))).
+
+keep_endian(Flags) ->
+ [cerl:c_atom(X) || X <- Flags, (X =:= little) or (X =:= native)].
+
+keep_all(Flags) ->
+ [cerl:c_atom(X) || X <- Flags,
+ (X =:= little) or (X =:= native) or (X =:= signed)].
+
+pp_unit(Unit, Ctxt, Cont) ->
+ case cerl:concrete(Unit) of
+ N when is_integer(N) ->
+ prettypr:beside(prettypr:text("-"),
+ prettypr:beside(prettypr:text("unit:"),
+ Cont(Unit, Ctxt)));
+    _ -> % Other value, e.g. 'undefined' for the UTF types
+ prettypr:text("")
+ end.
+
+pp_atom(Atom) ->
+ String = atom_to_list(cerl:atom_val(Atom)),
+ prettypr:text(String).
diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk
new file mode 100644
index 0000000000..a946959256
--- /dev/null
+++ b/lib/dialyzer/vsn.mk
@@ -0,0 +1 @@
+DIALYZER_VSN = 2.1.0