Diffstat (limited to 'lib/dialyzer/src')
-rw-r--r--  lib/dialyzer/src/Makefile                          |    4
-rw-r--r--  lib/dialyzer/src/dialyzer.app.src                  |   11
-rw-r--r--  lib/dialyzer/src/dialyzer.appup.src                |    2
-rw-r--r--  lib/dialyzer/src/dialyzer.erl                      |    3
-rw-r--r--  lib/dialyzer/src/dialyzer.hrl                      |   14
-rw-r--r--  lib/dialyzer/src/dialyzer_analysis_callgraph.erl   |   45
-rw-r--r--  lib/dialyzer/src/dialyzer_behaviours.erl           |    2
-rw-r--r--  lib/dialyzer/src/dialyzer_callgraph.erl            |  130
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl                   |    4
-rw-r--r--  lib/dialyzer/src/dialyzer_codeserver.erl           |   10
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl            |  283
-rw-r--r--  lib/dialyzer/src/dialyzer_coordinator.erl          |   40
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl             |  374
-rw-r--r--  lib/dialyzer/src/dialyzer_dep.erl                  |   64
-rw-r--r--  lib/dialyzer/src/dialyzer_explanation.erl          |    2
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.erl               |   11
-rw-r--r--  lib/dialyzer/src/dialyzer_options.erl              |    1
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl                  |    2
-rw-r--r--  lib/dialyzer/src/dialyzer_race_data_server.erl     |  134
-rw-r--r--  lib/dialyzer/src/dialyzer_races.erl                |   40
-rw-r--r--  lib/dialyzer/src/dialyzer_succ_typings.erl         |    3
-rw-r--r--  lib/dialyzer/src/dialyzer_timing.erl               |    2
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl              |  606
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl                |  220
-rw-r--r--  lib/dialyzer/src/dialyzer_worker.erl               |   61
25 files changed, 1295 insertions, 773 deletions
diff --git a/lib/dialyzer/src/Makefile b/lib/dialyzer/src/Makefile
index 770af2140f..256f20f549 100644
--- a/lib/dialyzer/src/Makefile
+++ b/lib/dialyzer/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2006-2012. All Rights Reserved.
+# Copyright Ericsson AB 2006-2016. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -61,6 +61,7 @@ MODULES = \
dialyzer_gui_wx \
dialyzer_options \
dialyzer_plt \
+ dialyzer_race_data_server \
dialyzer_races \
dialyzer_succ_typings \
dialyzer_timing \
@@ -140,6 +141,7 @@ $(EBIN)/dialyzer_explanation.beam: dialyzer.hrl
$(EBIN)/dialyzer_gui_wx.beam: dialyzer.hrl dialyzer_gui_wx.hrl
$(EBIN)/dialyzer_options.beam: dialyzer.hrl
$(EBIN)/dialyzer_plt.beam: dialyzer.hrl
+$(EBIN)/dialyzer_race_data_server.beam: dialyzer.hrl
$(EBIN)/dialyzer_races.beam: dialyzer.hrl
$(EBIN)/dialyzer_succ_typings.beam: dialyzer.hrl
$(EBIN)/dialyzer_typesig.beam: dialyzer.hrl
diff --git a/lib/dialyzer/src/dialyzer.app.src b/lib/dialyzer/src/dialyzer.app.src
index 8ac6dc1367..5b28f7ae86 100644
--- a/lib/dialyzer/src/dialyzer.app.src
+++ b/lib/dialyzer/src/dialyzer.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -37,6 +37,7 @@
dialyzer_gui_wx,
dialyzer_options,
dialyzer_plt,
+ dialyzer_race_data_server,
dialyzer_races,
dialyzer_succ_typings,
dialyzer_typesig,
@@ -44,8 +45,8 @@
dialyzer_timing,
dialyzer_worker]},
{registered, []},
- {applications, [compiler, gs, hipe, kernel, stdlib, wx]},
+ {applications, [compiler, hipe, kernel, stdlib, wx]},
{env, []},
- {runtime_dependencies, ["wx-1.2","syntax_tools-1.6.14","stdlib-2.5",
- "kernel-3.0","hipe-3.13","erts-7.0",
- "compiler-5.0"]}]}.
+ {runtime_dependencies, ["wx-1.2","syntax_tools-2.0","stdlib-3.0",
+ "kernel-5.0","hipe-3.15.1","erts-8.0",
+ "compiler-7.0"]}]}.
diff --git a/lib/dialyzer/src/dialyzer.appup.src b/lib/dialyzer/src/dialyzer.appup.src
index 727e961629..d81ff641d7 100644
--- a/lib/dialyzer/src/dialyzer.appup.src
+++ b/lib/dialyzer/src/dialyzer.appup.src
@@ -1,7 +1,7 @@
%% -*- erlang -*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index 9f51dfe356..bcac8afe64 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -336,6 +336,9 @@ message_to_string({guard_fail, []}) ->
"Clause guard cannot succeed.\n";
message_to_string({guard_fail, [Arg1, Infix, Arg2]}) ->
io_lib:format("Guard test ~s ~s ~s can never succeed\n", [Arg1, Infix, Arg2]);
+message_to_string({map_update, [Type, Key]}) ->
+ io_lib:format("A key of type ~s cannot exist "
+ "in a map of type ~s\n", [Key, Type]);
message_to_string({neg_guard_fail, [Arg1, Infix, Arg2]}) ->
io_lib:format("Guard test not(~s ~s ~s) can never succeed\n",
[Arg1, Infix, Arg2]);
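Illustration (not part of the patch): the new map_update message is produced by the dataflow changes further down when an exact update (':=') names a key that cannot be present in the map. A minimal, assumed example that should trigger it:

f() ->
    M = #{a => 1},
    %% ':=' requires the key to exist already, so this yields roughly
    %% "A key of type 'b' cannot exist in a map of type #{'a':=1}".
    M#{b := 2}.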
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
index de236f91ab..ea6a71217c 100644
--- a/lib/dialyzer/src/dialyzer.hrl
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -2,7 +2,7 @@
%%%
%%% %CopyrightBegin%
%%%
-%%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
@@ -60,6 +60,7 @@
-define(WARN_BEHAVIOUR, warn_behaviour).
-define(WARN_UNDEFINED_CALLBACK, warn_undefined_callbacks).
-define(WARN_UNKNOWN, warn_unknown).
+-define(WARN_MAP_CONSTRUCTION, warn_map_construction).
%%
%% The following type has double role:
@@ -75,7 +76,8 @@
| ?WARN_CONTRACT_SUPERTYPE | ?WARN_CALLGRAPH
| ?WARN_UNMATCHED_RETURN | ?WARN_RACE_CONDITION
| ?WARN_BEHAVIOUR | ?WARN_CONTRACT_RANGE
- | ?WARN_UNDEFINED_CALLBACK | ?WARN_UNKNOWN.
+ | ?WARN_UNDEFINED_CALLBACK | ?WARN_UNKNOWN
+ | ?WARN_MAP_CONSTRUCTION.
%%
%% This is the representation of each warning as they will be returned
@@ -123,10 +125,12 @@
%% Record declarations used by various files
%%--------------------------------------------------------------------
--record(analysis, {analysis_pid :: pid(),
+-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
+
+-record(analysis, {analysis_pid :: pid() | 'undefined',
type = succ_typings :: anal_type(),
defines = [] :: [dial_define()],
- doc_plt :: dialyzer_plt:plt(),
+ doc_plt :: doc_plt(),
files = [] :: [file:filename()],
include_dirs = [] :: [file:filename()],
start_from = byte_code :: start_from(),
@@ -135,7 +139,7 @@
race_detection = false :: boolean(),
behaviours_chk = false :: boolean(),
timing = false :: boolean() | 'debug',
- timing_server :: dialyzer_timing:timing_server(),
+ timing_server = none :: dialyzer_timing:timing_server(),
callgraph_file = "" :: file:filename(),
solvers :: [solver()]}).
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index 8537878dfc..50fc1d8471 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -31,14 +31,17 @@
-export([start/3]).
+%%% Export for dialyzer_coordinator...
-export([compile_init_result/0,
- add_to_result/4,
- start_compilation/2,
+ add_to_result/4]).
+%%% ... and export for dialyzer_worker.
+-export([start_compilation/2,
continue_compilation/2]).
-export_type([compile_init_data/0,
- one_file_result/0,
- compile_result/0]).
+ one_file_mid_error/0,
+ one_file_result_ok/0,
+ compile_result/0]).
-include("dialyzer.hrl").
@@ -153,8 +156,7 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
TmpCServer2 =
- dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes,
- TmpCServer1),
+ dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
?timing(State#analysis_state.timing_server, "remote",
begin
TmpCServer3 =
@@ -247,6 +249,10 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T1, _} = statistics(wall_clock),
Callgraph = dialyzer_callgraph:new(),
CompileInit = make_compile_init(State, Callgraph),
+ %% Spawn a worker per file - where each worker calls
+ %% start_compilation on its file, asks next label to coordinator and
+ %% calls continue_compilation - and let coordinator aggregate
+ %% results using (compile_init_result and) add_to_result.
{{Failed, Modules}, NextLabel} =
?timing(Timing, "compile", _C1,
dialyzer_coordinator:parallel_job(compile, Files,
@@ -278,16 +284,18 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
send_log(Parent, Msg2),
{Callgraph, CServer2}.
--type compile_init_data() :: #compile_init{}.
--type error_reason() :: string().
--type compile_result() :: {[{file:filename(), error_reason()}],
- [module()]}. %%opaque
--type one_file_result() :: {error, error_reason()} |
- {ok, [dialyzer_callgraph:callgraph_edge()],
- [mfa_or_funlbl()], module()}. %%opaque
--type compile_mid_data() :: {module(), cerl:cerl(),
- dialyzer_callgraph:callgraph(),
- dialyzer_codeserver:codeserver()}.
+-opaque compile_init_data() :: #compile_init{}.
+-type error_reason() :: string().
+-opaque compile_result() :: {[{file:filename(), error_reason()}],
+ [module()]}.
+-type one_file_mid_error() :: {error, error_reason()}.
+-opaque one_file_result_ok() :: {ok, [dialyzer_callgraph:callgraph_edge()],
+ [mfa_or_funlbl()], module()}.
+-type one_file_result() :: one_file_mid_error() |
+ one_file_result_ok().
+-type compile_mid_data() :: {module(), cerl:cerl(),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver()}.
-spec compile_init_result() -> compile_result().
@@ -428,7 +436,8 @@ store_core(Mod, Core, Callgraph, CServer) ->
CoreSize = cerl_trees:size(CoreTree),
{ok, CoreSize, {Mod, CoreTree, Callgraph, CServer}}.
--spec continue_compilation(integer(), compile_mid_data()) -> one_file_result().
+-spec continue_compilation(integer(), compile_mid_data()) ->
+ one_file_result_ok().
continue_compilation(NextLabel, {Mod, CoreTree, Callgraph, CServer}) ->
{LabeledTree, _NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
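The comment added in compile_and_store above describes the per-file protocol. A rough sketch of one compile worker, with the control flow assumed (the real loop is in dialyzer_worker.erl, whose diff is not shown here); the coordinator and callgraph function names are the ones exported by this patch:

compile_one_file(File, InitData, Coordinator) ->
    %% Sketch only; start_compilation/2 is assumed to return either
    %% {ok, Size, MidData} (cf. store_core/4 above) or one_file_mid_error().
    case dialyzer_analysis_callgraph:start_compilation(File, InitData) of
        {ok, EstimatedSize, MidData} ->
            %% Ask the coordinator for a label range of that size...
            Label = dialyzer_coordinator:get_next_label(EstimatedSize, Coordinator),
            %% ...finish labelling/storing, then report the result back.
            Result = dialyzer_analysis_callgraph:continue_compilation(Label, MidData),
            dialyzer_coordinator:job_done(File, Result, Coordinator);
        {error, _Reason} = Error ->
            dialyzer_coordinator:job_done(File, Error, Coordinator)
    end.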
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index b6f15fdc0e..5623929a43 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index 9e53e171c0..50abb22009 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -96,26 +96,28 @@
%% whenever applicable.
%%-----------------------------------------------------------------------------
+%% Types with comment 'race' are due to dialyzer_races.erl.
-record(callgraph, {digraph = digraph:new() :: digraph:graph(),
- active_digraph :: active_digraph(),
- esc :: ets:tid(),
- letrec_map :: ets:tid(),
+ active_digraph :: active_digraph()
+ | 'undefined', % race
+ esc :: ets:tid()
+ | 'undefined', % race
+ letrec_map :: ets:tid()
+ | 'undefined', % race
name_map :: ets:tid(),
rev_name_map :: ets:tid(),
- rec_var_map :: ets:tid(),
- self_rec :: ets:tid(),
- calls :: ets:tid(),
+ rec_var_map :: ets:tid()
+ | 'undefined', % race
+ self_rec :: ets:tid()
+ | 'undefined', % race
+ calls :: ets:tid()
+ | 'undefined', % race
race_detection = false :: boolean(),
- race_data_server = new_race_data_server() :: pid()}).
-
--record(race_data_state, {race_code = dict:new() :: dict:dict(),
- public_tables = [] :: [label()],
- named_tables = [] :: [string()],
- beh_api_calls = [] :: [{mfa(), mfa()}]}).
+ race_data_server = dialyzer_race_data_server:new() :: pid()}).
%% Exported Types
--type callgraph() :: #callgraph{}.
+-opaque callgraph() :: #callgraph{}.
-type active_digraph() :: {'d', digraph:graph()} | {'e', ets:tid(), ets:tid()}.
@@ -609,45 +611,30 @@ digraph_reaching_subgraph(Funs, DG) ->
renew_race_info(#callgraph{race_data_server = RaceDataServer} = CG,
RaceCode, PublicTables, NamedTables) ->
- ok = race_data_server_cast(
+ ok = dialyzer_race_data_server:cast(
{renew_race_info, {RaceCode, PublicTables, NamedTables}},
RaceDataServer),
CG.
-renew_race_info({RaceCode, PublicTables, NamedTables},
- #race_data_state{} = State) ->
- State#race_data_state{race_code = RaceCode,
- public_tables = PublicTables,
- named_tables = NamedTables}.
-
-spec renew_race_code(dialyzer_races:races(), callgraph()) -> callgraph().
renew_race_code(Races, #callgraph{race_data_server = RaceDataServer} = CG) ->
Fun = dialyzer_races:get_curr_fun(Races),
FunArgs = dialyzer_races:get_curr_fun_args(Races),
Code = lists:reverse(dialyzer_races:get_race_list(Races)),
- ok = race_data_server_cast(
+ ok = dialyzer_race_data_server:cast(
{renew_race_code, {Fun, FunArgs, Code}},
RaceDataServer),
CG.
-renew_race_code_handler({Fun, FunArgs, Code},
- #race_data_state{race_code = RaceCode} = State) ->
- State#race_data_state{race_code = dict:store(Fun, [FunArgs, Code], RaceCode)}.
-
-spec renew_race_public_tables(label(), callgraph()) -> callgraph().
renew_race_public_tables(VarLabel,
#callgraph{race_data_server = RaceDataServer} = CG) ->
ok =
- race_data_server_cast({renew_race_public_tables, VarLabel}, RaceDataServer),
+ dialyzer_race_data_server:cast({renew_race_public_tables, VarLabel}, RaceDataServer),
CG.
-renew_race_public_tables_handler(VarLabel,
- #race_data_state{public_tables = PT}
- = State) ->
- State#race_data_state{public_tables = ordsets:add_element(VarLabel, PT)}.
-
-spec cleanup(callgraph()) -> callgraph().
cleanup(#callgraph{digraph = Digraph,
@@ -657,18 +644,18 @@ cleanup(#callgraph{digraph = Digraph,
#callgraph{digraph = Digraph,
name_map = NameMap,
rev_name_map = RevNameMap,
- race_data_server = race_data_server_call(dup, RaceDataServer)}.
+ race_data_server = dialyzer_race_data_server:duplicate(RaceDataServer)}.
-spec duplicate(callgraph()) -> callgraph().
duplicate(#callgraph{race_data_server = RaceDataServer} = Callgraph) ->
Callgraph#callgraph{
- race_data_server = race_data_server_call(dup, RaceDataServer)}.
+ race_data_server = dialyzer_race_data_server:duplicate(RaceDataServer)}.
-spec dispose_race_server(callgraph()) -> ok.
dispose_race_server(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_cast(stop, RaceDataServer).
+ dialyzer_race_data_server:stop(RaceDataServer).
-spec get_digraph(callgraph()) -> digraph:graph().
@@ -678,17 +665,17 @@ get_digraph(#callgraph{digraph = Digraph}) ->
-spec get_named_tables(callgraph()) -> [string()].
get_named_tables(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_named_tables, RaceDataServer).
+ dialyzer_race_data_server:call(get_named_tables, RaceDataServer).
-spec get_public_tables(callgraph()) -> [label()].
get_public_tables(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_public_tables, RaceDataServer).
+ dialyzer_race_data_server:call(get_public_tables, RaceDataServer).
-spec get_race_code(callgraph()) -> dict:dict().
get_race_code(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_race_code, RaceDataServer).
+ dialyzer_race_data_server:call(get_race_code, RaceDataServer).
-spec get_race_detection(callgraph()) -> boolean().
@@ -698,12 +685,12 @@ get_race_detection(#callgraph{race_detection = RD}) ->
-spec get_behaviour_api_calls(callgraph()) -> [{mfa(), mfa()}].
get_behaviour_api_calls(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_behaviour_api_calls, RaceDataServer).
+ dialyzer_race_data_server:call(get_behaviour_api_calls, RaceDataServer).
-spec race_code_new(callgraph()) -> callgraph().
race_code_new(#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast(race_code_new, RaceDataServer),
+ ok = dialyzer_race_data_server:cast(race_code_new, RaceDataServer),
CG.
-spec put_digraph(digraph:graph(), callgraph()) -> callgraph().
@@ -714,7 +701,7 @@ put_digraph(Digraph, Callgraph) ->
-spec put_race_code(dict:dict(), callgraph()) -> callgraph().
put_race_code(RaceCode, #callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_race_code, RaceCode}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_race_code, RaceCode}, RaceDataServer),
CG.
-spec put_race_detection(boolean(), callgraph()) -> callgraph().
@@ -726,78 +713,23 @@ put_race_detection(RaceDetection, Callgraph) ->
put_named_tables(NamedTables,
#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_named_tables, NamedTables}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_named_tables, NamedTables}, RaceDataServer),
CG.
-spec put_public_tables([label()], callgraph()) -> callgraph().
put_public_tables(PublicTables,
#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_public_tables, PublicTables}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_public_tables, PublicTables}, RaceDataServer),
CG.
-spec put_behaviour_api_calls([{mfa(), mfa()}], callgraph()) -> callgraph().
put_behaviour_api_calls(Calls,
#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_behaviour_api_calls, Calls}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_behaviour_api_calls, Calls}, RaceDataServer),
CG.
-
-new_race_data_server() ->
- spawn_link(fun() -> race_data_server_loop(#race_data_state{}) end).
-
-race_data_server_loop(State) ->
- receive
- {call, From, Ref, Query} ->
- Reply = race_data_server_handle_call(Query, State),
- From ! {Ref, Reply},
- race_data_server_loop(State);
- {cast, stop} ->
- ok;
- {cast, Message} ->
- NewState = race_data_server_handle_cast(Message, State),
- race_data_server_loop(NewState)
- end.
-
-race_data_server_call(Query, Server) ->
- Ref = make_ref(),
- Server ! {call, self(), Ref, Query},
- receive
- {Ref, Reply} -> Reply
- end.
-
-race_data_server_cast(Message, Server) ->
- Server ! {cast, Message},
- ok.
-
-race_data_server_handle_cast(race_code_new, State) ->
- State#race_data_state{race_code = dict:new()};
-race_data_server_handle_cast({Tag, Data}, State) ->
- case Tag of
- renew_race_info -> renew_race_info(Data, State);
- renew_race_code -> renew_race_code_handler(Data, State);
- renew_race_public_tables -> renew_race_public_tables_handler(Data, State);
- put_race_code -> State#race_data_state{race_code = Data};
- put_public_tables -> State#race_data_state{public_tables = Data};
- put_named_tables -> State#race_data_state{named_tables = Data};
- put_behaviour_api_calls -> State#race_data_state{beh_api_calls = Data}
- end.
-
-race_data_server_handle_call(Query,
- #race_data_state{race_code = RaceCode,
- public_tables = PublicTables,
- named_tables = NamedTables,
- beh_api_calls = BehApiCalls}
- = State) ->
- case Query of
- dup -> spawn_link(fun() -> race_data_server_loop(State) end);
- get_race_code -> RaceCode;
- get_public_tables -> PublicTables;
- get_named_tables -> NamedTables;
- get_behaviour_api_calls -> BehApiCalls
- end.
-
%%=============================================================================
%% Utilities for 'dot'
%%=============================================================================
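The server loop and handlers deleted above move into the new dialyzer_race_data_server module (134 added lines, not shown in this view). A minimal sketch of that module's API as it is used from this file, reconstructed from the deleted helpers rather than copied from the new file:

-module(dialyzer_race_data_server).
-export([new/0, duplicate/1, stop/1, call/2, cast/2]).

-record(state, {race_code     = dict:new(),
                public_tables = [],
                named_tables  = [],
                beh_api_calls = []}).

new() ->
    spawn_link(fun() -> loop(#state{}) end).

duplicate(Server) ->
    call(dup, Server).

stop(Server) ->
    cast(stop, Server).

call(Query, Server) ->
    Ref = make_ref(),
    Server ! {call, self(), Ref, Query},
    receive {Ref, Reply} -> Reply end.

cast(Message, Server) ->
    Server ! {cast, Message},
    ok.

loop(State) ->
    receive
        {call, From, Ref, Query} ->
            From ! {Ref, handle_call(Query, State)},
            loop(State);
        {cast, stop} ->
            ok;
        {cast, Message} ->
            loop(handle_cast(Message, State))
    end.

%% The handlers mirror the deleted race_data_server_handle_call/2 and
%% race_data_server_handle_cast/2 clauses.
handle_call(dup, State) -> spawn_link(fun() -> loop(State) end);
handle_call(get_race_code, #state{race_code = X}) -> X;
handle_call(get_public_tables, #state{public_tables = X}) -> X;
handle_call(get_named_tables, #state{named_tables = X}) -> X;
handle_call(get_behaviour_api_calls, #state{beh_api_calls = X}) -> X.

handle_cast(race_code_new, State) ->
    State#state{race_code = dict:new()};
handle_cast({renew_race_info, {Code, PT, NT}}, State) ->
    State#state{race_code = Code, public_tables = PT, named_tables = NT};
handle_cast({renew_race_code, {Fun, Args, Code}},
            #state{race_code = RaceCode} = State) ->
    State#state{race_code = dict:store(Fun, [Args, Code], RaceCode)};
handle_cast({renew_race_public_tables, Label},
            #state{public_tables = PT} = State) ->
    State#state{public_tables = ordsets:add_element(Label, PT)};
handle_cast({put_race_code, D}, State) -> State#state{race_code = D};
handle_cast({put_public_tables, T}, State) -> State#state{public_tables = T};
handle_cast({put_named_tables, T}, State) -> State#state{named_tables = T};
handle_cast({put_behaviour_api_calls, C}, State) -> State#state{beh_api_calls = C}.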
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 9354b8007e..fc56693ea3 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -2,7 +2,7 @@
%%-------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -36,7 +36,7 @@
-include_lib("kernel/include/file.hrl"). % needed for #file_info{}
-record(cl_state,
- {backend_pid :: pid(),
+ {backend_pid :: pid() | 'undefined',
erlang_mode = false :: boolean(),
external_calls = [] :: [mfa()],
external_types = [] :: [mfa()],
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index 978ecd3843..03cd9671af 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -81,10 +81,10 @@
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
- exported_types :: set_ets(), % set(mfa())
- records :: dict_ets(),
- contracts :: dict_ets(),
- callbacks :: dict_ets(),
+ exported_types :: set_ets() | 'undefined', % set(mfa())
+ records :: dict_ets() | 'undefined',
+ contracts :: dict_ets() | 'undefined',
+ callbacks :: dict_ets() | 'undefined',
fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 7251de8b10..976a2b8955 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -53,7 +53,9 @@
%% to expand records and/or remote types that they might contain.
%%-----------------------------------------------------------------------
--type tmp_contract_fun() :: fun((sets:set(mfa()), types()) -> contract_pair()).
+-type cache() :: ets:tid().
+-type tmp_contract_fun() ::
+ fun((sets:set(mfa()), types(), cache()) -> contract_pair()).
-record(tmp_contract, {contract_funs = [] :: [tmp_contract_fun()],
forms = [] :: [{_, _}]}).
@@ -126,13 +128,19 @@ butlast([H|T]) -> [H|butlast(T)].
constraints_to_string([]) ->
"";
-constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}]) ->
- atom_to_list(What) ++ "(" ++
- sequence([erl_types:t_form_to_string(T) || T <- Types], ",") ++ ")";
constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}|Rest]) ->
- atom_to_list(What) ++ "(" ++
- sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
- ++ "), " ++ constraints_to_string(Rest).
+ S = constraint_to_string(What, Types),
+ case Rest of
+ [] -> S;
+ _ -> S ++ ", " ++ constraints_to_string(Rest)
+ end.
+
+constraint_to_string(is_subtype, [{var, _, Var}, T]) ->
+ atom_to_list(Var) ++ " :: " ++ erl_types:t_form_to_string(T);
+constraint_to_string(What, Types) ->
+ atom_to_list(What) ++ "("
+ ++ sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
+ ++ ")".
sequence([], _Delimiter) -> "";
sequence([H], _Delimiter) -> H;
@@ -147,22 +155,32 @@ process_contract_remote_types(CodeServer) ->
ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
RecordDict = dialyzer_codeserver:get_records(CodeServer),
ContractFun =
- fun({_M, _F, _A}, {File, #tmp_contract{contract_funs = CFuns, forms = Forms}, Xtra}) ->
- NewCs = [CFun(ExpTypes, RecordDict) || CFun <- CFuns],
- Args = general_domain(NewCs),
- {File, #contract{contracts = NewCs, args = Args, forms = Forms}, Xtra}
+ fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) ->
+ #tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract,
+ {NewCs, C2} = lists:mapfoldl(fun(CFun, C1) ->
+ CFun(ExpTypes, RecordDict, C1)
+ end, C0, CFuns),
+ Args = general_domain(NewCs),
+ Contract = #contract{contracts = NewCs, args = Args, forms = Forms},
+ {{MFA, {File, Contract, Xtra}}, C2}
end,
ModuleFun =
- fun(_ModuleName, ContractDict) ->
- dict:map(ContractFun, ContractDict)
+ fun({ModuleName, ContractDict}, C3) ->
+ {NewContractList, C4} =
+ lists:mapfoldl(ContractFun, C3, dict:to_list(ContractDict)),
+ {{ModuleName, dict:from_list(NewContractList)}, C4}
end,
- NewContractDict = dict:map(ModuleFun, TmpContractDict),
- NewCallbackDict = dict:map(ModuleFun, TmpCallbackDict),
+ Cache = erl_types:cache__new(),
+ {NewContractList, C5} =
+ lists:mapfoldl(ModuleFun, Cache, dict:to_list(TmpContractDict)),
+ {NewCallbackList, _C6} =
+ lists:mapfoldl(ModuleFun, C5, dict:to_list(TmpCallbackDict)),
+ NewContractDict = dict:from_list(NewContractList),
+ NewCallbackDict = dict:from_list(NewCallbackList),
dialyzer_codeserver:finalize_contracts(NewContractDict, NewCallbackDict,
- CodeServer).
+ CodeServer).
--type opaques() :: [erl_types:erl_type()] | 'universe'.
--type opaques_fun() :: fun((module()) -> opaques()).
+-type opaques_fun() :: fun((module()) -> [erl_types:erl_type()]).
-type fun_types() :: dict:dict(label(), erl_types:type_table()).
@@ -205,10 +223,10 @@ check_contract(Contract, SuccType) ->
check_contract(#contract{contracts = Contracts}, SuccType, Opaques) ->
try
- Contracts1 = [{Contract, insert_constraints(Constraints, dict:new())}
+ Contracts1 = [{Contract, insert_constraints(Constraints)}
|| {Contract, Constraints} <- Contracts],
- Contracts2 = [erl_types:t_subst(Contract, Dict)
- || {Contract, Dict} <- Contracts1],
+ Contracts2 = [erl_types:t_subst(Contract, Map)
+ || {Contract, Map} <- Contracts1],
GenDomains = [erl_types:t_fun_args(C) || C <- Contracts2],
case check_domains(GenDomains) of
error ->
@@ -272,28 +290,45 @@ check_extraneous_1(Contract, SuccType) ->
case [CR || CR <- CRngs,
erl_types:t_is_none(erl_types:t_inf(CR, STRng))] of
[] ->
- CRngList = list_part(CRng),
- STRngList = list_part(STRng),
- case is_not_nil_list(CRngList) andalso is_not_nil_list(STRngList) of
- false -> ok;
- true ->
- CRngElements = erl_types:t_list_elements(CRngList),
- STRngElements = erl_types:t_list_elements(STRngList),
- Inf = erl_types:t_inf(CRngElements, STRngElements),
- case erl_types:t_is_none(Inf) of
- true -> {error, invalid_contract};
- false -> ok
- end
+ case bad_extraneous_list(CRng, STRng)
+ orelse bad_extraneous_map(CRng, STRng)
+ of
+ true -> {error, invalid_contract};
+ false -> ok
end;
CRs -> {error, {extra_range, erl_types:t_sup(CRs), STRng}}
end.
+bad_extraneous_list(CRng, STRng) ->
+ CRngList = list_part(CRng),
+ STRngList = list_part(STRng),
+ case is_not_nil_list(CRngList) andalso is_not_nil_list(STRngList) of
+ false -> false;
+ true ->
+ CRngElements = erl_types:t_list_elements(CRngList),
+ STRngElements = erl_types:t_list_elements(STRngList),
+ Inf = erl_types:t_inf(CRngElements, STRngElements),
+ erl_types:t_is_none(Inf)
+ end.
+
list_part(Type) ->
erl_types:t_inf(erl_types:t_list(), Type).
is_not_nil_list(Type) ->
erl_types:t_is_list(Type) andalso not erl_types:t_is_nil(Type).
+bad_extraneous_map(CRng, STRng) ->
+ CRngMap = map_part(CRng),
+ STRngMap = map_part(STRng),
+ (not is_empty_map(CRngMap)) andalso (not is_empty_map(STRngMap))
+ andalso is_empty_map(erl_types:t_inf(CRngMap, STRngMap)).
+
+map_part(Type) ->
+ erl_types:t_inf(erl_types:t_map(), Type).
+
+is_empty_map(Type) ->
+ erl_types:t_is_equal(Type, erl_types:t_from_term(#{})).
+
%% This is the heart of the "range function"
-spec process_contracts([contract_pair()], [erl_types:erl_type()]) ->
erl_types:erl_type().
@@ -322,15 +357,15 @@ process_contract({Contract, Constraints}, CallTypes0) ->
[erl_types:t_to_string(ContArgsFun),
erl_types:t_to_string(CallTypesFun)]),
case solve_constraints(ContArgsFun, CallTypesFun, Constraints) of
- {ok, VarDict} ->
- {ok, erl_types:t_subst(erl_types:t_fun_range(Contract), VarDict)};
+ {ok, VarMap} ->
+ {ok, erl_types:t_subst(erl_types:t_fun_range(Contract), VarMap)};
error -> error
end.
solve_constraints(Contract, Call, Constraints) ->
%% First make sure the call follows the constraints
- CDict = insert_constraints(Constraints, dict:new()),
- Contract1 = erl_types:t_subst(Contract, CDict),
+ CMap = insert_constraints(Constraints),
+ Contract1 = erl_types:t_subst(Contract, CMap),
%% Just a safe over-approximation.
%% TODO: Find the types for type variables properly
ContrArgs = erl_types:t_fun_args(Contract1),
@@ -338,7 +373,7 @@ solve_constraints(Contract, Call, Constraints) ->
InfList = erl_types:t_inf_lists(ContrArgs, CallArgs),
case erl_types:any_none_or_unit(InfList) of
true -> error;
- false -> {ok, CDict}
+ false -> {ok, CMap}
end.
%%Inf = erl_types:t_inf(Contract1, Call),
%% Then unify with the constrained call type.
@@ -368,23 +403,26 @@ warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
{?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
%% This treats the "when" constraints. It will be extended, we hope.
-insert_constraints([{subtype, Type1, Type2}|Left], Dict) ->
+insert_constraints(Constraints) ->
+ insert_constraints(Constraints, maps:new()).
+
+insert_constraints([{subtype, Type1, Type2}|Left], Map) ->
case erl_types:t_is_var(Type1) of
true ->
Name = erl_types:t_var_name(Type1),
- Dict1 = case dict:find(Name, Dict) of
- error ->
- dict:store(Name, Type2, Dict);
- {ok, VarType} ->
- dict:store(Name, erl_types:t_inf(VarType, Type2), Dict)
- end,
- insert_constraints(Left, Dict1);
+ Map1 = case maps:find(Name, Map) of
+ error ->
+ maps:put(Name, Type2, Map);
+ {ok, VarType} ->
+ maps:put(Name, erl_types:t_inf(VarType, Type2), Map)
+ end,
+ insert_constraints(Left, Map1);
false ->
%% A lot of things should change to add supertypes
throw({error, io_lib:format("First argument of is_subtype constraint "
"must be a type variable: ~p\n", [Type1])})
end;
-insert_constraints([], Dict) -> Dict.
+insert_constraints([], Map) -> Map.
-type types() :: erl_types:type_table().
@@ -406,19 +444,19 @@ contract_from_form(Forms, MFA, RecDict, FileLine) ->
contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords) ->
- NewType =
+ fun(ExpTypes, AllRecords, Cache) ->
+ {NewType, NewCache} =
try
- from_form_with_check(Form, ExpTypes, MFA, AllRecords)
+ from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
NewMsg = io_lib:format("~s:~p: ~s", [filename:basename(File),
- Line, Msg]),
+ Line, Msg]),
throw({error, NewMsg})
end,
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
- {NewTypeNoVars, []}
+ {{NewTypeNoVars, []}, NewCache}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, []} | FormAcc],
@@ -427,13 +465,15 @@ contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
MFA, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords) ->
- {Constr1, VarDict} =
- process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords),
- NewType = from_form_with_check(Form, ExpTypes, MFA, AllRecords,
- VarDict),
+ fun(ExpTypes, AllRecords, Cache) ->
+ {Constr1, VarTable, Cache1} =
+ process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords,
+ Cache),
+ {NewType, NewCache} =
+ from_form_with_check(Form, ExpTypes, MFA, AllRecords,
+ VarTable, Cache1),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
- {NewTypeNoVars, Constr1}
+ {{NewTypeNoVars, Constr1}, NewCache}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, Constr} | FormAcc],
@@ -441,72 +481,91 @@ contract_from_form([{type, _L1, bounded_fun,
contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords) ->
- Init0 = initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords),
+process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+ {Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes,
+ AllRecords, Cache),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords).
+ constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords, NewCache).
-initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords) ->
- initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, []).
+initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+ initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ Cache, []).
-initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords, Acc) ->
- Acc;
-initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords, Acc) ->
+initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+ Cache, Acc) ->
+ {Acc, Cache};
+initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
+ Cache, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
- T1 = final_form(Type1, ExpTypes, MFA, AllRecords, dict:new()),
+ VarTable = erl_types:var_table__new(),
+ {T1, NewCache} =
+ final_form(Type1, ExpTypes, MFA, AllRecords, VarTable, Cache),
Entry = {T1, Type2},
- initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords, [Entry|Acc]);
+ initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ NewCache, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
throw({error,
io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords) ->
- VarDict =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, dict:new()),
- constraints_fixpoint(VarDict, MFA, Constrs, RecDict, ExpTypes, AllRecords).
-
-constraints_fixpoint(OldVarDict, MFA, Constrs, RecDict, ExpTypes, AllRecords) ->
- NewVarDict =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, OldVarDict),
- case NewVarDict of
- OldVarDict ->
- DictFold =
+constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+ VarTable = erl_types:var_table__new(),
+ {VarTab, NewCache} =
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ VarTable, Cache),
+ constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes,
+ AllRecords, NewCache).
+
+constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
+ AllRecords, Cache) ->
+ {NewVarTab, NewCache} =
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ OldVarTab, Cache),
+ case NewVarTab of
+ OldVarTab ->
+ Fun =
fun(Key, Value, Acc) ->
[{subtype, erl_types:t_var(Key), Value}|Acc]
end,
- FinalConstrs = dict:fold(DictFold, [], NewVarDict),
- {FinalConstrs, NewVarDict};
+ FinalConstrs = maps:fold(Fun, [], NewVarTab),
+ {FinalConstrs, NewVarTab, NewCache};
_Other ->
- constraints_fixpoint(NewVarDict, MFA, Constrs, RecDict, ExpTypes, AllRecords)
+ constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes,
+ AllRecords, NewCache)
end.
-final_form(Form, ExpTypes, MFA, AllRecords, VarDict) ->
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarDict).
+final_form(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
+ from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords) ->
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, dict:new()).
+from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) ->
+ VarTable = erl_types:var_table__new(),
+ from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarDict) ->
+from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
Site = {spec, MFA},
- erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords,
- VarDict),
- erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarDict).
-
-constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, VarDict) ->
- Subtypes =
- constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords, VarDict, []),
- insert_constraints(Subtypes, dict:new()).
-
-constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) ->
- Acc;
-constraints_to_subs([C|Rest], MFA, RecDict, ExpTypes, AllRecords, VarDict, Acc) ->
- {T1, Form2} = C,
- T2 = final_form(Form2, ExpTypes, MFA, AllRecords, VarDict),
+ C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords,
+ VarTable, Cache),
+ erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarTable, C1).
+
+constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ VarTab, Cache) ->
+ {Subtypes, NewCache} =
+ constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ VarTab, Cache, []),
+ {insert_constraints(Subtypes), NewCache}.
+
+constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+ _VarTab, Cache, Acc) ->
+ {Acc, Cache};
+constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, AllRecords,
+ VarTab, Cache, Acc) ->
+ {T2, NewCache} =
+ final_form(Form2, ExpTypes, MFA, AllRecords, VarTab, Cache),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords, VarDict, NewAcc).
+ constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ VarTab, NewCache, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
%% the constraints.
@@ -564,10 +623,13 @@ remove_uses([{Var, Use}|ToRemove], Constrs0) ->
remove_uses(_Var, _Use, []) -> [];
remove_uses(Var, Use, [Constr|Constrs]) ->
{V, Form} = Constr,
- case erl_types:t_var_name(V) =:= Var of
- true -> [{V, remove_use(Form, Use)}|Constrs];
- false -> [Constr|remove_uses(Var, Use, Constrs)]
- end.
+ NewConstr = case erl_types:t_var_name(V) =:= Var of
+ true ->
+ {V, remove_use(Form, Use)};
+ false ->
+ Constr
+ end,
+ [NewConstr|remove_uses(Var, Use, Constrs)].
remove_use({var, L, V}, V) -> {var, L, '_'};
remove_use(T, V) when is_tuple(T) ->
@@ -583,8 +645,8 @@ general_domain(List) ->
general_domain(List, erl_types:t_none()).
general_domain([{Sig, Constraints}|Left], AccSig) ->
- Dict = insert_constraints(Constraints, dict:new()),
- Sig1 = erl_types:t_subst(Sig, Dict),
+ Map = insert_constraints(Constraints),
+ Sig1 = erl_types:t_subst(Sig, Map),
general_domain(Left, erl_types:t_sup(AccSig, Sig1));
general_domain([], AccSig) ->
%% Get rid of all variables in the domain.
@@ -617,6 +679,7 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
{value, {Ret, Args}} ->
Sig = erl_types:t_fun(Args, Ret),
{M, _F, _A} = MFA,
+ %% io:format("MFA ~p~n", [MFA]),
Opaques = FindOpaques(M),
{File, Line} = FileLine,
WarningInfo = {File, Line, MFA},
@@ -765,7 +828,7 @@ is_remote_types_related(Contract, CSig, Sig, MFA, RecDict) ->
t_from_forms_without_remote([{FType, []}], MFA, RecDict) ->
Site = {spec, MFA},
- Type1 = erl_types:t_from_form_without_remote(FType, Site, RecDict),
+ {Type1, _} = erl_types:t_from_form_without_remote(FType, Site, RecDict),
{ok, erl_types:subst_all_vars_to_any(Type1)};
t_from_forms_without_remote([{_FType, _Constrs}], _MFA, _RecDict) ->
%% 'When' constraints
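Illustration (the specs below are mine, not part of the patch): the new constraint_to_string clause prints is_subtype constraints in the '::' form, so both spellings of the constraint below now appear in warnings as "when X :: integer()" rather than "when is_subtype(X,integer())".

%% Equivalent contracts; only the printed form of the constraint changes.
-spec id(X) -> X when X :: integer().
-spec same(X) -> X when is_subtype(X, integer()).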
diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl
index 612f379d32..87cbd25b30 100644
--- a/lib/dialyzer/src/dialyzer_coordinator.erl
+++ b/lib/dialyzer/src/dialyzer_coordinator.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -29,8 +29,8 @@
%%% Export for dialyzer main process
-export([parallel_job/4]).
-%%% Exports for all possible workers
--export([wait_activation/0, job_done/3]).
+%%% Export for all possible workers
+-export([job_done/3]).
%%% Exports for the typesig and dataflow analysis workers
-export([sccs_to_pids/2, request_activation/1]).
@@ -38,7 +38,7 @@
%%% Exports for the compilation workers
-export([get_next_label/2]).
--export_type([coordinator/0, mode/0, init_data/0, result/0]).
+-export_type([coordinator/0, mode/0, init_data/0, result/0, job/0]).
%%--------------------------------------------------------------------
@@ -46,16 +46,18 @@
-type regulator() :: pid().
-type scc_to_pid() :: ets:tid() | 'unused'.
--type coordinator() :: {collector(), regulator(), scc_to_pid()}. %%opaque
+-opaque coordinator() :: {collector(), regulator(), scc_to_pid()}.
-type timing() :: dialyzer_timing:timing_server().
-type scc() :: [mfa_or_funlbl()].
-type mode() :: 'typesig' | 'dataflow' | 'compile' | 'warnings'.
--type compile_jobs() :: [file:filename()].
--type typesig_jobs() :: [scc()].
--type dataflow_jobs() :: [module()].
--type warnings_jobs() :: [module()].
+-type compile_job() :: file:filename().
+-type typesig_job() :: scc().
+-type dataflow_job() :: module().
+-type warnings_job() :: module().
+
+-type job() :: compile_job() | typesig_job() | dataflow_job() | warnings_job().
-type compile_init_data() :: dialyzer_analysis_callgraph:compile_init_data().
-type typesig_init_data() :: dialyzer_succ_typings:typesig_init_data().
@@ -73,9 +75,9 @@
-type result() :: compile_result() | typesig_result() |
dataflow_result() | warnings_result().
--type job() :: scc() | module() | file:filename().
--type job_result() :: dialyzer_analysis_callgraph:one_file_result() |
- typesig_result() | dataflow_result() | warnings_result().
+-type job_result() :: dialyzer_analysis_callgraph:one_file_mid_error() |
+ dialyzer_analysis_callgraph:one_file_result_ok() |
+ typesig_result() | dataflow_result() | warnings_result().
-record(state, {mode :: mode(),
active = 0 :: integer(),
@@ -90,13 +92,13 @@
%%--------------------------------------------------------------------
--spec parallel_job('compile', compile_jobs(), compile_init_data(), timing()) ->
+-spec parallel_job('compile', [compile_job()], compile_init_data(), timing()) ->
{compile_result(), integer()};
- ('typesig', typesig_jobs(), typesig_init_data(), timing()) ->
+ ('typesig', [typesig_job()], typesig_init_data(), timing()) ->
typesig_result();
- ('dataflow', dataflow_jobs(), dataflow_init_data(),
+ ('dataflow', [dataflow_job()], dataflow_init_data(),
timing()) -> dataflow_result();
- ('warnings', warnings_jobs(), warnings_init_data(),
+ ('warnings', [warnings_job()], warnings_init_data(),
timing()) -> warnings_result().
parallel_job(Mode, Jobs, InitData, Timing) ->
@@ -118,7 +120,7 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
case TypesigOrDataflow of
true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok;
- false -> request_activation(Regulator, Pid)
+ false -> ok
end,
Count + 1
end,
@@ -217,10 +219,6 @@ request_activation({_Collector, Regulator, _SCCtoPID}) ->
Regulator ! {req, self()},
wait_activation().
-request_activation(Regulator, Pid) ->
- Regulator ! {req, Pid},
- ok.
-
spawn_regulator() ->
InitTickets = dialyzer_utils:parallelism(),
spawn_link(fun() -> regulator_loop(InitTickets, queue:new()) end).
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index cabc5e9e0d..9399789464 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -43,7 +43,6 @@
-include("dialyzer.hrl").
-%%-import(helper, %% 'helper' could be any module doing sanity checks...
-import(erl_types,
[t_inf/2, t_inf/3, t_inf_lists/2, t_inf_lists/3,
t_inf_lists/3, t_is_equal/2, t_is_subtype/2, t_subtract/2,
@@ -72,7 +71,7 @@
t_tuple/0, t_tuple/1, t_tuple_args/1, t_tuple_args/2,
t_tuple_subtypes/2,
t_unit/0, t_unopaque/2,
- t_map/1
+ t_map/0, t_map/1, t_is_singleton/2
]).
%%-define(DEBUG, true).
@@ -99,25 +98,35 @@
-define(BITS, 128).
--record(state, {callgraph :: dialyzer_callgraph:callgraph(),
- codeserver :: dialyzer_codeserver:codeserver(),
- envs :: env_tab(),
- fun_tab :: fun_tab(),
- fun_homes :: dict:dict(label(), mfa()),
- plt :: dialyzer_plt:plt(),
- opaques :: [type()],
+%% Types with comment 'race' are due to dialyzer_races.erl.
+-record(state, {callgraph :: dialyzer_callgraph:callgraph()
+ | 'undefined', % race
+ codeserver :: dialyzer_codeserver:codeserver()
+ | 'undefined', % race
+ envs :: env_tab()
+ | 'undefined', % race
+ fun_tab :: fun_tab()
+ | 'undefined', % race
+ fun_homes :: dict:dict(label(), mfa())
+ | 'undefined', % race
+ plt :: dialyzer_plt:plt()
+ | 'undefined', % race
+ opaques :: [type()]
+ | 'undefined', % race
races = dialyzer_races:new() :: dialyzer_races:races(),
records = dict:new() :: types(),
- tree_map :: dict:dict(label(), cerl:cerl()),
+ tree_map :: dict:dict(label(), cerl:cerl())
+ | 'undefined', % race
warning_mode = false :: boolean(),
warnings = [] :: [raw_warning()],
- work :: {[_], [_], sets:set()},
+ work :: {[_], [_], sets:set()}
+ | 'undefined', % race
module :: module(),
curr_fun :: curr_fun()
}).
--record(map, {dict = dict:new() :: type_tab(),
- subst = dict:new() :: subst_tab(),
+-record(map, {map = maps:new() :: type_tab(),
+ subst = maps:new() :: subst_tab(),
modified = [] :: [Key :: term()],
modified_stack = [] :: [{[Key :: term()],reference()}],
ref = undefined :: reference() | undefined}).
@@ -125,10 +134,10 @@
-type env_tab() :: dict:dict(label(), #map{}).
-type fun_entry() :: {Args :: [type()], RetType :: type()}.
-type fun_tab() :: dict:dict('top' | label(),
- {'not_handled', fun_entry()} | fun_entry()).
+ {'not_handled', fun_entry()} | fun_entry()).
-type key() :: label() | cerl:cerl().
--type type_tab() :: dict:dict(key(), type()).
--type subst_tab() :: dict:dict(key(), cerl:cerl()).
+-type type_tab() :: #{key() => type()}.
+-type subst_tab() :: #{key() => cerl:cerl()}.
%% Exported Types
@@ -332,8 +341,6 @@ traverse(Tree, Map, State) ->
handle_tuple(Tree, Map, State);
map ->
handle_map(Tree, Map, State);
- map_pair ->
- handle_map_pair(Tree, Map, State);
values ->
Elements = cerl:values_es(Tree),
{State1, Map1, EsType} = traverse_list(Elements, Map, State),
@@ -423,17 +430,35 @@ handle_apply(Tree, Map, State) ->
handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State) ->
None = t_none(),
+ %% Call-site analysis may be inaccurate and consider more funs than those that
+ %% are actually possible. If all of them are incorrect, then warnings can be
+ %% emitted. If at least one fun is ok, however, then no warning is emitted,
+ %% just in case the bad ones are not really possible. The last argument is
+ %% used for this, with the following encoding:
+ %% Initial value: {none, []}
+ %% First fun checked: {one, <List of warns>}
+ %% More funs checked: {many, <List of warns>}
+ %% A '{one, []}' can only become '{many, []}'.
+ %% If at any point an fun does not add warnings, then the list is also
+ %% replaced with an empty list.
handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State,
- [None || _ <- ArgTypes], None, false).
+ [None || _ <- ArgTypes], None, false, {none, []}).
handle_apply_or_call([{local, external}|Left], Args, ArgTypes, Map, Tree, State,
- _AccArgTypes, _AccRet, _HadExternal) ->
+ _AccArgTypes, _AccRet, _HadExternal, Warns) ->
+ {HowMany, _} = Warns,
+ NewHowMany =
+ case HowMany of
+ none -> one;
+ _ -> many
+ end,
+ NewWarns = {NewHowMany, []},
handle_apply_or_call(Left, Args, ArgTypes, Map, Tree, State,
- ArgTypes, t_any(), true);
+ ArgTypes, t_any(), true, NewWarns);
handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
Args, ArgTypes, Map, Tree,
#state{opaques = Opaques} = State,
- AccArgTypes, AccRet, HadExternal) ->
+ AccArgTypes, AccRet, HadExternal, Warns) ->
Any = t_any(),
AnyArgs = [Any || _ <- Args],
GenSig = {AnyArgs, fun(_) -> t_any() end},
@@ -497,7 +522,7 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
?debug("RetWithoutLocal: ~s\n", [erl_types:t_to_string(RetWithoutLocal)]),
?debug("BifRet: ~s\n", [erl_types:t_to_string(BifRange(NewArgTypes))]),
?debug("SigRange: ~s\n", [erl_types:t_to_string(SigRange)]),
- ?debug("ContrRet: ~s\n", [erl_types:t_to_string(CRange(NewArgTypes))]),
+ ?debug("ContrRet: ~s\n", [erl_types:t_to_string(ContrRet)]),
?debug("LocalRet: ~s\n", [erl_types:t_to_string(LocalRet)]),
State1 =
@@ -579,16 +604,32 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
end,
NewAccRet = t_sup(AccRet, TotalRet),
?debug("NewAccRet: ~s\n", [t_to_string(NewAccRet)]),
+ {NewWarnings, State4} = state__remove_added_warnings(State, State3),
+ {HowMany, OldWarnings} = Warns,
+ NewWarns =
+ case HowMany of
+ none -> {one, NewWarnings};
+ _ ->
+ case OldWarnings =:= [] of
+ true -> {many, []};
+ false ->
+ case NewWarnings =:= [] of
+ true -> {many, []};
+ false -> {many, NewWarnings ++ OldWarnings}
+ end
+ end
+ end,
handle_apply_or_call(Left, Args, ArgTypes, Map, Tree,
- State3, NewAccArgTypes, NewAccRet, HadExternal);
+ State4, NewAccArgTypes, NewAccRet, HadExternal, NewWarns);
handle_apply_or_call([], Args, _ArgTypes, Map, _Tree, State,
- AccArgTypes, AccRet, HadExternal) ->
+ AccArgTypes, AccRet, HadExternal, {_, Warnings}) ->
+ State1 = state__add_warnings(Warnings, State),
case HadExternal of
false ->
NewMap = enter_type_lists(Args, AccArgTypes, Map),
- {State, NewMap, AccRet};
+ {State1, NewMap, AccRet};
true ->
- {had_external, State}
+ {had_external, State1}
end.
apply_fail_reason(FailedSig, FailedBif, FailedContract) ->
@@ -1092,15 +1133,54 @@ handle_try(Tree, Map, State) ->
%%----------------------------------------
handle_map(Tree,Map,State) ->
- Pairs = cerl:map_es(Tree),
- {State1, Map1, TypePairs} = traverse_list(Pairs,Map,State),
- {State1, Map1, t_map(TypePairs)}.
+ Pairs = cerl:map_es(Tree),
+ Arg = cerl:map_arg(Tree),
+ {State1, Map1, ArgType} = traverse(Arg, Map, State),
+ ArgType1 = t_inf(t_map(), ArgType),
+ case t_is_none_or_unit(ArgType1) of
+ true ->
+ {State1, Map1, ArgType1};
+ false ->
+ {State2, Map2, TypePairs, ExactKeys} =
+ traverse_map_pairs(Pairs, Map1, State1, t_none(), [], []),
+ InsertPair = fun({KV,assoc,_},Acc) -> erl_types:t_map_put(KV,Acc);
+ ({KV,exact,KVTree},Acc) ->
+ case t_is_none(T=erl_types:t_map_update(KV,Acc)) of
+ true -> throw({none, Acc, KV, KVTree});
+ false -> T
+ end
+ end,
+ try lists:foldl(InsertPair, ArgType1, TypePairs)
+ of ResT ->
+ BindT = t_map([{K, t_any()} || K <- ExactKeys]),
+ case bind_pat_vars_reverse([Arg], [BindT], [], Map2, State2) of
+ {error, _, _, _, _} -> {State2, Map2, ResT};
+ {Map3, _} -> {State2, Map3, ResT}
+ end
+ catch {none, MapType, {K,_}, KVTree} ->
+ Msg2 = {map_update, [format_type(MapType, State2),
+ format_type(K, State2)]},
+ {state__add_warning(State2, ?WARN_MAP_CONSTRUCTION, KVTree, Msg2),
+ Map2, t_none()}
+ end
+ end.
-handle_map_pair(Tree,Map,State) ->
- Key = cerl:map_pair_key(Tree),
- Val = cerl:map_pair_val(Tree),
+traverse_map_pairs([], Map, State, _ShadowKeys, PairAcc, KeyAcc) ->
+ {State, Map, lists:reverse(PairAcc), KeyAcc};
+traverse_map_pairs([Pair|Pairs], Map, State, ShadowKeys, PairAcc, KeyAcc) ->
+ Key = cerl:map_pair_key(Pair),
+ Val = cerl:map_pair_val(Pair),
+ Op = cerl:map_pair_op(Pair),
{State1, Map1, [K,V]} = traverse_list([Key,Val],Map,State),
- {State1, Map1, {K,V}}.
+ KeyAcc1 =
+ case cerl:is_literal(Op) andalso cerl:concrete(Op) =:= exact andalso
+ t_is_singleton(K, State#state.opaques) andalso
+ t_is_none(t_inf(ShadowKeys, K)) of
+ true -> [K|KeyAcc];
+ false -> KeyAcc
+ end,
+ traverse_map_pairs(Pairs, Map1, State1, t_sup(K, ShadowKeys),
+ [{{K,V},cerl:concrete(Op),Pair}|PairAcc], KeyAcc1).
%%----------------------------------------
@@ -1435,7 +1515,9 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
{NewMap, TypeOut} =
case cerl:type(Pat) of
alias ->
- AliasPat = cerl:alias_pat(Pat),
+ %% Map patterns are more allowing than the type of their literal. We
+ %% must unfold AliasPat if it is a literal.
+ AliasPat = dialyzer_utils:refold_pattern(cerl:alias_pat(Pat)),
Var = cerl:alias_var(Pat),
Map1 = enter_subst(Var, AliasPat, Map),
{Map2, [PatType]} = bind_pat_vars([AliasPat], [Type], [],
@@ -1476,14 +1558,59 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
{Map1, t_cons(HdType, TlType)}
end;
literal ->
- Literal = literal_type(Pat),
- case t_is_none(t_inf(Literal, Type, Opaques)) of
+ Pat0 = dialyzer_utils:refold_pattern(Pat),
+ case cerl:is_literal(Pat0) of
true ->
- bind_opaque_pats(Literal, Type, Pat, State);
- false -> {Map, Literal}
+ Literal = literal_type(Pat),
+ case t_is_none(t_inf(Literal, Type, Opaques)) of
+ true ->
+ bind_opaque_pats(Literal, Type, Pat, State);
+ false -> {Map, Literal}
+ end;
+ false ->
+ %% Retry with the unfolded pattern
+ {Map1, [PatType]}
+ = bind_pat_vars([Pat0], [Type], [], Map, State, Rev),
+ {Map1, PatType}
end;
map ->
- {Map, t_map([])};
+ MapT = t_inf(Type, t_map(), Opaques),
+ case t_is_none(MapT) of
+ true ->
+ bind_opaque_pats(t_map(), Type, Pat, State);
+ false ->
+ case Rev of
+ %% TODO: Reverse matching (propagating a matched subset back to a value)
+ true -> {Map, MapT};
+ false ->
+ FoldFun =
+ fun(Pair, {MapAcc, ListAcc}) ->
+ %% Only exact (:=) can appear in patterns
+ exact = cerl:concrete(cerl:map_pair_op(Pair)),
+ Key = cerl:map_pair_key(Pair),
+ KeyType =
+ case cerl:type(Key) of
+ var ->
+ case state__lookup_type_for_letrec(Key, State) of
+ error -> lookup_type(Key, MapAcc);
+ {ok, RecType} -> RecType
+ end;
+ literal ->
+ literal_type(Key)
+ end,
+ Bind = erl_types:t_map_get(KeyType, MapT),
+ {MapAcc1, [ValType]} =
+ bind_pat_vars([cerl:map_pair_val(Pair)],
+ [Bind], [], MapAcc, State, Rev),
+ case t_is_singleton(KeyType, Opaques) of
+ true -> {MapAcc1, [{KeyType, ValType}|ListAcc]};
+ false -> {MapAcc1, ListAcc}
+ end
+ end,
+ {Map1, Pairs} = lists:foldl(FoldFun, {Map, []}, cerl:map_es(Pat)),
+ {Map1, t_inf(MapT, t_map(Pairs))}
+ end
+ end;
tuple ->
Es = cerl:tuple_es(Pat),
{TypedRecord, Prototype} =
@@ -1672,7 +1799,7 @@ bind_opaque_pats(GenType, Type, Pat, State) ->
%%
bind_guard(Guard, Map, State) ->
- try bind_guard(Guard, Map, dict:new(), pos, State) of
+ try bind_guard(Guard, Map, maps:new(), pos, State) of
{Map1, _Type} -> Map1
catch
throw:{fail, Warning} -> {error, Warning};
@@ -1700,20 +1827,63 @@ bind_guard(Guard, Map, Env, Eval, State) ->
'try' ->
Arg = cerl:try_arg(Guard),
[Var] = cerl:try_vars(Guard),
+ EVars = cerl:try_evars(Guard),
%%?debug("Storing: ~w\n", [Var]),
- NewEnv = dict:store(get_label(Var), Arg, Env),
- bind_guard(cerl:try_body(Guard), Map, NewEnv, Eval, State);
+ Map1 = join_maps_begin(Map),
+ Map2 = mark_as_fresh(EVars, Map1),
+ %% Visit handler first so we know if it should be ignored
+ {{HandlerMap, HandlerType}, HandlerE} =
+ try {bind_guard(cerl:try_handler(Guard), Map2, Env, Eval, State), none}
+ catch throw:HE ->
+ {{Map2, t_none()}, HE}
+ end,
+ BodyEnv = maps:put(get_label(Var), Arg, Env),
+ Wanted = case Eval of pos -> t_atom(true); neg -> t_atom(false);
+ dont_know -> t_any() end,
+ case t_is_none(t_inf(HandlerType, Wanted)) of
+ %% Handler won't save us; pretend it does not exist
+ true -> bind_guard(cerl:try_body(Guard), Map, BodyEnv, Eval, State);
+ false ->
+ {{BodyMap, BodyType}, BodyE} =
+ try {bind_guard(cerl:try_body(Guard), Map1, BodyEnv,
+ Eval, State), none}
+ catch throw:BE ->
+ {{Map1, t_none()}, BE}
+ end,
+ Map3 = join_maps_end([BodyMap, HandlerMap], Map1),
+ case t_is_none(Sup = t_sup(BodyType, HandlerType)) of
+ true ->
+ %% Pick a reason. N.B. We assume that the handler is always
+ %% compiler-generated if the body is; that way, we won't need to
+ %% check.
+ Fatality = case {BodyE, HandlerE} of
+ {{fatal_fail, _}, _} -> fatal_fail;
+ {_, {fatal_fail, _}} -> fatal_fail;
+ _ -> fail
+ end,
+ throw({Fatality,
+ case {BodyE, HandlerE} of
+ {{_, Rsn}, _} when Rsn =/= none -> Rsn;
+ {_, {_,Rsn}} -> Rsn;
+ _ -> none
+ end});
+ false -> {Map3, Sup}
+ end
+ end;
tuple ->
Es0 = cerl:tuple_es(Guard),
{Map1, Es} = bind_guard_list(Es0, Map, Env, dont_know, State),
{Map1, t_tuple(Es)};
map ->
- {Map, t_map([])};
+ case Eval of
+ dont_know -> handle_guard_map(Guard, Map, Env, State);
+ _PosOrNeg -> {Map, t_none()} %% Map exprs do not produce bools
+ end;
'let' ->
Arg = cerl:let_arg(Guard),
[Var] = cerl:let_vars(Guard),
%%?debug("Storing: ~w\n", [Var]),
- NewEnv = dict:store(get_label(Var), Arg, Env),
+ NewEnv = maps:put(get_label(Var), Arg, Env),
bind_guard(cerl:let_body(Guard), Map, NewEnv, Eval, State);
values ->
Es = cerl:values_es(Guard),
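The rewritten 'try' clause above types the guard's handler first; when the handler cannot produce the wanted truth value (for Eval = pos, the atom 'true'), it is treated as absent and only the body constrains the variables. A hedged illustration follows; the module name is invented and the Core Erlang shape described in the comment is an assumption, not shown by the patch.

-module(guard_try_example).   %% hypothetical, illustration only
-export([tag/1]).

%% The call to element/2 may raise, so the guard is compiled into a protected
%% 'try' whose handler evaluates to 'false'. Under Eval = pos that handler can
%% never yield 'true', so it is ignored and the body alone constrains X.
tag(X) when element(1, X) =:= tag -> tagged;
tag(_) -> other.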
@@ -1722,7 +1892,7 @@ bind_guard(Guard, Map, Env, Eval, State) ->
{Map, Type};
var ->
?debug("Looking for var(~w)...", [cerl_trees:get_label(Guard)]),
- case dict:find(get_label(Guard), Env) of
+ case maps:find(get_label(Guard), Env) of
error ->
?debug("Did not find it\n", []),
Type = lookup_type(Guard, Map),
@@ -1751,7 +1921,7 @@ handle_guard_call(Guard, Map, Env, Eval, State) ->
{erlang, F, 1} when F =:= is_atom; F =:= is_boolean;
F =:= is_binary; F =:= is_bitstring;
F =:= is_float; F =:= is_function;
- F =:= is_integer; F =:= is_list;
+ F =:= is_integer; F =:= is_list; F =:= is_map;
F =:= is_number; F =:= is_pid; F =:= is_port;
F =:= is_reference; F =:= is_tuple ->
handle_guard_type_test(Guard, F, Map, Env, Eval, State);
@@ -1831,6 +2001,7 @@ bind_type_test(Eval, TypeTest, ArgType, State) ->
is_function -> t_fun();
is_integer -> t_integer();
is_list -> t_maybe_improper_list();
+ is_map -> t_map();
is_number -> t_number();
is_pid -> t_pid();
is_port -> t_port();
@@ -2339,6 +2510,30 @@ bind_guard_list([G|Gs], Map, Env, Eval, State, Acc) ->
bind_guard_list([], Map, _Env, _Eval, _State, Acc) ->
{Map, lists:reverse(Acc)}.
+handle_guard_map(Guard, Map, Env, State) ->
+ Pairs = cerl:map_es(Guard),
+ Arg = cerl:map_arg(Guard),
+ {Map1, ArgType0} = bind_guard(Arg, Map, Env, dont_know, State),
+ ArgType1 = t_inf(t_map(), ArgType0),
+ case t_is_none_or_unit(ArgType1) of
+ true -> {Map1, t_none()};
+ false ->
+ {Map2, TypePairs} = bind_guard_map_pairs(Pairs, Map1, Env, State, []),
+ {Map2, lists:foldl(fun({KV,assoc},Acc) -> erl_types:t_map_put(KV,Acc);
+ ({KV,exact},Acc) -> erl_types:t_map_update(KV,Acc)
+ end, ArgType1, TypePairs)}
+ end.
+
+bind_guard_map_pairs([], Map, _Env, _State, PairAcc) ->
+ {Map, lists:reverse(PairAcc)};
+bind_guard_map_pairs([Pair|Pairs], Map, Env, State, PairAcc) ->
+ Key = cerl:map_pair_key(Pair),
+ Val = cerl:map_pair_val(Pair),
+ Op = cerl:map_pair_op(Pair),
+ {Map1, [K,V]} = bind_guard_list([Key,Val],Map,Env,dont_know,State),
+ bind_guard_map_pairs(Pairs, Map1, Env, State,
+ [{{K,V},cerl:concrete(Op)}|PairAcc]).
+
-type eval() :: 'pos' | 'neg' | 'dont_know'.
-spec signal_guard_fail(eval(), cerl:c_call(), [type()],
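handle_guard_map/4 above types a map construction or update that appears inside a guard by folding erl_types:t_map_put/2 (for =>) and erl_types:t_map_update/2 (for :=) over the argument's type; under pos or neg evaluation the result is none() since a map expression never yields a boolean. A small invented example of a guard that exercises it:

-module(guard_map_example).   %% hypothetical, illustration only
-export([is_strict/1]).

%% The map expression in the guard is now typed from its key/value pairs
%% rather than by the single generic map approximation used before.
is_strict(Opts) when Opts =:= #{mode => strict} -> true;
is_strict(_) -> false.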
@@ -2411,7 +2606,9 @@ filter_fail_clauses([Clause|Left]) ->
case (cerl:clause_pats(Clause) =:= []) of
true ->
Body = cerl:clause_body(Clause),
- case cerl:is_literal(Body) andalso (cerl:concrete(Body) =:= fail) of
+ case cerl:is_literal(Body) andalso (cerl:concrete(Body) =:= fail) orelse
+ cerl:is_c_primop(Body) andalso
+ (cerl:atom_val(cerl:primop_name(Body)) =:= match_fail) of
true -> filter_fail_clauses(Left);
false -> [Clause|filter_fail_clauses(Left)]
end;
@@ -2525,10 +2722,10 @@ join_maps_end(Maps, MapOut) ->
#map{ref = Ref, modified_stack = [{M1,R1} | S]} = MapOut,
true = lists:all(fun(M) -> M#map.ref =:= Ref end, Maps), % sanity
Keys0 = lists:usort(lists:append([M#map.modified || M <- Maps])),
- #map{dict = Dict, subst = Subst} = MapOut,
+ #map{map = Map, subst = Subst} = MapOut,
Keys = [Key ||
Key <- Keys0,
- dict:is_key(Key, Dict) orelse dict:is_key(Key, Subst)],
+ maps:is_key(Key, Map) orelse maps:is_key(Key, Subst)],
Out = case Maps of
[] -> join_maps(Maps, MapOut);
_ -> join_maps(Keys, Maps, MapOut)
@@ -2539,8 +2736,8 @@ join_maps_end(Maps, MapOut) ->
modified_stack = S}.
join_maps(Maps, MapOut) ->
- #map{dict = Dict, subst = Subst} = MapOut,
- Keys = ordsets:from_list(dict:fetch_keys(Dict) ++ dict:fetch_keys(Subst)),
+ #map{map = Map, subst = Subst} = MapOut,
+ Keys = ordsets:from_list(maps:keys(Map) ++ maps:keys(Subst)),
join_maps(Keys, Maps, MapOut).
join_maps(Keys, Maps, MapOut) ->
@@ -2569,11 +2766,11 @@ join_maps_one_key([], _Key, AccType) ->
-ifdef(DEBUG).
debug_join_check(Maps, MapOut, Out) ->
- #map{dict = Dict, subst = Subst} = Out,
- #map{dict = Dict2, subst = Subst2} = join_maps(Maps, MapOut),
- F = fun(D) -> lists:keysort(1, dict:to_list(D)) end,
+ #map{map = Map, subst = Subst} = Out,
+ #map{map = Map2, subst = Subst2} = join_maps(Maps, MapOut),
+ F = fun(D) -> lists:keysort(1, maps:to_list(D)) end,
[throw({bug, join_maps}) ||
- F(Dict) =/= F(Dict2) orelse F(Subst) =/= F(Subst2)].
+ F(Map) =/= F(Map2) orelse F(Subst) =/= F(Subst2)].
-else.
debug_join_check(_Maps, _MapOut, _Out) -> ok.
-endif.
@@ -2604,15 +2801,15 @@ enter_type(Key, Val, MS) ->
enter_type_lists(Keys, t_to_tlist(Val), MS)
end;
false ->
- #map{dict = Dict, subst = Subst} = MS,
+ #map{map = Map, subst = Subst} = MS,
KeyLabel = get_label(Key),
- case dict:find(KeyLabel, Subst) of
+ case maps:find(KeyLabel, Subst) of
{ok, NewKey} ->
?debug("Binding ~p to ~p\n", [KeyLabel, NewKey]),
enter_type(NewKey, Val, MS);
error ->
?debug("Entering ~p :: ~s\n", [KeyLabel, t_to_string(Val)]),
- case dict:find(KeyLabel, Dict) of
+ case maps:find(KeyLabel, Map) of
{ok, Value} ->
case erl_types:t_is_equal(Val, Value) of
true -> MS;
@@ -2624,13 +2821,14 @@ enter_type(Key, Val, MS) ->
end
end.
-store_map(Key, Val, #map{dict = Dict, ref = undefined} = Map) ->
- Map#map{dict = dict:store(Key, Val, Dict)};
-store_map(Key, Val, #map{dict = Dict, modified = Mod} = Map) ->
- Map#map{dict = dict:store(Key, Val, Dict), modified = [Key | Mod]}.
+store_map(Key, Val, #map{map = Map, ref = undefined} = MapRec) ->
+ MapRec#map{map = maps:put(Key, Val, Map)};
+store_map(Key, Val, #map{map = Map, modified = Mod} = MapRec) ->
+ MapRec#map{map = maps:put(Key, Val, Map), modified = [Key | Mod]}.
-enter_subst(Key, Val, #map{subst = Subst} = MS) ->
+enter_subst(Key, Val0, #map{subst = Subst} = MS) ->
KeyLabel = get_label(Key),
+ Val = dialyzer_utils:refold_pattern(Val0),
case cerl:is_literal(Val) of
true ->
store_map(KeyLabel, literal_type(Val), MS);
@@ -2639,7 +2837,7 @@ enter_subst(Key, Val, #map{subst = Subst} = MS) ->
false -> MS;
true ->
ValLabel = get_label(Val),
- case dict:find(ValLabel, Subst) of
+ case maps:find(ValLabel, Subst) of
{ok, NewVal} ->
enter_subst(Key, NewVal, MS);
error ->
@@ -2653,22 +2851,22 @@ enter_subst(Key, Val, #map{subst = Subst} = MS) ->
end.
store_subst(Key, Val, #map{subst = S, ref = undefined} = Map) ->
- Map#map{subst = dict:store(Key, Val, S)};
+ Map#map{subst = maps:put(Key, Val, S)};
store_subst(Key, Val, #map{subst = S, modified = Mod} = Map) ->
- Map#map{subst = dict:store(Key, Val, S), modified = [Key | Mod]}.
+ Map#map{subst = maps:put(Key, Val, S), modified = [Key | Mod]}.
-lookup_type(Key, #map{dict = Dict, subst = Subst}) ->
- lookup(Key, Dict, Subst, t_none()).
+lookup_type(Key, #map{map = Map, subst = Subst}) ->
+ lookup(Key, Map, Subst, t_none()).
-lookup(Key, Dict, Subst, AnyNone) ->
+lookup(Key, Map, Subst, AnyNone) ->
case cerl:is_literal(Key) of
true -> literal_type(Key);
false ->
Label = get_label(Key),
- case dict:find(Label, Subst) of
- {ok, NewKey} -> lookup(NewKey, Dict, Subst, AnyNone);
+ case maps:find(Label, Subst) of
+ {ok, NewKey} -> lookup(NewKey, Map, Subst, AnyNone);
error ->
- case dict:find(Label, Dict) of
+ case maps:find(Label, Map) of
{ok, Val} -> Val;
error -> AnyNone
end
@@ -2693,6 +2891,9 @@ mark_as_fresh([Tree|Left], Map) ->
bitstr ->
%% The Size field is not fresh.
{SubTrees1 -- [cerl:bitstr_size(Tree)], Map};
+ map_pair ->
+        %% The keys are not fresh.
+ {SubTrees1 -- [cerl:map_pair_key(Tree)], Map};
var ->
{SubTrees1, enter_type(Tree, t_any(), Map)};
_ ->
@@ -2703,12 +2904,12 @@ mark_as_fresh([], Map) ->
Map.
-ifdef(DEBUG).
-debug_pp_map(#map{dict = Dict}=Map) ->
- Keys = dict:fetch_keys(Dict),
+debug_pp_map(#map{map = Map}=MapRec) ->
+ Keys = maps:keys(Map),
io:format("Map:\n", []),
lists:foreach(fun (Key) ->
io:format("\t~w :: ~s\n",
- [Key, t_to_string(lookup_type(Key, Map))])
+ [Key, t_to_string(lookup_type(Key, MapRec))])
end, Keys),
ok.
-else.
@@ -2753,11 +2954,15 @@ is_call_to_send(Tree) ->
Arity = cerl:call_arity(Tree),
cerl:is_c_atom(Mod)
andalso cerl:is_c_atom(Name)
- andalso (cerl:atom_val(Name) =:= '!')
+ andalso is_send(cerl:atom_val(Name))
andalso (cerl:atom_val(Mod) =:= erlang)
andalso (Arity =:= 2)
end.
+is_send('!') -> true;
+is_send(send) -> true;
+is_send(_) -> false.
+
is_lc_simple_list(Tree, TreeType, State) ->
Opaques = State#state.opaques,
Ann = cerl:get_ann(Tree),
@@ -2866,11 +3071,22 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
false ->
WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
- ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
+ case Tag of
+ ?WARN_CONTRACT_RANGE -> ok;
+ _ -> ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)])
+ end,
State#state{warnings = [Warn|Warnings]}
end
end.
+state__remove_added_warnings(OldState, NewState) ->
+ #state{warnings = OldWarnings} = OldState,
+ #state{warnings = NewWarnings} = NewState,
+ {NewWarnings -- OldWarnings, NewState#state{warnings = OldWarnings}}.
+
+state__add_warnings(Warns, #state{warnings = Warnings} = State) ->
+ State#state{warnings = Warns ++ Warnings}.
+
-spec state__set_curr_fun(curr_fun(), state()) -> state().
state__set_curr_fun(undefined, State) ->
diff --git a/lib/dialyzer/src/dialyzer_dep.erl b/lib/dialyzer/src/dialyzer_dep.erl
index a7bc074d02..273c05c54c 100644
--- a/lib/dialyzer/src/dialyzer_dep.erl
+++ b/lib/dialyzer/src/dialyzer_dep.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -59,8 +59,14 @@
%% separately.
%%
--spec analyze(cerl:c_module()) ->
- {dict:dict(), ordsets:ordset('external' | label()), dict:dict(), dict:dict()}.
+-type dep_ordset() :: ordsets:ordset(label() | 'external').
+
+-type deps() :: dict:dict(label() | 'external' | 'top', dep_ordset()).
+-type esc() :: dep_ordset().
+-type calls() :: dict:dict(label(), ordsets:ordset(label())).
+-type letrecs() :: dict:dict(label(), label()).
+
+-spec analyze(cerl:c_module()) -> {deps(), esc(), calls(), letrecs()}.
analyze(Tree) ->
%% io:format("Handling ~w\n", [cerl:atom_val(cerl:module_name(Tree))]),
@@ -79,22 +85,26 @@ traverse(Tree, Out, State, CurrentFun) ->
apply ->
Op = cerl:apply_op(Tree),
Args = cerl:apply_args(Tree),
- %% Op is always a variable and should not be marked as escaping
- %% based on its use.
case var =:= cerl:type(Op) of
- false -> erlang:error({apply_op_not_a_variable, cerl:type(Op)});
- true -> ok
- end,
- OpFuns = case map__lookup(cerl_trees:get_label(Op), Out) of
- none -> output(none);
- {value, OF} -> OF
- end,
- {ArgFuns, State2} = traverse_list(Args, Out, State, CurrentFun),
- State3 = state__add_esc(merge_outs(ArgFuns), State2),
- State4 = state__add_deps(CurrentFun, OpFuns, State3),
- State5 = state__store_callsite(cerl_trees:get_label(Tree),
- OpFuns, length(Args), State4),
- {output(set__singleton(external)), State5};
+ false ->
+ %% We have discovered an error here, but we ignore it and let
+ %% later passes handle it; we do not modify the dependencies.
+ %% erlang:error({apply_op_not_a_variable, cerl:type(Op)});
+ {output(none), State};
+ true ->
+ %% Op is a variable and should not be marked as escaping
+ %% based on its use.
+ OpFuns = case map__lookup(cerl_trees:get_label(Op), Out) of
+ none -> output(none);
+ {value, OF} -> OF
+ end,
+ {ArgFuns, State2} = traverse_list(Args, Out, State, CurrentFun),
+ State3 = state__add_esc(merge_outs(ArgFuns), State2),
+ State4 = state__add_deps(CurrentFun, OpFuns, State3),
+ State5 = state__store_callsite(cerl_trees:get_label(Tree),
+ OpFuns, length(Args), State4),
+ {output(set__singleton(external)), State5}
+ end;
binary ->
{output(none), State};
'case' ->
@@ -481,11 +491,11 @@ all_vars(Tree, AccIn) ->
-type local_set() :: 'none' | #set{}.
--record(state, {deps :: dict:dict(),
+-record(state, {deps :: deps(),
esc :: local_set(),
- call :: dict:dict(),
- arities :: dict:dict(),
- letrecs :: dict:dict()}).
+ calls :: calls(),
+ arities :: dict:dict(label() | 'top', arity()),
+ letrecs :: letrecs()}).
state__new(Tree) ->
Exports = set__from_list([X || X <- cerl:module_exports(Tree)]),
@@ -503,7 +513,7 @@ state__new(Tree) ->
%% init the escaping function labels to exported + called from on_load
InitEsc = set__from_list(OnLoadLs ++ ExpLs),
Arities = cerl_trees:fold(fun find_arities/2, dict:new(), Tree),
- #state{deps = map__new(), esc = InitEsc, call = map__new(),
+ #state{deps = map__new(), esc = InitEsc, calls = map__new(),
arities = Arities, letrecs = map__new()}.
find_arities(Tree, AccMap) ->
@@ -518,7 +528,7 @@ find_arities(Tree, AccMap) ->
state__add_deps(_From, #output{content = none}, State) ->
State;
-state__add_deps(From, #output{type = single, content=To},
+state__add_deps(From, #output{type = single, content = To},
#state{deps = Map} = State) ->
%% io:format("Adding deps from ~w to ~w\n", [From, set__to_ordsets(To)]),
State#state{deps = map__add(From, To, Map)}.
@@ -544,16 +554,16 @@ state__esc(#state{esc = Esc}) ->
state__store_callsite(_From, #output{content = none}, _CallArity, State) ->
State;
state__store_callsite(From, To, CallArity,
- #state{call = Calls, arities = Arities} = State) ->
+ #state{calls = Calls, arities = Arities} = State) ->
Filter = fun(external) -> true;
(Fun) -> CallArity =:= dict:fetch(Fun, Arities)
end,
case filter_outs(To, Filter) of
#output{content = none} -> State;
- To1 -> State#state{call = map__store(From, To1, Calls)}
+ To1 -> State#state{calls = map__store(From, To1, Calls)}
end.
-state__calls(#state{call = Calls}) ->
+state__calls(#state{calls = Calls}) ->
Calls.
%%------------------------------------------------------------
diff --git a/lib/dialyzer/src/dialyzer_explanation.erl b/lib/dialyzer/src/dialyzer_explanation.erl
index 20fbcfed35..968f8df78e 100644
--- a/lib/dialyzer/src/dialyzer_explanation.erl
+++ b/lib/dialyzer/src/dialyzer_explanation.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2009. All Rights Reserved.
+%% Copyright Ericsson AB 2009-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index ff54a91ce1..30d2bdeca4 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -2,7 +2,7 @@
%%------------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2009-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2009-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -52,7 +52,6 @@
add_dir :: wx:wx_object(),
add_rec :: wx:wx_object(),
chosen_box :: wx:wx_object(),
- analysis_pid :: pid(),
del_file :: wx:wx_object(),
doc_plt :: dialyzer_plt:plt(),
clear_chosen :: wx:wx_object(),
@@ -72,11 +71,11 @@
stop :: wx:wx_object(),
frame :: wx:wx_object(),
warnings_box :: wx:wx_object(),
- explanation_box :: wx:wx_object(),
+ explanation_box :: wx:wx_object() | 'undefined',
wantedWarnings :: list(),
rawWarnings :: list(),
- backend_pid :: pid(),
- expl_pid :: pid()}).
+ backend_pid :: pid() | 'undefined',
+ expl_pid :: pid() | 'undefined'}).
%%------------------------------------------------------------------------
@@ -423,7 +422,7 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
#wx{id=?menuID_HELP_ABOUT, obj=Frame,
event=#wxCommand{type=command_menu_selected}} ->
Message = " This is DIALYZER version " ++ ?VSN ++ " \n"++
- "DIALYZER is a DIscrepany AnaLYZer for ERlang programs.\n\n"++
+ "DIALYZER is a DIscrepancy AnaLYZer for ERlang programs.\n\n"++
" Copyright (C) Tobias Lindahl <[email protected]>\n"++
" Kostis Sagonas <[email protected]>\n\n",
output_sms(State, "About Dialyzer", Message, info),
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index dd81dd01ed..add660eae9 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -47,6 +47,7 @@ build(Opts) ->
?WARN_CALLGRAPH,
?WARN_FAILING_CALL,
?WARN_BIN_CONSTRUCTION,
+ ?WARN_MAP_CONSTRUCTION,
?WARN_CONTRACT_RANGE,
?WARN_CONTRACT_TYPES,
?WARN_CONTRACT_SYNTAX,
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 769f26a3df..cf2f0e919e 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -2,7 +2,7 @@
%%----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/src/dialyzer_race_data_server.erl b/lib/dialyzer/src/dialyzer_race_data_server.erl
new file mode 100644
index 0000000000..3600491809
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_race_data_server.erl
@@ -0,0 +1,134 @@
+%% -*- erlang-indent-level: 2 -*-
+%%-----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_race_data_server.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 18 Sep 2015 by Luca Favatella <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_race_data_server).
+
+-export([new/0,
+ duplicate/1,
+ stop/1,
+ call/2,
+ cast/2]).
+
+-include("dialyzer.hrl").
+
+%%----------------------------------------------------------------------
+
+-record(state, {race_code = dict:new() :: dict:dict(),
+ public_tables = [] :: [label()],
+ named_tables = [] :: [string()],
+ beh_api_calls = [] :: [{mfa(), mfa()}]}).
+
+%%----------------------------------------------------------------------
+
+-spec new() -> pid().
+
+new() ->
+ spawn_link(fun() -> loop(#state{}) end).
+
+-spec duplicate(pid()) -> pid().
+
+duplicate(Server) ->
+ call(dup, Server).
+
+-spec stop(pid()) -> ok.
+
+stop(Server) ->
+ cast(stop, Server).
+
+-spec call(atom(), pid()) -> term().
+
+call(Query, Server) ->
+ Ref = make_ref(),
+ Server ! {call, self(), Ref, Query},
+ receive
+ {Ref, Reply} -> Reply
+ end.
+
+-spec cast(atom() | {atom(), term()}, pid()) -> ok.
+
+cast(Message, Server) ->
+ Server ! {cast, Message},
+ ok.
+
+%%----------------------------------------------------------------------
+
+loop(State) ->
+ receive
+ {call, From, Ref, Query} ->
+ Reply = handle_call(Query, State),
+ From ! {Ref, Reply},
+ loop(State);
+ {cast, stop} ->
+ ok;
+ {cast, Message} ->
+ NewState = handle_cast(Message, State),
+ loop(NewState)
+ end.
+
+handle_cast(race_code_new, State) ->
+ State#state{race_code = dict:new()};
+handle_cast({Tag, Data}, State) ->
+ case Tag of
+ renew_race_info -> renew_race_info_handler(Data, State);
+ renew_race_code -> renew_race_code_handler(Data, State);
+ renew_race_public_tables -> renew_race_public_tables_handler(Data, State);
+ put_race_code -> State#state{race_code = Data};
+ put_public_tables -> State#state{public_tables = Data};
+ put_named_tables -> State#state{named_tables = Data};
+ put_behaviour_api_calls -> State#state{beh_api_calls = Data}
+ end.
+
+handle_call(Query,
+ #state{race_code = RaceCode,
+ public_tables = PublicTables,
+ named_tables = NamedTables,
+ beh_api_calls = BehApiCalls}
+ = State) ->
+ case Query of
+ dup -> spawn_link(fun() -> loop(State) end);
+ get_race_code -> RaceCode;
+ get_public_tables -> PublicTables;
+ get_named_tables -> NamedTables;
+ get_behaviour_api_calls -> BehApiCalls
+ end.
+
+%%----------------------------------------------------------------------
+
+renew_race_info_handler({RaceCode, PublicTables, NamedTables},
+ #state{} = State) ->
+ State#state{race_code = RaceCode,
+ public_tables = PublicTables,
+ named_tables = NamedTables}.
+
+renew_race_code_handler({Fun, FunArgs, Code},
+ #state{race_code = RaceCode} = State) ->
+ State#state{race_code = dict:store(Fun, [FunArgs, Code], RaceCode)}.
+
+renew_race_public_tables_handler(VarLabel,
+ #state{public_tables = PT} = State) ->
+ State#state{public_tables = ordsets:add_element(VarLabel, PT)}.
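A brief usage sketch of the message protocol implemented above. The wrapping module and function are invented for illustration, and the dict passed with put_race_code is only a placeholder value.

-module(race_server_demo).   %% hypothetical, illustration only
-export([demo/0]).

demo() ->
    Server = dialyzer_race_data_server:new(),
    ok = dialyzer_race_data_server:cast({put_race_code, dict:new()}, Server),
    RaceCode = dialyzer_race_data_server:call(get_race_code, Server),
    %% duplicate/1 spawns a second server holding a snapshot of the state
    Copy = dialyzer_race_data_server:duplicate(Server),
    ok = dialyzer_race_data_server:stop(Copy),
    ok = dialyzer_race_data_server:stop(Server),
    RaceCode.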
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 39de071bde..bb43d1dcb8 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -92,27 +92,28 @@
-type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER
| ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE.
--record(beg_clause, {arg :: var_to_map1(),
- pats :: var_to_map1(),
- guard :: cerl:cerl()}).
--record(end_clause, {arg :: var_to_map1(),
- pats :: var_to_map1(),
- guard :: cerl:cerl()}).
+-record(beg_clause, {arg :: var_to_map1() | 'undefined',
+ pats :: var_to_map1() | 'undefined',
+ guard :: cerl:cerl() | 'undefined'}).
+-record(end_clause, {arg :: var_to_map1() | 'undefined',
+ pats :: var_to_map1() | 'undefined',
+ guard :: cerl:cerl() | 'undefined'}).
-record(end_case, {clauses :: [#end_clause{}]}).
--record(curr_fun, {status :: 'in' | 'out',
- mfa :: dialyzer_callgraph:mfa_or_funlbl(),
- label :: label(),
- def_vars :: [core_vars()],
- arg_types :: [erl_types:erl_type()],
- call_vars :: [core_vars()],
- var_map :: dict:dict()}).
+-record(curr_fun, {status :: 'in' | 'out' | 'undefined',
+ mfa :: dialyzer_callgraph:mfa_or_funlbl()
+ | 'undefined',
+ label :: label() | 'undefined',
+ def_vars :: [core_vars()] | 'undefined',
+ arg_types :: [erl_types:erl_type()] | 'undefined',
+ call_vars :: [core_vars()] | 'undefined',
+ var_map :: dict:dict() | 'undefined'}).
-record(dep_call, {call_name :: dep_calls(),
- args :: args(),
+ args :: args() | 'undefined',
arg_types :: [erl_types:erl_type()],
vars :: [core_vars()],
state :: dialyzer_dataflow:state(),
file_line :: file_line(),
- var_map :: dict:dict()}).
+ var_map :: dict:dict() | 'undefined'}).
-record(fun_call, {caller :: dialyzer_callgraph:mfa_or_funlbl(),
callee :: dialyzer_callgraph:mfa_or_funlbl(),
arg_types :: [erl_types:erl_type()],
@@ -121,7 +122,7 @@
arg :: var_to_map1()}).
-record(warn_call, {call_name :: warn_calls(),
args :: args(),
- var_map :: dict:dict()}).
+ var_map :: dict:dict() | 'undefined'}).
-type case_tags() :: 'beg_case' | #beg_clause{} | #end_clause{} | #end_case{}.
-type code() :: [#dep_call{} | #fun_call{} | #warn_call{} |
@@ -139,8 +140,9 @@
fun_mfa :: dialyzer_callgraph:mfa_or_funlbl(),
fun_label :: label()}).
--record(races, {curr_fun :: dialyzer_callgraph:mfa_or_funlbl(),
- curr_fun_label :: label(),
+-record(races, {curr_fun :: dialyzer_callgraph:mfa_or_funlbl()
+ | 'undefined',
+ curr_fun_label :: label() | 'undefined',
curr_fun_args = 'empty' :: core_args(),
new_table = 'no_t' :: table(),
race_list = [] :: code(),
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 18f02e6742..987da3aecf 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -134,7 +134,6 @@ get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
end
end.
--type doc_plt() :: 'undefined' | dialyzer_plt:plt().
-spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
doc_plt(), dialyzer_codeserver:codeserver(),
dialyzer_timing:timing_server(), [solver()], pid()) ->
diff --git a/lib/dialyzer/src/dialyzer_timing.erl b/lib/dialyzer/src/dialyzer_timing.erl
index 33fd008732..aa71318d8e 100644
--- a/lib/dialyzer/src/dialyzer_timing.erl
+++ b/lib/dialyzer/src/dialyzer_timing.erl
@@ -2,7 +2,7 @@
%%-------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 0b8b244cc9..1787b66192 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -48,6 +48,7 @@
t_is_float/1, t_is_fun/1,
t_is_integer/1, t_non_neg_integer/0,
t_is_list/1, t_is_nil/1, t_is_none/1, t_is_number/1,
+ t_is_singleton/1,
t_limit/2, t_list/0, t_list/1,
t_list_elements/1, t_nonempty_list/1, t_maybe_improper_list/0,
@@ -57,7 +58,7 @@
t_timeout/0, t_tuple/0, t_tuple/1,
t_var/1, t_var_name/1,
t_none/0, t_unit/0,
- t_map/1
+ t_map/0, t_map/1, t_map_get/2, t_map_put/2
]).
-include("dialyzer.hrl").
@@ -65,10 +66,12 @@
%%-----------------------------------------------------------------------------
-type dep() :: integer(). %% type variable names used as constraint ids
+-type deps() :: ordsets:ordset(dep()).
+
-type type_var() :: erl_types:erl_type(). %% actually: {'c','var',_,_}
--record(fun_var, {'fun' :: fun((_) -> erl_types:erl_type()), deps :: [dep()],
- origin :: integer()}).
+-record(fun_var, {'fun' :: fun((_) -> erl_types:erl_type()), deps :: deps(),
+ origin :: integer() | 'undefined'}).
-type constr_op() :: 'eq' | 'sub'.
-type fvar_or_type() :: #fun_var{} | erl_types:erl_type().
@@ -76,20 +79,21 @@
-record(constraint, {lhs :: erl_types:erl_type(),
op :: constr_op(),
rhs :: fvar_or_type(),
- deps :: [dep()]}).
+ deps :: deps()}).
-type constraint() :: #constraint{}.
+-type mask() :: ordsets:ordset(non_neg_integer()).
+
-record(constraint_list, {type :: 'conj' | 'disj',
list :: [constr()],
- deps :: [dep()],
- masks :: [{dep(),[non_neg_integer()]}] |
- {'d',dict:dict(dep(), [non_neg_integer()])},
- id :: {'list', dep()}}).
+ deps :: deps(),
+ masks = maps:new() :: #{dep() => mask()},
+ id :: {'list', dep()} | 'undefined'}).
-type constraint_list() :: #constraint_list{}.
--record(constraint_ref, {id :: type_var(), deps :: [dep()]}).
+-record(constraint_ref, {id :: type_var(), deps :: deps()}).
-type constraint_ref() :: #constraint_ref{}.
@@ -98,32 +102,33 @@
-type types() :: erl_types:type_table().
-type typesig_scc() :: [{mfa(), {cerl:c_var(), cerl:c_fun()}, types()}].
--type typesig_funmap() :: [{type_var(), type_var()}]. %% Orddict
+-type typesig_funmap() :: #{type_var() => type_var()}.
-type prop_types() :: dict:dict(label(), types()).
--type dict_or_ets() :: {'d', prop_types()} | {'e', ets:tid()}.
-
--record(state, {callgraph :: dialyzer_callgraph:callgraph(),
- cs = [] :: [constr()],
- cmap = {'d', dict:new()} :: dict_or_ets(),
- fun_map = [] :: typesig_funmap(),
- fun_arities = dict:new() :: dict:dict(type_var(), arity()),
- in_match = false :: boolean(),
- in_guard = false :: boolean(),
- module :: module(),
- name_map = dict:new() :: dict:dict(mfa(),
- cerl:c_fun()),
- next_label = 0 :: label(),
- self_rec :: 'false' | erl_types:erl_type(),
- plt :: dialyzer_plt:plt(),
- prop_types = {'d', dict:new()} :: dict_or_ets(),
- records = dict:new() :: types(),
- scc = [] :: [type_var()],
- mfas :: [tuple()],
- solvers = [] :: [solver()]
+-record(state, {callgraph :: dialyzer_callgraph:callgraph()
+ | 'undefined',
+ cs = [] :: [constr()],
+ cmap = maps:new() :: #{type_var() => constr()},
+ fun_map = maps:new() :: typesig_funmap(),
+ fun_arities = maps:new() :: #{type_var() => arity()},
+ in_match = false :: boolean(),
+ in_guard = false :: boolean(),
+ module :: module(),
+ name_map = maps:new() :: #{mfa() => cerl:c_fun()},
+ next_label = 0 :: label(),
+ self_rec :: 'false' | erl_types:erl_type(),
+ plt :: dialyzer_plt:plt()
+ | 'undefined',
+ prop_types = dict:new() :: prop_types(),
+ records = dict:new() :: types(),
+ scc = [] :: ordsets:ordset(type_var()),
+ mfas :: [mfa()],
+ solvers = [] :: [solver()]
}).
+-type state() :: #state{}.
+
%%-----------------------------------------------------------------------------
-define(TYPE_LIMIT, 4).
@@ -185,7 +190,8 @@ analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers0) ->
Funs = state__scc(State3),
pp_constrs_scc(Funs, State3),
constraints_to_dot_scc(Funs, State3),
- solve(Funs, State3).
+ T = solve(Funs, State3),
+ dict:from_list(maps:to_list(T)).
assert_format_of_scc([{_MFA, {_Var, _Fun}, _Records}|Left]) ->
assert_format_of_scc(Left);
@@ -309,7 +315,7 @@ traverse(Tree, DefinedVars, State) ->
Hd = cerl:cons_hd(Tree),
Tl = cerl:cons_tl(Tree),
{State1, [HdVar, TlVar]} = traverse_list([Hd, Tl], DefinedVars, State),
- case cerl:is_literal(cerl:fold_literal(Tree)) of
+ case cerl:is_literal(fold_literal_maybe_match(Tree, State)) of
true ->
%% We do not need to do anything more here.
{State, t_cons(HdVar, TlVar)};
@@ -390,8 +396,18 @@ traverse(Tree, DefinedVars, State) ->
{State2, _} = traverse_list(Funs, DefinedVars1, State1),
traverse(Body, DefinedVars1, State2);
literal ->
- Type = t_from_term(cerl:concrete(Tree)),
- {State, Type};
+ %% Maps are special; a literal pattern matches more than just the value
+ %% constructed by the literal. For example #{} constructs the empty map,
+ %% but matches every map.
+ case state__is_in_match(State) of
+ true ->
+ Tree1 = dialyzer_utils:refold_pattern(Tree),
+ case cerl:is_literal(Tree1) of
+ false -> traverse(Tree1, DefinedVars, State);
+ true -> {State, t_from_term(cerl:concrete(Tree))}
+ end;
+ _ -> {State, t_from_term(cerl:concrete(Tree))}
+ end;
module ->
Defs = cerl:module_defs(Tree),
Funs = [Fun || {_Var, Fun} <- Defs],
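As the comment in the literal clause above notes, a map literal in pattern position matches more than the value it constructs, which is why literals are refolded with dialyzer_utils:refold_pattern/1 before being traversed. A tiny invented example:

-module(map_literal_example).   %% hypothetical, illustration only
-export([is_map_like/1]).

%% #{} is a literal in Core Erlang, yet as a pattern it matches every map,
%% so is_map_like(#{a => 1}) evaluates to yes.
is_map_like(#{}) -> yes;
is_map_like(_) -> no.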
@@ -435,7 +451,7 @@ traverse(Tree, DefinedVars, State) ->
Elements = cerl:tuple_es(Tree),
{State1, EVars} = traverse_list(Elements, DefinedVars, State),
{State2, TupleType} =
- case cerl:is_literal(cerl:fold_literal(Tree)) of
+ case cerl:is_literal(fold_literal_maybe_match(Tree, State1)) of
true ->
%% We do not need to do anything more here.
{State, t_tuple(EVars)};
@@ -474,7 +490,111 @@ traverse(Tree, DefinedVars, State) ->
[] -> {State2, TupleType}
end;
map ->
- {State, t_map([])};
+ Entries = cerl:map_es(Tree),
+ MapFoldFun = fun(Entry, AccState) ->
+ AccState1 = state__set_in_match(AccState, false),
+ {AccState2, KeyVar} = traverse(cerl:map_pair_key(Entry),
+ DefinedVars, AccState1),
+ AccState3 = state__set_in_match(
+ AccState2, state__is_in_match(AccState)),
+ {AccState4, ValVar} = traverse(cerl:map_pair_val(Entry),
+ DefinedVars, AccState3),
+ {{KeyVar, ValVar}, AccState4}
+ end,
+ {Pairs, State1} = lists:mapfoldl(MapFoldFun, State, Entries),
+ %% We mustn't recurse into map arguments to matches. Not only are they
+ %% syntactically only allowed to be the literal #{}, but that would also
+ %% cause an infinite recursion, since traverse/3 unfolds literals with
+      %% maps in them using dialyzer_utils:refold_pattern/1.
+ {State2, ArgVar} =
+ case state__is_in_match(State) of
+ false -> traverse(cerl:map_arg(Tree), DefinedVars, State1);
+ true -> {State1, t_map()}
+ end,
+ MapVar = mk_var(Tree),
+ MapType = ?mk_fun_var(
+ fun(Map) ->
+ lists:foldl(
+ fun({K,V}, TypeAcc) ->
+ t_map_put({lookup_type(K, Map),
+ lookup_type(V, Map)},
+ TypeAcc)
+ end, t_inf(t_map(), lookup_type(ArgVar, Map)),
+ Pairs)
+ end, [ArgVar | lists:append([[K,V] || {K,V} <- Pairs])]),
+ %% TODO: does the "same element appearing several times" problem apply
+ %% here too?
+ Fun =
+ fun({KeyVar, ValVar}, {AccState, ShadowKeys}) ->
+ %% If Val is known to be the last association of Key (i.e. Key
+ %% is not in ShadowKeys), Val must be a subtype of what is
+ %% associated to Key in Tree
+ TypeFun =
+ fun(Map) ->
+ KeyType = lookup_type(KeyVar, Map),
+ case t_is_singleton(KeyType) of
+ false -> t_any();
+ true ->
+ MT = t_inf(lookup_type(MapVar, Map), t_map()),
+ case t_is_none(MT) of
+ true -> t_none();
+ false ->
+ DisjointFromKeyType =
+ fun(ShadowKey) ->
+ t_is_none(t_inf(lookup_type(ShadowKey, Map),
+ KeyType))
+ end,
+ case lists:all(DisjointFromKeyType, ShadowKeys) of
+ true -> t_map_get(KeyType, MT);
+ %% A later association might shadow this one
+ false -> t_any()
+ end
+ end
+ end
+ end,
+ ValType = ?mk_fun_var(TypeFun, [KeyVar, MapVar | ShadowKeys]),
+ {state__store_conj(ValVar, sub, ValType, AccState),
+ [KeyVar | ShadowKeys]}
+ end,
+ %% Accumulate shadowing keys right-to-left
+ {State3, _} = lists:foldr(Fun, {State2, []}, Pairs),
+ %% In a map expression, Arg must contain all keys that are inserted with
+ %% the exact (:=) operator, and are known (i.e. are not in ShadowedKeys)
+ %% to not have been introduced by a previous association
+ State4 =
+ case state__is_in_match(State) of
+ true -> State3;
+ false ->
+ ArgFun =
+ fun(Map) ->
+ FoldFun =
+ fun({{KeyVar, _}, Entry}, {AccType, ShadowedKeys}) ->
+ OpTree = cerl:map_pair_op(Entry),
+ KeyType = lookup_type(KeyVar, Map),
+ AccType1 =
+ case cerl:is_literal(OpTree) andalso
+ cerl:concrete(OpTree) =:= exact of
+ true ->
+ case t_is_none(t_inf(ShadowedKeys, KeyType)) of
+ true ->
+ t_map_put({KeyType, t_any()}, AccType);
+ false ->
+ AccType
+ end;
+ false ->
+ AccType
+ end,
+ {AccType1, t_sup(KeyType, ShadowedKeys)}
+ end,
+ %% Accumulate shadowed keys left-to-right
+ {ResType, _} = lists:foldl(FoldFun, {t_map(), t_none()},
+ lists:zip(Pairs, Entries)),
+ ResType
+ end,
+ ArgType = ?mk_fun_var(ArgFun, [KeyVar || {KeyVar, _} <- Pairs]),
+ state__store_conj(ArgVar, sub, ArgType, State3)
+ end,
+ {state__store_conj(MapVar, sub, MapType, State4), MapVar};
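The map clause above constrains each value only when no later association can shadow its key, and requires the updated argument to already contain every key written with the exact (:=) operator that no earlier association in the same expression introduces. An invented expression illustrating both points:

-module(map_update_example).   %% hypothetical, illustration only
-export([update/4]).

%% The first a => X is shadowed by a => Y, so only Y constrains key a in the
%% result; b := Z uses the exact operator, so M0 must already contain key b.
update(M0, X, Y, Z) ->
    M0#{a => X, a => Y, b := Z}.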
values ->
%% We can get into trouble when unifying products that have the
%% same element appearing several times. Handle these cases by
@@ -825,11 +945,11 @@ get_safe_underapprox(Pats, Guard) ->
Map1 = cerl_trees:fold(fun(X, Acc) ->
case cerl:is_c_var(X) of
true ->
- dict:store(cerl_trees:get_label(X), t_any(),
- Acc);
+ maps:put(cerl_trees:get_label(X), t_any(),
+ Acc);
false -> Acc
end
- end, dict:new(), cerl:c_values(Pats)),
+ end, maps:new(), cerl:c_values(Pats)),
{Type, Map2} = get_underapprox_from_guard(Guard, Map1),
Map3 = case t_is_none(t_inf(t_from_term(true), Type)) of
true -> throw(dont_know);
@@ -837,8 +957,8 @@ get_safe_underapprox(Pats, Guard) ->
case cerl:is_c_var(Guard) of
false -> Map2;
true ->
- dict:store(cerl_trees:get_label(Guard),
- t_from_term(true), Map2)
+ maps:put(cerl_trees:get_label(Guard),
+ t_from_term(true), Map2)
end
end,
{Ts, _Map4} = get_safe_underapprox_1(Pats, [], Map3),
@@ -864,7 +984,7 @@ get_underapprox_from_guard(Tree, Map) ->
case t_is_none(Inf) of
true -> throw(dont_know);
false ->
- {True, dict:store(cerl_trees:get_label(Fun), Inf, Map1)}
+ {True, maps:put(cerl_trees:get_label(Fun), Inf, Map1)}
end
end;
MFA ->
@@ -880,7 +1000,7 @@ get_underapprox_from_guard(Tree, Map) ->
case cerl:is_literal(Arg) of
true -> {True, Map1};
false ->
- {True, dict:store(cerl_trees:get_label(Arg), Inf, Map1)}
+ {True, maps:put(cerl_trees:get_label(Arg), Inf, Map1)}
end
end;
error ->
@@ -912,7 +1032,7 @@ get_underapprox_from_guard(Tree, Map) ->
end;
var ->
Type =
- case dict:find(cerl_trees:get_label(Tree), Map) of
+ case maps:find(cerl_trees:get_label(Tree), Map) of
error -> throw(dont_know);
{ok, T} -> T
end,
@@ -946,6 +1066,7 @@ get_type_test({erlang, is_float, 1}) -> {ok, t_float()};
get_type_test({erlang, is_function, 1}) -> {ok, t_fun()};
get_type_test({erlang, is_integer, 1}) -> {ok, t_integer()};
get_type_test({erlang, is_list, 1}) -> {ok, t_list()};
+get_type_test({erlang, is_map, 1}) -> {ok, t_map()};
get_type_test({erlang, is_number, 1}) -> {ok, t_number()};
get_type_test({erlang, is_pid, 1}) -> {ok, t_pid()};
get_type_test({erlang, is_port, 1}) -> {ok, t_port()};
@@ -1002,7 +1123,9 @@ bitstr_val_constr(SizeType, UnitVal, Flags) ->
end
end.
-get_safe_underapprox_1([Pat|Left], Acc, Map) ->
+get_safe_underapprox_1([Pat0|Left], Acc, Map) ->
+ %% Maps should be treated as patterns, not as literals
+ Pat = dialyzer_utils:refold_pattern(Pat0),
case cerl:type(Pat) of
alias ->
APat = cerl:alias_pat(Pat),
@@ -1013,7 +1136,7 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
case t_is_none(Inf) of
true -> throw(dont_know);
false ->
- Map3 = dict:store(cerl_trees:get_label(AVar), Inf, Map2),
+ Map3 = maps:put(cerl_trees:get_label(AVar), Inf, Map2),
get_safe_underapprox_1(Left, [Inf|Acc], Map3)
end;
binary ->
@@ -1046,15 +1169,42 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
Type = t_tuple(Ts),
get_safe_underapprox_1(Left, [Type|Acc], Map1);
map ->
- %% TODO: Can maybe do something here
- throw(dont_know);
+      %% Some assertions in case the syntax gets more permissive in the future
+ true = #{} =:= cerl:concrete(cerl:map_arg(Pat)),
+ true = lists:all(fun(P) ->
+ cerl:is_literal(Op = cerl:map_pair_op(P)) andalso
+ exact =:= cerl:concrete(Op)
+ end, cerl:map_es(Pat)),
+ KeyTrees = lists:map(fun cerl:map_pair_key/1, cerl:map_es(Pat)),
+ ValTrees = lists:map(fun cerl:map_pair_val/1, cerl:map_es(Pat)),
+ %% Keys must not be underapproximated. Overapproximations are safe.
+ Keys = get_safe_overapprox(KeyTrees),
+ {Vals, Map1} = get_safe_underapprox_1(ValTrees, [], Map),
+ case lists:all(fun erl_types:t_is_singleton/1, Keys) of
+ false -> throw(dont_know);
+ true -> ok
+ end,
+ SortedPairs = lists:sort(lists:zip(Keys, Vals)),
+ %% We need to deal with duplicates ourselves
+ SquashDuplicates =
+ fun SquashDuplicates([{K,First},{K,Second}|List]) ->
+ case t_is_none(Inf = t_inf(First, Second)) of
+ true -> throw(dont_know);
+ false -> [{K, Inf}|SquashDuplicates(List)]
+ end;
+ SquashDuplicates([Good|Rest]) ->
+ [Good|SquashDuplicates(Rest)];
+ SquashDuplicates([]) -> []
+ end,
+ Type = t_map(SquashDuplicates(SortedPairs)),
+ get_safe_underapprox_1(Left, [Type|Acc], Map1);
values ->
Es = cerl:values_es(Pat),
{Ts, Map1} = get_safe_underapprox_1(Es, [], Map),
Type = t_product(Ts),
get_safe_underapprox_1(Left, [Type|Acc], Map1);
var ->
- case dict:find(cerl_trees:get_label(Pat), Map) of
+ case maps:find(cerl_trees:get_label(Pat), Map) of
error -> throw(dont_know);
{ok, VarType} -> get_safe_underapprox_1(Left, [VarType|Acc], Map)
end
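In the map clause of get_safe_underapprox_1/3 above, keys are over-approximated and must be singleton types, values are under-approximated, and duplicate keys are merged with their infimum. A minimal invented pattern showing the safe direction of each approximation:

-module(map_underapprox_example).   %% hypothetical, illustration only
-export([only_empty_a/1]).

%% The literal key a over-approximates to the singleton atom a, while the
%% value pattern [] is an exact, and therefore safe, under-approximation.
only_empty_a(#{a := []}) -> true.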
@@ -1062,6 +1212,15 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
get_safe_underapprox_1([], Acc, Map) ->
{lists:reverse(Acc), Map}.
+get_safe_overapprox(Pats) ->
+ lists:map(fun get_safe_overapprox_1/1, Pats).
+
+get_safe_overapprox_1(Pat) ->
+ case cerl:is_literal(Lit = cerl:fold_literal(Pat)) of
+ true -> t_from_term(cerl:concrete(Lit));
+ false -> t_any()
+ end.
+
%%----------------------------------------
%% Guards
%%
@@ -1261,6 +1420,8 @@ get_bif_constr({erlang, is_integer, 1}, Dst, [Arg], State) ->
get_bif_test_constr(Dst, Arg, t_integer(), State);
get_bif_constr({erlang, is_list, 1}, Dst, [Arg], State) ->
get_bif_test_constr(Dst, Arg, t_maybe_improper_list(), State);
+get_bif_constr({erlang, is_map, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_map(), State);
get_bif_constr({erlang, is_number, 1}, Dst, [Arg], State) ->
get_bif_test_constr(Dst, Arg, t_number(), State);
get_bif_constr({erlang, is_pid, 1}, Dst, [Arg], State) ->
@@ -1642,12 +1803,16 @@ solve([Fun], State) ->
solve([_|_] = SCC, State) ->
?debug("============ Analyzing SCC: ~w ===========\n",
[[debug_lookup_name(F) || F <- SCC]]),
- {Parallel, NewState} =
- case parallel_split(SCC) of
- false -> {false, State};
- SplitSCC -> {SplitSCC, minimize_state(State)}
- end,
- solve_scc(SCC, Parallel, map_new(), NewState, false).
+ Users = comp_users(SCC, State),
+ solve_scc(SCC, map_new(), State, Users, _ToSolve=SCC, false).
+
+comp_users(SCC, State) ->
+ Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
+ Vars = lists:sort([t_var_name(Var) || {_, {ok, Var}} <- Vars0]),
+ family([{t_var(V), F} ||
+ F <- SCC,
+ V <- ordsets:intersection(get_deps(state__get_cs(F, State)),
+ Vars)]).
solve_fun(Fun, FunMap, State) ->
Cs = state__get_cs(Fun, State),
@@ -1662,7 +1827,7 @@ solve_fun(Fun, FunMap, State) ->
end,
enter_type(Fun, NewType, NewFunMap1).
-solve_scc(SCC, Parallel, Map, State, TryingUnit) ->
+solve_scc(SCC, Map, State, Users, ToSolve, TryingUnit) ->
Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
Vars = [Var || {_, {ok, Var}} <- Vars0],
Funs = [Fun || {Fun, {ok, _}} <- Vars0],
@@ -1670,16 +1835,13 @@ solve_scc(SCC, Parallel, Map, State, TryingUnit) ->
RecTypes = [t_limit(Type, ?TYPE_LIMIT) || Type <- Types],
CleanMap = lists:foldl(fun(Fun, AccFunMap) ->
erase_type(t_var_name(Fun), AccFunMap)
- end, Map, SCC),
+ end, Map, ToSolve),
Map1 = enter_type_lists(Vars, RecTypes, CleanMap),
?debug("Checking SCC: ~w\n", [[debug_lookup_name(F) || F <- SCC]]),
- FunSet = ordsets:from_list([t_var_name(F) || F <- SCC]),
- Map2 =
- case Parallel of
- false -> solve_whole_scc(SCC, Map1, State);
- SplitSCC -> solve_whole_scc_parallel(SplitSCC, Map1, State)
- end,
- case maps_are_equal(Map2, Map, FunSet) of
+ SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State) end,
+ Map2 = lists:foldl(SolveFun, Map1, ToSolve),
+ Updated = updated_vars_only(Vars, Map, Map2),
+ case Updated =:= [] of
true ->
?debug("SCC ~w reached fixpoint\n", [SCC]),
NewTypes = unsafe_lookup_type_list(Funs, Map2),
@@ -1692,127 +1854,21 @@ solve_scc(SCC, Parallel, Map, State, TryingUnit) ->
true -> t_fun(t_fun_args(T), t_unit())
end || T <- NewTypes],
Map3 = enter_type_lists(Funs, UnitTypes, Map2),
- solve_scc(SCC, Parallel, Map3, State, true);
+ solve_scc(SCC, Map3, State, Users, SCC, true);
false ->
- case Parallel of
- false -> true;
- _ -> dispose_state(State)
- end,
Map2
end;
false ->
?debug("SCC ~w did not reach fixpoint\n", [SCC]),
- solve_scc(SCC, Parallel, Map2, State, TryingUnit)
- end.
-
-solve_whole_scc(SCC, Map, State) ->
- SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State) end,
- lists:foldl(SolveFun, Map, SCC).
-
-%%------------------------------------------------------------------------------
-
--define(worth_it, 42).
-
-parallel_split(SCC) ->
- Length = length(SCC),
- case Length > 2*?worth_it of
- false -> false;
- true ->
- case min(dialyzer_utils:parallelism(), 8) of
- 1 -> false;
- CPUs ->
- FullShare = Length div CPUs + 1,
- Unit = max(FullShare, ?worth_it),
- split(SCC, Unit, [])
- end
- end.
-
-minimize_state(#state{
- cmap = {d, CMap},
- fun_map = FunMap,
- fun_arities = FunArities,
- self_rec = SelfRec,
- prop_types = {d, PropTypes},
- solvers = Solvers
- }) ->
- Opts = [{read_concurrency, true}],
- ETSCMap = ets:new(cmap, Opts),
- ETSPropTypes = ets:new(prop_types, Opts),
- true = ets:insert(ETSCMap, dict:to_list(CMap)),
- true = ets:insert(ETSPropTypes, dict:to_list(PropTypes)),
- #state
- {cmap = {e, ETSCMap},
- fun_map = FunMap,
- fun_arities = FunArities,
- self_rec = SelfRec,
- prop_types = {e, ETSPropTypes},
- solvers = Solvers
- }.
-
-dispose_state(#state{cmap = {e, ETSCMap},
- prop_types = {e, ETSPropTypes}}) ->
- true = ets:delete(ETSCMap),
- true = ets:delete(ETSPropTypes).
-
-solve_whole_scc_parallel(SplitSCC, Map, State) ->
- Workers = spawn_workers(SplitSCC, Map, State),
- wait_results(Workers, Map, fold_res_fun(State)).
-
-spawn_workers(SplitSCC, Map, State) ->
- Spawner = solve_scc_spawner(self(), Map, State),
- lists:foreach(Spawner, SplitSCC),
- length(SplitSCC).
-
-wait_results(0, Map, _FoldResFun) ->
- Map;
-wait_results(Pending, Map, FoldResFun) ->
- Res = receive_scc_result(),
- NewMap = lists:foldl(FoldResFun, Map, Res),
- wait_results(Pending-1, NewMap, FoldResFun).
-
-solve_scc_spawner(Parent, Map, State) ->
- fun(SCCPart) ->
- spawn_link(fun() -> solve_scc_worker(Parent, SCCPart, Map, State) end)
- end.
-
-split([], _Unit, Acc) ->
- Acc;
-split(List, Unit, Acc) ->
- {Taken, Rest} =
- try
- lists:split(Unit, List)
- catch
- _:_ -> {List, []}
- end,
- split(Rest, Unit, [Taken|Acc]).
-
-solve_scc_worker(Parent, SCCPart, Map, State) ->
- SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State) end,
- FinalMap = lists:foldl(SolveFun, Map, SCCPart),
- Res =
- [{F, t_limit(unsafe_lookup_type(F, FinalMap), ?TYPE_LIMIT)} ||
- F <- SCCPart],
- send_scc_result(Parent, Res).
-
-fold_res_fun(State) ->
- fun({F, Type}, Map) ->
- case state__get_rec_var(F, State) of
- {ok, R} ->
- enter_type(R, Type, enter_type(F, Type, Map));
- error ->
- enter_type(F, Type, Map)
- end
+ ToSolve1 = affected(Updated, Users),
+ solve_scc(SCC, Map2, State, Users, ToSolve1, TryingUnit)
end.
-receive_scc_result() ->
- receive
- {scc_fun, Res} -> Res
- end.
-
-send_scc_result(Parent, Res) ->
- Parent ! {scc_fun, Res}.
-
-%%------------------------------------------------------------------------------
+affected(Updated, Users) ->
+ lists:umerge([case lists:keyfind(V, 1, Users) of
+ {V, Vs} -> Vs;
+ false -> []
+ end || V <- Updated]).
scc_fold_fun(F, FunMap, State) ->
Deps = get_deps(state__get_cs(F, State)),
@@ -1854,7 +1910,7 @@ solver(Solver, SolveFun) ->
solve_fun(v1, _Fun, Cs, FunMap, State) ->
fun() ->
- {ok, _MapDict, NewMap} = solve_ref_or_list(Cs, FunMap, dict:new(), State),
+ {ok, _MapDict, NewMap} = solve_ref_or_list(Cs, FunMap, map_new(), State),
{ok, NewMap}
end;
solve_fun(v2, Fun, _Cs, FunMap, State) ->
@@ -1894,8 +1950,8 @@ sane_maps(Map1, Map2, Keys, _S1, _S2) ->
%% Solver v2
--record(v2_state, {constr_data = dict:new() :: dict:dict(),
- state :: #state{}}).
+-record(v2_state, {constr_data = maps:new() :: map(),
+ state :: state()}).
v2_solve_ref(Fun, Map, State) ->
V2State = #v2_state{state = State},
@@ -2056,30 +2112,30 @@ v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) ->
v2_solve_disj(Is, Cs, I+1, Map, V2State, UL, MapL, Eval, Uneval, Failed).
save_local_map(#v2_state{constr_data = ConData}=V2State, Id, U, Map) ->
- Part0 = [{V,dict:fetch(V, Map)} || V <- U],
+ Part0 = [{V,maps:get(V, Map)} || V <- U],
Part1 =
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error -> []; % cannot happen
{ok, {Part2,[]}} -> Part2
end,
?debug("save local map Id=~w:\n", [Id]),
Part = lists:ukeymerge(1, lists:keysort(1, Part0), Part1),
- pp_map("New Part", dict:from_list(Part0)),
- pp_map("Old Part", dict:from_list(Part1)),
- pp_map(" => Part", dict:from_list(Part)),
- V2State#v2_state{constr_data = dict:store(Id, {Part,[]}, ConData)}.
+ pp_map("New Part", maps:from_list(Part0)),
+ pp_map("Old Part", maps:from_list(Part1)),
+ pp_map(" => Part", maps:from_list(Part)),
+ V2State#v2_state{constr_data = maps:put(Id, {Part,[]}, ConData)}.
restore_local_map(#v2_state{constr_data = ConData}, Id, Map0) ->
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error -> Map0;
{ok, failed} -> Map0;
{ok, {[],_}} -> Map0;
{ok, {Part0,U}} ->
Part = [KV || {K,_V} = KV <- Part0, not lists:member(K, U)],
?debug("restore local map Id=~w U=~w\n", [Id, U]),
- pp_map("Part", dict:from_list(Part)),
+ pp_map("Part", maps:from_list(Part)),
pp_map("Map0", Map0),
- Map = lists:foldl(fun({K,V}, D) -> dict:store(K, V, D) end, Map0, Part),
+ Map = lists:foldl(fun({K,V}, D) -> maps:put(K, V, D) end, Map0, Part),
pp_map("Map", Map),
Map
end.
@@ -2159,31 +2215,26 @@ report_detected_loop(_) ->
add_mask_to_flags(Flags, [Im|M], I, L) when I > Im ->
add_mask_to_flags(Flags, M, I, [Im|L]);
add_mask_to_flags(Flags, [_|M], _I, L) ->
- {lists:umerge(Flags, M), lists:reverse(L)}.
+ {lists:umerge(M, Flags), lists:reverse(L)}.
-get_mask(V, {d, Masks}) ->
- case dict:find(V, Masks) of
+get_mask(V, Masks) ->
+ case maps:find(V, Masks) of
error -> [];
{ok, M} -> M
- end;
-get_mask(V, Masks) ->
- case lists:keyfind(V, 1, Masks) of
- false -> [];
- {V, M} -> M
end.
get_flags(#v2_state{constr_data = ConData}=V2State0, C) ->
#constraint_list{id = Id, list = Cs, masks = Masks} = C,
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error ->
?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]),
- V2State = V2State0#v2_state{constr_data = dict:store(Id, {[],[]}, ConData)},
+ V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)},
{V2State, lists:seq(1, length(Cs))};
{ok, failed} ->
{V2State0, failed_list};
{ok, {Part,U}} when U =/= [] ->
?debug("get_flags Id=~w U=~w\n", [Id, U]),
- V2State = V2State0#v2_state{constr_data = dict:store(Id, {Part,[]}, ConData)},
+ V2State = V2State0#v2_state{constr_data = maps:put(Id, {Part,[]}, ConData)},
save_updated_vars_list(Cs, vars_per_child(U, Masks), V2State)
end.
@@ -2212,13 +2263,13 @@ save_updated_vars(#constraint_ref{id = Id}, U, V2State) ->
save_updated_vars1(V2State, C, U) ->
#v2_state{constr_data = ConData} = V2State,
#constraint_list{id = Id} = C,
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error -> V2State; % error means everything is flagged
{ok, failed} -> V2State;
{ok, {Part,U0}} ->
%% Duplicates are not so common; let masks/2 remove them.
U1 = U ++ U0,
- V2State#v2_state{constr_data = dict:store(Id, {Part,U1}, ConData)}
+ V2State#v2_state{constr_data = maps:put(Id, {Part,U1}, ConData)}
end.
-ifdef(DEBUG).
@@ -2228,12 +2279,12 @@ pp_constr_data(_Tag, #v2_state{constr_data = D}) ->
case _PartU of
{_Part, _U} ->
io:format("Id: ~w Vars: ~w\n", [_Id, _U]),
- [pp_map("Part", dict:from_list(_Part)) || _Part =/= []];
+ [pp_map("Part", maps:from_list(_Part)) || _Part =/= []];
failed ->
io:format("Id: ~w failed list\n", [_Id])
end
end ||
- {_Id, _PartU} <- lists:keysort(1, dict:to_list(D))],
+ {_Id, _PartU} <- lists:keysort(1, maps:to_list(D))],
ok.
-else.
@@ -2243,17 +2294,17 @@ pp_constr_data(_Tag, _V2State) ->
failed_list(#constraint_list{id = Id}, #v2_state{constr_data = D}=V2State) ->
?debug("error list ~w~n", [Id]),
- V2State#v2_state{constr_data = dict:store(Id, failed, D)}.
+ V2State#v2_state{constr_data = maps:put(Id, failed, D)}.
is_failed_list(#constraint_list{id = Id}, #v2_state{constr_data = D}) ->
- dict:find(Id, D) =:= {ok, failed}.
+ maps:find(Id, D) =:= {ok, failed}.
%% Solver v1
solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
Map, MapDict, State) ->
{OldLocalMap, Check} =
- case dict:find(Id, MapDict) of
+ case maps:find(Id, MapDict) of
error -> {map_new(), false};
{ok, M} -> {M, true}
end,
@@ -2299,12 +2350,12 @@ solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
{ok, Var} -> enter_type(Var, FunType, NewMap1);
error -> NewMap1
end,
- {ok, dict:store(Id, NewMap2, NewMapDict), NewMap2}
+ {ok, maps:put(Id, NewMap2, NewMapDict), NewMap2}
end;
solve_ref_or_list(#constraint_list{type=Type, list = Cs, deps = Deps, id = Id},
Map, MapDict, State) ->
{OldLocalMap, Check} =
- case dict:find(Id, MapDict) of
+ case maps:find(Id, MapDict) of
error -> {map_new(), false};
{ok, M} -> {M, true}
end,
@@ -2354,7 +2405,7 @@ solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
solve_clist(Cs, conj, Id, Deps, MapDict, Map, State) ->
case solve_cs(Cs, Map, MapDict, State) of
{error, NewMapDict} ->
- {error, dict:store(Id, error, NewMapDict)};
+ {error, maps:put(Id, error, NewMapDict)};
{ok, NewMapDict, NewMap} = Ret ->
case Cs of
[_] ->
@@ -2362,7 +2413,7 @@ solve_clist(Cs, conj, Id, Deps, MapDict, Map, State) ->
Ret;
_ ->
case maps_are_equal(Map, NewMap, Deps) of
- true -> {ok, dict:store(Id, NewMap, NewMapDict), NewMap};
+ true -> {ok, maps:put(Id, NewMap, NewMapDict), NewMap};
false -> solve_clist(Cs, conj, Id, Deps, NewMapDict, NewMap, State)
end
end
@@ -2376,10 +2427,10 @@ solve_clist(Cs, disj, Id, _Deps, MapDict, Map, State) ->
end,
{Maps, NewMapDict} = lists:mapfoldl(Fun, MapDict, Cs),
case [X || {ok, X} <- Maps] of
- [] -> {error, dict:store(Id, error, NewMapDict)};
+ [] -> {error, maps:put(Id, error, NewMapDict)};
MapList ->
NewMap = join_maps(MapList),
- {ok, dict:store(Id, NewMap, NewMapDict), NewMap}
+ {ok, maps:put(Id, NewMap, NewMapDict), NewMap}
end.
solve_cs([#constraint_ref{} = C|Tail], Map, MapDict, State) ->
@@ -2460,7 +2511,7 @@ report_failed_constraint(_C, _Map) ->
%% ============================================================================
map_new() ->
- dict:new().
+ maps:new().
join_maps([Map]) ->
Map;
@@ -2470,9 +2521,9 @@ join_maps(Maps) ->
constrained_keys(Maps) ->
lists:foldl(fun(TmpMap, AccKeys) ->
- [Key || Key <- AccKeys, dict:is_key(Key, TmpMap)]
+ [Key || Key <- AccKeys, maps:is_key(Key, TmpMap)]
end,
- dict:fetch_keys(hd(Maps)), tl(Maps)).
+ maps:keys(hd(Maps)), tl(Maps)).
join_maps([Key|Left], Maps = [Map|MapsLeft], AccMap) ->
NewType = join_one_key(Key, MapsLeft, lookup_type(Key, Map)),
@@ -2518,11 +2569,11 @@ prune_keys(Map1, Map2, Deps) ->
NofDeps = length(Deps),
case NofDeps > ?PRUNE_LIMIT of
true ->
- Keys1 = dict:fetch_keys(Map1),
+ Keys1 = maps:keys(Map1),
case length(Keys1) > NofDeps of
true ->
Set1 = lists:sort(Keys1),
- Set2 = lists:sort(dict:fetch_keys(Map2)),
+ Set2 = lists:sort(maps:keys(Map2)),
ordsets:intersection(ordsets:union(Set1, Set2), Deps);
false ->
Deps
@@ -2543,7 +2594,7 @@ enter_type(Key, Val, Map) when is_integer(Key) ->
true -> ok;
false -> ?debug("LimitedVal ~s\n", [format_type(LimitedVal)])
end,
- case dict:find(Key, Map) of
+ case maps:find(Key, Map) of
{ok, Value} ->
case is_equal(Value, LimitedVal) of
true -> Map;
@@ -2577,16 +2628,16 @@ enter_type2(Key, Val, Map) ->
map_store(Key, Val, Map) ->
?debug("Storing ~w :: ~s\n", [Key, format_type(Val)]),
- dict:store(Key, Val, Map).
+ maps:put(Key, Val, Map).
erase_type(Key, Map) ->
- dict:erase(Key, Map).
+ maps:remove(Key, Map).
lookup_type_list(List, Map) ->
[lookup_type(X, Map) || X <- List].
unsafe_lookup_type(Key, Map) ->
- case dict:find(t_var_name(Key), Map) of
+ case maps:find(t_var_name(Key), Map) of
{ok, Type} -> Type;
error -> t_none()
end.
@@ -2595,7 +2646,7 @@ unsafe_lookup_type_list(List, Map) ->
[unsafe_lookup_type(X, Map) || X <- List].
lookup_type(Key, Map) when is_integer(Key) ->
- case dict:find(Key, Map) of
+ case maps:find(Key, Map) of
error -> t_any();
{ok, Val} -> Val
end;
@@ -2643,7 +2694,7 @@ is_equal(Type1, Type2) ->
pp_map(_S, _Map) ->
?debug("\t~s: ~p\n",
[_S, [{X, lists:flatten(format_type(Y))} ||
- {X, Y} <- lists:keysort(1, dict:to_list(_Map))]]).
+ {X, Y} <- lists:keysort(1, maps:to_list(_Map))]]).
%% ============================================================================
%%
@@ -2653,7 +2704,7 @@ pp_map(_S, _Map) ->
new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes, Solvers) ->
List = [{MFA, Var} || {MFA, {Var, _Fun}, _Rec} <- SCC0],
- NameMap = dict:from_list(List),
+ NameMap = maps:from_list(List),
MFAs = [MFA || {MFA, _Var} <- List],
SCC = [mk_var(Fun) || {_MFA, {_Var, Fun}, _Rec} <- SCC0],
SelfRec =
@@ -2667,7 +2718,7 @@ new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes, Solvers) ->
_Many -> false
end,
#state{callgraph = CallGraph, name_map = NameMap, next_label = NextLabel,
- prop_types = {d, PropTypes}, plt = Plt, scc = ordsets:from_list(SCC),
+ prop_types = PropTypes, plt = Plt, scc = ordsets:from_list(SCC),
mfas = MFAs, self_rec = SelfRec, solvers = Solvers}.
state__set_rec_dict(State, RecDict) ->
@@ -2695,15 +2746,15 @@ state__get_fun_prototype(Op, Arity, State) ->
end.
state__lookup_rec_var_in_scope(MFA, #state{name_map = NameMap}) ->
- dict:find(MFA, NameMap).
+ maps:find(MFA, NameMap).
state__store_fun_arity(Tree, #state{fun_arities = Map} = State) ->
Arity = length(cerl:fun_vars(Tree)),
Id = mk_var(Tree),
- State#state{fun_arities = dict:store(Id, Arity, Map)}.
+ State#state{fun_arities = maps:put(Id, Arity, Map)}.
state__fun_arity(Id, #state{fun_arities = Map}) ->
- dict:fetch(Id, Map).
+ maps:get(Id, Map).
state__lookup_undef_var(Tree, #state{callgraph = CG, plt = Plt}) ->
Label = cerl_trees:get_label(Tree),
@@ -2763,21 +2814,14 @@ state__plt(#state{plt = PLT}) ->
state__new_constraint_context(State) ->
State#state{cs = []}.
-state__prop_domain(FunLabel, #state{prop_types = {e, ETSPropTypes}}) ->
- try ets:lookup_element(ETSPropTypes, FunLabel, 2) of
- {_Range_Fun, Dom} -> {ok, Dom};
- FunType -> {ok, t_fun_args(FunType)}
- catch
- _:_ -> error
- end;
-state__prop_domain(FunLabel, #state{prop_types = {d, PropTypes}}) ->
+state__prop_domain(FunLabel, #state{prop_types = PropTypes}) ->
case dict:find(FunLabel, PropTypes) of
error -> error;
{ok, {_Range_Fun, Dom}} -> {ok, Dom};
{ok, FunType} -> {ok, t_fun_args(FunType)}
end.
-state__add_prop_constrs(Tree, #state{prop_types = {d, PropTypes}} = State) ->
+state__add_prop_constrs(Tree, #state{prop_types = PropTypes} = State) ->
Label = cerl_trees:get_label(Tree),
case dict:find(Label, PropTypes) of
error -> State;
@@ -2840,14 +2884,12 @@ state__mk_vars(N, #state{next_label = NL} = State) ->
Vars = [t_var(X) || X <- lists:seq(NL, NewLabel-1)],
{State#state{next_label = NewLabel}, Vars}.
-state__store_constrs(Id, Cs, #state{cmap = {d, Dict}} = State) ->
- NewDict = dict:store(Id, Cs, Dict),
- State#state{cmap = {d, NewDict}}.
+state__store_constrs(Id, Cs, #state{cmap = Map} = State) ->
+ NewMap = maps:put(Id, Cs, Map),
+ State#state{cmap = NewMap}.
-state__get_cs(Var, #state{cmap = {e, ETSDict}}) ->
- ets:lookup_element(ETSDict, Var, 2);
-state__get_cs(Var, #state{cmap = {d, Dict}}) ->
- dict:fetch(Var, Dict).
+state__get_cs(Var, #state{cmap = Map}) ->
+ maps:get(Var, Map).
state__is_self_rec(Fun, #state{self_rec = SelfRec}) ->
not (SelfRec =:= 'false') andalso is_equal(Fun, SelfRec).
@@ -2856,15 +2898,12 @@ state__store_funs(Vars0, Funs0, #state{fun_map = Map} = State) ->
debug_make_name_map(Vars0, Funs0),
Vars = mk_var_list(Vars0),
Funs = mk_var_list(Funs0),
- NewMap = lists:foldl(fun({Var, Fun}, MP) -> orddict:store(Var, Fun, MP) end,
+ NewMap = lists:foldl(fun({Var, Fun}, MP) -> maps:put(Fun, Var, MP) end,
Map, lists:zip(Vars, Funs)),
State#state{fun_map = NewMap}.
state__get_rec_var(Fun, #state{fun_map = Map}) ->
- case [V || {V, FV} <- Map, FV =:= Fun] of
- [Var] -> {ok, Var};
- [] -> error
- end.
+ maps:find(Fun, Map).
state__finalize(State) ->
State1 = enumerate_constraints(State),
@@ -2884,8 +2923,7 @@ mk_constraint(Lhs, Op, Rhs) ->
case t_is_any(Lhs) orelse constraint_opnd_is_any(Rhs) of
false ->
Deps = find_constraint_deps([Lhs, Rhs]),
- C0 = mk_constraint_1(Lhs, Op, Rhs),
- C = C0#constraint{deps = Deps},
+ C = mk_constraint_1(Lhs, Op, Rhs, Deps),
case Deps =:= [] of
true ->
%% This constraint is constant. Solve it immediately.
@@ -2903,8 +2941,7 @@ mk_constraint(Lhs, Op, Rhs) ->
end.
mk_constraint_any(Op) ->
- C = mk_constraint_1(t_any(), Op, t_any()),
- C#constraint{deps = []}.
+ mk_constraint_1(t_any(), Op, t_any(), []).
%% the following function is used so that we do not call
%% erl_types:t_is_any/1 with a term other than an erl_type()
@@ -2933,13 +2970,13 @@ mk_fun_var(Fun, Types) ->
-endif.
--spec get_deps(constr()) -> [dep()].
+-spec get_deps(constr()) -> deps().
get_deps(#constraint{deps = D}) -> D;
get_deps(#constraint_list{deps = D}) -> D;
get_deps(#constraint_ref{deps = D}) -> D.
--spec find_constraint_deps([fvar_or_type()]) -> [dep()].
+-spec find_constraint_deps([fvar_or_type()]) -> deps().
find_constraint_deps(List) ->
ordsets:from_list(find_constraint_deps(List, [])).
@@ -2952,12 +2989,12 @@ find_constraint_deps([Type|Tail], Acc) ->
find_constraint_deps([], Acc) ->
lists:flatten(Acc).
-mk_constraint_1(Lhs, eq, Rhs) when Lhs < Rhs ->
- #constraint{lhs = Lhs, op = eq, rhs = Rhs};
-mk_constraint_1(Lhs, eq, Rhs) ->
- #constraint{lhs = Rhs, op = eq, rhs = Lhs};
-mk_constraint_1(Lhs, Op, Rhs) ->
- #constraint{lhs = Lhs, op = Op, rhs = Rhs}.
+mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs ->
+ #constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps};
+mk_constraint_1(Lhs, eq, Rhs, Deps) ->
+ #constraint{lhs = Rhs, op = eq, rhs = Lhs, deps = Deps};
+mk_constraint_1(Lhs, Op, Rhs, Deps) ->
+ #constraint{lhs = Lhs, op = Op, rhs = Rhs, deps = Deps}.
mk_constraints([Lhs|LhsTail], Op, [Rhs|RhsTail]) ->
[mk_constraint(Lhs, Op, Rhs) |
@@ -2972,13 +3009,24 @@ mk_constraint_ref(Id, Deps) ->
mk_constraint_list(Type, List) ->
List1 = ordsets:from_list(lift_lists(Type, List)),
- List2 = ordsets:filter(fun(X) -> get_deps(X) =/= [] end, List1),
- Deps = calculate_deps(List2),
+ case Type of
+ conj ->
+ List2 = ordsets:filter(fun(X) -> get_deps(X) =/= [] end, List1),
+ mk_constraint_list_cont(Type, List2);
+ disj ->
+ case lists:any(fun(X) -> get_deps(X) =:= [] end, List1) of
+ true -> mk_constraint_list_cont(Type, [mk_constraint_any(eq)]);
+ false -> mk_constraint_list_cont(Type, List1)
+ end
+ end.
+
+mk_constraint_list_cont(Type, List) ->
+ Deps = calculate_deps(List),
case Deps =:= [] of
true -> #constraint_list{type = conj,
list = [mk_constraint_any(eq)],
deps = []};
- false -> #constraint_list{type = Type, list = List2, deps = Deps}
+ false -> #constraint_list{type = Type, list = List, deps = Deps}
end.
lift_lists(Type, List) ->
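
The conj/disj split introduced above is worth spelling out: a constraint whose dependency set is empty was already solved as a constant when it was built (see mk_constraint/3 earlier in this hunk), so a conjunction can simply drop such members, while a disjunction containing one is presumably satisfiable as a whole and collapses to the single trivial constraint. A minimal stand-alone sketch of that filtering idea follows; the module name and the {constraint, Deps} representation are invented for illustration and stand in for the real #constraint{} records, get_deps/1 and mk_constraint_any(eq).

-module(constraint_list_demo).   %% hypothetical illustration, not dialyzer code
-export([simplify/2]).

%% Constraints are modelled here as {constraint, Deps} pairs.
simplify(conj, Cs) ->
    %% dependency-free members are constants solved at construction; drop them
    [C || {constraint, Deps} = C <- Cs, Deps =/= []];
simplify(disj, Cs) ->
    %% one already-satisfied branch satisfies the whole disjunction,
    %% so it collapses to a single trivial constraint
    case lists:any(fun({constraint, Deps}) -> Deps =:= [] end, Cs) of
        true  -> [{constraint, []}];
        false -> Cs
    end.
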
@@ -3176,18 +3224,11 @@ order_fun_constraints([], Funs, Acc, State) ->
update_masks(C, Masks) ->
C#constraint_list{masks = Masks}.
--define(VARS_LIMIT, 50).
-
calculate_masks([C|Cs], I, L0) ->
calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0);
calculate_masks([], _I, L) ->
M = family(L),
- case length(M) > ?VARS_LIMIT of
- true ->
- {d, dict:from_list(M)};
- false ->
- M
- end.
+ maps:from_list(M).
%% ============================================================================
%%
@@ -3260,6 +3301,15 @@ find_constraint(Tuple, [#constraint_list{list = List}|Cs]) ->
find_constraint(Tuple, [_|Cs]) ->
find_constraint(Tuple, Cs).
+-spec fold_literal_maybe_match(cerl:cerl(), state()) -> cerl:cerl().
+
+fold_literal_maybe_match(Tree0, State) ->
+ Tree1 = cerl:fold_literal(Tree0),
+ case state__is_in_match(State) of
+ false -> Tree1;
+ true -> dialyzer_utils:refold_pattern(Tree1)
+ end.
+
lookup_record(Records, Tag, Arity) ->
case erl_types:lookup_record(Tag, Arity, Records) of
{ok, Fields} ->
@@ -3306,7 +3356,7 @@ join_chars([H|T], Sep) ->
[H|[[Sep,X] || X <- T]].
debug_lookup_name(Var) ->
- case dict:find(t_var_name(Var), get(dialyzer_typesig_map)) of
+ case maps:find(t_var_name(Var), get(dialyzer_typesig_map)) of
error -> Var;
{ok, Name} -> Name
end.
@@ -3316,7 +3366,7 @@ debug_lookup_name(Var) ->
debug_make_name_map(Vars, Funs) ->
Map = get(dialyzer_typesig_map),
NewMap =
- if Map =:= undefined -> debug_make_name_map(Vars, Funs, dict:new());
+ if Map =:= undefined -> debug_make_name_map(Vars, Funs, maps:new());
true -> debug_make_name_map(Vars, Funs, Map)
end,
put(dialyzer_typesig_map, NewMap).
@@ -3324,7 +3374,7 @@ debug_make_name_map(Vars, Funs) ->
debug_make_name_map([Var|VarLeft], [Fun|FunLeft], Map) ->
Name = {cerl:fname_id(Var), cerl:fname_arity(Var)},
FunLabel = cerl_trees:get_label(Fun),
- debug_make_name_map(VarLeft, FunLeft, dict:store(FunLabel, Name, Map));
+ debug_make_name_map(VarLeft, FunLeft, maps:put(FunLabel, Name, Map));
debug_make_name_map([], [], Map) ->
Map.
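
Most of the dialyzer_typesig changes above are a mechanical switch of the solver's maps from the dict module to the maps module. A minimal, self-contained sketch of the call equivalences the hunks rely on (module name invented for illustration):

-module(dict_to_maps_demo).   %% hypothetical illustration
-export([demo/0]).

demo() ->
    D0 = dict:store(k, 1, dict:new()),
    M0 = maps:put(k, 1, maps:new()),
    {ok, 1} = dict:find(k, D0),          %% dict:find  -> maps:find
    {ok, 1} = maps:find(k, M0),
    1 = dict:fetch(k, D0),               %% dict:fetch -> maps:get
    1 = maps:get(k, M0),
    [k] = dict:fetch_keys(D0),           %% dict:fetch_keys -> maps:keys
    [k] = maps:keys(M0),
    [] = dict:to_list(dict:erase(k, D0)),%% dict:erase -> maps:remove
    [] = maps:to_list(maps:remove(k, M0)),
    ok.
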
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 7fe982a992..76a5cf3d0b 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -49,6 +49,7 @@
process_record_remote_types/1,
sets_filter/2,
src_compiler_opts/0,
+ refold_pattern/1,
parallelism/0,
family/1
]).
@@ -83,7 +84,7 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
%% ----------------------------------------------------------------------------
--type abstract_code() :: [tuple()]. %% XXX: import from somewhere
+-type abstract_code() :: [erl_parse:abstract_form()].
-type comp_options() :: [compile:option()].
-type mod_or_fname() :: module() | file:filename().
-type fa() :: {atom(), arity()}.
@@ -193,15 +194,18 @@ get_core_from_abstract_code(AbstrCode, Opts) ->
%%
%% ============================================================================
+-type type_table() :: erl_types:type_table().
+-type mod_records() :: dict:dict(module(), type_table()).
+
-spec get_record_and_type_info(abstract_code()) ->
- {'ok', dict:dict()} | {'error', string()}.
+ {'ok', type_table()} | {'error', string()}.
get_record_and_type_info(AbstractCode) ->
Module = get_module(AbstractCode),
get_record_and_type_info(AbstractCode, Module, dict:new()).
--spec get_record_and_type_info(abstract_code(), module(), dict:dict()) ->
- {'ok', dict:dict()} | {'error', string()}.
+-spec get_record_and_type_info(abstract_code(), module(), type_table()) ->
+ {'ok', type_table()} | {'error', string()}.
get_record_and_type_info(AbstractCode, Module, RecDict) ->
get_record_and_type_info(AbstractCode, Module, RecDict, "nofile").
@@ -297,94 +301,118 @@ get_record_fields([], _RecDict, Acc) ->
%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
TempRecords = dialyzer_codeserver:get_temp_records(CServer),
- TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer),
- TempRecords1 = process_opaque_types0(TempRecords, TempExpTypes),
+ ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
+ Cache = erl_types:cache__new(),
+ {TempRecords1, Cache1} =
+ process_opaque_types0(TempRecords, ExpTypes, Cache),
+ %% A cache (not the field type cache) is used for speeding things up a bit.
+ VarTable = erl_types:var_table__new(),
ModuleFun =
- fun(Module, Record) ->
+ fun({Module, Record}, C0) ->
RecordFun =
- fun(Key, Value) ->
+ fun({Key, Value}, C2) ->
case Key of
{record, Name} ->
FieldFun =
- fun(Arity, Fields) ->
+ fun({Arity, Fields}, C4) ->
Site = {record, {Module, Name, Arity}},
- [{FieldName, Field,
- erl_types:t_from_form(Field,
- TempExpTypes,
- Site,
- TempRecords1)}
- || {FieldName, Field, _} <- Fields]
+ {Fields1, C7} =
+ lists:mapfoldl(fun({FieldName, Field, _}, C5) ->
+ {FieldT, C6} =
+ erl_types:t_from_form
+ (Field, ExpTypes, Site,
+ TempRecords1, VarTable,
+ C5),
+ {{FieldName, Field, FieldT}, C6}
+ end, C4, Fields),
+ {{Arity, Fields1}, C7}
end,
{FileLine, Fields} = Value,
- {FileLine, orddict:map(FieldFun, Fields)};
- _Other -> Value
+ {FieldsList, C3} =
+ lists:mapfoldl(FieldFun, C2, orddict:to_list(Fields)),
+ {{Key, {FileLine, orddict:from_list(FieldsList)}}, C3};
+ _Other -> {{Key, Value}, C2}
end
end,
- dict:map(RecordFun, Record)
+ {RecordList, C1} =
+ lists:mapfoldl(RecordFun, C0, dict:to_list(Record)),
+ {{Module, dict:from_list(RecordList)}, C1}
end,
- NewRecords = dict:map(ModuleFun, TempRecords1),
- ok = check_record_fields(NewRecords, TempExpTypes),
- CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
- dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1).
+ {NewRecordsList, C1} =
+ lists:mapfoldl(ModuleFun, Cache1, dict:to_list(TempRecords1)),
+ NewRecords = dict:from_list(NewRecordsList),
+ _C8 = check_record_fields(NewRecords, ExpTypes, C1),
+ dialyzer_codeserver:finalize_records(NewRecords, CServer).
%% erl_types:t_from_form() substitutes the declaration of opaque types
%% for the expanded type in some cases. To make sure the initial type,
%% any(), is not used, the expansion is done twice.
%% XXX: Recursive opaque types are not handled well.
-process_opaque_types0(TempRecords0, TempExpTypes) ->
- TempRecords1 = process_opaque_types(TempRecords0, TempExpTypes),
- process_opaque_types(TempRecords1, TempExpTypes).
+process_opaque_types0(TempRecords0, TempExpTypes, Cache) ->
+ {TempRecords1, NewCache} =
+ process_opaque_types(TempRecords0, TempExpTypes, Cache),
+ process_opaque_types(TempRecords1, TempExpTypes, NewCache).
-process_opaque_types(TempRecords, TempExpTypes) ->
+process_opaque_types(TempRecords, TempExpTypes, Cache) ->
+ VarTable = erl_types:var_table__new(),
ModuleFun =
- fun(Module, Record) ->
+ fun({Module, Record}, C0) ->
RecordFun =
- fun(Key, Value) ->
+ fun({Key, Value}, C2) ->
case Key of
{opaque, Name, NArgs} ->
{{_Module, _FileLine, Form, _ArgNames}=F, _Type} = Value,
Site = {type, {Module, Name, NArgs}},
- Type = erl_types:t_from_form(Form, TempExpTypes, Site,
- TempRecords),
- {F, Type};
- _Other -> Value
+ {Type, C3} =
+ erl_types:t_from_form(Form, TempExpTypes, Site,
+ TempRecords, VarTable, C2),
+ {{Key, {F, Type}}, C3};
+ _Other -> {{Key, Value}, C2}
end
end,
- dict:map(RecordFun, Record)
+ {RecordList, C1} =
+ lists:mapfoldl(RecordFun, C0, dict:to_list(Record)),
+ {{Module, dict:from_list(RecordList)}, C1}
+ %% dict:map(RecordFun, Record)
end,
- dict:map(ModuleFun, TempRecords).
+ {TempRecordList, NewCache} =
+ lists:mapfoldl(ModuleFun, Cache, dict:to_list(TempRecords)),
+ {dict:from_list(TempRecordList), NewCache}.
+ %% dict:map(ModuleFun, TempRecords).
-check_record_fields(Records, TempExpTypes) ->
+check_record_fields(Records, TempExpTypes, Cache) ->
+ VarTable = erl_types:var_table__new(),
CheckFun =
- fun({Module, Element}) ->
- CheckForm = fun(Form, Site) ->
- erl_types:t_check_record_fields(Form, TempExpTypes,
- Site, Records)
+ fun({Module, Element}, C0) ->
+ CheckForm = fun(Form, Site, C1) ->
+ erl_types:t_check_record_fields(Form, TempExpTypes,
+ Site, Records,
+ VarTable, C1)
end,
ElemFun =
- fun({Key, Value}) ->
+ fun({Key, Value}, C2) ->
case Key of
{record, Name} ->
FieldFun =
- fun({Arity, Fields}) ->
+ fun({Arity, Fields}, C3) ->
Site = {record, {Module, Name, Arity}},
- _ = [ok = CheckForm(Field, Site) ||
- {_, Field, _} <- Fields],
- ok
+ lists:foldl(fun({_, Field, _}, C4) ->
+ CheckForm(Field, Site, C4)
+ end, C3, Fields)
end,
{FileLine, Fields} = Value,
- Fun = fun() -> lists:foreach(FieldFun, Fields) end,
+ Fun = fun() -> lists:foldl(FieldFun, C2, Fields) end,
msg_with_position(Fun, FileLine);
{_OpaqueOrType, Name, NArgs} ->
Site = {type, {Module, Name, NArgs}},
{{_Module, FileLine, Form, _ArgNames}, _Type} = Value,
- Fun = fun() -> ok = CheckForm(Form, Site) end,
+ Fun = fun() -> CheckForm(Form, Site, C2) end,
msg_with_position(Fun, FileLine)
end
end,
- lists:foreach(ElemFun, dict:to_list(Element))
+ lists:foldl(ElemFun, C0, dict:to_list(Element))
end,
- lists:foreach(CheckFun, dict:to_list(Records)).
+ lists:foldl(CheckFun, Cache, dict:to_list(Records)).
msg_with_position(Fun, FileLine) ->
try Fun()
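
The record-processing hunk above keeps the dict-of-records interface but can no longer use dict:map/2, because the erl_types cache has to be threaded through every t_from_form call; hence the repeated lists:mapfoldl/3 over dict:to_list/1 followed by dict:from_list/1. A minimal sketch of that pattern, with a plain counter standing in for the cache (all names invented):

-module(mapfoldl_cache_demo).   %% hypothetical illustration
-export([square_all/2]).

%% Squares every value in Dict while threading an accumulator through the
%% traversal, the way the patch threads the erl_types cache.
square_all(Dict, Count0) ->
    F = fun({K, V}, Count) -> {{K, V * V}, Count + 1} end,
    {Pairs, Count} = lists:mapfoldl(F, Count0, dict:to_list(Dict)),
    {dict:from_list(Pairs), Count}.
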
@@ -396,7 +424,7 @@ msg_with_position(Fun, FileLine) ->
throw({error, NewMsg})
end.
--spec merge_records(dict:dict(), dict:dict()) -> dict:dict().
+-spec merge_records(mod_records(), mod_records()) -> mod_records().
merge_records(NewRecords, OldRecords) ->
dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords).
@@ -410,7 +438,7 @@ merge_records(NewRecords, OldRecords) ->
-type spec_dict() :: dict:dict().
-type callback_dict() :: dict:dict().
--spec get_spec_info(module(), abstract_code(), dict:dict()) ->
+-spec get_spec_info(module(), abstract_code(), type_table()) ->
{'ok', spec_dict(), callback_dict()} | {'error', string()}.
get_spec_info(ModName, AbstractCode, RecordsDict) ->
@@ -676,7 +704,7 @@ format_errors([]) ->
format_sig(Type) ->
format_sig(Type, dict:new()).
--spec format_sig(erl_types:erl_type(), dict:dict()) -> string().
+-spec format_sig(erl_types:erl_type(), type_table()) -> string().
format_sig(Type, RecDict) ->
"fun(" ++ Sig = lists:flatten(erl_types:t_to_string(Type, RecDict)),
@@ -753,6 +781,13 @@ pp_hook(Node, Ctxt, Cont) ->
pp_binary(Node, Ctxt, Cont);
bitstr ->
pp_segment(Node, Ctxt, Cont);
+ map ->
+ pp_map(Node, Ctxt, Cont);
+ literal ->
+ case is_map(cerl:concrete(Node)) of
+ true -> pp_map(Node, Ctxt, Cont);
+ false -> Cont(Node, Ctxt)
+ end;
_ ->
Cont(Node, Ctxt)
end.
@@ -833,6 +868,87 @@ pp_atom(Atom) ->
String = atom_to_list(cerl:atom_val(Atom)),
prettypr:text(String).
+pp_map(Node, Ctxt, Cont) ->
+ Arg = cerl:map_arg(Node),
+ Before = case cerl:is_c_map_empty(Arg) of
+ true -> prettypr:floating(prettypr:text("#{"));
+ false ->
+ prettypr:beside(Cont(Arg,Ctxt),
+ prettypr:floating(prettypr:text("#{")))
+ end,
+ prettypr:beside(
+ Before, prettypr:beside(
+ prettypr:par(seq(cerl:map_es(Node),
+ prettypr:floating(prettypr:text(",")),
+ Ctxt, Cont)),
+ prettypr:floating(prettypr:text("}")))).
+
+seq([H | T], Separator, Ctxt, Fun) ->
+ case T of
+ [] -> [Fun(H, Ctxt)];
+ _ -> [prettypr:beside(Fun(H, Ctxt), Separator)
+ | seq(T, Separator, Ctxt, Fun)]
+ end;
+seq([], _, _, _) ->
+ [prettypr:empty()].
+
+%%------------------------------------------------------------------------------
+
+-spec refold_pattern(cerl:cerl()) -> cerl:cerl().
+
+refold_pattern(Pat) ->
+ %% Avoid the churn of unfolding and refolding
+ case cerl:is_literal(Pat) andalso find_map(cerl:concrete(Pat)) of
+ true ->
+ Tree = refold_concrete_pat(cerl:concrete(Pat)),
+ PatAnn = cerl:get_ann(Pat),
+ case proplists:is_defined(label, PatAnn) of
+ %% Literals are not normally annotated with a label, but can be if, for
+ %% example, they were created by cerl:fold_literal/1.
+ true -> cerl:set_ann(Tree, PatAnn);
+ false ->
+ [{label, Label}] = cerl:get_ann(Tree),
+ cerl:set_ann(Tree, [{label, Label}|PatAnn])
+ end;
+ false -> Pat
+ end.
+
+find_map(#{}) -> true;
+find_map(Tuple) when is_tuple(Tuple) -> find_map(tuple_to_list(Tuple));
+find_map([H|T]) -> find_map(H) orelse find_map(T);
+find_map(_) -> false.
+
+refold_concrete_pat(Val) ->
+ case Val of
+ _ when is_tuple(Val) ->
+ Els = lists:map(fun refold_concrete_pat/1, tuple_to_list(Val)),
+ case lists:all(fun cerl:is_literal/1, Els) of
+ true -> cerl:abstract(Val);
+ false -> label(cerl:c_tuple_skel(Els))
+ end;
+ [H|T] ->
+ case cerl:is_literal(HP=refold_concrete_pat(H))
+ and cerl:is_literal(TP=refold_concrete_pat(T))
+ of
+ true -> cerl:abstract(Val);
+ false -> label(cerl:c_cons_skel(HP, TP))
+ end;
+ M when is_map(M) ->
+ %% Map patterns are not generated by the parser(!), but they have a
+ %% property we want, namely that they are never folded into literals.
+ %% N.B.: The key in a map pattern is an expression, *not* a pattern.
+ label(cerl:c_map_pattern([cerl:c_map_pair_exact(cerl:abstract(K),
+ refold_concrete_pat(V))
+ || {K, V} <- maps:to_list(M)]));
+ _ ->
+ cerl:abstract(Val)
+ end.
+
+label(Tree) ->
+ %% Sigh
+ Label = -erlang:unique_integer([positive]),
+ cerl:set_ann(Tree, [{label, Label}]).
+
%%------------------------------------------------------------------------------
-spec parallelism() -> integer().
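
The newly exported dialyzer_utils:refold_pattern/1 undoes cerl:fold_literal/1 for patterns that contain maps, since a map folded into a literal cannot be used as a match pattern. A small usage sketch, assuming the dialyzer application is on the code path (module name invented):

-module(refold_demo).   %% hypothetical illustration
-export([demo/0]).

demo() ->
    Lit = cerl:abstract(#{key => 1}),          %% a literal containing a map
    true = cerl:is_literal(Lit),
    Pat = dialyzer_utils:refold_pattern(Lit),  %% refolded into a map pattern
    true = cerl:is_c_map_pattern(Pat),
    Pat.
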
diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl
index 4be93c75bf..b9ab27c11d 100644
--- a/lib/dialyzer/src/dialyzer_worker.erl
+++ b/lib/dialyzer/src/dialyzer_worker.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -21,20 +21,20 @@
-module(dialyzer_worker).
--export([launch/4, sequential/4]).
+-export([launch/4]).
-export_type([worker/0]).
--type worker() :: pid(). %%opaque
+-opaque worker() :: pid().
-type mode() :: dialyzer_coordinator:mode().
-type coordinator() :: dialyzer_coordinator:coordinator().
-type init_data() :: dialyzer_coordinator:init_data().
--type result() :: dialyzer_coordinator:result().
+-type job() :: dialyzer_coordinator:job().
-record(state, {
mode :: mode(),
- job :: mfa_or_funlbl() | file:filename(),
+ job :: job(),
coordinator :: coordinator(),
init_data :: init_data(),
depends_on = [] :: list()
@@ -52,23 +52,28 @@
%%--------------------------------------------------------------------
--spec launch(mode(), [mfa_or_funlbl()], init_data(), coordinator()) -> worker().
+-spec launch(mode(), job(), init_data(), coordinator()) -> worker().
launch(Mode, Job, InitData, Coordinator) ->
State = #state{mode = Mode,
job = Job,
init_data = InitData,
coordinator = Coordinator},
- InitState =
- case Mode of
- X when X =:= 'typesig'; X =:= 'dataflow' -> initializing;
- X when X =:= 'compile'; X =:= 'warnings' -> running
- end,
- spawn_link(fun() -> loop(InitState, State) end).
+ spawn_link(fun() -> init(State) end).
%%--------------------------------------------------------------------
-loop(updating, State) ->
+init(#state{job = SCC, mode = Mode, init_data = InitData} = State) when
+ Mode =:= 'typesig'; Mode =:= 'dataflow' ->
+ DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData),
+ ?debug("Deps ~p: ~p\n",[SCC, DependsOn]),
+ loop(updating, State#state{depends_on = DependsOn});
+init(#state{mode = Mode} = State) when
+ Mode =:= 'compile'; Mode =:= 'warnings' ->
+ loop(running, State).
+
+loop(updating, #state{mode = Mode} = State) when
+ Mode =:= 'typesig'; Mode =:= 'dataflow' ->
?debug("Update: ~p\n",[State#state.job]),
NextStatus =
case waits_more_success_typings(State) of
@@ -76,16 +81,13 @@ loop(updating, State) ->
false -> running
end,
loop(NextStatus, State);
-loop(initializing, #state{job = SCC, init_data = InitData} = State) ->
- DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData),
- ?debug("Deps ~p: ~p\n",[State#state.job, DependsOn]),
- loop(updating, State#state{depends_on = DependsOn});
-loop(waiting, State) ->
+loop(waiting, #state{mode = Mode} = State) when
+ Mode =:= 'typesig'; Mode =:= 'dataflow' ->
?debug("Wait: ~p\n",[State#state.job]),
NewState = wait_for_success_typings(State),
loop(updating, NewState);
loop(running, #state{mode = 'compile'} = State) ->
- dialyzer_coordinator:wait_activation(),
+ dialyzer_coordinator:request_activation(State#state.coordinator),
?debug("Compile: ~s\n",[State#state.job]),
Result =
case start_compilation(State) of
@@ -97,7 +99,7 @@ loop(running, #state{mode = 'compile'} = State) ->
end,
report_to_coordinator(Result, State);
loop(running, #state{mode = 'warnings'} = State) ->
- dialyzer_coordinator:wait_activation(),
+ dialyzer_coordinator:request_activation(State#state.coordinator),
?debug("Warning: ~s\n",[State#state.job]),
Result = collect_warnings(State),
report_to_coordinator(Result, State);
@@ -169,22 +171,3 @@ continue_compilation(Label, Data) ->
collect_warnings(#state{job = Job, init_data = InitData}) ->
dialyzer_succ_typings:collect_warnings(Job, InitData).
-
-%%------------------------------------------------------------------------------
-
--type extra() :: label() | 'unused'.
-
--spec sequential(mode(), [mfa_or_funlbl()], init_data(), extra()) -> result().
-
-sequential('compile', Job, InitData, Extra) ->
- case dialyzer_analysis_callgraph:start_compilation(Job, InitData) of
- {ok, EstimatedSize, Data} ->
- {EstimatedSize, continue_compilation(Extra, Data)};
- {error, _Reason} = Error -> {0, Error}
- end;
-sequential('typesig', Job, InitData, _Extra) ->
- dialyzer_succ_typings:find_succ_types_for_scc(Job, InitData);
-sequential('dataflow', Job, InitData, _Extra) ->
- dialyzer_succ_typings:refine_one_module(Job, InitData);
-sequential('warnings', Job, InitData, _Extra) ->
- dialyzer_succ_typings:collect_warnings(Job, InitData).
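
The dialyzer_worker rewrite above folds the old 'initializing' loop state into init/1, so a worker's mode determines its first state at spawn time and the unused sequential/4 entry point disappears. A toy sketch of the resulting state machine, with all names invented and dependency handling elided:

-module(worker_demo).   %% hypothetical illustration
-export([launch/2]).

launch(Mode, Job) ->
    spawn_link(fun() -> init(Mode, Job) end).

init(Mode, Job) when Mode =:= typesig; Mode =:= dataflow ->
    Deps = [],                        %% would come from find_depends_on/2
    loop(updating, Mode, Job, Deps);
init(Mode, Job) when Mode =:= compile; Mode =:= warnings ->
    loop(running, Mode, Job, []).

loop(updating, Mode, Job, [])   -> loop(running, Mode, Job, []);
loop(updating, Mode, Job, Deps) -> loop(waiting, Mode, Job, Deps);
loop(waiting, Mode, Job, _Deps) ->
    %% a real worker would block here until its dependencies report back
    loop(updating, Mode, Job, []);
loop(running, _Mode, Job, _Deps) ->
    {done, Job}.
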