Diffstat (limited to 'lib/dialyzer')
-rw-r--r--  lib/dialyzer/doc/src/dialyzer.xml | 102
-rw-r--r--  lib/dialyzer/doc/src/notes.xml | 6
-rw-r--r--  lib/dialyzer/src/Makefile | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_cl_parse.erl | 13
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl | 9
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl | 250
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.erl | 31
-rw-r--r--  lib/dialyzer/src/dialyzer_races.erl | 24
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl | 22
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl | 2
-rw-r--r--  lib/dialyzer/test/opaque_SUITE_data/results/simple | 19
-rw-r--r--  lib/dialyzer/test/opaque_SUITE_data/results/timer | 2
-rw-r--r--  lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl | 60
-rw-r--r--  lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl | 4
-rw-r--r--  lib/dialyzer/test/options1_SUITE_data/results/compiler | 4
-rw-r--r--  lib/dialyzer/test/r9c_SUITE_data/results/asn1 | 2
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/confusing_record_warning | 3
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/literals | 14
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/maps_difftype | 3
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/my_sofs | 4
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/record_pat | 2
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/record_test | 2
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning | 3
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/common_types.hrl | 6
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/config.hrl | 148
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs.hrl | 9
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_gc.hrl | 17
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_master.erl | 531
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_tag.hrl | 19
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/ddfs_master/gs_util.hrl | 16
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/fun2ms.erl | 21
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/literals.erl | 33
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/maps_difftype.erl | 11
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/relevant_record_warning.erl (renamed from lib/dialyzer/test/small_SUITE_data/src/confusing_record_warning.erl) | 6
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/remote_field.erl | 11
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/remote_field2.erl | 17
36 files changed, 1152 insertions, 276 deletions
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
index a92b890a80..4e26a9e95e 100644
--- a/lib/dialyzer/doc/src/dialyzer.xml
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2014</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -50,33 +50,31 @@
<p>Dialyzer also has a command line version for automated use. Below is a
brief description of the list of its options. The same information can
be obtained by writing</p>
- <code type="none"><![CDATA[
- dialyzer --help
- ]]></code>
+ <code type="none">
+ dialyzer --help</code>
<p>in a shell. Please refer to the GUI description for more details on
the operation of Dialyzer.</p>
<p>The exit status of the command line version is:</p>
- <code type="none"><![CDATA[
+ <code type="none">
0 - No problems were encountered during the analysis and no
warnings were emitted.
1 - Problems were encountered during the analysis.
- 2 - No problems were encountered, but warnings were emitted.
- ]]></code>
+ 2 - No problems were encountered, but warnings were emitted.</code>
<p>Usage:</p>
- <code type="none"><![CDATA[
+ <code type="none">
dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
[-pa dir]* [--plt plt] [--plts plt*] [-Ddefine]*
- [-I include_dir]* [--output_plt file] [-Wwarn]*
+ [-I include_dir]* [--output_plt file] [-Wwarn]* [--raw]
[--src] [--gui] [files_or_dirs] [-r dirs]
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
- [--no_native] [--fullpath]
- ]]></code>
+ [--dump_callgraph file] [--no_native] [--fullpath]
+ [--statistics]</code>
<p>Options:</p>
<taglist>
<tag><c><![CDATA[files_or_dirs]]></c> (for backwards compatibility also
- as: <c><![CDATA[-c files_or_dirs]]></c></tag>
+ as: <c><![CDATA[-c files_or_dirs]]></c>)</tag>
<item>Use Dialyzer from the command line to detect defects in the
specified files or directories containing <c><![CDATA[.erl]]></c> or
<c><![CDATA[.beam]]></c> files, depending on the type of the
@@ -88,16 +86,14 @@
analysis.</item>
<tag><c><![CDATA[--apps applications]]></c></tag>
<item>Option typically used when building or modifying a plt as in:
- <code type="none"><![CDATA[
- dialyzer --build_plt --apps erts kernel stdlib mnesia ...
- ]]></code>
+ <code type="none">
+ dialyzer --build_plt --apps erts kernel stdlib mnesia ...</code>
to conveniently refer to library applications corresponding to the
Erlang/OTP installation. However, the option is general and can also
be used during analysis in order to refer to Erlang/OTP applications.
In addition, file or directory names can also be included, as in:
- <code type="none"><![CDATA[
- dialyzer --apps inets ssl ./ebin ../other_lib/ebin/my_module.beam
- ]]></code></item>
+ <code type="none">
+ dialyzer --apps inets ssl ./ebin ../other_lib/ebin/my_module.beam</code></item>
<tag><c><![CDATA[-o outfile]]></c> (or
<c><![CDATA[--output outfile]]></c>)</tag>
<item>When using Dialyzer from the command line, send the analysis
@@ -129,19 +125,16 @@
that the plts are disjoint (i.e., do not have any module
appearing in more than one plt).
The plts are created in the usual way:
- <code type="none"><![CDATA[
+ <code type="none">
dialyzer --build_plt --output_plt plt_1 files_to_include
...
- dialyzer --build_plt --output_plt plt_n files_to_include
- ]]></code>
+ dialyzer --build_plt --output_plt plt_n files_to_include</code>
and then can be used in either of the following ways:
- <code type="none"><![CDATA[
- dialyzer files_to_analyze --plts plt_1 ... plt_n
- ]]></code>
+ <code type="none">
+ dialyzer files_to_analyze --plts plt_1 ... plt_n</code>
or:
- <code type="none"><![CDATA[
- dialyzer --plts plt_1 ... plt_n -- files_to_analyze
- ]]></code>
+ <code type="none">
+ dialyzer --plts plt_1 ... plt_n -- files_to_analyze</code>
(Note the -- delimiter in the second case)</item>
<tag><c><![CDATA[-Wwarn]]></c></tag>
<item>A family of options which selectively turn on/off warnings
@@ -220,8 +213,6 @@
<item>Suppress warnings for unused functions.</item>
<tag><c><![CDATA[-Wno_improper_lists]]></c></tag>
<item>Suppress warnings for construction of improper lists.</item>
- <tag><c><![CDATA[-Wno_tuple_as_fun]]></c></tag>
- <item>Suppress warnings for using tuples instead of funs.</item>
<tag><c><![CDATA[-Wno_fun_app]]></c></tag>
<item>Suppress warnings for fun applications that will fail.</item>
<tag><c><![CDATA[-Wno_match]]></c></tag>
@@ -229,9 +220,16 @@
match.</item>
<tag><c><![CDATA[-Wno_opaque]]></c></tag>
<item>Suppress warnings for violations of opaqueness of data types.</item>
+ <tag><c><![CDATA[-Wno_fail_call]]></c></tag>
+ <item>Suppress warnings for failing calls.</item>
+ <tag><c><![CDATA[-Wno_contracts]]></c></tag>
+ <item>Suppress warnings about invalid contracts.</item>
<tag><c><![CDATA[-Wno_behaviours]]></c></tag>
<item>Suppress warnings about behaviour callbacks which drift from the
published recommended interfaces.</item>
+ <tag><c><![CDATA[-Wno_undefined_callbacks]]></c></tag>
+ <item>Suppress warnings about behaviours that have no
+ <c>-callback</c> attributes for their callbacks.</item>
<tag><c><![CDATA[-Wunmatched_returns]]></c>***</tag>
<item>Include warnings for function calls which ignore a structured return
value or do not match against one of many possible return
@@ -278,13 +276,13 @@
</type>
<desc>
<p>Dialyzer GUI version.</p>
- <code type="none"><![CDATA[
+ <code type="none">
OptList :: [Option]
Option :: {files, [Filename :: string()]}
| {files_rec, [DirName :: string()]}
| {defines, [{Macro: atom(), Value : term()}]}
- | {from, src_code | byte_code} %% Defaults to byte_code
- | {init_plt, FileName :: string()} %% If changed from default
+ | {from, src_code | byte_code} %% Defaults to byte_code
+ | {init_plt, FileName :: string()} %% If changed from default
| {plts, [FileName :: string()]} %% If changed from default
| {include_dirs, [DirName :: string()]}
| {output_file, FileName :: string()}
@@ -304,14 +302,15 @@ WarnOpts :: no_return
| no_match
| no_opaque
| no_fail_call
+ | no_contracts
+ | no_behaviours
+ | no_undefined_callbacks
+ | unmatched_returns
| error_handling
| race_conditions
- | behaviours
- | unmatched_returns
| overspecs
| underspecs
- | specdiffs
- ]]></code>
+ | specdiffs</code>
</desc>
</func>
<func>
@@ -323,17 +322,30 @@ WarnOpts :: no_return
</type>
<desc>
<p>Dialyzer command line version.</p>
- <code type="none"><![CDATA[
+ <code type="none">
Warnings :: [{Tag, Id, Msg}]
-Tag :: 'warn_return_no_exit' | 'warn_return_only_exit'
- | 'warn_not_called' | 'warn_non_proper_list'
- | 'warn_fun_app' | 'warn_matching'
- | 'warn_failing_call' | 'warn_contract_types'
- | 'warn_contract_syntax' | 'warn_contract_not_equal'
- | 'warn_contract_subtype' | 'warn_contract_supertype'
+Tag :: 'warn_behaviour'
+ | 'warn_bin_construction'
+ | 'warn_callgraph'
+ | 'warn_contract_not_equal'
+ | 'warn_contract_range'
+ | 'warn_contract_subtype'
+ | 'warn_contract_supertype'
+ | 'warn_contract_syntax'
+ | 'warn_contract_types'
+ | 'warn_failing_call'
+ | 'warn_fun_app'
+ | 'warn_matching'
+ | 'warn_non_proper_list'
+ | 'warn_not_called'
+ | 'warn_opaque'
+ | 'warn_race_condition'
+ | 'warn_return_no_exit'
+ | 'warn_return_only_exit'
+ | 'warn_umatched_return'
+ | 'warn_undefined_callbacks'
Id = {File :: string(), Line :: integer()}
-Msg = msg() -- Undefined
-]]></code>
+Msg = msg() -- Undefined</code>
</desc>
</func>
<func>
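
[Illustrative sketch, not part of the patch: the documentation hunk above lists the options accepted by the library entry points and the warning tags they return. Driving Dialyzer programmatically with some of the documented warning options might look like the following, assuming an up-to-date default PLT and a hypothetical "ebin" directory of compiled modules.]

    %% Sketch only: dialyzer:run/1 takes an option list like the OptList shown
    %% above and returns warnings as {Tag, {File, Line}, Msg} tuples;
    %% dialyzer:format_warning/1 renders one warning as a printable string.
    -module(dialyzer_run_sketch).        % hypothetical module name
    -export([run_check/0]).

    run_check() ->
        Warnings = dialyzer:run([{files_rec, ["ebin"]},        % hypothetical path
                                 {from, byte_code},            % the default
                                 {warnings, [unmatched_returns,
                                             no_undefined_callbacks]}]),
        lists:foreach(fun(W) -> io:put_chars(dialyzer:format_warning(W)) end,
                      Warnings).
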
diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml
index 05baa93557..99ec0caddc 100644
--- a/lib/dialyzer/doc/src/notes.xml
+++ b/lib/dialyzer/doc/src/notes.xml
@@ -167,9 +167,9 @@
"hi" := V1, a := V2, b := V3} = M2. % match keys with
values</c></item> </taglist></p>
<p>
- For information on how to use Maps please see the
- <seealso marker="doc/reference_manual:maps">Reference
- Manual</seealso>.</p>
+ For information on how to use Maps please see Map Expressions in the
+ <seealso marker="doc/reference_manual:expressions#map_expressions">
+ Reference Manual</seealso>.</p>
<p>
The current implementation is without the following
features: <taglist> <item>No variable keys</item>
diff --git a/lib/dialyzer/src/Makefile b/lib/dialyzer/src/Makefile
index d7265ba31a..91fbdca5bd 100644
--- a/lib/dialyzer/src/Makefile
+++ b/lib/dialyzer/src/Makefile
@@ -88,7 +88,7 @@ APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
ifeq ($(NATIVE_LIBS_ENABLED),yes)
ERL_COMPILE_FLAGS += +native
endif
-ERL_COMPILE_FLAGS += +warn_exported_vars +warn_unused_import +warn_untyped_record +warn_missing_spec +warnings_as_errors
+ERL_COMPILE_FLAGS += +warn_export_vars +warn_unused_import +warn_untyped_record +warn_missing_spec +warnings_as_errors
# ----------------------------------------------------
# Targets
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index db27b2037d..04ce0e8bc3 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -357,12 +357,13 @@ help_warnings() ->
help_message() ->
S = "Usage: dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
[-pa dir]* [--plt plt] [--plts plt*] [-Ddefine]*
- [-I include_dir]* [--output_plt file] [-Wwarn]*
+ [-I include_dir]* [--output_plt file] [-Wwarn]* [--raw]
[--src] [--gui] [files_or_dirs] [-r dirs]
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
- [--no_native] [--fullpath] [--statistics]
+ [--dump_callgraph file] [--no_native] [--fullpath]
+ [--statistics]
Options:
files_or_dirs (for backwards compatibility also as: -c files_or_dirs)
Use Dialyzer from the command line to detect defects in the
@@ -495,14 +496,16 @@ warning_options_msg() ->
Suppress warnings for unused functions.
-Wno_improper_lists
Suppress warnings for construction of improper lists.
- -Wno_tuple_as_fun
- Suppress warnings for using tuples instead of funs.
-Wno_fun_app
Suppress warnings for fun applications that will fail.
-Wno_match
Suppress warnings for patterns that are unused or cannot match.
-Wno_opaque
Suppress warnings for violations of opaqueness of data types.
+ -Wno_fail_call
+ Suppress warnings for failing calls.
+ -Wno_contracts
+ Suppress warnings about invalid contracts.
-Wno_behaviours
Suppress warnings about behaviour callbacks which drift from the published
recommended interfaces.
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 283031eb9a..1d2dfc7b2d 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -752,14 +752,7 @@ is_remote_types_related(Contract, CSig, Sig, RecDict) ->
t_from_forms_without_remote([{FType, []}], RecDict) ->
Type0 = erl_types:t_from_form(FType, RecDict),
- Map =
- fun(Type) ->
- case erl_types:t_is_remote(Type) of
- true -> erl_types:t_none();
- false -> Type
- end
- end,
- {ok, erl_types:t_map(Map, Type0)};
+ {ok, erl_types:subst_all_remote(Type0, erl_types:t_none())};
t_from_forms_without_remote([{_FType, _Constrs}], _RecDict) ->
%% 'When' constraints
unsupported;
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index e0873b17f8..92aab68ad6 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -53,7 +53,7 @@
t_bitstr/0, t_bitstr/2, t_bitstr_concat/1, t_bitstr_match/2,
t_cons/0, t_cons/2, t_cons_hd/2, t_cons_tl/2,
t_contains_opaque/2,
- t_find_opaque_mismatch/2, t_float/0, t_from_range/2, t_from_term/1,
+ t_find_opaque_mismatch/3, t_float/0, t_from_range/2, t_from_term/1,
t_fun/0, t_fun/2, t_fun_args/1, t_fun_args/2, t_fun_range/1,
t_fun_range/2, t_integer/0, t_integers/1,
t_is_any/1, t_is_atom/1, t_is_atom/2, t_is_any_atom/3,
@@ -136,11 +136,10 @@
get_warnings(Tree, Plt, Callgraph, Records, NoWarnUnused) ->
State1 = analyze_module(Tree, Plt, Callgraph, Records, true),
- State2 = find_mismatched_record_patterns(Tree, State1),
- State3 =
- state__renew_warnings(state__get_warnings(State2, NoWarnUnused), State2),
- State4 = state__get_race_warnings(State3),
- {State4#state.warnings, state__all_fun_types(State4)}.
+ State2 =
+ state__renew_warnings(state__get_warnings(State1, NoWarnUnused), State1),
+ State3 = state__get_race_warnings(State2),
+ {State3#state.warnings, state__all_fun_types(State3)}.
-spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(),
dialyzer_callgraph:callgraph(),
@@ -277,13 +276,8 @@ traverse(Tree, Map, State) ->
{State1, Map1} = lists:foldl(FoldFun, {State, Map}, Defs),
traverse(Body, Map1, State1);
literal ->
- %% This is needed for finding records
- case cerl:unfold_literal(Tree) of
- Tree ->
- Type = literal_type(Tree),
- {State, Map, Type};
- NewTree -> traverse(NewTree, Map, State)
- end;
+ Type = literal_type(Tree),
+ {State, Map, Type};
module ->
handle_module(Tree, Map, State);
primop ->
@@ -1110,7 +1104,7 @@ handle_tuple(Tree, Map, State) ->
%% Let's find out if this is a record
case Elements of
[Tag|Left] ->
- case cerl:is_c_atom(Tag) of
+ case cerl:is_c_atom(Tag) andalso is_literal_record(Tree) of
true ->
TagVal = cerl:atom_val(Tag),
case state__lookup_record(TagVal, length(Left), State1) of
@@ -1240,15 +1234,10 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
false ->
{State1, Map, t_none(), ArgType0};
true ->
- PatString =
- case ErrorType of
- bind -> format_patterns(Pats);
- record -> format_patterns(Pats);
- opaque -> format_patterns(NewPats)
- end,
{Msg, Force} =
case t_is_none(ArgType0) of
true ->
+ PatString = format_patterns(Pats),
PatTypes = [PatString, format_type(OrigArgType, State1)],
%% See if this is covered by an earlier clause or if it
%% simply cannot match
@@ -1298,6 +1287,12 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
false ->
true
end,
+ PatString =
+ case ErrorType of
+ bind -> format_patterns(Pats);
+ record -> format_patterns(NewPats);
+ opaque -> format_patterns(NewPats)
+ end,
PatTypes = case ErrorType of
bind -> [PatString, format_type(ArgType0, State1)];
record -> [PatString, format_type(Type, State1)];
@@ -1444,7 +1439,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
BinType = t_inf(t_bitstr(), Type, Opaques),
case t_is_none(BinType) of
true ->
- case t_find_opaque_mismatch(t_bitstr(), Type) of
+ case t_find_opaque_mismatch(t_bitstr(), Type, Opaques) of
{ok, T1, T2} ->
bind_error([Pat], T1, T2, opaque);
error ->
@@ -1460,7 +1455,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
Cons = t_inf(Type, t_cons(), Opaques),
case t_is_none(Cons) of
true ->
- bind_opaque_pats(t_cons(), Type, Pat, Map, State, Rev);
+ bind_opaque_pats(t_cons(), Type, Pat, State);
false ->
{Map1, [HdType, TlType]} =
bind_pat_vars([cerl:cons_hd(Pat), cerl:cons_tl(Pat)],
@@ -1473,7 +1468,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
Literal = literal_type(Pat),
case t_is_none(t_inf(Literal, Type, Opaques)) of
true ->
- bind_opaque_pats(Literal, Type, Pat, Map, State, Rev);
+ bind_opaque_pats(Literal, Type, Pat, State);
false -> {Map, Literal}
end;
map ->
@@ -1484,7 +1479,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
case Es of
[] -> {false, t_tuple([])};
[Tag|Left] ->
- case cerl:is_c_atom(Tag) of
+ case cerl:is_c_atom(Tag) andalso is_literal_record(Pat) of
true ->
TagAtom = cerl:atom_val(Tag),
case state__lookup_record(TagAtom, length(Left), State) of
@@ -1500,7 +1495,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
Tuple = t_inf(Prototype, Type, Opaques),
case t_is_none(Tuple) of
true ->
- bind_opaque_pats(Prototype, Type, Pat, Map, State, Rev);
+ bind_opaque_pats(Prototype, Type, Pat, State);
false ->
SubTuples = t_tuple_subtypes(Tuple, Opaques),
%% Need to call the top function to get the try-catch wrapper
@@ -1549,7 +1544,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
VarType2 = t_inf(VarType1, Type, Opaques),
case t_is_none(VarType2) of
true ->
- case t_find_opaque_mismatch(VarType1, Type) of
+ case t_find_opaque_mismatch(VarType1, Type, Opaques) of
{ok, T1, T2} ->
bind_error([Pat], T1, T2, opaque);
error ->
@@ -1631,21 +1626,26 @@ bind_bin_segs([Seg|Segs], BinType, Acc, Map, State) ->
bind_bin_segs([], _BinType, Acc, Map, _State) ->
{Map, lists:reverse(Acc)}.
-bind_error(Pats, Type, OpaqueType, Error) ->
+bind_error(Pats, Type, OpaqueType, Error0) ->
+ Error = case {Error0, Pats} of
+ {bind, [Pat]} ->
+ case is_literal_record(Pat) of
+ true -> record;
+ false -> Error0
+ end;
+ _ -> Error0
+ end,
throw({error, Error, Pats, Type, OpaqueType}).
-bind_opaque_pats(GenType, Type, Pat, Map, State, Rev) ->
- case t_find_opaque_mismatch(GenType, Type) of
+-spec bind_opaque_pats(type(), type(), cerl:c_literal(), state()) ->
+ no_return().
+
+bind_opaque_pats(GenType, Type, Pat, State) ->
+ case t_find_opaque_mismatch(GenType, Type, State#state.opaques) of
{ok, T1, T2} ->
- case erl_types:is_opaque_type(T2, State#state.opaques) of
- true ->
- NewType = erl_types:t_struct_from_opaque(Type, [T2]),
- {Map1, _} =
- bind_pat_vars([Pat], [NewType], [], Map, State, Rev),
- {Map1, T2};
- false -> bind_error([Pat], T1, T2, opaque)
- end;
- error -> bind_error([Pat], Type, t_none(), bind)
+ bind_error([Pat], T1, T2, opaque);
+ error ->
+ bind_error([Pat], Type, t_none(), bind)
end.
%%----------------------------------------
@@ -1843,9 +1843,9 @@ handle_guard_comp(Guard, Comp, Map, Env, Eval, State) ->
[Type1, Type2] = ArgTypes,
IsInt1 = t_is_integer(Type1, Opaques),
IsInt2 = t_is_integer(Type2, Opaques),
- case {cerl:type(Arg1), cerl:type(Arg2)} of
- {literal, literal} ->
- case erlang:Comp(cerl:concrete(Arg1), cerl:concrete(Arg2)) of
+ case {type(Arg1), type(Arg2)} of
+ {{literal, Lit1}, {literal, Lit2}} ->
+ case erlang:Comp(cerl:concrete(Lit1), cerl:concrete(Lit2)) of
true when Eval =:= pos -> {Map, t_atom(true)};
true when Eval =:= dont_know -> {Map, t_atom(true)};
true when Eval =:= neg -> {Map, t_atom(true)};
@@ -1854,13 +1854,13 @@ handle_guard_comp(Guard, Comp, Map, Env, Eval, State) ->
false when Eval =:= dont_know -> {Map, t_atom(false)};
false when Eval =:= neg -> {Map, t_atom(false)}
end;
- {literal, var} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
- case bind_comp_literal_var(Arg1, Arg2, Type2, Comp, Map1, Opaques) of
+ {{literal, Lit1}, var} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
+ case bind_comp_literal_var(Lit1, Arg2, Type2, Comp, Map1, Opaques) of
error -> signal_guard_fail(Eval, Guard, ArgTypes, State);
{ok, NewMap} -> {NewMap, t_atom(true)}
end;
- {var, literal} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
- case bind_comp_literal_var(Arg2, Arg1, Type1, invert_comp(Comp),
+ {var, {literal, Lit2}} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
+ case bind_comp_literal_var(Lit2, Arg1, Type1, invert_comp(Comp),
Map1, Opaques) of
error -> signal_guard_fail(Eval, Guard, ArgTypes, State);
{ok, NewMap} -> {NewMap, t_atom(true)}
@@ -1980,15 +1980,15 @@ handle_guard_is_record(Guard, Map, Env, Eval, State) ->
handle_guard_eq(Guard, Map, Env, Eval, State) ->
[Arg1, Arg2] = cerl:call_args(Guard),
- case {cerl:type(Arg1), cerl:type(Arg2)} of
- {literal, literal} ->
- case cerl:concrete(Arg1) =:= cerl:concrete(Arg2) of
+ case {type(Arg1), type(Arg2)} of
+ {{literal, Lit1}, {literal, Lit2}} ->
+ case cerl:concrete(Lit1) =:= cerl:concrete(Lit2) of
true ->
if
Eval =:= pos -> {Map, t_atom(true)};
Eval =:= neg ->
- ArgTypes = [t_from_term(cerl:concrete(Arg1)),
- t_from_term(cerl:concrete(Arg2))],
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
signal_guard_fail(Eval, Guard, ArgTypes, State);
Eval =:= dont_know -> {Map, t_atom(true)}
end;
@@ -1997,28 +1997,28 @@ handle_guard_eq(Guard, Map, Env, Eval, State) ->
Eval =:= neg -> {Map, t_atom(false)};
Eval =:= dont_know -> {Map, t_atom(false)};
Eval =:= pos ->
- ArgTypes = [t_from_term(cerl:concrete(Arg1)),
- t_from_term(cerl:concrete(Arg2))],
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
signal_guard_fail(Eval, Guard, ArgTypes, State)
end
end;
- {literal, _} when Eval =:= pos ->
- case cerl:concrete(Arg1) of
+ {{literal, Lit1}, _} when Eval =:= pos ->
+ case cerl:concrete(Lit1) of
Atom when is_atom(Atom) ->
- bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State);
+ bind_eqeq_guard_lit_other(Guard, Lit1, Arg2, Map, Env, State);
[] ->
- bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State);
+ bind_eqeq_guard_lit_other(Guard, Lit1, Arg2, Map, Env, State);
_ ->
- bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ bind_eq_guard(Guard, Lit1, Arg2, Map, Env, Eval, State)
end;
- {_, literal} when Eval =:= pos ->
- case cerl:concrete(Arg2) of
+ {_, {literal, Lit2}} when Eval =:= pos ->
+ case cerl:concrete(Lit2) of
Atom when is_atom(Atom) ->
- bind_eqeq_guard_lit_other(Guard, Arg2, Arg1, Map, Env, State);
+ bind_eqeq_guard_lit_other(Guard, Lit2, Arg1, Map, Env, State);
[] ->
- bind_eqeq_guard_lit_other(Guard, Arg2, Arg1, Map, Env, State);
+ bind_eqeq_guard_lit_other(Guard, Lit2, Arg1, Map, Env, State);
_ ->
- bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ bind_eq_guard(Guard, Arg1, Lit2, Map, Env, Eval, State)
end;
{_, _} ->
bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
@@ -2050,13 +2050,14 @@ bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
[Arg1, Arg2] = cerl:call_args(Guard),
- case {cerl:type(Arg1), cerl:type(Arg2)} of
- {literal, literal} ->
- case cerl:concrete(Arg1) =:= cerl:concrete(Arg2) of
+ case {type(Arg1), type(Arg2)} of
+ {{literal, Lit1}, {literal, Lit2}} ->
+
+ case cerl:concrete(Lit1) =:= cerl:concrete(Lit2) of
true ->
if Eval =:= neg ->
- ArgTypes = [t_from_term(cerl:concrete(Arg1)),
- t_from_term(cerl:concrete(Arg2))],
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
signal_guard_fail(Eval, Guard, ArgTypes, State);
Eval =:= pos -> {Map, t_atom(true)};
Eval =:= dont_know -> {Map, t_atom(true)}
@@ -2065,15 +2066,15 @@ handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
if Eval =:= neg -> {Map, t_atom(false)};
Eval =:= dont_know -> {Map, t_atom(false)};
Eval =:= pos ->
- ArgTypes = [t_from_term(cerl:concrete(Arg1)),
- t_from_term(cerl:concrete(Arg2))],
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
signal_guard_fail(Eval, Guard, ArgTypes, State)
end
end;
- {literal, _} when Eval =:= pos ->
- bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State);
- {_, literal} when Eval =:= pos ->
- bind_eqeq_guard_lit_other(Guard, Arg2, Arg1, Map, Env, State);
+ {{literal, Lit1}, _} when Eval =:= pos ->
+ bind_eqeq_guard_lit_other(Guard, Lit1, Arg2, Map, Env, State);
+ {_, {literal, Lit2}} when Eval =:= pos ->
+ bind_eqeq_guard_lit_other(Guard, Lit2, Arg1, Map, Env, State);
{_, _} ->
bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
end.
@@ -3282,12 +3283,17 @@ get_file([_|Tail]) -> get_file(Tail).
is_compiler_generated(Ann) ->
lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
+is_literal_record(Tree) ->
+ Ann = cerl:get_ann(Tree),
+ lists:member(record, Ann).
+
-spec format_args([cerl:cerl()], [type()], state()) ->
nonempty_string().
format_args([], [], _State) ->
"()";
-format_args(ArgList, TypeList, State) ->
+format_args(ArgList0, TypeList, State) ->
+ ArgList = fold_literals(ArgList0),
"(" ++ format_args_1(ArgList, TypeList, State) ++ ")".
format_args_1([Arg], [Type], State) ->
@@ -3346,7 +3352,8 @@ format_cerl(Tree) ->
{ribbon, 100000} %% newlines.
]).
-format_patterns(Pats) ->
+format_patterns(Pats0) ->
+ Pats = fold_literals(Pats0),
NewPats = map_pats(cerl:c_values(Pats)),
String = format_cerl(NewPats),
case Pats of
@@ -3378,6 +3385,23 @@ map_pats(Pats) ->
end,
cerl_trees:map(Fun, Pats).
+fold_literals(TreeList) ->
+ [cerl:fold_literal(Tree) || Tree <- TreeList].
+
+type(Tree) ->
+ Folded = cerl:fold_literal(Tree),
+ case cerl:type(Folded) of
+ literal -> {literal, Folded};
+ Type -> Type
+ end.
+
+is_literal(Tree) ->
+ Folded = cerl:fold_literal(Tree),
+ case cerl:is_literal(Folded) of
+ true -> {yes, Folded};
+ false -> no
+ end.
+
parent_allows_this(FunLbl, #state{callgraph = Callgraph, plt = Plt} =State) ->
case state__is_escaping(FunLbl, State) of
false -> false; % if it isn't escaping it can't be a return value
@@ -3422,18 +3446,18 @@ find_terminals(Tree) ->
M0 = cerl:call_module(Tree),
F0 = cerl:call_name(Tree),
A = length(cerl:call_args(Tree)),
- case cerl:is_literal(M0) andalso cerl:is_literal(F0) of
- false ->
- %% We cannot make assumptions. Say that both are true.
- {true, true};
- true ->
- M = cerl:concrete(M0),
- F = cerl:concrete(F0),
+ case {is_literal(M0), is_literal(F0)} of
+ {{yes, LitM}, {yes, LitF}} ->
+ M = cerl:concrete(LitM),
+ F = cerl:concrete(LitF),
case (erl_bif_types:is_known(M, F, A)
andalso t_is_none(erl_bif_types:type(M, F, A))) of
true -> {true, false};
false -> {false, true}
- end
+ end;
+ _ ->
+ %% We cannot make assumptions. Say that both are true.
+ {true, true}
end;
'case' -> find_terminals_list(cerl:case_clauses(Tree));
'catch' -> find_terminals(cerl:catch_body(Tree));
@@ -3478,66 +3502,6 @@ find_terminals_list([], Explicit, Normal) ->
%%----------------------------------------------------------------------------
-%% If you write a record pattern in a matching that violates the
-%% definition it will never match. However, the warning is lost in the
-%% regular analysis. This after-pass catches it.
-
-find_mismatched_record_patterns(Tree, State) ->
- cerl_trees:fold(
- fun(SubTree, AccState) ->
- case cerl:is_c_clause(SubTree) of
- true -> lists:foldl(fun(P, AccState1) ->
- find_rec_warnings(P, AccState1)
- end, AccState, cerl:clause_pats(SubTree));
- false -> AccState
- end
- end, State, Tree).
-
-find_rec_warnings(Tree, State) ->
- cerl_trees:fold(
- fun(SubTree, AccState) ->
- case cerl:is_c_tuple(SubTree) of
- true -> find_rec_warnings_tuple(SubTree, AccState);
- false -> AccState
- end
- end, State, Tree).
-
-find_rec_warnings_tuple(Tree, State) ->
- Elements = cerl:tuple_es(Tree),
- {_, _, EsType} = traverse_list(Elements, map__new(), State),
- TupleType = t_tuple(EsType),
- case t_is_none(TupleType) of
- true -> State;
- false ->
- %% Let's find out if this is a record construction.
- case Elements of
- [Tag|Left] ->
- case cerl:is_c_atom(Tag) of
- true ->
- TagVal = cerl:atom_val(Tag),
- case state__lookup_record(TagVal, length(Left), State) of
- error -> State;
- {ok, Prototype} ->
- InfTupleType = t_inf(Prototype, TupleType),
- case t_is_none(InfTupleType) of
- true ->
- Msg = {record_matching,
- [format_patterns([Tree]), TagVal]},
- state__add_warning(State, ?WARN_MATCHING, Tree, Msg);
- false ->
- State
- end
- end;
- false ->
- State
- end;
- _ ->
- State
- end
- end.
-
-%%----------------------------------------------------------------------------
-
-ifdef(DEBUG_PP).
debug_pp(Tree, true) ->
io:put_chars(cerl_prettypr:format(Tree, [{hook, cerl_typean:pp_hook()}])),
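
[Illustrative sketch, not part of the patch: the dialyzer_dataflow.erl changes above drop the cerl:unfold_literal/1 pass and instead classify arguments with small helpers (type/1, is_literal/1, fold_literals/1) built on cerl:fold_literal/1, together with the 'record' annotation check in is_literal_record/1. The following is a rough reading of the folding behaviour those helpers rely on; the skeleton constructor is used so the starting node is not already a literal.]

    %% Sketch: a tuple skeleton built from literal elements reports type 'tuple';
    %% cerl:fold_literal/1 collapses it into a single c_literal node whose
    %% concrete value can then be matched directly.
    -module(cerl_fold_sketch).           % hypothetical module name
    -export([fold_literal_demo/0]).

    fold_literal_demo() ->
        Tree   = cerl:c_tuple_skel([cerl:c_atom(r1), cerl:c_int(10)]),
        tuple  = cerl:type(Tree),
        Folded = cerl:fold_literal(Tree),
        literal  = cerl:type(Folded),
        {r1, 10} = cerl:concrete(Folded).
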
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index 7070fa240d..868857d675 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -699,8 +699,7 @@ handle_add_files(#gui_state{chosen_box = ChosenBox, file_box = FileBox,
end.
handle_add_dir(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox,
- files_to_analyze = FileList,
- mode = Mode} = State) ->
+ files_to_analyze = FileList, mode = Mode} = State) ->
case wxDirPickerCtrl:getPath(DirBox) of
"" ->
State;
@@ -714,8 +713,8 @@ handle_add_dir(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox,
State#gui_state{files_to_analyze = add_files(filter_mods(NewDir1,Ext), FileList, ChosenBox, Ext)}
end.
-handle_add_rec(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox, files_to_analyze = FileList,
- mode = Mode} = State) ->
+handle_add_rec(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox,
+ files_to_analyze = FileList, mode = Mode} = State) ->
case wxDirPickerCtrl:getPath(DirBox) of
"" ->
State;
@@ -723,11 +722,11 @@ handle_add_rec(#gui_state{chosen_box = ChosenBox, dir_entry = DirBox, files_to_a
NewDir = ordsets:new(),
NewDir1 = ordsets:add_element(Dir,NewDir),
TargetDirs = ordsets:union(NewDir1, all_subdirs(NewDir1)),
- case wxRadioBox:getSelection(Mode) of
- 0 -> Ext = ".beam";
- 1-> Ext = ".erl"
- end,
- State#gui_state{files_to_analyze = add_files(filter_mods(TargetDirs,Ext), FileList, ChosenBox, Ext)}
+ Ext = case wxRadioBox:getSelection(Mode) of
+ 0 -> ".beam";
+ 1 -> ".erl"
+ end,
+ State#gui_state{files_to_analyze = add_files(filter_mods(TargetDirs, Ext), FileList, ChosenBox, Ext)}
end.
handle_file_delete(#gui_state{chosen_box = ChosenBox,
@@ -886,13 +885,10 @@ config_gui_start(State) ->
wxRadioBox:disable(State#gui_state.mode).
save_file(#gui_state{frame = Frame, warnings_box = WBox, log = Log} = State, Type) ->
- case Type of
- warnings ->
- Message = "Save Warnings",
- Box = WBox;
- log -> Message = "Save Log",
- Box = Log
- end,
+ {Message, Box} = case Type of
+ warnings -> {"Save Warnings", WBox};
+ log -> {"Save Log", Log}
+ end,
case wxTextCtrl:getValue(Box) of
"" -> error_sms(State,"There is nothing to save...\n");
_ ->
@@ -936,8 +932,7 @@ include_dialog(#gui_state{gui = Wx, frame = Frame, options = Options}) ->
wxButton:connect(DeleteAllButton, command_button_clicked),
wxButton:connect(Ok, command_button_clicked),
wxButton:connect(Cancel, command_button_clicked),
- Dirs = [io_lib:format("~s", [X])
- || X <- Options#options.include_dirs],
+ Dirs = [io_lib:format("~s", [X]) || X <- Options#options.include_dirs],
wxListBox:set(Box, Dirs),
Layout = wxBoxSizer:new(?wxVERTICAL),
Buttons = wxBoxSizer:new(?wxHORIZONTAL),
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index b1f849b16f..2a8aba5d8f 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -990,8 +990,7 @@ fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
NewRaceVarMap, Args, NewFunArgs, NewFunTypes, NestingLevel};
{CurrFun, Fun} ->
NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
- NewRaceVarMap =
- race_var_map(Args, NewFunArgs, RaceVarMap, bind),
+ NewRaceVarMap = race_var_map(Args, NewFunArgs, RaceVarMap, bind),
RetC =
case Fun of
InitFun ->
@@ -1018,8 +1017,7 @@ fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
label = FunLabel, var_map = NewRaceVarMap,
def_vars = Args, call_vars = NewFunArgs,
arg_types = NewFunTypes}|
- lists:reverse(StateRaceList)] ++
- RetC;
+ lists:reverse(StateRaceList)] ++ RetC;
_ ->
[#curr_fun{status = in, mfa = Fun,
label = FunLabel, var_map = NewRaceVarMap,
@@ -1054,13 +1052,9 @@ fixup_race_backward(CurrFun, Calls, CallsToAnalyze, Parents, Height) ->
false -> [CurrFun|Parents]
end;
[Head|Tail] ->
- MorePaths =
- case Head of
- {Parent, CurrFun} -> true;
- {Parent, _TupleB} -> false
- end,
- case MorePaths of
- true ->
+ {Parent, TupleB} = Head,
+ case TupleB =:= CurrFun of
+ true -> % more paths are needed
NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
NewParents =
fixup_race_backward(Parent, NewCallsToAnalyze,
@@ -1854,7 +1848,8 @@ ets_tuple_argtypes1(Str, Tuple, TupleList, NestingLevel) ->
end.
format_arg(?bypassed) -> ?no_label;
-format_arg(Arg) ->
+format_arg(Arg0) ->
+ Arg = cerl:fold_literal(Arg0),
case cerl:type(Arg) of
var -> cerl_trees:get_label(Arg);
tuple -> list_to_tuple([format_arg(A) || A <- cerl:tuple_es(Arg)]);
@@ -1884,7 +1879,7 @@ format_args_1([Arg|Args], [Type|Types], CleanState) ->
case Arg =:= ?bypassed of
true -> [?no_label, format_type(Type, CleanState)];
false ->
- case cerl:is_literal(Arg) of
+ case cerl:is_literal(cerl:fold_literal(Arg)) of
true -> [?no_label, format_cerl(Arg)];
false -> [format_arg(Arg), format_type(Type, CleanState)]
end
@@ -2154,7 +2149,8 @@ race_var_map_guard_helper1(Arg, Pats, RaceVarMap, Op) ->
end
end.
-race_var_map_guard_helper2(Arg, Pat, Bool, RaceVarMap, Op) ->
+race_var_map_guard_helper2(Arg, Pat0, Bool, RaceVarMap, Op) ->
+ Pat = cerl:fold_literal(Pat0),
case cerl:type(Pat) of
literal ->
[Arg1, Arg2] = cerl:call_args(Arg),
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 31ceaf5ac5..3d03ed3ab3 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -389,13 +389,8 @@ traverse(Tree, DefinedVars, State) ->
{State2, _} = traverse_list(Funs, DefinedVars1, State1),
traverse(Body, DefinedVars1, State2);
literal ->
- %% This is needed for finding records
- case cerl:unfold_literal(Tree) of
- Tree ->
- Type = t_from_term(cerl:concrete(Tree)),
- {State, Type};
- NewTree -> traverse(NewTree, DefinedVars, State)
- end;
+ Type = t_from_term(cerl:concrete(Tree)),
+ {State, Type};
module ->
Defs = cerl:module_defs(Tree),
Funs = [Fun || {_Var, Fun} <- Defs],
@@ -462,7 +457,7 @@ traverse(Tree, DefinedVars, State) ->
end,
case Elements of
[Tag|Fields] ->
- case cerl:is_c_atom(Tag) of
+ case cerl:is_c_atom(Tag) andalso is_literal_record(Tree) of
true ->
%% Check if a record is constructed.
Arity = length(Fields),
@@ -874,7 +869,8 @@ get_underapprox_from_guard(Tree, Map) ->
MFA ->
case get_type_test(MFA) of
{ok, Type} ->
- [Arg] = cerl:call_args(Tree),
+ [Arg0] = cerl:call_args(Tree),
+ Arg = cerl:fold_literal(Arg0),
{ArgType, Map1} = get_underapprox_from_guard(Arg, Map),
Inf = t_inf(Type, ArgType),
case t_is_none(Inf) of
@@ -891,7 +887,9 @@ get_underapprox_from_guard(Tree, Map) ->
{erlang, '=:=', 2} -> throw(dont_know);
{erlang, '==', 2} -> throw(dont_know);
{erlang, 'and', 2} ->
- [Arg1, Arg2] = cerl:call_args(Tree),
+ [Arg1_0, Arg2_0] = cerl:call_args(Tree),
+ Arg1 = cerl:fold_literal(Arg1_0),
+ Arg2 = cerl:fold_literal(Arg2_0),
case ((cerl:is_c_var(Arg1) orelse cerl:is_literal(Arg1))
andalso
(cerl:is_c_var(Arg2) orelse cerl:is_literal(Arg2))) of
@@ -3272,6 +3270,10 @@ lookup_record(Records, Tag, Arity) ->
error
end.
+is_literal_record(Tree) ->
+ Ann = cerl:get_ann(Tree),
+ lists:member(record, Ann).
+
family(L) ->
sofs:to_external(sofs:rel2fam(sofs:relation(L))).
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 21183e3459..e1bcd72c0b 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -402,7 +402,7 @@ sets_filter([Mod|Mods], ExpTypes) ->
src_compiler_opts() ->
[no_copt, to_core, binary, return_errors,
no_inline, strict_record_tests, strict_record_updates,
- no_is_record_optimization].
+ dialyzer].
-spec get_module(abstract_code()) -> module().
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/simple b/lib/dialyzer/test/opaque_SUITE_data/results/simple
index 072ac9be8f..29864d6065 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/simple
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/simple
@@ -14,12 +14,17 @@ is_rec.erl:53: The call erlang:is_record(A::simple1_adt:d1(),A::simple1_adt:d1()
is_rec.erl:57: Guard test is_record(A::simple1_adt:d1(),'r',2) breaks the opaqueness of its argument
is_rec.erl:61: The record #r{f1::simple1_adt:d1()} violates the declared type for #r{}
is_rec.erl:65: The call erlang:is_record({simple1_adt:d1(),1},'r',2) contains an opaque term as 1st argument when terms of different types are expected in these positions
-rec_api.erl:22: Record construction #r1{f1::10} violates the declared type of field f1::'undefined' | rec_api:a()
-rec_api.erl:23: The pattern {'r1', 10} violates the declared type for #r1{}
-rec_api.erl:27: The attempt to match a term of type rec_adt:r1() against the pattern {'r1', 'a'} breaks the opaqueness of the term
-rec_api.erl:29: Invalid type specification for function rec_api:adt_t1/1. The success typing is (#r1{f1::'a'}) -> #r1{f1::'a'}
-rec_api.erl:34: Invalid type specification for function rec_api:adt_r1/0. The success typing is () -> #r1{f1::'a'}
-rec_api.erl:77: The attempt to match a term of type rec_api:f() against the variable _ breaks the opaqueness of the term
+rec_api.erl:104: Matching of pattern {'r2', 10} tagged with a record name violates the declared type of #r2{f1::10}
+rec_api.erl:113: The attempt to match a term of type #r3{f1::queue:queue(_)} against the pattern {'r3', 'a'} breaks the opaqueness of queue:queue(_)
+rec_api.erl:118: Record construction #r3{f1::10} violates the declared type of field f1::queue:queue(_)
+rec_api.erl:123: The attempt to match a term of type #r3{f1::10} against the pattern {'r3', 10} breaks the opaqueness of queue:queue(_)
+rec_api.erl:24: Record construction #r1{f1::10} violates the declared type of field f1::'undefined' | rec_api:a()
+rec_api.erl:29: Matching of pattern {'r1', 10} tagged with a record name violates the declared type of #r1{f1::10}
+rec_api.erl:33: The attempt to match a term of type rec_adt:r1() against the pattern {'r1', 'a'} breaks the opaqueness of the term
+rec_api.erl:35: Invalid type specification for function rec_api:adt_t1/1. The success typing is (#r1{f1::'a'}) -> #r1{f1::'a'}
+rec_api.erl:40: Invalid type specification for function rec_api:adt_r1/0. The success typing is () -> #r1{f1::'a'}
+rec_api.erl:85: The attempt to match a term of type rec_api:f() against the variable _ breaks the opaqueness of rec_adt:f()
+rec_api.erl:99: Record construction #r2{f1::10} violates the declared type of field f1::rec_api:a()
simple1_api.erl:113: The test simple1_api:d1() =:= simple1_api:d2() can never evaluate to 'true'
simple1_api.erl:118: Guard test simple1_api:d2() =:= A::simple1_api:d1() can never succeed
simple1_api.erl:142: Attempt to test for equality between a term of type simple1_adt:o2() and a term of opaque type simple1_adt:o1()
@@ -58,7 +63,7 @@ simple1_api.erl:381: Invalid type specification for function simple1_api:bool_ad
simple1_api.erl:407: The size simple1_adt:i1() breaks the opaqueness of A
simple1_api.erl:418: The attempt to match a term of type non_neg_integer() against the variable A breaks the opaqueness of simple1_adt:i1()
simple1_api.erl:425: The attempt to match a term of type non_neg_integer() against the variable B breaks the opaqueness of simple1_adt:i1()
-simple1_api.erl:432: The attempt to match a term of type non_neg_integer() against the variable B breaks the opaqueness of simple1_api:o1()
+simple1_api.erl:432: The pattern <<_:B/integer-unit:1>> can never match the type any()
simple1_api.erl:448: The attempt to match a term of type non_neg_integer() against the variable Sz breaks the opaqueness of simple1_adt:i1()
simple1_api.erl:460: The attempt to match a term of type simple1_adt:bit1() against the pattern <<_/binary-unit:8>> breaks the opaqueness of the term
simple1_api.erl:478: The call 'foo':A(A::simple1_adt:a()) breaks the opaqueness of the term A :: simple1_adt:a()
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/timer b/lib/dialyzer/test/opaque_SUITE_data/results/timer
index e917b76b08..b1cfcd4e9f 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/timer
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/timer
@@ -1,4 +1,4 @@
timer_use.erl:16: The pattern 'gazonk' can never match the type {'error',_} | {'ok',timer:tref()}
-timer_use.erl:17: The attempt to match a term of type {'ok',timer:tref()} against the pattern {'ok', 42} breaks the opaqueness of timer:tref()
+timer_use.erl:17: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {'ok', 42} breaks the opaqueness of timer:tref()
timer_use.erl:18: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {Tag, 'gazonk'} breaks the opaqueness of timer:tref()
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl b/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl
index d9b1d59f0c..fb6d59d263 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl
@@ -1,9 +1,9 @@
-module(rec_api).
--export([t1/0, t2/0, adt_t1/0, adt_t1/1, adt_r1/0,
- t/1, t_adt/0, r/0, r_adt/0]).
+-export([t1/0, t2/0, t3/0, adt_t1/0, adt_t1/1, adt_r1/0,
+ t/1, t_adt/0, r/0, r_adt/0, u1/0, u2/0, u3/0, v1/0, v2/0, v3/0]).
--export_type([{a,0},{r1,0}]).
+-export_type([{a,0},{r1,0}, r2/0, r3/0]).
-export_type([f/0, op_t/0, r/0, tup/0]).
@@ -19,8 +19,14 @@ t1() ->
{r1, a} = A.
t2() ->
- A = {r1, 10}, % violates the type of #r1{}
- {r1, 10} = A. % violates the type of #r1{}
+ A = {r1, 10},
+ {r1, 10} = A,
+ A = #r1{f1 = 10}, % violates the type of field f1
+ #r1{f1 = 10} = A.
+
+t3() ->
+ A = {r1, 10},
+ #r1{f1 = 10} = A. % violates the type of #r1{}
adt_t1() ->
R = rec_adt:r1(),
@@ -66,7 +72,8 @@ t_adt() ->
-spec r() -> _.
r() ->
- {r, f(), 2}. % OK, f() is a local opaque type
+ {{r, f(), 2},
+ #r{f = f(), o = 2}}. % OK, f() is a local opaque type
-spec f() -> f().
@@ -74,4 +81,43 @@ f() ->
fun(_) -> 3 end.
r_adt() ->
- {r, rec_adt:f(), 2}. % breaks the opaqueness
+ {{r, rec_adt:f(), 2},
+ #r{f = rec_adt:f(), o = 2}}. % breaks the opaqueness
+
+-record(r2, % like #r1{}, but with initial value
+ {f1 = a :: a()}).
+
+-opaque r2() :: #r2{}.
+
+u1() ->
+ A = #r2{f1 = a},
+ {r2, a} = A.
+
+u2() ->
+ A = {r2, 10},
+ {r2, 10} = A,
+ A = #r2{f1 = 10}, % violates the type of field f1
+ #r2{f1 = 10} = A.
+
+u3() ->
+ A = {r2, 10},
+ #r2{f1 = 10} = A. % violates the type of #r2{}
+
+-record(r3, % like #r1{}, but an opaque type
+ {f1 = queue:new():: queue:queue()}).
+
+-opaque r3() :: #r3{}.
+
+v1() ->
+ A = #r3{f1 = queue:new()},
+ {r3, a} = A. % breaks the opaqueness
+
+v2() ->
+ A = {r3, 10},
+ {r3, 10} = A,
+ A = #r3{f1 = 10}, % violates the type of field f1
+ #r3{f1 = 10} = A.
+
+v3() ->
+ A = {r3, 10},
+ #r3{f1 = 10} = A. % breaks the opaqueness
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl b/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl
index 5135eb8e59..eef2074e0c 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl
@@ -428,8 +428,8 @@ bit_adt_t3(A) ->
bit_t5(A) ->
B = o1(),
- case none:none() of
- <<A:B>> -> 1 % breaks the opaqueness
+ case none:none() of % the type is any(); should fix that XXX
+ <<A:B>> -> 1 % can never match (local opaque type is OK)
end.
-spec bit_t4(<<_:1>>) -> integer().
diff --git a/lib/dialyzer/test/options1_SUITE_data/results/compiler b/lib/dialyzer/test/options1_SUITE_data/results/compiler
index 6399e3e36b..30b6f4814a 100644
--- a/lib/dialyzer/test/options1_SUITE_data/results/compiler
+++ b/lib/dialyzer/test/options1_SUITE_data/results/compiler
@@ -4,7 +4,7 @@ beam_bool.erl:193: The pattern {[], _} can never match the type {[{_,_,_,_},...]
beam_bool.erl:510: The pattern [{'set', [Dst], _, _}, {'%live', _}] can never match the type [{_,_,_,_}]
beam_disasm.erl:537: The variable X can never match since previous clauses completely covered the type 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
beam_type.erl:284: The pattern <'pi', 0> can never match the type <_,1 | 2>
-beam_validator.erl:396: The pattern <{'jump', {'f', _}}, Vst = {'vst', 'none', _}> can never match the type <_,#vst{current::#st{ct::[]}}>
+beam_validator.erl:396: Matching of pattern {'vst', 'none', _} tagged with a record name violates the declared type of #vst{current::#st{ct::[]}}
beam_validator.erl:690: The pattern <'term', OldT> can never match the type <{'tuple',[any(),...]},_>
beam_validator.erl:693: Guard test 'or'('false','false') can never succeed
beam_validator.erl:700: Guard test 'or'('false','false') can never succeed
@@ -33,4 +33,4 @@ core_lint.erl:473: The pattern <{'c_atom', _, 'all'}, 'binary', _Def, St> can ne
core_lint.erl:505: The pattern <_Req, 'unknown', St> can never match the type <non_neg_integer(),non_neg_integer(),_>
v3_codegen.erl:1569: The call v3_codegen:load_reg_1(V::any(),I::0,Rs::any(),pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
v3_codegen.erl:1571: The call v3_codegen:load_reg_1(V::any(),I::0,[],pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
-v3_core.erl:646: The pattern <Prim = {'iprimop', _, _, _}, St> can never match the type <#c_nil{anno::[any(),...]} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple' | 'c_var' | 'ibinary' | 'icatch' | 'ireceive1',[any(),...] | {_,_,_,_},_} | #c_cons{anno::[any(),...]} | #c_fname{anno::[any(),...]} | #iletrec{anno::{_,_,_,_},defs::[any(),...],body::[any(),...]} | #icase{anno::{_,_,_,_},args::[any()],clauses::[any()],fc::{_,_,_,_,_,_}} | #ireceive2{anno::{_,_,_,_},clauses::[any()],action::[any()]} | #ifun{anno::{_,_,_,_},id::[any(),...],vars::[any()],clauses::[any(),...],fc::{_,_,_,_,_,_}} | #imatch{anno::{_,_,_,_},guard::[],fc::{_,_,_,_,_,_}} | #itry{anno::{_,_,_,_},args::[any()],vars::[any(),...],body::[any(),...],evars::[any(),...],handler::[any(),...]},_>
+v3_core.erl:646: Matching of pattern {'iprimop', _, _, _} tagged with a record name violates the declared type of #c_nil{anno::[any(),...]} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple' | 'c_var' | 'ibinary' | 'icatch' | 'ireceive1',[any(),...] | {_,_,_,_},_} | #c_cons{anno::[any(),...]} | #c_fname{anno::[any(),...]} | #iletrec{anno::{_,_,_,_},defs::[any(),...],body::[any(),...]} | #icase{anno::{_,_,_,_},args::[any()],clauses::[any()],fc::{_,_,_,_,_,_}} | #ireceive2{anno::{_,_,_,_},clauses::[any()],action::[any()]} | #ifun{anno::{_,_,_,_},id::[any(),...],vars::[any()],clauses::[any(),...],fc::{_,_,_,_,_,_}} | #imatch{anno::{_,_,_,_},guard::[],fc::{_,_,_,_,_,_}} | #itry{anno::{_,_,_,_},args::[any()],vars::[any(),...],body::[any(),...],evars::[any(),...],handler::[any(),...]}
diff --git a/lib/dialyzer/test/r9c_SUITE_data/results/asn1 b/lib/dialyzer/test/r9c_SUITE_data/results/asn1
index c11105b76d..1cf03346ee 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/results/asn1
+++ b/lib/dialyzer/test/r9c_SUITE_data/results/asn1
@@ -5,7 +5,7 @@ asn1ct.erl:1673: The pattern 'all' can never match the type 'asn1_module' | 'exc
asn1ct.erl:672: The pattern <{'false', Result}, _, _> can never match the type <{'true','true'},atom() | binary() | [atom() | [any()] | char()],[any()]>
asn1ct.erl:909: Guard test is_atom(Ext::[49 | 97 | 98 | 100 | 110 | 115]) can never succeed
asn1ct_check.erl:1698: The pattern {'error', _} can never match the type [any()]
-asn1ct_check.erl:2733: The pattern {'type', Tag, _, _, _, _} can never match the type 'ASN1_OPEN_TYPE' | {_,_} | {'fixedtypevaluefield',_,_}
+asn1ct_check.erl:2733: Matching of pattern {'type', Tag, _, _, _, _} tagged with a record name violates the declared type of 'ASN1_OPEN_TYPE' | {_,_} | {'fixedtypevaluefield',_,_}
asn1ct_check.erl:2738: The pattern <_S, _> can never match since previous clauses completely covered the type <#state{},#'ObjectClassFieldType'{class::#objectclass{fields::maybe_improper_list() | {_,_,_,_}},fieldname::{_,maybe_improper_list()},type::'ASN1_OPEN_TYPE' | {_,_} | {'fixedtypevaluefield',_,_}}>
asn1ct_check.erl:2887: The variable Other can never match since previous clauses completely covered the type any()
asn1ct_check.erl:3188: The pattern <_S, [], B> can never match the type <#state{},{'SingleValue',_},{'ValueRange',_}>
diff --git a/lib/dialyzer/test/small_SUITE_data/results/confusing_record_warning b/lib/dialyzer/test/small_SUITE_data/results/confusing_record_warning
deleted file mode 100644
index ac3d89b02b..0000000000
--- a/lib/dialyzer/test/small_SUITE_data/results/confusing_record_warning
+++ /dev/null
@@ -1,3 +0,0 @@
-
-confusing_record_warning.erl:18: Function test/1 has no local return
-confusing_record_warning.erl:18: Matching of pattern {'r', [_]} tagged with a record name violates the declared type of #r{field::'binary' | 'undefined'}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/literals b/lib/dialyzer/test/small_SUITE_data/results/literals
new file mode 100644
index 0000000000..03e161ca71
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/literals
@@ -0,0 +1,14 @@
+
+literals.erl:11: Function t1/0 has no local return
+literals.erl:12: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:14: Function t2/0 has no local return
+literals.erl:15: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:17: Function t3/0 has no local return
+literals.erl:18: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:21: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:23: Function m1/1 has no local return
+literals.erl:23: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer' | 'undefined'}
+literals.erl:26: Function m2/1 has no local return
+literals.erl:26: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer' | 'undefined'}
+literals.erl:29: Function m3/1 has no local return
+literals.erl:29: The pattern {{'r', 'a'}} can never match the type any()
diff --git a/lib/dialyzer/test/small_SUITE_data/results/maps_difftype b/lib/dialyzer/test/small_SUITE_data/results/maps_difftype
new file mode 100644
index 0000000000..8980321135
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/maps_difftype
@@ -0,0 +1,3 @@
+
+maps_difftype.erl:10: Function empty_mismatch/1 has no local return
+maps_difftype.erl:11: The pattern ~{}~ can never match the type tuple()
diff --git a/lib/dialyzer/test/small_SUITE_data/results/my_sofs b/lib/dialyzer/test/small_SUITE_data/results/my_sofs
index bc97c08d62..0b933e6cd7 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/my_sofs
+++ b/lib/dialyzer/test/small_SUITE_data/results/my_sofs
@@ -1,3 +1,3 @@
-my_sofs.erl:34: The pattern {'Set', _, _} can never match the type #'OrdSet'{}
-my_sofs.erl:54: The pattern {'Set', _, _} can never match the type #'OrdSet'{}
+my_sofs.erl:34: Matching of pattern {'Set', _, _} tagged with a record name violates the declared type of #'OrdSet'{}
+my_sofs.erl:54: Matching of pattern {'Set', _, _} tagged with a record name violates the declared type of #'OrdSet'{}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/record_pat b/lib/dialyzer/test/small_SUITE_data/results/record_pat
index 9a3f925e42..a46be6c451 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/record_pat
+++ b/lib/dialyzer/test/small_SUITE_data/results/record_pat
@@ -1,2 +1,2 @@
-record_pat.erl:14: The pattern {'foo', 'baz'} violates the declared type for #foo{}
+record_pat.erl:14: Matching of pattern {'foo', 'baz'} tagged with a record name violates the declared type of #foo{bar::'undefined' | integer()}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/record_test b/lib/dialyzer/test/small_SUITE_data/results/record_test
index 9715f0dcfb..7060bfa200 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/record_test
+++ b/lib/dialyzer/test/small_SUITE_data/results/record_test
@@ -1,3 +1,3 @@
-record_test.erl:19: The pattern {'foo', _} can never match the type 'foo'
+record_test.erl:19: Matching of pattern {'foo', _} tagged with a record name violates the declared type of 'foo'
record_test.erl:21: The variable _ can never match since previous clauses completely covered the type 'foo'
diff --git a/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning b/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning
new file mode 100644
index 0000000000..2e417e1b2a
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning
@@ -0,0 +1,3 @@
+
+relevant_record_warning.erl:22: Function test/1 has no local return
+relevant_record_warning.erl:23: Record construction #r{field::<<_:8>>} violates the declared type of field field::'binary' | 'undefined'
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/common_types.hrl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/common_types.hrl
new file mode 100644
index 0000000000..f362a06bca
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/common_types.hrl
@@ -0,0 +1,6 @@
+-type host() :: nonempty_string().
+-type path() :: nonempty_string().
+-type url() :: binary().
+
+% The host portion of a url, if available.
+-type url_host() :: host() | none.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/config.hrl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/config.hrl
new file mode 100644
index 0000000000..8cab65fc9c
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/config.hrl
@@ -0,0 +1,148 @@
+
+-define(SECOND, 1000).
+-define(MINUTE, (60 * ?SECOND)).
+-define(HOUR, (60 * ?MINUTE)).
+-define(DAY, (24 * ?HOUR)).
+-define(MB, (1024 * 1024)).
+
+% Maximum length of tag/blob prefix
+-define(NAME_MAX, 511).
+
+% How long ddfs node startup can take. The most time-consuming part
+% is the scanning of the tag objects in the node's DDFS volumes.
+-define(NODE_STARTUP, (1 * ?MINUTE)).
+
+% How long to wait on the master for replies from nodes.
+-define(NODE_TIMEOUT, (10 * ?SECOND)).
+
+% How long to wait for a reply from an operation coordinated by the
+% master that accesses nodes. This value should be larger than
+% NODE_TIMEOUT.
+-define(NODEOP_TIMEOUT, (1 * ?MINUTE)).
+
+% The minimum amount of free space a node must have, to be considered
+% a primary candidate host for a new blob.
+-define(MIN_FREE_SPACE, (1024 * ?MB)).
+
+% The maximum number of active HTTP connections on a system (this
+% applies separately for GET and PUT operations).
+-define(HTTP_MAX_ACTIVE, 3).
+
+% The maximum number of waiting HTTP connections to queue up on a busy system.
+-define(HTTP_QUEUE_LENGTH, 100).
+
+% The maximum number of simultaneous HTTP connections. Note that
+% HTTP_MAX_CONNS * 2 * 2 + 32 < Maximum number of file descriptors, where
+% 2 = Get and put, 2 = two FDs required for each connection (connection
+% itself + a file it accesses), 32 = a guess at how many extra fds are needed.
+-define(HTTP_MAX_CONNS, 128).
+
+% How long to keep a PUT request in queue if the system is busy.
+-define(PUT_WAIT_TIMEOUT, (1 * ?MINUTE)).
+
+% How long to keep a GET request in queue if the system is busy.
+-define(GET_WAIT_TIMEOUT, (1 * ?MINUTE)).
+
+% An unused loaded tag expires in TAG_EXPIRES milliseconds. Note that
+% if TAG_EXPIRES is not smaller than GC_INTERVAL, tags will never
+% expire from the memory cache and will always take up memory.
+-define(TAG_EXPIRES, (10 * ?HOUR)).
+
+% How often the master's cache of all known tag names is refreshed.
+% This refresh is only needed to purge deleted tags eventually from
+% the tag cache. It does no harm to have a long interval.
+-define(TAG_CACHE_INTERVAL, (10 * ?MINUTE)).
+
+% How soon a tag object initialized in memory expires if its content
+% cannot be fetched from the cluster.
+-define(TAG_EXPIRES_ONERROR, (1 * ?SECOND)).
+
+% How often a DDFS node should refresh its tag cache from disk.
+-define(FIND_TAGS_INTERVAL, ?DAY).
+
+% How often buffered (delayed) updates to a tag need to be
+% flushed. Tradeoff: The longer the interval, the more updates are
+% bundled in a single commit. On the other hand, in the worst case
+% the requester has to wait for the full interval before getting a
+% reply. A long interval also increases the likelihood that the server
+% crashes before the commit has finished successfully, making requests
+% more unreliable.
+-define(DELAYED_FLUSH_INTERVAL, (1 * ?SECOND)).
+
+% How long to wait between garbage collection runs.
+-define(GC_INTERVAL, ?DAY).
+
+% Max duration for a GC run. This should be smaller than
+% min(ORPHANED_{BLOB,TAG}_EXPIRES).
+-define(GC_MAX_DURATION, (3 * ?DAY)).
+
+% How long to wait after startup for cluster to stabilize before
+% starting the first GC run.
+-define(GC_DEFAULT_INITIAL_WAIT, (5 * ?MINUTE)).
+
+% The longest potential interval between messages in the GC protocol;
+% used to ensure GC makes forward progress. This can be set to the
+% estimated time to traverse all the volumes on a DDFS node.
+-define(GC_PROGRESS_INTERVAL, (30 * ?MINUTE)).
+
+% Number of extra replicas (i.e. lost replicas recovered during GC) to
+% allow before deleting extra replicas.
+-define(NUM_EXTRA_REPLICAS, 1).
+
+% Permissions for files backing blobs and tags.
+-define(FILE_MODE, 8#00400).
+
+% How often to check available disk space in ddfs_node.
+-define(DISKSPACE_INTERVAL, (10 * ?SECOND)).
+
+% The maximum size of payloads of HTTP requests to the /ddfs/tag/
+% prefix.
+-define(MAX_TAG_BODY_SIZE, (512 * ?MB)).
+
+% Tag attribute names and values have a limited size, and there
+% can be only a limited number of them.
+-define(MAX_TAG_ATTRIB_NAME_SIZE, 1024).
+-define(MAX_TAG_ATTRIB_VALUE_SIZE, 1024).
+-define(MAX_NUM_TAG_ATTRIBS, 1000).
+
+% How long HTTP requests that perform tag updates should wait to
+% finish (a long time).
+-define(TAG_UPDATE_TIMEOUT, ?DAY).
+
+% Timeout for re-replicating a single blob over HTTP PUT. This
+% depends on the largest blobs hosted by DDFS, and the speed of the
+% cluster network.
+-define(GC_PUT_TIMEOUT, (180 * ?MINUTE)).
+
+% Delete !partial files after this many milliseconds.
+-define(PARTIAL_EXPIRES, ?DAY).
+
+% When an orphaned blob can be deleted. This should be large enough that
+% you can upload all the new blobs of a tag and perform the tag update
+% within this time.
+-define(ORPHANED_BLOB_EXPIRES, (5 * ?DAY)).
+
+% When an orphaned tag can be deleted.
+-define(ORPHANED_TAG_EXPIRES, (5 * ?DAY)).
+
+% How long a tag has to stay on the deleted list before
+% we can permanently forget it, after all known instances
+% of the tag object have been removed. This quarantine period
+% ensures that a node that was temporarily unavailable
+% and reactivates can't resurrect deleted tags. You
+% must ensure that all temporarily inactive nodes
+% are reactivated (or cleaned) within the ?DELETED_TAG_EXPIRES
+% time frame.
+%
+% This value _must_ be larger than the other time-related DDFS
+% parameters listed in this file. In particular, it must be larger
+% than ORPHANED_TAG_EXPIRES.
+-define(DELETED_TAG_EXPIRES, (30 * ?DAY)).
+
+% How many times a tag operation should be retried before aborting.
+-define(MAX_TAG_OP_RETRIES, 3).
+
+% How long to wait before timing out a tag retrieval. This should be
+% large enough to read a large tag object off the disk and send it
+% over the network.
+-define(GET_TAG_TIMEOUT, (5 * ?MINUTE)).
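
The HTTP_MAX_CONNS comment in config.hrl above implies a file-descriptor budget. A minimal sketch of that arithmetic for the default value of 128 (the fd_budget module and its needed/0 function are hypothetical and not part of the patch):

    -module(fd_budget).                  % hypothetical helper, not in the patch
    -export([needed/0]).

    %% GET and PUT each allow ?HTTP_MAX_CONNS (128) connections; each
    %% connection uses two fds (socket + file), plus ~32 fds of slack.
    needed() ->
        128 * 2 * 2 + 32.                % = 544; keep the OS fd limit above this
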
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs.hrl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs.hrl
new file mode 100644
index 0000000000..e43ec23fe1
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs.hrl
@@ -0,0 +1,9 @@
+-type volume_name() :: nonempty_string().
+
+% Diskinfo is {FreeSpace, UsedSpace}.
+-type diskinfo() :: {non_neg_integer(), non_neg_integer()}.
+-type volume() :: {diskinfo(), volume_name()}.
+
+-type object_type() :: 'blob' | 'tag'.
+-type object_name() :: binary().
+-type taginfo() :: {erlang:timestamp(), volume_name()}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_gc.hrl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_gc.hrl
new file mode 100644
index 0000000000..dc43f7586b
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_gc.hrl
@@ -0,0 +1,17 @@
+-type local_object() :: {object_name(), node()}.
+-type phase() :: 'start' | 'build_map' | 'map_wait' | 'gc'
+ | 'rr_blobs' | 'rr_blobs_wait' | 'rr_tags'.
+-type protocol_msg() :: {'check_blob', object_name()} | 'start_gc' | 'end_rr'.
+
+-type blob_update() :: {object_name(), 'filter' | [url()]}.
+
+-type check_blob_result() :: 'false' | {'true', volume_name()}.
+
+% GC statistics
+
+% {Files, Bytes}
+-type gc_stat() :: {non_neg_integer(), non_neg_integer()}.
+% {Kept, Deleted}
+-type obj_stats() :: {gc_stat(), gc_stat()}.
+% {Tags, Blobs}.
+-type gc_run_stats() :: {obj_stats(), obj_stats()}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_master.erl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_master.erl
new file mode 100644
index 0000000000..2be2773dc5
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_master.erl
@@ -0,0 +1,531 @@
+-module(ddfs_master).
+-behaviour(gen_server).
+
+-export([start_link/0]).
+-export([get_tags/1, get_tags/3,
+ get_nodeinfo/1,
+ get_read_nodes/0,
+ get_hosted_tags/1,
+ gc_blacklist/0, gc_blacklist/1,
+ gc_stats/0,
+ choose_write_nodes/3,
+ new_blob/4, new_blob/5,
+ safe_gc_blacklist/0, safe_gc_blacklist/1,
+ refresh_tag_cache/0,
+ tag_notify/2,
+ tag_operation/2, tag_operation/3,
+ update_gc_stats/1,
+ update_nodes/1
+ ]).
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+-define(WEB_PORT, 8011).
+
+-compile(nowarn_deprecated_type).
+
+-include("common_types.hrl").
+-include("gs_util.hrl").
+-include("config.hrl").
+-include("ddfs.hrl").
+-include("ddfs_tag.hrl").
+-include("ddfs_gc.hrl").
+
+-type node_info() :: {node(), {non_neg_integer(), non_neg_integer()}}.
+-type gc_stats() :: none | gc_run_stats().
+
+-record(state, {tags = gb_trees:empty() :: gb_trees:tree(),
+ tag_cache = false :: false | gb_sets:set(),
+ cache_refresher :: pid(),
+
+ nodes = [] :: [node_info()],
+ write_blacklist = [] :: [node()],
+ read_blacklist = [] :: [node()],
+ gc_blacklist = [] :: [node()],
+ safe_gc_blacklist = gb_sets:empty() :: gb_sets:set(),
+ gc_stats = none :: none | {gc_stats(), erlang:timestamp()}}).
+-type state() :: #state{}.
+-type replyto() :: {pid(), reference()}.
+
+-export_type([gc_stats/0, node_info/0]).
+
+%% ===================================================================
+%% API functions
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+ lager:info("DDFS master starts"),
+ case gen_server:start_link({local, ?MODULE}, ?MODULE, [], []) of
+ {ok, Server} -> {ok, Server};
+ {error, {already_started, Server}} -> {ok, Server}
+ end.
+
+-spec tag_operation(term(), tagname()) -> term().
+tag_operation(Op, Tag) ->
+ gen_server:call(?MODULE, {tag, Op, Tag}).
+-spec tag_operation(term(), tagname(), non_neg_integer() | infinity) ->
+ term().
+tag_operation(Op, Tag, Timeout) ->
+ gen_server:call(?MODULE, {tag, Op, Tag}, Timeout).
+
+-spec tag_notify(term(), tagname()) -> ok.
+tag_notify(Op, Tag) ->
+ gen_server:cast(?MODULE, {tag_notify, Op, Tag}).
+
+-spec get_nodeinfo(all) -> {ok, [node_info()]}.
+get_nodeinfo(all) ->
+ gen_server:call(?MODULE, {get_nodeinfo, all}).
+
+-spec get_read_nodes() -> {ok, [node()], non_neg_integer()} | {error, term()}.
+get_read_nodes() ->
+ gen_server:call(?MODULE, get_read_nodes, infinity).
+
+-spec gc_blacklist() -> {ok, [node()]}.
+gc_blacklist() ->
+ gen_server:call(?MODULE, gc_blacklist).
+
+-spec gc_blacklist([node()]) -> ok.
+gc_blacklist(Nodes) ->
+ gen_server:cast(?MODULE, {gc_blacklist, Nodes}).
+
+-spec gc_stats() -> {ok, none | {gc_stats(), erlang:timestamp()}} | {error, term()}.
+gc_stats() ->
+ gen_server:call(?MODULE, gc_stats).
+
+-spec get_hosted_tags(host()) -> {ok, [tagname()]} | {error, term()}.
+get_hosted_tags(Host) ->
+ gen_server:call(?MODULE, {get_hosted_tags, Host}).
+
+-spec choose_write_nodes(non_neg_integer(), [node()], [node()]) -> {ok, [node()]}.
+choose_write_nodes(K, Include, Exclude) ->
+ gen_server:call(?MODULE, {choose_write_nodes, K, Include, Exclude}).
+
+-spec get_tags(gc) -> {ok, [tagname()], [node()]} | too_many_failed_nodes;
+ (safe) -> {ok, [binary()]} | too_many_failed_nodes.
+get_tags(Mode) ->
+ get_tags(?MODULE, Mode, ?GET_TAG_TIMEOUT).
+
+-spec get_tags(server(), gc, non_neg_integer()) ->
+ {ok, [tagname()], [node()]} | too_many_failed_nodes;
+ (server(), safe, non_neg_integer()) ->
+ {ok, [binary()]} | too_many_failed_nodes.
+get_tags(Server, Mode, Timeout) ->
+ disco_profile:timed_run(
+ fun() -> gen_server:call(Server, {get_tags, Mode}, Timeout) end,
+ get_tags).
+
+-spec new_blob(string()|object_name(), non_neg_integer(), [node()], [node()]) ->
+ too_many_replicas | {ok, [nonempty_string()]}.
+new_blob(Obj, K, Include, Exclude) ->
+ gen_server:call(?MODULE, {new_blob, Obj, K, Include, Exclude}, infinity).
+
+-spec new_blob(server(), string()|object_name(), non_neg_integer(), [node()], [node()]) ->
+ too_many_replicas | {ok, [nonempty_string()]}.
+new_blob(Master, Obj, K, Include, Exclude) ->
+ gen_server:call(Master, {new_blob, Obj, K, Include, Exclude}, infinity).
+
+-spec safe_gc_blacklist() -> {ok, [node()]} | {error, term()}.
+safe_gc_blacklist() ->
+ gen_server:call(?MODULE, safe_gc_blacklist).
+
+-spec safe_gc_blacklist(gb_sets:set()) -> ok.
+safe_gc_blacklist(SafeGCBlacklist) ->
+ gen_server:cast(?MODULE, {safe_gc_blacklist, SafeGCBlacklist}).
+
+-spec update_gc_stats(gc_run_stats()) -> ok.
+update_gc_stats(Stats) ->
+ gen_server:cast(?MODULE, {update_gc_stats, Stats}).
+
+-type nodes_update() :: [{node(), boolean(), boolean()}].
+-spec update_nodes(nodes_update()) -> ok.
+update_nodes(DDFSNodes) ->
+ gen_server:cast(?MODULE, {update_nodes, DDFSNodes}).
+
+-spec update_nodestats(gb_trees:tree()) -> ok.
+update_nodestats(NewNodes) ->
+ gen_server:cast(?MODULE, {update_nodestats, NewNodes}).
+
+-spec update_tag_cache(gb_sets:set()) -> ok.
+update_tag_cache(TagCache) ->
+ gen_server:cast(?MODULE, {update_tag_cache, TagCache}).
+
+-spec refresh_tag_cache() -> ok.
+refresh_tag_cache() ->
+ gen_server:cast(?MODULE, refresh_tag_cache).
+
+%% ===================================================================
+%% gen_server callbacks
+
+-spec init(_) -> gs_init().
+init(_Args) ->
+ _ = [disco_profile:new_histogram(Name)
+ || Name <- [get_tags, do_get_tags_all, do_get_tags_filter,
+ do_get_tags_safe, do_get_tags_gc]],
+ spawn_link(fun() -> monitor_diskspace() end),
+ spawn_link(fun() -> ddfs_gc:start_gc(disco:get_setting("DDFS_DATA")) end),
+ Refresher = spawn_link(fun() -> refresh_tag_cache_proc() end),
+ put(put_port, disco:get_setting("DDFS_PUT_PORT")),
+ {ok, #state{cache_refresher = Refresher}}.
+
+-type choose_write_nodes_msg() :: {choose_write_nodes, non_neg_integer(), [node()], [node()]}.
+-type new_blob_msg() :: {new_blob, string() | object_name(), non_neg_integer(), [node()]}.
+-type tag_msg() :: {tag, ddfs_tag:call_msg(), tagname()}.
+-spec handle_call(dbg_state_msg(), from(), state()) ->
+ gs_reply(state());
+ ({get_nodeinfo, all}, from(), state()) ->
+ gs_reply({ok, [node_info()]});
+ (get_read_nodes, from(), state()) ->
+ gs_reply({ok, [node()], non_neg_integer});
+ (gc_blacklist, from(), state()) ->
+ gs_reply({ok, [node()]});
+ (gc_stats, from(), state()) ->
+ gs_reply({ok, gc_stats(), erlang:timestamp()});
+ (choose_write_nodes_msg(), from(), state()) ->
+ gs_reply({ok, [node()]});
+ (new_blob_msg(), from(), state()) ->
+ gs_reply(new_blob_result());
+ (tag_msg(), from(), state()) ->
+ gs_reply({error, nonodes}) | gs_noreply();
+ ({get_tags, gc | safe}, from(), state()) ->
+ gs_noreply();
+ ({get_hosted_tags, host()}, from(), state()) ->
+ gs_noreply();
+ (safe_gc_blacklist, from(), state()) ->
+ gs_reply({ok, [node()]}).
+handle_call(dbg_get_state, _, S) ->
+ {reply, S, S};
+
+handle_call({get_nodeinfo, all}, _From, #state{nodes = Nodes} = S) ->
+ {reply, {ok, Nodes}, S};
+
+handle_call(get_read_nodes, _F, #state{nodes = Nodes, read_blacklist = RB} = S) ->
+ {reply, do_get_readable_nodes(Nodes, RB), S};
+
+handle_call(gc_blacklist, _F, #state{gc_blacklist = Nodes} = S) ->
+ {reply, {ok, Nodes}, S};
+
+handle_call(gc_stats, _F, #state{gc_stats = Stats} = S) ->
+ {reply, {ok, Stats}, S};
+
+handle_call({choose_write_nodes, K, Include, Exclude}, _,
+ #state{nodes = N, write_blacklist = WBL, gc_blacklist = GBL} = S) ->
+ BL = lists:umerge(WBL, GBL),
+ {reply, do_choose_write_nodes(N, K, Include, Exclude, BL), S};
+
+handle_call({new_blob, Obj, K, Include, Exclude}, _,
+ #state{nodes = N, gc_blacklist = GBL, write_blacklist = WBL} = S) ->
+ BL = lists:umerge(WBL, GBL),
+ {reply, do_new_blob(Obj, K, Include, Exclude, BL, N), S};
+
+handle_call({tag, _M, _Tag}, _From, #state{nodes = []} = S) ->
+ {reply, {error, no_nodes}, S};
+
+handle_call({tag, M, Tag}, From, S) ->
+ {noreply, do_tag_request(M, Tag, From, S)};
+
+handle_call({get_tags, Mode}, From, #state{nodes = Nodes} = S) ->
+ spawn(fun() ->
+ gen_server:reply(From, do_get_tags(Mode, [N || {N, _} <- Nodes]))
+ end),
+ {noreply, S};
+
+handle_call({get_hosted_tags, Host}, From, S) ->
+ spawn(fun() -> gen_server:reply(From, ddfs_gc:hosted_tags(Host)) end),
+ {noreply, S};
+
+handle_call(safe_gc_blacklist, _From, #state{safe_gc_blacklist = SBL} = S) ->
+ {reply, {ok, gb_sets:to_list(SBL)}, S}.
+
+-spec handle_cast({tag_notify, ddfs_tag:cast_msg(), tagname()}
+ | {gc_blacklist, [node()]}
+ | {safe_gc_blacklist, gb_sets:set()}
+ | {update_gc_stats, gc_stats()}
+ | {update_tag_cache, gb_sets:set()}
+ | refresh_tag_cache
+ | {update_nodes, nodes_update()}
+ | {update_nodestats, gb_trees:tree()},
+ state()) -> gs_noreply().
+handle_cast({tag_notify, M, Tag}, S) ->
+ {noreply, do_tag_notify(M, Tag, S)};
+
+handle_cast({gc_blacklist, Nodes}, #state{safe_gc_blacklist = SBL} = S) ->
+ BLSet = gb_sets:from_list(Nodes),
+ NewSBL = gb_sets:intersection(BLSet, SBL),
+ {noreply, S#state{gc_blacklist = gb_sets:to_list(BLSet),
+ safe_gc_blacklist = NewSBL}};
+
+handle_cast({safe_gc_blacklist, SafeBlacklist}, #state{gc_blacklist = BL} = S) ->
+ SBL = gb_sets:intersection(SafeBlacklist, gb_sets:from_list(BL)),
+ {noreply, S#state{safe_gc_blacklist = SBL}};
+
+handle_cast({update_gc_stats, Stats}, S) ->
+ {noreply, S#state{gc_stats = {Stats, now()}}};
+
+handle_cast({update_tag_cache, TagCache}, S) ->
+ {noreply, S#state{tag_cache = TagCache}};
+
+handle_cast(refresh_tag_cache, #state{cache_refresher = Refresher} = S) ->
+ Refresher ! refresh,
+ {noreply, S};
+
+handle_cast({update_nodes, NewNodes}, S) ->
+ {noreply, do_update_nodes(NewNodes, S)};
+
+handle_cast({update_nodestats, NewNodes}, S) ->
+ {noreply, do_update_nodestats(NewNodes, S)}.
+
+-spec handle_info({'DOWN', _, _, pid(), _}, state()) -> gs_noreply().
+handle_info({'DOWN', _, _, Pid, _}, S) ->
+ {noreply, do_tag_exit(Pid, S)}.
+
+%% ===================================================================
+%% gen_server callback stubs
+
+-spec terminate(term(), state()) -> ok.
+terminate(Reason, _State) ->
+ lager:warning("DDFS master died: ~p", [Reason]).
+
+-spec code_change(term(), state(), term()) -> {ok, state()}.
+code_change(_OldVsn, State, _Extra) -> {ok, State}.
+
+%% ===================================================================
+%% internal functions
+
+-spec do_get_readable_nodes([node_info()], [node()]) ->
+ {ok, [node()], non_neg_integer()}.
+do_get_readable_nodes(Nodes, ReadBlacklist) ->
+ NodeSet = gb_sets:from_ordset(lists:sort([Node || {Node, _} <- Nodes])),
+ BlackSet = gb_sets:from_ordset(ReadBlacklist),
+ ReadableNodeSet = gb_sets:subtract(NodeSet, BlackSet),
+ {ok, gb_sets:to_list(ReadableNodeSet), gb_sets:size(BlackSet)}.
+
+-spec do_choose_write_nodes([node_info()], non_neg_integer(), [node()], [node()], [node()]) ->
+ {ok, [node()]}.
+do_choose_write_nodes(Nodes, K, Include, Exclude, BlackList) ->
+ % Include is the list of nodes that must be included
+ %
+ % Node selection algorithm:
+ % 1. try to choose K nodes randomly from all the nodes which have
+ % more than ?MIN_FREE_SPACE bytes free space available and which
+ % are not excluded or blacklisted.
+ % 2. if K nodes cannot be found this way, choose the K emptiest
+ % nodes which are not excluded or blacklisted.
+ Primary = ([N || {N, {Free, _Total}} <- Nodes, Free > ?MIN_FREE_SPACE / 1024]
+ -- (Exclude ++ BlackList)),
+ if length(Primary) >= K ->
+ {ok, Include ++ disco_util:choose_random(Primary -- Include , K - length(Include))};
+ true ->
+ Preferred = [N || {N, _} <- lists:reverse(lists:keysort(2, Nodes))],
+ Secondary = Include ++ lists:sublist(Preferred -- (Include ++ Exclude ++ BlackList),
+ K - length(Include)),
+ {ok, Secondary}
+ end.
+
+-type new_blob_result() :: too_many_replicas | {ok, [nonempty_string()]}.
+-spec do_new_blob(string()|object_name(), non_neg_integer(), [node()], [node()], [node()], [node_info()]) ->
+ new_blob_result().
+do_new_blob(_Obj, K, _Include, _Exclude, _BlackList, Nodes) when K > length(Nodes) ->
+ too_many_replicas;
+do_new_blob(Obj, K, Include, Exclude, BlackList, Nodes) ->
+ {ok, WriteNodes} = do_choose_write_nodes(Nodes, K, Include, Exclude, BlackList),
+ Urls = [["http://", disco:host(N), ":", get(put_port), "/ddfs/", Obj]
+ || N <- WriteNodes],
+ {ok, Urls}.
+
+% Tag request: Start a new tag server if one doesn't exist already. Forward
+% the request to the tag server.
+
+-spec get_tag_pid(tagname(), gb_trees:tree(), false | gb_sets:set()) ->
+ {pid(), gb_trees:tree()}.
+get_tag_pid(Tag, Tags, Cache) ->
+ case gb_trees:lookup(Tag, Tags) of
+ none ->
+ NotFound = (Cache =/= false
+ andalso not gb_sets:is_element(Tag, Cache)),
+ {ok, Server} = ddfs_tag:start(Tag, NotFound),
+ erlang:monitor(process, Server),
+ {Server, gb_trees:insert(Tag, Server, Tags)};
+ {value, P} ->
+ {P, Tags}
+ end.
+
+-spec do_tag_request(term(), tagname(), replyto(), state()) ->
+ state().
+do_tag_request(M, Tag, From, #state{tags = Tags, tag_cache = Cache} = S) ->
+ {Pid, TagsN} = get_tag_pid(Tag, Tags, Cache),
+ gen_server:cast(Pid, {M, From}),
+ S#state{tags = TagsN,
+ tag_cache = Cache =/= false andalso gb_sets:add(Tag, Cache)}.
+
+-spec do_tag_notify(term(), tagname(), state()) -> state().
+do_tag_notify(M, Tag, #state{tags = Tags, tag_cache = Cache} = S) ->
+ {Pid, TagsN} = get_tag_pid(Tag, Tags, Cache),
+ gen_server:cast(Pid, {notify, M}),
+ S#state{tags = TagsN,
+ tag_cache = Cache =/= false andalso gb_sets:add(Tag, Cache)}.
+
+-spec do_update_nodes(nodes_update(), state()) -> state().
+do_update_nodes(NewNodes, #state{nodes = Nodes, tags = Tags} = S) ->
+ WriteBlacklist = lists:sort([Node || {Node, false, _} <- NewNodes]),
+ ReadBlacklist = lists:sort([Node || {Node, _, false} <- NewNodes]),
+ OldNodes = gb_trees:from_orddict(Nodes),
+ UpdatedNodes = lists:keysort(1, [case gb_trees:lookup(Node, OldNodes) of
+ none ->
+ {Node, {0, 0}};
+ {value, OldStats} ->
+ {Node, OldStats}
+ end || {Node, _WB, _RB} <- NewNodes]),
+ if
+ UpdatedNodes =/= Nodes ->
+ _ = [gen_server:cast(Pid, {die, none}) || Pid <- gb_trees:values(Tags)],
+ spawn(fun() ->
+ {ok, ReadableNodes, RBSize} =
+ do_get_readable_nodes(UpdatedNodes, ReadBlacklist),
+ refresh_tag_cache(ReadableNodes, RBSize)
+ end),
+ S#state{nodes = UpdatedNodes,
+ write_blacklist = WriteBlacklist,
+ read_blacklist = ReadBlacklist,
+ tag_cache = false,
+ tags = gb_trees:empty()};
+ true ->
+ S#state{write_blacklist = WriteBlacklist,
+ read_blacklist = ReadBlacklist}
+ end.
+
+-spec do_update_nodestats(gb_trees:tree(), state()) -> state().
+do_update_nodestats(NewNodes, #state{nodes = Nodes} = S) ->
+ UpdatedNodes = [case gb_trees:lookup(Node, NewNodes) of
+ none ->
+ {Node, Stats};
+ {value, NewStats} ->
+ {Node, NewStats}
+ end || {Node, Stats} <- Nodes],
+ S#state{nodes = UpdatedNodes}.
+
+-spec do_tag_exit(pid(), state()) -> state().
+do_tag_exit(Pid, S) ->
+ NewTags = [X || {_, V} = X <- gb_trees:to_list(S#state.tags), V =/= Pid],
+ S#state{tags = gb_trees:from_orddict(NewTags)}.
+
+-spec do_get_tags(all | filter, [node()]) -> {[node()], [node()], [binary()]};
+ (safe, [node()]) -> {ok, [binary()]} | too_many_failed_nodes;
+ (gc, [node()]) -> {ok, [binary()], [node()]} | too_many_failed_nodes.
+do_get_tags(all, Nodes) ->
+ disco_profile:timed_run(
+ fun() ->
+ {Replies, Failed} =
+ gen_server:multi_call(Nodes, ddfs_node, get_tags, ?NODE_TIMEOUT),
+ {OkNodes, Tags} = lists:unzip(Replies),
+ {OkNodes, Failed, lists:usort(lists:flatten(Tags))}
+ end, do_get_tags_all);
+
+do_get_tags(filter, Nodes) ->
+ disco_profile:timed_run(
+ fun() ->
+ {OkNodes, Failed, Tags} = do_get_tags(all, Nodes),
+ case tag_operation(get_tagnames, <<"+deleted">>, ?NODEOP_TIMEOUT) of
+ {ok, Deleted} ->
+ TagSet = gb_sets:from_ordset(Tags),
+ DelSet = gb_sets:insert(<<"+deleted">>, Deleted),
+ NotDeleted = gb_sets:to_list(gb_sets:subtract(TagSet, DelSet)),
+ {OkNodes, Failed, NotDeleted};
+ E ->
+ E
+ end
+ end, do_get_tags_filter);
+
+do_get_tags(safe, Nodes) ->
+ disco_profile:timed_run(
+ fun() ->
+ TagMinK = list_to_integer(disco:get_setting("DDFS_TAG_MIN_REPLICAS")),
+ case do_get_tags(filter, Nodes) of
+ {_OkNodes, Failed, Tags} when length(Failed) < TagMinK ->
+ {ok, Tags};
+ _ ->
+ too_many_failed_nodes
+ end
+ end, do_get_tags_safe);
+
+% The returned tag list may include +deleted.
+do_get_tags(gc, Nodes) ->
+ disco_profile:timed_run(
+ fun() ->
+ {OkNodes, Failed, Tags} = do_get_tags(all, Nodes),
+ TagMinK = list_to_integer(disco:get_setting("DDFS_TAG_MIN_REPLICAS")),
+ case length(Failed) < TagMinK of
+ false ->
+ too_many_failed_nodes;
+ true ->
+ case tag_operation(get_tagnames, <<"+deleted">>, ?NODEOP_TIMEOUT) of
+ {ok, Deleted} ->
+ TagSet = gb_sets:from_ordset(Tags),
+ NotDeleted = gb_sets:subtract(TagSet, Deleted),
+ {ok, gb_sets:to_list(NotDeleted), OkNodes};
+ E ->
+ E
+ end
+ end
+ end, do_get_tags_gc).
+
+% Timeouts in this call, when made by the processes defined below, can
+% cause ddfs_master itself to crash, since those processes are linked to it.
+-spec safe_get_read_nodes() -> {ok, [node()], non_neg_integer()} | error.
+safe_get_read_nodes() ->
+ try get_read_nodes() of
+ {ok, _ReadableNodes, _RBSize} = RN ->
+ RN;
+ E ->
+ lager:error("unexpected response retrieving readable nodes: ~p", [E]),
+ error
+ catch
+ K:E ->
+ lager:error("error retrieving readable nodes: ~p:~p", [K, E]),
+ error
+ end.
+
+-spec monitor_diskspace() -> no_return().
+monitor_diskspace() ->
+ case safe_get_read_nodes() of
+ {ok, ReadableNodes, _RBSize} ->
+ {Space, _F} = gen_server:multi_call(ReadableNodes,
+ ddfs_node,
+ get_diskspace,
+ ?NODE_TIMEOUT),
+ update_nodestats(gb_trees:from_orddict(lists:keysort(1, Space)));
+ error ->
+ ok
+ end,
+ timer:sleep(?DISKSPACE_INTERVAL),
+ monitor_diskspace().
+
+-spec refresh_tag_cache_proc() -> no_return().
+refresh_tag_cache_proc() ->
+ case safe_get_read_nodes() of
+ {ok, ReadableNodes, RBSize} ->
+ refresh_tag_cache(ReadableNodes, RBSize);
+ error ->
+ ok
+ end,
+ receive
+ refresh ->
+ ok
+ after ?TAG_CACHE_INTERVAL ->
+ ok
+ end,
+ refresh_tag_cache_proc().
+
+-spec refresh_tag_cache([node()], non_neg_integer()) -> ok.
+refresh_tag_cache(Nodes, BLSize) ->
+ TagMinK = list_to_integer(disco:get_setting("DDFS_TAG_MIN_REPLICAS")),
+ {Replies, Failed} =
+ gen_server:multi_call(Nodes, ddfs_node, get_tags, ?NODE_TIMEOUT),
+ if Nodes =/= [], length(Failed) + BLSize < TagMinK ->
+ {_OkNodes, Tags} = lists:unzip(Replies),
+ update_tag_cache(gb_sets:from_list(lists:flatten(Tags)));
+ true -> ok
+ end.
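
The comment in do_choose_write_nodes/5 above describes a two-step selection policy: try K random nodes with enough free space, otherwise fall back to the K emptiest nodes. A simplified, self-contained sketch of that policy (the write_node_choice module is hypothetical; it omits the Include handling and uses rand instead of disco_util:choose_random/2):

    -module(write_node_choice).          % illustrative sketch, not in the patch
    -export([choose/4]).

    %% Nodes is assumed to be [{Node, FreeBytes}]; Exclude is a node list.
    %% Step 1: pick K random nodes with more than MinFree bytes free;
    %% Step 2: otherwise fall back to the K emptiest non-excluded nodes.
    choose(Nodes, K, Exclude, MinFree) ->
        Primary = [N || {N, Free} <- Nodes, Free > MinFree] -- Exclude,
        case length(Primary) >= K of
            true ->
                Tagged = lists:sort([{rand:uniform(), N} || N <- Primary]),
                lists:sublist([N || {_, N} <- Tagged], K);
            false ->
                Emptiest = [N || {N, _} <- lists:reverse(lists:keysort(2, Nodes))],
                lists:sublist(Emptiest -- Exclude, K)
        end.
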
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_tag.hrl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_tag.hrl
new file mode 100644
index 0000000000..2920b67fc5
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/ddfs_tag.hrl
@@ -0,0 +1,19 @@
+
+-type tokentype() :: 'read' | 'write'.
+-type user_attr() :: [{binary(), binary()}].
+% An 'internal' token is also used by internal consumers, but never stored.
+-type token() :: 'null' | binary().
+
+-type tagname() :: binary().
+-type tagid() :: binary().
+
+-type attrib() :: 'urls' | 'read_token' | 'write_token' | {'user', binary()}.
+
+-record(tagcontent, {id :: tagid(),
+ last_modified :: binary(),
+ read_token = null :: token(),
+ write_token = null :: token(),
+ urls = [] :: [[binary()]],
+ user = [] :: user_attr()}).
+
+-type tagcontent() :: #tagcontent{}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/gs_util.hrl b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/gs_util.hrl
new file mode 100644
index 0000000000..d579e9a7d7
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ddfs_master/gs_util.hrl
@@ -0,0 +1,16 @@
+% This is a set of type utilities to be used when spec-cing the
+% callbacks of a gen_server implementation. It should be included in
+% the impl module, which needs to define the state() type.
+
+-type gs_init() :: {ok, state()}.
+-type gs_reply(T) :: {reply, (T), state()}.
+-type gs_noreply() :: {noreply, state()}.
+-type gs_noreply_t() :: {noreply, state(), non_neg_integer()}.
+-type gs_stop(T) :: {stop, (T), state()}.
+
+% Generic utilities.
+
+-type server() :: pid() | atom() | {atom(), node()}.
+-type from() :: {pid(), term()}.
+
+-type dbg_state_msg() :: dbg_get_state.
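
The gs_util.hrl header above requires the including module to define state(). A minimal sketch of such a gen_server module (example_counter and its messages are illustrative names, not taken from the patch):

    -module(example_counter).            % illustrative module, not in the patch
    -behaviour(gen_server).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    -record(state, {count = 0 :: non_neg_integer()}).
    -type state() :: #state{}.           % the state() type gs_util.hrl relies on
    -include("gs_util.hrl").

    -spec init(_) -> gs_init().
    init(_Args) -> {ok, #state{}}.

    -spec handle_call(get, from(), state()) -> gs_reply(non_neg_integer()).
    handle_call(get, _From, #state{count = C} = S) -> {reply, C, S}.

    -spec handle_cast(bump, state()) -> gs_noreply().
    handle_cast(bump, #state{count = C} = S) -> {noreply, S#state{count = C + 1}}.

    -spec handle_info(_, state()) -> gs_noreply().
    handle_info(_Msg, S) -> {noreply, S}.

    -spec terminate(_, state()) -> ok.
    terminate(_Reason, _S) -> ok.

    -spec code_change(_, state(), _) -> {ok, state()}.
    code_change(_OldVsn, S, _Extra) -> {ok, S}.
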
diff --git a/lib/dialyzer/test/small_SUITE_data/src/fun2ms.erl b/lib/dialyzer/test/small_SUITE_data/src/fun2ms.erl
new file mode 100644
index 0000000000..9e7df85e4c
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/fun2ms.erl
@@ -0,0 +1,21 @@
+-module(fun2ms).
+-export([return/0]).
+-include_lib("stdlib/include/ms_transform.hrl").
+
+-record(snapshot, {id :: integer(), arg1 :: atom(), arg2 :: tuple()}).
+
+return() ->
+ TableId = ets:new(table, [public, {keypos, #snapshot.id}]),
+
+ ets:insert(TableId, [#snapshot{id = 1, arg1 = hard, arg2 = {1,2}},
+ #snapshot{id = 2, arg1 = rock, arg2 = {1,2}},
+ #snapshot{id = 3, arg1 = hallelujah, arg2 =
+ {1,2}}]),
+
+
+ Example = ets:fun2ms(
+ fun(#snapshot{id = Arg1, arg1 = Arg2}) ->
+ {Arg1, Arg2}
+ end),
+
+ ets:select(TableId, Example).
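
The fun2ms test above relies on the ms_transform parse transform turning the fun into an ordinary match specification. For orientation, a roughly equivalent hand-written form is sketched below (it assumes the #snapshot{} record and TableId from fun2ms.erl; the exact output of ets:fun2ms/1 may differ):

    %% With keypos #snapshot.id, each stored record is the tuple
    %% {snapshot, Id, Arg1, Arg2}; the fun above therefore corresponds
    %% roughly to this hand-written match specification:
    MatchSpec = [{{snapshot, '$1', '$2', '_'}, [], [{{'$1', '$2'}}]}],
    ets:select(TableId, MatchSpec).
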
diff --git a/lib/dialyzer/test/small_SUITE_data/src/literals.erl b/lib/dialyzer/test/small_SUITE_data/src/literals.erl
new file mode 100644
index 0000000000..abd7033712
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/literals.erl
@@ -0,0 +1,33 @@
+-module(literals).
+
+%% Bad records inside structures used to be ignored. The reason:
+%% v3_core:unfold() does not annotate the parts of a literal.
+%% This example does not yet work perfectly, in particular for maps.
+
+-export([t1/0, t2/0, t3/0, t4/0, m1/1, m2/1, m3/1, m4/1]).
+
+-record(r, {id :: integer}).
+
+t1() ->
+ #r{id = a}. % violation
+
+t2() ->
+ [#r{id = a}]. % violation
+
+t3() ->
+ {#r{id = a}}. % violation
+
+t4() ->
+ #{a => #r{id = a}}. % violation found, but t4() returns... (bug)
+
+m1(#r{id = a}) -> % violation
+ ok.
+
+m2([#r{id = a}]) -> % violation
+ ok.
+
+m3({#r{id = a}}) -> % can never match; not so good
+ ok.
+
+m4(#{a := #r{id = a}}) -> % violation not found
+ ok.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps_difftype.erl b/lib/dialyzer/test/small_SUITE_data/src/maps_difftype.erl
new file mode 100644
index 0000000000..19e61a7944
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/maps_difftype.erl
@@ -0,0 +1,11 @@
+%%
+%% File: maps_difftype.erl
+%% Author: Björn-Egil Dahlberg
+%% Created: 2014-04-29
+%%
+-module(maps_difftype).
+
+-export([empty_mismatch/1]).
+
+empty_mismatch(Tuple) when is_tuple(Tuple) ->
+ case Tuple of #{} -> ok end.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/confusing_record_warning.erl b/lib/dialyzer/test/small_SUITE_data/src/relevant_record_warning.erl
index 8af74e0914..3ff65458df 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/confusing_record_warning.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/relevant_record_warning.erl
@@ -1,3 +1,7 @@
+%% Formerly confusing_record_warning.erl.
+%% The warning output is relevant as of Erlang/OTP 17.1.
+%% The original comment is kept below.
+
%%---------------------------------------------------------------------
%% A user complained that dialyzer produces a weird warning for the
%% following program. I explained to him that there is an implicit
@@ -9,7 +13,7 @@
%% The pattern {'r', [_]} can never match the type any()
%% We should clearly give some less confusing warning in this case.
%%---------------------------------------------------------------------
--module(confusing_record_warning).
+-module(relevant_record_warning).
-export([test/1]).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/remote_field.erl b/lib/dialyzer/test/small_SUITE_data/src/remote_field.erl
new file mode 100644
index 0000000000..c34fa1b9dd
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/remote_field.erl
@@ -0,0 +1,11 @@
+-module(remote_field).
+
+-type f(T) :: {ssl:sslsocket(), T}.
+
+-record(r1, { f1 :: f(_) }).
+-type r1(T) :: #r1{ f1 :: fun((ssl:sslsocket(), T) -> any()) }.
+
+-record(state, {
+ r :: r1(T),
+ arg :: T
+ }).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/remote_field2.erl b/lib/dialyzer/test/small_SUITE_data/src/remote_field2.erl
new file mode 100644
index 0000000000..35687e22ec
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/remote_field2.erl
@@ -0,0 +1,17 @@
+-module(remote_field2).
+
+-export([handle_cast/2]).
+
+-record(state, {tcp_socket :: inet:socket()}).
+
+-spec handle_cast(_,_) ->
+ {noreply,_} |
+ {stop,{shutdown,connection_closed},
+ #state{tcp_socket :: port()}}.
+handle_cast({send, Message}, #state{tcp_socket = TCPSocket} = State) ->
+ case gen_tcp:send(TCPSocket, Message) of
+ ok ->
+ {noreply, State};
+ {error, closed} ->
+ {stop, {shutdown, connection_closed}, State}
+ end.