Diffstat (limited to 'lib')
-rw-r--r--  lib/compiler/src/beam_asm.erl | 29
-rw-r--r--  lib/compiler/src/beam_dict.erl | 12
-rw-r--r--  lib/compiler/src/compile.erl | 21
-rw-r--r--  lib/compiler/test/compile_SUITE.erl | 31
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple.erl | 5
-rw-r--r--  lib/compiler/test/lc_SUITE.erl | 2
-rw-r--r--  lib/crypto/doc/src/crypto.xml | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_analysis_callgraph.erl | 43
-rw-r--r--  lib/dialyzer/src/dialyzer_callgraph.erl | 109
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl | 38
-rw-r--r--  lib/dialyzer/src/dialyzer_codeserver.erl | 117
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl | 82
-rw-r--r--  lib/dialyzer/src/dialyzer_coordinator.erl | 65
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl | 36
-rw-r--r--  lib/dialyzer/src/dialyzer_succ_typings.erl | 8
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl | 103
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl | 115
-rw-r--r--  lib/dialyzer/src/dialyzer_worker.erl | 53
-rw-r--r--  lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options | 2
-rw-r--r--  lib/dialyzer/test/map_SUITE_data/dialyzer_options | 1
-rw-r--r--  lib/dialyzer/test/opaque_SUITE_data/dialyzer_options | 2
-rw-r--r--  lib/hipe/cerl/erl_types.erl | 119
-rw-r--r--  lib/inets/src/http_server/httpd_request_handler.erl | 4
-rw-r--r--  lib/inets/src/inets_app/inets.appup.src | 4
-rw-r--r--  lib/kernel/doc/src/kernel_app.xml | 59
-rw-r--r--  lib/kernel/doc/src/os.xml | 31
-rw-r--r--  lib/kernel/src/Makefile | 1
-rw-r--r--  lib/kernel/src/code.erl | 12
-rw-r--r--  lib/kernel/src/erl_signal_handler.erl | 57
-rw-r--r--  lib/kernel/src/error_logger.erl | 8
-rw-r--r--  lib/kernel/src/file.erl | 2
-rw-r--r--  lib/kernel/src/kernel.app.src | 1
-rw-r--r--  lib/kernel/src/kernel.erl | 13
-rw-r--r--  lib/kernel/src/os.erl | 11
-rw-r--r--  lib/kernel/src/rpc.erl | 14
-rw-r--r--  lib/kernel/test/code_SUITE.erl | 2
-rw-r--r--  lib/kernel/test/error_logger_SUITE.erl | 13
-rw-r--r--  lib/kernel/test/multi_load_SUITE.erl | 8
-rw-r--r--  lib/kernel/test/rpc_SUITE.erl | 12
-rw-r--r--  lib/mnesia/doc/src/notes.xml | 18
-rw-r--r--  lib/mnesia/src/mnesia_checkpoint.erl | 7
-rw-r--r--  lib/mnesia/src/mnesia_event.erl | 3
-rw-r--r--  lib/mnesia/test/mnesia_evil_backup.erl | 43
-rw-r--r--  lib/mnesia/vsn.mk | 2
-rw-r--r--  lib/ssh/src/ssh.app.src | 1
-rw-r--r--  lib/ssh/test/ssh_algorithms_SUITE.erl | 2
-rw-r--r--  lib/ssh/test/ssh_protocol_SUITE.erl | 41
-rw-r--r--  lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test | 4
-rw-r--r--  lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli | 5
-rw-r--r--  lib/ssh/test/ssh_test_lib.erl | 7
-rw-r--r--  lib/ssl/src/ssl_config.erl | 48
-rw-r--r--  lib/ssl/src/ssl_connection.erl | 39
-rw-r--r--  lib/ssl/src/ssl_manager.erl | 28
-rw-r--r--  lib/ssl/src/ssl_pem_cache.erl | 2
-rw-r--r--  lib/ssl/src/ssl_pkix_db.erl | 17
-rw-r--r--  lib/stdlib/src/beam_lib.erl | 54
-rw-r--r--  lib/stdlib/src/binary.erl | 2
-rw-r--r--  lib/stdlib/src/dets.erl | 7
-rw-r--r--  lib/stdlib/src/ets.erl | 18
-rw-r--r--  lib/stdlib/test/beam_lib_SUITE.erl | 45
-rw-r--r--  lib/stdlib/test/dets_SUITE.erl | 9
-rw-r--r--  lib/stdlib/test/erl_scan_SUITE.erl | 15
-rw-r--r--  lib/typer/src/typer.erl | 36
63 files changed, 1117 insertions, 583 deletions
diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl
index a2f5dc674c..2fc2850591 100644
--- a/lib/compiler/src/beam_asm.erl
+++ b/lib/compiler/src/beam_asm.erl
@@ -21,7 +21,7 @@
-module(beam_asm).
--export([module/4]).
+-export([module/5]).
-export([encode/2]).
-export_type([fail/0,label/0,reg/0,src/0,module_code/0,function_name/0]).
@@ -57,20 +57,20 @@
-type module_code() ::
{module(),[_],[_],[asm_function()],pos_integer()}.
--spec module(module_code(), exports(), [_], [compile:option()]) ->
+-spec module(module_code(), exports(), [_], [compile:option()], [compile:option()]) ->
{'ok',binary()}.
-module(Code, Abst, SourceFile, Opts) ->
- {ok,assemble(Code, Abst, SourceFile, Opts)}.
+module(Code, Abst, SourceFile, Opts, CompilerOpts) ->
+ {ok,assemble(Code, Abst, SourceFile, Opts, CompilerOpts)}.
-assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, Abst, SourceFile, Opts) ->
+assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, Abst, SourceFile, Opts, CompilerOpts) ->
{1,Dict0} = beam_dict:atom(Mod, beam_dict:new()),
{0,Dict1} = beam_dict:fname(atom_to_list(Mod) ++ ".erl", Dict0),
NumFuncs = length(Asm0),
{Asm,Attr} = on_load(Asm0, Attr0),
Exp = cerl_sets:from_list(Exp0),
{Code,Dict2} = assemble_1(Asm, Exp, Dict1, []),
- build_file(Code, Attr, Dict2, NumLabels, NumFuncs, Abst, SourceFile, Opts).
+ build_file(Code, Attr, Dict2, NumLabels, NumFuncs, Abst, SourceFile, Opts, CompilerOpts).
on_load(Fs0, Attr0) ->
case proplists:get_value(on_load, Attr0) of
@@ -113,7 +113,7 @@ assemble_function([H|T], Acc, Dict0) ->
assemble_function([], Code, Dict) ->
{Code, Dict}.
-build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
+build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts, CompilerOpts) ->
%% Create the code chunk.
CodeChunk = chunk(<<"Code">>,
@@ -125,9 +125,9 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
Code),
%% Create the atom table chunk.
-
- {NumAtoms, AtomTab} = beam_dict:atom_table(Dict),
- AtomChunk = chunk(<<"Atom">>, <<NumAtoms:32>>, AtomTab),
+ AtomEncoding = atom_encoding(CompilerOpts),
+ {NumAtoms, AtomTab} = beam_dict:atom_table(Dict, AtomEncoding),
+ AtomChunk = chunk(atom_chunk_name(AtomEncoding), <<NumAtoms:32>>, AtomTab),
%% Create the import table chunk.
@@ -203,6 +203,15 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
end,
build_form(<<"BEAM">>, Chunks).
+atom_encoding(Opts) ->
+ case proplists:get_bool(no_utf8_atoms, Opts) of
+ false -> utf8;
+ true -> latin1
+ end.
+
+atom_chunk_name(utf8) -> <<"AtU8">>;
+atom_chunk_name(latin1) -> <<"Atom">>.
+
%% finalize_fun_table(Essentials, MD5) -> FinalizedEssentials
%% Update the 'old_uniq' field in the entry for each fun in the
%% 'FunT' chunk. We'll use part of the MD5 for the module as a
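The patch writes atoms to a new "AtU8" chunk encoded in UTF-8 unless no_utf8_atoms is given, in which case the old latin-1 "Atom" chunk is kept. A sketch (not part of the patch) of how the effect can be observed from the outside, assuming beam_lib:chunks/2 is given chunk names as strings so that the raw chunks come back; the module and function names are hypothetical:

-module(atom_chunk_check).
-export([encoding/1]).

%% Report which atom chunk a compiled BEAM binary carries.
encoding(Beam) ->
    case beam_lib:chunks(Beam, ["AtU8"]) of
        {ok, {_Mod, [{"AtU8", _}]}} ->
            utf8;                                  %% new UTF-8 atom chunk
        {error, beam_lib, {missing_chunk, _, _}} ->
            {ok, {_Mod, [{"Atom", _}]}} = beam_lib:chunks(Beam, ["Atom"]),
            latin1                                 %% old latin-1 atom chunk
    end.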
diff --git a/lib/compiler/src/beam_dict.erl b/lib/compiler/src/beam_dict.erl
index 719d799fd7..990e86062a 100644
--- a/lib/compiler/src/beam_dict.erl
+++ b/lib/compiler/src/beam_dict.erl
@@ -24,7 +24,7 @@
-export([new/0,opcode/2,highest_opcode/1,
atom/2,local/4,export/4,import/4,
string/2,lambda/3,literal/2,line/2,fname/2,
- atom_table/1,local_table/1,export_table/1,import_table/1,
+ atom_table/2,local_table/1,export_table/1,import_table/1,
string_table/1,lambda_table/1,literal_table/1,
line_table/1]).
@@ -197,15 +197,15 @@ fname(Name, #asm{fnames=Fnames}=Dict) ->
end.
%% Returns the atom table.
-%% atom_table(Dict) -> {LastIndex,[Length,AtomString...]}
--spec atom_table(bdict()) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
+%% atom_table(Dict, Encoding) -> {LastIndex,[Length,AtomString...]}
+-spec atom_table(bdict(), latin1 | utf8) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
-atom_table(#asm{atoms=Atoms}) ->
+atom_table(#asm{atoms=Atoms}, Encoding) ->
NumAtoms = maps:size(Atoms),
Sorted = lists:keysort(2, maps:to_list(Atoms)),
{NumAtoms,[begin
- L = atom_to_list(A),
- [length(L)|L]
+ L = atom_to_binary(A, Encoding),
+ [byte_size(L),L]
end || {A,_} <- Sorted]}.
%% Returns the table of local functions.
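Each atom-table entry changes from a latin-1 character list prefixed by its length to a binary in the requested encoding prefixed by its byte size. A minimal sketch of one entry (hypothetical module name, not part of the patch):

-module(atom_entry_demo).
-export([entry/2]).

%% Mirrors the [Size, Binary] element built by atom_table/2 above.
entry(Atom, Encoding) when Encoding =:= latin1; Encoding =:= utf8 ->
    Bin = atom_to_binary(Atom, Encoding),
    [byte_size(Bin), Bin].

%% entry(foo, utf8)   -> [3,<<"foo">>]
%% entry(foo, latin1) -> [3,<<"foo">>]
%% Atoms containing code points above 255 cannot be encoded in
%% latin-1 at all (atom_to_binary/2 raises badarg), which is why such
%% atoms need the UTF-8 "AtU8" chunk.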
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 069add7890..dcd962df66 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -214,11 +214,21 @@ expand_opt(report, Os) ->
expand_opt(return, Os) ->
[return_errors,return_warnings|Os];
expand_opt(r12, Os) ->
- [no_recv_opt,no_line_info|Os];
+ [no_recv_opt,no_line_info,no_utf8_atoms|Os];
expand_opt(r13, Os) ->
- [no_recv_opt,no_line_info|Os];
+ [no_recv_opt,no_line_info,no_utf8_atoms|Os];
expand_opt(r14, Os) ->
- [no_line_info|Os];
+ [no_line_info,no_utf8_atoms|Os];
+expand_opt(r15, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r16, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r17, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r18, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r19, Os) ->
+ [no_utf8_atoms|Os];
expand_opt({debug_info_key,_}=O, Os) ->
[encrypt_debug_info,O|Os];
expand_opt(no_float_opt, Os) ->
@@ -1376,13 +1386,14 @@ encrypt({des3_cbc=Type,Key,IVec,BlockSize}, Bin0) ->
save_core_code(Code, St) ->
{ok,Code,St#compile{core_code=cerl:from_records(Code)}}.
-beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,mod_options=Opts0}=St) ->
+beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,
+ options=CompilerOpts,mod_options=Opts0}=St) ->
Source = paranoid_absname(File),
Opts1 = lists:map(fun({debug_info_key,_}) -> {debug_info_key,'********'};
(Other) -> Other
end, Opts0),
Opts2 = [O || O <- Opts1, effects_code_generation(O)],
- case beam_asm:module(Code0, Abst, Source, Opts2) of
+ case beam_asm:module(Code0, Abst, Source, Opts2, CompilerOpts) of
{ok,Code} -> {ok,Code,St#compile{abstract_code=[]}}
end.
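Every backward-compatibility option from r12 through r19 now implies no_utf8_atoms, so modules built for older runtimes keep the latin-1 "Atom" chunk. A usage sketch (not part of the patch), where Forms is assumed to be the abstract code of a module my_mod whose atoms are all representable in latin-1:

{ok, my_mod, Beam} = compile:forms(Forms, [binary, r19]),
%% r19 expanded to [no_utf8_atoms], so Beam carries the old "Atom"
%% chunk. If some atom cannot be encoded in latin-1, the compilation
%% returns 'error' instead, as the new compile_SUITE test below shows.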
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index 8c09414a52..8d7facd727 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -30,7 +30,7 @@
file_1/1, forms_2/1, module_mismatch/1, big_file/1, outdir/1,
binary/1, makedep/1, cond_and_ifdef/1, listings/1, listings_big/1,
other_output/1, kernel_listing/1, encrypted_abstr/1,
- strict_record/1,
+ strict_record/1, utf8_atoms/1,
cover/1, env/1, core/1,
core_roundtrip/1, asm/1, optimized_guards/1,
sys_pre_attributes/1, dialyzer/1,
@@ -48,7 +48,7 @@ all() ->
[app_test, appup_test, file_1, forms_2, module_mismatch, big_file, outdir,
binary, makedep, cond_and_ifdef, listings, listings_big,
other_output, kernel_listing, encrypted_abstr,
- strict_record,
+ strict_record, utf8_atoms,
cover, env, core, core_roundtrip, asm, optimized_guards,
sys_pre_attributes, dialyzer, warnings, pre_load_check,
env_compiler_options].
@@ -450,8 +450,10 @@ do_kernel_listing({M,A}) ->
try
{ok,M,Kern} = compile:forms(A, [to_kernel]),
IoList = v3_kernel_pp:format(Kern),
- _ = iolist_size(IoList),
- ok
+ case unicode:characters_to_binary(IoList) of
+ Bin when is_binary(Bin) ->
+ ok
+ end
catch
throw:{error,Error} ->
io:format("*** compilation failure '~p' for module ~s\n",
@@ -680,6 +682,23 @@ test_sloppy() ->
{1,2} = record_access:test(Turtle),
Turtle.
+utf8_atoms(Config) when is_list(Config) ->
+ Anno = erl_anno:new(1),
+ Atom = binary_to_atom(<<"こんにちは"/utf8>>, utf8),
+ Forms = [{attribute,Anno,compile,[export_all]},
+ {function,Anno,atom,0,[{clause,Anno,[],[],[{atom,Anno,Atom}]}]}],
+
+ Utf8AtomForms = [{attribute,Anno,module,utf8_atom}|Forms],
+ {ok,utf8_atom,Utf8AtomBin} =
+ compile:forms(Utf8AtomForms, [binary]),
+ {ok,{utf8_atom,[{atoms,_}]}} =
+ beam_lib:chunks(Utf8AtomBin, [atoms]),
+ code:load_binary(utf8_atom, "compile_SUITE", Utf8AtomBin),
+ Atom = utf8_atom:atom(),
+
+ NoUtf8AtomForms = [{attribute,Anno,module,no_utf8_atom}|Forms],
+ error = compile:forms(NoUtf8AtomForms, [binary, r19]).
+
env(Config) when is_list(Config) ->
{Simple,Target} = get_files(Config, simple, env),
{ok,Cwd} = file:get_cwd(),
@@ -751,7 +770,7 @@ do_core_1(M, A, Outdir) ->
{ok,M,Core0} = compile:forms(A, [to_core]),
CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
CorePP = core_pp:format(Core0),
- ok = file:write_file(CoreFile, CorePP),
+ ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)),
%% Parse the .core file and return the result as Core Erlang Terms.
Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of
@@ -823,7 +842,7 @@ do_core_roundtrip_1(Mod, Abstr, Outdir) ->
do_core_roundtrip_2(M, Core0, Outdir) ->
CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
CorePP = core_pp:format_all(Core0),
- ok = file:write_file(CoreFile, CorePP),
+ ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)),
%% Parse the .core file and return the result as Core Erlang Terms.
Core2 = case compile:file(CoreFile, [report_errors,from_core,
diff --git a/lib/compiler/test/compile_SUITE_data/simple.erl b/lib/compiler/test/compile_SUITE_data/simple.erl
index d8324dafaf..9385d101e0 100644
--- a/lib/compiler/test/compile_SUITE_data/simple.erl
+++ b/lib/compiler/test/compile_SUITE_data/simple.erl
@@ -19,7 +19,7 @@
%%
-module(simple).
--export([test/0]).
+-export([test/0,unicode/0]).
-ifdef(need_foo).
-export([foo/0]).
@@ -28,6 +28,9 @@
test() ->
passed.
+unicode() ->
+ {"это",'спутник'}.
+
%% Conditional inclusion.
%% Compile with [{d, need_foo}, {d, foo_value, 42}].
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index adb96fb87d..76dfaee482 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -227,7 +227,7 @@ effect(Config) when is_list(Config) ->
lc_SUITE ->
_ = [{'EXIT',{badarg,_}} =
(catch binary_to_atom(<<C/utf8>>, utf8)) ||
- C <- lists:seq(16#10000, 16#FFFFF)];
+ C <- lists:seq(16#FF10000, 16#FFFFFFF)];
_ ->
ok
end,
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index cbf141b3b0..a4b34657ba 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -103,7 +103,7 @@
<code>dh_private() = key_value() </code>
- <code>dh_params() = [key_value()] = [P, G] </code>
+ <code>dh_params() = [key_value()] = [P, G] | [P, G, PrivateKeyBitLength]</code>
<code>ecdh_public() = key_value() </code>
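The updated type documents that Diffie-Hellman parameters may carry an optional third element, the desired private-key length in bits. A hedged usage sketch (not part of the documented change), with P and G standing for a suitable prime and generator:

%% Generate an ephemeral DH key pair; the optional third element asks
%% for a 256-bit private key instead of one sized from P.
{PublicKey, PrivateKey} = crypto:generate_key(dh, [P, G, 256]).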
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index ae1e4d8c38..aeeb895a0c 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -114,7 +114,6 @@ loop(#server_state{parent = Parent} = State,
%% The Analysis
%%--------------------------------------------------------------------
-%% Calls to erlang:garbage_collect() help to reduce the heap size.
analysis_start(Parent, Analysis, LegalWarnings) ->
CServer = dialyzer_codeserver:new(),
Plt = Analysis#analysis.plt,
@@ -136,11 +135,9 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
%% Remote type postprocessing
NewCServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0),
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer0, Plt),
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0),
- OldRecords = dialyzer_plt:get_types(Plt),
OldExpTypes0 = dialyzer_plt:get_exported_types(Plt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
RemMods =
[case Analysis#analysis.start_from of
byte_code -> list_to_atom(filename:basename(F, ".beam"));
@@ -148,25 +145,20 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
end || F <- Files],
OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0),
MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
- erlang:garbage_collect(),
+ erlang:garbage_collect(), % reduce heap size
?timing(State#analysis_state.timing_server, "remote",
contracts_and_records(TmpCServer2))
catch
throw:{error, _ErrorMsg} = Error -> exit(Error)
end,
- NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
- ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer),
- NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes),
- State0 = State#analysis_state{plt = NewPlt1},
- dump_callgraph(Callgraph, State0, Analysis),
+ dump_callgraph(Callgraph, State, Analysis),
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
- Plt1_a = dialyzer_plt:delete_list(NewPlt1, AllNodes),
+ Plt1_a = dialyzer_plt:delete_list(Plt, AllNodes),
Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer),
- State1 = State0#analysis_state{codeserver = NewCServer, plt = Plt1},
+ State1 = State#analysis_state{codeserver = NewCServer, plt = Plt1},
Exports = dialyzer_codeserver:get_exports(NewCServer),
NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
NonExportsList = sets:to_list(NonExports),
@@ -176,14 +168,17 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
false -> Callgraph
end,
State2 = analyze_callgraph(NewCallgraph, State1),
- #analysis_state{plt = MiniPlt2, doc_plt = DocPlt} = State2,
+ #analysis_state{plt = MiniPlt2,
+ doc_plt = DocPlt,
+ codeserver = Codeserver0} = State2,
+ {Codeserver, MiniPlt3} = move_data(Codeserver0, MiniPlt2),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
%% Since the PLT is never used, a dummy is sent:
DummyPlt = dialyzer_plt:new(),
- send_codeserver_plt(Parent, CServer, DummyPlt),
- MiniPlt3 = dialyzer_plt:delete_list(MiniPlt2, NonExportsList),
- send_analysis_done(Parent, MiniPlt3, DocPlt).
+ send_codeserver_plt(Parent, Codeserver, DummyPlt),
+ MiniPlt4 = dialyzer_plt:delete_list(MiniPlt3, NonExportsList),
+ send_analysis_done(Parent, MiniPlt4, DocPlt).
contracts_and_records(CodeServer) ->
Fun = contrs_and_recs(CodeServer),
@@ -200,15 +195,20 @@ contracts_and_records(CodeServer) ->
contrs_and_recs(TmpCServer2) ->
fun() ->
Parent = receive {Pid, go} -> Pid end,
- {TmpCServer3, RecordDict} =
- dialyzer_utils:process_record_remote_types(TmpCServer2),
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
TmpServer4 =
- dialyzer_contracts:process_contract_remote_types(TmpCServer3,
- RecordDict),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3),
dialyzer_codeserver:give_away(TmpServer4, Parent),
exit(TmpServer4)
end.
+move_data(CServer, MiniPlt) ->
+ {CServer1, Records} = dialyzer_codeserver:extract_records(CServer),
+ MiniPlt1 = dialyzer_plt:insert_types(MiniPlt, Records),
+ {NewCServer, ExpTypes} = dialyzer_codeserver:extract_exported_types(CServer1),
+ NewMiniPlt = dialyzer_plt:insert_exported_types(MiniPlt1, ExpTypes),
+ {NewCServer, NewMiniPlt}.
+
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
plt = Plt,
@@ -603,6 +603,7 @@ send_ext_types(Parent, ExtTypes) ->
ok.
send_codeserver_plt(Parent, CServer, Plt) ->
+ ok = dialyzer_codeserver:give_away(CServer, Parent),
Parent ! {self(), cserver, CServer, Plt},
ok.
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index 68f3d7a240..bb02bc8c0d 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -40,7 +40,7 @@
module_postorder_from_funs/2,
new/0,
get_depends_on/2,
- get_required_by/2,
+ %% get_required_by/2,
in_neighbours/2,
renew_race_info/4,
renew_race_code/2,
@@ -250,12 +250,12 @@ get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) ->
get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) ->
digraph:out_neighbours(DG, SCC).
--spec get_required_by(scc() | module(), callgraph()) -> [scc()].
+%% -spec get_required_by(scc() | module(), callgraph()) -> [scc()].
-get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
- lookup_scc(SCC, In, Maps);
-get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
- digraph:in_neighbours(DG, SCC).
+%% get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
+%% lookup_scc(SCC, In, Maps);
+%% get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
+%% digraph:in_neighbours(DG, SCC).
lookup_scc(SCC, Table, Maps) ->
case ets_lookup_dict({'scc', SCC}, Maps) of
@@ -285,9 +285,11 @@ module_postorder(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new([acyclic]),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> _ = digraph:add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
- {digraph_utils:topsort(MDG), {'d', MDG}}.
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ {lists:reverse(digraph_utils:topsort(MDG)), {'d', MDG}}.
edge_fold({{M1,_,_},{M2,_,_}}, Set) ->
case M1 =/= M2 of
@@ -305,7 +307,7 @@ module_deps(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new(),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> check_add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG, N))}
|| N <- sets:to_list(Nodes)],
@@ -552,9 +554,21 @@ digraph_add_edge(From, To, DG) ->
false -> digraph:add_vertex(DG, To);
{To, _} -> ok
end,
- digraph:add_edge(DG, {From, To}, From, To, []),
+ check_add_edge(DG, {From, To}, From, To, []),
ok.
+check_add_edge(G, V1, V2) ->
+ case digraph:add_edge(G, V1, V2) of
+ {error, Error} -> exit({add_edge, V1, V2, Error});
+ _Edge -> ok
+ end.
+
+check_add_edge(G, E, V1, V2, L) ->
+ case digraph:add_edge(G, E, V1, V2, L) of
+ {error, Error} -> exit({add_edge, E, V1, V2, L, Error});
+ _Edge -> ok
+ end.
+
digraph_confirm_vertices([MFA|Left], DG) ->
digraph:add_vertex(DG, MFA, confirmed),
digraph_confirm_vertices(Left, DG);
@@ -762,28 +776,53 @@ to_ps(#callgraph{} = CG, File, Args) ->
ok.
condensation(G) ->
- SCCs = digraph_utils:strong_components(G),
- %% Assign unique numbers to SCCs:
- Ints = lists:seq(1, length(SCCs)),
- IntToSCC = lists:zip(Ints, SCCs),
- IntScc = sofs:relation(IntToSCC, [{int, scc}]),
- %% Subsitute strong components for vertices in edges using the
- %% unique numbers:
- C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
- I2V = sofs:relative_product(IntScc, C2V), % [{v, int}]
- Es = sofs:relation(digraph:edges(G), [{v, v}]),
- R1 = sofs:relative_product(I2V, Es),
- R2 = sofs:relative_product(I2V, sofs:converse(R1)),
- %% Create in- and out-neighbours:
- In = sofs:relation_to_family(sofs:strict_relation(R2)),
- R3 = sofs:converse(R2),
- Out = sofs:relation_to_family(sofs:strict_relation(R3)),
- [OutETS, InETS, MapsETS] =
- [ets:new(Name,[{read_concurrency, true}]) ||
- Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
- ets:insert(OutETS, sofs:to_external(Out)),
- ets:insert(InETS, sofs:to_external(In)),
- %% Create mappings from SCCs to unique integers, and the inverse:
- ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)),
- ets:insert(MapsETS, IntToSCC),
- {{'e', OutETS, InETS, MapsETS}, SCCs}.
+ erlang:garbage_collect(), % reduce heap size
+ {Pid, Ref} = erlang:spawn_monitor(do_condensation(G, self())),
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ {SCCInts, OutETS, InETS, MapsETS} = Result,
+ NewSCCs = [ets:lookup_element(MapsETS, SCCInt, 2) || SCCInt <- SCCInts],
+ {{'e', OutETS, InETS, MapsETS}, NewSCCs}
+ end.
+
+-spec do_condensation(digraph:graph(), pid()) -> fun(() -> no_return()).
+
+do_condensation(G, Parent) ->
+ fun() ->
+ [OutETS, InETS, MapsETS] =
+ [ets:new(Name,[{read_concurrency, true}]) ||
+ Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
+ SCCs = digraph_utils:strong_components(G),
+ %% Assign unique numbers to SCCs:
+ Ints = lists:seq(1, length(SCCs)),
+ IntToSCC = lists:zip(Ints, SCCs),
+ IntScc = sofs:relation(IntToSCC, [{int, scc}]),
+ %% Create mapping from unique integers to SCCs:
+ ets:insert(MapsETS, IntToSCC),
+ %% Substitute strong components for vertices in edges using the
+ %% unique numbers:
+ C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
+ I2V = sofs:relative_product(IntScc, C2V), % [{v, int}]
+ Es = sofs:relation(digraph:edges(G), [{v, v}]),
+ R1 = sofs:relative_product(I2V, Es),
+ R2 = sofs:relative_product(I2V, sofs:converse(R1)),
+ R2Strict = sofs:strict_relation(R2),
+ %% Create out-neighbours:
+ Out = sofs:relation_to_family(sofs:converse(R2Strict)),
+ ets:insert(OutETS, sofs:to_external(Out)),
+ %% Sort the SCCs topologically:
+ DG = sofs:family_to_digraph(Out),
+ lists:foreach(fun(I) -> digraph:add_vertex(DG, I) end, Ints),
+ SCCInts0 = digraph_utils:topsort(DG),
+ digraph:delete(DG),
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ SCCInts = lists:reverse(SCCInts0),
+ %% Create in-neighbours:
+ In = sofs:relation_to_family(R2Strict),
+ ets:insert(InETS, sofs:to_external(In)),
+ %% Create mapping from SCCs to unique integers:
+ ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)),
+ lists:foreach(fun(E) -> true = ets:give_away(E, Parent, any)
+ end, [OutETS, InETS, MapsETS]),
+ exit({SCCInts, OutETS, InETS, MapsETS})
+ end.
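condensation/1 now performs the sofs computations in a throw-away process: the worker builds the ETS tables, hands them over with ets:give_away/3, and returns its small summary in the exit reason, which the parent reads from the 'DOWN' message, so all intermediate heap data dies with the worker. A stripped-down sketch of that pattern (module, table and function names are hypothetical, not part of the patch):

-module(offload_demo).
-export([run/1]).

run(Keys) ->
    Parent = self(),
    {Pid, Ref} =
        erlang:spawn_monitor(
          fun() ->
                  Tab = ets:new(offload_result, [public]),
                  true = ets:insert(Tab, [{K, expensive(K)} || K <- Keys]),
                  %% Hand the table to the parent before terminating so
                  %% that it outlives this process.
                  true = ets:give_away(Tab, Parent, any),
                  %% The summary travels back in the exit reason.
                  exit({done, Tab, length(Keys)})
          end),
    receive
        {'DOWN', Ref, process, Pid, {done, Tab, Count}} ->
            %% (give_away has also queued an 'ETS-TRANSFER' message in
            %% this mailbox; it is simply left there in this sketch)
            {Tab, Count}
    end.

expensive(K) -> K.  %% stand-in for the real computation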
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 158ee761af..8500c59ebe 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -30,6 +30,8 @@
-record(cl_state,
{backend_pid :: pid() | 'undefined',
+ code_server = none :: 'none'
+ | dialyzer_codeserver:codeserver(),
erlang_mode = false :: boolean(),
external_calls = [] :: [mfa()],
external_types = [] :: [mfa()],
@@ -630,6 +632,9 @@ cl_loop(State, LogCache) ->
{BackendPid, warnings, Warnings} ->
NewState = store_warnings(State, Warnings),
cl_loop(NewState, LogCache);
+ {BackendPid, cserver, CodeServer, _Plt} -> % Plt is ignored
+ NewState = State#cl_state{code_server = CodeServer},
+ cl_loop(NewState, LogCache);
{BackendPid, done, NewMiniPlt, _NewDocPlt} ->
return_value(State, NewMiniPlt);
{BackendPid, ext_calls, ExtCalls} ->
@@ -647,7 +652,6 @@ cl_loop(State, LogCache) ->
cl_error(State, Msg);
_Other ->
%% io:format("Received ~p\n", [_Other]),
- %% Note: {BackendPid, cserver, CodeServer, Plt} is ignored.
cl_loop(State, LogCache)
end.
@@ -688,18 +692,34 @@ cl_error(State, Msg) ->
maybe_close_output_file(State),
throw({dialyzer_error, lists:flatten(Msg)}).
-return_value(State = #cl_state{erlang_mode = ErlangMode,
+return_value(State = #cl_state{code_server = CodeServer,
+ erlang_mode = ErlangMode,
mod_deps = ModDeps,
output_plt = OutputPlt,
plt_info = PltInfo,
stored_warnings = StoredWarnings},
MiniPlt) ->
+ %% Just for now:
+ case CodeServer =:= none of
+ true ->
+ ok;
+ false ->
+ dialyzer_codeserver:delete(CodeServer)
+ end,
case OutputPlt =:= none of
true ->
dialyzer_plt:delete(MiniPlt);
false ->
- Plt = dialyzer_plt:restore_full_plt(MiniPlt),
- dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
+ Fun = to_file_fun(OutputPlt, MiniPlt, ModDeps, PltInfo),
+ {Pid, Ref} = erlang:spawn_monitor(Fun),
+ dialyzer_plt:give_away(MiniPlt, Pid),
+ Pid ! go,
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ case Result of
+ ok -> ok;
+ Thrown -> throw(Thrown)
+ end
+ end
end,
UnknownWarnings = unknown_warnings(State),
RetValue =
@@ -720,6 +740,16 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
{RetValue, set_warning_id(AllWarnings)}
end.
+-spec to_file_fun(_, _, _, _) -> fun(() -> no_return()).
+
+to_file_fun(Filename, MiniPlt, ModDeps, PltInfo) ->
+ fun() ->
+ receive go -> ok end,
+ Plt = dialyzer_plt:restore_full_plt(MiniPlt),
+ dialyzer_plt:to_file(Filename, Plt, ModDeps, PltInfo),
+ exit(ok)
+ end.
+
unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) ->
Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of
true ->
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index f53c713bfe..a1a7370eff 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -26,18 +26,21 @@
give_away/2,
finalize_contracts/1,
finalize_exported_types/2,
- finalize_records/2,
+ finalize_records/1,
get_contracts/1,
get_callbacks/1,
get_exported_types/1,
+ extract_exported_types/1,
get_exports/1,
- get_records/1,
+ get_records_table/1,
+ extract_records/1,
get_next_core_label/1,
get_temp_contracts/2,
- contracts_modules/1,
+ all_temp_modules/1,
store_contracts/4,
get_temp_exported_types/1,
- get_temp_records/1,
+ get_temp_records_table/1,
+ lookup_temp_mod_records/2,
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
@@ -52,7 +55,6 @@
lookup_meta_info/2,
new/0,
set_next_core_label/2,
- set_temp_records/2,
store_temp_records/3,
translate_fake_file/3]).
@@ -67,10 +69,8 @@
-type set_ets() :: ets:tid().
-type types() :: erl_types:type_table().
--type mod_records() :: erl_types:mod_records().
-type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}.
--type mod_contracts() :: dict:dict(module(), contracts()).
%% A property-list of data compiled from -compile and -dialyzer attributes.
-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
@@ -80,8 +80,8 @@
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
- exported_types :: set_ets(), % set(mfa())
- records :: map_ets(),
+ exported_types :: 'clean' | set_ets(), % set(mfa())
+ records :: 'clean' | map_ets(),
contracts :: map_ets(),
callbacks :: map_ets(),
fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
@@ -107,9 +107,6 @@ ets_map_store(Key, Element, Table) ->
true = ets:insert(Table, {Key, Element}),
Table.
-ets_dict_store_dict(Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)).
-
ets_dict_to_dict(Table) ->
Fold = fun({Key,Value}, Dict) -> dict:store(Key, Value, Dict) end,
ets:foldl(Fold, dict:new(), Table).
@@ -164,11 +161,8 @@ new() ->
-spec delete(codeserver()) -> 'ok'.
-delete(#codeserver{code = Code, exported_types = ExportedTypes,
- records = Records, contracts = Contracts,
- callbacks = Callbacks}) ->
- lists:foreach(fun ets:delete/1,
- [Code, ExportedTypes, Records, Contracts, Callbacks]).
+delete(CServer) ->
+ lists:foreach(fun(Table) -> true = ets:delete(Table) end, tables(CServer)).
-spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver().
@@ -222,6 +216,11 @@ is_exported(MFA, #codeserver{exports = Exports}) ->
get_exported_types(#codeserver{exported_types = ExpTypes}) ->
ets_set_to_set(ExpTypes).
+-spec extract_exported_types(codeserver()) -> {codeserver(), set_ets()}.
+
+extract_exported_types(#codeserver{exported_types = ExpTypes} = CS) ->
+ {CS#codeserver{exported_types = 'clean'}, ExpTypes}.
+
-spec get_exports(codeserver()) -> sets:set(mfa()).
get_exports(#codeserver{exports = Exports}) ->
@@ -269,10 +268,15 @@ lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) ->
{ok, Map} -> Map
end.
--spec get_records(codeserver()) -> mod_records().
+-spec get_records_table(codeserver()) -> map_ets().
+
+get_records_table(#codeserver{records = RecDict}) ->
+ RecDict.
-get_records(#codeserver{records = RecDict}) ->
- ets_dict_to_dict(RecDict).
+-spec extract_records(codeserver()) -> {codeserver(), map_ets()}.
+
+extract_records(#codeserver{records = RecDict} = CS) ->
+ {CS#codeserver{records = clean}, RecDict}.
-spec store_temp_records(module(), types(), codeserver()) -> codeserver().
@@ -283,26 +287,26 @@ store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS)
false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)}
end.
--spec get_temp_records(codeserver()) -> mod_records().
+-spec get_temp_records_table(codeserver()) -> map_ets().
-get_temp_records(#codeserver{temp_records = TempRecDict}) ->
- ets_dict_to_dict(TempRecDict).
+get_temp_records_table(#codeserver{temp_records = TempRecDict}) ->
+ TempRecDict.
--spec set_temp_records(mod_records(), codeserver()) -> codeserver().
+-spec lookup_temp_mod_records(module(), codeserver()) -> types().
-set_temp_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- TempRecords = ets:new(dialyzer_codeserver_temp_records,[]),
- true = ets_dict_store_dict(Dict, TempRecords),
- CS#codeserver{temp_records = TempRecords}.
+lookup_temp_mod_records(Mod, #codeserver{temp_records = TempRecDict}) ->
+ case ets_dict_find(Mod, TempRecDict) of
+ error -> maps:new();
+ {ok, Map} -> Map
+ end.
--spec finalize_records(mod_records(), codeserver()) -> codeserver().
+-spec finalize_records(codeserver()) -> codeserver().
-finalize_records(Dict, #codeserver{temp_records = TmpRecords,
- records = Records} = CS) ->
- true = ets:delete(TmpRecords),
- true = ets_dict_store_dict(Dict, Records),
- CS#codeserver{temp_records = clean}.
+finalize_records(#codeserver{temp_records = TmpRecords,
+ records = Records} = CS) ->
+ true = ets:delete(Records),
+ ets:rename(TmpRecords, dialyzer_codeserver_records),
+ CS#codeserver{temp_records = clean, records = TmpRecords}.
-spec lookup_mod_contracts(atom(), codeserver()) -> contracts().
@@ -331,10 +335,13 @@ lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
{ok, PropList} -> PropList
end.
--spec get_contracts(codeserver()) -> mod_contracts().
+-spec get_contracts(codeserver()) ->
+ dict:dict(mfa(), dialyzer_contracts:file_contract()).
get_contracts(#codeserver{contracts = ContDict}) ->
- ets_dict_to_dict(ContDict).
+ dict:filter(fun({_M, _F, _A}, _) -> true;
+ (_, _) -> false
+ end, ets_dict_to_dict(ContDict)).
-spec get_callbacks(codeserver()) -> list().
@@ -348,12 +355,14 @@ store_temp_contracts(Mod, SpecMap, CallbackMap,
#codeserver{temp_contracts = Cn,
temp_callbacks = Cb} = CS)
when is_atom(Mod) ->
+ %% Make sure Mod is stored even if there are no callbacks or
+ %% contracts.
CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)},
CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}.
--spec contracts_modules(codeserver()) -> [module()].
+-spec all_temp_modules(codeserver()) -> [module()].
-contracts_modules(#codeserver{temp_contracts = TempContTable}) ->
+all_temp_modules(#codeserver{temp_contracts = TempContTable}) ->
ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]).
-spec store_contracts(module(), contracts(), contracts(), codeserver()) ->
@@ -380,17 +389,25 @@ get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict,
-spec give_away(codeserver(), pid()) -> 'ok'.
-give_away(#codeserver{temp_records = TempRecords,
- temp_contracts = TempContracts,
- temp_callbacks = TempCallbacks,
- records = Records,
- contracts = Contracts,
- callbacks = Callbacks}, Pid) ->
- _ = [true = ets:give_away(Table, Pid, any) ||
- Table <- [TempRecords, TempContracts, TempCallbacks,
- Records, Contracts, Callbacks],
- Table =/= clean],
- ok.
+give_away(CServer, Pid) ->
+ lists:foreach(fun(Table) -> true = ets:give_away(Table, Pid, any)
+ end, tables(CServer)).
+
+tables(#codeserver{code = Code,
+ fun_meta_info = FunMetaInfo,
+ exports = Exports,
+ temp_exported_types = TempExpTypes,
+ temp_records = TempRecords,
+ temp_contracts = TempContracts,
+ temp_callbacks = TempCallbacks,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks}) ->
+ [Table || Table <- [Code, FunMetaInfo, Exports, TempExpTypes,
+ TempRecords, TempContracts, TempCallbacks,
+ ExportedTypes, Records, Contracts, Callbacks],
+ Table =/= clean].
-spec finalize_contracts(codeserver()) -> codeserver().
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 2078e58ce8..5f24b5a668 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -24,7 +24,7 @@
get_contract_return/2,
%% get_contract_signature/1,
is_overloaded/1,
- process_contract_remote_types/2,
+ process_contract_remote_types/1,
store_tmp_contract/5]).
-export_type([file_contract/0, plt_contracts/0]).
@@ -139,18 +139,18 @@ sequence([], _Delimiter) -> "";
sequence([H], _Delimiter) -> H;
sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
--spec process_contract_remote_types(dialyzer_codeserver:codeserver(),
- erl_types:mod_records()) ->
+-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) ->
dialyzer_codeserver:codeserver().
-process_contract_remote_types(CodeServer, RecordDict) ->
- Mods = dialyzer_codeserver:contracts_modules(CodeServer),
+process_contract_remote_types(CodeServer) ->
+ Mods = dialyzer_codeserver:all_temp_modules(CodeServer),
+ RecordTable = dialyzer_codeserver:get_records_table(CodeServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
ContractFun =
fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) ->
#tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract,
{NewCs, C2} = lists:mapfoldl(fun(CFun, C1) ->
- CFun(ExpTypes, RecordDict, C1)
+ CFun(ExpTypes, RecordTable, C1)
end, C0, CFuns),
Args = general_domain(NewCs),
Contract = #contract{contracts = NewCs, args = Args, forms = Forms},
@@ -177,7 +177,7 @@ process_contract_remote_types(CodeServer, RecordDict) ->
-type fun_types() :: dict:dict(label(), erl_types:type_table()).
--spec check_contracts([{mfa(), file_contract()}],
+-spec check_contracts(orddict:orddict(mfa(), file_contract()),
dialyzer_callgraph:callgraph(), fun_types(),
opaques_fun()) -> plt_contracts().
@@ -206,7 +206,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) ->
error -> NewContracts
end
end,
- dict:fold(FoldFun, [], FunTypes).
+ orddict:from_list(dict:fold(FoldFun, [], FunTypes)).
%% Checks all components of a contract
-spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}.
@@ -451,10 +451,10 @@ contract_from_form(Forms, MFA, RecDict, FileLine) ->
contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords, Cache) ->
+ fun(ExpTypes, RecordTable, Cache) ->
{NewType, NewCache} =
try
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache)
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
@@ -472,12 +472,12 @@ contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
MFA, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords, Cache) ->
+ fun(ExpTypes, RecordTable, Cache) ->
{Constr1, VarTable, Cache1} =
- process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords,
+ process_constraints(Constr, MFA, RecDict, ExpTypes, RecordTable,
Cache),
{NewType, NewCache} =
- from_form_with_check(Form, ExpTypes, MFA, AllRecords,
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable,
VarTable, Cache1),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{{NewTypeNoVars, Constr1}, NewCache}
@@ -488,28 +488,28 @@ contract_from_form([{type, _L1, bounded_fun,
contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+process_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
{Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes,
- AllRecords, Cache),
+ RecordTable, Cache),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords, NewCache).
+ constraints_fixpoint(Init, MFA, RecDict, ExpTypes, RecordTable, NewCache).
-initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
- initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
+ initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable,
Cache, []).
-initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+initialize_constraints([], _MFA, _RecDict, _ExpTypes, _RecordTable,
Cache, Acc) ->
{Acc, Cache};
-initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
+initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, RecordTable,
Cache, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
VarTable = erl_types:var_table__new(),
{T1, NewCache} =
- final_form(Type1, ExpTypes, MFA, AllRecords, VarTable, Cache),
+ final_form(Type1, ExpTypes, MFA, RecordTable, VarTable, Cache),
Entry = {T1, Type2},
- initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ initialize_constraints(Rest, MFA, RecDict, ExpTypes, RecordTable,
NewCache, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
@@ -517,18 +517,18 @@ initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
VarTable = erl_types:var_table__new(),
{VarTab, NewCache} =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTable, Cache),
constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, NewCache).
+ RecordTable, NewCache).
constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, Cache) ->
+ RecordTable, Cache) ->
{NewVarTab, NewCache} =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
OldVarTab, Cache),
case NewVarTab of
OldVarTab ->
@@ -540,38 +540,38 @@ constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
{FinalConstrs, NewVarTab, NewCache};
_Other ->
constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, NewCache)
+ RecordTable, NewCache)
end.
-final_form(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
+final_form(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) ->
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) ->
VarTable = erl_types:var_table__new(),
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
Site = {spec, MFA},
- C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords,
+ C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable,
VarTable, Cache),
- erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarTable, C1).
+ erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1).
-constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache) ->
{Subtypes, NewCache} =
- constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache, []),
{insert_constraints(Subtypes), NewCache}.
-constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _RecordTable,
_VarTab, Cache, Acc) ->
{Acc, Cache};
-constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, AllRecords,
+constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache, Acc) ->
{T2, NewCache} =
- final_form(Form2, ExpTypes, MFA, AllRecords, VarTab, Cache),
+ final_form(Form2, ExpTypes, MFA, RecordTable, VarTab, Cache),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_subs(Rest, MFA, RecDict, ExpTypes, RecordTable,
VarTab, NewCache, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl
index 99f95a4dca..7c1bc1de5a 100644
--- a/lib/dialyzer/src/dialyzer_coordinator.erl
+++ b/lib/dialyzer/src/dialyzer_coordinator.erl
@@ -76,6 +76,8 @@
active = 0 :: integer(),
result :: result(),
next_label = 0 :: integer(),
+ jobs :: [job()],
+ job_fun :: fun(),
init_data :: init_data(),
regulator :: regulator(),
scc_to_pid :: scc_to_pid()
@@ -108,16 +110,18 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
false -> unused
end,
Coordinator = {Collector, Regulator, SCCtoPID},
- Fold =
- fun(Job, Count) ->
- Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
- case TypesigOrDataflow of
- true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok;
- false -> ok
- end,
- Count + 1
+ JobFun =
+ fun(Job) ->
+ Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
+ case TypesigOrDataflow of
+ true -> true = ets:insert(SCCtoPID, {Job, Pid});
+ false -> true
+ end
end,
- JobCount = lists:foldl(Fold, 0, Jobs),
+ JobCount = length(Jobs),
+ NumberOfInitJobs = min(JobCount, 20 * dialyzer_utils:parallelism()),
+ {InitJobs, RestJobs} = lists:split(NumberOfInitJobs, Jobs),
+ lists:foreach(JobFun, InitJobs),
Unit =
case Mode of
'typesig' -> "SCCs";
@@ -129,11 +133,13 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
'compile' -> dialyzer_analysis_callgraph:compile_init_result();
_ -> []
end,
- #state{mode = Mode, active = JobCount, result = InitResult, next_label = 0,
- init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
+ #state{mode = Mode, active = JobCount, result = InitResult,
+ next_label = 0, job_fun = JobFun, jobs = RestJobs,
+ init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
collect_result(#state{mode = Mode, active = Active, result = Result,
next_label = NextLabel, init_data = InitData,
+ jobs = JobsLeft, job_fun = JobFun,
regulator = Regulator, scc_to_pid = SCCtoPID} = State) ->
receive
{next_label_request, Estimation, Pid} ->
@@ -141,20 +147,35 @@ collect_result(#state{mode = Mode, active = Active, result = Result,
collect_result(State#state{next_label = NextLabel + Estimation});
{done, Job, Data} ->
NewResult = update_result(Mode, InitData, Job, Data, Result),
+ TypesigOrDataflow = (Mode =:= 'typesig') orelse (Mode =:= 'dataflow'),
case Active of
1 ->
kill_regulator(Regulator),
case Mode of
'compile' ->
{NewResult, NextLabel};
- X when X =:= 'typesig'; X =:= 'dataflow' ->
+ _ when TypesigOrDataflow ->
ets:delete(SCCtoPID),
NewResult;
'warnings' ->
NewResult
end;
N ->
- collect_result(State#state{result = NewResult, active = N - 1})
+ case TypesigOrDataflow of
+ true -> true = ets:delete(SCCtoPID, Job);
+ false -> true
+ end,
+ NewJobsLeft =
+ case JobsLeft of
+ [] -> [];
+ [NewJob|JobsLeft1] ->
+ JobFun(NewJob),
+ JobsLeft1
+ end,
+ NewState = State#state{result = NewResult,
+ jobs = NewJobsLeft,
+ active = N - 1},
+ collect_result(NewState)
end
end.
@@ -170,18 +191,20 @@ update_result(Mode, InitData, Job, Data, Result) ->
end.
-spec sccs_to_pids([scc() | module()], coordinator()) ->
- {[dialyzer_worker:worker()], [scc() | module()]}.
+ [dialyzer_worker:worker()].
sccs_to_pids(SCCs, {_Collector, _Regulator, SCCtoPID}) ->
Fold =
- fun(SCC, {Pids, Unknown}) ->
- try ets:lookup_element(SCCtoPID, SCC, 2) of
- Result -> {[Result|Pids], Unknown}
- catch
- _:_ -> {Pids, [SCC|Unknown]}
- end
+ fun(SCC, Pids) ->
+ %% The SCCs that SCC depends on have always been started.
+ try ets:lookup_element(SCCtoPID, SCC, 2) of
+ Pid when is_pid(Pid) ->
+ [Pid|Pids]
+ catch
+ _:_ -> Pids
+ end
end,
- lists:foldl(Fold, {[], []}, SCCs).
+ lists:foldl(Fold, [], SCCs).
-spec job_done(job(), job_result(), coordinator()) -> ok.
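The coordinator no longer launches one worker per job up front: it starts at most 20 * dialyzer_utils:parallelism() jobs and starts one queued job each time an active one reports done, keeping the number of live workers bounded. A condensed sketch of the scheme (not part of the patch; names are hypothetical, MaxActive is assumed to be at least 1, and the real code threads the queue through #state{}):

-module(throttle_demo).
-export([run/3]).

%% StartJob is a fun that spawns one worker which eventually sends
%% {done, Job, Result} back to this process.
run(Jobs, MaxActive, StartJob) ->
    {First, Queued} = lists:split(min(MaxActive, length(Jobs)), Jobs),
    lists:foreach(StartJob, First),
    collect(length(Jobs), Queued, StartJob, []).

collect(0, [], _StartJob, Acc) ->
    lists:reverse(Acc);
collect(Remaining, Queued0, StartJob, Acc) ->
    receive
        {done, _Job, Result} ->
            %% Refill the pool: one queued job per completed job, so
            %% at most MaxActive workers are ever alive.
            Queued = case Queued0 of
                         [] -> [];
                         [Next|Rest] -> StartJob(Next), Rest
                     end,
            collect(Remaining - 1, Queued, StartJob, [Result|Acc])
    end.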
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 37c22fef48..eb63e9e695 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -31,9 +31,8 @@
included_files/1,
from_file/1,
get_default_plt/0,
- get_types/1,
+ get_module_types/2,
get_exported_types/1,
- %% insert/3,
insert_list/2,
insert_contract_list/2,
insert_callbacks/2,
@@ -143,6 +142,10 @@ delete_list(#plt{info = Info, types = Types,
-spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt().
+insert_contract_list(#plt{contracts = Contracts} = PLT, List) ->
+ NewContracts = dict:merge(fun(_MFA, _Old, New) -> New end,
+ Contracts, dict:from_list(List)),
+ PLT#plt{contracts = NewContracts};
insert_contract_list(#mini_plt{contracts = Contracts} = PLT, List) ->
true = ets:insert(Contracts, List),
PLT.
@@ -184,20 +187,23 @@ lookup(Plt, Label) when is_integer(Label) ->
lookup_1(#mini_plt{info = Info}, MFAorLabel) ->
ets_table_lookup(Info, MFAorLabel).
--spec insert_types(plt(), erl_types:mod_records()) -> plt().
+-spec insert_types(plt(), ets:tid()) -> plt().
-insert_types(PLT, Rec) ->
- PLT#plt{types = Rec}.
+insert_types(MiniPLT, Records) ->
+ ets:rename(Records, plt_types),
+ MiniPLT#mini_plt{types = Records}.
--spec insert_exported_types(plt(), sets:set()) -> plt().
+-spec insert_exported_types(plt(), ets:tid()) -> plt().
-insert_exported_types(PLT, Set) ->
- PLT#plt{exported_types = Set}.
+insert_exported_types(MiniPLT, ExpTypes) ->
+ ets:rename(ExpTypes, plt_exported_types),
+ MiniPLT#mini_plt{exported_types = ExpTypes}.
--spec get_types(plt()) -> erl_types:mod_records().
+-spec get_module_types(plt(), atom()) ->
+ 'none' | {'value', erl_types:type_table()}.
-get_types(#plt{types = Types}) ->
- Types.
+get_module_types(#plt{types = Types}, M) when is_atom(M) ->
+ table_lookup(Types, M).
-spec get_exported_types(plt()) -> sets:set().
@@ -520,10 +526,12 @@ get_mini_plt(#plt{info = Info,
contracts = Contracts,
callbacks = Callbacks,
exported_types = ExpTypes}) ->
- [ETSInfo, ETSTypes, ETSContracts, ETSCallbacks, ETSExpTypes] =
+ [ETSInfo, ETSContracts] =
[ets:new(Name, [public]) ||
- Name <- [plt_info, plt_types, plt_contracts, plt_callbacks,
- plt_exported_types]],
+ Name <- [plt_info, plt_contracts]],
+ [ETSTypes, ETSCallbacks, ETSExpTypes] =
+ [ets:new(Name, [compressed, public]) ||
+ Name <- [plt_types, plt_callbacks, plt_exported_types]],
CallbackList = dict:to_list(Callbacks),
CallbacksByModule =
[{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} ||
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 3c90f46e95..be685baf22 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -29,7 +29,7 @@
-export([
find_succ_types_for_scc/2,
refine_one_module/2,
- find_required_by/2,
+ %% find_required_by/2,
find_depends_on/2,
collect_warnings/2,
lookup_names/2
@@ -236,10 +236,10 @@ refine_succ_typings(Modules, #st{codeserver = Codeserver,
find_depends_on(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
dialyzer_callgraph:get_depends_on(SCC, Callgraph).
--spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
+%% -spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
-find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
- dialyzer_callgraph:get_required_by(SCC, Callgraph).
+%% find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
+%% dialyzer_callgraph:get_required_by(SCC, Callgraph).
-spec lookup_names([label()], fixpoint_init_data()) -> [mfa_or_funlbl()].
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index b33484bda4..c4f8adf7ee 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -81,7 +81,7 @@
-record(constraint_list, {type :: 'conj' | 'disj',
list :: [constr()],
deps :: deps(),
- masks = maps:new() :: #{dep() => mask()},
+ masks :: #{dep() => mask()} | 'undefined',
id :: {'list', dep()} | 'undefined'}).
-type constraint_list() :: #constraint_list{}.
@@ -181,7 +181,6 @@ analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) ->
M <- lists:usort([M || {M, _, _} <- SCC])],
State2 = traverse_scc(SCC, CServer, DefSet, ModRecs, State1),
State3 = state__finalize(State2),
- erlang:garbage_collect(),
Funs = state__scc(State3),
pp_constrs_scc(Funs, State3),
constraints_to_dot_scc(Funs, State3),
@@ -202,7 +201,8 @@ traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, ModRecs, AccState) ->
{M, Rec} = lists:keyfind(M, 1, ModRecs),
TmpState1 = state__set_rec_dict(AccState, Rec),
DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)),
- {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1),
+ TmpState2 = state__new_constraint_context(TmpState1),
+ {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2),
traverse_scc(Left, Codeserver, DefSet, ModRecs, NewAccState);
traverse_scc([], _Codeserver, _DefSet, _ModRecs, AccState) ->
AccState.
@@ -2080,6 +2080,8 @@ v2_solve_disjunct(Disj, Map, V2State0) ->
var_occurs_everywhere(V, Masks, NotFailed) ->
ordsets:is_subset(NotFailed, get_mask(V, Masks)).
+-dialyzer({no_improper_lists, [v2_solve_disj/10, v2_solve_conj/12]}).
+
v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval,
Failed0) ->
Id = C#constraint_list.id,
@@ -2098,6 +2100,12 @@ v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval,
end;
v2_solve_disj([], [], _I, _Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
{ok, V2State, lists:reverse(Eval), UL, MapL, lists:reverse(Uneval), Failed};
+v2_solve_disj(every_i, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I|every_i]
+ end,
+ v2_solve_disj(NewIs, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed);
v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) ->
Uneval = [{I,C#constraint_list.id} ||
not is_failed_list(C, V2State)] ++ Uneval0,
@@ -2169,7 +2177,7 @@ v2_solve_conj([I|Is], [Cs|Tail], I, Map0, Conj, IsFlat, V2State0,
M = lists:keydelete(I, 1, vars_per_child(U, Masks)),
{V2State2, NewF0} = save_updated_vars_list(AllCs, M, V2State1),
{NewF, F} = lists:splitwith(fun(J) -> J < I end, NewF0),
- Is1 = lists:umerge(Is, F),
+ Is1 = umerge_mask(Is, F),
NewFs = [NewF|NewFs0],
v2_solve_conj(Is1, Tail, I+1, Map, Conj, IsFlat, V2State2,
[U|UL], NewFs, VarsUp, LastMap, LastFlags)
@@ -2191,6 +2199,14 @@ v2_solve_conj([], _Cs, _I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
v2_solve_conj(NewFlags, Cs, 1, Map, Conj, IsFlat, V2State,
[], [], [U|VarsUp], Map, NewFlags)
end;
+v2_solve_conj(every_i, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I|every_i]
+ end,
+ v2_solve_conj(NewIs, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags);
v2_solve_conj(Is, [_|Tail], I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
LastMap, LastFlags) ->
v2_solve_conj(Is, Tail, I+1, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
@@ -2207,7 +2223,12 @@ report_detected_loop(_) ->
add_mask_to_flags(Flags, [Im|M], I, L) when I > Im ->
add_mask_to_flags(Flags, M, I, [Im|L]);
add_mask_to_flags(Flags, [_|M], _I, L) ->
- {lists:umerge(M, Flags), lists:reverse(L)}.
+ {umerge_mask(Flags, M), lists:reverse(L)}.
+
+umerge_mask(every_i, _F) ->
+ every_i;
+umerge_mask(Is, F) ->
+ lists:umerge(Is, F).
get_mask(V, Masks) ->
case maps:find(V, Masks) of
@@ -2221,7 +2242,7 @@ get_flags(#v2_state{constr_data = ConData}=V2State0, C) ->
error ->
?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]),
V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)},
- {V2State, lists:seq(1, length(Cs))};
+ {V2State, every_i};
{ok, failed} ->
{V2State0, failed_list};
{ok, {Part,U}} when U =/= [] ->
@@ -2901,8 +2922,9 @@ state__get_rec_var(Fun, #state{fun_map = Map}) ->
maps:find(Fun, Map).
state__finalize(State) ->
- State1 = enumerate_constraints(State),
- order_fun_constraints(State1).
+ State1 = state__new_constraint_context(State),
+ State2 = enumerate_constraints(State1),
+ order_fun_constraints(State2).
%% ============================================================================
%%
@@ -2982,7 +3004,7 @@ find_constraint_deps([Type|Tail], Acc) ->
NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc],
find_constraint_deps(Tail, NewAcc);
find_constraint_deps([], Acc) ->
- lists:flatten(Acc).
+ lists:append(Acc).
mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs ->
#constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps};
@@ -3090,8 +3112,8 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) ->
List1 = [C || C <- List, is_simple_constraint(C)],
%% Just an assert.
[] = [C || #constraint{} = C <- List1],
- Expanded = lists:flatten([expand_to_conjunctions(C)
- || #constraint_list{} = C <- List]),
+ Expanded = lists:append([expand_to_conjunctions(C)
+ || #constraint_list{} = C <- List]),
ReturnList = Expanded ++ List1,
if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
true -> ReturnList
@@ -3116,8 +3138,10 @@ calculate_deps(List) ->
calculate_deps([H|Tail], Acc) ->
Deps = get_deps(H),
calculate_deps(Tail, [Deps|Acc]);
+calculate_deps([], []) -> [];
+calculate_deps([], [L]) -> L;
calculate_deps([], Acc) ->
- ordsets:from_list(lists:flatten(Acc)).
+ lists:umerge(Acc).
mk_conj_constraint_list(List) ->
mk_constraint_list(conj, List).
@@ -3185,7 +3209,8 @@ order_fun_constraints(State) ->
order_fun_constraints([#constraint_ref{id = Id}|Tail], State) ->
Cs = state__get_cs(Id, State),
- {[NewCs], State1} = order_fun_constraints([Cs], [], [], State),
+ {[Cs1], State1} = order_fun_constraints([Cs], [], [], State),
+ NewCs = Cs1#constraint_list{deps = Cs#constraint_list.deps},
NewState = state__store_constrs(Id, NewCs, State1),
order_fun_constraints(Tail, NewState);
order_fun_constraints([], State) ->
@@ -3193,23 +3218,31 @@ order_fun_constraints([], State) ->
order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, [C|Funs], Acc, State);
-order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail],
+order_fun_constraints([#constraint_list{list = List,
+ type = Type,
+ masks = OldMasks} = C|Tail],
Funs, Acc, State) ->
- {NewList, NewState} =
- case Type of
- conj -> order_fun_constraints(List, [], [], State);
- disj ->
- FoldFun = fun(X, AccState) ->
- {[NewX], NewAccState} =
- order_fun_constraints([X], [], [], AccState),
- {NewX, NewAccState}
- end,
- lists:mapfoldl(FoldFun, State, List)
- end,
- C1 = update_constraint_list(C, NewList),
- Masks = calculate_masks(NewList, 1, []),
- NewAcc = [update_masks(C1, Masks)|Acc],
- order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ case OldMasks of
+ undefined ->
+ {NewList, NewState} =
+ case Type of
+ conj -> order_fun_constraints(List, [], [], State);
+ disj ->
+ FoldFun = fun(X, AccState) ->
+ {[NewX], NewAccState} =
+ order_fun_constraints([X], [], [], AccState),
+ {NewX, NewAccState}
+ end,
+ lists:mapfoldl(FoldFun, State, List)
+ end,
+ NewList2 = reset_deps(NewList, State),
+ C1 = update_constraint_list(C, NewList2),
+ Masks = calculate_masks(NewList, 1, []),
+ NewAcc = [update_masks(C1, Masks)|Acc],
+ order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ M when is_map(M) ->
+ order_fun_constraints(Tail, Funs, [C|Acc], State)
+ end;
order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, Funs, [C|Acc], State);
order_fun_constraints([], Funs, Acc, State) ->
@@ -3219,6 +3252,18 @@ order_fun_constraints([], Funs, Acc, State) ->
update_masks(C, Masks) ->
C#constraint_list{masks = Masks}.
+reset_deps(ConstrList, #state{solvers = Solvers}) ->
+ case lists:member(v1, Solvers) of
+ true ->
+ ConstrList;
+ false ->
+ [reset_deps(Constr) || Constr <- ConstrList]
+ end.
+
+reset_deps(#constraint{}=C) -> C#constraint{deps = []};
+reset_deps(#constraint_list{}=C) -> C#constraint_list{deps = []};
+reset_deps(#constraint_ref{}=C) -> C#constraint_ref{deps = []}.
+
calculate_masks([C|Cs], I, L0) ->
calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0);
calculate_masks([], _I, L) ->
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 432d27571b..9eaf95c1a2 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -37,9 +37,9 @@
get_fun_meta_info/3,
is_suppressed_fun/2,
is_suppressed_tag/3,
- merge_records/2,
pp_hook/0,
process_record_remote_types/1,
+ merge_types/2,
sets_filter/2,
src_compiler_opts/0,
refold_pattern/1,
@@ -188,7 +188,6 @@ get_core_from_abstract_code(AbstrCode, Opts) ->
%% ============================================================================
-type type_table() :: erl_types:type_table().
--type mod_records() :: dict:dict(module(), type_table()).
-spec get_record_and_type_info(abstract_code()) ->
{'ok', type_table()} | {'error', string()}.
@@ -289,18 +288,18 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) ->
get_record_fields([], _RecDict, Acc) ->
lists:reverse(Acc).
--spec process_record_remote_types(codeserver()) ->
- {codeserver(), mod_records()}.
+-spec process_record_remote_types(codeserver()) -> codeserver().
%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
- TempRecords = dialyzer_codeserver:get_temp_records(CServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
- TempRecords1 = process_opaque_types0(TempRecords, ExpTypes),
- %% A cache (not the field type cache) is used for speeding things up a bit.
+ Mods = dialyzer_codeserver:all_temp_modules(CServer),
+ process_opaque_types0(Mods, CServer, ExpTypes),
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun({Module, Record}) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -313,7 +312,7 @@ process_record_remote_types(CServer) ->
{FieldT, C6} =
erl_types:t_from_form
(Field, ExpTypes, Site,
- TempRecords1, VarTable,
+ RecordTable, VarTable,
C5),
{{FieldName, Field, FieldT}, C6}
end, C4, Fields),
@@ -328,30 +327,29 @@ process_record_remote_types(CServer) ->
end,
Cache = erl_types:cache__new(),
{RecordList, _NewCache} =
- lists:mapfoldl(RecordFun, Cache, maps:to_list(Record)),
- {Module, maps:from_list(RecordList)}
+ lists:mapfoldl(RecordFun, Cache, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- NewRecordsList = lists:map(ModuleFun, dict:to_list(TempRecords1)),
- NewRecords = dict:from_list(NewRecordsList),
- check_record_fields(NewRecords, ExpTypes),
- {dialyzer_codeserver:finalize_records(NewRecords, CServer), NewRecords}.
+ lists:foreach(ModuleFun, Mods),
+ check_record_fields(Mods, CServer, ExpTypes),
+ dialyzer_codeserver:finalize_records(CServer).
%% erl_types:t_from_form() substitutes the declaration of opaque types
%% for the expanded type in some cases. To make sure the initial type,
%% any(), is not used, the expansion is done twice.
%% XXX: Recursive opaque types are not handled well.
-process_opaque_types0(TempRecords0, TempExpTypes) ->
- Cache = erl_types:cache__new(),
- {TempRecords1, Cache1} =
- process_opaque_types(TempRecords0, TempExpTypes, Cache),
- {TempRecords, _NewCache} =
- process_opaque_types(TempRecords1, TempExpTypes, Cache1),
- TempRecords.
-
-process_opaque_types(TempRecords, TempExpTypes, Cache) ->
+process_opaque_types0(AllModules, CServer, TempExpTypes) ->
+ process_opaque_types(AllModules, CServer, TempExpTypes),
+ process_opaque_types(AllModules, CServer, TempExpTypes).
+
+process_opaque_types(AllModules, CServer, TempExpTypes) ->
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun({Module, Record}, C0) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -360,32 +358,32 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) ->
Site = {type, {Module, Name, NArgs}},
{Type, C3} =
erl_types:t_from_form(Form, TempExpTypes, Site,
- TempRecords, VarTable, C2),
+ RecordTable, VarTable, C2),
{{Key, {F, Type}}, C3};
_Other -> {{Key, Value}, C2}
end
end,
- {RecordList, C1} =
- lists:mapfoldl(RecordFun, C0, maps:to_list(Record)),
- {{Module, maps:from_list(RecordList)}, C1}
- %% dict:map(RecordFun, Record)
+ C0 = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, C0, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- {TempRecordList, NewCache} =
- lists:mapfoldl(ModuleFun, Cache, dict:to_list(TempRecords)),
- {dict:from_list(TempRecordList), NewCache}.
- %% dict:map(ModuleFun, TempRecords).
+ lists:foreach(ModuleFun, AllModules).
-check_record_fields(Records, TempExpTypes) ->
- Cache = erl_types:cache__new(),
+check_record_fields(AllModules, CServer, TempExpTypes) ->
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
CheckFun =
- fun({Module, Element}, C0) ->
+ fun(Module) ->
CheckForm = fun(Form, Site, C1) ->
erl_types:t_check_record_fields(Form, TempExpTypes,
- Site, Records,
+ Site, RecordTable,
VarTable, C1)
end,
- ElemFun =
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
+ RecordFun =
fun({Key, Value}, C2) ->
case Key of
{record, Name} ->
@@ -406,10 +404,10 @@ check_record_fields(Records, TempExpTypes) ->
msg_with_position(Fun, FileLine)
end
end,
- lists:foldl(ElemFun, C0, maps:to_list(Element))
+ C0 = erl_types:cache__new(),
+ _ = lists:foldl(RecordFun, C0, maps:to_list(RecordMap))
end,
- _NewCache = lists:foldl(CheckFun, Cache, dict:to_list(Records)),
- ok.
+ lists:foreach(CheckFun, AllModules).
msg_with_position(Fun, FileLine) ->
try Fun()
@@ -421,10 +419,37 @@ msg_with_position(Fun, FileLine) ->
throw({error, NewMsg})
end.
--spec merge_records(mod_records(), mod_records()) -> mod_records().
+-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver().
-merge_records(NewRecords, OldRecords) ->
- dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords).
+merge_types(CServer, Plt) ->
+ AllNewModules = dialyzer_codeserver:all_temp_modules(CServer),
+ AllNewModulesSet = sets:from_list(AllNewModules),
+ AllOldModulesSet = dialyzer_plt:all_modules(Plt),
+ AllModulesSet = sets:union(AllNewModulesSet, AllOldModulesSet),
+ ModuleFun =
+ fun(Module) ->
+ KeepOldFun =
+ fun() ->
+ case dialyzer_plt:get_module_types(Plt, Module) of
+ none -> no;
+ {value, OldRecords} ->
+ case sets:is_element(Module, AllNewModulesSet) of
+ true -> no;
+ false -> {yes, OldRecords}
+ end
+ end
+ end,
+ Records =
+ case KeepOldFun() of
+ no ->
+ dialyzer_codeserver:lookup_temp_mod_records(Module, CServer);
+ {yes, OldRecords} ->
+ OldRecords
+ end,
+ dialyzer_codeserver:store_temp_records(Module, Records, CServer)
+ end,
+ lists:foreach(ModuleFun, sets:to_list(AllModulesSet)),
+ CServer.
%% ============================================================================
%%
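
Editorial sketch (not part of the patch): merge_types/2 above replaces the old merge_records/2 with a "new wins, old fills the gaps" rule over module sets — a module analysed in this run keeps its freshly processed records, while a module known only from the PLT keeps the records stored there. A minimal illustration of that precedence, using plain maps in place of the codeserver and PLT (module and function names are hypothetical):

-module(merge_types_sketch).
-export([merge/2]).

%% NewTypes and OldTypes are maps of the form #{Module => Records}.
%% A module analysed in this run keeps its new records; a module that is
%% only present in the old PLT keeps the records found there.
merge(NewTypes, OldTypes) ->
    %% maps:merge/2 lets the second argument win on key clashes.
    maps:merge(OldTypes, NewTypes).
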
diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl
index 418c9798b3..af0f2e9e08 100644
--- a/lib/dialyzer/src/dialyzer_worker.erl
+++ b/lib/dialyzer/src/dialyzer_worker.erl
@@ -56,10 +56,14 @@ launch(Mode, Job, InitData, Coordinator) ->
%%--------------------------------------------------------------------
-init(#state{job = SCC, mode = Mode, init_data = InitData} = State) when
+init(#state{job = SCC, mode = Mode, init_data = InitData,
+ coordinator = Coordinator} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData),
- ?debug("Deps ~p: ~p\n",[SCC, DependsOn]),
+ DependsOnSCCs = dialyzer_succ_typings:find_depends_on(SCC, InitData),
+ ?debug("~w: Deps ~p: ~p\n", [self(), SCC, DependsOnSCCs]),
+ Pids = dialyzer_coordinator:sccs_to_pids(DependsOnSCCs, Coordinator),
+ ?debug("~w: PidsDeps ~p\n", [self(), Pids]),
+ DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
loop(updating, State#state{depends_on = DependsOn});
init(#state{mode = Mode} = State) when
Mode =:= 'compile'; Mode =:= 'warnings' ->
@@ -67,7 +71,7 @@ init(#state{mode = Mode} = State) when
loop(updating, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- ?debug("Update: ~p\n",[State#state.job]),
+ ?debug("~w: Update: ~p\n", [self(), State#state.job]),
NextStatus =
case waits_more_success_typings(State) of
true -> waiting;
@@ -76,11 +80,11 @@ loop(updating, #state{mode = Mode} = State) when
loop(NextStatus, State);
loop(waiting, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- ?debug("Wait: ~p\n",[State#state.job]),
+ ?debug("~w: Wait: ~p\n", [self(), State#state.job]),
NewState = wait_for_success_typings(State),
loop(updating, NewState);
loop(running, #state{mode = 'compile'} = State) ->
- dialyzer_coordinator:request_activation(State#state.coordinator),
+ request_activation(State),
?debug("Compile: ~s\n",[State#state.job]),
Result =
case start_compilation(State) of
@@ -92,51 +96,28 @@ loop(running, #state{mode = 'compile'} = State) ->
end,
report_to_coordinator(Result, State);
loop(running, #state{mode = 'warnings'} = State) ->
- dialyzer_coordinator:request_activation(State#state.coordinator),
+ request_activation(State),
?debug("Warning: ~s\n",[State#state.job]),
Result = collect_warnings(State),
report_to_coordinator(Result, State);
loop(running, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
request_activation(State),
- ?debug("Run: ~p\n",[State#state.job]),
+ ?debug("~w: Run: ~p\n", [self(), State#state.job]),
NotFixpoint = do_work(State),
- ok = broadcast_done(State),
report_to_coordinator(NotFixpoint, State).
waits_more_success_typings(#state{depends_on = Depends}) ->
Depends =/= [].
-broadcast_done(#state{job = SCC, init_data = InitData,
- coordinator = Coordinator}) ->
- RequiredBy = dialyzer_succ_typings:find_required_by(SCC, InitData),
- {Callers, Unknown} =
- dialyzer_coordinator:sccs_to_pids(RequiredBy, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
-send_done(Callers, SCC) ->
- ?debug("Sending ~p: ~p\n",[SCC, Callers]),
- SendSTFun = fun(PID) -> PID ! {done, SCC} end,
- lists:foreach(SendSTFun, Callers).
-
-continue_broadcast_done([], _SCC, _Coordinator) -> ok;
-continue_broadcast_done(Rest, SCC, Coordinator) ->
- %% This time limit should be greater than the time required
- %% by the coordinator to spawn all processes.
- timer:sleep(500),
- {Callers, Unknown} = dialyzer_coordinator:sccs_to_pids(Rest, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
wait_for_success_typings(#state{depends_on = DependsOn} = State) ->
receive
- {done, SCC} ->
- ?debug("GOT ~p: ~p\n",[State#state.job, SCC]),
- State#state{depends_on = DependsOn -- [SCC]}
+ {'DOWN', Ref, process, Pid, _Info} ->
+ ?debug("~w: ~p got DOWN: ~p\n", [self(), State#state.job, Pid]),
+ State#state{depends_on = DependsOn -- [{Pid, Ref}]}
after
5000 ->
- ?debug("Still Waiting ~p: ~p\n",[State#state.job, DependsOn]),
+ ?debug("~w: Still Waiting ~p:\n ~p\n", [self(), State#state.job, DependsOn]),
State
end.
@@ -150,7 +131,7 @@ do_work(#state{mode = Mode, job = Job, init_data = InitData}) ->
end.
report_to_coordinator(Result, #state{job = Job, coordinator = Coordinator}) ->
- ?debug("Done: ~p\n",[Job]),
+ ?debug("~w: Done: ~p\n",[self(), Job]),
dialyzer_coordinator:job_done(Job, Result, Coordinator).
start_compilation(#state{job = Job, init_data = InitData}) ->
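
Editorial sketch (not part of the patch): the dialyzer_worker changes above drop the explicit {done, SCC} broadcast and the retry loop around sccs_to_pids/2; instead each worker monitors the workers it depends on and treats a 'DOWN' message as "that dependency has finished". A self-contained illustration of the monitor-based pattern (module and function names are hypothetical):

-module(monitor_deps_sketch).
-export([wait_for/1]).

%% Monitor every dependency once, then block until all of them have
%% terminated; each termination is delivered as a 'DOWN' message.
wait_for(Pids) ->
    Deps = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
    wait_loop(Deps).

wait_loop([]) ->
    ok;
wait_loop(Deps) ->
    receive
        {'DOWN', Ref, process, Pid, _Reason} ->
            wait_loop(Deps -- [{Pid, Ref}])
    after 5000 ->
            %% Periodic wake-up, mirroring the 5000 ms debug timeout in the
            %% worker loop above.
            wait_loop(Deps)
    end.

A monitor set on a process that has already terminated still delivers a 'DOWN' message (reason noproc), which is what lets the new code drop the old broadcast-and-retry logic.
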
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
index cb6a88786e..365b4798c5 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
{dialyzer_options, []}.
-{time_limit, 2}.
+{time_limit, 5}.
diff --git a/lib/dialyzer/test/map_SUITE_data/dialyzer_options b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
index 50991c9bc5..02425c33f2 100644
--- a/lib/dialyzer/test/map_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
@@ -1 +1,2 @@
{dialyzer_options, []}.
+{time_limit, 30}.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
index 06ed52043a..cb301ff6a1 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
{dialyzer_options, [{warnings, [no_unused, no_return]}]}.
-{time_limit, 20}.
+{time_limit, 40}.
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 91ee104f77..9d46d4ac81 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -2235,16 +2235,21 @@ t_has_var_list([]) -> false.
-spec t_collect_vars(erl_type()) -> [erl_type()].
t_collect_vars(T) ->
- t_collect_vars(T, []).
+ Vs = t_collect_vars(T, maps:new()),
+ [V || {V, _} <- maps:to_list(Vs)].
--spec t_collect_vars(erl_type(), [erl_type()]) -> [erl_type()].
+-type ctab() :: #{erl_type() => 'any'}.
+
+-spec t_collect_vars(erl_type(), ctab()) -> ctab().
t_collect_vars(?var(_) = Var, Acc) ->
- ordsets:add_element(Var, Acc);
+ maps:put(Var, any, Acc);
t_collect_vars(?function(Domain, Range), Acc) ->
- ordsets:union(t_collect_vars(Domain, Acc), t_collect_vars(Range, []));
+ Acc1 = t_collect_vars(Domain, Acc),
+ t_collect_vars(Range, Acc1);
t_collect_vars(?list(Contents, Termination, _), Acc) ->
- ordsets:union(t_collect_vars(Contents, Acc), t_collect_vars(Termination, []));
+ Acc1 = t_collect_vars(Contents, Acc),
+ t_collect_vars(Termination, Acc1);
t_collect_vars(?product(Types), Acc) ->
t_collect_vars_list(Types, Acc);
t_collect_vars(?tuple(?any, ?any, ?any), Acc) ->
@@ -4424,9 +4429,17 @@ mod_name(Mod, Name) ->
-type site() :: {'type', mta()} | {'spec', mfa()} | {'record', mra()}.
-type cache_key() :: {module(), atom(), expand_depth(),
[erl_type()], type_names()}.
--opaque cache() :: #{cache_key() => {erl_type(), expand_limit()}}.
+-type mod_type_table() :: ets:tid().
+-record(cache,
+ {
+ types = maps:new() :: #{cache_key() => {erl_type(), expand_limit()}},
+ mod_recs = {mrecs, dict:new()} :: 'undefined'
+ | {'mrecs', mod_records()}
+ }).
+
+-opaque cache() :: #cache{}.
--spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_records(),
+-spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_type_table(),
var_table(), cache()) -> {erl_type(), cache()}.
t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
@@ -4438,11 +4451,12 @@ t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
t_from_form_without_remote(Form, Site, TypeTable) ->
Module = site_module(Site),
- RecDict = dict:from_list([{Module, TypeTable}]),
+ ModRecs = dict:from_list([{Module, TypeTable}]),
ExpTypes = replace_by_none,
VarTab = var_table__new(),
- Cache = cache__new(),
- t_from_form1(Form, ExpTypes, Site, RecDict, VarTab, Cache).
+ Cache0 = cache__new(),
+ Cache = Cache0#cache{mod_recs = {mrecs, ModRecs}},
+ t_from_form1(Form, ExpTypes, Site, undefined, VarTab, Cache).
%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
%% EXPAND_LIMIT is used for limiting the size of types by
@@ -4457,13 +4471,13 @@ t_from_form_without_remote(Form, Site, TypeTable) ->
-record(from_form, {site :: site(),
xtypes :: sets:set(mfa()) | 'replace_by_none',
- mrecs :: mod_records(),
+ mrecs :: 'undefined' | mod_type_table(),
vtab :: var_table(),
tnames :: type_names()}).
-spec t_from_form1(parse_form(), sets:set(mfa()) | 'replace_by_none',
- site(), mod_records(), var_table(), cache()) ->
- {erl_type(), cache()}.
+ site(), 'undefined' | mod_type_table(), var_table(),
+ cache()) -> {erl_type(), cache()}.
t_from_form1(Form, ET, Site, MR, V, C) ->
TypeNames = initial_typenames(Site),
@@ -4709,13 +4723,13 @@ from_form({opaque, _L, Name, {Mod, Args, Rep}}, _S, _D, L, C) ->
builtin_type(Name, Type, S, D, L, C) ->
#from_form{site = Site, mrecs = MR} = S,
M = site_module(Site),
- case dict:find(M, MR) of
- {ok, R} ->
+ case lookup_module_types(M, MR, C) of
+ {R, C1} ->
case lookup_type(Name, 0, R) of
{_, {{_M, _FL, _F, _A}, _T}} ->
- type_from_form(Name, [], S, D, L, C);
+ type_from_form(Name, [], S, D, L, C1);
error ->
- {Type, L, C}
+ {Type, L, C1}
end;
error ->
{Type, L, C}
@@ -4728,9 +4742,9 @@ type_from_form(Name, Args, S, D, L, C) ->
TypeName = {type, {Module, Name, ArgsLen}},
case can_unfold_more(TypeName, TypeNames) of
true ->
- {ok, R} = dict:find(Module, MR),
+ {R, C1} = lookup_module_types(Module, MR, C),
type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames,
- S, D, L, C);
+ S, D, L, C1);
false ->
{t_any(), L, C}
end.
@@ -4782,24 +4796,24 @@ remote_from_form(RemMod, Name, Args, S, D, L, C) ->
true ->
ArgsLen = length(Args),
MFA = {RemMod, Name, ArgsLen},
- case dict:find(RemMod, MR) of
+ case lookup_module_types(RemMod, MR, C) of
error ->
self() ! {self(), ext_types, MFA},
{t_any(), L, C};
- {ok, RemDict} ->
+ {RemDict, C1} ->
case sets:is_element(MFA, ET) of
true ->
RemType = {type, MFA},
case can_unfold_more(RemType, TypeNames) of
true ->
remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict,
- RemType, TypeNames, S, D, L, C);
+ RemType, TypeNames, S, D, L, C1);
false ->
- {t_any(), L, C}
+ {t_any(), L, C1}
end;
false ->
self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), L, C}
+ {t_any(), L, C1}
end
end
end.
@@ -4874,15 +4888,15 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
case can_unfold_more(RecordType, TypeNames) of
true ->
M = site_module(Site),
- {ok, R} = dict:find(M, MR),
+ {R, C1} = lookup_module_types(M, MR, C),
case lookup_record(Name, R) of
{ok, DeclFields} ->
NewTypeNames = [RecordType|TypeNames],
Site1 = {record, {M, Name, length(DeclFields)}},
S1 = S#from_form{site = Site1, tnames = NewTypeNames},
Fun = fun(D, L) ->
- {GetModRec, L1, C1} =
- get_mod_record(ModFields, DeclFields, S1, D, L, C),
+ {GetModRec, L1, C2} =
+ get_mod_record(ModFields, DeclFields, S1, D, L, C1),
case GetModRec of
{error, FieldName} ->
throw({error,
@@ -4890,12 +4904,12 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
[Name, FieldName])});
{ok, NewFields} ->
S2 = S1#from_form{vtab = var_table__new()},
- {NewFields1, L2, C2} =
- fields_from_form(NewFields, S2, D, L1, C1),
+ {NewFields1, L2, C3} =
+ fields_from_form(NewFields, S2, D, L1, C2),
Rec = t_tuple(
[t_atom(Name)|[Type
|| {_FieldName, Type} <- NewFields1]]),
- {Rec, L2, C2}
+ {Rec, L2, C3}
end
end,
recur_limit(Fun, D0, L0, RecordType, TypeNames);
@@ -5026,7 +5040,7 @@ recur_limit(Fun, D, L, TypeName, TypeNames) ->
end.
-spec t_check_record_fields(parse_form(), sets:set(mfa()), site(),
- mod_records(), var_table(), cache()) -> cache().
+ mod_type_table(), var_table(), cache()) -> cache().
t_check_record_fields(Form, ExpTypes, Site, RecDict, VarTable, Cache) ->
State = #from_form{site = Site,
@@ -5070,13 +5084,13 @@ check_record_fields({user_type, _L, _Name, Args}, S, C) ->
check_record({atom, _, Name}, ModFields, S, C) ->
#from_form{site = Site, mrecs = MR} = S,
M = site_module(Site),
- {ok, R} = dict:find(M, MR),
+ {R, C1} = lookup_module_types(M, MR, C),
{ok, DeclFields} = lookup_record(Name, R),
- case check_fields(Name, ModFields, DeclFields, S, C) of
+ case check_fields(Name, ModFields, DeclFields, S, C1) of
{error, FieldName} ->
throw({error, io_lib:format("Illegal declaration of #~w{~w}\n",
[Name, FieldName])});
- C1 -> C1
+ C2 -> C2
end.
check_fields(RecName, [{type, _, field_type, [{atom, _, Name}, Abstr]}|Left],
@@ -5106,7 +5120,7 @@ site_module({_, {Module, _, _}}) ->
-spec cache__new() -> cache().
cache__new() ->
- maps:new().
+ #cache{}.
-spec cache_key(module(), atom(), [erl_type()],
type_names(), expand_depth()) -> cache_key().
@@ -5123,8 +5137,8 @@ cache_key(Module, Name, ArgTypes, TypeNames, D) ->
-spec cache_find(cache_key(), cache()) ->
{erl_type(), expand_limit()} | 'error'.
-cache_find(Key, Cache) ->
- case maps:find(Key, Cache) of
+cache_find(Key, #cache{types = Types}) ->
+ case maps:find(Key, Types) of
{ok, Value} ->
Value;
error ->
@@ -5136,8 +5150,9 @@ cache_find(Key, Cache) ->
cache_put(_Key, _Type, DeltaL, Cache) when DeltaL < 0 ->
%% The type is truncated; do not reuse it.
Cache;
-cache_put(Key, Type, DeltaL, Cache) ->
- maps:put(Key, {Type, DeltaL}, Cache).
+cache_put(Key, Type, DeltaL, #cache{types = Types} = Cache) ->
+ NewTypes = maps:put(Key, {Type, DeltaL}, Types),
+ Cache#cache{types = NewTypes}.
-spec t_var_names([erl_type()]) -> [atom()].
@@ -5236,14 +5251,12 @@ t_form_to_string({type, _L, union, Args}) ->
t_form_to_string({type, _L, Name, []} = T) ->
try
M = mod,
- D0 = maps:new(),
- MR = dict:from_list([{M, D0}]),
Site = {type, {M,Name,0}},
V = var_table__new(),
C = cache__new(),
State = #from_form{site = Site,
xtypes = sets:new(),
- mrecs = MR,
+ mrecs = 'undefined',
vtab = V,
tnames = []},
{T1, _, _} = from_form(T, State, _Deep=1000, _ALot=1000000, C),
@@ -5297,6 +5310,28 @@ is_erl_type(?unit) -> true;
is_erl_type(#c{}) -> true;
is_erl_type(_) -> false.
+-spec lookup_module_types(module(), mod_type_table(), cache()) ->
+ 'error' | {type_table(), cache()}.
+
+lookup_module_types(Module, CodeTable, Cache) ->
+ #cache{mod_recs = ModRecs} = Cache,
+ case ModRecs of
+ undefined -> error;
+ {mrecs, MRecs} ->
+ case dict:find(Module, MRecs) of
+ {ok, R} ->
+ {R, Cache};
+ error ->
+ try ets:lookup_element(CodeTable, Module, 2) of
+ R ->
+ NewMRecs = dict:store(Module, R, MRecs),
+ {R, Cache#cache{mod_recs = {mrecs, NewMRecs}}}
+ catch
+ _:_ -> error
+ end
+ end
+ end.
+
-spec lookup_record(atom(), type_table()) ->
'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
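
Editorial sketch (not part of the patch): in the erl_types changes above, cache() becomes a record that, besides the type-expansion cache, memoizes per-module type tables fetched from an ETS table owned by the codeserver (lookup_module_types/3). A stripped-down illustration of that memoized lookup (module, table, and key names are hypothetical):

-module(ets_memo_sketch).
-export([new/0, lookup/3]).

-record(cache, {seen = dict:new() :: dict:dict()}).

new() -> #cache{}.

%% Consult the in-cache dict first and fall back to the ETS table only on a
%% miss; return 'error' when the key is in neither place.
lookup(Key, Table, #cache{seen = Seen} = Cache) ->
    case dict:find(Key, Seen) of
        {ok, Value} ->
            {Value, Cache};
        error ->
            try ets:lookup_element(Table, Key, 2) of
                Value ->
                    {Value, Cache#cache{seen = dict:store(Key, Value, Seen)}}
            catch
                _:_ -> error
            end
    end.
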
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 7e20a9ba67..82273c8c74 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -241,9 +241,9 @@ handle_info({tcp_closed, _}, State) ->
handle_info({ssl_closed, _}, State) ->
{stop, normal, State};
handle_info({tcp_error, _, _} = Reason, State) ->
- {stop, Reason, State};
+ {stop, {shutdown, Reason}, State};
handle_info({ssl_error, _, _} = Reason, State) ->
- {stop, Reason, State};
+ {stop, {shutdown, Reason}, State};
%% Timeouts
handle_info(timeout, #state{mfa = {_, parse, _}} = State) ->
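
Editorial sketch (not part of the patch): the httpd_request_handler change above wraps the socket error in {shutdown, Reason}. An exit reason of the form {shutdown, _} is treated by OTP as a controlled stop, so gen_server logs no crash report and transient children are not restarted. A minimal gen_server showing the same pattern (module name is hypothetical):

-module(quiet_stop_sketch).
-behaviour(gen_server).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
         terminate/2, code_change/3]).

init(Arg) -> {ok, Arg}.
handle_call(_Request, _From, State) -> {reply, ok, State}.
handle_cast(_Msg, State) -> {noreply, State}.

%% A socket error is expected operational noise, not a bug in the server:
%% stopping with {shutdown, Reason} terminates the process without producing
%% a crash report.
handle_info({tcp_error, _Socket, _} = Reason, State) ->
    {stop, {shutdown, Reason}, State};
handle_info(_Other, State) ->
    {noreply, State}.

terminate(_Reason, _State) -> ok.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
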
diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src
index 3a31daeb20..d28d4cd766 100644
--- a/lib/inets/src/inets_app/inets.appup.src
+++ b/lib/inets/src/inets_app/inets.appup.src
@@ -18,10 +18,14 @@
%% %CopyrightEnd%
{"%VSN%",
[
+ {<<"6.2.4">>, [{load_module, httpd_request_handler,
+ soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
],
[
+ {<<"6.2.4">>, [{load_module, httpd_request_handler,
+ soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
]
diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml
index df681a505f..c581fa9d1e 100644
--- a/lib/kernel/doc/src/kernel_app.xml
+++ b/lib/kernel/doc/src/kernel_app.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -19,7 +19,7 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
+
</legalnotice>
<title>kernel</title>
@@ -58,6 +58,60 @@
</section>
<section>
+ <title>OS Signal Event Handler</title>
+    <p>Asynchronous OS signals may be subscribed to via the Kernel application's event manager
+ (see <seealso marker="doc/design_principles:des_princ">OTP Design Principles</seealso> and
+ <seealso marker="stdlib:gen_event"><c>gen_event(3)</c></seealso>) registered as <c>erl_signal_server</c>.
+ A default signal handler is installed which handles the following signals:</p>
+ <taglist>
+ <tag><c>sigusr1</c></tag>
+ <item><p>The default handler will halt Erlang and produce a crashdump
+ with slogan "Received SIGUSR1".
+ This is equivalent to calling <c>erlang:halt("Received SIGUSR1")</c>.
+ </p></item>
+
+ <tag><c>sigquit</c></tag>
+ <item><p>The default handler will halt Erlang immediately.
+ This is equivalent to calling <c>erlang:halt()</c>.
+ </p></item>
+
+ <tag><c>sigterm</c></tag>
+ <item><p>The default handler will terminate Erlang normally.
+ This is equivalent to calling <c>init:stop()</c>.
+ </p></item>
+ </taglist>
+
+ <section>
+ <title>Events</title>
+ <p>Any event handler added to <c>erl_signal_server</c> must handle the following events.</p>
+ <taglist>
+ <tag><c>sighup</c></tag>
+ <item><p>Hangup detected on controlling terminal or death of controlling process</p></item>
+ <tag><c>sigquit</c></tag>
+ <item><p>Quit from keyboard</p></item>
+ <tag><c>sigabrt</c></tag>
+ <item><p>Abort signal from abort</p></item>
+ <tag><c>sigalrm</c></tag>
+ <item><p>Timer signal from alarm</p></item>
+ <tag><c>sigterm</c></tag>
+ <item><p>Termination signal</p></item>
+ <tag><c>sigusr1</c></tag>
+ <item><p>User-defined signal 1</p></item>
+ <tag><c>sigusr2</c></tag>
+ <item><p>User-defined signal 2</p></item>
+ <tag><c>sigchld</c></tag>
+ <item><p>Child process stopped or terminated</p></item>
+ <tag><c>sigstop</c></tag>
+ <item><p>Stop process</p></item>
+ <tag><c>sigtstp</c></tag>
+ <item><p>Stop typed at terminal</p></item>
+ </taglist>
+
+    <p>Setting OS signals is described in <seealso marker="os#set_signal/2"><c>os:set_signal/2</c></seealso>.</p>
+ </section>
+ </section>
+
+ <section>
<title>Configuration</title>
<p>The following configuration parameters are defined for the Kernel
application. For more information about configuration parameters,
@@ -405,4 +459,3 @@ MaxT = TickTime + TickTime / 4</code>
<seealso marker="stdlib:timer"><c>timer(3)</c></seealso></p>
</section>
</appref>
-
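
Editorial sketch (not part of the patch): a custom handler for the erl_signal_server event manager documented above is an ordinary gen_event callback module. The example below reacts to sighup and lets every other signal fall through; the two calls at the end assume the os:set_signal/2 BIF introduced later in this series (module name is hypothetical):

-module(my_signal_handler_sketch).
-behaviour(gen_event).
-export([init/1, handle_event/2, handle_call/2, handle_info/2,
         terminate/2, code_change/3]).

init(_Args) -> {ok, []}.

%% React to SIGHUP; ignore every other signal event.
handle_event(sighup, State) ->
    error_logger:info_msg("SIGHUP received~n"),
    {ok, State};
handle_event(_Signal, State) ->
    {ok, State}.

handle_call(_Request, State) -> {ok, ok, State}.
handle_info(_Info, State) -> {ok, State}.
terminate(_Args, _State) -> ok.
code_change(_OldVsn, State, _Extra) -> {ok, State}.

Usage, for example from an application start callback:

    ok = os:set_signal(sighup, handle),
    ok = gen_event:add_handler(erl_signal_server, my_signal_handler_sketch, []).
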
diff --git a/lib/kernel/doc/src/os.xml b/lib/kernel/doc/src/os.xml
index 739ac35d2a..6ba69d12a3 100644
--- a/lib/kernel/doc/src/os.xml
+++ b/lib/kernel/doc/src/os.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -19,7 +19,7 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
+
</legalnotice>
<title>os</title>
@@ -156,6 +156,32 @@ DirOut = os:cmd("dir"), % on Win32 platform</code>
</func>
<func>
+ <name name="set_signal" arity="2"/>
+ <fsummary>Enables or disables handling of OS signals.</fsummary>
+ <desc>
+      <p>Enables or disables handling of OS signals.</p>
+      <p>Each signal may be set to one of the following options:</p>
+ <taglist>
+ <tag><c>ignore</c></tag>
+ <item>
+ This signal will be ignored.
+ </item>
+
+ <tag><c>default</c></tag>
+ <item>
+ This signal will use the default signal handler for the operating system.
+ </item>
+
+ <tag><c>handle</c></tag>
+ <item>
+ This signal will notify <c>erl_signal_server</c> when it is received by
+        When this signal is received, the Erlang runtime system will notify
+        <c>erl_signal_server</c>.
+ </taglist>
+ </desc>
+ </func>
+
+ <func>
<name name="system_time" arity="0"/>
<fsummary>Current OS system time.</fsummary>
<desc>
@@ -296,4 +322,3 @@ calendar:now_to_universal_time(TS),
</func>
</funcs>
</erlref>
-
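
Editorial sketch (not part of the patch): a few illustrative calls to the new os:set_signal/2, for example placed in an application's start code; the 'ok' return values follow the spec added in os.erl below:

    %% Stop acting on SIGTERM; the VM will no longer shut down on it.
    ok = os:set_signal(sigterm, ignore),
    %% Forward SIGHUP to erl_signal_server so an installed handler can react.
    ok = os:set_signal(sighup, handle),
    %% Restore the operating system's default disposition for SIGUSR2.
    ok = os:set_signal(sigusr2, default).
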
diff --git a/lib/kernel/src/Makefile b/lib/kernel/src/Makefile
index 2b72f78dcf..2a89faaf13 100644
--- a/lib/kernel/src/Makefile
+++ b/lib/kernel/src/Makefile
@@ -71,6 +71,7 @@ MODULES = \
erl_distribution \
erl_epmd \
erl_reply \
+ erl_signal_handler \
erts_debug \
error_handler \
error_logger \
diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl
index 5a7ca493cc..2a06d0cb15 100644
--- a/lib/kernel/src/code.erl
+++ b/lib/kernel/src/code.erl
@@ -489,13 +489,13 @@ prepare_check_uniq_1([], [_|_]=Errors) ->
{error,Errors}.
partition_on_load(Prep) ->
- P = fun({_,{Bin,_,_}}) ->
- erlang:has_prepared_code_on_load(Bin)
+ P = fun({_,{PC,_,_}}) ->
+ erlang:has_prepared_code_on_load(PC)
end,
lists:partition(P, Prep).
verify_prepared([{M,{Prep,Name,_Native}}|T])
- when is_atom(M), is_binary(Prep), is_list(Name) ->
+ when is_atom(M), is_list(Name) ->
try erlang:has_prepared_code_on_load(Prep) of
false ->
verify_prepared(T);
@@ -562,10 +562,10 @@ prepare_loading_fun() ->
GetNative = get_native_fun(),
fun(Mod, FullName, Beam) ->
case erlang:prepare_loading(Mod, Beam) of
- Prepared when is_binary(Prepared) ->
- {ok,{Prepared,FullName,GetNative(Beam)}};
{error,_}=Error ->
- Error
+ Error;
+ Prepared ->
+ {ok,{Prepared,FullName,GetNative(Beam)}}
end
end.
diff --git a/lib/kernel/src/erl_signal_handler.erl b/lib/kernel/src/erl_signal_handler.erl
new file mode 100644
index 0000000000..8f924d2adc
--- /dev/null
+++ b/lib/kernel/src/erl_signal_handler.erl
@@ -0,0 +1,57 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(erl_signal_handler).
+-behaviour(gen_event).
+-export([init/1, format_status/2,
+ handle_event/2, handle_call/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-record(state,{}).
+
+init(_Args) ->
+ {ok, #state{}}.
+
+handle_event(sigusr1, S) ->
+ erlang:halt("Received SIGUSR1"),
+ {ok, S};
+handle_event(sigquit, S) ->
+ erlang:halt(),
+ {ok, S};
+handle_event(sigterm, S) ->
+ error_logger:info_msg("SIGTERM received - shutting down~n"),
+ ok = init:stop(),
+ {ok, S};
+handle_event(_SignalMsg, S) ->
+ {ok, S}.
+
+handle_info(_Info, S) ->
+ {ok, S}.
+
+handle_call(_Request, S) ->
+ {ok, ok, S}.
+
+format_status(_Opt, [_Pdict,_S]) ->
+ ok.
+
+code_change(_OldVsn, S, _Extra) ->
+ {ok, S}.
+
+terminate(_Args, _S) ->
+ ok.
diff --git a/lib/kernel/src/error_logger.erl b/lib/kernel/src/error_logger.erl
index 3523f680a3..3ee8e2c6e6 100644
--- a/lib/kernel/src/error_logger.erl
+++ b/lib/kernel/src/error_logger.erl
@@ -360,8 +360,12 @@ init(Max) when is_integer(Max) ->
%% go back.
init({go_back, _PostState}) ->
{ok, {?buffer_size, 0, []}};
-init(_) -> %% Start and just relay to other
- {ok, []}. %% node if node(GLeader) =/= node().
+init(_) ->
+ %% The error logger process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+    %% avoid excessive GCs.
+ process_flag(message_queue_data, off_heap),
+ {ok, []}.
-spec handle_event(term(), state()) -> {'ok', state()}.
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl
index 1971df9038..79e72cdc6d 100644
--- a/lib/kernel/src/file.erl
+++ b/lib/kernel/src/file.erl
@@ -1424,7 +1424,7 @@ path_open_first([Path|Rest], Name, Mode, LastError) ->
case open(FileName, Mode) of
{ok, Fd} ->
{ok, Fd, FileName};
- {error, enoent} ->
+ {error, Reason} when Reason =:= enoent; Reason =:= enotdir ->
path_open_first(Rest, Name, Mode, LastError);
Error ->
Error
diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src
index 4d08a55c7c..25e4ddd95c 100644
--- a/lib/kernel/src/kernel.app.src
+++ b/lib/kernel/src/kernel.app.src
@@ -34,6 +34,7 @@
erl_boot_server,
erl_distribution,
erl_reply,
+ erl_signal_handler,
error_handler,
error_logger,
file,
diff --git a/lib/kernel/src/kernel.erl b/lib/kernel/src/kernel.erl
index 3d0ef81318..59eca242b1 100644
--- a/lib/kernel/src/kernel.erl
+++ b/lib/kernel/src/kernel.erl
@@ -32,6 +32,14 @@
start(_, []) ->
case supervisor:start_link({local, kernel_sup}, kernel, []) of
{ok, Pid} ->
+ %% add signal handler
+ case whereis(erl_signal_server) of
+ %% in case of minimal mode
+ undefined -> ok;
+ _ ->
+ ok = gen_event:add_handler(erl_signal_server, erl_signal_handler, [])
+ end,
+ %% add error handler
Type = get_error_logger_type(),
case error_logger:swap_handler(Type) of
ok -> {ok, Pid, []};
@@ -131,6 +139,9 @@ init([]) ->
permanent, 2000, worker, [inet_db]},
NetSup = {net_sup, {erl_distribution, start_link, []},
permanent, infinity, supervisor,[erl_distribution]},
+ SigSrv = #{id => erl_signal_server,
+ start => {gen_event, start_link, [{local, erl_signal_server}]},
+ type => worker, restart => permanent, shutdown => 2000, modules => dynamic},
DistAC = start_dist_ac(),
Timer = start_timer(),
@@ -141,7 +152,7 @@ init([]) ->
permanent, infinity, supervisor, [?MODULE]},
{ok, {SupFlags,
[Code, Rpc, Global, InetDb | DistAC] ++
- [NetSup, Glo_grp, File,
+ [NetSup, Glo_grp, File, SigSrv,
StdError, User, Config, SafeSupervisor] ++ Timer}}
end;
init(safe) ->
diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl
index f8519d3a5e..c3ffcb3f70 100644
--- a/lib/kernel/src/os.erl
+++ b/lib/kernel/src/os.erl
@@ -29,7 +29,7 @@
-export([getenv/0, getenv/1, getenv/2, getpid/0,
perf_counter/0, perf_counter/1,
- putenv/2, system_time/0, system_time/1,
+ putenv/2, set_signal/2, system_time/0, system_time/1,
timestamp/0, unsetenv/1]).
-spec getenv() -> [string()].
@@ -104,6 +104,15 @@ timestamp() ->
unsetenv(_) ->
erlang:nif_error(undef).
+-spec set_signal(Signal, Option) -> 'ok' when
+ Signal :: 'sighup' | 'sigquit' | 'sigabrt' | 'sigalrm' |
+ 'sigterm' | 'sigusr1' | 'sigusr2' | 'sigchld' |
+ 'sigstop' | 'sigtstp',
+ Option :: 'default' | 'handle' | 'ignore'.
+
+set_signal(_Signal, _Option) ->
+ erlang:nif_error(undef).
+
%%% End of BIFs
-spec type() -> {Osfamily, Osname} when
diff --git a/lib/kernel/src/rpc.erl b/lib/kernel/src/rpc.erl
index 21bff02214..bd6ea26678 100644
--- a/lib/kernel/src/rpc.erl
+++ b/lib/kernel/src/rpc.erl
@@ -67,17 +67,27 @@
%%------------------------------------------------------------------------
+
+%% The rex server may receive a huge amount of
+%% messages. Make sure that they are stored off heap to
+%% avoid excessive GCs.
+
+-define(SPAWN_OPTS, [{spawn_opt,[{message_queue_data,off_heap}]}]).
+
%% Remote execution and broadcasting facility
-spec start() -> {'ok', pid()} | 'ignore' | {'error', term()}.
start() ->
- gen_server:start({local,?NAME}, ?MODULE, [], []).
+ gen_server:start({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS).
-spec start_link() -> {'ok', pid()} | 'ignore' | {'error', term()}.
start_link() ->
- gen_server:start_link({local,?NAME}, ?MODULE, [], []).
+ %% The rex server process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+    %% avoid excessive GCs.
+ gen_server:start_link({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS).
-spec stop() -> term().
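
Editorial sketch (not part of the patch): several patches in this series move a busy server's message queue off the process heap — rpc via the {spawn_opt, [{message_queue_data, off_heap}]} start option (the ?SPAWN_OPTS macro above), error_logger via process_flag/2 inside init/1 — and the new test cases check the setting with process_info/2. A small illustration combining the three (module name is hypothetical):

-module(off_heap_sketch).
-export([demo/0]).

%% Spawn a process whose mailbox is kept off the heap, then verify the
%% setting the same way the new rpc_SUITE and error_logger_SUITE cases do.
demo() ->
    Pid = spawn_opt(fun() ->
                            %% A process can also set the flag on itself, as
                            %% error_logger:init/1 now does:
                            %% process_flag(message_queue_data, off_heap),
                            receive stop -> ok end
                    end,
                    [{message_queue_data, off_heap}]),
    {message_queue_data, off_heap} = process_info(Pid, message_queue_data),
    Pid ! stop,
    ok.
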
diff --git a/lib/kernel/test/code_SUITE.erl b/lib/kernel/test/code_SUITE.erl
index 4914ce9e4c..f368232bfc 100644
--- a/lib/kernel/test/code_SUITE.erl
+++ b/lib/kernel/test/code_SUITE.erl
@@ -323,7 +323,7 @@ load_abs(Config) when is_list(Config) ->
{error, nofile} = code:load_abs(TestDir ++ "/duuuumy_mod"),
{error, badfile} = code:load_abs(TestDir ++ "/code_a_test"),
{'EXIT', _} = (catch code:load_abs({})),
- {'EXIT', _} = (catch code:load_abs("Non-latin-имя-файла")),
+ {error, nofile} = code:load_abs("Non-latin-имя-файла"),
{module, code_b_test} = code:load_abs(TestDir ++ "/code_b_test"),
code:stick_dir(TestDir),
{error, sticky_directory} = code:load_abs(TestDir ++ "/code_b_test"),
diff --git a/lib/kernel/test/error_logger_SUITE.erl b/lib/kernel/test/error_logger_SUITE.erl
index b6e7551741..bb01c2384d 100644
--- a/lib/kernel/test/error_logger_SUITE.erl
+++ b/lib/kernel/test/error_logger_SUITE.erl
@@ -30,6 +30,7 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
+ off_heap/1,
error_report/1, info_report/1, error/1, info/1,
emulator/1, tty/1, logfile/1, add/1, delete/1]).
@@ -45,7 +46,7 @@ suite() ->
{timetrap,{minutes,1}}].
all() ->
- [error_report, info_report, error, info, emulator, tty,
+ [off_heap, error_report, info_report, error, info, emulator, tty,
logfile, add, delete].
groups() ->
@@ -66,6 +67,16 @@ end_per_group(_GroupName, Config) ->
%%-----------------------------------------------------------------
+off_heap(_Config) ->
+ %% The error_logger process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+    %% avoid excessive GCs.
+ MQD = message_queue_data,
+ {MQD,off_heap} = process_info(whereis(error_logger), MQD),
+ ok.
+
+%%-----------------------------------------------------------------
+
error_report(Config) when is_list(Config) ->
error_logger:add_report_handler(?MODULE, self()),
Rep1 = [{tag1,"data1"},{tag2,data2},{tag3,3}],
diff --git a/lib/kernel/test/multi_load_SUITE.erl b/lib/kernel/test/multi_load_SUITE.erl
index 369e25ac64..920839f4f9 100644
--- a/lib/kernel/test/multi_load_SUITE.erl
+++ b/lib/kernel/test/multi_load_SUITE.erl
@@ -144,14 +144,14 @@ prep_magic([H|T]) ->
prep_magic(Tuple) when is_tuple(Tuple) ->
L = prep_magic(tuple_to_list(Tuple)),
list_to_tuple(L);
-prep_magic(Bin) when is_binary(Bin) ->
- try erlang:has_prepared_code_on_load(Bin) of
+prep_magic(Ref) when is_reference(Ref) ->
+ try erlang:has_prepared_code_on_load(Ref) of
false ->
- %% Create a different kind of magic binary.
+ %% Create a different kind of magic ref.
ets:match_spec_compile([{'_',[true],['$_']}])
catch
_:_ ->
- Bin
+ Ref
end;
prep_magic(Other) ->
Other.
diff --git a/lib/kernel/test/rpc_SUITE.erl b/lib/kernel/test/rpc_SUITE.erl
index 1c72ddc87f..d76c4097d8 100644
--- a/lib/kernel/test/rpc_SUITE.erl
+++ b/lib/kernel/test/rpc_SUITE.erl
@@ -21,7 +21,8 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
--export([call/1, block_call/1, multicall/1, multicall_timeout/1,
+-export([off_heap/1,
+ call/1, block_call/1, multicall/1, multicall_timeout/1,
multicall_dies/1, multicall_node_dies/1,
called_dies/1, called_node_dies/1,
called_throws/1, call_benchmark/1, async_call/1]).
@@ -35,7 +36,7 @@ suite() ->
{timetrap,{minutes,2}}].
all() ->
- [call, block_call, multicall, multicall_timeout,
+ [off_heap, call, block_call, multicall, multicall_timeout,
multicall_dies, multicall_node_dies, called_dies,
called_node_dies, called_throws, call_benchmark,
async_call].
@@ -55,6 +56,13 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
+off_heap(_Config) ->
+ %% The rex server process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+    %% avoid excessive GCs.
+ MQD = message_queue_data,
+ {MQD,off_heap} = process_info(whereis(rex), MQD),
+ ok.
%% Test different rpc calls.
diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml
index 51c98d0d3e..9f59759cb6 100644
--- a/lib/mnesia/doc/src/notes.xml
+++ b/lib/mnesia/doc/src/notes.xml
@@ -39,7 +39,23 @@
thus constitutes one section in this document. The title of each
section is the version number of Mnesia.</p>
- <section><title>Mnesia 4.14.2</title>
+ <section><title>Mnesia 4.14.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed crash in checkpoint handling when table was deleted
+ during backup.</p>
+ <p>
+ Own Id: OTP-14167</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Mnesia 4.14.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl
index 9eb939e8d3..fc626940b4 100644
--- a/lib/mnesia/src/mnesia_checkpoint.erl
+++ b/lib/mnesia/src/mnesia_checkpoint.erl
@@ -909,7 +909,7 @@ retainer_loop(Cp = #checkpoint_args{name=Name}) ->
retainer_loop(Cp2);
{From, {iter_end, Iter}} ->
- retainer_fixtable(Iter#iter.oid_tab, false),
+ ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)),
Iters = Cp#checkpoint_args.iterators -- [Iter],
reply(From, Name, ok),
retainer_loop(Cp#checkpoint_args{iterators = Iters});
@@ -971,7 +971,8 @@ do_stop(Cp) ->
unset({checkpoint, Name}),
lists:foreach(fun deactivate_tab/1, Cp#checkpoint_args.retainers),
Iters = Cp#checkpoint_args.iterators,
- lists:foreach(fun(I) -> retainer_fixtable(I#iter.oid_tab, false) end, Iters).
+ [?SAFE(retainer_fixtable(Tab, false)) || #iter{main_tab=Tab} <- Iters],
+ ok.
deactivate_tab(R) ->
Name = R#retainer.cp_name,
@@ -1151,7 +1152,7 @@ do_change_copy(Cp, Tab, FromType, ToType) ->
Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
check_iter(From, Iter) when Iter#iter.pid == From ->
- retainer_fixtable(Iter#iter.oid_tab, false),
+ ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)),
false;
check_iter(_From, _Iter) ->
true.
diff --git a/lib/mnesia/src/mnesia_event.erl b/lib/mnesia/src/mnesia_event.erl
index 7320d381ea..6f7531245f 100644
--- a/lib/mnesia/src/mnesia_event.erl
+++ b/lib/mnesia/src/mnesia_event.erl
@@ -114,7 +114,8 @@ handle_table_event({Oper, Record, TransId}, State) ->
handle_system_event({mnesia_checkpoint_activated, _Checkpoint}, State) ->
{ok, State};
-handle_system_event({mnesia_checkpoint_deactivated, _Checkpoint}, State) ->
+handle_system_event({mnesia_checkpoint_deactivated, Checkpoint}, State) ->
+ report_error("Checkpoint '~p' has been deactivated, last table copy deleted.\n",[Checkpoint]),
{ok, State};
handle_system_event({mnesia_up, Node}, State) ->
diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl
index e745ec9b04..044cf501fd 100644
--- a/lib/mnesia/test/mnesia_evil_backup.erl
+++ b/lib/mnesia/test/mnesia_evil_backup.erl
@@ -723,18 +723,18 @@ bup_records(File, Mod) ->
exit(Reason)
end.
-sops_with_checkpoint(doc) ->
+sops_with_checkpoint(doc) ->
["Test schema operations during a checkpoint"];
sops_with_checkpoint(suite) -> [];
sops_with_checkpoint(Config) when is_list(Config) ->
- Ns = ?acquire_nodes(2, Config),
-
+ Ns = [N1,N2] = ?acquire_nodes(2, Config),
+
?match({ok, cp1, Ns}, mnesia:activate_checkpoint([{name, cp1},{max,mnesia:system_info(tables)}])),
- Tab = tab,
+ Tab = tab,
?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies,Ns}])),
OldRecs = [{Tab, K, -K} || K <- lists:seq(1, 5)],
[mnesia:dirty_write(R) || R <- OldRecs],
-
+
?match({ok, cp2, Ns}, mnesia:activate_checkpoint([{name, cp2},{max,mnesia:system_info(tables)}])),
File1 = "cp1_delete_me.BUP",
?match(ok, mnesia:dirty_write({Tab,6,-6})),
@@ -742,16 +742,16 @@ sops_with_checkpoint(Config) when is_list(Config) ->
?match(ok, mnesia:dirty_write({Tab,7,-7})),
File2 = "cp2_delete_me.BUP",
?match(ok, mnesia:backup_checkpoint(cp2, File2)),
-
+
?match(ok, mnesia:deactivate_checkpoint(cp1)),
?match(ok, mnesia:backup_checkpoint(cp2, File1)),
?match(ok, mnesia:dirty_write({Tab,8,-8})),
-
+
?match({atomic,ok}, mnesia:delete_table(Tab)),
?match({error,_}, mnesia:backup_checkpoint(cp2, File2)),
?match({'EXIT',_}, mnesia:dirty_write({Tab,9,-9})),
- ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
+ ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
Test = fun(N) when N > 5 -> ?error("Too many records in backup ~p ~n", [N]);
(N) -> case mnesia:dirty_read(Tab,N) of
[{Tab,N,B}] when -B =:= N -> ok;
@@ -759,8 +759,29 @@ sops_with_checkpoint(Config) when is_list(Config) ->
end
end,
[Test(N) || N <- mnesia:dirty_all_keys(Tab)],
- ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
-
+ ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
+
+ %% Mnesia crashes when deleting a table during backup
+ ?match([], mnesia_test_lib:stop_mnesia([N2])),
+ Tab2 = ram,
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies,[N1]}])),
+ ?match({ok, cp3, _}, mnesia:activate_checkpoint([{name, cp3},
+ {ram_overrides_dump,true},
+ {min,[Tab2]}])),
+ Write = fun Loop (N) ->
+ case N > 0 of
+ true ->
+ mnesia:dirty_write({Tab2, N+100, N+100}),
+ Loop(N-1);
+ false ->
+ ok
+ end
+ end,
+ ok = Write(100000),
+ spawn_link(fun() -> ?match({atomic, ok},mnesia:delete_table(Tab2)) end),
+
+ %% We don't check result here, depends on timing of above call
+    %% We don't check the result here; it depends on the timing of the delete_table call above
file:delete(File1), file:delete(File2),
- ?verify_mnesia(Ns, []).
+ ?verify_mnesia([N1], [N2]).
diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk
index 439b21e58c..e272a469bb 100644
--- a/lib/mnesia/vsn.mk
+++ b/lib/mnesia/vsn.mk
@@ -1 +1 @@
-MNESIA_VSN = 4.14.2
+MNESIA_VSN = 4.14.3
diff --git a/lib/ssh/src/ssh.app.src b/lib/ssh/src/ssh.app.src
index 76b7d8cd55..2bb7491b0c 100644
--- a/lib/ssh/src/ssh.app.src
+++ b/lib/ssh/src/ssh.app.src
@@ -48,4 +48,3 @@
"stdlib-3.1"
]}]}.
-
diff --git a/lib/ssh/test/ssh_algorithms_SUITE.erl b/lib/ssh/test/ssh_algorithms_SUITE.erl
index 14605ee44f..4327068b7b 100644
--- a/lib/ssh/test/ssh_algorithms_SUITE.erl
+++ b/lib/ssh/test/ssh_algorithms_SUITE.erl
@@ -198,8 +198,6 @@ try_exec_simple_group(Group, Config) ->
%%--------------------------------------------------------------------
%% Testing all default groups
-simple_exec_groups() -> [{timetrap,{minutes,8}}].
-
simple_exec_groups(Config) ->
Sizes = interpolate( public_key:dh_gex_group_sizes() ),
lists:foreach(
diff --git a/lib/ssh/test/ssh_protocol_SUITE.erl b/lib/ssh/test/ssh_protocol_SUITE.erl
index 84290c7ffd..2c4fa8be88 100644
--- a/lib/ssh/test/ssh_protocol_SUITE.erl
+++ b/lib/ssh/test/ssh_protocol_SUITE.erl
@@ -115,7 +115,10 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
TC == gex_client_old_request_noexact ->
Opts = case TC of
gex_client_init_option_groups ->
- [{dh_gex_groups, [{2345, 3, 41}]}];
+ [{dh_gex_groups,
+ [{1023, 5,
+ 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F
+ }]}];
gex_client_init_option_groups_file ->
DataDir = proplists:get_value(data_dir, Config),
F = filename:join(DataDir, "dh_group_test"),
@@ -127,10 +130,12 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
_ when TC == gex_server_gex_limit ;
TC == gex_client_old_request_exact ;
TC == gex_client_old_request_noexact ->
- [{dh_gex_groups, [{ 500, 3, 17},
- {1000, 7, 91},
- {3000, 5, 61}]},
- {dh_gex_limits,{500,1500}}
+ [{dh_gex_groups,
+ [{1023, 2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323},
+ {1535, 5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827},
+ {3071, 2, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9429825A2B}
+ ]},
+ {dh_gex_limits, {1023,2000}}
];
_ ->
[]
@@ -367,20 +372,25 @@ no_common_alg_client_disconnects(Config) ->
%%%--------------------------------------------------------------------
gex_client_init_option_groups(Config) ->
- do_gex_client_init(Config, {2000, 2048, 4000},
- {3,41}).
+ do_gex_client_init(Config, {512, 2048, 4000},
+ {5,16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}
+ ).
gex_client_init_option_groups_file(Config) ->
do_gex_client_init(Config, {2000, 2048, 4000},
- {5,61}).
+ {5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}
+ ).
gex_client_init_option_groups_moduli_file(Config) ->
do_gex_client_init(Config, {2000, 2048, 4000},
- {5,16#B7}).
+ {5, 16#DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F}
+ ).
gex_server_gex_limit(Config) ->
do_gex_client_init(Config, {1000, 3000, 4000},
- {7,91}).
+ %% {7,91}).
+ {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}
+ ).
do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
@@ -408,8 +418,15 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
).
%%%--------------------------------------------------------------------
-gex_client_old_request_exact(Config) -> do_gex_client_init_old(Config, 500, {3,17}).
-gex_client_old_request_noexact(Config) -> do_gex_client_init_old(Config, 800, {7,91}).
+gex_client_old_request_exact(Config) ->
+ do_gex_client_init_old(Config, 1023,
+ {2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323}
+ ).
+
+gex_client_old_request_noexact(Config) ->
+ do_gex_client_init_old(Config, 1400,
+ {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}
+ ).
do_gex_client_init_old(Config, N, {G,P}) ->
{ok,_} =
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
index 2887bb4b60..87c4b4afc8 100644
--- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
+++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
@@ -1,3 +1,3 @@
-{2222, 5, 61}.
-{1111, 7, 91}.
+{1023, 5, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}.
+{3071, 5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}.
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
index f6995ba4c9..6d2b4bcb59 100644
--- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
+++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
@@ -1,3 +1,2 @@
-20151021104105 2 6 100 2222 5 B7
-20151021104106 2 6 100 1111 5 4F
-
+20120821044046 2 6 100 1023 2 D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A7711F2C6B
+20120821050554 2 6 100 2047 5 DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F
diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl
index 286ac6e882..1673f52821 100644
--- a/lib/ssh/test/ssh_test_lib.erl
+++ b/lib/ssh/test/ssh_test_lib.erl
@@ -690,13 +690,16 @@ ssh_type() ->
ssh_type1() ->
try
+ ct:log("~p:~p os:find_executable(\"ssh\")",[?MODULE,?LINE]),
case os:find_executable("ssh") of
false ->
ct:log("~p:~p Executable \"ssh\" not found",[?MODULE,?LINE]),
not_found;
- _ ->
+ Path ->
+ ct:log("~p:~p Found \"ssh\" at ~p",[?MODULE,?LINE,Path]),
case os:cmd("ssh -V") of
- "OpenSSH" ++ _ ->
+ Version = "OpenSSH" ++ _ ->
+ ct:log("~p:~p Found OpenSSH ~p",[?MODULE,?LINE,Version]),
openSSH;
Str ->
ct:log("ssh client ~p is unknown",[Str]),
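
The extra logging in ssh_type1/0 leans on an alias pattern: Version = "OpenSSH" ++ _ both checks the prefix of the version banner and binds the whole string for the log call. A tiny stand-alone illustration of that idiom, with an invented banner value (not part of the patch):

    1> Classify = fun(Banner) ->
               case Banner of
                   Version = "OpenSSH" ++ _ -> {openssh, Version};
                   Other                    -> {unknown, Other}
               end
       end.
    2> Classify("OpenSSH_7.4p1").
    {openssh,"OpenSSH_7.4p1"}
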
diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl
index 54f83928ee..09d4c3e678 100644
--- a/lib/ssl/src/ssl_config.erl
+++ b/lib/ssl/src/ssl_config.erl
@@ -32,20 +32,20 @@ init(SslOpts, Role) ->
init_manager_name(SslOpts#ssl_options.erl_dist),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert}
+ {ok, #{pem_cache := PemCache} = Config}
= init_certificates(SslOpts, Role),
PrivateKey =
- init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
+ init_private_key(PemCache, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
SslOpts#ssl_options.password, Role),
- DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}.
+ DHParams = init_diffie_hellman(PemCache, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
+ {ok, Config#{private_key => PrivateKey, dh_params => DHParams}}.
init_manager_name(false) ->
put(ssl_manager, ssl_manager:name(normal)),
- put(ssl_cache, ssl_pem_cache:name(normal));
+ put(ssl_pem_cache, ssl_pem_cache:name(normal));
init_manager_name(true) ->
put(ssl_manager, ssl_manager:name(dist)),
- put(ssl_cache, ssl_pem_cache:name(dist)).
+ put(ssl_pem_cache, ssl_pem_cache:name(dist)).
init_certificates(#ssl_options{cacerts = CaCerts,
cacertfile = CACertFile,
@@ -53,7 +53,7 @@ init_certificates(#ssl_options{cacerts = CaCerts,
cert = Cert,
crl_cache = CRLCache
}, Role) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} =
+ {ok, Config} =
try
Certs = case CaCerts of
undefined ->
@@ -61,41 +61,37 @@ init_certificates(#ssl_options{cacerts = CaCerts,
_ ->
{der, CaCerts}
end,
- {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache)
+ {ok,_} = ssl_manager:connection_init(Certs, Role, CRLCache)
catch
_:Reason ->
file_error(CACertFile, {cacertfile, Reason})
end,
- init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CRLDbInfo, CertFile, Role).
+ init_certificates(Cert, Config, CertFile, Role).
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle,
- CRLDbInfo, <<>>, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined};
+init_certificates(undefined, Config, <<>>, _) ->
+ {ok, Config#{own_certificate => undefined}};
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CRLDbInfo, CertFile, client) ->
+init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, client) ->
try
%% Ignoring potential proxy-certificates see:
%% http://dev.globus.org/wiki/Security/ProxyFileFormat
- [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert}
+ [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache),
+ {ok, Config#{own_certificate => OwnCert}}
catch _Error:_Reason ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}
- end;
+ {ok, Config#{own_certificate => undefined}}
+ end;
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle,
- PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) ->
+init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, server) ->
try
- [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert}
+ [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache),
+ {ok, Config#{own_certificate => OwnCert}}
catch
_:Reason ->
file_error(CertFile, {certfile, Reason})
end;
-init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}.
-
+init_certificates(Cert, Config, _, _) ->
+ {ok, Config#{own_certificate => Cert}}.
+
init_private_key(_, undefined, <<>>, _Password, _Client) ->
undefined;
init_private_key(DbHandle, undefined, KeyFile, Password, _) ->
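
ssl_config:init/2 now returns {ok, Map} instead of a wide tuple, so each caller pattern-matches only the keys it needs and init/2 can grow the map (private_key, dh_params, own_certificate) without changing every call site. A minimal sketch of that shape, with placeholder names and values rather than the real ssl state:

    -module(ssl_config_map_sketch).
    -export([init_sketch/0]).

    %% Illustrative only: mirrors the {ok, Map} protocol introduced above.
    init_sketch() ->
        Config0 = #{pem_cache => pem_tab, cert_db_handle => cert_tab},
        #{pem_cache := PemCache} = Config0,   % bind just the key this step needs
        PrivateKey = load_key(PemCache),      % stand-in for init_private_key/5
        {ok, Config0#{private_key => PrivateKey, dh_params => undefined}}.

    load_key(_PemCache) ->
        dummy_key.                            % placeholder, not the real lookup
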
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index 6ed2fc83da..4fbac4cad3 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -323,8 +323,14 @@ handle_session(#server_hello{cipher_suite = CipherSuite,
-spec ssl_config(#ssl_options{}, client | server, #state{}) -> #state{}.
%%--------------------------------------------------------------------
ssl_config(Opts, Role, State) ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo,
- OwnCert, Key, DHParams} =
+ {ok, #{cert_db_ref := Ref,
+ cert_db_handle := CertDbHandle,
+ fileref_db_handle := FileRefHandle,
+ session_cache := CacheHandle,
+ crl_db_info := CRLDbHandle,
+ private_key := Key,
+ dh_params := DHParams,
+ own_certificate := OwnCert}} =
ssl_config:init(Opts, Role),
Handshake = ssl_handshake:init_handshake_history(),
TimeStamp = erlang:monotonic_time(),
@@ -335,7 +341,7 @@ ssl_config(Opts, Role, State) ->
file_ref_db = FileRefHandle,
cert_db_ref = Ref,
cert_db = CertDbHandle,
- crl_db = CRLDbInfo,
+ crl_db = CRLDbHandle,
session_cache = CacheHandle,
private_key = Key,
diffie_hellman_params = DHParams,
@@ -2428,16 +2434,23 @@ handle_sni_extension(#sni{hostname = Hostname}, State0) ->
undefined ->
State0;
_ ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, Key, DHParams} =
- ssl_config:init(NewOptions, State0#state.role),
- State0#state{
- session = State0#state.session#session{own_certificate = OwnCert},
- file_ref_db = FileRefHandle,
- cert_db_ref = Ref,
- cert_db = CertDbHandle,
- crl_db = CRLDbHandle,
- session_cache = CacheHandle,
- private_key = Key,
+ {ok, #{cert_db_ref := Ref,
+ cert_db_handle := CertDbHandle,
+ fileref_db_handle := FileRefHandle,
+ session_cache := CacheHandle,
+ crl_db_info := CRLDbHandle,
+ private_key := Key,
+ dh_params := DHParams,
+ own_certificate := OwnCert}} =
+ ssl_config:init(NewOptions, State0#state.role),
+ State0#state{
+ session = State0#state.session#session{own_certificate = OwnCert},
+ file_ref_db = FileRefHandle,
+ cert_db_ref = Ref,
+ cert_db = CertDbHandle,
+ crl_db = CRLDbHandle,
+ session_cache = CacheHandle,
+ private_key = Key,
diffie_hellman_params = DHParams,
ssl_options = NewOptions,
sni_hostname = Hostname
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 29b15f843f..2b82f18bb5 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -107,8 +107,7 @@ start_link_dist(Opts) ->
%%--------------------------------------------------------------------
-spec connection_init(binary()| {der, list()}, client | server,
{Cb :: atom(), Handle:: term()}) ->
- {ok, certdb_ref(), db_handle(), db_handle(),
- db_handle(), db_handle(), CRLInfo::term()}.
+ {ok, map()}.
%%
%% Description: Do necessary initializations for a new connection.
%%--------------------------------------------------------------------
@@ -128,7 +127,7 @@ cache_pem_file(File, DbHandle) ->
[Content] ->
{ok, Content};
undefined ->
- ssl_pem_cache:insert(File)
+ ssl_pem_cache:insert(File)
end.
%%--------------------------------------------------------------------
@@ -224,7 +223,7 @@ init([ManagerName, PemCacheName, Opts]) ->
CacheCb = proplists:get_value(session_cb, Opts, ssl_session_cache),
SessionLifeTime =
proplists:get_value(session_lifetime, Opts, ?'24H_in_sec'),
- CertDb = ssl_pkix_db:create(),
+ CertDb = ssl_pkix_db:create(PemCacheName),
ClientSessionCache =
CacheCb:init([{role, client} |
proplists:get_value(session_cb_init_args, Opts, [])]),
@@ -261,18 +260,25 @@ init([ManagerName, PemCacheName, Opts]) ->
handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From,
#state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
Ref = make_ref(),
- Result = {ok, Ref, CertDb, FileRefDb, PemChace,
- session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}},
- {reply, Result, State#state{certificate_db = Db}};
+ {reply, {ok, #{cert_db_ref => Ref,
+ cert_db_handle => CertDb,
+ fileref_db_handle => FileRefDb,
+ pem_cache => PemChace,
+ session_cache => session_cache(Role, State),
+ crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State};
handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From,
#state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
case add_trusted_certs(Pid, Trustedcerts, Db) of
{ok, Ref} ->
- {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State),
- {CRLCb, crl_db_info(Db, UserCRLDb)}}, State};
- {error, _} = Error ->
- {reply, Error, State}
+ {reply, {ok, #{cert_db_ref => Ref,
+ cert_db_handle => CertDb,
+ fileref_db_handle => FileRefDb,
+ pem_cache => PemChace,
+ session_cache => session_cache(Role, State),
+ crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State};
+ {error, _} = Error ->
+ {reply, Error, State}
end;
handle_call({{insert_crls, Path, CRLs}, _}, _From,
diff --git a/lib/ssl/src/ssl_pem_cache.erl b/lib/ssl/src/ssl_pem_cache.erl
index 2b31374bcc..f63a301f69 100644
--- a/lib/ssl/src/ssl_pem_cache.erl
+++ b/lib/ssl/src/ssl_pem_cache.erl
@@ -133,7 +133,7 @@ invalidate_pem(File) ->
init([Name]) ->
put(ssl_pem_cache, Name),
process_flag(trap_exit, true),
- PemCache = ssl_pkix_db:create_pem_cache(),
+ PemCache = ssl_pkix_db:create_pem_cache(Name),
Interval = pem_check_interval(),
erlang:send_after(Interval, self(), clear_pem_cache),
{ok, #state{pem_cache = PemCache,
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index 961a555873..cde05bb16f 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -28,7 +28,7 @@
-include_lib("public_key/include/public_key.hrl").
-include_lib("kernel/include/file.hrl").
--export([create/0, create_pem_cache/0,
+-export([create/1, create_pem_cache/1,
add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
extract_trusted_certs/1,
remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1,
@@ -40,13 +40,13 @@
%%====================================================================
%%--------------------------------------------------------------------
--spec create() -> [db_handle(),...].
+-spec create(atom()) -> [db_handle(),...].
%%
%% Description: Creates a new certificate db.
%% Note: lookup_trusted_cert/4 may be called from any process but only
%% the process that called create may call the other functions.
%%--------------------------------------------------------------------
-create() ->
+create(PEMCacheName) ->
[%% Let connection process delete trusted certs
%% that can only belong to one connection. (Supplied directly
%% on DER format to ssl:connect/listen.)
@@ -56,14 +56,14 @@ create() ->
ets:new(ssl_otp_ca_ref_file_mapping, [set, protected])
},
%% Lookups in named table owned by ssl_pem_cache process
- ssl_otp_pem_cache,
+ PEMCacheName,
%% Default cache
{ets:new(ssl_otp_crl_cache, [set, protected]),
ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])}
].
-create_pem_cache() ->
- ets:new(ssl_otp_pem_cache, [named_table, set, protected]).
+create_pem_cache(Name) ->
+ ets:new(Name, [named_table, set, protected]).
%%--------------------------------------------------------------------
-spec remove([db_handle()]) -> ok.
@@ -76,7 +76,9 @@ remove(Dbs) ->
true = ets:delete(Db1);
(undefined) ->
ok;
- (ssl_otp_pem_cache) ->
+ (ssl_pem_cache) ->
+ ok;
+ (ssl_pem_cache_dist) ->
ok;
(Db) ->
true = ets:delete(Db)
@@ -341,3 +343,4 @@ crl_issuer(DerCRL) ->
CRL = public_key:der_decode('CertificateList', DerCRL),
TBSCRL = CRL#'CertificateList'.tbsCertList,
TBSCRL#'TBSCertList'.issuer.
+
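
create/1 and create_pem_cache/1 now take the PEM cache table name, so the normal and dist ssl instances each get their own named ETS table (ssl_pem_cache and ssl_pem_cache_dist), looked up by name from connection processes. A minimal sketch of the ETS behaviour this relies on, with invented table and key names:

    -module(named_cache_sketch).
    -export([demo/0]).

    %% A protected named_table can be read by name from any process,
    %% while only its owner may write to it.
    demo() ->
        Owner = spawn(fun() ->
                              ets:new(example_pem_cache,
                                      [named_table, set, protected]),
                              ets:insert(example_pem_cache, {"cert.pem", <<"pem">>}),
                              receive stop -> ok end
                      end),
        timer:sleep(100),                             % crude: let the table appear
        Result = ets:lookup(example_pem_cache, "cert.pem"),  % read access by name
        Owner ! stop,
        Result.                                       % [{"cert.pem", <<"pem">>}]
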
diff --git a/lib/stdlib/src/beam_lib.erl b/lib/stdlib/src/beam_lib.erl
index d7ee5c1f5d..461acf03be 100644
--- a/lib/stdlib/src/beam_lib.erl
+++ b/lib/stdlib/src/beam_lib.erl
@@ -63,7 +63,7 @@
-type label() :: integer().
-type chunkid() :: nonempty_string(). % approximation of the strings below
-%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom".
+%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom" | "AtU8".
-type chunkname() :: 'abstract_code' | 'attributes' | 'compile_info'
| 'exports' | 'labeled_exports'
| 'imports' | 'indexed_imports'
@@ -520,6 +520,8 @@ read_chunk_data(File0, ChunkNames0, Options)
end.
%% -> {ok, list()} | throw(Error)
+check_chunks([atoms | Ids], File, IL, L) ->
+ check_chunks(Ids, File, ["Atom", "AtU8" | IL], [{atom_chunk, atoms} | L]);
check_chunks([ChunkName | Ids], File, IL, L) when is_atom(ChunkName) ->
ChunkId = chunk_name_to_id(ChunkName, File),
check_chunks(Ids, File, [ChunkId | IL], [{ChunkId, ChunkName} | L]);
@@ -537,6 +539,10 @@ scan_beam(File, What0, AllowMissingChunks) ->
case scan_beam1(File, What0) of
{missing, _FD, Mod, Data, What} when AllowMissingChunks ->
{ok, Mod, [{Id, missing_chunk} || Id <- What] ++ Data};
+ {missing, _FD, Mod, Data, ["Atom"]} ->
+ {ok, Mod, Data};
+ {missing, _FD, Mod, Data, ["AtU8"]} ->
+ {ok, Mod, Data};
{missing, FD, _Mod, _Data, What} ->
error({missing_chunk, filename(FD), hd(What)});
R ->
@@ -581,18 +587,23 @@ scan_beam(FD, Pos, What, Mod, Data) ->
error({invalid_beam_file, filename(FD), Pos})
end.
-get_data(Cs, "Atom"=Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, Encoding) ->
NewCs = del_chunk(Id, Cs),
{NFD, Chunk} = get_chunk(Id, Pos, Size, FD),
<<_Num:32, Chunk2/binary>> = Chunk,
- {Module, _} = extract_atom(Chunk2),
+ {Module, _} = extract_atom(Chunk2, Encoding),
C = case Cs of
info ->
{Id, Pos, Size};
_ ->
{Id, Chunk}
end,
- scan_beam(NFD, Pos2, NewCs, Module, [C | Data]);
+ scan_beam(NFD, Pos2, NewCs, Module, [C | Data]).
+
+get_data(Cs, "Atom" = Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+ get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, latin1);
+get_data(Cs, "AtU8" = Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+ get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, utf8);
get_data(info, Id, FD, Size, Pos, Pos2, Mod, Data) ->
scan_beam(FD, Pos2, info, Mod, [{Id, Pos, Size} | Data]);
get_data(Chunks, Id, FD, Size, Pos, Pos2, Mod, Data) ->
@@ -624,6 +635,9 @@ get_chunk(Id, Pos, Size, FD) ->
{NFD, Chunk}
end.
+chunks_to_data([{atom_chunk, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) ->
+ {NewAtoms, Ret} = chunk_to_data(Name, <<"">>, File, Cs, Atoms, Module),
+ chunks_to_data(CNs, Chunks, File, Cs, Module, NewAtoms, [Ret | L]);
chunks_to_data([{Id, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) ->
{_Id, Chunk} = lists:keyfind(Id, 1, Chunks),
{NewAtoms, Ret} = chunk_to_data(Name, Chunk, File, Cs, Atoms, Module),
@@ -651,7 +665,7 @@ chunk_to_data(abstract_code=Id, Chunk, File, _Cs, AtomTable, Mod) ->
<<>> ->
{AtomTable, {Id, no_abstract_code}};
<<0:8,N:8,Mode0:N/binary,Rest/binary>> ->
- Mode = list_to_atom(binary_to_list(Mode0)),
+ Mode = binary_to_atom(Mode0, utf8),
decrypt_abst(Mode, Mod, File, Id, AtomTable, Rest);
_ ->
case catch binary_to_term(Chunk) of
@@ -683,7 +697,6 @@ chunk_to_data(ChunkId, Chunk, _File,
_Cs, AtomTable, _Module) when is_list(ChunkId) ->
{AtomTable, {ChunkId, Chunk}}. % Chunk is a binary
-chunk_name_to_id(atoms, _) -> "Atom";
chunk_name_to_id(indexed_imports, _) -> "ImpT";
chunk_name_to_id(imports, _) -> "ImpT";
chunk_name_to_id(exports, _) -> "ExpT";
@@ -738,25 +751,30 @@ atm(AT, N) ->
%% AT is updated.
ensure_atoms({empty, AT}, Cs) ->
- {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs),
- extract_atoms(AtomChunk, AT),
+ case lists:keyfind("AtU8", 1, Cs) of
+ {_Id, AtomChunk} when is_binary(AtomChunk) ->
+ extract_atoms(AtomChunk, AT, utf8);
+ _ ->
+ {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs),
+ extract_atoms(AtomChunk, AT, latin1)
+ end,
AT;
ensure_atoms(AT, _Cs) ->
AT.
-extract_atoms(<<_Num:32, B/binary>>, AT) ->
- extract_atoms(B, 1, AT).
+extract_atoms(<<_Num:32, B/binary>>, AT, Encoding) ->
+ extract_atoms(B, 1, AT, Encoding).
-extract_atoms(<<>>, _I, _AT) ->
+extract_atoms(<<>>, _I, _AT, _Encoding) ->
true;
-extract_atoms(B, I, AT) ->
- {Atom, B1} = extract_atom(B),
+extract_atoms(B, I, AT, Encoding) ->
+ {Atom, B1} = extract_atom(B, Encoding),
true = ets:insert(AT, {I, Atom}),
- extract_atoms(B1, I+1, AT).
+ extract_atoms(B1, I+1, AT, Encoding).
-extract_atom(<<Len, B/binary>>) ->
+extract_atom(<<Len, B/binary>>, Encoding) ->
<<SB:Len/binary, Tail/binary>> = B,
- {list_to_atom(binary_to_list(SB)), Tail}.
+ {binary_to_atom(SB, Encoding), Tail}.
%%% Utils.
@@ -856,12 +874,12 @@ significant_chunks() ->
%% for a module. They are listed in the order that they should be MD5:ed.
md5_chunks() ->
- ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"].
+ ["Atom", "AtU8", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"].
%% The following chunks are mandatory in every Beam file.
mandatory_chunks() ->
- ["Code", "ExpT", "ImpT", "StrT", "Atom"].
+ ["Code", "ExpT", "ImpT", "StrT"].
%%% ====================================================================
%%% The rest of the file handles encrypted debug info.
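
beam_lib now resolves the symbolic atoms chunk to "AtU8" when present and falls back to "Atom", so callers that ask for atoms keep working for modules with Unicode atom names. A short usage sketch (nothing here is specific to this patch; the module to inspect is up to the caller):

    -module(beam_atoms_sketch).
    -export([atom_table/1]).

    %% Dump a loaded module's atom table as [{Index, Atom}] pairs.
    atom_table(Module) ->
        Beam = code:which(Module),                      % path to the .beam file
        {ok, {Module, [{atoms, Atoms}]}} = beam_lib:chunks(Beam, [atoms]),
        Atoms.
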
diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl
index ccc827ca2d..45666fbcb4 100644
--- a/lib/stdlib/src/binary.erl
+++ b/lib/stdlib/src/binary.erl
@@ -24,7 +24,7 @@
-export_type([cp/0]).
--opaque cp() :: {'am' | 'bm', binary()}.
+-opaque cp() :: {'am' | 'bm', reference()}.
-type part() :: {Start :: non_neg_integer(), Length :: integer()}.
%%% BIFs.
diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl
index 5bc9475fc8..e81383775b 100644
--- a/lib/stdlib/src/dets.erl
+++ b/lib/stdlib/src/dets.erl
@@ -1063,11 +1063,8 @@ foldl_bins([Bin | Bins], MP, Terms) ->
compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC('_') = Spec) ->
{Spec, true};
compile_match_spec(select, Spec) ->
- case catch ets:match_spec_compile(Spec) of
- X when is_binary(X) ->
- {Spec, {match_spec, X}};
- _ ->
- badarg
+ try {Spec, {match_spec, ets:match_spec_compile(Spec)}}
+ catch error:_ -> badarg
end;
compile_match_spec(object, Pat) ->
compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC(Pat));
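
compile_match_spec/2 stops inspecting the result type of ets:match_spec_compile/1 (a binary before, a reference now) and simply converts any exception into badarg with a try expression. The same idiom in isolation, as a fragment to paste into any module (the wrapper name is invented):

    %% Turn any error raised by the validator into a badarg return value.
    safe_compile(Spec) ->
        try {ok, ets:match_spec_compile(Spec)}
        catch error:_ -> badarg
        end.

For example, safe_compile([{'_',[],[true]}]) returns {ok, CompiledMS}, while safe_compile(not_a_spec) returns badarg.
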
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 20de06fd0b..d6fd1e3ea1 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -51,8 +51,8 @@
-type tab() :: atom() | tid().
-type type() :: set | ordered_set | bag | duplicate_bag.
-type continuation() :: '$end_of_table'
- | {tab(),integer(),integer(),binary(),list(),integer()}
- | {tab(),_,_,integer(),binary(),list(),integer(),integer()}.
+ | {tab(),integer(),integer(),comp_match_spec(),list(),integer()}
+ | {tab(),_,_,integer(),comp_match_spec(),list(),integer(),integer()}.
-opaque tid() :: integer().
@@ -488,7 +488,7 @@ update_element(_, _, _) ->
%%% End of BIFs
--opaque comp_match_spec() :: binary(). %% this one is REALLY opaque
+-opaque comp_match_spec() :: reference().
-spec match_spec_run(List, CompiledMatchSpec) -> list() when
List :: [tuple()],
@@ -505,28 +505,28 @@ match_spec_run(List, CompiledMS) ->
repair_continuation('$end_of_table', _) ->
'$end_of_table';
%% ordered_set
-repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,Bin,L2,N3,N4}, MS)
+repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,MSRef,L2,N3,N4}, MS)
when %% (is_atom(Table) or is_integer(Table)),
is_integer(N2),
- byte_size(Bin) =:= 0,
+ %% is_reference(MSRef),
is_list(L2),
is_integer(N3),
is_integer(N4) ->
- case ets:is_compiled_ms(Bin) of
+ case ets:is_compiled_ms(MSRef) of
true ->
Untouched;
false ->
{Table,Lastkey,EndCondition,N2,ets:match_spec_compile(MS),L2,N3,N4}
end;
%% set/bag/duplicate_bag
-repair_continuation(Untouched = {Table,N1,N2,Bin,L,N3}, MS)
+repair_continuation(Untouched = {Table,N1,N2,MSRef,L,N3}, MS)
when %% (is_atom(Table) or is_integer(Table)),
is_integer(N1),
is_integer(N2),
- byte_size(Bin) =:= 0,
+ %% is_reference(MSRef),
is_list(L),
is_integer(N3) ->
- case ets:is_compiled_ms(Bin) of
+ case ets:is_compiled_ms(MSRef) of
true ->
Untouched;
false ->
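
With compiled match specs represented as references, a select continuation that has been serialized (for example with term_to_binary/1) no longer carries a live match program, which is what ets:repair_continuation/2 fixes; the byte_size guard had to go because the stale field is no longer a binary. A minimal round-trip sketch with an invented table:

    -module(ets_cont_sketch).
    -export([demo/0]).

    %% A continuation that went through external term format carries a stale
    %% match-spec reference and must be repaired before it can be reused.
    demo() ->
        T = ets:new(demo_tab, [ordered_set]),
        true = ets:insert(T, [{N, N} || N <- lists:seq(1, 10)]),
        MS = [{'_', [], ['$_']}],
        {_First, Cont0} = ets:select(T, MS, 3),
        Cont1 = binary_to_term(term_to_binary(Cont0)),  % e.g. read back from disk
        Cont  = ets:repair_continuation(Cont1, MS),
        ets:select(Cont).                               % resumes the traversal
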
diff --git a/lib/stdlib/test/beam_lib_SUITE.erl b/lib/stdlib/test/beam_lib_SUITE.erl
index 4521ecc0ef..279e15f703 100644
--- a/lib/stdlib/test/beam_lib_SUITE.erl
+++ b/lib/stdlib/test/beam_lib_SUITE.erl
@@ -81,12 +81,8 @@ normal(Conf) when is_list(Conf) ->
NoOfTables = length(ets:all()),
P0 = pps(),
- CompileFlags = [{outdir,PrivDir}, debug_info],
- {ok,_} = compile:file(Source, CompileFlags),
- {ok, Binary} = file:read_file(BeamFile),
-
- do_normal(BeamFile),
- do_normal(Binary),
+ do_normal(Source, PrivDir, BeamFile, []),
+ do_normal(Source, PrivDir, BeamFile, [no_utf8_atoms]),
{ok,_} = compile:file(Source, [{outdir,PrivDir}, no_debug_info]),
{ok, {simple, [{abstract_code, no_abstract_code}]}} =
@@ -101,7 +97,15 @@ normal(Conf) when is_list(Conf) ->
true = (P0 == pps()),
ok.
-do_normal(BeamFile) ->
+do_normal(Source, PrivDir, BeamFile, Opts) ->
+ CompileFlags = [{outdir,PrivDir}, debug_info | Opts],
+ {ok,_} = compile:file(Source, CompileFlags),
+ {ok, Binary} = file:read_file(BeamFile),
+
+ do_normal(BeamFile, Opts),
+ do_normal(Binary, Opts).
+
+do_normal(BeamFile, Opts) ->
Imports = {imports, [{erlang, get_module_info, 1},
{erlang, get_module_info, 2},
{lists, member, 2}]},
@@ -130,20 +134,31 @@ do_normal(BeamFile) ->
beam_lib:chunks(BeamFile, [abstract_code]),
%% Test reading optional chunks.
- All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"],
+ All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT", "AtU8"],
{ok,{simple,Chunks}} = beam_lib:chunks(BeamFile, All, [allow_missing_chunks]),
- verify_simple(Chunks).
+ case {verify_simple(Chunks),Opts} of
+ {{missing_chunk, AtomBin}, []} when is_binary(AtomBin) -> ok;
+ {{AtomBin, missing_chunk}, [no_utf8_atoms]} when is_binary(AtomBin) -> ok
+ end,
-verify_simple([{"Atom", AtomBin},
+ %% Make sure that reading the atom chunk works when the 'allow_missing_chunks'
+ %% option is used.
+ Some = ["Code",atoms,"ExpT","LitT"],
+ {ok,{simple,SomeChunks}} = beam_lib:chunks(BeamFile, Some, [allow_missing_chunks]),
+ [{"Code",<<_/binary>>},{atoms,[_|_]},{"ExpT",<<_/binary>>},{"LitT",missing_chunk}] =
+ SomeChunks.
+
+verify_simple([{"Atom", PlainAtomChunk},
{"Code", CodeBin},
{"StrT", StrBin},
{"ImpT", ImpBin},
{"ExpT", ExpBin},
{"FunT", missing_chunk},
- {"LitT", missing_chunk}])
- when is_binary(AtomBin), is_binary(CodeBin), is_binary(StrBin),
+ {"LitT", missing_chunk},
+ {"AtU8", AtU8Chunk}])
+ when is_binary(CodeBin), is_binary(StrBin),
is_binary(ImpBin), is_binary(ExpBin) ->
- ok.
+ {PlainAtomChunk, AtU8Chunk}.
%% Read invalid beam files.
error(Conf) when is_list(Conf) ->
@@ -211,7 +226,7 @@ last_chunk(Bin) ->
do_error(BeamFile, ACopy) ->
%% evil tests
Chunks = chunk_info(BeamFile),
- {value, {_, AtomStart, _}} = lists:keysearch("Atom", 1, Chunks),
+ {value, {_, AtomStart, _}} = lists:keysearch("AtU8", 1, Chunks),
{value, {_, ImportStart, _}} = lists:keysearch("ImpT", 1, Chunks),
{value, {_, AbstractStart, _}} = lists:keysearch("Abst", 1, Chunks),
{value, {_, AttributesStart, _}} =
@@ -234,7 +249,7 @@ do_error(BeamFile, ACopy) ->
verify(not_a_beam_file, beam_lib:info(BF7)),
BF8 = set_byte(ACopy, BeamFile, 13, 17),
- verify(missing_chunk, beam_lib:chunks(BF8, ["Atom"])),
+ verify(missing_chunk, beam_lib:chunks(BF8, ["AtU8"])),
BF9 = set_byte(ACopy, BeamFile, CompileInfoStart+10, 17),
verify(invalid_chunk, beam_lib:chunks(BF9, [compile_info])).
diff --git a/lib/stdlib/test/dets_SUITE.erl b/lib/stdlib/test/dets_SUITE.erl
index aa31fdde5a..95c9b47465 100644
--- a/lib/stdlib/test/dets_SUITE.erl
+++ b/lib/stdlib/test/dets_SUITE.erl
@@ -3012,8 +3012,13 @@ repair_continuation(Config) ->
MS = [{'_',[],[true]}],
- {[true], C1} = dets:select(Tab, MS, 1),
- C2 = binary_to_term(term_to_binary(C1)),
+ SRes = term_to_binary(dets:select(Tab, MS, 1)),
+ %% Get rid of compiled match spec
+ lists:foreach(fun (P) ->
+ garbage_collect(P)
+ end, processes()),
+ {[true], C2} = binary_to_term(SRes),
+
{'EXIT', {badarg, _}} = (catch dets:select(C2)),
C3 = dets:repair_continuation(C2, MS),
{[true], C4} = dets:select(C3),
diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl
index 4ae734eb65..7d0ba967f9 100644
--- a/lib/stdlib/test/erl_scan_SUITE.erl
+++ b/lib/stdlib/test/erl_scan_SUITE.erl
@@ -772,10 +772,9 @@ unicode() ->
erl_scan:string([1089]),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([1089], {1,1}),
- {error,{1,erl_scan,{illegal,atom}},1} =
- erl_scan:string("'a"++[1089]++"b'", 1),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,6}} =
- erl_scan:string("'a"++[1089]++"b'", {1,1}),
+ {error,{{1,3},erl_scan,{illegal,character}},{1,4}} =
+ erl_scan:string("'a" ++ [999999999] ++ "c'", {1,1}),
+
test("\"a"++[1089]++"b\""),
{ok,[{char,1,1}],1} =
erl_scan_string([$$,$\\,$^,1089], 1),
@@ -786,8 +785,8 @@ unicode() ->
erl_scan:format_error(Error),
{error,{{1,1},erl_scan,_},{1,11}} =
erl_scan:string("\"qa\\x{aaa}",{1,1}),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
- erl_scan:string("'qa\\x{aaa}'",{1,1}),
+ {error,{{1,1},erl_scan,_},{1,11}} =
+ erl_scan:string("'qa\\x{aaa}",{1,1}),
{ok,[{char,1,1089}],1} =
erl_scan_string([$$,1089], 1),
@@ -904,9 +903,9 @@ more_chars() ->
%% OTP-10302. Unicode characters scanner/parser.
otp_10302(Config) when is_list(Config) ->
%% From unicode():
- {error,{1,erl_scan,{illegal,atom}},1} =
+ {ok,[{atom,1,'aсb'}],1} =
erl_scan:string("'a"++[1089]++"b'", 1),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
+ {ok,[{atom,{1,1},'qaપ'}],{1,12}} =
erl_scan:string("'qa\\x{aaa}'",{1,1}),
{ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1),
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl
index 3bff546243..18c4fe902d 100644
--- a/lib/typer/src/typer.erl
+++ b/lib/typer/src/typer.erl
@@ -129,31 +129,22 @@ extract(#analysis{macros = Macros,
%% Process remote types
NewCodeServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(CodeServer1),
+ CodeServer2 =
+ dialyzer_utils:merge_types(CodeServer1,
+ TrustPLT), % XXX change to the PLT?
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(CodeServer1),
case sets:size(NewExpTypes) of 0 -> ok end,
- OldRecords = dialyzer_plt:get_types(TrustPLT), % XXX change to the PLT?
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
- CodeServer2 = dialyzer_codeserver:set_temp_records(MergedRecords, CodeServer1),
CodeServer3 = dialyzer_codeserver:finalize_exported_types(NewExpTypes, CodeServer2),
- {CodeServer4, RecordDict} =
- dialyzer_utils:process_record_remote_types(CodeServer3),
- dialyzer_contracts:process_contract_remote_types(CodeServer4, RecordDict)
+ CodeServer4 = dialyzer_utils:process_record_remote_types(CodeServer3),
+ dialyzer_contracts:process_contract_remote_types(CodeServer4)
catch
throw:{error, ErrorMsg} ->
compile_error(ErrorMsg)
end,
%% Create TrustPLT
- Contracts = dialyzer_codeserver:get_contracts(NewCodeServer),
- Modules = dict:fetch_keys(Contracts),
- FoldFun =
- fun(Module, TmpPlt) ->
- {ok, ModuleContracts} = dict:find(Module, Contracts),
- SpecList = [{MFA, Contract}
- || {MFA, {_FileLine, Contract}} <- maps:to_list(ModuleContracts)],
- dialyzer_plt:insert_contract_list(TmpPlt, SpecList)
- end,
- NewTrustPLT = lists:foldl(FoldFun, TrustPLT, Modules),
+ ContractsDict = dialyzer_codeserver:get_contracts(NewCodeServer),
+ Contracts = orddict:from_list(dict:to_list(ContractsDict)),
+ NewTrustPLT = dialyzer_plt:insert_contract_list(TrustPLT, Contracts),
Analysis#analysis{trust_plt = NewTrustPLT}.
%%--------------------------------------------------------------------
@@ -835,19 +826,14 @@ collect_info(Analysis) ->
TmpCServer = NewAnalysis#analysis.codeserver,
NewCServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer),
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer, NewPlt),
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer),
- OldRecords = dialyzer_plt:get_types(NewPlt),
OldExpTypes = dialyzer_plt:get_exported_types(NewPlt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
MergedExpTypes = sets:union(NewExpTypes, OldExpTypes),
- %% io:format("Merged Records ~p",[MergedRecords]),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
- {TmpCServer3, RecordDict} =
- dialyzer_utils:process_record_remote_types(TmpCServer2),
- dialyzer_contracts:process_contract_remote_types(TmpCServer3, RecordDict)
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3)
catch
throw:{error, ErrorMsg} ->
fatal_error(ErrorMsg)
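
The rewritten PLT population in typer hands the contracts over wholesale: the codeserver's dict of per-module contracts is flattened to an orddict and passed to dialyzer_plt:insert_contract_list/2 in one call. The dict-to-orddict step itself is plain stdlib; a toy example with invented data:

    1> D = dict:from_list([{mod_b, spec_b}, {mod_a, spec_a}]).
    2> orddict:from_list(dict:to_list(D)).
    [{mod_a,spec_a},{mod_b,spec_b}]
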