Diffstat (limited to 'lib')
-rw-r--r--  lib/asn1/doc/src/Makefile | 3
-rw-r--r--  lib/asn1/doc/src/asn1ct.xml | 45
-rw-r--r--  lib/asn1/doc/src/asn1rt.xml | 135
-rw-r--r--  lib/asn1/src/Makefile | 1
-rw-r--r--  lib/asn1/src/asn1.app.src | 1
-rw-r--r--  lib/asn1/src/asn1ct.erl | 24
-rw-r--r--  lib/asn1/src/asn1ct_value.erl | 7
-rw-r--r--  lib/asn1/src/asn1rt.erl | 184
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl | 4
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/testobj.erl | 10
-rw-r--r--  lib/asn1/test/testPrimStrings.erl | 22
-rw-r--r--  lib/common_test/src/common_test.app.src | 1
-rw-r--r--  lib/common_test/test/Makefile | 3
-rw-r--r--  lib/common_test/test/ct_SUITE.erl | 53
-rw-r--r--  lib/common_test/test/ct_hooks_SUITE.erl | 112
-rw-r--r--  lib/compiler/src/Makefile | 2
-rw-r--r--  lib/compiler/src/beam_a.erl | 3
-rw-r--r--  lib/compiler/src/beam_asm.erl | 35
-rw-r--r--  lib/compiler/src/beam_block.erl | 3
-rw-r--r--  lib/compiler/src/beam_bs.erl | 3
-rw-r--r--  lib/compiler/src/beam_bsm.erl | 38
-rw-r--r--  lib/compiler/src/beam_clean.erl | 20
-rw-r--r--  lib/compiler/src/beam_dead.erl | 4
-rw-r--r--  lib/compiler/src/beam_dict.erl | 20
-rw-r--r--  lib/compiler/src/beam_except.erl | 9
-rw-r--r--  lib/compiler/src/beam_flatten.erl | 3
-rw-r--r--  lib/compiler/src/beam_jump.erl | 17
-rw-r--r--  lib/compiler/src/beam_listing.erl | 14
-rw-r--r--  lib/compiler/src/beam_peep.erl | 3
-rw-r--r--  lib/compiler/src/beam_receive.erl | 3
-rw-r--r--  lib/compiler/src/beam_reorder.erl | 3
-rw-r--r--  lib/compiler/src/beam_split.erl | 3
-rw-r--r--  lib/compiler/src/beam_trim.erl | 7
-rw-r--r--  lib/compiler/src/beam_type.erl | 3
-rw-r--r--  lib/compiler/src/beam_utils.erl | 58
-rw-r--r--  lib/compiler/src/beam_validator.erl | 4
-rw-r--r--  lib/compiler/src/beam_z.erl | 3
-rw-r--r--  lib/compiler/src/cerl.erl | 12
-rw-r--r--  lib/compiler/src/compile.erl | 361
-rw-r--r--  lib/compiler/src/core_scan.erl | 24
-rw-r--r--  lib/compiler/src/sys_core_fold.erl | 6
-rw-r--r--  lib/compiler/src/sys_pre_attributes.erl | 48
-rw-r--r--  lib/compiler/src/v3_codegen.erl | 4
-rw-r--r--  lib/compiler/src/v3_kernel_pp.erl | 2
-rw-r--r--  lib/compiler/src/v3_life.erl | 9
-rw-r--r--  lib/compiler/src/v3_life.hrl | 8
-rw-r--r--  lib/compiler/test/lc_SUITE.erl | 9
-rw-r--r--  lib/crypto/c_src/crypto.c | 154
-rw-r--r--  lib/crypto/src/crypto.erl | 7
-rw-r--r--  lib/crypto/test/crypto_SUITE.erl | 49
-rw-r--r--  lib/dialyzer/src/dialyzer_analysis_callgraph.erl | 113
-rw-r--r--  lib/dialyzer/src/dialyzer_behaviours.erl | 13
-rw-r--r--  lib/dialyzer/src/dialyzer_callgraph.erl | 94
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl | 14
-rw-r--r--  lib/dialyzer/src/dialyzer_codeserver.erl | 204
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl | 52
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl | 22
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.erl | 3
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl | 130
-rw-r--r--  lib/dialyzer/src/dialyzer_succ_typings.erl | 33
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl | 54
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl | 118
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/chars | 4
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/anno.erl | 18
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/chars.erl | 32
-rw-r--r--  lib/eldap/test/Makefile | 2
-rw-r--r--  lib/eldap/test/eldap.cover | 3
-rw-r--r--  lib/hipe/cerl/erl_types.erl | 189
-rw-r--r--  lib/hipe/test/basic_SUITE_data/basic_num_bif.erl | 217
-rw-r--r--  lib/hipe/test/hipe_SUITE.erl | 6
-rw-r--r--  lib/hipe/test/opt_verify_SUITE.erl | 39
-rw-r--r--  lib/inets/doc/src/httpc.xml | 2
-rw-r--r--  lib/inets/src/http_client/httpc.erl | 92
-rw-r--r--  lib/inets/src/http_client/httpc_handler.erl | 674
-rw-r--r--  lib/inets/src/http_client/httpc_response.erl | 2
-rw-r--r--  lib/inets/test/httpc_SUITE.erl | 71
-rw-r--r--  lib/kernel/doc/src/code.xml | 2
-rw-r--r--  lib/kernel/doc/src/config.xml | 4
-rw-r--r--  lib/kernel/doc/src/heart.xml | 17
-rw-r--r--  lib/kernel/doc/src/seq_trace.xml | 6
-rw-r--r--  lib/kernel/src/erts_debug.erl | 25
-rw-r--r--  lib/kernel/src/heart.erl | 15
-rw-r--r--  lib/observer/src/crashdump_viewer.erl | 21
-rw-r--r--  lib/observer/src/etop.erl | 24
-rw-r--r--  lib/observer/src/etop_txt.erl | 24
-rw-r--r--  lib/observer/src/observer_lib.erl | 14
-rw-r--r--  lib/observer/src/observer_pro_wx.erl | 8
-rw-r--r--  lib/os_mon/src/memsup.erl | 1
-rw-r--r--  lib/public_key/src/public_key.erl | 23
-rw-r--r--  lib/runtime_tools/doc/src/LTTng.xml | 2
-rw-r--r--  lib/runtime_tools/src/observer_backend.erl | 7
-rwxr-xr-x  lib/sasl/test/release_handler_SUITE_data/start | 3
-rwxr-xr-x  lib/sasl/test/release_handler_SUITE_data/start_client | 3
-rw-r--r--  lib/snmp/src/app/snmp.appup.src | 8
-rw-r--r--  lib/snmp/src/app/snmp.erl | 80
-rw-r--r--  lib/snmp/src/compile/snmpc_lib.erl | 4
-rw-r--r--  lib/snmp/src/compile/snmpc_mib_gram.yrl | 6
-rw-r--r--  lib/snmp/test/snmp_compiler_test.erl | 32
-rw-r--r--  lib/snmp/test/snmp_test_data/OTP14145-MIB.mib | 44
-rw-r--r--  lib/snmp/vsn.mk | 4
-rw-r--r--  lib/ssh/src/ssh_auth.erl | 6
-rw-r--r--  lib/ssh/src/ssh_bits.erl | 46
-rw-r--r--  lib/ssh/src/ssh_connection_handler.erl | 9
-rw-r--r--  lib/ssh/src/ssh_dbg.erl | 66
-rw-r--r--  lib/ssh/src/ssh_sftp.erl | 165
-rw-r--r--  lib/ssh/src/ssh_sftpd_file_api.erl | 2
-rw-r--r--  lib/ssh/src/ssh_transport.erl | 14
-rw-r--r--  lib/ssh/test/property_test/ssh_eqc_encode_decode.erl | 370
-rw-r--r--  lib/ssh/test/ssh_algorithms_SUITE.erl | 8
-rw-r--r--  lib/ssh/test/ssh_benchmark_SUITE.erl | 20
-rw-r--r--  lib/ssh/test/ssh_options_SUITE.erl | 11
-rw-r--r--  lib/ssh/test/ssh_property_test_SUITE.erl | 3
-rw-r--r--  lib/ssh/test/ssh_sftp_SUITE.erl | 4
-rw-r--r--  lib/ssh/test/ssh_test_lib.erl | 47
-rw-r--r--  lib/ssh/test/ssh_upgrade_SUITE.erl | 4
-rw-r--r--  lib/ssl/doc/src/ssl.xml | 8
-rw-r--r--  lib/ssl/src/ssl.erl | 8
-rw-r--r--  lib/ssl/src/ssl_connection.erl | 11
-rw-r--r--  lib/ssl/src/ssl_handshake.hrl | 3
-rw-r--r--  lib/ssl/src/ssl_internal.hrl | 3
-rw-r--r--  lib/ssl/src/tls_connection.erl | 50
-rw-r--r--  lib/ssl/test/ssl_basic_SUITE.erl | 26
-rw-r--r--  lib/stdlib/doc/src/dict.xml | 10
-rw-r--r--  lib/stdlib/doc/src/ets.xml | 4
-rw-r--r--  lib/stdlib/doc/src/gb_trees.xml | 22
-rw-r--r--  lib/stdlib/doc/src/gen_event.xml | 22
-rw-r--r--  lib/stdlib/doc/src/gen_fsm.xml | 5
-rw-r--r--  lib/stdlib/doc/src/gen_server.xml | 5
-rw-r--r--  lib/stdlib/doc/src/orddict.xml | 9
-rw-r--r--  lib/stdlib/src/dict.erl | 23
-rw-r--r--  lib/stdlib/src/erl_eval.erl | 1
-rw-r--r--  lib/stdlib/src/erl_parse.yrl | 47
-rw-r--r--  lib/stdlib/src/escript.erl | 83
-rw-r--r--  lib/stdlib/src/gb_trees.erl | 45
-rw-r--r--  lib/stdlib/src/gen_event.erl | 80
-rw-r--r--  lib/stdlib/src/orddict.erl | 19
-rw-r--r--  lib/stdlib/src/otp_internal.erl | 20
-rw-r--r--  lib/stdlib/test/dict_SUITE.erl | 27
-rw-r--r--  lib/stdlib/test/dict_test_lib.erl | 20
-rw-r--r--  lib/stdlib/test/erl_lint_SUITE.erl | 53
-rw-r--r--  lib/stdlib/test/erl_pp_SUITE.erl | 13
-rw-r--r--  lib/stdlib/test/escript_SUITE.erl | 15
-rwxr-xr-x  lib/stdlib/test/escript_SUITE_data/two_lines | 2
-rw-r--r--  lib/stdlib/test/gen_event_SUITE.erl | 126
-rw-r--r--  lib/stdlib/test/rand_SUITE.erl | 6
-rw-r--r--  lib/stdlib/test/shell_SUITE.erl | 2
-rw-r--r--  lib/typer/src/typer.erl | 22
-rw-r--r--  lib/xmerl/src/xmerl_scan.erl | 22
-rw-r--r--  lib/xmerl/test/xmerl_SUITE.erl | 18
149 files changed, 3260 insertions, 2479 deletions
diff --git a/lib/asn1/doc/src/Makefile b/lib/asn1/doc/src/Makefile
index 559836116f..9a388e4e8a 100644
--- a/lib/asn1/doc/src/Makefile
+++ b/lib/asn1/doc/src/Makefile
@@ -37,8 +37,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
# Target Specs
# ----------------------------------------------------
XML_APPLICATION_FILES = ref_man.xml
-XML_REF3_FILES = asn1ct.xml \
- asn1rt.xml
+XML_REF3_FILES = asn1ct.xml
GEN_XML = \
asn1_spec.xml
diff --git a/lib/asn1/doc/src/asn1ct.xml b/lib/asn1/doc/src/asn1ct.xml
index e5a7b1bcc4..ebe1ce44dc 100644
--- a/lib/asn1/doc/src/asn1ct.xml
+++ b/lib/asn1/doc/src/asn1ct.xml
@@ -321,45 +321,6 @@ File3.asn</pre>
</func>
<func>
- <name>encode(Module, Type, Value)-> {ok, Bytes} | {error, Reason}</name>
- <fsummary>Encodes an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = term()</v>
- <v>Bytes = binary()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Encodes <c>Value</c> of <c>Type</c> defined in the <c>ASN.1</c> module
- <c>Module</c>. To get as fast execution as possible, the
- encode function performs only the rudimentary tests that input
- <c>Value</c> is a correct instance of <c>Type</c>. So, for example,
- the length of strings is
- not always checked. Returns <c>{ok, Bytes}</c> if successful or
- <c>{error, Reason}</c> if an error occurred.
- </p>
- <p>This function is deprecated.
- Use <c>Module:encode(Type, Value)</c> instead.</p>
- </desc>
- </func>
-
- <func>
- <name>decode(Module, Type, Bytes) -> {ok, Value} | {error, Reason}</name>
- <fsummary>Decode from Bytes into an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = Reason = term()</v>
- <v>Bytes = binary()</v>
- </type>
- <desc>
- <p>Decodes <c>Type</c> from <c>Module</c> from the binary
- <c>Bytes</c>. Returns <c>{ok, Value}</c> if successful.</p>
- <p>This function is deprecated.
- Use <c>Module:decode(Type, Bytes)</c> instead.</p>
- </desc>
- </func>
-
- <func>
<name>value(Module, Type) -> {ok, Value} | {error, Reason}</name>
<fsummary>Creates an ASN.1 value for test purposes.</fsummary>
<type>
@@ -424,11 +385,11 @@ File3.asn</pre>
<p>Schematically, the following occurs for each type in the module:</p>
<code type="none">
{ok, Value} = asn1ct:value(Module, Type),
-{ok, Bytes} = asn1ct:encode(Module, Type, Value),
-{ok, Value} = asn1ct:decode(Module, Type, Bytes).</code>
+{ok, Bytes} = Module:encode(Type, Value),
+{ok, Value} = Module:decode(Type, Bytes).</code>
<p>The <c>test</c> functions use the <c>*.asn1db</c> files
for all included modules. If they are located in a different
- directory than the current working directory, use the include
+ directory than the current working directory, use the <c>include</c>
option to add paths. This is only needed when automatically
generating values. For static values using <c>Value</c> no
options are needed.</p>
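
As a side note on the API the updated documentation points to, here is a minimal sketch using a hypothetical compiled ASN.1 module 'MyProtocol' with a type 'Message' (both names invented for illustration, not part of this patch):

    %% Illustration only: 'MyProtocol' and 'Message' are hypothetical.
    {ok, Value} = asn1ct:value('MyProtocol', 'Message'),
    {ok, Bytes} = 'MyProtocol':encode('Message', Value),
    {ok, Value} = 'MyProtocol':decode('Message', Bytes).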
diff --git a/lib/asn1/doc/src/asn1rt.xml b/lib/asn1/doc/src/asn1rt.xml
deleted file mode 100644
index 3f53ca0f56..0000000000
--- a/lib/asn1/doc/src/asn1rt.xml
+++ /dev/null
@@ -1,135 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE erlref SYSTEM "erlref.dtd">
-
-<erlref>
- <header>
- <copyright>
- <year>1997</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>asn1rt</title>
- <prepared>Kenneth Lundin</prepared>
- <responsible>Kenneth Lundin</responsible>
- <docno>1</docno>
- <approved>Kenneth Lundin</approved>
- <checked></checked>
- <date>97-10-04</date>
- <rev>A</rev>
- <file>asn1.sgml</file>
- </header>
- <module>asn1rt</module>
- <modulesummary>ASN.1 runtime support functions</modulesummary>
- <description>
- <warning>
- <p>
- All functions in this module are deprecated and will be
- removed in a future release.
- </p>
- </warning>
- </description>
-
- <funcs>
-
- <func>
- <name>decode(Module,Type,Bytes) -> {ok,Value}|{error,Reason}</name>
- <fsummary>Decodes from Bytes into an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = Reason = term()</v>
- <v>Bytes = binary</v>
- </type>
- <desc>
- <p>Decodes <c>Type</c> from <c>Module</c> from the binary <c>Bytes</c>.
- Returns <c>{ok,Value}</c> if successful.</p>
- <p>Use <c>Module:decode(Type, Bytes)</c> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>encode(Module,Type,Value)-> {ok,Bytes} | {error,Reason}</name>
- <fsummary>Encodes an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = term()</v>
- <v>Bytes = binary</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Encodes <c>Value</c> of <c>Type</c> defined in the <c>ASN.1</c>
- module <c>Module</c>. Returns a binary if successful. To get
- as fast execution as possible, the encode function performs
- only the rudimentary test that input <c>Value</c> is a correct
- instance of <c>Type</c>. For example, the length of strings is
- not always checked.</p>
- <p>Use <c>Module:encode(Type, Value)</c> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>info(Module) -> {ok,Info} | {error,Reason}</name>
- <fsummary>Returns compiler information about the Module.</fsummary>
- <type>
- <v>Module = atom()</v>
- <v>Info = list()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Returns the version of the <c>ASN.1</c> compiler that was
- used to compile the module. It also returns the compiler options
- that were used.</p>
- <p>Use <c>Module:info()</c> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>utf8_binary_to_list(UTF8Binary) -> {ok,UnicodeList} | {error,Reason}</name>
- <fsummary>Transforms an UTF8 encoded binary to a unicode list.</fsummary>
- <type>
- <v>UTF8Binary = binary()</v>
- <v>UnicodeList = [integer()]</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Transforms a UTF8 encoded binary
- to a list of integers, where each integer represents one
- character as its unicode value. The function fails if the binary
- is not a properly encoded UTF8 string.</p>
- <p>Use <seealso marker="stdlib:unicode#characters_to_list-1">unicode:characters_to_list/1</seealso> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>utf8_list_to_binary(UnicodeList) -> {ok,UTF8Binary} | {error,Reason}</name>
- <fsummary>Transforms an unicode list to a UTF8 binary.</fsummary>
- <type>
- <v>UnicodeList = [integer()]</v>
- <v>UTF8Binary = binary()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Transforms a list of integers,
- where each integer represents one character as its unicode
- value, to a UTF8 encoded binary.</p>
- <p>Use <seealso marker="stdlib:unicode#characters_to_binary-1">unicode:characters_to_binary/1</seealso> instead of this function.</p>
- </desc>
- </func>
-
- </funcs>
-
-</erlref>
-
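
The removed asn1rt reference page directed users to the unicode module in STDLIB; a minimal sketch of those replacements, with invented example data (not part of the patch):

    %% Illustration only, not part of the patch.
    UTF8 = unicode:characters_to_binary("abc" ++ [16#E5]),   %% code points -> UTF-8 binary
    "abc" ++ [16#E5] = unicode:characters_to_list(UTF8).     %% UTF-8 binary -> code points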
diff --git a/lib/asn1/src/Makefile b/lib/asn1/src/Makefile
index 38cf2d496a..ba459f6cd3 100644
--- a/lib/asn1/src/Makefile
+++ b/lib/asn1/src/Makefile
@@ -68,7 +68,6 @@ CT_MODULES= \
$(EVAL_CT_MODULES)
RT_MODULES= \
- asn1rt \
asn1rt_nif
MODULES= $(CT_MODULES) $(RT_MODULES)
diff --git a/lib/asn1/src/asn1.app.src b/lib/asn1/src/asn1.app.src
index 1f8805ff5e..d2da727193 100644
--- a/lib/asn1/src/asn1.app.src
+++ b/lib/asn1/src/asn1.app.src
@@ -2,7 +2,6 @@
[{description, "The Erlang ASN1 compiler version %VSN%"},
{vsn, "%VSN%"},
{modules, [
- asn1rt,
asn1rt_nif
]},
{registered, [
diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl
index 8783b5418d..4e030861f5 100644
--- a/lib/asn1/src/asn1ct.erl
+++ b/lib/asn1/src/asn1ct.erl
@@ -20,17 +20,12 @@
%%
%%
-module(asn1ct).
--deprecated([decode/3,encode/3]).
--compile([{nowarn_deprecated_function,{asn1rt,decode,3}},
- {nowarn_deprecated_function,{asn1rt,encode,2}},
- {nowarn_deprecated_function,{asn1rt,encode,3}}]).
%% Compile Time functions for ASN.1 (e.g ASN.1 compiler).
%%-compile(export_all).
%% Public exports
-export([compile/1, compile/2]).
--export([encode/2, encode/3, decode/3]).
-export([test/1, test/2, test/3, value/2, value/3]).
%% Application internal exports
-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,
@@ -1271,21 +1266,6 @@ pretty2(Module,AbsFile) ->
start(Includes) when is_list(Includes) ->
asn1_db:dbstart(Includes).
-
-encode(Module,Term) ->
- asn1rt:encode(Module,Term).
-
-encode(Module,Type,Term) when is_list(Module) ->
- asn1rt:encode(list_to_atom(Module),Type,Term);
-encode(Module,Type,Term) ->
- asn1rt:encode(Module,Type,Term).
-
-decode(Module,Type,Bytes) when is_list(Module) ->
- asn1rt:decode(list_to_atom(Module),Type,Bytes);
-decode(Module,Type,Bytes) ->
- asn1rt:decode(Module,Type,Bytes).
-
-
test(Module) -> test_module(Module, []).
test(Module, [] = Options) -> test_module(Module, Options);
@@ -1330,10 +1310,10 @@ test_type(Module, Type) ->
test_value(Module, Type, Value) ->
in_process(fun() ->
- case catch encode(Module, Type, Value) of
+ case catch Module:encode(Type, Value) of
{ok, Bytes} ->
NewBytes = prepare_bytes(Bytes),
- case decode(Module, Type, NewBytes) of
+ case Module:decode(Type, NewBytes) of
{ok, Value} ->
{ok, {Module, Type, Value}};
{ok, Res} ->
diff --git a/lib/asn1/src/asn1ct_value.erl b/lib/asn1/src/asn1ct_value.erl
index 57cd3f8af6..b3d41dd9f3 100644
--- a/lib/asn1/src/asn1ct_value.erl
+++ b/lib/asn1/src/asn1ct_value.erl
@@ -19,7 +19,6 @@
%%
%%
-module(asn1ct_value).
--compile([{nowarn_deprecated_function,{asn1rt,utf8_list_to_binary,1}}]).
%% Generate Erlang values for ASN.1 types.
%% The value is randomized within it's constraints
@@ -292,8 +291,10 @@ from_type_prim(M, D) ->
'BMPString' ->
adjust_list(size_random(C),c_string(C,"BMPString"));
'UTF8String' ->
- {ok,Res}=asn1rt:utf8_list_to_binary(adjust_list(random(50),[$U,$T,$F,$8,$S,$t,$r,$i,$n,$g,16#ffff,16#fffffff,16#ffffff,16#fffff,16#fff])),
- Res;
+ L = adjust_list(random(50),
+ [$U,$T,$F,$8,$S,$t,$r,$i,$n,$g,
+ 16#ffff,16#ffee,16#10ffff,16#ffff,16#fff]),
+ unicode:characters_to_binary(L);
'UniversalString' ->
adjust_list(size_random(C),c_string(C,"UniversalString"));
XX ->
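
The switch to unicode:characters_to_binary/1 also explains why the sample code points change: the stdlib function only encodes values inside the Unicode range (at most 16#10FFFF) and returns an error tuple for anything larger, whereas the removed asn1rt encoder accepted 31-bit values. A rough sketch, assuming nothing beyond the stdlib:

    %% Illustration only, not part of the patch.
    <<_/binary>>  = unicode:characters_to_binary([16#ffff, 16#10ffff]),  %% in range: UTF-8 binary
    {error, _, _} = unicode:characters_to_binary([16#fffffff]).          %% out of range: error tuple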
diff --git a/lib/asn1/src/asn1rt.erl b/lib/asn1/src/asn1rt.erl
deleted file mode 100644
index 3e09ce2252..0000000000
--- a/lib/asn1/src/asn1rt.erl
+++ /dev/null
@@ -1,184 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
--module(asn1rt).
--deprecated(module).
-
-%% Runtime functions for ASN.1 (i.e encode, decode)
-
--export([encode/2,encode/3,decode/3,load_driver/0,unload_driver/0,info/1]).
-
--export([utf8_binary_to_list/1,utf8_list_to_binary/1]).
-
-encode(Module,{Type,Term}) ->
- encode(Module,Type,Term).
-
-encode(Module,Type,Term) ->
- case catch apply(Module,encode,[Type,Term]) of
- {'EXIT',undef} ->
- {error,{asn1,{undef,Module,Type}}};
- Result ->
- Result
- end.
-
-decode(Module,Type,Bytes) ->
- case catch apply(Module,decode,[Type,Bytes]) of
- {'EXIT',undef} ->
- {error,{asn1,{undef,Module,Type}}};
- Result ->
- Result
- end.
-
-%% Remove in R16A
-load_driver() ->
- ok.
-
-unload_driver() ->
- ok.
-
-info(Module) ->
- case catch apply(Module,info,[]) of
- {'EXIT',{undef,_Reason}} ->
- {error,{asn1,{undef,Module,info}}};
- Result ->
- {ok,Result}
- end.
-
-%% utf8_binary_to_list/1 transforms a utf8 encoded binary to a list of
-%% unicode elements, where each element is the unicode integer value
-%% of a utf8 character.
-%% Bin is a utf8 encoded value. The return value is either {ok,Val} or
-%% {error,Reason}. Val is a list of integers, where each integer is a
-%% unicode character value.
-utf8_binary_to_list(Bin) when is_binary(Bin) ->
- utf8_binary_to_list(Bin,[]).
-
-utf8_binary_to_list(<<>>,Acc) ->
- {ok,lists:reverse(Acc)};
-utf8_binary_to_list(Bin,Acc) ->
- Len = utf8_binary_len(Bin),
- case catch split_binary(Bin,Len) of
- {CharBin,RestBin} ->
- case utf8_binary_char(CharBin) of
- C when is_integer(C) ->
- utf8_binary_to_list(RestBin,[C|Acc]);
- Err -> Err
- end;
- Err -> {error,{asn1,{bad_encoded_utf8string,Err}}}
- end.
-
-utf8_binary_len(<<0:1,_:7,_/binary>>) ->
- 1;
-utf8_binary_len(<<1:1,1:1,0:1,_:5,_/binary>>) ->
- 2;
-utf8_binary_len(<<1:1,1:1,1:1,0:1,_:4,_/binary>>) ->
- 3;
-utf8_binary_len(<<1:1,1:1,1:1,1:1,0:1,_:3,_/binary>>) ->
- 4;
-utf8_binary_len(<<1:1,1:1,1:1,1:1,1:1,0:1,_:2,_/binary>>) ->
- 5;
-utf8_binary_len(<<1:1,1:1,1:1,1:1,1:1,1:1,0:1,_:1,_/binary>>) ->
- 6;
-utf8_binary_len(Bin) ->
- {error,{asn1,{bad_utf8_length,Bin}}}.
-
-utf8_binary_char(<<0:1,Int:7>>) ->
- Int;
-utf8_binary_char(<<_:2,0:1,Int1:5,1:1,0:1,Int2:6>>) ->
- (Int1 bsl 6) bor Int2;
-utf8_binary_char(<<_:3,0:1,Int1:4,1:1,0:1,Int2:6,1:1,0:1,Int3:6>>) ->
- <<Res:16>> = <<Int1:4,Int2:6,Int3:6>>,
- Res;
-utf8_binary_char(<<_:4,0:1,Int1:3,Rest/binary>>) ->
- <<1:1,0:1,Int2:6,1:1,0:1,Int3:6,1:1,0:1,Int4:6>> = Rest,
- <<Res:24>> = <<0:3,Int1:3,Int2:6,Int3:6,Int4:6>>,
- Res;
-utf8_binary_char(<<_:5,0:1,Int1:2,Rest/binary>>) ->
- <<1:1,0:1,Int2:6,1:1,0:1,Int3:6,1:1,0:1,Int4:6,1:1,0:1,Int5:6>> = Rest,
- <<Res:32>> = <<0:6,Int1:2,Int2:6,Int3:6,Int4:6,Int5:6>>,
- Res;
-utf8_binary_char(<<_:6,0:1,I:1,Rest/binary>>) ->
- <<1:1,0:1,Int2:6,1:1,0:1,Int3:6,1:1,0:1,Int4:6,1:1,0:1,
- Int5:6,1:1,0:1,Int6:6>> = Rest,
- <<Res:32>> = <<0:1,I:1,Int2:6,Int3:6,Int4:6,Int5:6,Int6:6>>,
- Res;
-utf8_binary_char(Err) ->
- {error,{asn1,{bad_utf8_character_encoding,Err}}}.
-
-
-%% macros used for utf8 encoding
--define(bit1to6_into_utf8byte(I),16#80 bor (I band 16#3f)).
--define(bit7to12_into_utf8byte(I),16#80 bor ((I band 16#fc0) bsr 6)).
--define(bit13to18_into_utf8byte(I),16#80 bor ((I band 16#3f000) bsr 12)).
--define(bit19to24_into_utf8byte(I),16#80 bor ((Int band 16#fc0000) bsr 18)).
--define(bit25to30_into_utf8byte(I),16#80 bor ((Int band 16#3f000000) bsr 24)).
-
-%% utf8_list_to_binary/1 transforms a list of integers to a
-%% binary. Each element in the input list has the unicode (integer)
-%% value of an utf8 character.
-%% The return value is either {ok,Bin} or {error,Reason}. The
-%% resulting binary is utf8 encoded.
-utf8_list_to_binary(List) ->
- utf8_list_to_binary(List,[]).
-
-utf8_list_to_binary([],Acc) when is_list(Acc) ->
- {ok,list_to_binary(lists:reverse(Acc))};
-utf8_list_to_binary([],Acc) ->
- {error,{asn1,Acc}};
-utf8_list_to_binary([H|T],Acc) ->
- case catch utf8_encode(H,Acc) of
- NewAcc when is_list(NewAcc) ->
- utf8_list_to_binary(T,NewAcc);
- Err -> Err
- end.
-
-
-utf8_encode(Int,Acc) when Int < 128 ->
- %% range 16#00000000 - 16#0000007f
- %% utf8 encoding: 0xxxxxxx
- [Int|Acc];
-utf8_encode(Int,Acc) when Int < 16#800 ->
- %% range 16#00000080 - 16#000007ff
- %% utf8 encoding: 110xxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),16#c0 bor (Int bsr 6)|Acc];
-utf8_encode(Int,Acc) when Int < 16#10000 ->
- %% range 16#00000800 - 16#0000ffff
- %% utf8 encoding: 1110xxxx 10xxxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- 16#e0 bor ((Int band 16#f000) bsr 12)|Acc];
-utf8_encode(Int,Acc) when Int < 16#200000 ->
- %% range 16#00010000 - 16#001fffff
- %% utf8 encoding: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- ?bit13to18_into_utf8byte(Int),
- 16#f0 bor ((Int band 16#1c0000) bsr 18)|Acc];
-utf8_encode(Int,Acc) when Int < 16#4000000 ->
- %% range 16#00200000 - 16#03ffffff
- %% utf8 encoding: 111110xx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- ?bit13to18_into_utf8byte(Int),?bit19to24_into_utf8byte(Int),
- 16#f8 bor ((Int band 16#3000000) bsr 24)|Acc];
-utf8_encode(Int,Acc) ->
- %% range 16#04000000 - 16#7fffffff
- %% utf8 encoding: 1111110x 10xxxxxx ...(total 6 bytes) 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- ?bit13to18_into_utf8byte(Int),?bit19to24_into_utf8byte(Int),
- ?bit25to30_into_utf8byte(Int),
- 16#fc bor ((Int band 16#40000000) bsr 30)|Acc].
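
The deleted module implemented the UTF-8 byte layout by hand, as its comments describe. A worked example of the two-byte case (illustrative, not taken from the patch): 16#E5 falls in the range 16#80-16#7FF, so it is encoded as 110xxxxx 10xxxxxx, giving <<16#C3,16#A5>>; bit syntax and the unicode module now produce the same bytes:

    %% Illustration only, not part of the patch.
    <<16#C3,16#A5>> = <<16#E5/utf8>>,
    <<16#C3,16#A5>> = unicode:characters_to_binary([16#E5]).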
diff --git a/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl b/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl
index 6cf8ecf451..cd6c74b995 100644
--- a/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl
+++ b/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl
@@ -120,10 +120,10 @@ run3(Erule) ->
asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE},
asn1_NOVALUE,asn1_NOVALUE}}}}}}},
io:format("~p:~p~n",[Erule,Val]),
- {ok,List}= asn1rt:encode('EUTRA-RRC-Definitions','DL-DCCH-Message',Val),
+ {ok,List}= 'EUTRA-RRC-Definitions':encode('DL-DCCH-Message',Val),
Enc = iolist_to_binary(List),
io:format("Result from encode:~n~p~n",[Enc]),
- {ok,Val2} = asn1rt:decode('EUTRA-RRC-Definitions','DL-DCCH-Message',Enc),
+ {ok,Val2} = 'EUTRA-RRC-Definitions':decode('DL-DCCH-Message', Enc),
io:format("Result from decode:~n~p~n",[Val2]),
case Val2 of
Val -> ok;
diff --git a/lib/asn1/test/asn1_SUITE_data/testobj.erl b/lib/asn1/test/asn1_SUITE_data/testobj.erl
index a0e00f8314..e547ea4572 100644
--- a/lib/asn1/test/asn1_SUITE_data/testobj.erl
+++ b/lib/asn1/test/asn1_SUITE_data/testobj.erl
@@ -1410,16 +1410,14 @@ int2bin(Int) ->
%%%%%%%%%%%%%%%%% wrappers %%%%%%%%%%%%%%%%%%%%%%%%
wrapper_encode(Module,Type,Value) ->
- case asn1rt:encode(Module,Type,Value) of
- {ok,X} when binary(X) ->
+ case Module:encode(Type, Value) of
+ {ok,X} when is_binary(X) ->
{ok, binary_to_list(X)};
- {ok,X} ->
- {ok, binary_to_list(list_to_binary(X))};
Error ->
Error
end.
wrapper_decode(Module, Type, Bytes) when is_binary(Bytes) ->
- asn1rt:decode(Module, Type, Bytes);
+ Module:decode(Type, Bytes);
wrapper_decode(Module, Type, Bytes) when is_list(Bytes) ->
- asn1rt:decode(Module, Type, list_to_binary(Bytes)).
+ Module:decode(Type, list_to_binary(Bytes)).
diff --git a/lib/asn1/test/testPrimStrings.erl b/lib/asn1/test/testPrimStrings.erl
index cb97655c15..b7f0323301 100644
--- a/lib/asn1/test/testPrimStrings.erl
+++ b/lib/asn1/test/testPrimStrings.erl
@@ -19,8 +19,6 @@
%%
%%
-module(testPrimStrings).
--compile([{nowarn_deprecated_function,{asn1rt,utf8_list_to_binary,1}},
- {nowarn_deprecated_function,{asn1rt,utf8_binary_to_list,1}}]).
-export([bit_string/2]).
-export([octet_string/1]).
@@ -756,19 +754,21 @@ utf8_string(_Rules) ->
16#800,
16#ffff,
16#10000,
- 16#1fffff,
- 16#200000,
- 16#3ffffff,
- 16#4000000,
- 16#7fffffff],
+ 16#1ffff,
+ 16#20000,
+ 16#2ffff,
+ 16#e0000,
+ 16#effff,
+ 16#F0000,
+ 16#10ffff],
[begin
- {ok,UTF8} = asn1rt:utf8_list_to_binary([Char]),
- {ok,[Char]} = asn1rt:utf8_binary_to_list(UTF8),
+ UTF8 = unicode:characters_to_binary([Char]),
+ [Char] = unicode:characters_to_list([UTF8]),
roundtrip('UTF', UTF8)
end || Char <- AllRanges],
- {ok,UTF8} = asn1rt:utf8_list_to_binary(AllRanges),
- {ok,AllRanges} = asn1rt:utf8_binary_to_list(UTF8),
+ UTF8 = unicode:characters_to_binary(AllRanges),
+ AllRanges = unicode:characters_to_list(UTF8),
roundtrip('UTF', UTF8),
ok.
diff --git a/lib/common_test/src/common_test.app.src b/lib/common_test/src/common_test.app.src
index 77588af59b..dfa321c901 100644
--- a/lib/common_test/src/common_test.app.src
+++ b/lib/common_test/src/common_test.app.src
@@ -22,6 +22,7 @@
{vsn, "%VSN%"},
{modules, [ct_cover,
ct,
+ ct_default_gl,
ct_event,
ct_framework,
ct_ftp,
diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile
index b1eddfedd7..2f0fc2e05a 100644
--- a/lib/common_test/test/Makefile
+++ b/lib/common_test/test/Makefile
@@ -70,7 +70,8 @@ MODULES= \
test_server_SUITE \
test_server_test_lib \
ct_release_test_SUITE \
- ct_log_SUITE
+ ct_log_SUITE \
+ ct_SUITE
ERL_FILES= $(MODULES:%=%.erl)
HRL_FILES= test_server_test_lib.hrl
diff --git a/lib/common_test/test/ct_SUITE.erl b/lib/common_test/test/ct_SUITE.erl
new file mode 100644
index 0000000000..eb98c2544f
--- /dev/null
+++ b/lib/common_test/test/ct_SUITE.erl
@@ -0,0 +1,53 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ct_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+suite() ->
+ [{timetrap,{seconds,30}}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+all() ->
+ [app_file, appup_file].
+
+%%%-----------------------------------------------------------------
+%%% Test cases
+
+app_file(_Config) ->
+ ok = test_server:app_test(common_test),
+ ok.
+
+appup_file(_Config) ->
+ ok = test_server:appup_test(common_test).
+
diff --git a/lib/common_test/test/ct_hooks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE.erl
index 690d0af1bb..bc716fb5e3 100644
--- a/lib/common_test/test/ct_hooks_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE.erl
@@ -70,20 +70,20 @@ suite() ->
all() ->
all(suite).
-all(suite) ->
+all(suite) ->
lists:reverse(
[
one_cth, two_cth, faulty_cth_no_init, faulty_cth_id_no_init,
faulty_cth_exit_in_init, faulty_cth_exit_in_id,
- faulty_cth_exit_in_init_scope_suite, minimal_cth,
- minimal_and_maximal_cth, faulty_cth_undef,
+ faulty_cth_exit_in_init_scope_suite, minimal_cth,
+ minimal_and_maximal_cth, faulty_cth_undef,
scope_per_suite_cth, scope_per_group_cth, scope_suite_cth,
- scope_per_suite_state_cth, scope_per_group_state_cth,
+ scope_per_suite_state_cth, scope_per_group_state_cth,
scope_suite_state_cth,
fail_pre_suite_cth, double_fail_pre_suite_cth,
fail_post_suite_cth, skip_pre_suite_cth, skip_pre_end_cth,
skip_post_suite_cth, recover_post_suite_cth, update_config_cth,
- state_update_cth, options_cth, same_id_cth,
+ state_update_cth, options_cth, same_id_cth,
fail_n_skip_with_minimal_cth, prio_cth, no_config,
data_dir, cth_log
]
@@ -96,10 +96,10 @@ all(suite) ->
%%%-----------------------------------------------------------------
%%%
-one_cth(Config) when is_list(Config) ->
+one_cth(Config) when is_list(Config) ->
do_test(one_empty_cth, "ct_cth_empty_SUITE.erl",[empty_cth], Config).
-two_cth(Config) when is_list(Config) ->
+two_cth(Config) when is_list(Config) ->
do_test(two_empty_cth, "ct_cth_empty_SUITE.erl",[empty_cth,empty_cth],
Config).
@@ -119,13 +119,13 @@ minimal_cth(Config) when is_list(Config) ->
minimal_and_maximal_cth(Config) when is_list(Config) ->
do_test(minimal_and_maximal_cth, "ct_cth_empty_SUITE.erl",
[minimal_cth, empty_cth],Config).
-
+
faulty_cth_undef(Config) when is_list(Config) ->
do_test(faulty_cth_undef, "ct_cth_empty_SUITE.erl",
[undef_cth],Config).
faulty_cth_exit_in_init_scope_suite(Config) when is_list(Config) ->
- do_test(faulty_cth_exit_in_init_scope_suite,
+ do_test(faulty_cth_exit_in_init_scope_suite,
"ct_exit_in_init_scope_suite_cth_SUITE.erl",
[],Config).
@@ -205,7 +205,7 @@ state_update_cth(Config) when is_list(Config) ->
options_cth(Config) when is_list(Config) ->
do_test(options_cth, "ct_cth_empty_SUITE.erl",
[{empty_cth,[test]}],Config).
-
+
same_id_cth(Config) when is_list(Config) ->
do_test(same_id_cth, "ct_cth_empty_SUITE.erl",
[same_id_cth,same_id_cth],Config).
@@ -227,9 +227,10 @@ data_dir(Config) when is_list(Config) ->
do_test(data_dir, "ct_data_dir_SUITE.erl",
[verify_data_dir_cth],Config).
-cth_log(Config) when is_list(Config) ->
+cth_log(Config) when is_list(Config) ->
%% test that cth_log_redirect writes properly to
%% unexpected I/O log
+ ct:timetrap({minutes,10}),
StartOpts = do_test(cth_log, "cth_log_SUITE.erl", [], Config),
Logdir = proplists:get_value(logdir, StartOpts),
UnexpIoLogs =
@@ -266,7 +267,6 @@ do_test(Tag, SWC, CTHs, Config, Res) ->
do_test(Tag, SWC, CTHs, Config, Res, 2).
do_test(Tag, SuiteWildCard, CTHs, Config, Res, EC) ->
-
DataDir = ?config(data_dir, Config),
Suites = filelib:wildcard(
filename:join([DataDir,"cth/tests",SuiteWildCard])),
@@ -275,7 +275,7 @@ do_test(Tag, SuiteWildCard, CTHs, Config, Res, EC) ->
Res = ct_test_support:run(Opts, Config),
Events = ct_test_support:get_events(ERPid, Config),
- ct_test_support:log_events(Tag,
+ ct_test_support:log_events(Tag,
reformat(Events, ?eh),
?config(priv_dir, Config),
Opts),
@@ -328,7 +328,7 @@ test_events(one_empty_cth) ->
{?eh,cth,{empty_cth,pre_end_per_testcase,[test_case,'$proplist',[]]}},
{?eh,cth,{empty_cth,post_end_per_testcase,[test_case,'$proplist','_',[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{empty_cth,pre_end_per_suite,
[ct_cth_empty_SUITE,'$proplist',[]]}},
@@ -360,7 +360,7 @@ test_events(two_empty_cth) ->
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
@@ -402,7 +402,7 @@ test_events(minimal_cth) ->
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,tc_done,{ct_cth_empty_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -426,7 +426,7 @@ test_events(minimal_and_maximal_cth) ->
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_suite,[ct_cth_empty_SUITE,'$proplist','_',[]]}},
@@ -452,11 +452,11 @@ test_events(faulty_cth_undef) ->
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case,
{failed, FailReason}}},
{?eh,cth,{'_',on_tc_skip,'_'}},
-
+
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,end_per_suite,
{failed, FailReason}}},
{?eh,cth,{'_',on_tc_skip,'_'}},
-
+
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -515,7 +515,7 @@ test_events(scope_per_suite_cth) ->
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_per_suite_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_suite_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[ct_scope_per_suite_cth_SUITE,'$proplist',[]]}},
@@ -541,7 +541,7 @@ test_events(scope_suite_cth) ->
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_suite_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_suite_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_scope_suite_cth_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_suite,[ct_scope_suite_cth_SUITE,'$proplist','_',[]]}},
@@ -563,18 +563,18 @@ test_events(scope_per_group_cth) ->
{?eh,cth,{'_',init,['_',[]]}},
{?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,test_case}},
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}}},
{?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[]]}},
{?eh,cth,{'_',terminate,[[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]},ok}}],
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,end_per_suite}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -595,7 +595,7 @@ test_events(scope_per_suite_state_cth) ->
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}},
{?eh,tc_done,{ct_scope_per_suite_state_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_suite_state_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[ct_scope_per_suite_state_cth_SUITE,'$proplist',[test]]}},
@@ -621,7 +621,7 @@ test_events(scope_suite_state_cth) ->
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}},
{?eh,tc_done,{ct_scope_suite_state_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_suite_state_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_scope_suite_state_cth_SUITE,'$proplist',[test]]}},
{?eh,cth,{'_',post_end_per_suite,[ct_scope_suite_state_cth_SUITE,'$proplist','_',[test]]}},
@@ -643,18 +643,18 @@ test_events(scope_per_group_state_cth) ->
{?eh,cth,{'_',init,['_',[test]]}},
{?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[test]]}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,{init_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,test_case}},
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,{end_per_group,group1,[]}}},
{?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[test]]}},
{?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[test]]}},
{?eh,cth,{'_',terminate,[[test]]}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,{end_per_group,group1,[]},ok}}],
-
+
{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,end_per_suite}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -666,7 +666,7 @@ test_events(fail_pre_suite_cth) ->
{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,init_per_suite}},
{?eh,cth,{'_',pre_init_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist',
@@ -676,7 +676,7 @@ test_events(fail_pre_suite_cth) ->
{?eh,cth,{'_',on_tc_fail,
[init_per_suite,{failed,"Test failure"},[]]}},
-
+
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case,
{failed,{ct_cth_empty_SUITE,init_per_suite,
{failed,"Test failure"}}}}},
@@ -685,7 +685,7 @@ test_events(fail_pre_suite_cth) ->
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}},[]]}},
-
+
{?eh,tc_auto_skip, {ct_cth_empty_SUITE, end_per_suite,
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}}},
@@ -694,7 +694,7 @@ test_events(fail_pre_suite_cth) ->
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}},[]]}},
-
+
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth, {'_',terminate,[[]]}},
{?eh,stop_logging,[]}
@@ -733,7 +733,7 @@ test_events(fail_post_suite_cth) ->
{failed,{ct_cth_empty_SUITE,init_per_suite,
{failed,"Test failure"}}}}},
{?eh,cth,{'_',on_tc_skip,[test_case,{tc_auto_skip,'_'},[]]}},
-
+
{?eh,tc_auto_skip, {ct_cth_empty_SUITE, end_per_suite,
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}}},
@@ -758,7 +758,7 @@ test_events(skip_pre_suite_cth) ->
{?eh,tc_user_skip,{ct_cth_empty_SUITE,test_case,"Test skip"}},
{?eh,cth,{'_',on_tc_skip,[test_case,{tc_user_skip,"Test skip"},[]]}},
-
+
{?eh,tc_user_skip, {ct_cth_empty_SUITE, end_per_suite,"Test skip"}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -772,18 +772,18 @@ test_events(skip_pre_end_cth) ->
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,init_per_suite}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,init_per_suite,ok}},
-
+
[{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]}}},
{?eh,cth,{'_',id,[[]]}},
{?eh,cth,{'_',init,['_',[]]}},
{?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,test_case}},
{?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}}},
{?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[]]}},
@@ -808,7 +808,7 @@ test_events(skip_post_suite_cth) ->
{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,init_per_suite}},
{?eh,cth,{'_',pre_init_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist','$proplist',[]]}},
@@ -818,9 +818,9 @@ test_events(skip_post_suite_cth) ->
{?eh,tc_user_skip,{ct_cth_empty_SUITE,test_case,"Test skip"}},
{?eh,cth,{'_',on_tc_skip,[test_case,{tc_user_skip,"Test skip"},[]]}},
-
+
{?eh,tc_user_skip, {ct_cth_empty_SUITE, end_per_suite,"Test skip"}},
-
+
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth,{'_',terminate,[[]]}},
{?eh,stop_logging,[]}
@@ -844,7 +844,7 @@ test_events(recover_post_suite_cth) ->
{?eh,cth,{'_',post_end_per_testcase,
[test_case, contains([tc_status]),'_',[]]}},
{?eh,tc_done,{Suite,test_case,ok}},
-
+
{?eh,tc_start,{Suite,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[Suite,not_contains([tc_status]),[]]}},
@@ -861,7 +861,7 @@ test_events(update_config_cth) ->
{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
-
+
{?eh,tc_start,{ct_update_config_SUITE,init_per_suite}},
{?eh,cth,{'_',pre_init_per_suite,
[ct_update_config_SUITE,contains([]),[]]}},
@@ -941,7 +941,7 @@ test_events(update_config_cth) ->
pre_init_per_suite]),
ok,[]]}},
{?eh,tc_done,{ct_update_config_SUITE,{end_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_update_config_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[ct_update_config_SUITE,contains(
@@ -974,7 +974,7 @@ test_events(state_update_cth) ->
{?eh,cth,{'_',init,['_',[]]}},
{?eh,cth,{'_',init,['_',[]]}},
{?eh,tc_start,{'_',init_per_suite}},
-
+
{?eh,tc_done,{'_',end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth,{'_',terminate,[contains(
@@ -1021,7 +1021,7 @@ test_events(options_cth) ->
{?eh,cth,{empty_cth,pre_init_per_testcase,[test_case,'$proplist',[test]]}},
{?eh,cth,{empty_cth,post_end_per_testcase,[test_case,'$proplist','_',[test]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{empty_cth,pre_end_per_suite,
[ct_cth_empty_SUITE,'$proplist',[test]]}},
@@ -1058,7 +1058,7 @@ test_events(same_id_cth) ->
{negative,
{?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{negative,
@@ -1115,17 +1115,14 @@ test_events(fail_n_skip_with_minimal_cth) ->
];
test_events(prio_cth) ->
-
GenPre = fun(Func,States) ->
- [{?eh,cth,{'_',Func,['_','_',State]}} ||
- State <- States]
+ [{?eh,cth,{'_',Func,['_','_',State]}} || State <- States]
end,
GenPost = fun(Func,States) ->
- [{?eh,cth,{'_',Func,['_','_','_',State]}} ||
- State <- States]
+ [{?eh,cth,{'_',Func,['_','_','_',State]}} || State <- States]
end,
-
+
[{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}] ++
@@ -1136,7 +1133,7 @@ test_events(prio_cth) ->
[[1100,100],[600,200],[600,600],[700],[800],[900],[1000],
[1200,1050],[1100],[1200]]) ++
[{?eh,tc_done,{ct_cth_prio_SUITE,init_per_suite,ok}},
-
+
[{?eh,tc_start,{ct_cth_prio_SUITE,{init_per_group,'_',[]}}}] ++
GenPre(pre_init_per_group,
@@ -1147,7 +1144,7 @@ test_events(prio_cth) ->
[900],[900,900],[500,900],[1000],[1200,1050],
[1100],[1200]]) ++
[{?eh,tc_done,{ct_cth_prio_SUITE,{init_per_group,'_',[]},ok}}] ++
-
+
[{?eh,tc_start,{ct_cth_prio_SUITE,test_case}}] ++
GenPre(pre_init_per_testcase,
[[1100,100],[600,200],[600,600],[600],[700],[800],
@@ -1161,7 +1158,7 @@ test_events(prio_cth) ->
[{?eh,tc_done,{ct_cth_prio_SUITE,test_case,ok}},
{?eh,tc_start,{ct_cth_prio_SUITE,{end_per_group,'_',[]}}}] ++
- GenPre(pre_end_per_group,
+ GenPre(pre_end_per_group,
lists:reverse(
[[1100,100],[600,200],[600,600],[600],[700],[800],
[900],[900,900],[500,900],[1000],[1200,1050],
@@ -1300,7 +1297,7 @@ test_events(cth_log) ->
[{suite,cth_log_SUITE},parallel]}}},
{?eh,tc_done,{ct_framework,{end_per_group,g1,
[{suite,cth_log_SUITE},parallel]},ok}}]},
-
+
{?eh,tc_done,{cth_log_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
@@ -1309,7 +1306,6 @@ test_events(cth_log) ->
test_events(ok) ->
ok.
-
%% test events help functions
contains(List) ->
fun(Proplist) when is_list(Proplist) ->
diff --git a/lib/compiler/src/Makefile b/lib/compiler/src/Makefile
index c37f731d8c..cf60355a40 100644
--- a/lib/compiler/src/Makefile
+++ b/lib/compiler/src/Makefile
@@ -126,7 +126,7 @@ ERL_COMPILE_FLAGS += +native
endif
ERL_COMPILE_FLAGS += +inline +warn_unused_import \
-Werror \
- -I../../stdlib/include -I$(EGEN) -W
+ -I../../stdlib/include -I$(EGEN) -W +warn_missing_spec
# ----------------------------------------------------
# Targets
diff --git a/lib/compiler/src/beam_a.erl b/lib/compiler/src/beam_a.erl
index 91e6d80da3..cdb32d5d55 100644
--- a/lib/compiler/src/beam_a.erl
+++ b/lib/compiler/src/beam_a.erl
@@ -25,6 +25,9 @@
-export([module/2]).
+-spec module(beam_asm:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl
index 9c8ed2277f..a2f5dc674c 100644
--- a/lib/compiler/src/beam_asm.erl
+++ b/lib/compiler/src/beam_asm.erl
@@ -24,9 +24,42 @@
-export([module/4]).
-export([encode/2]).
+-export_type([fail/0,label/0,reg/0,src/0,module_code/0,function_name/0]).
+
-import(lists, [map/2,member/2,keymember/3,duplicate/2,splitwith/2]).
-include("beam_opcodes.hrl").
+%% Common types for describing operands for BEAM instructions.
+-type reg_num() :: 0..1023.
+-type reg() :: {'x',reg_num()} | {'y',reg_num()}.
+-type src() :: reg() |
+ {'literal',term()} |
+ {'atom',atom()} |
+ {'integer',integer()} |
+ 'nil' |
+ {'float',float()}.
+-type label() :: pos_integer().
+-type fail() :: {'f',label() | 0}.
+
+%% asm_instruction() describes only the instructions that
+%% are used in BEAM files (as opposed to internal instructions
+%% used only during optimization).
+
+-type asm_instruction() :: atom() | tuple().
+
+-type function_name() :: atom().
+
+-type exports() :: [{function_name(),arity()}].
+
+-type asm_function() ::
+ {'function',function_name(),arity(),label(),[asm_instruction()]}.
+
+-type module_code() ::
+ {module(),[_],[_],[asm_function()],pos_integer()}.
+
+-spec module(module_code(), exports(), [_], [compile:option()]) ->
+ {'ok',binary()}.
+
module(Code, Abst, SourceFile, Opts) ->
{ok,assemble(Code, Abst, SourceFile, Opts)}.
@@ -439,6 +472,8 @@ encode_alloc_list_1([{floats,Floats}|T], Dict, Acc0) ->
encode_alloc_list_1([], Dict, Acc) ->
{iolist_to_binary(Acc),Dict}.
+-spec encode(non_neg_integer(), pos_integer()) -> iodata().
+
encode(Tag, N) when N < 0 ->
encode1(Tag, negative_to_bytes(N));
encode(Tag, N) when N < 16 ->
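
To make the newly exported operand types concrete, here are some hypothetical values that match them (invented for illustration, not part of the patch):

    %% Illustration only: all values below are invented.
    Reg  = {x,0},                     %% reg()
    Src  = {literal,[1,2,3]},         %% src()
    Fail = {f,17},                    %% fail(), referring to label 17
    Move = {move,{atom,ok},Reg}.      %% a tuple-shaped asm_instruction()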
diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl
index 6a35191f6e..6543e05e20 100644
--- a/lib/compiler/src/beam_block.erl
+++ b/lib/compiler/src/beam_block.erl
@@ -25,6 +25,9 @@
-export([module/2]).
-import(lists, [reverse/1,reverse/2,foldl/3,member/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_bs.erl b/lib/compiler/src/beam_bs.erl
index 2aed98d4e7..beb055b23d 100644
--- a/lib/compiler/src/beam_bs.erl
+++ b/lib/compiler/src/beam_bs.erl
@@ -25,6 +25,9 @@
-export([module/2]).
-import(lists, [mapfoldl/3,reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc0}, _Opt) ->
{Fs,Lc} = mapfoldl(fun function/2, Lc0, Fs0),
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_bsm.erl b/lib/compiler/src/beam_bsm.erl
index ae1b34ba49..9a4e7fb133 100644
--- a/lib/compiler/src/beam_bsm.erl
+++ b/lib/compiler/src/beam_bsm.erl
@@ -60,19 +60,26 @@
%%% data structures or passed to BIFs.
%%%
+-type label() :: beam_asm:label().
+-type func_info() :: {beam_asm:reg(),boolean()}.
+
-record(btb,
- {f, %Gbtrees for all functions.
- index, %{Label,Code} index (for liveness).
- ok_br, %Labels that are OK.
- must_not_save, %Must not save position when
- % optimizing (reaches
- % bs_context_to_binary).
- must_save %Must save position when optimizing.
+ {f :: gb_trees:tree(label(), func_info()),
+ index :: beam_utils:code_index(), %{Label,Code} index (for liveness).
+ ok_br=gb_sets:empty() :: gb_sets:set(label()), %Labels that are OK.
+ must_not_save=false :: boolean(), %Must not save position when
+ % optimizing (reaches
+ % bs_context_to_binary).
+ must_save=false :: boolean() %Must save position when optimizing.
}).
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, Opts) ->
- D = #btb{f=btb_index(Fs0)},
- Fs = [function(F, D) || F <- Fs0],
+ FIndex = btb_index(Fs0),
+ Fs = [function(F, FIndex) || F <- Fs0],
Code = {Mod,Exp,Attr,Fs,Lc},
case proplists:get_bool(bin_opt_info, Opts) of
true ->
@@ -92,10 +99,10 @@ format_error({no_bin_opt,Reason}) ->
%%% Local functions.
%%%
-function({function,Name,Arity,Entry,Is}, D0) ->
+function({function,Name,Arity,Entry,Is}, FIndex) ->
try
Index = beam_utils:index_labels(Is),
- D = D0#btb{index=Index},
+ D = #btb{f=FIndex,index=Index},
{function,Name,Arity,Entry,btb_opt_1(Is, D, [])}
catch
Class:Error ->
@@ -179,15 +186,14 @@ btb_gen_save(false, _, Acc) -> Acc.
%% a bs_context_to_binary instruction.
%%
-btb_reaches_match(Is, RegList, D0) ->
+btb_reaches_match(Is, RegList, D) ->
try
Regs = btb_regs_from_list(RegList),
- D = D0#btb{ok_br=gb_sets:empty(),must_not_save=false,must_save=false},
#btb{must_not_save=MustNotSave,must_save=MustSave} =
- btb_reaches_match_1(Is, Regs, D),
- case MustNotSave and MustSave of
+ btb_reaches_match_1(Is, Regs, D),
+ case MustNotSave andalso MustSave of
true -> btb_error(must_and_must_not_save);
- _ -> {ok,MustSave}
+ false -> {ok,MustSave}
end
catch
throw:{error,_}=Error -> Error
diff --git a/lib/compiler/src/beam_clean.erl b/lib/compiler/src/beam_clean.erl
index 10805a3c36..b736d39f9c 100644
--- a/lib/compiler/src/beam_clean.erl
+++ b/lib/compiler/src/beam_clean.erl
@@ -26,6 +26,9 @@
-export([clean_labels/1]).
-import(lists, [map/2,foldl/3,reverse/1,filter/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,_}, Opts) ->
Order = [Lbl || {function,_,_,Lbl,_} <- Fs0],
All = foldl(fun({function,_,_,Lbl,_}=Func,D) -> dict:store(Lbl, Func, D) end,
@@ -39,6 +42,10 @@ module({Mod,Exp,Attr,Fs0,_}, Opts) ->
{ok,{Mod,Exp,Attr,Fs,Lc}}.
%% Remove all bs_save2/2 instructions not referenced by a bs_restore2/2.
+
+-spec bs_clean_saves([beam_utils:instruction()]) ->
+ [beam_utils:instruction()].
+
bs_clean_saves(Is) ->
Needed = bs_restores(Is, []),
bs_clean_saves_1(Is, gb_sets:from_list(Needed), []).
@@ -98,13 +105,18 @@ add_to_work_list(F, {Fs,Used}=Sets) ->
%%% want to see the expanded code in a .S file.
%%%
--record(st, {lmap, %Translation tables for labels.
- entry, %Number of entry label.
- lc %Label counter
+-type label() :: beam_asm:label().
+
+-record(st, {lmap :: [{label(),label()}], %Translation tables for labels.
+ entry :: beam_asm:label(), %Number of entry label.
+ lc :: non_neg_integer() %Label counter
}).
+-spec clean_labels([beam_utils:instruction()]) ->
+ {[beam_utils:instruction()],pos_integer()}.
+
clean_labels(Fs0) ->
- St0 = #st{lmap=[],lc=1},
+ St0 = #st{lmap=[],entry=1,lc=1},
{Fs1,#st{lmap=Lmap0,lc=Lc}} = function_renumber(Fs0, St0, []),
Lmap = gb_trees:from_orddict(ordsets:from_list(Lmap0)),
Fs = function_replace(Fs1, Lmap, []),
diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl
index 9087586b58..d379fdc4eb 100644
--- a/lib/compiler/src/beam_dead.erl
+++ b/lib/compiler/src/beam_dead.erl
@@ -29,6 +29,10 @@
-import(lists, [mapfoldl/3,reverse/1]).
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,_}, _Opts) ->
{Fs1,Lc1} = beam_clean:clean_labels(Fs0),
{Fs,Lc} = mapfoldl(fun function/2, Lc1, Fs1),
diff --git a/lib/compiler/src/beam_dict.erl b/lib/compiler/src/beam_dict.erl
index 9565ab74c4..719d799fd7 100644
--- a/lib/compiler/src/beam_dict.erl
+++ b/lib/compiler/src/beam_dict.erl
@@ -28,7 +28,7 @@
string_table/1,lambda_table/1,literal_table/1,
line_table/1]).
--type label() :: non_neg_integer().
+-type label() :: beam_asm:label().
-type index() :: non_neg_integer().
@@ -38,13 +38,16 @@
-type line_tab() :: #{{Fname :: index(), Line :: term()} => index()}.
-type literal_tab() :: dict:dict(Literal :: term(), index()).
+-type lambda_info() :: {label(),{index(),label(),non_neg_integer()}}.
+-type lambda_tab() :: {non_neg_integer(),[lambda_info()]}.
+
-record(asm,
{atoms = #{} :: atom_tab(),
exports = [] :: [{label(), arity(), label()}],
locals = [] :: [{label(), arity(), label()}],
imports = gb_trees:empty() :: import_tab(),
strings = <<>> :: binary(), %String pool
- lambdas = {0,[]}, %[{...}]
+ lambdas = {0,[]} :: lambda_tab(),
literals = dict:new() :: literal_tab(),
fnames = #{} :: fname_tab(),
lines = #{} :: line_tab(),
@@ -148,10 +151,7 @@ string(Str, Dict) when is_list(Str) ->
lambda(Lbl, NumFree, #asm{lambdas={OldIndex,Lambdas0}}=Dict) ->
%% Set Index the same as OldIndex.
Index = OldIndex,
- %% Initialize OldUniq to 0. It will be set to an unique value
- %% based on the MD5 checksum of the BEAM code for the module.
- OldUniq = 0,
- Lambdas = [{Lbl,{OldIndex,Lbl,Index,NumFree,OldUniq}}|Lambdas0],
+ Lambdas = [{Lbl,{Index,Lbl,NumFree}}|Lambdas0],
{OldIndex,Dict#asm{lambdas={OldIndex+1,Lambdas}}}.
%% Returns the index for a literal (adding it to the literal table if necessary).
@@ -185,6 +185,9 @@ line([{location,Name,Line}], #asm{lines=Lines,num_lines=N}=Dict0) ->
{Index, Dict1#asm{lines=Lines#{Key=>Index},num_lines=N+1}}
end.
+-spec fname(nonempty_string(), bdict()) ->
+ {non_neg_integer(), bdict()}.
+
fname(Name, #asm{fnames=Fnames}=Dict) ->
case Fnames of
#{Name := Index} -> {Index,Dict};
@@ -239,8 +242,11 @@ lambda_table(#asm{locals=Loc0,lambdas={NumLambdas,Lambdas0}}) ->
Lambdas1 = sofs:relation(Lambdas0),
Loc = sofs:relation([{Lbl,{F,A}} || {F,A,Lbl} <- Loc0]),
Lambdas2 = sofs:relative_product1(Lambdas1, Loc),
+ %% Initialize OldUniq to 0. It will be set to an unique value
+ %% based on the MD5 checksum of the BEAM code for the module.
+ OldUniq = 0,
Lambdas = [<<F:32,A:32,Lbl:32,Index:32,NumFree:32,OldUniq:32>> ||
- {{_,Lbl,Index,NumFree,OldUniq},{F,A}} <- sofs:to_external(Lambdas2)],
+ {{Index,Lbl,NumFree},{F,A}} <- sofs:to_external(Lambdas2)],
{NumLambdas,Lambdas}.
%% Returns the literal table.
diff --git a/lib/compiler/src/beam_except.erl b/lib/compiler/src/beam_except.erl
index 4a181c1923..9801c68ee2 100644
--- a/lib/compiler/src/beam_except.erl
+++ b/lib/compiler/src/beam_except.erl
@@ -33,6 +33,9 @@
-import(lists, [reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
@@ -49,9 +52,9 @@ function({function,Name,Arity,CLabel,Is0}) ->
end.
-record(st,
- {lbl, %func_info label
- loc, %location for func_info
- arity %arity for function
+ {lbl :: beam_asm:label(), %func_info label
+ loc :: [_], %location for func_info
+ arity :: arity() %arity for function
}).
function_1(Is0) ->
diff --git a/lib/compiler/src/beam_flatten.erl b/lib/compiler/src/beam_flatten.erl
index c9ff07b496..a4d45a4ca6 100644
--- a/lib/compiler/src/beam_flatten.erl
+++ b/lib/compiler/src/beam_flatten.erl
@@ -25,6 +25,9 @@
-import(lists, [reverse/1,reverse/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs,Lc}, _Opt) ->
{ok,{Mod,Exp,Attr,[function(F) || F <- Fs],Lc}}.
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index e096270d8c..4365451356 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -130,6 +130,11 @@
-import(lists, [reverse/1,reverse/2,foldl/3]).
+-type instruction() :: beam_utils:instruction().
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
@@ -269,9 +274,9 @@ extract_seq_1(_, _) -> no.
-record(st,
{
- entry, %Entry label (must not be moved).
- mlbl, %Moved labels.
- labels :: cerl_sets:set() %Set of referenced labels.
+ entry :: beam_asm:label(), %Entry label (must not be moved).
+ mlbl :: #{beam_asm:label() := [beam_asm:label()]}, %Moved labels.
+ labels :: cerl_sets:set() %Set of referenced labels.
}).
opt(Is0, CLabel) ->
@@ -453,6 +458,8 @@ is_label_used(L, St) ->
%% is_unreachable_after(Instruction) -> boolean()
%% Test whether the code after Instruction is unreachable.
+-spec is_unreachable_after(instruction()) -> boolean().
+
is_unreachable_after({func_info,_M,_F,_A}) -> true;
is_unreachable_after(return) -> true;
is_unreachable_after({jump,_Lbl}) -> true;
@@ -465,6 +472,8 @@ is_unreachable_after(I) -> is_exit_instruction(I).
%% Test whether the instruction Instruction always
%% causes an exit/failure.
+-spec is_exit_instruction(instruction()) -> boolean().
+
is_exit_instruction({call_ext,_,{extfunc,M,F,A}}) ->
erl_bifs:is_exit_bif(M, F, A);
is_exit_instruction(if_end) -> true;
@@ -477,6 +486,8 @@ is_exit_instruction(_) -> false.
%% Remove all unused labels. Also remove unreachable
%% instructions following labels that are removed.
+-spec remove_unused_labels([instruction()]) -> [instruction()].
+
remove_unused_labels(Is) ->
Used0 = initial_labels(Is),
Used = foldl(fun ulbl/2, Used0, Is),
diff --git a/lib/compiler/src/beam_listing.erl b/lib/compiler/src/beam_listing.erl
index d82ed8639d..94b47cf568 100644
--- a/lib/compiler/src/beam_listing.erl
+++ b/lib/compiler/src/beam_listing.erl
@@ -21,14 +21,24 @@
-export([module/2]).
+-include("core_parse.hrl").
+-include("v3_kernel.hrl").
-include("v3_life.hrl").
-import(lists, [foreach/2]).
-module(File, Core) when element(1, Core) == c_module ->
+-type code() :: cerl:c_module()
+ | beam_utils:module_code()
+ | #k_mdef{}
+ | {module(),_,_,_} %v3_life
+ | [_]. %form-based format
+
+-spec module(file:io_device(), code()) -> 'ok'.
+
+module(File, #c_module{}=Core) ->
%% This is a core module.
io:put_chars(File, core_pp:format(Core));
-module(File, Kern) when element(1, Kern) == k_mdef ->
+module(File, #k_mdef{}=Kern) ->
%% This is a kernel module.
io:put_chars(File, v3_kernel_pp:format(Kern));
%%io:put_chars(File, io_lib:format("~p~n", [Kern]));
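Note (illustration, not part of the patch): the formats accepted by the new code() type are the ones produced by the compiler's listing options, each of which ends in a beam_listing:module/2 call. A hedged sketch of requesting such listings; the module name is invented:

    compile:file("my_mod.erl", [to_core]),   %writes my_mod.core   (cerl)
    compile:file("my_mod.erl", [dkern]),     %writes my_mod.kernel (#k_mdef{})
    compile:file("my_mod.erl", ['S']).       %writes my_mod.S      (BEAM assembly)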
diff --git a/lib/compiler/src/beam_peep.erl b/lib/compiler/src/beam_peep.erl
index c8bef31824..6df5c02334 100644
--- a/lib/compiler/src/beam_peep.erl
+++ b/lib/compiler/src/beam_peep.erl
@@ -24,6 +24,9 @@
-import(lists, [reverse/1,member/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,_}, _Opts) ->
%% First coalesce adjacent labels.
{Fs1,Lc} = beam_clean:clean_labels(Fs0),
diff --git a/lib/compiler/src/beam_receive.erl b/lib/compiler/src/beam_receive.erl
index 89cafe27ce..1403e1e05e 100644
--- a/lib/compiler/src/beam_receive.erl
+++ b/lib/compiler/src/beam_receive.erl
@@ -65,6 +65,9 @@
%%% as the SomeUniqInteger.
%%%
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [function(F) || F <- Fs0],
Code = {Mod,Exp,Attr,Fs,Lc},
diff --git a/lib/compiler/src/beam_reorder.erl b/lib/compiler/src/beam_reorder.erl
index 6a7c033ec6..910b7f6b0a 100644
--- a/lib/compiler/src/beam_reorder.erl
+++ b/lib/compiler/src/beam_reorder.erl
@@ -23,6 +23,9 @@
-export([module/2]).
-import(lists, [member/2,reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_split.erl b/lib/compiler/src/beam_split.erl
index feeab0af50..d041f18806 100644
--- a/lib/compiler/src/beam_split.erl
+++ b/lib/compiler/src/beam_split.erl
@@ -23,6 +23,9 @@
-import(lists, [reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [split_blocks(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_trim.erl b/lib/compiler/src/beam_trim.erl
index d40669083e..4da0985085 100644
--- a/lib/compiler/src/beam_trim.erl
+++ b/lib/compiler/src/beam_trim.erl
@@ -24,10 +24,13 @@
-import(lists, [reverse/1,reverse/2,splitwith/2,sort/1]).
-record(st,
- {safe, %Safe labels.
- lbl %Code at each label.
+ {safe :: gb_sets:set(beam_asm:label()), %Safe labels.
+ lbl :: beam_utils:code_index() %Code at each label.
}).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index 9866dcd070..050c599d6b 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -26,6 +26,9 @@
-import(lists, [filter/2,foldl/3,keyfind/3,member/2,
reverse/1,reverse/2,sort/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index ffeff9ea81..cc6e54ca16 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -28,11 +28,31 @@
live_opt/1,delete_live_annos/1,combine_heap_needs/2,
split_even/1]).
+-export_type([code_index/0,module_code/0,instruction/0]).
+
-import(lists, [member/2,sort/1,reverse/1,splitwith/2]).
+%% instruction() describes all instructions that are used during optimization
+%% (from beam_a to beam_z).
+-type instruction() :: atom() | tuple().
+
+-type code_index() :: gb_trees:tree(beam_asm:label(), [instruction()]).
+
+-type int_function() :: {'function',beam_asm:function_name(),arity(),
+ beam_asm:label(),[instruction()]}.
+
+-type module_code() ::
+ {module(),[_],[_],[int_function()],pos_integer()}.
+
+%% Internal types.
+-type fail() :: beam_asm:fail() | 'fail'.
+-type test() :: {'test',atom(),fail(),[beam_asm:src()]} |
+ {'test',atom(),fail(),integer(),list(),beam_asm:reg()}.
+-type result_cache() :: gb_trees:tree(beam_asm:label(), 'killed' | 'used').
+
-record(live,
- {lbl, %Label to code index.
- res}). %Result cache for each label.
+ {lbl :: code_index(), %Label to code index.
+ res :: result_cache()}). %Result cache for each label.
%% is_killed_block(Register, [Instruction]) -> true|false
@@ -44,6 +64,8 @@
%% i.e. it is OK to enter the instruction sequence with Register
%% containing garbage.
+-spec is_killed_block(beam_asm:reg(), [instruction()]) -> boolean().
+
is_killed_block({x,X}, [{set,_,_,{alloc,Live,_}}|_]) ->
X >= Live;
is_killed_block(R, [{set,Ds,Ss,_Op}|Is]) ->
@@ -65,6 +87,8 @@ is_killed_block(_, []) -> false.
%% The state (constructed by index_instructions/1) is used to allow us
%% to determine the kill state across branches.
+-spec is_killed(beam_asm:reg(), [instruction()], code_index()) -> boolean().
+
is_killed(R, Is, D) ->
St = #live{lbl=D,res=gb_trees:empty()},
case check_liveness(R, Is, St) of
@@ -75,6 +99,8 @@ is_killed(R, Is, D) ->
%% is_killed_at(Reg, Lbl, State) -> true|false
%% Determine whether Reg is killed at label Lbl.
+-spec is_killed_at(beam_asm:reg(), beam_asm:label(), code_index()) -> boolean().
+
is_killed_at(R, Lbl, D) when is_integer(Lbl) ->
St0 = #live{lbl=D,res=gb_trees:empty()},
case check_liveness_at(R, Lbl, St0) of
@@ -89,6 +115,8 @@ is_killed_at(R, Lbl, D) when is_integer(Lbl) ->
%% The state is used to allow us to determine the usage state
%% across branches.
+-spec is_not_used(beam_asm:reg(), [instruction()], code_index()) -> boolean().
+
is_not_used(R, Is, D) ->
St = #live{lbl=D,res=gb_trees:empty()},
case check_liveness(R, Is, St) of
@@ -100,18 +128,25 @@ is_not_used(R, Is, D) ->
%% Index the instruction sequence so that we can quickly
%% look up the instruction following a specific label.
+-spec index_labels([instruction()]) -> code_index().
+
index_labels(Is) ->
index_labels_1(Is, []).
%% empty_label_index() -> State
%% Create an empty label index.
+-spec empty_label_index() -> code_index().
+
empty_label_index() ->
gb_trees:empty().
%% index_label(Label, [Instruction], State) -> State
%% Add an index for a label.
+-spec index_label(beam_asm:label(), [instruction()], code_index()) ->
+ code_index().
+
index_label(Lbl, Is0, Acc) ->
Is = drop_labels(Is0),
gb_trees:enter(Lbl, Is, Acc).
@@ -120,12 +155,16 @@ index_label(Lbl, Is0, Acc) ->
%% code_at(Label, State) -> [I].
%% Retrieve the code at the given label.
+-spec code_at(beam_asm:label(), code_index()) -> [instruction()].
+
code_at(L, Ll) ->
gb_trees:get(L, Ll).
%% bif_to_test(Bif, [Op], Fail) -> {test,Test,Fail,[Op]}
%% Convert a BIF to a test. Fail if not possible.
+-spec bif_to_test(atom(), list(), fail()) -> test().
+
bif_to_test(is_atom, [_]=Ops, Fail) -> {test,is_atom,Fail,Ops};
bif_to_test(is_boolean, [_]=Ops, Fail) -> {test,is_boolean,Fail,Ops};
bif_to_test(is_binary, [_]=Ops, Fail) -> {test,is_binary,Fail,Ops};
@@ -158,6 +197,9 @@ bif_to_test(is_record, [_,_,_]=Ops, Fail) -> {test,is_record,Fail,Ops}.
%% Return 'true' if the test instruction does not modify any
%% registers and/or bit syntax matching state.
%%
+
+-spec is_pure_test(test()) -> boolean().
+
is_pure_test({test,is_eq,_,[_,_]}) -> true;
is_pure_test({test,is_ne,_,[_,_]}) -> true;
is_pure_test({test,is_eq_exact,_,[_,_]}) -> true;
@@ -180,7 +222,9 @@ is_pure_test({test,Op,_,Ops}) ->
%% whose destination is a register that will not be used.
%% Also insert {'%live',Live,Regs} annotations at the beginning
%% and end of each block.
-%%
+
+-spec live_opt([instruction()]) -> [instruction()].
+
live_opt(Is0) ->
{[{label,Fail}|_]=Bef,[Fi|Is]} =
splitwith(fun({func_info,_,_,_}) -> false;
@@ -193,7 +237,9 @@ live_opt(Is0) ->
%% delete_live_annos([Instruction]) -> [Instruction].
%% Delete all live annotations.
-%%
+
+-spec delete_live_annos([instruction()]) -> [instruction()].
+
delete_live_annos([{block,Bl0}|Is]) ->
case delete_live_annos(Bl0) of
[] -> delete_live_annos(Is);
@@ -208,6 +254,8 @@ delete_live_annos([]) -> [].
%% combine_heap_needs(HeapNeed1, HeapNeed2) -> HeapNeed
%% Combine the heap need for two allocation instructions.
+-spec combine_heap_needs(term(), term()) -> term().
+
combine_heap_needs({alloc,Alloc1}, {alloc,Alloc2}) ->
{alloc,combine_alloc_lists(Alloc1, Alloc2)};
combine_heap_needs({alloc,Alloc}, Words) when is_integer(Words) ->
@@ -220,6 +268,8 @@ combine_heap_needs(H1, H2) when is_integer(H1), is_integer(H2) ->
%% split_even/1
%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]}
+-spec split_even(list()) -> {list(),list()}.
+
split_even(Rs) -> split_even(Rs, [], []).
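Note (illustration, not part of the patch): beam_utils now exports code_index(), module_code() and instruction() so that other passes can refer to them in their specs. A small sketch of the liveness API these types describe; the instruction stream below is invented, not real compiler output:

    Is = [{label,1},{move,{x,0},{y,0}},return],
    D = beam_utils:index_labels(Is),              %build a code_index()
    Killed = beam_utils:is_killed({x,1}, Is, D).  %liveness query over the stream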
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 5659077c5d..bf33ae0aeb 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -32,6 +32,10 @@
-import(lists, [reverse/1,foldl/3,foreach/2,dropwhile/2]).
%% To be called by the compiler.
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) ->
case validate(Mod, Fs) of
diff --git a/lib/compiler/src/beam_z.erl b/lib/compiler/src/beam_z.erl
index 6c7f8543c2..787e33c142 100644
--- a/lib/compiler/src/beam_z.erl
+++ b/lib/compiler/src/beam_z.erl
@@ -26,6 +26,9 @@
-import(lists, [dropwhile/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_asm:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl
index b1be6ffc6d..6b936a7687 100644
--- a/lib/compiler/src/cerl.erl
+++ b/lib/compiler/src/cerl.erl
@@ -1584,6 +1584,8 @@ ann_make_list(_, [], Node) ->
%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
%% map constructor, otherwise <code>false</code>.
+-type map_op() :: #c_literal{val::'assoc'} | #c_literal{val::'exact'}.
+
-spec is_c_map(cerl()) -> boolean().
is_c_map(#c_map{}) ->
@@ -1679,8 +1681,16 @@ update_c_map(#c_map{is_pat=true}=Old, M, Es) ->
update_c_map(#c_map{is_pat=false}=Old, M, Es) ->
ann_c_map(get_ann(Old), M, Es).
+-spec map_pair_key(c_map_pair()) -> cerl().
+
map_pair_key(#c_map_pair{key=K}) -> K.
+
+-spec map_pair_val(c_map_pair()) -> cerl().
+
map_pair_val(#c_map_pair{val=V}) -> V.
+
+-spec map_pair_op(c_map_pair()) -> map_op().
+
map_pair_op(#c_map_pair{op=Op}) -> Op.
-spec c_map_pair(cerl(), cerl()) -> c_map_pair().
@@ -1699,6 +1709,8 @@ c_map_pair_exact(Key,Val) ->
ann_c_map_pair(As,Op,K,V) ->
#c_map_pair{op=Op, key = K, val=V, anno = As}.
+-spec update_c_map_pair(c_map_pair(), map_op(), cerl(), cerl()) -> c_map_pair().
+
update_c_map_pair(Old,Op,K,V) ->
#c_map_pair{op=Op, key=K, val=V, anno = get_ann(Old)}.
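Note (illustration, not part of the patch): the new map_pair_key/1, map_pair_val/1 and map_pair_op/1 specs describe the accessors for abstract map pairs. A minimal sketch with invented values:

    Pair = cerl:c_map_pair(cerl:c_atom(key), cerl:c_int(42)),
    key = cerl:concrete(cerl:map_pair_key(Pair)),
    42 = cerl:concrete(cerl:map_pair_val(Pair)),
    assoc = cerl:concrete(cerl:map_pair_op(Pair)).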
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 8608d2daac..069add7890 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -147,8 +147,8 @@ env_compiler_options() -> env_default_opts().
%% Local functions
%%
--define(pass(P), {P,fun P/1}).
--define(pass(P,T), {P,fun T/1,fun P/1}).
+-define(pass(P), {P,fun P/2}).
+-define(pass(P,T), {P,fun T/1,fun P/2}).
env_default_opts() ->
Key = "ERL_COMPILER_OPTIONS",
@@ -173,17 +173,25 @@ env_default_opts() ->
do_compile(Input, Opts0) ->
Opts = expand_opts(Opts0),
- {Pid,Ref} =
- spawn_monitor(fun() ->
- exit(try
- internal(Input, Opts)
- catch
- error:Reason ->
- {error,Reason}
- end)
- end),
- receive
- {'DOWN',Ref,process,Pid,Rep} -> Rep
+ IntFun = fun() -> try
+ internal(Input, Opts)
+ catch
+ error:Reason ->
+ {error,Reason}
+ end
+ end,
+ %% Dialyzer has already spawned workers.
+ case lists:member(dialyzer, Opts) of
+ true ->
+ IntFun();
+ false ->
+ {Pid,Ref} =
+ spawn_monitor(fun() ->
+ exit(IntFun())
+ end),
+ receive
+ {'DOWN',Ref,process,Pid,Rep} -> Rep
+ end
end.
expand_opts(Opts0) ->
@@ -220,6 +228,8 @@ expand_opt(O, Os) -> [O|Os].
%% format_error(ErrorDescriptor) -> string()
+-spec format_error(term()) -> iolist().
+
format_error(no_native_support) ->
"this system is not configured for native-code compilation.";
format_error(no_crypto) ->
@@ -280,34 +290,35 @@ format_error_reason({Reason, Stack}) when is_list(Stack) ->
format_error_reason(Reason) ->
io_lib:format("~tp", [Reason]).
+-type err_warn_info() :: tuple().
+
%% The compile state record.
-record(compile, {filename="" :: file:filename(),
dir="" :: file:filename(),
base="" :: file:filename(),
ifile="" :: file:filename(),
ofile="" :: file:filename(),
- module=[],
- code=[],
- core_code=[],
- abstract_code=[], %Abstract code for debugger.
- options=[] :: [option()], %Options for compilation
+ module=[] :: module() | [],
+ core_code=[] :: cerl:c_module() | [],
+ abstract_code=[] :: binary() | [], %Abstract code for debugger.
+ options=[] :: [option()], %Options for compilation
mod_options=[] :: [option()], %Options for module_info
encoding=none :: none | epp:source_encoding(),
- errors=[],
- warnings=[]}).
+ errors=[] :: [err_warn_info()],
+ warnings=[] :: [err_warn_info()]}).
internal({forms,Forms}, Opts0) ->
{_,Ps} = passes(forms, Opts0),
Source = proplists:get_value(source, Opts0, ""),
Opts1 = proplists:delete(source, Opts0),
- Compile = #compile{code=Forms,options=Opts1,mod_options=Opts1},
- internal_comp(Ps, Source, "", Compile);
+ Compile = #compile{options=Opts1,mod_options=Opts1},
+ internal_comp(Ps, Forms, Source, "", Compile);
internal({file,File}, Opts) ->
{Ext,Ps} = passes(file, Opts),
Compile = #compile{options=Opts,mod_options=Opts},
- internal_comp(Ps, File, Ext, Compile).
+ internal_comp(Ps, none, File, Ext, Compile).
-internal_comp(Passes, File, Suffix, St0) ->
+internal_comp(Passes, Code0, File, Suffix, St0) ->
Dir = filename:dirname(File),
Base = filename:basename(File, Suffix),
St1 = St0#compile{filename=File, dir=Dir, base=Base,
@@ -317,36 +328,41 @@ internal_comp(Passes, File, Suffix, St0) ->
Run0 = case member(time, Opts) of
true ->
io:format("Compiling ~tp\n", [File]),
- fun run_tc/2;
- false -> fun({_Name,Fun}, St) -> catch Fun(St) end
+ fun run_tc/3;
+ false ->
+ fun({_Name,Fun}, Code, St) ->
+ catch Fun(Code, St)
+ end
end,
Run = case keyfind(eprof, 1, Opts) of
{eprof,EprofPass} ->
- fun(P, St) ->
- run_eprof(P, EprofPass, St)
+ fun(P, Code, St) ->
+ run_eprof(P, Code, EprofPass, St)
end;
false ->
Run0
end,
- case fold_comp(Passes, Run, St1) of
- {ok,St2} -> comp_ret_ok(St2);
+ case fold_comp(Passes, Run, Code0, St1) of
+ {ok,Code,St2} -> comp_ret_ok(Code, St2);
{error,St2} -> comp_ret_err(St2)
end.
-fold_comp([{delay,Ps0}|Passes], Run, #compile{options=Opts}=St) ->
+fold_comp([{delay,Ps0}|Passes], Run, Code, #compile{options=Opts}=St) ->
Ps = select_passes(Ps0, Opts) ++ Passes,
- fold_comp(Ps, Run, St);
-fold_comp([{Name,Test,Pass}|Ps], Run, St) ->
+ fold_comp(Ps, Run, Code, St);
+fold_comp([{Name,Test,Pass}|Ps], Run, Code, St) ->
case Test(St) of
false -> %Pass is not needed.
- fold_comp(Ps, Run, St);
+ fold_comp(Ps, Run, Code, St);
true -> %Run pass in the usual way.
- fold_comp([{Name,Pass}|Ps], Run, St)
+ fold_comp([{Name,Pass}|Ps], Run, Code, St)
end;
-fold_comp([{Name,Pass}|Ps], Run, St0) ->
- case Run({Name,Pass}, St0) of
- {ok,St1} -> fold_comp(Ps, Run, St1);
- {error,_St1} = Error -> Error;
+fold_comp([{Name,Pass}|Ps], Run, Code0, St0) ->
+ case Run({Name,Pass}, Code0, St0) of
+ {ok,Code,St1} ->
+ fold_comp(Ps, Run, Code, St1);
+ {error,_St1}=Error ->
+ Error;
{'EXIT',Reason} ->
Es = [{St0#compile.ifile,[{none,?MODULE,{crash,Name,Reason}}]}],
{error,St0#compile{errors=St0#compile.errors ++ Es}};
@@ -354,11 +370,11 @@ fold_comp([{Name,Pass}|Ps], Run, St0) ->
Es = [{St0#compile.ifile,[{none,?MODULE,{bad_return,Name,Other}}]}],
{error,St0#compile{errors=St0#compile.errors ++ Es}}
end;
-fold_comp([], _Run, St) -> {ok,St}.
+fold_comp([], _Run, Code, St) -> {ok,Code,St}.
-run_tc({Name,Fun}, St) ->
+run_tc({Name,Fun}, Code, St) ->
T1 = erlang:monotonic_time(),
- Val = (catch Fun(St)),
+ Val = (catch Fun(Code, St)),
T2 = erlang:monotonic_time(),
Elapsed = erlang:convert_time_unit(T2 - T1, native, millisecond),
Mem0 = erts_debug:flat_size(Val)*erlang:system_info(wordsize),
@@ -367,17 +383,17 @@ run_tc({Name,Fun}, St) ->
[Name,Elapsed/1000,Mem]),
Val.
-run_eprof({Name,Fun}, Name, St) ->
+run_eprof({Name,Fun}, Code, Name, St) ->
io:format("~p: Running eprof\n", [Name]),
c:appcall(tools, eprof, start_profiling, [[self()]]),
- Val = (catch Fun(St)),
+ Val = (catch Fun(Code, St)),
c:appcall(tools, eprof, stop_profiling, []),
c:appcall(tools, eprof, analyze, []),
Val;
-run_eprof({_,Fun}, _, St) ->
- catch Fun(St).
+run_eprof({_,Fun}, Code, _, St) ->
+ catch Fun(Code, St).
-comp_ret_ok(#compile{code=Code,warnings=Warn0,module=Mod,options=Opts}=St) ->
+comp_ret_ok(Code, #compile{warnings=Warn0,module=Mod,options=Opts}=St) ->
case werror(St) of
true ->
case member(report_warnings, Opts) of
@@ -532,21 +548,21 @@ pass(_) -> none.
%%
select_passes([{pass,Mod}|Ps], Opts) ->
- F = fun(St) ->
- case catch Mod:module(St#compile.code, St#compile.options) of
+ F = fun(Code0, St) ->
+ case catch Mod:module(Code0, St#compile.options) of
{ok,Code} ->
- {ok,St#compile{code=Code}};
+ {ok,Code,St};
{ok,Code,Ws} ->
- {ok,St#compile{code=Code,warnings=St#compile.warnings++Ws}};
+ {ok,Code,St#compile{warnings=St#compile.warnings++Ws}};
{error,Es} ->
{error,St#compile{errors=St#compile.errors ++ Es}}
end
end,
[{Mod,F}|select_passes(Ps, Opts)];
select_passes([{src_listing,Ext}|_], _Opts) ->
- [{listing,fun (St) -> src_listing(Ext, St) end}];
+ [{listing,fun (Code, St) -> src_listing(Ext, Code, St) end}];
select_passes([{listing,Ext}|_], _Opts) ->
- [{listing,fun (St) -> listing(Ext, St) end}];
+ [{listing,fun (Code, St) -> listing(Ext, Code, St) end}];
select_passes([done|_], _Opts) ->
[];
select_passes([{done,Ext}|_], Opts) ->
@@ -662,14 +678,14 @@ core_passes() ->
[{iff,clint0,?pass(core_lint_module)},
{delay,
[{unless,no_copt,
- [{core_old_inliner,fun test_old_inliner/1,fun core_old_inliner/1},
+ [{core_old_inliner,fun test_old_inliner/1,fun core_old_inliner/2},
{iff,doldinline,{listing,"oldinline"}},
{pass,sys_core_fold},
{iff,dcorefold,{listing,"corefold"}},
- {core_inline_module,fun test_core_inliner/1,fun core_inline_module/1},
+ {core_inline_module,fun test_core_inliner/1,fun core_inline_module/2},
{iff,dinline,{listing,"inline"}},
{core_fold_after_inlining,fun test_any_inliner/1,
- fun core_fold_module_after_inlining/1},
+ fun core_fold_module_after_inlining/2},
?pass(core_transforms)]},
{iff,dcopt,{listing,"copt"}},
{iff,'to_core',{done,"core"}}]}
@@ -741,7 +757,7 @@ asm_passes() ->
| binary_passes()].
binary_passes() ->
- [{native_compile,fun test_native/1,fun native_compile/1},
+ [{native_compile,fun test_native/1,fun native_compile/2},
{unless,binary,?pass(save_binary,not_werror)}].
%%%
@@ -749,9 +765,9 @@ binary_passes() ->
%%%
%% Remove the target file so we don't have an old one if the compilation fails.
-remove_file(St) ->
+remove_file(Code, St) ->
_ = file:delete(St#compile.ofile),
- {ok,St}.
+ {ok,Code,St}.
-record(asm_module, {module,
exports,
@@ -799,28 +815,28 @@ collect_asm([{attributes, Attr} | Rest], R) ->
collect_asm([X | Rest], R) ->
collect_asm(Rest, R#asm_module{code=R#asm_module.code++[X]}).
-beam_consult_asm(St) ->
+beam_consult_asm(_Code, St) ->
case file:consult(St#compile.ifile) of
- {ok, Forms0} ->
+ {ok,Forms0} ->
Encoding = epp:read_encoding(St#compile.ifile),
- {Module, Forms} = preprocess_asm_forms(Forms0),
- {ok,St#compile{module=Module, code=Forms, encoding=Encoding}};
+ {Module,Forms} = preprocess_asm_forms(Forms0),
+ {ok,Forms,St#compile{module=Module,encoding=Encoding}};
{error,E} ->
Es = [{St#compile.ifile,[{none,?MODULE,{open,E}}]}],
{error,St#compile{errors=St#compile.errors ++ Es}}
end.
-read_beam_file(St) ->
+read_beam_file(_Code, St) ->
case file:read_file(St#compile.ifile) of
{ok,Beam} ->
Infile = St#compile.ifile,
case no_native_compilation(Infile, St) of
true ->
- {ok,St#compile{module=none,code=none}};
+ {ok,none,St#compile{module=none}};
false ->
Mod0 = filename:rootname(filename:basename(Infile)),
Mod = list_to_atom(Mod0),
- {ok,St#compile{module=Mod,code=Beam,ofile=Infile}}
+ {ok,Beam,St#compile{module=Mod,ofile=Infile}}
end;
{error,E} ->
Es = [{St#compile.ifile,[{none,?MODULE,{open,E}}]}],
@@ -839,17 +855,17 @@ no_native_compilation(BeamFile, #compile{options=Opts0}) ->
_ -> false
end.
-parse_module(St0) ->
+parse_module(_Code, St0) ->
case do_parse_module(utf8, St0) of
- {ok,_}=Ret ->
+ {ok,_,_}=Ret ->
Ret;
{error,_}=Ret ->
Ret;
{invalid_unicode,File,Line} ->
case do_parse_module(latin1, St0) of
- {ok,St} ->
+ {ok,Code,St} ->
Es = [{File,[{Line,?MODULE,reparsing_invalid_unicode}]}],
- {ok,St#compile{warnings=Es++St#compile.warnings}};
+ {ok,Code,St#compile{warnings=Es++St#compile.warnings}};
{error,St} ->
Es = [{File,[{Line,?MODULE,reparsing_invalid_unicode}]}],
{error,St#compile{errors=Es++St#compile.errors}}
@@ -867,13 +883,13 @@ do_parse_module(DefEncoding, #compile{ifile=File,options=Opts,dir=Dir}=St) ->
Encoding = proplists:get_value(encoding, Extra),
case find_invalid_unicode(Forms, File) of
none ->
- {ok,St#compile{code=Forms,encoding=Encoding}};
+ {ok,Forms,St#compile{encoding=Encoding}};
{invalid_unicode,_,_}=Ret ->
case Encoding of
none ->
Ret;
_ ->
- {ok,St#compile{code=Forms,encoding=Encoding}}
+ {ok,Forms,St#compile{encoding=Encoding}}
end
end;
{error,E} ->
@@ -892,7 +908,7 @@ find_invalid_unicode([H|T], File0) ->
end;
find_invalid_unicode([], _) -> none.
-parse_core(St) ->
+parse_core(_Code, St) ->
case file:read_file(St#compile.ifile) of
{ok,Bin} ->
case core_scan:string(binary_to_list(Bin)) of
@@ -900,7 +916,7 @@ parse_core(St) ->
case core_parse:parse(Toks) of
{ok,Mod} ->
Name = (Mod#c_module.name)#c_literal.val,
- {ok,St#compile{module=Name,code=Mod}};
+ {ok,Mod,St#compile{module=Name}};
{error,E} ->
Es = [{St#compile.ifile,[E]}],
{error,St#compile{errors=St#compile.errors ++ Es}}
@@ -937,31 +953,36 @@ clean_parse_transforms_1([], Acc) -> reverse(Acc).
transforms(Os) -> [ M || {parse_transform,M} <- Os ].
-transform_module(#compile{options=Opt,code=Code0}=St0) ->
+transform_module(Code0, #compile{options=Opt}=St) ->
%% Extract compile options from code into options field.
case transforms(Opt ++ compile_options(Code0)) of
- [] -> {ok,St0}; %No parse transforms.
+ [] ->
+ %% No parse transforms.
+ {ok,Code0,St};
Ts ->
%% Remove parse_transform attributes from the abstract code to
%% prevent parse transforms from being run more than once.
Code = clean_parse_transforms(Code0),
- St = St0#compile{code=Code},
- foldl_transform(St, Ts)
+ foldl_transform(Ts, Code, St)
end.
-foldl_transform(St, [T|Ts]) ->
+foldl_transform([T|Ts], Code0, St) ->
Name = "transform " ++ atom_to_list(T),
case code:ensure_loaded(T) =:= {module,T} andalso
- erlang:function_exported(T, parse_transform, 2) of
+ erlang:function_exported(T, parse_transform, 2) of
true ->
- Fun = fun(S) ->
- T:parse_transform(S#compile.code, S#compile.options)
+ Fun = fun(Code, S) ->
+ T:parse_transform(Code, S#compile.options)
end,
Run = case member(time, St#compile.options) of
- true -> fun run_tc/2;
- false -> fun({_Name,F}, S) -> catch F(S) end
+ true ->
+ fun run_tc/3;
+ false ->
+ fun({_Name,F}, Code, S) ->
+ catch F(Code, S)
+ end
end,
- case Run({Name, Fun}, St) of
+ case Run({Name, Fun}, Code0, St) of
{error,Es,Ws} ->
{error,St#compile{warnings=St#compile.warnings ++ Ws,
errors=St#compile.errors ++ Es}};
@@ -970,41 +991,44 @@ foldl_transform(St, [T|Ts]) ->
{parse_transform,T,R}}]}],
{error,St#compile{errors=St#compile.errors ++ Es}};
{warning, Forms, Ws} ->
- foldl_transform(
- St#compile{code=Forms,
- warnings=St#compile.warnings ++ Ws}, Ts);
+ foldl_transform(Ts, Forms,
+ St#compile{warnings=St#compile.warnings ++ Ws});
Forms ->
- foldl_transform(St#compile{code=Forms}, Ts)
+ foldl_transform(Ts, Forms, St)
end;
false ->
Es = [{St#compile.ifile,[{none,compile,
{undef_parse_transform,T}}]}],
{error,St#compile{errors=St#compile.errors ++ Es}}
end;
-foldl_transform(St, []) -> {ok,St}.
+foldl_transform([], Code, St) -> {ok,Code,St}.
get_core_transforms(Opts) -> [M || {core_transform,M} <- Opts].
-core_transforms(St) ->
+core_transforms(Code, St) ->
%% The options field holds the complete list of options at this point.
Ts = get_core_transforms(St#compile.options),
- foldl_core_transforms(St, Ts).
+ foldl_core_transforms(Ts, Code, St).
-foldl_core_transforms(St, [T|Ts]) ->
+foldl_core_transforms([T|Ts], Code0, St) ->
Name = "core transform " ++ atom_to_list(T),
- Fun = fun(S) -> T:core_transform(S#compile.code, S#compile.options) end,
+ Fun = fun(Code, S) -> T:core_transform(Code, S#compile.options) end,
Run = case member(time, St#compile.options) of
- true -> fun run_tc/2;
- false -> fun({_Name,F}, S) -> catch F(S) end
+ true ->
+ fun run_tc/3;
+ false ->
+ fun({_Name,F}, Code, S) ->
+ catch F(Code, S)
+ end
end,
- case Run({Name, Fun}, St) of
+ case Run({Name, Fun}, Code0, St) of
{'EXIT',R} ->
Es = [{St#compile.ifile,[{none,compile,{core_transform,T,R}}]}],
{error,St#compile{errors=St#compile.errors ++ Es}};
Forms ->
- foldl_core_transforms(St#compile{code=Forms}, Ts)
+ foldl_core_transforms(Ts, Forms, St)
end;
-foldl_core_transforms(St, []) -> {ok,St}.
+foldl_core_transforms([], Code, St) -> {ok,Code,St}.
%%% Fetches the module name from a list of forms. The module attribute must
%%% be present.
@@ -1025,31 +1049,28 @@ add_default_base(St, Forms) ->
St
end.
-lint_module(St) ->
- case erl_lint:module(St#compile.code,
- St#compile.ifile, St#compile.options) of
+lint_module(Code, St) ->
+ case erl_lint:module(Code, St#compile.ifile, St#compile.options) of
{ok,Ws} ->
%% Insert name of module as base name, if needed. This is
%% for compile:forms to work with listing files.
- St1 = add_default_base(St, St#compile.code),
- {ok,St1#compile{warnings=St1#compile.warnings ++ Ws}};
+ St1 = add_default_base(St, Code),
+ {ok,Code,St1#compile{warnings=St1#compile.warnings ++ Ws}};
{error,Es,Ws} ->
{error,St#compile{warnings=St#compile.warnings ++ Ws,
errors=St#compile.errors ++ Es}}
end.
-core_lint_module(St) ->
- case core_lint:module(St#compile.code, St#compile.options) of
+core_lint_module(Code, St) ->
+ case core_lint:module(Code, St#compile.options) of
{ok,Ws} ->
- {ok,St#compile{warnings=St#compile.warnings ++ Ws}};
+ {ok,Code,St#compile{warnings=St#compile.warnings ++ Ws}};
{error,Es,Ws} ->
{error,St#compile{warnings=St#compile.warnings ++ Ws,
errors=St#compile.errors ++ Es}}
end.
-makedep(#compile{code=Code,options=Opts}=St) ->
- Ifile = St#compile.ifile,
- Ofile = St#compile.ofile,
+makedep(Code0, #compile{ifile=Ifile,ofile=Ofile,options=Opts}=St) ->
%% Get the target of the Makefile rule.
Target0 =
@@ -1081,7 +1102,7 @@ makedep(#compile{code=Code,options=Opts}=St) ->
%% List the dependencies (includes) for this target.
{MainRule,PhonyRules} = makedep_add_headers(
Ifile, % The input file name.
- Code, % The parsed source.
+ Code0, % The parsed source.
[], % The list of dependencies already added.
length(Target), % The current line length.
Target, % The target.
@@ -1101,7 +1122,8 @@ makedep(#compile{code=Code,options=Opts}=St) ->
true -> MainRule ++ PhonyRules;
_ -> MainRule
end,
- {ok,St#compile{code=iolist_to_binary([Makefile,"\n"])}}.
+ Code = iolist_to_binary([Makefile,"\n"]),
+ {ok,Code,St}.
makedep_add_headers(Ifile, [{attribute,_,file,{File,_}}|Rest],
Included, LineLen, MainTarget, Phony, Opts) ->
@@ -1166,7 +1188,7 @@ makedep_add_header(Ifile, Included, LineLen, MainTarget, Phony, File) ->
end
end.
-makedep_output(#compile{code=Code,options=Opts,ofile=Ofile}=St) ->
+makedep_output(Code, #compile{options=Opts,ofile=Ofile}=St) ->
%% Write this Makefile (Code) to the selected output.
%% If no output is specified, the default is to write to a file named after
%% the output file.
@@ -1208,7 +1230,7 @@ makedep_output(#compile{code=Code,options=Opts,ofile=Ofile}=St) ->
CloseOutput -> ok = file:close(Output1);
true -> ok
end,
- {ok,St}
+ {ok,Code,St}
catch
error:_ ->
%% Couldn't write to output Makefile.
@@ -1225,33 +1247,33 @@ makedep_output(#compile{code=Code,options=Opts,ofile=Ofile}=St) ->
{error,St#compile{errors=St#compile.errors++[Err]}}
end.
-expand_records(#compile{code=Code0,options=Opts}=St0) ->
+expand_records(Code0, #compile{options=Opts}=St) ->
Code = erl_expand_records:module(Code0, Opts),
- {ok,St0#compile{code=Code}}.
+ {ok,Code,St}.
-core(#compile{code=Forms,options=Opts0}=St) ->
+core(Forms, #compile{options=Opts0}=St) ->
Opts1 = lists:flatten([C || {attribute,_,compile,C} <- Forms] ++ Opts0),
Opts = expand_opts(Opts1),
{ok,Core,Ws} = v3_core:module(Forms, Opts),
Mod = cerl:concrete(cerl:module_name(Core)),
- {ok,St#compile{module=Mod,code=Core,options=Opts,
- warnings=St#compile.warnings++Ws}}.
+ {ok,Core,St#compile{module=Mod,options=Opts,
+ warnings=St#compile.warnings++Ws}}.
-core_fold_module_after_inlining(#compile{code=Code0,options=Opts}=St) ->
+core_fold_module_after_inlining(Code0, #compile{options=Opts}=St) ->
%% Inlining may produce code that generates spurious warnings.
%% Ignore all warnings.
{ok,Code,_Ws} = sys_core_fold:module(Code0, Opts),
- {ok,St#compile{code=Code}}.
+ {ok,Code,St}.
-v3_kernel(#compile{code=Code0,options=Opts,warnings=Ws0}=St) ->
+v3_kernel(Code0, #compile{options=Opts,warnings=Ws0}=St) ->
{ok,Code,Ws} = v3_kernel:module(Code0, Opts),
case Ws =:= [] orelse test_core_inliner(St) of
false ->
- {ok,St#compile{code=Code,warnings=Ws0++Ws}};
+ {ok,Code,St#compile{warnings=Ws0++Ws}};
true ->
%% cerl_inline may produce code that generates spurious
%% warnings. Ignore any such warnings.
- {ok,St#compile{code=Code}}
+ {ok,Code,St}
end.
test_old_inliner(#compile{options=Opts}) ->
@@ -1275,23 +1297,23 @@ test_core_inliner(#compile{options=Opts}) ->
test_any_inliner(St) ->
test_old_inliner(St) orelse test_core_inliner(St).
-core_old_inliner(#compile{code=Code0,options=Opts}=St) ->
+core_old_inliner(Code0, #compile{options=Opts}=St) ->
{ok,Code} = sys_core_inline:module(Code0, Opts),
- {ok,St#compile{code=Code}}.
+ {ok,Code,St}.
-core_inline_module(#compile{code=Code0,options=Opts}=St) ->
+core_inline_module(Code0, #compile{options=Opts}=St) ->
Code = cerl_inline:core_transform(Code0, Opts),
- {ok,St#compile{code=Code}}.
+ {ok,Code,St}.
-save_abstract_code(#compile{ifile=File}=St) ->
- case abstract_code(St) of
- {ok,Code} ->
- {ok,St#compile{abstract_code=Code}};
+save_abstract_code(Code, #compile{ifile=File}=St) ->
+ case abstract_code(Code, St) of
+ {ok,Abstr} ->
+ {ok,Code,St#compile{abstract_code=Abstr}};
{error,Es} ->
{error,St#compile{errors=St#compile.errors ++ [{File,Es}]}}
end.
-abstract_code(#compile{code=Code0,options=Opts,ofile=OFile}) ->
+abstract_code(Code0, #compile{options=Opts,ofile=OFile}) ->
Code = erl_parse:anno_to_term(Code0),
Abstr = erlang:term_to_binary({raw_abstract_v1,Code}, [compressed]),
case member(encrypt_debug_info, Opts) of
@@ -1313,7 +1335,7 @@ abstract_code(#compile{code=Code0,options=Opts,ofile=OFile}) ->
end
end;
false ->
- {ok, Abstr}
+ {ok,Abstr}
end.
encrypt_abs_code(Abstr, Key0) ->
@@ -1351,18 +1373,17 @@ encrypt({des3_cbc=Type,Key,IVec,BlockSize}, Bin0) ->
TypeString = atom_to_list(Type),
list_to_binary([0,length(TypeString),TypeString,Bin]).
-save_core_code(St) ->
- {ok,St#compile{core_code=cerl:from_records(St#compile.code)}}.
+save_core_code(Code, St) ->
+ {ok,Code,St#compile{core_code=cerl:from_records(Code)}}.
-beam_asm(#compile{ifile=File,code=Code0,
- abstract_code=Abst,mod_options=Opts0}=St) ->
+beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,mod_options=Opts0}=St) ->
Source = paranoid_absname(File),
Opts1 = lists:map(fun({debug_info_key,_}) -> {debug_info_key,'********'};
(Other) -> Other
end, Opts0),
Opts2 = [O || O <- Opts1, effects_code_generation(O)],
case beam_asm:module(Code0, Abst, Source, Opts2) of
- {ok,Code} -> {ok,St#compile{code=Code,abstract_code=[]}}
+ {ok,Code} -> {ok,Code,St#compile{abstract_code=[]}}
end.
paranoid_absname(""=File) ->
@@ -1386,17 +1407,17 @@ is_native_enabled([no_native|_]) -> false;
is_native_enabled([_|Opts]) -> is_native_enabled(Opts);
is_native_enabled([]) -> false.
-native_compile(#compile{code=none}=St) -> {ok,St};
-native_compile(St) ->
+native_compile(none, St) -> {ok,none,St};
+native_compile(Code, St) ->
case erlang:system_info(hipe_architecture) of
undefined ->
Ws = [{St#compile.ifile,[{none,compile,no_native_support}]}],
- {ok,St#compile{warnings=St#compile.warnings ++ Ws}};
+ {ok,Code,St#compile{warnings=St#compile.warnings ++ Ws}};
_ ->
- native_compile_1(St)
+ native_compile_1(Code, St)
end.
-native_compile_1(St) ->
+native_compile_1(Code, St) ->
Opts0 = St#compile.options,
IgnoreErrors = member(ignore_native_errors, Opts0),
Opts = case keyfind(hipe, 1, Opts0) of
@@ -1406,10 +1427,10 @@ native_compile_1(St) ->
end,
try hipe:compile(St#compile.module,
St#compile.core_code,
- St#compile.code,
+ Code,
Opts) of
{ok,{_Type,Bin}=T} when is_binary(Bin) ->
- {ok,embed_native_code(St, T)};
+ {ok,embed_native_code(Code, T),St};
{error,R} ->
case IgnoreErrors of
true ->
@@ -1432,13 +1453,13 @@ native_compile_1(St) ->
end
end.
-embed_native_code(St, {Architecture,NativeCode}) ->
- {ok, _, Chunks0} = beam_lib:all_chunks(St#compile.code),
+embed_native_code(Code, {Architecture,NativeCode}) ->
+ {ok, _, Chunks0} = beam_lib:all_chunks(Code),
ChunkName = hipe_unified_loader:chunk_name(Architecture),
Chunks1 = lists:keydelete(ChunkName, 1, Chunks0),
Chunks = Chunks1 ++ [{ChunkName,NativeCode}],
- {ok, BeamPlusNative} = beam_lib:build_module(Chunks),
- St#compile{code=BeamPlusNative}.
+ {ok,BeamPlusNative} = beam_lib:build_module(Chunks),
+ BeamPlusNative.
%% effects_code_generation(Option) -> true|false.
%% Determine whether the option could have any effect on the
@@ -1458,18 +1479,17 @@ effects_code_generation(Option) ->
_ -> true
end.
-save_binary(#compile{code=none}=St) -> {ok,St};
-save_binary(#compile{module=Mod,ofile=Outfile,
- options=Opts}=St) ->
+save_binary(none, St) -> {ok,none,St};
+save_binary(Code, #compile{module=Mod,ofile=Outfile,options=Opts}=St) ->
%% Test that the module name and output file name match.
case member(no_error_module_mismatch, Opts) of
true ->
- save_binary_1(St);
+ save_binary_1(Code, St);
false ->
Base = filename:rootname(filename:basename(Outfile)),
case atom_to_list(Mod) of
Base ->
- save_binary_1(St);
+ save_binary_1(Code, St);
_ ->
Es = [{St#compile.ofile,
[{none,?MODULE,{module_name,Mod,Base}}]}],
@@ -1477,14 +1497,14 @@ save_binary(#compile{module=Mod,ofile=Outfile,
end
end.
-save_binary_1(St) ->
+save_binary_1(Code, St) ->
Ofile = St#compile.ofile,
Tfile = tmpfile(Ofile), %Temp working file
- case write_binary(Tfile, St#compile.code, St) of
+ case write_binary(Tfile, Code, St) of
ok ->
case file:rename(Tfile, Ofile) of
ok ->
- {ok,St};
+ {ok,none,St};
{error,RenameError} ->
Es0 = [{Ofile,[{none,?MODULE,{rename,Tfile,Ofile,
RenameError}}]}],
@@ -1584,6 +1604,9 @@ list_errors(_F, []) -> ok.
%% tmpfile(ObjFile) -> TmpFile
%% Work out the correct input and output file names.
+-spec iofile(atom() | file:filename_all()) ->
+ {file:name_all(),file:name_all()}.
+
iofile(File) when is_atom(File) ->
iofile(atom_to_list(File));
iofile(File) ->
@@ -1624,29 +1647,29 @@ pre_defs([]) -> [].
inc_paths(Opts) ->
[ P || {i,P} <- Opts, is_list(P) ].
-src_listing(Ext, St) ->
+src_listing(Ext, Code, St) ->
listing(fun (Lf, {_Mod,_Exp,Fs}) -> do_src_listing(Lf, Fs);
(Lf, Fs) -> do_src_listing(Lf, Fs) end,
- Ext, St).
+ Ext, Code, St).
do_src_listing(Lf, Fs) ->
Opts = [lists:keyfind(encoding, 1, io:getopts(Lf))],
foreach(fun (F) -> io:put_chars(Lf, [erl_pp:form(F, Opts),"\n"]) end,
Fs).
-listing(Ext, St0) ->
+listing(Ext, Code, St0) ->
St = St0#compile{encoding = none},
- listing(fun(Lf, Fs) -> beam_listing:module(Lf, Fs) end, Ext, St).
+ listing(fun(Lf, Fs) -> beam_listing:module(Lf, Fs) end, Ext, Code, St).
-listing(LFun, Ext, St) ->
+listing(LFun, Ext, Code, St) ->
Lfile = outfile(St#compile.base, Ext, St#compile.options),
case file:open(Lfile, [write,delayed_write]) of
{ok,Lf} ->
- Code = restore_expanded_types(Ext, St#compile.code),
+ Code = restore_expanded_types(Ext, Code),
output_encoding(Lf, St),
LFun(Lf, Code),
ok = file:close(Lf),
- {ok,St};
+ {ok,Code,St};
{error,Error} ->
Es = [{Lfile,[{none,compile,{write_error,Error}}]}],
{error,St#compile{errors=St#compile.errors ++ Es}}
@@ -1718,6 +1741,8 @@ help(_) ->
%% compile(AbsFileName, Outfilename, Options)
%% Compile entry point for erl_compile.
+-spec compile(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile(File0, _OutFile, Options) ->
pre_load(),
File = shorten_filename(File0),
@@ -1726,6 +1751,8 @@ compile(File0, _OutFile, Options) ->
Other -> Other
end.
+-spec compile_beam(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile_beam(File0, _OutFile, Opts) ->
File = shorten_filename(File0),
case file(File, [from_beam|make_erl_options(Opts)]) of
@@ -1733,6 +1760,8 @@ compile_beam(File0, _OutFile, Opts) ->
Other -> Other
end.
+-spec compile_asm(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile_asm(File0, _OutFile, Opts) ->
File = shorten_filename(File0),
case file(File, [from_asm|make_erl_options(Opts)]) of
@@ -1740,6 +1769,8 @@ compile_asm(File0, _OutFile, Opts) ->
Other -> Other
end.
+-spec compile_core(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile_core(File0, _OutFile, Opts) ->
File = shorten_filename(File0),
case file(File, [from_core|make_erl_options(Opts)]) of
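Note (illustration, not part of the patch): the thread running through the compile.erl changes is that the code being transformed no longer lives in #compile.code; every pass is now an arity-2 fun that is given the code and the state and returns {ok,Code,St} or {error,St}. A hedged sketch of a pass under the new convention; my_pass and its transform/2 helper are invented:

    my_pass(Code0, #compile{options=Opts}=St) ->
        case transform(Code0, Opts) of            %hypothetical helper
            {ok,Code} ->
                {ok,Code,St};
            {error,Es} ->
                {error,St#compile{errors=St#compile.errors ++ Es}}
        end.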
diff --git a/lib/compiler/src/core_scan.erl b/lib/compiler/src/core_scan.erl
index 11b52f6c5f..15bfc78c8b 100644
--- a/lib/compiler/src/core_scan.erl
+++ b/lib/compiler/src/core_scan.erl
@@ -49,13 +49,37 @@
-import(lists, [reverse/1]).
+-type location() :: integer().
+-type category() :: atom().
+-type symbol() :: atom() | float() | integer() | string().
+-type token() :: {category(), Anno :: location(), symbol()}
+ | {category(), Anno :: location()}.
+-type tokens() :: [token()].
+-type error_description() :: term().
+-type error_info() :: {erl_anno:location(), module(), error_description()}.
+
%% string([Char]) ->
%% string([Char], StartPos) ->
%% {ok, [Tok], EndPos} |
%% {error, {Pos,core_scan,What}, EndPos}
+-spec string(String) -> Return when
+ String :: string(),
+ Return :: {'ok', Tokens :: tokens(), EndLocation}
+ | {'error', ErrorInfo :: error_info(), ErrorLocation},
+ EndLocation :: location(),
+ ErrorLocation :: location().
+
string(Cs) -> string(Cs, 1).
+-spec string(String, StartLocation) -> Return when
+ String :: string(),
+ Return :: {'ok', Tokens :: tokens(), EndLocation}
+ | {'error', ErrorInfo :: error_info(), ErrorLocation},
+ StartLocation :: location(),
+ EndLocation :: location(),
+ ErrorLocation :: location().
+
string(Cs, Sp) ->
%% Add an 'eof' to always get correct handling.
case string_pre_scan(Cs, [], Sp) of
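Note (illustration, not part of the patch): the new string/1,2 specs make the scanner's contract explicit, a token list plus end location on success, or an error_info() tuple on failure. A hedged sketch; the Core Erlang text is a made-up minimal module header:

    {ok,Toks,_EndLoc} = core_scan:string("module 'm' [] attributes [] end"),
    {ok,Toks2,_} = core_scan:string("'ok'", 42).  %scan with an explicit start location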
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 50d28c0a5f..3673a339f6 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -1893,10 +1893,10 @@ case_opt_arg_1(E0, Cs0, LitExpr) ->
true ->
E = case_opt_compiler_generated(E0),
Cs = case_opt_nomatch(E, Cs0, LitExpr),
- case cerl:data_type(E) of
- {atomic,_} ->
+ case cerl:is_literal(E) of
+ true ->
case_opt_lit(E, Cs);
- _ ->
+ false ->
case_opt_data(E, Cs)
end
end.
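Note (illustration, not part of the patch): case_opt now classifies the clause argument by asking whether it is a literal instead of inspecting cerl:data_type/1. For reference, a hedged illustration of that predicate with invented terms:

    true = cerl:is_literal(cerl:abstract({a,[1,2,3]})),
    false = cerl:is_literal(cerl:c_cons(cerl:c_var(x), cerl:c_nil())).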
diff --git a/lib/compiler/src/sys_pre_attributes.erl b/lib/compiler/src/sys_pre_attributes.erl
index bc93c85989..67adae5acf 100644
--- a/lib/compiler/src/sys_pre_attributes.erl
+++ b/lib/compiler/src/sys_pre_attributes.erl
@@ -25,10 +25,10 @@
-define(OPTION_TAG, attributes).
--record(state, {forms,
- pre_ops = [],
- post_ops = [],
- options}).
+-record(state, {forms :: [form()],
+ pre_ops = [] :: [op()],
+ post_ops = [] :: [op()],
+ options :: [option()]}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Inserts, deletes and replaces Erlang compiler attributes.
@@ -59,9 +59,23 @@
%% due to that the pre_transform pass did not find the attribute plus
%% all insert operations.
+-type attribute() :: atom().
+-type value() :: term().
+-type form() :: {function, integer(), atom(), arity(), _}
+ | {attribute, integer(), attribute(), _}.
+-type option() :: compile:option()
+ | {'attribute', 'insert', attribute(), value()}
+ | {'attribute', 'replace', attribute(), value()}
+ | {'attribute', 'delete', attribute()}.
+-type op() :: {'insert', attribute(), value()}
+ | {'replace', attribute(), value()}
+ | {'delete', attribute()}.
+
+-spec parse_transform([form()], [option()]) -> [form()].
+
parse_transform(Forms, Options) ->
S = #state{forms = Forms, options = Options},
- S2 = init_transform(S),
+ S2 = init_transform(Options, S),
report_verbose("Pre options: ~p~n", [S2#state.pre_ops], S2),
report_verbose("Post options: ~p~n", [S2#state.post_ops], S2),
S3 = pre_transform(S2),
@@ -71,13 +85,6 @@ parse_transform(Forms, Options) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Computes the lists of pre_ops and post_ops that are
%% used in the real transformation.
-init_transform(S) ->
- case S#state.options of
- Options when is_list(Options) ->
- init_transform(Options, S);
- Option ->
- init_transform([Option], S)
- end.
init_transform([{attribute, insert, Name, Val} | Tail], S) ->
Op = {insert, Name, Val},
@@ -92,12 +99,9 @@ init_transform([{attribute, delete, Name} | Tail], S) ->
Op = {delete, Name},
PreOps = [Op | S#state.pre_ops],
init_transform(Tail, S#state{pre_ops = PreOps});
-init_transform([], S) ->
- S;
init_transform([_ | T], S) ->
init_transform(T, S);
-init_transform(BadOpt, S) ->
- report_error("Illegal option (ignored): ~p~n", [BadOpt], S),
+init_transform([], S) ->
S.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -176,18 +180,9 @@ attrs([], _, _) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Report functions.
%%
-%% Errors messages are controlled with the 'report_errors' compiler option
%% Warning messages are controlled with the 'report_warnings' compiler option
%% Verbose messages are controlled with the 'verbose' compiler option
-report_error(Format, Args, S) ->
- case is_error(S) of
- true ->
- io:format("~p: * ERROR * " ++ Format, [?MODULE | Args]);
- false ->
- ok
- end.
-
report_warning(Format, Args, S) ->
case is_warning(S) of
true ->
@@ -204,9 +199,6 @@ report_verbose(Format, Args, S) ->
ok
end.
-is_error(S) ->
- lists:member(report_errors, S#state.options) or is_verbose(S).
-
is_warning(S) ->
lists:member(report_warnings, S#state.options) or is_verbose(S).
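Note (illustration, not part of the patch): sys_pre_attributes is driven purely by compiler options, which the new option() type spells out; a non-list Options argument is no longer tolerated. A hedged usage sketch with an invented file name and attribute values:

    compile:file("my_mod.erl",
                 [{parse_transform,sys_pre_attributes},
                  {attribute,insert,build_id,"abc-123"},
                  {attribute,delete,vsn},
                  report]).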
diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl
index 3627cdb7cd..47c1567f10 100644
--- a/lib/compiler/src/v3_codegen.erl
+++ b/lib/compiler/src/v3_codegen.erl
@@ -69,6 +69,10 @@
stk=[], %Stack table
res=[]}). %Reserved regs: [{reserved,I,V}]
+-type life_module() :: {module(),_,_,[_]}.
+
+-spec module(life_module(), [compile:option()]) -> {'ok',beam_asm:module_code()}.
+
module({Mod,Exp,Attr,Forms}, _Options) ->
{Fs,St} = functions(Forms, {atom,Mod}),
{ok,{Mod,Exp,Attr,Fs,St#cg.lcount}}.
diff --git a/lib/compiler/src/v3_kernel_pp.erl b/lib/compiler/src/v3_kernel_pp.erl
index d5f6ee19c9..187e69a22c 100644
--- a/lib/compiler/src/v3_kernel_pp.erl
+++ b/lib/compiler/src/v3_kernel_pp.erl
@@ -47,7 +47,7 @@
canno(Cthing) -> element(2, Cthing).
--spec format(cerl:cerl()) -> iolist().
+-spec format(#k_mdef{}) -> iolist().
format(Node) -> format(Node, #ctxt{}).
diff --git a/lib/compiler/src/v3_life.erl b/lib/compiler/src/v3_life.erl
index 0f2aeda87f..be3ade47ff 100644
--- a/lib/compiler/src/v3_life.erl
+++ b/lib/compiler/src/v3_life.erl
@@ -52,10 +52,15 @@
-include("v3_kernel.hrl").
-include("v3_life.hrl").
+-type fa() :: {atom(),arity()}.
+
%% These are not defined in v3_kernel.hrl.
get_kanno(Kthing) -> element(2, Kthing).
%%set_kanno(Kthing, Anno) -> setelement(2, Kthing, Anno).
+-spec module(#k_mdef{}, [compile:option()]) ->
+ {'ok',{module(),[fa()],[_],[_]}}.
+
module(#k_mdef{name=M,exports=Es,attributes=As,body=Fs0}, _Opts) ->
Fs1 = functions(Fs0, []),
{ok,{M,Es,As,Fs1}}.
@@ -416,6 +421,10 @@ add_var(V, F, L, Vdb) ->
vdb_new(Vs) ->
sort([{V,0,0} || {var,V} <- Vs]).
+-type var() :: atom().
+
+-spec vdb_find(var(), [vdb_entry()]) -> 'error' | vdb_entry().
+
vdb_find(V, Vdb) ->
case lists:keyfind(V, 1, Vdb) of
false -> error;
diff --git a/lib/compiler/src/v3_life.hrl b/lib/compiler/src/v3_life.hrl
index 9d03a86ccd..5c76312067 100644
--- a/lib/compiler/src/v3_life.hrl
+++ b/lib/compiler/src/v3_life.hrl
@@ -20,8 +20,10 @@
%% This record contains variable life-time annotation for a
%% kernel expression. Added by v3_life, used by v3_codegen.
+-type vdb_entry() :: {atom(),non_neg_integer(),non_neg_integer()}.
+
-record(l, {ke, %Kernel expression
- i=0, %Op number
- vdb=[], %Variable database
- a}). %Core annotation
+ i=0 :: non_neg_integer(), %Op number
+ vdb=[] :: [vdb_entry()], %Variable database
+ a=[] :: [term()]}). %Core annotation
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index 3cb49433ce..adb96fb87d 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -19,7 +19,7 @@
%%
-module(lc_SUITE).
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+-export([all/0, suite/0, groups/0, init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
basic/1,deeply_nested/1,no_generator/1,
@@ -32,11 +32,11 @@ suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,1}}].
-all() ->
+all() ->
test_lib:recompile(?MODULE),
[{group,p}].
-groups() ->
+groups() ->
[{p,test_lib:parallel(),
[basic,
deeply_nested,
@@ -214,6 +214,7 @@ shadow(Config) when is_list(Config) ->
ok.
effect(Config) when is_list(Config) ->
+ ct:timetrap({minutes,10}),
[{42,{a,b,c}}] =
do_effect(fun(F, L) ->
[F({V1,V2}) ||
@@ -240,7 +241,7 @@ do_effect(Lc, L) ->
lists:reverse(erase(?MODULE)).
id(I) -> I.
-
+
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args,_}|_]}}) -> ok;
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Arity,_}|_]}})
when length(Args) =:= Arity ->
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 0031f9b962..38b49c7a76 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -120,7 +120,7 @@
# endif
#endif
-#if defined(NID_chacha20) && !defined(OPENSSL_NO_CHACHA) && !defined(OPENSSL_NO_POLY1305)
+#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,1,0)
# define HAVE_CHACHA20_POLY1305
#endif
@@ -138,27 +138,6 @@
#include <openssl/ecdsa.h>
#endif
-/*
- * FIXME: The support for ChaCha and Poly1305 is based on pre-releases
- * of OpenSSL 1.1.0. It is seriously broken when used with the released
- * OpenSSL 1.1.0 or later.
- */
-#undef HAVE_CHACHA20_POLY1305
-
-#if defined(HAVE_CHACHA20_POLY1305)
-#include <openssl/chacha.h>
-#include <openssl/poly1305.h>
-
-#if !defined(CHACHA20_NONCE_LEN)
-# define CHACHA20_NONCE_LEN 8
-#endif
-#if !defined(POLY1305_TAG_LEN)
-# define POLY1305_TAG_LEN 16
-#endif
-
-#endif
-
-
#ifdef VALGRIND
# include <valgrind/memcheck.h>
@@ -941,7 +920,7 @@ static ERL_NIF_TERM algo_hash[8]; /* increase when extending the list */
static int algo_pubkey_cnt, algo_pubkey_fips_cnt;
static ERL_NIF_TERM algo_pubkey[7]; /* increase when extending the list */
static int algo_cipher_cnt, algo_cipher_fips_cnt;
-static ERL_NIF_TERM algo_cipher[23]; /* increase when extending the list */
+static ERL_NIF_TERM algo_cipher[24]; /* increase when extending the list */
static void init_algorithms_types(ErlNifEnv* env)
{
@@ -2093,71 +2072,61 @@ out_err:
}
#endif /* HAVE_GCM_EVP_DECRYPT_BUG */
-#if defined(HAVE_CHACHA20_POLY1305)
-static void
-poly1305_update_with_length(poly1305_state *poly1305,
- const unsigned char *data, size_t data_len)
-{
- size_t j = data_len;
- unsigned char length_bytes[8];
- unsigned i;
-
- for (i = 0; i < sizeof(length_bytes); i++) {
- length_bytes[i] = j;
- j >>= 8;
- }
-
- CRYPTO_poly1305_update(poly1305, data, data_len);
- CRYPTO_poly1305_update(poly1305, length_bytes, sizeof(length_bytes));
-}
-#endif
static ERL_NIF_TERM chacha20_poly1305_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Key,Iv,AAD,In) */
#if defined(HAVE_CHACHA20_POLY1305)
+ EVP_CIPHER_CTX *ctx;
+ const EVP_CIPHER *cipher = NULL;
ErlNifBinary key, iv, aad, in;
- unsigned char *outp;
+ unsigned char *outp, *tagp;
ERL_NIF_TERM out, out_tag;
- ErlNifUInt64 in_len_64;
- unsigned char poly1305_key[32];
- poly1305_state poly1305;
+ int len;
if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 32
- || !enif_inspect_binary(env, argv[1], &iv) || iv.size != CHACHA20_NONCE_LEN
+ || !enif_inspect_binary(env, argv[1], &iv) || iv.size == 0 || iv.size > 16
|| !enif_inspect_iolist_as_binary(env, argv[2], &aad)
|| !enif_inspect_iolist_as_binary(env, argv[3], &in)) {
return enif_make_badarg(env);
}
- /* Take from OpenSSL patch set/LibreSSL:
- *
- * The underlying ChaCha implementation may not overflow the block
- * counter into the second counter word. Therefore we disallow
- * individual operations that work on more than 2TB at a time.
- * in_len_64 is needed because, on 32-bit platforms, size_t is only
- * 32-bits and this produces a warning because it's always false.
- * Casting to uint64_t inside the conditional is not sufficient to stop
- * the warning. */
- in_len_64 = in.size;
- if (in_len_64 >= (1ULL << 32) * 64 - 64)
- return enif_make_badarg(env);
+ cipher = EVP_chacha20_poly1305();
+
+ ctx = EVP_CIPHER_CTX_new();
+
+ if (EVP_EncryptInit_ex(ctx, cipher, NULL, NULL, NULL) != 1)
+ goto out_err;
- memset(poly1305_key, 0, sizeof(poly1305_key));
- CRYPTO_chacha_20(poly1305_key, poly1305_key, sizeof(poly1305_key), key.data, iv.data, 0);
+ EVP_CIPHER_CTX_set_padding(ctx, 0);
+
+ if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, iv.size, NULL) != 1)
+ goto out_err;
+ if (EVP_EncryptInit_ex(ctx, NULL, NULL, key.data, iv.data) != 1)
+ goto out_err;
+ if (EVP_EncryptUpdate(ctx, NULL, &len, aad.data, aad.size) != 1)
+ goto out_err;
outp = enif_make_new_binary(env, in.size, &out);
- CRYPTO_poly1305_init(&poly1305, poly1305_key);
- poly1305_update_with_length(&poly1305, aad.data, aad.size);
- CRYPTO_chacha_20(outp, in.data, in.size, key.data, iv.data, 1);
- poly1305_update_with_length(&poly1305, outp, in.size);
+ if (EVP_EncryptUpdate(ctx, outp, &len, in.data, in.size) != 1)
+ goto out_err;
+ if (EVP_EncryptFinal_ex(ctx, outp+len, &len) != 1)
+ goto out_err;
+
+ tagp = enif_make_new_binary(env, 16, &out_tag);
- CRYPTO_poly1305_finish(&poly1305, enif_make_new_binary(env, POLY1305_TAG_LEN, &out_tag));
+ if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tagp) != 1)
+ goto out_err;
+
+ EVP_CIPHER_CTX_free(ctx);
CONSUME_REDS(env, in);
return enif_make_tuple2(env, out, out_tag);
+out_err:
+ EVP_CIPHER_CTX_free(ctx);
+ return atom_error;
#else
return enif_raise_exception(env, atom_notsup);
#endif
@@ -2166,53 +2135,52 @@ static ERL_NIF_TERM chacha20_poly1305_encrypt(ErlNifEnv* env, int argc, const ER
static ERL_NIF_TERM chacha20_poly1305_decrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Key,Iv,AAD,In,Tag) */
#if defined(HAVE_CHACHA20_POLY1305)
+ EVP_CIPHER_CTX *ctx;
+ const EVP_CIPHER *cipher = NULL;
ErlNifBinary key, iv, aad, in, tag;
unsigned char *outp;
ERL_NIF_TERM out;
- ErlNifUInt64 in_len_64;
- unsigned char poly1305_key[32];
- unsigned char mac[POLY1305_TAG_LEN];
- poly1305_state poly1305;
+ int len;
if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 32
- || !enif_inspect_binary(env, argv[1], &iv) || iv.size != CHACHA20_NONCE_LEN
+ || !enif_inspect_binary(env, argv[1], &iv) || iv.size == 0 || iv.size > 16
|| !enif_inspect_iolist_as_binary(env, argv[2], &aad)
|| !enif_inspect_iolist_as_binary(env, argv[3], &in)
- || !enif_inspect_iolist_as_binary(env, argv[4], &tag) || tag.size != POLY1305_TAG_LEN) {
+ || !enif_inspect_iolist_as_binary(env, argv[4], &tag) || tag.size != 16) {
return enif_make_badarg(env);
}
- /* Take from OpenSSL patch set/LibreSSL:
- *
- * The underlying ChaCha implementation may not overflow the block
- * counter into the second counter word. Therefore we disallow
- * individual operations that work on more than 2TB at a time.
- * in_len_64 is needed because, on 32-bit platforms, size_t is only
- * 32-bits and this produces a warning because it's always false.
- * Casting to uint64_t inside the conditional is not sufficient to stop
- * the warning. */
- in_len_64 = in.size;
- if (in_len_64 >= (1ULL << 32) * 64 - 64)
- return enif_make_badarg(env);
-
- memset(poly1305_key, 0, sizeof(poly1305_key));
- CRYPTO_chacha_20(poly1305_key, poly1305_key, sizeof(poly1305_key), key.data, iv.data, 0);
+ cipher = EVP_chacha20_poly1305();
- CRYPTO_poly1305_init(&poly1305, poly1305_key);
- poly1305_update_with_length(&poly1305, aad.data, aad.size);
- poly1305_update_with_length(&poly1305, in.data, in.size);
- CRYPTO_poly1305_finish(&poly1305, mac);
+ ctx = EVP_CIPHER_CTX_new();
- if (memcmp(mac, tag.data, POLY1305_TAG_LEN) != 0)
- return atom_error;
+ if (EVP_DecryptInit_ex(ctx, cipher, NULL, NULL, NULL) != 1)
+ goto out_err;
+ if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, iv.size, NULL) != 1)
+ goto out_err;
+ if (EVP_DecryptInit_ex(ctx, NULL, NULL, key.data, iv.data) != 1)
+ goto out_err;
+ if (EVP_DecryptUpdate(ctx, NULL, &len, aad.data, aad.size) != 1)
+ goto out_err;
outp = enif_make_new_binary(env, in.size, &out);
- CRYPTO_chacha_20(outp, in.data, in.size, key.data, iv.data, 1);
+ if (EVP_DecryptUpdate(ctx, outp, &len, in.data, in.size) != 1)
+ goto out_err;
+ if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, tag.size, tag.data) != 1)
+ goto out_err;
+ if (EVP_DecryptFinal_ex(ctx, outp+len, &len) != 1)
+ goto out_err;
+
+ EVP_CIPHER_CTX_free(ctx);
CONSUME_REDS(env, in);
return out;
+
+out_err:
+ EVP_CIPHER_CTX_free(ctx);
+ return atom_error;
#else
return enif_raise_exception(env, atom_notsup);
#endif
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 0b62964efa..9767a13dfc 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -159,10 +159,11 @@ cmac(Type, Key, Data, MacSize) ->
des3_cbc | des3_cbf | des3_cfb | des_ede3 |
blowfish_cbc | blowfish_cfb64 | blowfish_ofb64 |
aes_cbc128 | aes_cfb8 | aes_cfb128 | aes_cbc256 | aes_ige256 |
- aes_cbc |
+ aes_cbc |
rc2_cbc,
- Key::iodata(), Ivec::binary(), Data::iodata()) -> binary();
- (aes_gcm | chacha20_poly1305, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata()}) -> {binary(), binary()}.
+ Key::iodata(), Ivec::binary(), Data::iodata()) -> binary();
+ (aes_gcm | chacha20_poly1305, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata()}) -> {binary(), binary()};
+ (aes_gcm, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata(), TagLength::1..16}) -> {binary(), binary()}.
block_encrypt(Type, Key, Ivec, Data) when Type =:= des_cbc;
Type =:= des_cfb;
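
The widened block_encrypt/4 spec above adds an aes_gcm clause that carries an explicit TagLength next to the existing {AAD, Data} AEAD form. A minimal usage sketch, not part of the patch and assuming the matching block_decrypt/4 AEAD clause; key, IV and payload are throw-away values:

    aead_demo() ->
        Key = crypto:strong_rand_bytes(32),
        IV  = crypto:strong_rand_bytes(12),
        AAD = <<"additional data">>,
        Msg = <<"plaintext">>,
        %% chacha20_poly1305 returns the ciphertext and a 16-byte tag.
        {Ct, Tag} = crypto:block_encrypt(chacha20_poly1305, Key, IV, {AAD, Msg}),
        Msg = crypto:block_decrypt(chacha20_poly1305, Key, IV, {AAD, Ct, Tag}),
        %% A wrong tag makes the NIF return the atom 'error' (see the C change above).
        error = crypto:block_decrypt(chacha20_poly1305, Key, IV,
                                     {AAD, Ct, crypto:strong_rand_bytes(16)}),
        %% aes_gcm with the new TagLength element yields a truncated tag.
        {_Ct2, Tag2} = crypto:block_encrypt(aes_gcm, Key, IV, {AAD, Msg, 8}),
        8 = byte_size(Tag2),
        ok.
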
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 0c3b7a0445..31f4e89ffe 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -2249,16 +2249,49 @@ aes_gcm() ->
1} %% TagLength
].
-%% http://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04
+%% https://tools.ietf.org/html/rfc7539#appendix-A.5
chacha20_poly1305() ->
[
- {chacha20_poly1305, hexstr2bin("4290bcb154173531f314af57f3be3b500" %% Key
- "6da371ece272afa1b5dbdd1100a1007"),
- hexstr2bin("86d09974840bded2a5ca"), %% PlainText
- hexstr2bin("cd7cf67be39c794a"), %% Nonce
- hexstr2bin("87e229d4500845a079c0"), %% AAD
- hexstr2bin("e3e446f7ede9a19b62a4"), %% CipherText
- hexstr2bin("677dabf4e3d24b876bb284753896e1d6")} %% CipherTag
+ {chacha20_poly1305,
+ hexstr2bin("1c9240a5eb55d38af333888604f6b5f0" %% Key
+ "473917c1402b80099dca5cbc207075c0"),
+ hexstr2bin("496e7465726e65742d44726166747320" %% PlainText
+ "61726520647261667420646f63756d65"
+ "6e74732076616c696420666f72206120"
+ "6d6178696d756d206f6620736978206d"
+ "6f6e74687320616e64206d6179206265"
+ "20757064617465642c207265706c6163"
+ "65642c206f72206f62736f6c65746564"
+ "206279206f7468657220646f63756d65"
+ "6e747320617420616e792074696d652e"
+ "20497420697320696e617070726f7072"
+ "6961746520746f2075736520496e7465"
+ "726e65742d4472616674732061732072"
+ "65666572656e6365206d617465726961"
+ "6c206f7220746f206369746520746865"
+ "6d206f74686572207468616e20617320"
+ "2fe2809c776f726b20696e2070726f67"
+ "726573732e2fe2809d"),
+ hexstr2bin("000000000102030405060708"), %% Nonce
+ hexstr2bin("f33388860000000000004e91"), %% AAD
+ hexstr2bin("64a0861575861af460f062c79be643bd" %% CipherText
+ "5e805cfd345cf389f108670ac76c8cb2"
+ "4c6cfc18755d43eea09ee94e382d26b0"
+ "bdb7b73c321b0100d4f03b7f355894cf"
+ "332f830e710b97ce98c8a84abd0b9481"
+ "14ad176e008d33bd60f982b1ff37c855"
+ "9797a06ef4f0ef61c186324e2b350638"
+ "3606907b6a7c02b0f9f6157b53c867e4"
+ "b9166c767b804d46a59b5216cde7a4e9"
+ "9040c5a40433225ee282a1b0a06c523e"
+ "af4534d7f83fa1155b0047718cbc546a"
+ "0d072b04b3564eea1b422273f548271a"
+ "0bb2316053fa76991955ebd63159434e"
+ "cebb4e466dae5a1073a6727627097a10"
+ "49e617d91d361094fa68f0ff77987130"
+ "305beaba2eda04df997b714d6c6f2c29"
+ "a6ad5cb4022b02709b"),
+ hexstr2bin("eead9d67890cbb22392336fea1851f38")} %% CipherTag
].
rsa_plain() ->
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index b5510731e0..ae1e4d8c38 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -94,9 +94,9 @@ loop(#server_state{parent = Parent} = State,
{AnalPid, cserver, CServer, Plt} ->
send_codeserver_plt(Parent, CServer, Plt),
loop(State, Analysis, ExtCalls);
- {AnalPid, done, Plt, DocPlt} ->
+ {AnalPid, done, MiniPlt, DocPlt} ->
send_ext_calls(Parent, ExtCalls),
- send_analysis_done(Parent, Plt, DocPlt);
+ send_analysis_done(Parent, MiniPlt, DocPlt);
{AnalPid, ext_calls, NewExtCalls} ->
loop(State, Analysis, NewExtCalls);
{AnalPid, ext_types, ExtTypes} ->
@@ -114,6 +114,7 @@ loop(#server_state{parent = Parent} = State,
%% The Analysis
%%--------------------------------------------------------------------
+%% Calls to erlang:garbage_collect() help to reduce the heap size.
analysis_start(Parent, Analysis, LegalWarnings) ->
CServer = dialyzer_codeserver:new(),
Plt = Analysis#analysis.plt,
@@ -150,12 +151,9 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
+ erlang:garbage_collect(),
?timing(State#analysis_state.timing_server, "remote",
- begin
- TmpCServer3 =
- dialyzer_utils:process_record_remote_types(TmpCServer2),
- dialyzer_contracts:process_contract_remote_types(TmpCServer3)
- end)
+ contracts_and_records(TmpCServer2))
catch
throw:{error, _ErrorMsg} = Error -> exit(Error)
end,
@@ -164,48 +162,75 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes),
State0 = State#analysis_state{plt = NewPlt1},
dump_callgraph(Callgraph, State0, Analysis),
- State1 = State0#analysis_state{codeserver = NewCServer},
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
- Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
+ Plt1_a = dialyzer_plt:delete_list(NewPlt1, AllNodes),
+ Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer),
+ State1 = State0#analysis_state{codeserver = NewCServer, plt = Plt1},
Exports = dialyzer_codeserver:get_exports(NewCServer),
+ NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
+ NonExportsList = sets:to_list(NonExports),
NewCallgraph =
case Analysis#analysis.race_detection of
true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
false -> Callgraph
end,
- State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}),
+ State2 = analyze_callgraph(NewCallgraph, State1),
+ #analysis_state{plt = MiniPlt2, doc_plt = DocPlt} = State2,
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
- NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
- NonExportsList = sets:to_list(NonExports),
- Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList),
- send_codeserver_plt(Parent, CServer, State2#analysis_state.plt),
- send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt).
+ %% Since the PLT is never used, a dummy is sent:
+ DummyPlt = dialyzer_plt:new(),
+ send_codeserver_plt(Parent, CServer, DummyPlt),
+ MiniPlt3 = dialyzer_plt:delete_list(MiniPlt2, NonExportsList),
+ send_analysis_done(Parent, MiniPlt3, DocPlt).
+
+contracts_and_records(CodeServer) ->
+ Fun = contrs_and_recs(CodeServer),
+ {Pid, Ref} = erlang:spawn_monitor(Fun),
+ dialyzer_codeserver:give_away(CodeServer, Pid),
+ Pid ! {self(), go},
+ receive {'DOWN', Ref, process, Pid, Return} ->
+ Return
+ end.
+
+-spec contrs_and_recs(dialyzer_codeserver:codeserver()) ->
+ fun(() -> no_return()).
+
+contrs_and_recs(TmpCServer2) ->
+ fun() ->
+ Parent = receive {Pid, go} -> Pid end,
+ {TmpCServer3, RecordDict} =
+ dialyzer_utils:process_record_remote_types(TmpCServer2),
+ TmpServer4 =
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3,
+ RecordDict),
+ dialyzer_codeserver:give_away(TmpServer4, Parent),
+ exit(TmpServer4)
+ end.
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
+ plt = Plt,
timing_server = TimingServer,
parent = Parent,
solvers = Solvers} = State) ->
- Plt = dialyzer_plt:insert_callbacks(State#analysis_state.plt, Codeserver),
- {NewPlt, NewDocPlt} =
- case State#analysis_state.analysis_type of
- plt_build ->
- NewPlt0 =
- dialyzer_succ_typings:analyze_callgraph(Callgraph, Plt, Codeserver,
- TimingServer, Solvers, Parent),
- {NewPlt0, DocPlt};
- succ_typings ->
- {Warnings, NewPlt0, NewDocPlt0} =
- dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- TimingServer, Solvers, Parent),
- Warnings1 = filter_warnings(Warnings, Codeserver),
- send_warnings(State#analysis_state.parent, Warnings1),
- {NewPlt0, NewDocPlt0}
- end,
- dialyzer_callgraph:delete(Callgraph),
- State#analysis_state{plt = NewPlt, doc_plt = NewDocPlt}.
+ case State#analysis_state.analysis_type of
+ plt_build ->
+ NewMiniPlt =
+ dialyzer_succ_typings:analyze_callgraph(Callgraph, Plt, Codeserver,
+ TimingServer, Solvers, Parent),
+ dialyzer_callgraph:delete(Callgraph),
+ State#analysis_state{plt = NewMiniPlt, doc_plt = DocPlt};
+ succ_typings ->
+ {Warnings, NewMiniPlt, NewDocPlt} =
+ dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
+ TimingServer, Solvers, Parent),
+ dialyzer_callgraph:delete(Callgraph),
+ Warnings1 = filter_warnings(Warnings, Codeserver),
+ send_warnings(State#analysis_state.parent, Warnings1),
+ State#analysis_state{plt = NewMiniPlt, doc_plt = NewDocPlt}
+ end.
%%--------------------------------------------------------------------
%% Build the callgraph and fill the codeserver.
@@ -562,8 +587,9 @@ is_ok_fun({_Filename, _Line, {_M, _F, _A} = MFA}, Codeserver) ->
is_ok_tag(Tag, {_F, _L, MorMFA}, Codeserver) ->
not dialyzer_utils:is_suppressed_tag(MorMFA, Tag, Codeserver).
-send_analysis_done(Parent, Plt, DocPlt) ->
- Parent ! {self(), done, Plt, DocPlt},
+send_analysis_done(Parent, MiniPlt, DocPlt) ->
+ ok = dialyzer_plt:give_away(MiniPlt, Parent),
+ Parent ! {self(), done, MiniPlt, DocPlt},
ok.
send_ext_calls(_Parent, none) ->
@@ -576,7 +602,7 @@ send_ext_types(Parent, ExtTypes) ->
Parent ! {self(), ext_types, ExtTypes},
ok.
-send_codeserver_plt(Parent, CServer, Plt ) ->
+send_codeserver_plt(Parent, CServer, Plt) ->
Parent ! {self(), cserver, CServer, Plt},
ok.
@@ -595,14 +621,14 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
{_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
Msg = {call_to_missing, [M, F, A]},
- {File, Line} = find_call_file_and_line(FunCode, To),
+ {File, Line} = find_call_file_and_line(FromMFA, FunCode, To, CodeServer),
WarningInfo = {File, Line, FromMFA},
NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc],
format_bad_calls(Left, CodeServer, NewAcc);
format_bad_calls([], _CodeServer, Acc) ->
Acc.
-find_call_file_and_line(Tree, MFA) ->
+find_call_file_and_line({Module, _, _}, Tree, MFA, CodeServer) ->
Fun =
fun(SubTree, Acc) ->
case cerl:is_c_call(SubTree) of
@@ -615,7 +641,7 @@ find_call_file_and_line(Tree, MFA) ->
case {cerl:concrete(M), cerl:concrete(F), A} of
MFA ->
Ann = cerl:get_ann(SubTree),
- [{get_file(Ann), get_line(Ann)}|Acc];
+ [{get_file(CodeServer, Module, Ann), get_line(Ann)}|Acc];
{erlang, make_fun, 3} ->
[CA1, CA2, CA3] = cerl:call_args(SubTree),
case
@@ -631,7 +657,8 @@ find_call_file_and_line(Tree, MFA) ->
of
MFA ->
Ann = cerl:get_ann(SubTree),
- [{get_file(Ann), get_line(Ann)}|Acc];
+ [{get_file(CodeServer, Module, Ann),
+ get_line(Ann)}|Acc];
_ ->
Acc
end;
@@ -651,8 +678,10 @@ get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file(Codeserver, Module, [{file, FakeFile}|_]) ->
+ dialyzer_codeserver:translate_fake_file(Codeserver, Module, FakeFile);
+get_file(Codeserver, Module, [_|Tail]) ->
+ get_file(Codeserver, Module, Tail).
-spec dump_callgraph(dialyzer_callgraph:callgraph(), #analysis_state{}, #analysis{}) ->
'ok'.
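
contracts_and_records/1 above runs the remote-type pass in a short-lived worker: the codeserver's ETS tables are handed over with dialyzer_codeserver:give_away/2, the worker hands them back and returns its result as the exit reason, so the large intermediate heap dies with the worker (the erlang:garbage_collect() calls serve the same memory goal). A standalone sketch of that ownership-transfer pattern, with made-up table and function names:

    %% Run WorkFun(Tab) in a temporary process that owns Tab while it works.
    %% The result travels back in the 'DOWN' reason, as in contrs_and_recs/1.
    run_in_worker(Tab, WorkFun) ->
        {Pid, Ref} =
            erlang:spawn_monitor(
              fun() ->
                      Owner = receive {OwnerPid, go} -> OwnerPid end,
                      Result = WorkFun(Tab),
                      true = ets:give_away(Tab, Owner, any),
                      exit(Result)
              end),
        true = ets:give_away(Tab, Pid, any),
        Pid ! {self(), go},
        receive {'DOWN', Ref, process, Pid, Result} -> Result end.
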
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index e79a5d1cd9..d380ab2a50 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -55,9 +55,9 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
_ ->
MFA = {Module,module_info,0},
{_Var,Code} = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
- File = get_file(cerl:get_ann(Code)),
+ File = get_file(Codeserver, Module, cerl:get_ann(Code)),
State = #state{plt = Plt, filename = File, behlines = BehLines,
- codeserver = Codeserver, records = Records},
+ codeserver = Codeserver, records = Records},
Warnings = get_warnings(Module, Behaviours, State),
[add_tag_warning_info(Module, W, State) || W <- Warnings]
end.
@@ -206,12 +206,15 @@ add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity},
State#state.codeserver),
Anns = cerl:get_ann(FunCode),
- WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}},
+ File = get_file(State#state.codeserver, Module, Anns),
+ WarningInfo = {File, get_line(Anns), {Module, Fun, Arity}},
{?WARN_BEHAVIOUR, WarningInfo, Warn}.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file(Codeserver, Module, [{file, FakeFile}|_]) ->
+ dialyzer_codeserver:translate_fake_file(Codeserver, Module, FakeFile);
+get_file(Codeserver, Module, [_|Tail]) ->
+ get_file(Codeserver, Module, Tail).
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index 227ee2a892..68f3d7a240 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -112,7 +112,11 @@
-opaque callgraph() :: #callgraph{}.
--type active_digraph() :: {'d', digraph:graph()} | {'e', ets:tid(), ets:tid()}.
+-type active_digraph() :: {'d', digraph:graph()}
+ | {'e',
+ Out :: ets:tid(),
+ In :: ets:tid(),
+ Map :: ets:tid()}.
%%----------------------------------------------------------------------
@@ -241,24 +245,30 @@ find_non_local_calls([], Set) ->
-spec get_depends_on(scc() | module(), callgraph()) -> [scc()].
-get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In}}) ->
- case ets_lookup_dict(SCC, Out) of
- {ok, Value} -> Value;
- error -> []
- end;
+get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) ->
+ lookup_scc(SCC, Out, Maps);
get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) ->
digraph:out_neighbours(DG, SCC).
-spec get_required_by(scc() | module(), callgraph()) -> [scc()].
-get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In}}) ->
- case ets_lookup_dict(SCC, In) of
- {ok, Value} -> Value;
- error -> []
- end;
+get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
+ lookup_scc(SCC, In, Maps);
get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
digraph:in_neighbours(DG, SCC).
+lookup_scc(SCC, Table, Maps) ->
+ case ets_lookup_dict({'scc', SCC}, Maps) of
+ {ok, SCCInt} ->
+ case ets_lookup_dict(SCCInt, Table) of
+ {ok, Ints} ->
+ [ets:lookup_element(Maps, Int, 2) || Int <- Ints];
+ error ->
+ []
+ end;
+ error -> []
+ end.
+
%%----------------------------------------------------------------------
%% Handling of modules & SCCs
%%----------------------------------------------------------------------
@@ -575,9 +585,10 @@ digraph_delete(DG) ->
active_digraph_delete({'d', DG}) ->
digraph:delete(DG);
-active_digraph_delete({'e', Out, In}) ->
+active_digraph_delete({'e', Out, In, Maps}) ->
ets:delete(Out),
- ets:delete(In).
+ ets:delete(In),
+ ets:delete(Maps).
digraph_edges(DG) ->
digraph:edges(DG).
@@ -751,37 +762,28 @@ to_ps(#callgraph{} = CG, File, Args) ->
ok.
condensation(G) ->
- SCs = digraph_utils:strong_components(G),
- V2I = ets:new(condensation_v2i, []),
- I2C = ets:new(condensation_i2c, []),
- I2I = ets:new(condensation_i2i, [bag]),
- CFun =
- fun(SC, N) ->
- lists:foreach(fun(V) -> true = ets:insert(V2I, {V,N}) end, SC),
- true = ets:insert(I2C, {N, SC}),
- N + 1
- end,
- lists:foldl(CFun, 1, SCs),
- Fun1 =
- fun({V1, V2}) ->
- I1 = ets:lookup_element(V2I, V1, 2),
- I2 = ets:lookup_element(V2I, V2, 2),
- I1 =:= I2 orelse ets:insert(I2I, {I1, I2})
- end,
- lists:foreach(Fun1, digraph:edges(G)),
- Fun3 =
- fun({I1, I2}, {Out, In}) ->
- SC1 = ets:lookup_element(I2C, I1, 2),
- SC2 = ets:lookup_element(I2C, I2, 2),
- {dict:append(SC1, SC2, Out), dict:append(SC2, SC1, In)}
- end,
- {OutDict, InDict} = ets:foldl(Fun3, {dict:new(), dict:new()}, I2I),
- [OutETS, InETS] =
+ SCCs = digraph_utils:strong_components(G),
+ %% Assign unique numbers to SCCs:
+ Ints = lists:seq(1, length(SCCs)),
+ IntToSCC = lists:zip(Ints, SCCs),
+ IntScc = sofs:relation(IntToSCC, [{int, scc}]),
+ %% Substitute strong components for vertices in edges using the
+ %% unique numbers:
+ C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
+ I2V = sofs:relative_product(IntScc, C2V), % [{v, int}]
+ Es = sofs:relation(digraph:edges(G), [{v, v}]),
+ R1 = sofs:relative_product(I2V, Es),
+ R2 = sofs:relative_product(I2V, sofs:converse(R1)),
+ %% Create in- and out-neighbours:
+ In = sofs:relation_to_family(sofs:strict_relation(R2)),
+ R3 = sofs:converse(R2),
+ Out = sofs:relation_to_family(sofs:strict_relation(R3)),
+ [OutETS, InETS, MapsETS] =
[ets:new(Name,[{read_concurrency, true}]) ||
- Name <- [callgraph_deps_out, callgraph_deps_in]],
- ets:insert(OutETS, dict:to_list(OutDict)),
- ets:insert(InETS, dict:to_list(InDict)),
- ets:delete(V2I),
- ets:delete(I2C),
- ets:delete(I2I),
- {{'e', OutETS, InETS}, SCs}.
+ Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
+ ets:insert(OutETS, sofs:to_external(Out)),
+ ets:insert(InETS, sofs:to_external(In)),
+ %% Create mappings from SCCs to unique integers, and the inverse:
+ ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)),
+ ets:insert(MapsETS, IntToSCC),
+ {{'e', OutETS, InETS, MapsETS}, SCCs}.
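
The condensation/1 rewrite above replaces the two dict-filled tables with three ETS tables: Out and In keyed by a per-SCC integer, and Maps holding both {'scc', SCC} -> Int and Int -> SCC entries, which lookup_scc/3 uses to translate in both directions. A toy sketch of that layout and lookup, with invented contents:

    lookup_demo() ->
        Maps = ets:new(scc_map, []),
        Out  = ets:new(deps_out, []),
        SCC1 = [a, b], SCC2 = [c],
        true = ets:insert(Maps, [{{'scc', SCC1}, 1}, {{'scc', SCC2}, 2},
                                 {1, SCC1}, {2, SCC2}]),
        true = ets:insert(Out, {1, [2]}),            % SCC1 depends on SCC2
        %% The two-step translation done by lookup_scc/3:
        [{_, Int}]  = ets:lookup(Maps, {'scc', SCC1}),
        [{_, Ints}] = ets:lookup(Out, Int),
        [ets:lookup_element(Maps, I, 2) || I <- Ints].   % -> [[c]]
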
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index b43711446b..158ee761af 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -630,8 +630,8 @@ cl_loop(State, LogCache) ->
{BackendPid, warnings, Warnings} ->
NewState = store_warnings(State, Warnings),
cl_loop(NewState, LogCache);
- {BackendPid, done, NewPlt, _NewDocPlt} ->
- return_value(State, NewPlt);
+ {BackendPid, done, NewMiniPlt, _NewDocPlt} ->
+ return_value(State, NewMiniPlt);
{BackendPid, ext_calls, ExtCalls} ->
cl_loop(State#cl_state{external_calls = ExtCalls}, LogCache);
{BackendPid, ext_types, ExtTypes} ->
@@ -647,6 +647,7 @@ cl_loop(State, LogCache) ->
cl_error(State, Msg);
_Other ->
%% io:format("Received ~p\n", [_Other]),
+ %% Note: {BackendPid, cserver, CodeServer, Plt} is ignored.
cl_loop(State, LogCache)
end.
@@ -692,10 +693,13 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
output_plt = OutputPlt,
plt_info = PltInfo,
stored_warnings = StoredWarnings},
- Plt) ->
+ MiniPlt) ->
case OutputPlt =:= none of
- true -> ok;
- false -> dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
+ true ->
+ dialyzer_plt:delete(MiniPlt);
+ false ->
+ Plt = dialyzer_plt:restore_full_plt(MiniPlt),
+ dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
end,
UnknownWarnings = unknown_warnings(State),
RetValue =
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index dd383ea828..f53c713bfe 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -22,7 +22,9 @@
-module(dialyzer_codeserver).
-export([delete/1,
- finalize_contracts/3,
+ store_temp_contracts/4,
+ give_away/2,
+ finalize_contracts/1,
finalize_exported_types/2,
finalize_records/2,
get_contracts/1,
@@ -31,7 +33,9 @@
get_exports/1,
get_records/1,
get_next_core_label/1,
- get_temp_contracts/1,
+ get_temp_contracts/2,
+ contracts_modules/1,
+ store_contracts/4,
get_temp_exported_types/1,
get_temp_records/1,
insert/3,
@@ -41,6 +45,7 @@
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
+ lookup_mfa_var_label/2,
lookup_mod_records/2,
lookup_mod_contracts/2,
lookup_mfa_contract/2,
@@ -49,21 +54,22 @@
set_next_core_label/2,
set_temp_records/2,
store_temp_records/3,
- store_temp_contracts/4]).
+ translate_fake_file/3]).
--export_type([codeserver/0, fun_meta_info/0]).
+-export_type([codeserver/0, fun_meta_info/0, contracts/0]).
-include("dialyzer.hrl").
%%--------------------------------------------------------------------
-type dict_ets() :: ets:tid().
+-type map_ets() :: ets:tid().
-type set_ets() :: ets:tid().
-type types() :: erl_types:type_table().
--type mod_records() :: dict:dict(module(), types()).
+-type mod_records() :: erl_types:mod_records().
--type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
+-type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}.
-type mod_contracts() :: dict:dict(module(), contracts()).
%% A property-list of data compiled from -compile and -dialyzer attributes.
@@ -74,16 +80,16 @@
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
- exported_types :: set_ets() | 'undefined', % set(mfa())
- records :: dict_ets() | 'undefined',
- contracts :: dict_ets() | 'undefined',
- callbacks :: dict_ets() | 'undefined',
+ exported_types :: set_ets(), % set(mfa())
+ records :: map_ets(),
+ contracts :: map_ets(),
+ callbacks :: map_ets(),
fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
- temp_records :: 'clean' | dict_ets(),
- temp_contracts :: 'clean' | dict_ets(),
- temp_callbacks :: 'clean' | dict_ets()
+ temp_records :: 'clean' | map_ets(),
+ temp_contracts :: 'clean' | map_ets(),
+ temp_callbacks :: 'clean' | map_ets()
}).
-opaque codeserver() :: #codeserver{}.
@@ -97,7 +103,7 @@ ets_dict_find(Key, Table) ->
_:_ -> error
end.
-ets_dict_store(Key, Element, Table) ->
+ets_map_store(Key, Element, Table) ->
true = ets:insert(Table, {Key, Element}),
Table.
@@ -121,9 +127,6 @@ ets_set_to_set(Table) ->
Fold = fun({E}, Set) -> sets:add_element(E, Set) end,
ets:foldl(Fold, sets:new(), Table).
-ets_read_concurrent_table(Name) ->
- ets:new(Name, [{read_concurrency, true}]).
-
%%--------------------------------------------------------------------
-spec new() -> codeserver().
@@ -131,6 +134,13 @@ ets_read_concurrent_table(Name) ->
new() ->
CodeOptions = [compressed, public, {read_concurrency, true}],
Code = ets:new(dialyzer_codeserver_code, CodeOptions),
+ ReadOptions = [compressed, {read_concurrency, true}],
+ [Contracts, Callbacks, Records, ExportedTypes] =
+ [ets:new(Name, ReadOptions) ||
+ Name <- [dialyzer_codeserver_contracts,
+ dialyzer_codeserver_callbacks,
+ dialyzer_codeserver_records,
+ dialyzer_codeserver_exported_types]],
TempOptions = [public, {write_concurrency, true}],
[Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts,
TempCallbacks] =
@@ -143,6 +153,10 @@ new() ->
#codeserver{code = Code,
exports = Exports,
fun_meta_info = FunMetaInfo,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks,
temp_exported_types = TempExportedTypes,
temp_records = TempRecords,
temp_contracts = TempContracts,
@@ -163,13 +177,15 @@ insert(Mod, ModCode, CS) ->
Exports = cerl:module_exports(ModCode),
Attrs = cerl:module_attrs(ModCode),
Defs = cerl:module_defs(ModCode),
+ {Files, SmallDefs} = compress_file_anno(Defs),
As = cerl:get_ann(ModCode),
Funs =
[{{Mod, cerl:fname_id(Var), cerl:fname_arity(Var)},
- Val} || Val = {Var, _Fun} <- Defs],
- Keys = [Key || {Key, _Value} <- Funs],
+ Val, {Var, cerl_trees:get_label(Fun)}} || Val = {Var, Fun} <- SmallDefs],
+ Keys = [Key || {Key, _Value, _Label} <- Funs],
ModEntry = {Mod, {Name, Exports, Attrs, Keys, As}},
- true = ets:insert(CS#codeserver.code, [ModEntry|Funs]),
+ ModFileEntry = {{mod, Mod}, Files},
+ true = ets:insert(CS#codeserver.code, [ModEntry, ModFileEntry|Funs]),
CS.
-spec get_temp_exported_types(codeserver()) -> sets:set(mfa()).
@@ -213,12 +229,12 @@ get_exports(#codeserver{exports = Exports}) ->
-spec finalize_exported_types(sets:set(mfa()), codeserver()) -> codeserver().
-finalize_exported_types(Set, CS) ->
- ExportedTypes = ets_read_concurrent_table(dialyzer_codeserver_exported_types),
+finalize_exported_types(Set,
+ #codeserver{exported_types = ExportedTypes,
+ temp_exported_types = TempETypes} = CS) ->
true = ets_set_insert_set(Set, ExportedTypes),
- TempExpTypes = CS#codeserver.temp_exported_types,
- true = ets:delete(TempExpTypes),
- CS#codeserver{exported_types = ExportedTypes, temp_exported_types = clean}.
+ true = ets:delete(TempETypes),
+ CS#codeserver{temp_exported_types = clean}.
-spec lookup_mod_code(atom(), codeserver()) -> cerl:c_module().
@@ -230,6 +246,11 @@ lookup_mod_code(Mod, CS) when is_atom(Mod) ->
lookup_mfa_code({_M, _F, _A} = MFA, CS) ->
table__lookup(CS#codeserver.code, MFA).
+-spec lookup_mfa_var_label(mfa(), codeserver()) -> {cerl:c_var(), label()}.
+
+lookup_mfa_var_label({_M, _F, _A} = MFA, CS) ->
+ ets:lookup_element(CS#codeserver.code, MFA, 3).
+
-spec get_next_core_label(codeserver()) -> label().
get_next_core_label(#codeserver{next_core_label = NCL}) ->
@@ -244,8 +265,8 @@ set_next_core_label(NCL, CS) ->
lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) ->
case ets_dict_find(Mod, RecDict) of
- error -> dict:new();
- {ok, Dict} -> Dict
+ error -> maps:new();
+ {ok, Map} -> Map
end.
-spec get_records(codeserver()) -> mod_records().
@@ -255,11 +276,11 @@ get_records(#codeserver{records = RecDict}) ->
-spec store_temp_records(module(), types(), codeserver()) -> codeserver().
-store_temp_records(Mod, Dict, #codeserver{temp_records = TempRecDict} = CS)
+store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS)
when is_atom(Mod) ->
- case dict:size(Dict) =:= 0 of
+ case maps:size(Map) =:= 0 of
true -> CS;
- false -> CS#codeserver{temp_records = ets_dict_store(Mod, Dict, TempRecDict)}
+ false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)}
end.
-spec get_temp_records(codeserver()) -> mod_records().
@@ -277,20 +298,20 @@ set_temp_records(Dict, CS) ->
-spec finalize_records(mod_records(), codeserver()) -> codeserver().
-finalize_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- Records = ets_read_concurrent_table(dialyzer_codeserver_records),
+finalize_records(Dict, #codeserver{temp_records = TmpRecords,
+ records = Records} = CS) ->
+ true = ets:delete(TmpRecords),
true = ets_dict_store_dict(Dict, Records),
- CS#codeserver{records = Records, temp_records = clean}.
+ CS#codeserver{temp_records = clean}.
-spec lookup_mod_contracts(atom(), codeserver()) -> contracts().
lookup_mod_contracts(Mod, #codeserver{contracts = ContDict})
when is_atom(Mod) ->
case ets_dict_find(Mod, ContDict) of
- error -> dict:new();
+ error -> maps:new();
{ok, Keys} ->
- dict:from_list([get_file_contract(Key, ContDict)|| Key <- Keys])
+ maps:from_list([get_file_contract(Key, ContDict)|| Key <- Keys])
end.
get_file_contract(Key, ContDict) ->
@@ -323,48 +344,69 @@ get_callbacks(#codeserver{callbacks = CallbDict}) ->
-spec store_temp_contracts(module(), contracts(), contracts(), codeserver()) ->
codeserver().
-store_temp_contracts(Mod, SpecDict, CallbackDict,
+store_temp_contracts(Mod, SpecMap, CallbackMap,
#codeserver{temp_contracts = Cn,
temp_callbacks = Cb} = CS)
when is_atom(Mod) ->
- CS1 =
- case dict:size(SpecDict) =:= 0 of
- true -> CS;
- false ->
- CS#codeserver{temp_contracts = ets_dict_store(Mod, SpecDict, Cn)}
- end,
- case dict:size(CallbackDict) =:= 0 of
- true -> CS1;
- false ->
- CS1#codeserver{temp_callbacks = ets_dict_store(Mod, CallbackDict, Cb)}
- end.
-
--spec get_temp_contracts(codeserver()) -> {mod_contracts(), mod_contracts()}.
+ CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)},
+ CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}.
-get_temp_contracts(#codeserver{temp_contracts = TempContDict,
- temp_callbacks = TempCallDict}) ->
- {ets_dict_to_dict(TempContDict), ets_dict_to_dict(TempCallDict)}.
+-spec contracts_modules(codeserver()) -> [module()].
--spec finalize_contracts(mod_contracts(), mod_contracts(), codeserver()) ->
- codeserver().
+contracts_modules(#codeserver{temp_contracts = TempContTable}) ->
+ ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]).
-finalize_contracts(SpecDict, CallbackDict, CS) ->
- Contracts = ets_read_concurrent_table(dialyzer_codeserver_contracts),
- Callbacks = ets_read_concurrent_table(dialyzer_codeserver_callbacks),
- Contracts = dict:fold(fun decompose_spec_dict/3, Contracts, SpecDict),
- Callbacks = dict:fold(fun decompose_cb_dict/3, Callbacks, CallbackDict),
- CS#codeserver{contracts = Contracts, callbacks = Callbacks,
- temp_contracts = clean, temp_callbacks = clean}.
+-spec store_contracts(module(), contracts(), contracts(), codeserver()) ->
+ codeserver().
-decompose_spec_dict(Mod, Dict, Table) ->
- Keys = dict:fetch_keys(Dict),
- true = ets:insert(Table, dict:to_list(Dict)),
- true = ets:insert(Table, {Mod, Keys}),
- Table.
+store_contracts(Mod, SpecMap, CallbackMap, CS) ->
+ #codeserver{contracts = SpecDict, callbacks = CallbackDict} = CS,
+ Keys = maps:keys(SpecMap),
+ true = ets:insert(SpecDict, maps:to_list(SpecMap)),
+ true = ets:insert(SpecDict, {Mod, Keys}),
+ true = ets:insert(CallbackDict, maps:to_list(CallbackMap)),
+ CS.
-decompose_cb_dict(_Mod, Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)),
- Table.
+-spec get_temp_contracts(module(), codeserver()) ->
+ {contracts(), contracts()}.
+
+get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict,
+ temp_callbacks = TempCallDict}) ->
+ [{Mod, Contracts}] = ets:lookup(TempContDict, Mod),
+ true = ets:delete(TempContDict, Mod),
+ [{Mod, Callbacks}] = ets:lookup(TempCallDict, Mod),
+ true = ets:delete(TempCallDict, Mod),
+ {Contracts, Callbacks}.
+
+-spec give_away(codeserver(), pid()) -> 'ok'.
+
+give_away(#codeserver{temp_records = TempRecords,
+ temp_contracts = TempContracts,
+ temp_callbacks = TempCallbacks,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks}, Pid) ->
+ _ = [true = ets:give_away(Table, Pid, any) ||
+ Table <- [TempRecords, TempContracts, TempCallbacks,
+ Records, Contracts, Callbacks],
+ Table =/= clean],
+ ok.
+
+-spec finalize_contracts(codeserver()) -> codeserver().
+
+finalize_contracts(#codeserver{temp_contracts = TempContDict,
+ temp_callbacks = TempCallDict} = CS) ->
+ true = ets:delete(TempContDict),
+ true = ets:delete(TempCallDict),
+ CS#codeserver{temp_contracts = clean, temp_callbacks = clean}.
+
+-spec translate_fake_file(codeserver(), module(), file:filename()) ->
+ file:filename().
+
+translate_fake_file(#codeserver{code = Code}, Module, FakeFile) ->
+ Files = ets:lookup_element(Code, {mod, Module}, 2),
+ {FakeFile, File} = lists:keyfind(FakeFile, 1, Files),
+ File.
table__lookup(TablePid, M) when is_atom(M) ->
{Name, Exports, Attrs, Keys, As} = ets:lookup_element(TablePid, M, 2),
@@ -372,3 +414,25 @@ table__lookup(TablePid, M) when is_atom(M) ->
cerl:ann_c_module(As, Name, Exports, Attrs, Defs);
table__lookup(TablePid, MFA) ->
ets:lookup_element(TablePid, MFA, 2).
+
+compress_file_anno(Term) ->
+ {Files, SmallTerm} = compress_file_anno(Term, []),
+ {[{FakeFile, File} || {File, {file, FakeFile}} <- Files], SmallTerm}.
+
+compress_file_anno({file, F}, Fs) when is_list(F) ->
+ case lists:keyfind(F, 1, Fs) of
+ false ->
+ I = integer_to_list(length(Fs)),
+ FileI = {file, I},
+ NFs = [{F, FileI}|Fs],
+ {NFs, FileI};
+ {F, FileI} -> {Fs, FileI}
+ end;
+compress_file_anno(T, Fs) when is_tuple(T) ->
+ {NFs, NL} = compress_file_anno(tuple_to_list(T), Fs),
+ {NFs, list_to_tuple(NL)};
+compress_file_anno([E|L], Fs) ->
+ {Fs1, NE} = compress_file_anno(E, Fs),
+ {NFs, NL} = compress_file_anno(L, Fs1),
+ {NFs, [NE|NL]};
+compress_file_anno(T, Fs) -> {Fs, T}.
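
compress_file_anno/1 above shrinks the per-function Core Erlang annotations by replacing every file name with a short fake name ("0", "1", ...) and storing the fake-to-real mapping under {mod, Module}; translate_fake_file/3 reverses it when a warning needs the real path. A small sketch of the round trip, using a throw-away table and an invented module name:

    fake_file_demo() ->
        Code = ets:new(code, []),
        RealFile = "lib/foo/src/foo.erl",
        %% What insert/3 would store for module foo after compression:
        true = ets:insert(Code, {{mod, foo}, [{"0", RealFile}]}),
        CompressedAnn = [4, {file, "0"}],      % annotation as kept in the code table
        %% What translate_fake_file/3 does to recover the real name:
        Files = ets:lookup_element(Code, {mod, foo}, 2),
        {"0", RealFile} = lists:keyfind("0", 1, Files),
        {CompressedAnn, RealFile}.
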
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 7cc4a9d3eb..2078e58ce8 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -24,7 +24,7 @@
get_contract_return/2,
%% get_contract_signature/1,
is_overloaded/1,
- process_contract_remote_types/1,
+ process_contract_remote_types/2,
store_tmp_contract/5]).
-export_type([file_contract/0, plt_contracts/0]).
@@ -139,14 +139,13 @@ sequence([], _Delimiter) -> "";
sequence([H], _Delimiter) -> H;
sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
--spec process_contract_remote_types(dialyzer_codeserver:codeserver()) ->
- dialyzer_codeserver:codeserver().
+-spec process_contract_remote_types(dialyzer_codeserver:codeserver(),
+ erl_types:mod_records()) ->
+ dialyzer_codeserver:codeserver().
-process_contract_remote_types(CodeServer) ->
- {TmpContractDict, TmpCallbackDict} =
- dialyzer_codeserver:get_temp_contracts(CodeServer),
+process_contract_remote_types(CodeServer, RecordDict) ->
+ Mods = dialyzer_codeserver:contracts_modules(CodeServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
- RecordDict = dialyzer_codeserver:get_records(CodeServer),
ContractFun =
fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) ->
#tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract,
@@ -158,20 +157,21 @@ process_contract_remote_types(CodeServer) ->
{{MFA, {File, Contract, Xtra}}, C2}
end,
ModuleFun =
- fun({ModuleName, ContractDict}, C3) ->
- {NewContractList, C4} =
- lists:mapfoldl(ContractFun, C3, dict:to_list(ContractDict)),
- {{ModuleName, dict:from_list(NewContractList)}, C4}
+ fun(ModuleName) ->
+ Cache = erl_types:cache__new(),
+ {ContractMap, CallbackMap} =
+ dialyzer_codeserver:get_temp_contracts(ModuleName, CodeServer),
+ {NewContractList, Cache1} =
+ lists:mapfoldl(ContractFun, Cache, maps:to_list(ContractMap)),
+ {NewCallbackList, _NewCache} =
+ lists:mapfoldl(ContractFun, Cache1, maps:to_list(CallbackMap)),
+ dialyzer_codeserver:store_contracts(ModuleName,
+ maps:from_list(NewContractList),
+ maps:from_list(NewCallbackList),
+ CodeServer)
end,
- Cache = erl_types:cache__new(),
- {NewContractList, C5} =
- lists:mapfoldl(ModuleFun, Cache, dict:to_list(TmpContractDict)),
- {NewCallbackList, _C6} =
- lists:mapfoldl(ModuleFun, C5, dict:to_list(TmpCallbackDict)),
- NewContractDict = dict:from_list(NewContractList),
- NewCallbackDict = dict:from_list(NewCallbackList),
- dialyzer_codeserver:finalize_contracts(NewContractDict, NewCallbackDict,
- CodeServer).
+ lists:foreach(ModuleFun, Mods),
+ dialyzer_codeserver:finalize_contracts(CodeServer).
-type opaques_fun() :: fun((module()) -> [erl_types:erl_type()]).
@@ -390,7 +390,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% ?debug("Inf: ~s\n", [erl_types:t_to_string(Inf)]),
%% erl_types:t_assign_variables_to_subtype(Contract, Inf).
--type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
+-type contracts() :: dialyzer_codeserver:contracts().
%% Checks the contracts for functions that are not implemented
-spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) ->
@@ -400,12 +400,12 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
|| {Label, Arity} <- AllFuns0],
AllFuns2 = [{M, F, A} || {{ok, {M, F, _}}, A} <- AllFuns1],
- AllContractMFAs = dict:fetch_keys(Contracts),
+ AllContractMFAs = maps:keys(Contracts),
ErrorContractMFAs = AllContractMFAs -- AllFuns2,
[warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
- {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
+ {{File, Line}, _Contract, _Xtra} = maps:get(MFA, Contracts),
WarningInfo = {File, Line, MFA},
{?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
@@ -438,11 +438,11 @@ insert_constraints([], Map) -> Map.
-spec store_tmp_contract(mfa(), file_line(), spec_data(), contracts(), types()) ->
contracts().
-store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) ->
+store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecMap, RecordsDict) ->
%% io:format("contract from form: ~p\n", [TypeSpec]),
TmpContract = contract_from_form(TypeSpec, MFA, RecordsDict, FileLine),
%% io:format("contract: ~p\n", [TmpContract]),
- dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict).
+ maps:put(MFA, {FileLine, TmpContract, Xtra}, SpecMap).
contract_from_form(Forms, MFA, RecDict, FileLine) ->
{CFuns, Forms1} = contract_from_form(Forms, MFA, RecDict, FileLine, [], []),
@@ -670,7 +670,7 @@ get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) ->
get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, FindOpaques, Acc) ->
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(Mod, CodeServer),
- Contracts2 = dict:to_list(Contracts1),
+ Contracts2 = maps:to_list(Contracts1),
Records = dialyzer_codeserver:lookup_mod_records(Mod, CodeServer),
NewAcc = get_invalid_contract_warnings_funs(Contracts2, Plt, Records, FindOpaques, Acc),
get_invalid_contract_warnings_modules(Mods, CodeServer, Plt, FindOpaques, NewAcc);
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index fdcf8269e4..f706ebfb02 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -522,7 +522,7 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
case is_race_analysis_enabled(State) of
true ->
Ann = cerl:get_ann(Tree),
- File = get_file(Ann),
+ File = get_file(Ann, State),
Line = abs(get_line(Ann)),
dialyzer_races:store_race_call(Fun, ArgTypes, Args,
{File, Line}, State);
@@ -3083,7 +3083,7 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Ann = cerl:get_ann(Tree),
case Force of
true ->
- WarningInfo = {get_file(Ann),
+ WarningInfo = {get_file(Ann, State),
abs(get_line(Ann)),
State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
@@ -3093,7 +3093,9 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
case is_compiler_generated(Ann) of
true -> State;
false ->
- WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ WarningInfo = {get_file(Ann, State),
+ get_line(Ann),
+ State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
case Tag of
?WARN_CONTRACT_RANGE -> ok;
@@ -3492,6 +3494,12 @@ state__put_races(Races, State) ->
state__records_only(#state{records = Records}) ->
#state{records = Records}.
+-spec state__translate_file(file:filename(), state()) -> file:filename().
+
+state__translate_file(FakeFile, State) ->
+ #state{codeserver = CodeServer, module = Module} = State,
+ dialyzer_codeserver:translate_fake_file(CodeServer, Module, FakeFile).
+
%%% ===========================================================================
%%%
%%% Races
@@ -3563,9 +3571,11 @@ get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([]) -> [];
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file([], _State) -> [];
+get_file([{file, FakeFile}|_], State) ->
+ state__translate_file(FakeFile, State);
+get_file([_|Tail], State) ->
+ get_file(Tail, State).
is_compiler_generated(Ann) ->
lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index 91f7fbe467..d1b955044b 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -498,8 +498,9 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
end,
ExplanationPid = spawn_link(Fun),
gui_loop(State#gui_state{expl_pid = ExplanationPid});
- {BackendPid, done, _NewPlt, NewDocPlt} ->
+ {BackendPid, done, NewMiniPlt, NewDocPlt} ->
message(State, "Analysis done"),
+ dialyzer_plt:delete(NewMiniPlt),
config_gui_stop(State),
gui_loop(State#gui_state{doc_plt = NewDocPlt});
{'EXIT', BackendPid, {error, Reason}} ->
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 64b10af1ba..37c22fef48 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -51,7 +51,9 @@
get_specs/4,
to_file/4,
get_mini_plt/1,
- restore_full_plt/2
+ restore_full_plt/1,
+ delete/1,
+ give_away/2
]).
%% Debug utilities
@@ -75,14 +77,16 @@
%%----------------------------------------------------------------------
-record(plt, {info = table_new() :: dict:dict(),
- types = table_new() :: dict:dict(),
+ types = table_new() :: erl_types:mod_records(),
contracts = table_new() :: dict:dict(),
callbacks = table_new() :: dict:dict(),
exported_types = sets:new() :: sets:set()}).
-record(mini_plt, {info :: ets:tid(),
+ types :: ets:tid(),
contracts :: ets:tid(),
- callbacks :: ets:tid()
+ callbacks :: ets:tid(),
+ exported_types :: ets:tid()
}).
-opaque plt() :: #plt{} | #mini_plt{}.
@@ -123,6 +127,10 @@ delete_module(#plt{info = Info, types = Types,
-spec delete_list(plt(), [mfa() | integer()]) -> plt().
+delete_list(#mini_plt{info = Info,
+ contracts = Contracts}=Plt, List) ->
+ Plt#mini_plt{info = ets_table_delete_list(Info, List),
+ contracts = ets_table_delete_list(Contracts, List)};
delete_list(#plt{info = Info, types = Types,
contracts = Contracts,
callbacks = Callbacks,
@@ -176,7 +184,7 @@ lookup(Plt, Label) when is_integer(Label) ->
lookup_1(#mini_plt{info = Info}, MFAorLabel) ->
ets_table_lookup(Info, MFAorLabel).
--spec insert_types(plt(), dict:dict()) -> plt().
+-spec insert_types(plt(), erl_types:mod_records()) -> plt().
insert_types(PLT, Rec) ->
PLT#plt{types = Rec}.
@@ -186,7 +194,7 @@ insert_types(PLT, Rec) ->
insert_exported_types(PLT, Set) ->
PLT#plt{exported_types = Set}.
--spec get_types(plt()) -> dict:dict().
+-spec get_types(plt()) -> erl_types:mod_records().
get_types(#plt{types = Types}) ->
Types.
@@ -246,8 +254,10 @@ from_file(FileName, ReturnInfo) ->
Msg = io_lib:format("Old PLT file ~s\n", [FileName]),
plt_error(Msg);
ok ->
+ Types = [{Mod, maps:from_list(dict:to_list(Types))} ||
+ {Mod, Types} <- dict:to_list(Rec#file_plt.types)],
Plt = #plt{info = Rec#file_plt.info,
- types = Rec#file_plt.types,
+ types = dict:from_list(Types),
contracts = Rec#file_plt.contracts,
callbacks = Rec#file_plt.callbacks,
exported_types = Rec#file_plt.exported_types},
@@ -364,12 +374,14 @@ to_file(FileName,
end,
OldModDeps, ModDeps),
ImplMd5 = compute_implementation_md5(),
+ FileTypes = dict:from_list([{Mod, dict:from_list(maps:to_list(MTypes))} ||
+ {Mod, MTypes} <- dict:to_list(Types)]),
Record = #file_plt{version = ?VSN,
file_md5_list = MD5,
info = Info,
contracts = Contracts,
callbacks = Callbacks,
- types = Types,
+ types = FileTypes,
exported_types = ExpTypes,
mod_deps = NewModDeps,
implementation_md5 = ImplMd5},
@@ -503,32 +515,100 @@ init_md5_list_1(Md5List, [], Acc) ->
-spec get_mini_plt(plt()) -> plt().
-get_mini_plt(#plt{info = Info, contracts = Contracts, callbacks = Callbacks}) ->
- [ETSInfo, ETSContracts, ETSCallbacks] =
- [ets:new(Name, [public]) || Name <- [plt_info, plt_contracts, plt_callbacks]],
+get_mini_plt(#plt{info = Info,
+ types = Types,
+ contracts = Contracts,
+ callbacks = Callbacks,
+ exported_types = ExpTypes}) ->
+ [ETSInfo, ETSTypes, ETSContracts, ETSCallbacks, ETSExpTypes] =
+ [ets:new(Name, [public]) ||
+ Name <- [plt_info, plt_types, plt_contracts, plt_callbacks,
+ plt_exported_types]],
CallbackList = dict:to_list(Callbacks),
CallbacksByModule =
[{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} ||
M <- lists:usort([M || {{M,_,_},_} <- CallbackList])],
- [true, true] =
+ [true, true, true] =
[ets:insert(ETS, dict:to_list(Data)) ||
- {ETS, Data} <- [{ETSInfo, Info}, {ETSContracts, Contracts}]],
+ {ETS, Data} <- [{ETSInfo, Info},
+ {ETSTypes, Types},
+ {ETSContracts, Contracts}]],
true = ets:insert(ETSCallbacks, CallbacksByModule),
- #mini_plt{info = ETSInfo, contracts = ETSContracts, callbacks = ETSCallbacks};
+ true = ets:insert(ETSExpTypes, [{ET} || ET <- sets:to_list(ExpTypes)]),
+ #mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes};
get_mini_plt(undefined) ->
undefined.
--spec restore_full_plt(plt(), plt()) -> plt().
-
-restore_full_plt(#mini_plt{info = ETSInfo, contracts = ETSContracts}, Plt) ->
- Info = dict:from_list(ets:tab2list(ETSInfo)),
- Contracts = dict:from_list(ets:tab2list(ETSContracts)),
- ets:delete(ETSContracts),
- ets:delete(ETSInfo),
- Plt#plt{info = Info, contracts = Contracts};
-restore_full_plt(undefined, undefined) ->
+-spec restore_full_plt(plt()) -> plt().
+
+restore_full_plt(#mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes} = MiniPlt) ->
+ Info = dict:from_list(tab2list(ETSInfo)),
+ Contracts = dict:from_list(tab2list(ETSContracts)),
+ Types = dict:from_list(tab2list(ETSTypes)),
+ Callbacks =
+ dict:from_list([Cb || {_M, Cbs} <- tab2list(ETSCallbacks), Cb <- Cbs]),
+ ExpTypes = sets:from_list([E || {E} <- tab2list(ETSExpTypes)]),
+ ok = delete(MiniPlt),
+ #plt{info = Info,
+ types = Types,
+ contracts = Contracts,
+ callbacks = Callbacks,
+ exported_types = ExpTypes};
+restore_full_plt(undefined) ->
undefined.
+-spec delete(plt()) -> 'ok'.
+
+delete(#mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes}) ->
+ true = ets:delete(ETSContracts),
+ true = ets:delete(ETSTypes),
+ true = ets:delete(ETSInfo),
+ true = ets:delete(ETSCallbacks),
+ true = ets:delete(ETSExpTypes),
+ ok.
+
+-spec give_away(plt(), pid()) -> 'ok'.
+
+give_away(#mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes},
+ Pid) ->
+ true = ets:give_away(ETSContracts, Pid, any),
+ true = ets:give_away(ETSTypes, Pid, any),
+ true = ets:give_away(ETSInfo, Pid, any),
+ true = ets:give_away(ETSCallbacks, Pid, any),
+ true = ets:give_away(ETSExpTypes, Pid, any),
+ ok.
+
+%% Somewhat slower than ets:tab2list(), but uses less memory.
+tab2list(T) ->
+ tab2list(ets:first(T), T, []).
+
+tab2list('$end_of_table', T, A) ->
+ case ets:first(T) of % no safe_fixtable()...
+ '$end_of_table' -> A;
+ Key -> tab2list(Key, T, A)
+ end;
+tab2list(Key, T, A) ->
+ Vs = ets:lookup(T, Key),
+ Key1 = ets:next(T, Key),
+ ets:delete(T, Key),
+ tab2list(Key1, T, Vs ++ A).
+
%%---------------------------------------------------------------------------
%% Edoc
@@ -600,6 +680,12 @@ table_delete_module1(Plt, Mod) ->
table_delete_module2(Plt, Mod) ->
dict:filter(fun(M, _Val) -> M =/= Mod end, Plt).
+ets_table_delete_list(Tab, [H|T]) ->
+ ets:delete(Tab, H),
+ ets_table_delete_list(Tab, T);
+ets_table_delete_list(Tab, []) ->
+ Tab.
+
table_delete_list(Plt, [H|T]) ->
table_delete_list(dict:erase(H, Plt), T);
table_delete_list(Plt, []) ->
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index df12796dd4..3c90f46e95 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -89,7 +89,7 @@ analyze_callgraph(Callgraph, Plt, Codeserver, TimingServer, Solvers, Parent) ->
NewState =
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
- dialyzer_plt:restore_full_plt(NewState#st.plt, Plt).
+ NewState#st.plt.
%%--------------------------------------------------------------------
@@ -104,6 +104,7 @@ init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
timing_server = TimingServer} = State) ->
+ erlang:garbage_collect(),
case find_succ_typings(SCCs, State) of
{fixpoint, State1} -> State1;
{not_fixpoint, NotFixpoint1, State1} ->
@@ -148,8 +149,8 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver,
?timing(TimingServer, "warning",
get_warnings_from_modules(Mods, InitState, MiniDocPlt)),
{postprocess_warnings(CWarns ++ ModWarns, Codeserver),
- dialyzer_plt:restore_full_plt(MiniPlt, Plt),
- dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}.
+ MiniPlt,
+ dialyzer_plt:restore_full_plt(MiniDocPlt)}.
get_warnings_from_modules(Mods, State, DocPlt) ->
#st{callgraph = Callgraph, codeserver = Codeserver,
@@ -167,10 +168,10 @@ collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) ->
%% Check if there are contracts for functions that do not exist
Warnings1 =
dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
+ Attrs = cerl:module_attrs(ModCode),
{Warnings2, FunTypes} =
dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver,
Records),
- Attrs = cerl:module_attrs(ModCode),
Warnings3 =
dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver),
DocPlt = insert_into_doc_plt(FunTypes, Callgraph, DocPlt),
@@ -255,7 +256,7 @@ refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) ->
NewFunTypes =
dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records),
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer),
- Contracts = orddict:from_list(dict:to_list(Contracts1)),
+ Contracts = orddict:from_list(maps:to_list(Contracts1)),
FindOpaques = find_opaques_fun(Records),
DecoratedFunTypes =
decorate_succ_typings(Contracts, Callgraph, NewFunTypes, FindOpaques),
@@ -341,21 +342,25 @@ find_succ_typings(SCCs, #st{codeserver = Codeserver, callgraph = Callgraph,
-spec find_succ_types_for_scc(scc(), typesig_init_data()) -> [mfa_or_funlbl()].
-find_succ_types_for_scc(SCC, {Codeserver, Callgraph, Plt, Solvers}) ->
- SCC_Info = [{MFA,
- dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
- dialyzer_codeserver:lookup_mod_records(M, Codeserver)}
- || {M, _, _} = MFA <- SCC],
+find_succ_types_for_scc(SCC0, {Codeserver, Callgraph, Plt, Solvers}) ->
+ SCC = [MFA || {_, _, _} = MFA <- SCC0],
Contracts1 = [{MFA, dialyzer_codeserver:lookup_mfa_contract(MFA, Codeserver)}
- || {_, _, _} = MFA <- SCC],
+ || MFA <- SCC],
Contracts2 = [{MFA, Contract} || {MFA, {ok, Contract}} <- Contracts1],
Contracts3 = orddict:from_list(Contracts2),
Label = dialyzer_codeserver:get_next_core_label(Codeserver),
- AllFuns = collect_fun_info([Fun || {_MFA, {_Var, Fun}, _Rec} <- SCC_Info]),
+ AllFuns = lists:append(
+ [begin
+ {_Var, Fun} =
+ dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ collect_fun_info([Fun])
+ end || MFA <- SCC]),
+ erlang:garbage_collect(),
PropTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
%% Assume that the PLT contains the current propagated types
- FunTypes = dialyzer_typesig:analyze_scc(SCC_Info, Label, Callgraph,
- Plt, PropTypes, Solvers),
+ FunTypes = dialyzer_typesig:analyze_scc(SCC, Label, Callgraph,
+ Codeserver, Plt, PropTypes,
+ Solvers),
AllFunSet = sets:from_list([X || {X, _} <- AllFuns]),
FilteredFunTypes =
dict:filter(fun(X, _) -> sets:is_element(X, AllFunSet) end, FunTypes),
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 457db9df83..b33484bda4 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -22,7 +22,7 @@
-module(dialyzer_typesig).
--export([analyze_scc/6]).
+-export([analyze_scc/7]).
-export([get_safe_underapprox/2]).
%%-import(helper, %% 'helper' could be any module doing sanity checks...
@@ -94,10 +94,9 @@
-type types() :: erl_types:type_table().
--type typesig_scc() :: [{mfa(), {cerl:c_var(), cerl:c_fun()}, types()}].
-type typesig_funmap() :: #{type_var() => type_var()}.
--type prop_types() :: dict:dict(label(), types()).
+-type prop_types() :: dict:dict(label(), erl_types:erl_type()).
-record(state, {callgraph :: dialyzer_callgraph:callgraph()
| 'undefined',
@@ -114,7 +113,7 @@
plt :: dialyzer_plt:plt()
| 'undefined',
prop_types = dict:new() :: prop_types(),
- records = dict:new() :: types(),
+ records = maps:new() :: types(),
scc = [] :: ordsets:ordset(type_var()),
mfas :: [mfa()],
solvers = [] :: [solver()]
@@ -153,11 +152,10 @@
%%-----------------------------------------------------------------------------
%% Analysis of strongly connected components.
%%
-%% analyze_scc(SCC, NextLabel, CallGraph, PLT, PropTypes, Solvers) -> FunTypes
+%% analyze_scc(SCC, NextLabel, CallGraph, CodeServer,
+%% PLT, PropTypes, Solvers) -> FunTypes
%%
-%% SCC - [{MFA, Def, Records}]
-%% where Def = {Var, Fun} as in the Core Erlang module definitions.
-%% Records = dict(RecName, {Arity, [{FieldName, FieldType}]})
+%% SCC - [{MFA}]
%% NextLabel - An integer that is higher than any label in the code.
%% CallGraph - A callgraph as produced by dialyzer_callgraph.erl
%% Note: The callgraph must have been built with all the
@@ -169,28 +167,27 @@
%% Solvers - User specified solvers.
%%-----------------------------------------------------------------------------
--spec analyze_scc(typesig_scc(), label(),
+-spec analyze_scc([mfa()], label(),
dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
dialyzer_plt:plt(), prop_types(), [solver()]) -> prop_types().
-analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers0) ->
+analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) ->
Solvers = solvers(Solvers0),
- assert_format_of_scc(SCC),
- State1 = new_state(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers),
- DefSet = add_def_list([Var || {_MFA, {Var, _Fun}, _Rec} <- SCC], sets:new()),
- State2 = traverse_scc(SCC, DefSet, State1),
+ State1 = new_state(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes,
+ Solvers),
+ DefSet = add_def_list(maps:values(State1#state.name_map), sets:new()),
+ ModRecs = [{M, dialyzer_codeserver:lookup_mod_records(M, CServer)} ||
+ M <- lists:usort([M || {M, _, _} <- SCC])],
+ State2 = traverse_scc(SCC, CServer, DefSet, ModRecs, State1),
State3 = state__finalize(State2),
+ erlang:garbage_collect(),
Funs = state__scc(State3),
pp_constrs_scc(Funs, State3),
constraints_to_dot_scc(Funs, State3),
T = solve(Funs, State3),
dict:from_list(maps:to_list(T)).
-assert_format_of_scc([{_MFA, {_Var, _Fun}, _Records}|Left]) ->
- assert_format_of_scc(Left);
-assert_format_of_scc([]) ->
- ok.
-
solvers([]) -> [v2];
solvers(Solvers) -> Solvers.
@@ -200,12 +197,14 @@ solvers(Solvers) -> Solvers.
%%
%% ============================================================================
-traverse_scc([{_MFA, Def, Rec}|Left], DefSet, AccState) ->
+traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, ModRecs, AccState) ->
+ Def = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ {M, Rec} = lists:keyfind(M, 1, ModRecs),
TmpState1 = state__set_rec_dict(AccState, Rec),
DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)),
{NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1),
- traverse_scc(Left, DefSet, NewAccState);
-traverse_scc([], _DefSet, AccState) ->
+ traverse_scc(Left, Codeserver, DefSet, ModRecs, NewAccState);
+traverse_scc([], _Codeserver, _DefSet, _ModRecs, AccState) ->
AccState.
traverse(Tree, DefinedVars, State) ->
@@ -2695,11 +2694,14 @@ pp_map(_S, _Map) ->
%%
%% ============================================================================
-new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes, Solvers) ->
- List = [{MFA, Var} || {MFA, {Var, _Fun}, _Rec} <- SCC0],
+new_state(MFAs, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers) ->
+ List_SCC =
+ [begin
+ {Var, Label} = dialyzer_codeserver:lookup_mfa_var_label(MFA, CServer),
+ {{MFA, Var}, t_var(Label)}
+ end || MFA <- MFAs],
+ {List, SCC} = lists:unzip(List_SCC),
NameMap = maps:from_list(List),
- MFAs = [MFA || {MFA, _Var} <- List],
- SCC = [mk_var(Fun) || {_MFA, {_Var, Fun}, _Rec} <- SCC0],
SelfRec =
case SCC of
[OneF] ->
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 8480129dab..432d27571b 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -195,7 +195,7 @@ get_core_from_abstract_code(AbstrCode, Opts) ->
get_record_and_type_info(AbstractCode) ->
Module = get_module(AbstractCode),
- get_record_and_type_info(AbstractCode, Module, dict:new()).
+ get_record_and_type_info(AbstractCode, Module, maps:new()).
-spec get_record_and_type_info(abstract_code(), module(), type_table()) ->
{'ok', type_table()} | {'error', string()}.
@@ -208,7 +208,7 @@ get_record_and_type_info([{attribute, A, record, {Name, Fields0}}|Left],
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
FN = {File, erl_anno:line(A)},
- NewRecDict = dict:store({record, Name}, {FN, [{Arity,Fields}]}, RecDict),
+ NewRecDict = maps:put({record, Name}, {FN, [{Arity,Fields}]}, RecDict),
get_record_and_type_info(Left, Module, NewRecDict, File);
get_record_and_type_info([{attribute, A, type, {{record, Name}, Fields0, []}}
|Left], Module, RecDict, File) ->
@@ -216,7 +216,7 @@ get_record_and_type_info([{attribute, A, type, {{record, Name}, Fields0, []}}
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
FN = {File, erl_anno:line(A)},
- NewRecDict = dict:store({record, Name}, {FN, [{Arity, Fields}]}, RecDict),
+ NewRecDict = maps:put({record, Name}, {FN, [{Arity, Fields}]}, RecDict),
get_record_and_type_info(Left, Module, NewRecDict, File);
get_record_and_type_info([{attribute, A, Attr, {Name, TypeForm}}|Left],
Module, RecDict, File)
@@ -256,9 +256,9 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, FN,
false ->
try erl_types:t_var_names(ArgForms) of
ArgNames ->
- dict:store({TypeOrOpaque, Name, Arity},
- {{Module, FN, TypeForm, ArgNames},
- erl_types:t_any()}, RecDict)
+ maps:put({TypeOrOpaque, Name, Arity},
+ {{Module, FN, TypeForm, ArgNames},
+ erl_types:t_any()}, RecDict)
catch
_:_ ->
throw({error, flat_format("Type declaration for ~w does not "
@@ -289,19 +289,18 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) ->
get_record_fields([], _RecDict, Acc) ->
lists:reverse(Acc).
--spec process_record_remote_types(codeserver()) -> codeserver().
+-spec process_record_remote_types(codeserver()) ->
+ {codeserver(), mod_records()}.
%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
TempRecords = dialyzer_codeserver:get_temp_records(CServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
- Cache = erl_types:cache__new(),
- {TempRecords1, Cache1} =
- process_opaque_types0(TempRecords, ExpTypes, Cache),
+ TempRecords1 = process_opaque_types0(TempRecords, ExpTypes),
%% A cache (not the field type cache) is used for speeding things up a bit.
VarTable = erl_types:var_table__new(),
ModuleFun =
- fun({Module, Record}, C0) ->
+ fun({Module, Record}) ->
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -327,24 +326,27 @@ process_record_remote_types(CServer) ->
_Other -> {{Key, Value}, C2}
end
end,
- {RecordList, C1} =
- lists:mapfoldl(RecordFun, C0, dict:to_list(Record)),
- {{Module, dict:from_list(RecordList)}, C1}
+ Cache = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, Cache, maps:to_list(Record)),
+ {Module, maps:from_list(RecordList)}
end,
- {NewRecordsList, C1} =
- lists:mapfoldl(ModuleFun, Cache1, dict:to_list(TempRecords1)),
+ NewRecordsList = lists:map(ModuleFun, dict:to_list(TempRecords1)),
NewRecords = dict:from_list(NewRecordsList),
- _C8 = check_record_fields(NewRecords, ExpTypes, C1),
- dialyzer_codeserver:finalize_records(NewRecords, CServer).
+ check_record_fields(NewRecords, ExpTypes),
+ {dialyzer_codeserver:finalize_records(NewRecords, CServer), NewRecords}.
%% erl_types:t_from_form() substitutes the declaration of opaque types
%% for the expanded type in some cases. To make sure the initial type,
%% any(), is not used, the expansion is done twice.
%% XXX: Recursive opaque types are not handled well.
-process_opaque_types0(TempRecords0, TempExpTypes, Cache) ->
- {TempRecords1, NewCache} =
+process_opaque_types0(TempRecords0, TempExpTypes) ->
+ Cache = erl_types:cache__new(),
+ {TempRecords1, Cache1} =
process_opaque_types(TempRecords0, TempExpTypes, Cache),
- process_opaque_types(TempRecords1, TempExpTypes, NewCache).
+ {TempRecords, _NewCache} =
+ process_opaque_types(TempRecords1, TempExpTypes, Cache1),
+ TempRecords.
process_opaque_types(TempRecords, TempExpTypes, Cache) ->
VarTable = erl_types:var_table__new(),
@@ -364,8 +366,8 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) ->
end
end,
{RecordList, C1} =
- lists:mapfoldl(RecordFun, C0, dict:to_list(Record)),
- {{Module, dict:from_list(RecordList)}, C1}
+ lists:mapfoldl(RecordFun, C0, maps:to_list(Record)),
+ {{Module, maps:from_list(RecordList)}, C1}
%% dict:map(RecordFun, Record)
end,
{TempRecordList, NewCache} =
@@ -373,7 +375,8 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) ->
{dict:from_list(TempRecordList), NewCache}.
%% dict:map(ModuleFun, TempRecords).
-check_record_fields(Records, TempExpTypes, Cache) ->
+check_record_fields(Records, TempExpTypes) ->
+ Cache = erl_types:cache__new(),
VarTable = erl_types:var_table__new(),
CheckFun =
fun({Module, Element}, C0) ->
@@ -403,9 +406,10 @@ check_record_fields(Records, TempExpTypes, Cache) ->
msg_with_position(Fun, FileLine)
end
end,
- lists:foldl(ElemFun, C0, dict:to_list(Element))
+ lists:foldl(ElemFun, C0, maps:to_list(Element))
end,
- lists:foldl(CheckFun, Cache, dict:to_list(Records)).
+ _NewCache = lists:foldl(CheckFun, Cache, dict:to_list(Records)),
+ ok.
msg_with_position(Fun, FileLine) ->
try Fun()
@@ -428,17 +432,17 @@ merge_records(NewRecords, OldRecords) ->
%%
%% ============================================================================
--type spec_dict() :: dict:dict().
--type callback_dict() :: dict:dict().
+-type spec_map() :: dialyzer_codeserver:contracts().
+-type callback_map() :: dialyzer_codeserver:contracts().
-spec get_spec_info(module(), abstract_code(), type_table()) ->
- {'ok', spec_dict(), callback_dict()} | {'error', string()}.
+ {'ok', spec_map(), callback_map()} | {'error', string()}.
-get_spec_info(ModName, AbstractCode, RecordsDict) ->
+get_spec_info(ModName, AbstractCode, RecordsMap) ->
OptionalCallbacks0 = get_optional_callbacks(AbstractCode, ModName),
OptionalCallbacks = gb_sets:from_list(OptionalCallbacks0),
- get_spec_info(AbstractCode, dict:new(), dict:new(),
- RecordsDict, ModName, OptionalCallbacks, "nofile").
+ get_spec_info(AbstractCode, maps:new(), maps:new(),
+ RecordsMap, ModName, OptionalCallbacks, "nofile").
get_optional_callbacks(Abs, ModName) ->
[{ModName, F, A} || {F, A} <- get_optional_callbacks(Abs)].
@@ -456,7 +460,7 @@ get_optional_callbacks(Abs) ->
%% are erl_types:erl_type()
get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
- SpecDict, CallbackDict, RecordsDict, ModName, OptCb, File)
+ SpecMap, CallbackMap, RecordsMap, ModName, OptCb, File)
when ((Contract =:= 'spec') or (Contract =:= 'callback')),
is_list(TypeSpec) ->
Ln = erl_anno:line(Anno),
@@ -465,24 +469,24 @@ get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
{F, A} -> {ModName, F, A}
end,
Xtra = [optional_callback || gb_sets:is_member(MFA, OptCb)],
- ActiveDict =
+ ActiveMap =
case Contract of
- spec -> SpecDict;
- callback -> CallbackDict
+ spec -> SpecMap;
+ callback -> CallbackMap
end,
- try dict:find(MFA, ActiveDict) of
+ try maps:find(MFA, ActiveMap) of
error ->
SpecData = {TypeSpec, Xtra},
- NewActiveDict =
+ NewActiveMap =
dialyzer_contracts:store_tmp_contract(MFA, {File, Ln}, SpecData,
- ActiveDict, RecordsDict),
- {NewSpecDict, NewCallbackDict} =
+ ActiveMap, RecordsMap),
+ {NewSpecMap, NewCallbackMap} =
case Contract of
- spec -> {NewActiveDict, CallbackDict};
- callback -> {SpecDict, NewActiveDict}
+ spec -> {NewActiveMap, CallbackMap};
+ callback -> {SpecMap, NewActiveMap}
end,
- get_spec_info(Left, NewSpecDict, NewCallbackDict,
- RecordsDict, ModName, OptCb, File);
+ get_spec_info(Left, NewSpecMap, NewCallbackMap,
+ RecordsMap, ModName, OptCb, File);
{ok, {{OtherFile, L}, _D}} ->
{Mod, Fun, Arity} = MFA,
Msg = flat_format(" Contract/callback for function ~w:~w/~w "
@@ -495,16 +499,16 @@ get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
[Ln, Error])}
end;
get_spec_info([{attribute, _, file, {IncludeFile, _}}|Left],
- SpecDict, CallbackDict, RecordsDict, ModName, OptCb, _File) ->
- get_spec_info(Left, SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, IncludeFile);
-get_spec_info([_Other|Left], SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, File) ->
- get_spec_info(Left, SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, File);
-get_spec_info([], SpecDict, CallbackDict,
- _RecordsDict, _ModName, _OptCb, _File) ->
- {ok, SpecDict, CallbackDict}.
+ SpecMap, CallbackMap, RecordsMap, ModName, OptCb, _File) ->
+ get_spec_info(Left, SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, IncludeFile);
+get_spec_info([_Other|Left], SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, File) ->
+ get_spec_info(Left, SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, File);
+get_spec_info([], SpecMap, CallbackMap,
+ _RecordsMap, _ModName, _OptCb, _File) ->
+ {ok, SpecMap, CallbackMap}.
-spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) ->
dialyzer_codeserver:fun_meta_info() | {'error', string()}.
@@ -700,7 +704,7 @@ format_errors([]) ->
-spec format_sig(erl_types:erl_type()) -> string().
format_sig(Type) ->
- format_sig(Type, dict:new()).
+ format_sig(Type, maps:new()).
-spec format_sig(erl_types:erl_type(), type_table()) -> string().
@@ -952,9 +956,7 @@ label(Tree) ->
-spec parallelism() -> integer().
parallelism() ->
- CPUs = erlang:system_info(logical_processors_available),
- Schedulers = erlang:system_info(schedulers),
- min(CPUs, Schedulers).
+ erlang:system_info(schedulers_online).
-spec family([{K,V}]) -> [{K,[V]}].
diff --git a/lib/dialyzer/test/small_SUITE_data/results/chars b/lib/dialyzer/test/small_SUITE_data/results/chars
new file mode 100644
index 0000000000..2c1f8f8d17
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/chars
@@ -0,0 +1,4 @@
+
+chars.erl:29: Invalid type specification for function chars:f/1. The success typing is (#{'b':=50}) -> 'ok'
+chars.erl:32: Function t1/0 has no local return
+chars.erl:32: The call chars:f(#{'b':=50}) breaks the contract (#{'a':=49,'b'=>50,'c'=>51}) -> 'ok'
diff --git a/lib/dialyzer/test/small_SUITE_data/src/anno.erl b/lib/dialyzer/test/small_SUITE_data/src/anno.erl
new file mode 100644
index 0000000000..70f1d42141
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/anno.erl
@@ -0,0 +1,18 @@
+-module(anno).
+
+%% OTP-14131
+
+-export([t1/0, t2/0, t3/0]).
+
+t1() ->
+ A = erl_parse:anno_from_term({attribute, 1, module, my_test}),
+ compile:forms([A], []).
+
+t2() ->
+ A = erl_parse:new_anno({attribute, 1, module, my_test}),
+ compile:forms([A], []).
+
+t3() ->
+ A = erl_parse:new_anno({attribute, 1, module, my_test}),
+ T = erl_parse:anno_to_term(A),
+ {attribute, 1, module, my_test} = T.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/chars.erl b/lib/dialyzer/test/small_SUITE_data/src/chars.erl
new file mode 100644
index 0000000000..1e9c8ab6b9
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/chars.erl
@@ -0,0 +1,32 @@
+-module(chars).
+
+%% ERL-313
+
+-export([t/0]).
+-export([t1/0]).
+
+-record(r, {f :: $A .. $Z}).
+
+-type cs() :: $A..$Z | $a .. $z | $/.
+
+-spec t() -> $0-$0..$9-$0| $?.
+
+t() ->
+ c(#r{f = $z - 3}),
+ c($z - 3),
+ c($B).
+
+-spec c(cs()) -> $3-$0..$9-$0.
+
+c($A + 1) -> 2;
+c(C) ->
+ case C of
+ $z - 3 -> 3;
+ #r{f = $z - 3} -> 7
+ end.
+
+%% Display contract with character in warning:
+-spec f(#{a := $1, b => $2, c => $3}) -> ok. % invalid type spec
+f(_) -> ok.
+
+t1() -> f(#{b => $2}). % breaks the contract
diff --git a/lib/eldap/test/Makefile b/lib/eldap/test/Makefile
index 21a0da926f..81fa8f187a 100644
--- a/lib/eldap/test/Makefile
+++ b/lib/eldap/test/Makefile
@@ -42,7 +42,7 @@ TARGET_FILES= \
SPEC_FILES = eldap.spec
-# COVER_FILE = eldap.cover
+COVER_FILE = eldap.cover
# ----------------------------------------------------
diff --git a/lib/eldap/test/eldap.cover b/lib/eldap/test/eldap.cover
new file mode 100644
index 0000000000..8c15956e72
--- /dev/null
+++ b/lib/eldap/test/eldap.cover
@@ -0,0 +1,3 @@
+{incl_app,eldap,details}.
+
+{excl_mods, eldap, ['ELDAPv3']}.
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 5a4cf77b81..91ee104f77 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -228,7 +228,8 @@
-export([t_is_identifier/1]).
-endif.
--export_type([erl_type/0, opaques/0, type_table/0, var_table/0, cache/0]).
+-export_type([erl_type/0, opaques/0, type_table/0, mod_records/0,
+ var_table/0, cache/0]).
%%-define(DEBUG, true).
@@ -371,8 +372,9 @@
-type type_value() :: {{module(), {file:name(), erl_anno:line()},
erl_parse:abstract_type(), ArgNames :: [atom()]},
erl_type()}.
--type type_table() :: dict:dict(record_key() | type_key(),
- record_value() | type_value()).
+-type type_table() :: #{record_key() | type_key() =>
+ record_value() | type_value()}.
+-type mod_records() :: dict:dict(module(), type_table()).
-opaque var_table() :: #{atom() => erl_type()}.
@@ -741,16 +743,16 @@ decorate_tuples_in_sets([], _L, _Opaques, Acc) ->
-spec t_opaque_from_records(type_table()) -> [erl_type()].
-t_opaque_from_records(RecDict) ->
- OpaqueRecDict =
- dict:filter(fun(Key, _Value) ->
+t_opaque_from_records(RecMap) ->
+ OpaqueRecMap =
+ maps:filter(fun(Key, _Value) ->
case Key of
{opaque, _Name, _Arity} -> true;
_ -> false
end
- end, RecDict),
- OpaqueTypeDict =
- dict:map(fun({opaque, Name, _Arity},
+ end, RecMap),
+ OpaqueTypeMap =
+ maps:map(fun({opaque, Name, _Arity},
{{Module, _FileLine, _Form, ArgNames}, _Type}) ->
%% Args = args_to_types(ArgNames),
%% List = lists:zip(ArgNames, Args),
@@ -759,8 +761,8 @@ t_opaque_from_records(RecDict) ->
Rep = t_any(), % not used for anything right now
Args = [t_any() || _ <- ArgNames],
t_opaque(Module, Name, Args, Rep)
- end, OpaqueRecDict),
- [OpaqueType || {_Key, OpaqueType} <- dict:to_list(OpaqueTypeDict)].
+ end, OpaqueRecMap),
+ [OpaqueType || {_Key, OpaqueType} <- maps:to_list(OpaqueTypeMap)].
%% Decompose opaque instances of type arg2 to structured types, in arg1
%% XXX: Same as t_unopaque
@@ -794,10 +796,6 @@ list_struct_from_opaque(Types, Opaques) ->
[t_struct_from_opaque(Type, Opaques) || Type <- Types].
%%-----------------------------------------------------------------------------
-
--type mod_records() :: dict:dict(module(), type_table()).
-
-%%-----------------------------------------------------------------------------
%% Unit type. Signals non termination.
%%
@@ -3059,88 +3057,91 @@ is_compat_args([A1|Args1], [A2|Args2]) ->
is_compat_args([], []) -> true;
is_compat_args(_, _) -> false.
-is_compat_arg(A1, A2) ->
- is_specialization(A1, A2) orelse is_specialization(A2, A1).
-
--spec is_specialization(erl_type(), erl_type()) -> boolean().
-
-%% Returns true if the first argument is a specialization of the
-%% second argument in the sense that every type is a specialization of
-%% any(). For example, {_,_} is a specialization of any(), but not of
-%% tuple(). Does not handle variables, but any() and unions (sort of).
-
-is_specialization(T, T) -> true;
-is_specialization(_, ?any) -> true;
-is_specialization(?any, _) -> false;
-is_specialization(?function(Domain1, Range1), ?function(Domain2, Range2)) ->
- (is_specialization(Domain1, Domain2) andalso
- is_specialization(Range1, Range2));
-is_specialization(?list(Contents1, Termination1, Size1),
- ?list(Contents2, Termination2, Size2)) ->
+-spec is_compat_arg(erl_type(), erl_type()) -> boolean().
+
+%% The intention is that 'true' is to be returned iff one of the
+%% arguments is a specialization of the other argument in the sense
+%% that every type is a specialization of any(). For example, {_,_} is
+%% a specialization of any(), but not of tuple(). Does not handle
+%% variables, but any() and unions (sort of). However, the
+%% implementation is more relaxed, as any() is compatible with anything.
+
+is_compat_arg(T, T) -> true;
+is_compat_arg(_, ?any) -> true;
+is_compat_arg(?any, _) -> true;
+is_compat_arg(?function(Domain1, Range1), ?function(Domain2, Range2)) ->
+ (is_compat_arg(Domain1, Domain2) andalso
+ is_compat_arg(Range1, Range2));
+is_compat_arg(?list(Contents1, Termination1, Size1),
+ ?list(Contents2, Termination2, Size2)) ->
(Size1 =:= Size2 andalso
- is_specialization(Contents1, Contents2) andalso
- is_specialization(Termination1, Termination2));
-is_specialization(?product(Types1), ?product(Types2)) ->
- specialization_list(Types1, Types2);
-is_specialization(?tuple(?any, ?any, ?any), ?tuple(_, _, _)) -> false;
-is_specialization(?tuple(_, _, _), ?tuple(?any, ?any, ?any)) -> false;
-is_specialization(?tuple(Elements1, Arity, _),
- ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
- specialization_list(Elements1, Elements2);
-is_specialization(?tuple_set([{Arity, List}]),
- ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
- specialization_list(sup_tuple_elements(List), Elements2);
-is_specialization(?tuple(Elements1, Arity, _),
- ?tuple_set([{Arity, List}])) when Arity =/= ?any ->
- specialization_list(Elements1, sup_tuple_elements(List));
-is_specialization(?tuple_set(List1), ?tuple_set(List2)) ->
+ is_compat_arg(Contents1, Contents2) andalso
+ is_compat_arg(Termination1, Termination2));
+is_compat_arg(?product(Types1), ?product(Types2)) ->
+ is_compat_list(Types1, Types2);
+is_compat_arg(?map(Pairs1, DefK1, DefV1), ?map(Pairs2, DefK2, DefV2)) ->
+ (is_compat_list(Pairs1, Pairs2) andalso
+ is_compat_arg(DefK1, DefK2) andalso
+ is_compat_arg(DefV1, DefV2));
+is_compat_arg(?tuple(?any, ?any, ?any), ?tuple(_, _, _)) -> false;
+is_compat_arg(?tuple(_, _, _), ?tuple(?any, ?any, ?any)) -> false;
+is_compat_arg(?tuple(Elements1, Arity, _),
+ ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
+ is_compat_list(Elements1, Elements2);
+is_compat_arg(?tuple_set([{Arity, List}]),
+ ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
+ is_compat_list(sup_tuple_elements(List), Elements2);
+is_compat_arg(?tuple(Elements1, Arity, _),
+ ?tuple_set([{Arity, List}])) when Arity =/= ?any ->
+ is_compat_list(Elements1, sup_tuple_elements(List));
+is_compat_arg(?tuple_set(List1), ?tuple_set(List2)) ->
try
- specialization_list_list([sup_tuple_elements(T) || {_Arity, T} <- List1],
- [sup_tuple_elements(T) || {_Arity, T} <- List2])
+ is_compat_list_list([sup_tuple_elements(T) || {_Arity, T} <- List1],
+ [sup_tuple_elements(T) || {_Arity, T} <- List2])
catch _:_ -> false
end;
-is_specialization(?opaque(_) = T1, T2) ->
- is_specialization(t_opaque_structure(T1), T2);
-is_specialization(T1, ?opaque(_) = T2) ->
- is_specialization(T1, t_opaque_structure(T2));
-is_specialization(?union(List1)=T1, ?union(List2)=T2) ->
- case specialization_union2(T1, T2) of
- {yes, Type1, Type2} -> is_specialization(Type1, Type2);
- no -> specialization_list(List1, List2)
+is_compat_arg(?opaque(_) = T1, T2) ->
+ is_compat_arg(t_opaque_structure(T1), T2);
+is_compat_arg(T1, ?opaque(_) = T2) ->
+ is_compat_arg(T1, t_opaque_structure(T2));
+is_compat_arg(?union(List1)=T1, ?union(List2)=T2) ->
+ case is_compat_union2(T1, T2) of
+ {yes, Type1, Type2} -> is_compat_arg(Type1, Type2);
+ no -> is_compat_list(List1, List2)
end;
-is_specialization(?union(List), T2) ->
+is_compat_arg(?union(List), T2) ->
case unify_union(List) of
- {yes, Type} -> is_specialization(Type, T2);
+ {yes, Type} -> is_compat_arg(Type, T2);
no -> false
end;
-is_specialization(T1, ?union(List)) ->
+is_compat_arg(T1, ?union(List)) ->
case unify_union(List) of
- {yes, Type} -> is_specialization(T1, Type);
+ {yes, Type} -> is_compat_arg(T1, Type);
no -> false
end;
-is_specialization(?var(_), _) -> exit(error);
-is_specialization(_, ?var(_)) -> exit(error);
-is_specialization(?none, _) -> false;
-is_specialization(_, ?none) -> false;
-is_specialization(?unit, _) -> false;
-is_specialization(_, ?unit) -> false;
-is_specialization(#c{}, #c{}) -> false.
+is_compat_arg(?var(_), _) -> exit(error);
+is_compat_arg(_, ?var(_)) -> exit(error);
+is_compat_arg(?none, _) -> false;
+is_compat_arg(_, ?none) -> false;
+is_compat_arg(?unit, _) -> false;
+is_compat_arg(_, ?unit) -> false;
+is_compat_arg(#c{}, #c{}) -> false.
-specialization_list_list(LL1, LL2) ->
- length(LL1) =:= length(LL2) andalso specialization_list_list1(LL1, LL2).
+is_compat_list_list(LL1, LL2) ->
+ length(LL1) =:= length(LL2) andalso is_compat_list_list1(LL1, LL2).
-specialization_list_list1([], []) -> true;
-specialization_list_list1([L1|LL1], [L2|LL2]) ->
- specialization_list(L1, L2) andalso specialization_list_list1(LL1, LL2).
+is_compat_list_list1([], []) -> true;
+is_compat_list_list1([L1|LL1], [L2|LL2]) ->
+ is_compat_list(L1, L2) andalso is_compat_list_list1(LL1, LL2).
-specialization_list(L1, L2) ->
- length(L1) =:= length(L2) andalso specialization_list1(L1, L2).
+is_compat_list(L1, L2) ->
+ length(L1) =:= length(L2) andalso is_compat_list1(L1, L2).
-specialization_list1([], []) -> true;
-specialization_list1([T1|L1], [T2|L2]) ->
- is_specialization(T1, T2) andalso specialization_list1(L1, L2).
+is_compat_list1([], []) -> true;
+is_compat_list1([T1|L1], [T2|L2]) ->
+ is_compat_arg(T1, T2) andalso is_compat_list1(L1, L2).
-specialization_union2(?union(List1)=T1, ?union(List2)=T2) ->
+is_compat_union2(?union(List1)=T1, ?union(List2)=T2) ->
case {unify_union(List1), unify_union(List2)} of
{{yes, Type1}, {yes, Type2}} -> {yes, Type1, Type2};
{{yes, Type1}, no} -> {yes, Type1, T2};
@@ -4173,7 +4174,7 @@ t_map(Fun, T) ->
-spec t_to_string(erl_type()) -> string().
t_to_string(T) ->
- t_to_string(T, dict:new()).
+ t_to_string(T, maps:new()).
-spec t_to_string(erl_type(), type_table()) -> string().
@@ -4534,6 +4535,8 @@ from_form({atom, _L, Atom}, _S, _D, L, C) ->
{t_atom(Atom), L, C};
from_form({integer, _L, Int}, _S, _D, L, C) ->
{t_integer(Int), L, C};
+from_form({char, _L, Char}, _S, _D, L, C) ->
+ {t_integer(Char), L, C};
from_form({op, _L, _Op, _Arg} = Op, _S, _D, L, C) ->
case erl_eval:partial_eval(Op) of
{integer, _, Val} ->
@@ -5048,6 +5051,7 @@ check_record_fields({remote_type, _L, [{atom, _, _}, {atom, _, _}, Args]},
list_check_record_fields(Args, S, C);
check_record_fields({atom, _L, _}, _S, C) -> C;
check_record_fields({integer, _L, _}, _S, C) -> C;
+check_record_fields({char, _L, _}, _S, C) -> C;
check_record_fields({op, _L, _Op, _Arg}, _S, C) -> C;
check_record_fields({op, _L, _Op, _Arg1, _Arg2}, _S, C) -> C;
check_record_fields({type, _L, tuple, any}, _S, C) -> C;
@@ -5149,6 +5153,7 @@ t_form_to_string({var, _L, Name}) -> atom_to_list(Name);
t_form_to_string({atom, _L, Atom}) ->
io_lib:write_string(atom_to_list(Atom), $'); % To quote or not to quote... '
t_form_to_string({integer, _L, Int}) -> integer_to_list(Int);
+t_form_to_string({char, _L, Char}) -> integer_to_list(Char);
t_form_to_string({op, _L, _Op, _Arg} = Op) ->
case erl_eval:partial_eval(Op) of
{integer, _, _} = Int -> t_form_to_string(Int);
@@ -5231,7 +5236,7 @@ t_form_to_string({type, _L, union, Args}) ->
t_form_to_string({type, _L, Name, []} = T) ->
try
M = mod,
- D0 = dict:new(),
+ D0 = maps:new(),
MR = dict:from_list([{M, D0}]),
Site = {type, {M,Name,0}},
V = var_table__new(),
@@ -5295,8 +5300,8 @@ is_erl_type(_) -> false.
-spec lookup_record(atom(), type_table()) ->
'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
-lookup_record(Tag, RecDict) when is_atom(Tag) ->
- case dict:find({record, Tag}, RecDict) of
+lookup_record(Tag, Table) when is_atom(Tag) ->
+ case maps:find({record, Tag}, Table) of
{ok, {_FileLine, [{_Arity, Fields}]}} ->
{ok, Fields};
{ok, {_FileLine, List}} when is_list(List) ->
@@ -5310,18 +5315,18 @@ lookup_record(Tag, RecDict) when is_atom(Tag) ->
-spec lookup_record(atom(), arity(), type_table()) ->
'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
-lookup_record(Tag, Arity, RecDict) when is_atom(Tag) ->
- case dict:find({record, Tag}, RecDict) of
+lookup_record(Tag, Arity, Table) when is_atom(Tag) ->
+ case maps:find({record, Tag}, Table) of
{ok, {_FileLine, [{Arity, Fields}]}} -> {ok, Fields};
{ok, {_FileLine, OrdDict}} -> orddict:find(Arity, OrdDict);
error -> error
end.
-spec lookup_type(_, _, _) -> {'type' | 'opaque', type_value()} | 'error'.
-lookup_type(Name, Arity, RecDict) ->
- case dict:find({type, Name, Arity}, RecDict) of
+lookup_type(Name, Arity, Table) ->
+ case maps:find({type, Name, Arity}, Table) of
error ->
- case dict:find({opaque, Name, Arity}, RecDict) of
+ case maps:find({opaque, Name, Arity}, Table) of
error -> error;
{ok, Found} -> {opaque, Found}
end;
@@ -5331,8 +5336,8 @@ lookup_type(Name, Arity, RecDict) ->
-spec type_is_defined('type' | 'opaque', atom(), arity(), type_table()) ->
boolean().
-type_is_defined(TypeOrOpaque, Name, Arity, RecDict) ->
- dict:is_key({TypeOrOpaque, Name, Arity}, RecDict).
+type_is_defined(TypeOrOpaque, Name, Arity, Table) ->
+ maps:is_key({TypeOrOpaque, Name, Arity}, Table).
cannot_have_opaque(Type, TypeName, TypeNames) ->
t_is_none(Type) orelse is_recursive(TypeName, TypeNames).
diff --git a/lib/hipe/test/basic_SUITE_data/basic_num_bif.erl b/lib/hipe/test/basic_SUITE_data/basic_num_bif.erl
new file mode 100644
index 0000000000..807c4b0d0d
--- /dev/null
+++ b/lib/hipe/test/basic_SUITE_data/basic_num_bif.erl
@@ -0,0 +1,217 @@
+%%% -*- erlang-indent-level: 2 -*-
+%%%-------------------------------------------------------------------
+%%% File : basic_num_bif.erl
+%%% Description : Taken from the compiler test suite
+%%%-------------------------------------------------------------------
+-module(basic_num_bif).
+
+-export([test/0]).
+
+%% Tests optimization of the BIFs:
+%% abs/1
+%% float/1
+%% float_to_list/1
+%% integer_to_list/1
+%% list_to_float/1
+%% list_to_integer/1
+%% round/1
+%% trunc/1
+
+test() ->
+ Funs = [fun t_abs/0, fun t_float/0,
+ fun t_float_to_list/0, fun t_integer_to_list/0,
+ fun t_list_to_float_safe/0, fun t_list_to_float_risky/0,
+ fun t_list_to_integer/0, fun t_round/0, fun t_trunc/0],
+ lists:foreach(fun (F) -> ok = F() end, Funs).
+
+t_abs() ->
+ %% Floats.
+ 5.5 = abs(5.5),
+ 0.0 = abs(0.0),
+ 100.0 = abs(-100.0),
+ %% Integers.
+ 5 = abs(5),
+ 0 = abs(0),
+ 100 = abs(-100),
+ %% The largest smallnum. OTP-3190.
+ X = (1 bsl 27) - 1,
+ X = abs(X),
+ X = abs(X-1)+1,
+ X = abs(X+1)-1,
+ X = abs(-X),
+ X = abs(-X-1)-1,
+ X = abs(-X+1)+1,
+ %% Bignums.
+ BigNum = 13984792374983749,
+ BigNum = abs(BigNum),
+ BigNum = abs(-BigNum),
+ ok.
+
+t_float() ->
+ 0.0 = float(0),
+ 2.5 = float(2.5),
+ 0.0 = float(0.0),
+ -100.55 = float(-100.55),
+ 42.0 = float(42),
+ -100.0 = float(-100),
+ %% Bignums.
+ 4294967305.0 = float(4294967305),
+ -4294967305.0 = float(-4294967305),
+ %% Extremely big bignums.
+ Big = list_to_integer(lists:duplicate(2000, $1)),
+ {'EXIT', {badarg, _}} = (catch float(Big)),
+ ok.
+
+%% Tests float_to_list/1.
+
+t_float_to_list() ->
+ test_ftl("0.0e+0", 0.0),
+ test_ftl("2.5e+1", 25.0),
+ test_ftl("2.5e+0", 2.5),
+ test_ftl("2.5e-1", 0.25),
+ test_ftl("-3.5e+17", -350.0e15),
+ ok.
+
+test_ftl(Expect, Float) ->
+  %% No line macro on the next line -- we want the line number from t_float_to_list.
+ Expect = remove_zeros(lists:reverse(float_to_list(Float)), []).
+
+%% Removes any non-significant zeros in a floating point number.
+%% Example: 2.500000e+01 -> 2.5e+1
+
+remove_zeros([$+, $e|Rest], [$0, X|Result]) ->
+ remove_zeros([$+, $e|Rest], [X|Result]);
+remove_zeros([$-, $e|Rest], [$0, X|Result]) ->
+ remove_zeros([$-, $e|Rest], [X|Result]);
+remove_zeros([$0, $.|Rest], [$e|Result]) ->
+ remove_zeros(Rest, [$., $0, $e|Result]);
+remove_zeros([$0|Rest], [$e|Result]) ->
+ remove_zeros(Rest, [$e|Result]);
+remove_zeros([Char|Rest], Result) ->
+ remove_zeros(Rest, [Char|Result]);
+remove_zeros([], Result) ->
+ Result.
+
+%% Tests integer_to_list/1.
+
+t_integer_to_list() ->
+ "0" = integer_to_list(0),
+ "42" = integer_to_list(42),
+ "-42" = integer_to_list(-42),
+ "-42" = integer_to_list(-42),
+ "32768" = integer_to_list(32768),
+ "268435455" = integer_to_list(268435455),
+ "-268435455" = integer_to_list(-268435455),
+ "123456932798748738738" = integer_to_list(123456932798748738738),
+ Big_List = lists:duplicate(2000, $1),
+ Big = list_to_integer(Big_List),
+ Big_List = integer_to_list(Big),
+ ok.
+
+%% Tests list_to_float/1.
+
+t_list_to_float_safe() ->
+ 0.0 = list_to_float("0.0"),
+ 0.0 = list_to_float("-0.0"),
+ 0.5 = list_to_float("0.5"),
+ -0.5 = list_to_float("-0.5"),
+ 100.0 = list_to_float("1.0e2"),
+ 127.5 = list_to_float("127.5"),
+ -199.5 = list_to_float("-199.5"),
+ ok.
+
+%% This might crash the emulator...
+%% (Known to crash the Unix version of Erlang 4.4.1)
+
+t_list_to_float_risky() ->
+ Many_Ones = lists:duplicate(25000, $1),
+ _ = list_to_float("2."++Many_Ones),
+ {'EXIT', {badarg, _}} = (catch list_to_float("2"++Many_Ones)),
+ ok.
+
+%% Tests list_to_integer/1.
+
+t_list_to_integer() ->
+ 0 = list_to_integer("0"),
+ 0 = list_to_integer("00"),
+ 0 = list_to_integer("-0"),
+ 1 = list_to_integer("1"),
+ -1 = list_to_integer("-1"),
+ 42 = list_to_integer("42"),
+ -12 = list_to_integer("-12"),
+ 32768 = list_to_integer("32768"),
+ 268435455 = list_to_integer("268435455"),
+ -268435455 = list_to_integer("-268435455"),
+ %% Bignums.
+ 123456932798748738738 = list_to_integer("123456932798748738738"),
+ _ = list_to_integer(lists:duplicate(2000, $1)),
+ ok.
+
+%% Tests round/1.
+
+t_round() ->
+ 0 = round(0.0),
+ 0 = round(0.4),
+ 1 = round(0.5),
+ 0 = round(-0.4),
+ -1 = round(-0.5),
+ 255 = round(255.3),
+ 256 = round(255.6),
+ -1033 = round(-1033.3),
+ -1034 = round(-1033.6),
+ %% OTP-3722:
+ X = (1 bsl 27) - 1,
+ MX = -X,
+ MXm1 = -X-1,
+ MXp1 = -X+1,
+ F = X + 0.0,
+ X = round(F),
+ X = round(F+1)-1,
+ X = round(F-1)+1,
+ MX = round(-F),
+ MXm1 = round(-F-1),
+ MXp1 = round(-F+1),
+ X = round(F+0.1),
+ X = round(F+1+0.1)-1,
+ X = round(F-1+0.1)+1,
+ MX = round(-F+0.1),
+ MXm1 = round(-F-1+0.1),
+ MXp1 = round(-F+1+0.1),
+ X = round(F-0.1),
+ X = round(F+1-0.1)-1,
+ X = round(F-1-0.1)+1,
+ MX = round(-F-0.1),
+ MXm1 = round(-F-1-0.1),
+ MXp1 = round(-F+1-0.1),
+ 0.5 = abs(round(F+0.5)-(F+0.5)),
+ 0.5 = abs(round(F-0.5)-(F-0.5)),
+ 0.5 = abs(round(-F-0.5)-(-F-0.5)),
+ 0.5 = abs(round(-F+0.5)-(-F+0.5)),
+ %% Bignums.
+ 4294967296 = round(4294967296.1),
+ 4294967297 = round(4294967296.9),
+ -4294967296 = -round(4294967296.1),
+ -4294967297 = -round(4294967296.9),
+ ok.
+
+t_trunc() ->
+ 0 = trunc(0.0),
+ 5 = trunc(5.3333),
+ -10 = trunc(-10.978987),
+ %% The largest smallnum, converted to float (OTP-3722):
+ X = (1 bsl 27) - 1,
+ F = X + 0.0,
+ %% io:format("X = ~p/~w/~w, F = ~p/~w/~w, trunc(F) = ~p/~w/~w~n",
+ %% [X, X, binary_to_list(term_to_binary(X)),
+ %% F, F, binary_to_list(term_to_binary(F)),
+ %% trunc(F), trunc(F), binary_to_list(term_to_binary(trunc(F)))]),
+ X = trunc(F),
+ X = trunc(F+1)-1,
+ X = trunc(F-1)+1,
+ X = -trunc(-F),
+ X = -trunc(-F-1)-1,
+ X = -trunc(-F+1)+1,
+ %% Bignums.
+ 4294967305 = trunc(4294967305.7),
+ -4294967305 = trunc(-4294967305.7),
+ ok.
diff --git a/lib/hipe/test/hipe_SUITE.erl b/lib/hipe/test/hipe_SUITE.erl
index a5b3924aa8..b9adb660f2 100644
--- a/lib/hipe/test/hipe_SUITE.erl
+++ b/lib/hipe/test/hipe_SUITE.erl
@@ -16,7 +16,11 @@
%%
-module(hipe_SUITE).
--compile([export_all]).
+-export([all/0, groups/0,
+ init_per_suite/1, end_per_suite/1,
+ init_per_group/2, end_per_group/2,
+ app/0, app/1, appup/0, appup/1]).
+
-include_lib("common_test/include/ct.hrl").
all() ->
diff --git a/lib/hipe/test/opt_verify_SUITE.erl b/lib/hipe/test/opt_verify_SUITE.erl
index 61952e81d7..86083fa02b 100644
--- a/lib/hipe/test/opt_verify_SUITE.erl
+++ b/lib/hipe/test/opt_verify_SUITE.erl
@@ -1,6 +1,9 @@
-module(opt_verify_SUITE).
--compile([export_all]).
+-export([all/0, groups/0,
+ init_per_suite/1, end_per_suite/1,
+ init_per_group/2, end_per_group/2,
+ call_elim/0, call_elim/1]).
all() ->
[call_elim].
@@ -23,23 +26,6 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-call_elim_test_file(Config, FileName, Option) ->
- PrivDir = test_server:lookup_config(priv_dir, Config),
- TempOut = test_server:temp_name(filename:join(PrivDir, "call_elim_out")),
- {ok, TestCase} = compile:file(FileName),
- {ok, TestCase} = hipe:c(TestCase, [Option, {pp_range_icode, {file, TempOut}}]),
- {ok, Icode} = file:read_file(TempOut),
- ok = file:delete(TempOut),
- Icode.
-
-substring_count(Icode, Substring) ->
- substring_count(Icode, Substring, 0).
-substring_count(Icode, Substring, N) ->
- case string:str(Icode, Substring) of
- 0 -> N;
- I -> substring_count(lists:nthtail(I, Icode), Substring, N+1)
- end.
-
call_elim() ->
[{doc, "Test that the call elimination optimization pass is ok"}].
call_elim(Config) ->
@@ -60,3 +46,20 @@ call_elim(Config) ->
Icode6 = call_elim_test_file(Config, F3, no_icode_call_elim),
3 = substring_count(binary:bin_to_list(Icode6), "is_key"),
ok.
+
+call_elim_test_file(Config, FileName, Option) ->
+ PrivDir = test_server:lookup_config(priv_dir, Config),
+ TempOut = test_server:temp_name(filename:join(PrivDir, "call_elim_out")),
+ {ok, TestCase} = compile:file(FileName),
+ {ok, TestCase} = hipe:c(TestCase, [Option, {pp_range_icode, {file, TempOut}}]),
+ {ok, Icode} = file:read_file(TempOut),
+ ok = file:delete(TempOut),
+ Icode.
+
+substring_count(Icode, Substring) ->
+ substring_count(Icode, Substring, 0).
+substring_count(Icode, Substring, N) ->
+ case string:str(Icode, Substring) of
+ 0 -> N;
+ I -> substring_count(lists:nthtail(I, Icode), Substring, N+1)
+ end.
diff --git a/lib/inets/doc/src/httpc.xml b/lib/inets/doc/src/httpc.xml
index 705afec022..4217b3c4fb 100644
--- a/lib/inets/doc/src/httpc.xml
+++ b/lib/inets/doc/src/httpc.xml
@@ -83,7 +83,7 @@
<title>HTTP DATA TYPES</title>
<p>Type definitions related to HTTP:</p>
- <p><c>method() = head | get | put | post | trace | options | delete</c></p>
+ <p><c>method() = head | get | put | post | trace | options | delete | patch</c></p>
<taglist>
<tag><c>request()</c></tag>
<item><p>= <c>{url(), headers()}</c></p>
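With patch added to method(), a request carrying a body is issued the same way as for post or put. A minimal usage sketch follows; the URL, header, and JSON payload are illustrative only and not part of this change:

%% Illustrative only -- URL, header and payload are hypothetical.
patch_example() ->
    {ok, _} = application:ensure_all_started(inets),
    Url = "http://example.com/resource/1",
    Headers = [{"authorization", "Bearer TOKEN"}],
    ContentType = "application/json",
    Body = <<"{\"name\":\"new value\"}">>,
    %% Same request/4 shape as for post and put; only the method differs.
    httpc:request(patch, {Url, Headers, ContentType, Body}, [], []).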
diff --git a/lib/inets/src/http_client/httpc.erl b/lib/inets/src/http_client/httpc.erl
index 91d87289a2..bd5f6df39e 100644
--- a/lib/inets/src/http_client/httpc.erl
+++ b/lib/inets/src/http_client/httpc.erl
@@ -147,6 +147,26 @@ request(Method, Request, HttpOptions, Options) ->
request(Method, Request, HttpOptions, Options, default_profile()).
request(Method,
+ {Url, Headers, ContentType, TupleBody},
+ HTTPOptions, Options, Profile)
+ when ((Method =:= post) orelse (Method =:= patch) orelse (Method =:= put) orelse (Method =:= delete))
+ andalso (is_atom(Profile) orelse is_pid(Profile)) andalso
+ is_list(ContentType) andalso is_tuple(TupleBody)->
+ case check_body_gen(TupleBody) of
+ ok ->
+ do_request(Method, {Url, Headers, ContentType, TupleBody}, HTTPOptions, Options, Profile);
+ Error ->
+ Error
+ end;
+request(Method,
+ {Url, Headers, ContentType, Body},
+ HTTPOptions, Options, Profile)
+ when ((Method =:= post) orelse (Method =:= patch) orelse (Method =:= put) orelse (Method =:= delete))
+ andalso (is_atom(Profile) orelse is_pid(Profile)) andalso
+ is_list(ContentType) andalso (is_list(Body) orelse is_binary(Body)) ->
+ do_request(Method, {Url, Headers, ContentType, Body}, HTTPOptions, Options, Profile);
+
+request(Method,
{Url, Headers},
HTTPOptions, Options, Profile)
when (Method =:= options) orelse
@@ -155,12 +175,6 @@ request(Method,
(Method =:= delete) orelse
(Method =:= trace) andalso
(is_atom(Profile) orelse is_pid(Profile)) ->
- ?hcrt("request", [{method, Method},
- {url, Url},
- {headers, Headers},
- {http_options, HTTPOptions},
- {options, Options},
- {profile, Profile}]),
case uri_parse(Url, Options) of
{error, Reason} ->
{error, Reason};
@@ -172,21 +186,9 @@ request(Method,
handle_request(Method, Url, ParsedUrl, Headers, [], [],
HTTPOptions, Options, Profile)
end
- end;
-
-request(Method,
- {Url, Headers, ContentType, Body},
- HTTPOptions, Options, Profile)
- when ((Method =:= post) orelse (Method =:= patch) orelse (Method =:= put) orelse
- (Method =:= delete)) andalso (is_atom(Profile) orelse is_pid(Profile)) ->
- ?hcrt("request", [{method, Method},
- {url, Url},
- {headers, Headers},
- {content_type, ContentType},
- {body, Body},
- {http_options, HTTPOptions},
- {options, Options},
- {profile, Profile}]),
+ end.
+
+do_request(Method, {Url, Headers, ContentType, Body}, HTTPOptions, Options, Profile) ->
case uri_parse(Url, Options) of
{error, Reason} ->
{error, Reason};
@@ -196,7 +198,6 @@ request(Method,
HTTPOptions, Options, Profile)
end.
-
%%--------------------------------------------------------------------------
%% cancel_request(RequestId) -> ok
%% cancel_request(RequestId, Profile) -> ok
@@ -209,7 +210,6 @@ cancel_request(RequestId) ->
cancel_request(RequestId, Profile)
when is_atom(Profile) orelse is_pid(Profile) ->
- ?hcrt("cancel request", [{request_id, RequestId}, {profile, Profile}]),
httpc_manager:cancel_request(RequestId, profile_name(Profile)).
@@ -232,7 +232,6 @@ cancel_request(RequestId, Profile)
set_options(Options) ->
set_options(Options, default_profile()).
set_options(Options, Profile) when is_atom(Profile) orelse is_pid(Profile) ->
- ?hcrt("set options", [{options, Options}, {profile, Profile}]),
case validate_options(Options) of
{ok, Opts} ->
httpc_manager:set_options(Opts, profile_name(Profile));
@@ -272,7 +271,6 @@ get_options(all = _Options, Profile) ->
get_options(Options, Profile)
when (is_list(Options) andalso
(is_atom(Profile) orelse is_pid(Profile))) ->
- ?hcrt("get options", [{options, Options}, {profile, Profile}]),
case Options -- get_options() of
[] ->
try
@@ -314,9 +312,6 @@ store_cookies(SetCookieHeaders, Url) ->
store_cookies(SetCookieHeaders, Url, Profile)
when is_atom(Profile) orelse is_pid(Profile) ->
- ?hcrt("store cookies", [{set_cookie_headers, SetCookieHeaders},
- {url, Url},
- {profile, Profile}]),
try
begin
%% Since the Address part is not actually used
@@ -353,9 +348,6 @@ cookie_header(Url, Opts) when is_list(Opts) ->
cookie_header(Url, Opts, Profile)
when (is_list(Opts) andalso (is_atom(Profile) orelse is_pid(Profile))) ->
- ?hcrt("cookie header", [{url, Url},
- {opts, Opts},
- {profile, Profile}]),
try
begin
httpc_manager:which_cookies(Url, Opts, profile_name(Profile))
@@ -398,7 +390,6 @@ which_sessions() ->
which_sessions(default_profile()).
which_sessions(Profile) ->
- ?hcrt("which sessions", [{profile, Profile}]),
try
begin
httpc_manager:which_sessions(profile_name(Profile))
@@ -419,7 +410,6 @@ info() ->
info(default_profile()).
info(Profile) ->
- ?hcrt("info", [{profile, Profile}]),
try
begin
httpc_manager:info(profile_name(Profile))
@@ -440,7 +430,6 @@ reset_cookies() ->
reset_cookies(default_profile()).
reset_cookies(Profile) ->
- ?hcrt("reset cookies", [{profile, Profile}]),
try
begin
httpc_manager:reset_cookies(profile_name(Profile))
@@ -458,7 +447,6 @@ reset_cookies(Profile) ->
%% same behavior as active once for sockets.
%%-------------------------------------------------------------------------
stream_next(Pid) ->
- ?hcrt("stream next", [{handler, Pid}]),
httpc_handler:stream_next(Pid).
@@ -466,7 +454,6 @@ stream_next(Pid) ->
%%% Behaviour callbacks
%%%========================================================================
start_standalone(PropList) ->
- ?hcrt("start standalone", [{proplist, PropList}]),
case proplists:get_value(profile, PropList) of
undefined ->
{error, no_profile};
@@ -477,14 +464,11 @@ start_standalone(PropList) ->
end.
start_service(Config) ->
- ?hcrt("start service", [{config, Config}]),
httpc_profile_sup:start_child(Config).
stop_service(Profile) when is_atom(Profile) ->
- ?hcrt("stop service", [{profile, Profile}]),
httpc_profile_sup:stop_child(Profile);
stop_service(Pid) when is_pid(Pid) ->
- ?hcrt("stop service", [{pid, Pid}]),
case service_info(Pid) of
{ok, [{profile, Profile}]} ->
stop_service(Profile);
@@ -510,7 +494,6 @@ service_info(Pid) ->
%%%========================================================================
%%% Internal functions
%%%========================================================================
-
handle_request(Method, Url,
{Scheme, UserInfo, Host, Port, Path, Query},
Headers0, ContentType, Body0,
@@ -521,9 +504,6 @@ handle_request(Method, Url,
try
begin
- ?hcrt("begin processing", [{started, Started},
- {new_headers, NewHeaders0}]),
-
{NewHeaders, Body} =
case Body0 of
{chunkify, ProcessBody, Acc}
@@ -575,16 +555,13 @@ handle_request(Method, Url,
{ok, RequestId} ->
handle_answer(RequestId, Sync, Options);
{error, Reason} ->
- ?hcrd("request failed", [{reason, Reason}]),
{error, Reason}
end
end
catch
error:{noproc, _} ->
- ?hcrv("noproc", [{profile, Profile}]),
{error, {not_started, Profile}};
throw:Error ->
- ?hcrv("throw", [{error, Error}]),
Error
end.
@@ -620,15 +597,10 @@ handle_answer(RequestId, false, _) ->
handle_answer(RequestId, true, Options) ->
receive
{http, {RequestId, saved_to_file}} ->
- ?hcrt("received saved-to-file", [{request_id, RequestId}]),
{ok, saved_to_file};
{http, {RequestId, {_,_,_} = Result}} ->
- ?hcrt("received answer", [{request_id, RequestId},
- {result, Result}]),
return_answer(Options, Result);
{http, {RequestId, {error, Reason}}} ->
- ?hcrt("received error", [{request_id, RequestId},
- {reason, Reason}]),
{error, Reason}
end.
@@ -1257,18 +1229,14 @@ child_name(Pid, [{Name, Pid} | _]) ->
child_name(Pid, [_ | Children]) ->
child_name(Pid, Children).
-%% d(F) ->
-%% d(F, []).
-
-%% d(F, A) ->
-%% d(get(dbg), F, A).
-
-%% d(true, F, A) ->
-%% io:format(user, "~w:~w:" ++ F ++ "~n", [self(), ?MODULE | A]);
-%% d(_, _, _) ->
-%% ok.
-
host_address(Host, false) ->
Host;
host_address(Host, true) ->
string:strip(string:strip(Host, right, $]), left, $[).
+
+check_body_gen({Fun, _}) when is_function(Fun) ->
+ ok;
+check_body_gen({chunkify, Fun, _}) when is_function(Fun) ->
+ ok;
+check_body_gen(Gen) ->
+ {error, {bad_body_generator, Gen}}.
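The new request/5 clause above also accepts a tuple body (a body generator), which check_body_gen/1 validates only to the extent that it carries a fun. A minimal sketch of the chunkify form, assuming the documented httpc body-generator contract (fun(Acc) -> eof | {ok, Data, NewAcc}); the URL and payload below are hypothetical:

%% Illustrative only -- the URL and payload are hypothetical.
upload(Url) ->
    %% The generator is called with its accumulator; here it sends one
    %% chunk and then signals eof on the following call.
    BodyFun = fun (eof)     -> eof;
                  (Payload) -> {ok, [Payload], eof}
              end,
    %% {chunkify, Fun, Acc} makes httpc send the body with chunked
    %% transfer-encoding, so no content-length header is supplied here.
    httpc:request(post,
                  {Url, [], "application/octet-stream",
                   {chunkify, BodyFun, <<"some payload">>}},
                  [], []).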
diff --git a/lib/inets/src/http_client/httpc_handler.erl b/lib/inets/src/http_client/httpc_handler.erl
index 2e7df8e424..c99200777b 100644
--- a/lib/inets/src/http_client/httpc_handler.erl
+++ b/lib/inets/src/http_client/httpc_handler.erl
@@ -32,7 +32,6 @@
%% Internal Application API
-export([
start_link/4,
- %% connect_and_send/2,
send/2,
cancel/2,
stream_next/1,
@@ -165,14 +164,12 @@ info(Pid) ->
%%--------------------------------------------------------------------
%% Request should not be streamed
stream(BodyPart, #request{stream = none} = Request, _) ->
- ?hcrt("stream - none", []),
{false, BodyPart, Request};
%% Stream to caller
stream(BodyPart, #request{stream = Self} = Request, Code)
when ?IS_STREAMED(Code) andalso
((Self =:= self) orelse (Self =:= {self, once})) ->
- ?hcrt("stream - self", [{stream, Self}, {code, Code}]),
httpc_response:send(Request#request.from,
{Request#request.id, stream, BodyPart}),
{true, <<>>, Request};
@@ -182,10 +179,8 @@ stream(BodyPart, #request{stream = Self} = Request, Code)
%% We keep this for backward compatibility...
stream(BodyPart, #request{stream = Filename} = Request, Code)
when ?IS_STREAMED(Code) andalso is_list(Filename) ->
- ?hcrt("stream - filename", [{stream, Filename}, {code, Code}]),
case file:open(Filename, [write, raw, append, delayed_write]) of
{ok, Fd} ->
- ?hcrt("stream - file open ok", [{fd, Fd}]),
stream(BodyPart, Request#request{stream = Fd}, 200);
{error, Reason} ->
exit({stream_to_file_failed, Reason})
@@ -194,7 +189,6 @@ stream(BodyPart, #request{stream = Filename} = Request, Code)
%% Stream to file
stream(BodyPart, #request{stream = Fd} = Request, Code)
when ?IS_STREAMED(Code) ->
- ?hcrt("stream to file", [{stream, Fd}, {code, Code}]),
case file:write(Fd, BodyPart) of
ok ->
{true, <<>>, Request};
@@ -203,7 +197,6 @@ stream(BodyPart, #request{stream = Fd} = Request, Code)
end;
stream(BodyPart, Request,_) -> % only 200 and 206 responses can be streamed
- ?hcrt("stream - ignore", [{request, Request}]),
{false, BodyPart, Request}.
@@ -257,22 +250,148 @@ init([Parent, Request, Options, ProfileName]) ->
%% {stop, Reason, State} (terminate/2 is called)
%% Description: Handling call messages
%%--------------------------------------------------------------------
-handle_call(#request{address = Addr} = Request, _,
+handle_call(Request, From, State) ->
+ try do_handle_call(Request, From, State) of
+ Result ->
+ Result
+ catch
+ _:Reason ->
+ {stop, {shutdown, Reason} , State}
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Function: handle_cast(Msg, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%% Description: Handling cast messages
+%%--------------------------------------------------------------------
+handle_cast(Msg, State) ->
+ try do_handle_cast(Msg, State) of
+ Result ->
+ Result
+ catch
+ _:Reason ->
+ {stop, {shutdown, Reason} , State}
+ end.
+
+%%--------------------------------------------------------------------
+%% Function: handle_info(Info, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%% Description: Handling all non call/cast messages
+%%--------------------------------------------------------------------
+handle_info(Info, State) ->
+ try do_handle_info(Info, State) of
+ Result ->
+ Result
+ catch
+ _:Reason ->
+ {stop, {shutdown, Reason} , State}
+ end.
+
+%%--------------------------------------------------------------------
+%% Function: terminate(Reason, State) -> _ (ignored by gen_server)
+%% Description: Shutdown the httpc_handler
+%%--------------------------------------------------------------------
+
+%% Init error there is no socket to be closed.
+terminate(normal,
+ #state{request = Request,
+ session = {send_failed, _} = Reason} = State) ->
+ maybe_send_answer(Request,
+ httpc_response:error(Request, Reason),
+ State),
+ ok;
+
+terminate(normal,
+ #state{request = Request,
+ session = {connect_failed, _} = Reason} = State) ->
+ maybe_send_answer(Request,
+ httpc_response:error(Request, Reason),
+ State),
+ ok;
+
+terminate(normal, #state{session = undefined}) ->
+ ok;
+
+%% Init error sending, no session information has been setup but
+%% there is a socket that needs closing.
+terminate(normal,
+ #state{session = #session{id = undefined} = Session}) ->
+ close_socket(Session);
+
+%% Socket closed remotely
+terminate(normal,
+ #state{session = #session{socket = {remote_close, Socket},
+ socket_type = SocketType,
+ id = Id},
+ profile_name = ProfileName,
+ request = Request,
+ timers = Timers,
+ pipeline = Pipeline,
+ keep_alive = KeepAlive} = State) ->
+ %% Clobber session
+ (catch httpc_manager:delete_session(Id, ProfileName)),
+
+ maybe_retry_queue(Pipeline, State),
+ maybe_retry_queue(KeepAlive, State),
+
+ %% Cancel timers
+ cancel_timers(Timers),
+
+ %% Maybe deliver answers to requests
+ deliver_answer(Request),
+
+ %% And, just in case, close our side (**really** overkill)
+ http_transport:close(SocketType, Socket);
+
+terminate(_Reason, #state{session = #session{id = Id,
+ socket = Socket,
+ socket_type = SocketType},
+ request = undefined,
+ profile_name = ProfileName,
+ timers = Timers,
+ pipeline = Pipeline,
+ keep_alive = KeepAlive} = State) ->
+
+ %% Clobber session
+ (catch httpc_manager:delete_session(Id, ProfileName)),
+
+ maybe_retry_queue(Pipeline, State),
+ maybe_retry_queue(KeepAlive, State),
+
+ cancel_timer(Timers#timers.queue_timer, timeout_queue),
+ http_transport:close(SocketType, Socket);
+
+terminate(_Reason, #state{request = undefined}) ->
+ ok;
+
+terminate(Reason, #state{request = Request} = State) ->
+ NewState = maybe_send_answer(Request,
+ httpc_response:error(Request, Reason),
+ State),
+ terminate(Reason, NewState#state{request = undefined}).
+
+%%--------------------------------------------------------------------
+%% Func: code_change(_OldVsn, State, Extra) -> {ok, NewState}
+%% Purpose: Convert process state when code is changed
+%%--------------------------------------------------------------------
+
+code_change(_, State, _) ->
+ {ok, State}.
+
+%%%--------------------------------------------------------------------
+%%% Internal functions
+%%%--------------------------------------------------------------------
+do_handle_call(#request{address = Addr} = Request, _,
#state{status = Status,
session = #session{type = pipeline} = Session,
timers = Timers,
options = #options{proxy = Proxy} = _Options,
profile_name = ProfileName} = State0)
when Status =/= undefined ->
-
- ?hcrv("new request on a pipeline session",
- [{request, Request},
- {profile, ProfileName},
- {status, Status},
- {timers, Timers}]),
-
Address = handle_proxy(Addr, Proxy),
-
case httpc_request:send(Address, Session, Request) of
ok ->
@@ -287,9 +406,8 @@ handle_call(#request{address = Addr} = Request, _,
case State0#state.request of
#request{} = OldRequest -> %% Old request not yet finished
- ?hcrd("old request still not finished", []),
%% Make sure to use the new value of timers in state
- NewTimers = State1#state.timers,
+ NewTimers = State1#state.timers,
NewPipeline = queue:in(Request, State1#state.pipeline),
NewSession =
Session#session{queue_length =
@@ -297,7 +415,6 @@ handle_call(#request{address = Addr} = Request, _,
queue:len(NewPipeline) + 1,
client_close = ClientClose},
insert_session(NewSession, ProfileName),
- ?hcrd("session updated", []),
{reply, ok, State1#state{
request = OldRequest,
pipeline = NewPipeline,
@@ -306,7 +423,6 @@ handle_call(#request{address = Addr} = Request, _,
undefined ->
%% Note: tcp-message receiving has already been
%% activated by handle_pipeline/2.
- ?hcrd("no current request", []),
cancel_timer(Timers#timers.queue_timer,
timeout_queue),
NewSession =
@@ -314,18 +430,16 @@ handle_call(#request{address = Addr} = Request, _,
client_close = ClientClose},
httpc_manager:insert_session(NewSession, ProfileName),
NewTimers = Timers#timers{queue_timer = undefined},
- ?hcrd("session created", []),
State = init_wait_for_response_state(Request, State1#state{session = NewSession,
timers = NewTimers}),
{reply, ok, State}
end;
{error, Reason} ->
- ?hcri("failed sending request", [{reason, Reason}]),
NewPipeline = queue:in(Request, State0#state.pipeline),
- {stop, shutdown, {pipeline_failed, Reason}, State0#state{pipeline = NewPipeline}}
+ {stop, {shutdown, {pipeline_failed, Reason}}, State0#state{pipeline = NewPipeline}}
end;
-handle_call(#request{address = Addr} = Request, _,
+do_handle_call(#request{address = Addr} = Request, _,
#state{status = Status,
session = #session{type = keep_alive} = Session,
timers = Timers,
@@ -333,17 +447,11 @@ handle_call(#request{address = Addr} = Request, _,
profile_name = ProfileName} = State0)
when Status =/= undefined ->
- ?hcrv("new request on a keep-alive session",
- [{request, Request},
- {profile, ProfileName},
- {status, Status}]),
-
ClientClose = httpc_request:is_client_closing(Request#request.headers),
case State0#state.request of
#request{} -> %% Old request not yet finished
%% Make sure to use the new value of timers in state
- ?hcrd("old request still not finished", []),
NewKeepAlive = queue:in(Request, State0#state.keep_alive),
NewSession =
Session#session{queue_length =
@@ -351,13 +459,11 @@ handle_call(#request{address = Addr} = Request, _,
queue:len(NewKeepAlive) + 1,
client_close = ClientClose},
insert_session(NewSession, ProfileName),
- ?hcrd("session updated", []),
{reply, ok, State0#state{keep_alive = NewKeepAlive,
session = NewSession}};
undefined ->
%% Note: tcp-message receiving has already been
%% activated by handle_pipeline/2.
- ?hcrd("no current request", []),
cancel_timer(Timers#timers.queue_timer,
timeout_queue),
NewTimers = Timers#timers{queue_timer = undefined},
@@ -365,8 +471,6 @@ handle_call(#request{address = Addr} = Request, _,
Address = handle_proxy(Addr, Proxy),
case httpc_request:send(Address, Session, Request) of
ok ->
- ?hcrd("request sent", []),
-
%% Activate the request time out for the new request
State2 =
activate_request_timeout(State1#state{request = Request}),
@@ -377,22 +481,13 @@ handle_call(#request{address = Addr} = Request, _,
State = init_wait_for_response_state(Request, State2#state{session = NewSession}),
{reply, ok, State};
{error, Reason} ->
- ?hcri("failed sending request", [{reason, Reason}]),
- {stop, shutdown, {keepalive_failed, Reason}, State1}
+ {stop, {shutdown, {keepalive_failed, Reason}}, State1}
end
end;
-
-handle_call(info, _, State) ->
+do_handle_call(info, _, State) ->
Info = handler_info(State),
{reply, Info, State}.
-%%--------------------------------------------------------------------
-%% Function: handle_cast(Msg, State) -> {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%% Description: Handling cast messages
-%%--------------------------------------------------------------------
-
%% When the request in process has been canceled the handler process is
%% stopped and the pipelined requests will be reissued or remaining
%% requests will be sent on a new connection. This is
@@ -405,145 +500,102 @@ handle_call(info, _, State) ->
%% handle_keep_alive_queue/2 on the other hand will just skip the
%% request as if it was never issued as in this case the request will
%% not have been sent.
-handle_cast({cancel, RequestId},
+do_handle_cast({cancel, RequestId},
#state{request = #request{id = RequestId} = Request,
- profile_name = ProfileName,
canceled = Canceled} = State) ->
- ?hcrv("cancel current request", [{request_id, RequestId},
- {profile, ProfileName},
- {canceled, Canceled}]),
{stop, normal,
State#state{canceled = [RequestId | Canceled],
request = Request#request{from = answer_sent}}};
-handle_cast({cancel, RequestId},
- #state{profile_name = ProfileName,
- request = #request{id = CurrId},
- canceled = Canceled} = State) ->
- ?hcrv("cancel", [{request_id, RequestId},
- {curr_req_id, CurrId},
- {profile, ProfileName},
- {canceled, Canceled}]),
+do_handle_cast({cancel, RequestId},
+ #state{request = #request{},
+ canceled = Canceled} = State) ->
{noreply, State#state{canceled = [RequestId | Canceled]}};
-handle_cast({cancel, RequestId},
- #state{profile_name = ProfileName,
- request = undefined,
- canceled = Canceled} = State) ->
- ?hcrv("cancel", [{request_id, RequestId},
- {curr_req_id, undefined},
- {profile, ProfileName},
- {canceled, Canceled}]),
+do_handle_cast({cancel, _},
+ #state{request = undefined} = State) ->
{noreply, State};
-
-handle_cast(stream_next, #state{session = Session} = State) ->
+do_handle_cast(stream_next, #state{session = Session} = State) ->
activate_once(Session),
%% Inactivate the #state.once here because we don't want
%% next_body_chunk/1 to activate the socket twice.
{noreply, State#state{once = inactive}}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_info(Info, State) -> {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%% Description: Handling all non call/cast messages
-%%--------------------------------------------------------------------
-handle_info({Proto, _Socket, Data},
+do_handle_info({Proto, _Socket, Data},
#state{mfa = {Module, Function, Args},
- request = #request{method = Method,
- stream = Stream} = Request,
+ request = #request{method = Method} = Request,
session = Session,
status_line = StatusLine} = State)
when (Proto =:= tcp) orelse
(Proto =:= ssl) orelse
(Proto =:= httpc_handler) ->
- ?hcri("received data", [{proto, Proto},
- {module, Module},
- {function, Function},
- {method, Method},
- {stream, Stream},
- {session, Session},
- {status_line, StatusLine}]),
-
- FinalResult =
- try Module:Function([Data | Args]) of
- {ok, Result} ->
- ?hcrd("data processed - ok", []),
- handle_http_msg(Result, State);
- {_, whole_body, _} when Method =:= head ->
- ?hcrd("data processed - whole body", []),
- handle_response(State#state{body = <<>>});
- {Module, whole_body, [Body, Length]} ->
- ?hcrd("data processed - whole body", [{length, Length}]),
- {_, Code, _} = StatusLine,
- {Streamed, NewBody, NewRequest} = stream(Body, Request, Code),
- %% When we stream we will not keep the already
- %% streamed data, that would be a waste of memory.
- NewLength =
- case Streamed of
- false ->
- Length;
- true ->
- Length - size(Body)
- end,
-
- NewState = next_body_chunk(State, Code),
- NewMFA = {Module, whole_body, [NewBody, NewLength]},
- {noreply, NewState#state{mfa = NewMFA,
- request = NewRequest}};
- {Module, decode_size,
- [TotalChunk, HexList,
+ try Module:Function([Data | Args]) of
+ {ok, Result} ->
+ handle_http_msg(Result, State);
+ {_, whole_body, _} when Method =:= head ->
+ handle_response(State#state{body = <<>>});
+ {Module, whole_body, [Body, Length]} ->
+ {_, Code, _} = StatusLine,
+ {Streamed, NewBody, NewRequest} = stream(Body, Request, Code),
+ %% When we stream we will not keep the already
+ %% streamed data, that would be a waste of memory.
+ NewLength =
+ case Streamed of
+ false ->
+ Length;
+ true ->
+ Length - size(Body)
+ end,
+
+ NewState = next_body_chunk(State, Code),
+ NewMFA = {Module, whole_body, [NewBody, NewLength]},
+ {noreply, NewState#state{mfa = NewMFA,
+ request = NewRequest}};
+ {Module, decode_size,
+ [TotalChunk, HexList, AccHeaderSize,
{MaxBodySize, BodySoFar, AccLength, MaxHeaderSize}]}
- when BodySoFar =/= <<>> ->
- ?hcrd("data processed - decode_size", []),
- %% The response body is chunk-encoded. Steal decoded
- %% chunks as much as possible to stream.
- {_, Code, _} = StatusLine,
- {_, NewBody, NewRequest} = stream(BodySoFar, Request, Code),
- NewState = next_body_chunk(State, Code),
- NewMFA = {Module, decode_size,
- [TotalChunk, HexList,
+ when BodySoFar =/= <<>> ->
+ %% The response body is chunk-encoded. Steal decoded
+ %% chunks as much as possible to stream.
+ {_, Code, _} = StatusLine,
+ {_, NewBody, NewRequest} = stream(BodySoFar, Request, Code),
+ NewState = next_body_chunk(State, Code),
+ NewMFA = {Module, decode_size,
+ [TotalChunk, HexList, AccHeaderSize,
{MaxBodySize, NewBody, AccLength, MaxHeaderSize}]},
+ {noreply, NewState#state{mfa = NewMFA,
+ request = NewRequest}};
+ {Module, decode_data,
+ [ChunkSize, TotalChunk,
+ {MaxBodySize, BodySoFar, AccLength, MaxHeaderSize}]}
+ when TotalChunk =/= <<>> orelse BodySoFar =/= <<>> ->
+ %% The response body is chunk-encoded. Steal decoded
+ %% chunks as much as possible to stream.
+ ChunkSizeToSteal = min(ChunkSize, byte_size(TotalChunk)),
+ <<StolenChunk:ChunkSizeToSteal/binary, NewTotalChunk/binary>> = TotalChunk,
+ StolenBody = <<BodySoFar/binary, StolenChunk/binary>>,
+ NewChunkSize = ChunkSize - ChunkSizeToSteal,
+ {_, Code, _} = StatusLine,
+
+ {_, NewBody, NewRequest} = stream(StolenBody, Request, Code),
+ NewState = next_body_chunk(State, Code),
+ NewMFA = {Module, decode_data,
+ [NewChunkSize, NewTotalChunk,
+ {MaxBodySize, NewBody, AccLength, MaxHeaderSize}]},
{noreply, NewState#state{mfa = NewMFA,
request = NewRequest}};
- {Module, decode_data,
- [ChunkSize, TotalChunk,
- {MaxBodySize, BodySoFar, AccLength, MaxHeaderSize}]}
- when TotalChunk =/= <<>> orelse BodySoFar =/= <<>> ->
- ?hcrd("data processed - decode_data", []),
- %% The response body is chunk-encoded. Steal decoded
- %% chunks as much as possible to stream.
- ChunkSizeToSteal = min(ChunkSize, byte_size(TotalChunk)),
- <<StolenChunk:ChunkSizeToSteal/binary, NewTotalChunk/binary>> = TotalChunk,
- StolenBody = <<BodySoFar/binary, StolenChunk/binary>>,
- NewChunkSize = ChunkSize - ChunkSizeToSteal,
- {_, Code, _} = StatusLine,
-
- {_, NewBody, NewRequest} = stream(StolenBody, Request, Code),
- NewState = next_body_chunk(State, Code),
- NewMFA = {Module, decode_data,
- [NewChunkSize, NewTotalChunk,
- {MaxBodySize, NewBody, AccLength, MaxHeaderSize}]},
- {noreply, NewState#state{mfa = NewMFA,
- request = NewRequest}};
- NewMFA ->
- ?hcrd("data processed - new mfa", []),
- activate_once(Session),
- {noreply, State#state{mfa = NewMFA}}
- catch
- _:_Reason ->
- ?hcrd("data processing exit", [{exit, _Reason}]),
- ClientReason = {could_not_parse_as_http, Data},
- ClientErrMsg = httpc_response:error(Request, ClientReason),
- NewState = answer_request(Request, ClientErrMsg, State),
- {stop, normal, NewState}
- end,
- ?hcri("data processed", [{final_result, FinalResult}]),
- FinalResult;
-
+ NewMFA ->
+ activate_once(Session),
+ {noreply, State#state{mfa = NewMFA}}
+ catch
+ _:Reason ->
+ ClientReason = {could_not_parse_as_http, Data},
+ ClientErrMsg = httpc_response:error(Request, ClientReason),
+ NewState = answer_request(Request, ClientErrMsg, State),
+ {stop, {shutdown, Reason}, NewState}
+ end;
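The decode_data clause above streams whatever part of the current chunk has already arrived. In isolation, the binary split it performs looks roughly like this sketch (the helper name steal_chunk/3 is illustrative and not part of the module):

    steal_chunk(ChunkSize, TotalChunk, BodySoFar) ->
        %% Take as much of the current chunk as has actually been received.
        Steal = min(ChunkSize, byte_size(TotalChunk)),
        <<Stolen:Steal/binary, Rest/binary>> = TotalChunk,
        %% Returns: bytes of the chunk still expected, the unconsumed buffer,
        %% and the accumulated body handed to the streaming code.
        {ChunkSize - Steal, Rest, <<BodySoFar/binary, Stolen/binary>>}.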
-handle_info({Proto, Socket, Data},
+do_handle_info({Proto, Socket, Data},
#state{mfa = MFA,
request = Request,
session = Session,
@@ -568,200 +620,107 @@ handle_info({Proto, Socket, Data},
{noreply, State};
-
%% The server may close the connection to indicate that the
%% whole body is now sent instead of sending a length
%% indicator.
-handle_info({tcp_closed, _}, State = #state{mfa = {_, whole_body, Args}}) ->
+do_handle_info({tcp_closed, _}, State = #state{mfa = {_, whole_body, Args}}) ->
handle_response(State#state{body = hd(Args)});
-handle_info({ssl_closed, _}, State = #state{mfa = {_, whole_body, Args}}) ->
+do_handle_info({ssl_closed, _}, State = #state{mfa = {_, whole_body, Args}}) ->
handle_response(State#state{body = hd(Args)});
%%% Server closes idle pipeline
-handle_info({tcp_closed, _}, State = #state{request = undefined}) ->
+do_handle_info({tcp_closed, _}, State = #state{request = undefined}) ->
{stop, normal, State};
-handle_info({ssl_closed, _}, State = #state{request = undefined}) ->
+do_handle_info({ssl_closed, _}, State = #state{request = undefined}) ->
{stop, normal, State};
%%% Error cases
-handle_info({tcp_closed, _}, #state{session = Session0} = State) ->
+do_handle_info({tcp_closed, _}, #state{session = Session0} = State) ->
Socket = Session0#session.socket,
Session = Session0#session{socket = {remote_close, Socket}},
%% {stop, session_remotly_closed, State};
{stop, normal, State#state{session = Session}};
-handle_info({ssl_closed, _}, #state{session = Session0} = State) ->
+do_handle_info({ssl_closed, _}, #state{session = Session0} = State) ->
Socket = Session0#session.socket,
Session = Session0#session{socket = {remote_close, Socket}},
%% {stop, session_remotly_closed, State};
{stop, normal, State#state{session = Session}};
-handle_info({tcp_error, _, _} = Reason, State) ->
+do_handle_info({tcp_error, _, _} = Reason, State) ->
{stop, Reason, State};
-handle_info({ssl_error, _, _} = Reason, State) ->
+do_handle_info({ssl_error, _, _} = Reason, State) ->
{stop, Reason, State};
%% Timeouts
%% Internally, to a request handling process, a request timeout is
%% seen as a canceled request.
-handle_info({timeout, RequestId},
+do_handle_info({timeout, RequestId},
#state{request = #request{id = RequestId} = Request,
canceled = Canceled,
profile_name = ProfileName} = State) ->
- ?hcri("timeout of current request", [{id, RequestId}]),
httpc_response:send(Request#request.from,
httpc_response:error(Request, timeout)),
httpc_manager:request_done(RequestId, ProfileName),
- ?hcrv("response (timeout) sent - now terminate", []),
{stop, normal,
State#state{request = Request#request{from = answer_sent},
canceled = [RequestId | Canceled]}};
-handle_info({timeout, RequestId},
+do_handle_info({timeout, RequestId},
#state{canceled = Canceled,
profile_name = ProfileName} = State) ->
- ?hcri("timeout", [{id, RequestId}]),
Filter =
fun(#request{id = Id, from = From} = Request) when Id =:= RequestId ->
- ?hcrv("found request", [{id, Id}, {from, From}]),
%% Notify the owner
httpc_response:send(From,
httpc_response:error(Request, timeout)),
httpc_manager:request_done(RequestId, ProfileName),
- ?hcrv("response (timeout) sent", []),
[Request#request{from = answer_sent}];
(_) ->
true
end,
case State#state.status of
pipeline ->
- ?hcrd("pipeline", []),
Pipeline = queue:filter(Filter, State#state.pipeline),
{noreply, State#state{canceled = [RequestId | Canceled],
pipeline = Pipeline}};
keep_alive ->
- ?hcrd("keep_alive", []),
KeepAlive = queue:filter(Filter, State#state.keep_alive),
{noreply, State#state{canceled = [RequestId | Canceled],
keep_alive = KeepAlive}}
end;
-handle_info(timeout_queue, State = #state{request = undefined}) ->
+do_handle_info(timeout_queue, State = #state{request = undefined}) ->
{stop, normal, State};
%% Timing was such that the queue_timeout was not canceled!
-handle_info(timeout_queue, #state{timers = Timers} = State) ->
+do_handle_info(timeout_queue, #state{timers = Timers} = State) ->
{noreply, State#state{timers =
Timers#timers{queue_timer = undefined}}};
%% Setting up the connection to the server somehow failed.
-handle_info({init_error, Tag, ClientErrMsg},
+do_handle_info({init_error, Reason, ClientErrMsg},
State = #state{request = Request}) ->
- ?hcrv("init error", [{tag, Tag}, {client_error, ClientErrMsg}]),
NewState = answer_request(Request, ClientErrMsg, State),
- {stop, normal, NewState};
-
+ {stop, {shutdown, Reason}, NewState};
%%% httpc_manager process dies.
-handle_info({'EXIT', _, _}, State = #state{request = undefined}) ->
+do_handle_info({'EXIT', _, _}, State = #state{request = undefined}) ->
{stop, normal, State};
%% Try to finish the current request anyway; there is a fairly high
%% probability that it can be done successfully.
%% Then close the connection, hopefully a new manager is started that
%% can retry requests in the pipeline.
-handle_info({'EXIT', _, _}, State) ->
+do_handle_info({'EXIT', _, _}, State) ->
{noreply, State#state{status = close}}.
-%%--------------------------------------------------------------------
-%% Function: terminate(Reason, State) -> _ (ignored by gen_server)
-%% Description: Shutdown the httpc_handler
-%%--------------------------------------------------------------------
-
-%% Init error there is no socket to be closed.
-terminate(normal,
- #state{request = Request,
- session = {send_failed, AReason} = Reason} = State) ->
- ?hcrd("terminate", [{send_reason, AReason}, {request, Request}]),
- maybe_send_answer(Request,
- httpc_response:error(Request, Reason),
- State),
- ok;
-
-terminate(normal,
- #state{request = Request,
- session = {connect_failed, AReason} = Reason} = State) ->
- ?hcrd("terminate", [{connect_reason, AReason}, {request, Request}]),
- maybe_send_answer(Request,
- httpc_response:error(Request, Reason),
- State),
- ok;
-
-terminate(normal, #state{session = undefined}) ->
- ok;
-
-%% Init error sending, no session information has been setup but
-%% there is a socket that needs closing.
-terminate(normal,
- #state{session = #session{id = undefined} = Session}) ->
- close_socket(Session);
-
-%% Socket closed remotely
-terminate(normal,
- #state{session = #session{socket = {remote_close, Socket},
- socket_type = SocketType,
- id = Id},
- profile_name = ProfileName,
- request = Request,
- timers = Timers,
- pipeline = Pipeline,
- keep_alive = KeepAlive} = State) ->
- ?hcrt("terminate(normal) - remote close",
- [{id, Id}, {profile, ProfileName}]),
-
- %% Clobber session
- (catch httpc_manager:delete_session(Id, ProfileName)),
-
- maybe_retry_queue(Pipeline, State),
- maybe_retry_queue(KeepAlive, State),
-
- %% Cancel timers
- cancel_timers(Timers),
-
- %% Maybe deliver answers to requests
- deliver_answer(Request),
-
- %% And, just in case, close our side (**really** overkill)
- http_transport:close(SocketType, Socket);
-
-terminate(Reason, #state{session = #session{id = Id,
- socket = Socket,
- socket_type = SocketType},
- request = undefined,
- profile_name = ProfileName,
- timers = Timers,
- pipeline = Pipeline,
- keep_alive = KeepAlive} = State) ->
- ?hcrt("terminate",
- [{id, Id}, {profile, ProfileName}, {reason, Reason}]),
-
- %% Clobber session
- (catch httpc_manager:delete_session(Id, ProfileName)),
-
- maybe_retry_queue(Pipeline, State),
- maybe_retry_queue(KeepAlive, State),
-
- cancel_timer(Timers#timers.queue_timer, timeout_queue),
- http_transport:close(SocketType, Socket);
+call(Msg, Pid) ->
+ call(Msg, Pid, infinity).
-terminate(Reason, #state{request = undefined}) ->
- ?hcrt("terminate", [{reason, Reason}]),
- ok;
+call(Msg, Pid, Timeout) ->
+ gen_server:call(Pid, Msg, Timeout).
-terminate(Reason, #state{request = Request} = State) ->
- ?hcrd("terminate", [{reason, Reason}, {request, Request}]),
- NewState = maybe_send_answer(Request,
- httpc_response:error(Request, Reason),
- State),
- terminate(Reason, NewState#state{request = undefined}).
+cast(Msg, Pid) ->
+ gen_server:cast(Pid, Msg).
maybe_retry_queue(Q, State) ->
case queue:is_empty(Q) of
@@ -776,45 +735,13 @@ maybe_send_answer(#request{from = answer_sent}, _Reason, State) ->
maybe_send_answer(Request, Answer, State) ->
answer_request(Request, Answer, State).
-deliver_answer(#request{id = Id, from = From} = Request)
+deliver_answer(#request{from = From} = Request)
when is_pid(From) ->
Response = httpc_response:error(Request, socket_closed_remotely),
- ?hcrd("deliver answer", [{id, Id}, {from, From}, {response, Response}]),
httpc_response:send(From, Response);
-deliver_answer(Request) ->
- ?hcrd("skip deliver answer", [{request, Request}]),
+deliver_answer(_Request) ->
ok.
-
-%%--------------------------------------------------------------------
-%% Func: code_change(_OldVsn, State, Extra) -> {ok, NewState}
-%% Purpose: Convert process state when code is changed
-%%--------------------------------------------------------------------
-
-code_change(_, State, _) ->
- {ok, State}.
-
-
-%% new_http_options({http_options, TimeOut, AutoRedirect, SslOpts,
-%% Auth, Relaxed}) ->
-%% {http_options, "HTTP/1.1", TimeOut, AutoRedirect, SslOpts,
-%% Auth, Relaxed}.
-
-%% old_http_options({http_options, _, TimeOut, AutoRedirect,
-%% SslOpts, Auth, Relaxed}) ->
-%% {http_options, TimeOut, AutoRedirect, SslOpts, Auth, Relaxed}.
-
-%% new_queue(Queue, Fun) ->
-%% List = queue:to_list(Queue),
-%% NewList =
-%% lists:map(fun(Request) ->
-%% Settings =
-%% Fun(Request#request.settings),
-%% Request#request{settings = Settings}
-%% end, List),
-%% queue:from_list(NewList).
-
-
%%%--------------------------------------------------------------------
%%% Internal functions
%%%--------------------------------------------------------------------
@@ -872,26 +799,21 @@ connect(SocketType, ToAddress,
connect_and_send_first_request(Address, Request, #state{options = Options} = State) ->
SocketType = socket_type(Request),
ConnTimeout = (Request#request.settings)#http_options.connect_timeout,
- ?hcri("connect",
- [{address, Address}, {request, Request}, {options, Options}]),
case connect(SocketType, Address, Options, ConnTimeout) of
{ok, Socket} ->
ClientClose =
- httpc_request:is_client_closing(
- Request#request.headers),
+ httpc_request:is_client_closing(
+ Request#request.headers),
SessionType = httpc_manager:session_type(Options),
SocketType = socket_type(Request),
Session = #session{id = {Request#request.address, self()},
scheme = Request#request.scheme,
socket = Socket,
- socket_type = SocketType,
- client_close = ClientClose,
- type = SessionType},
- ?hcri("connected - now send first request", [{socket, Socket}]),
-
+ socket_type = SocketType,
+ client_close = ClientClose,
+ type = SessionType},
case httpc_request:send(Address, Session, Request) of
ok ->
- ?hcri("first request sent", []),
TmpState = State#state{request = Request,
session = Session,
mfa = init_mfa(Request, State),
@@ -949,12 +871,6 @@ handler_info(#state{request = Request,
options = _Options,
timers = _Timers} = _State) ->
- ?hcrt("handler info", [{request, Request},
- {session, Session},
- {pipeline, Pipeline},
- {keep_alive, KeepAlive},
- {status, Status}]),
-
%% Info about the current request
RequestInfo =
case Request of
@@ -965,8 +881,6 @@ handler_info(#state{request = Request,
[{id, Id}, {started, ReqStarted}]
end,
- ?hcrt("handler info", [{request_info, RequestInfo}]),
-
%% Info about the current session/socket
SessionType = Session#session.type,
QueueLen = case SessionType of
@@ -979,22 +893,12 @@ handler_info(#state{request = Request,
Socket = Session#session.socket,
SocketType = Session#session.socket_type,
- ?hcrt("handler info", [{session_type, SessionType},
- {queue_length, QueueLen},
- {scheme, Scheme},
- {socket, Socket}]),
-
SocketOpts = http_transport:getopts(SocketType, Socket),
SocketStats = http_transport:getstat(SocketType, Socket),
Remote = http_transport:peername(SocketType, Socket),
Local = http_transport:sockname(SocketType, Socket),
- ?hcrt("handler info", [{remote, Remote},
- {local, Local},
- {socket_opts, SocketOpts},
- {socket_stats, SocketStats}]),
-
SocketInfo = [{remote, Remote},
{local, Local},
{socket_opts, SocketOpts},
@@ -1014,7 +918,6 @@ handler_info(#state{request = Request,
handle_http_msg({Version, StatusCode, ReasonPharse, Headers, Body},
State = #state{request = Request}) ->
- ?hcrt("handle_http_msg", [{headers, Headers}]),
case Headers#http_response_h.'content-type' of
"multipart/byteranges" ++ _Param ->
exit({not_yet_implemented, multypart_byteranges});
@@ -1028,15 +931,12 @@ handle_http_msg({Version, StatusCode, ReasonPharse, Headers, Body},
end;
handle_http_msg({ChunkedHeaders, Body},
#state{status_line = {_, Code, _}, headers = Headers} = State) ->
- ?hcrt("handle_http_msg",
- [{chunked_headers, ChunkedHeaders}, {headers, Headers}]),
NewHeaders = http_chunk:handle_headers(Headers, ChunkedHeaders),
{_, NewBody, NewRequest} = stream(Body, State#state.request, Code),
handle_response(State#state{headers = NewHeaders,
body = NewBody,
request = NewRequest});
handle_http_msg(Body, #state{status_line = {_,Code, _}} = State) ->
- ?hcrt("handle_http_msg", [{code, Code}]),
{_, NewBody, NewRequest} = stream(Body, State#state.request, Code),
handle_response(State#state{body = NewBody, request = NewRequest}).
@@ -1051,41 +951,28 @@ handle_http_body(_, #state{status = {ssl_tunnel, Request},
{stop, normal, NewState};
handle_http_body(<<>>, #state{status_line = {_,304, _}} = State) ->
- ?hcrt("handle_http_body - 304", []),
handle_response(State#state{body = <<>>});
handle_http_body(<<>>, #state{status_line = {_,204, _}} = State) ->
- ?hcrt("handle_http_body - 204", []),
handle_response(State#state{body = <<>>});
handle_http_body(<<>>, #state{request = #request{method = head}} = State) ->
- ?hcrt("handle_http_body - head", []),
handle_response(State#state{body = <<>>});
handle_http_body(Body, #state{headers = Headers,
max_body_size = MaxBodySize,
status_line = {_,Code, _},
request = Request} = State) ->
- ?hcrt("handle_http_body",
- [{max_body_size, MaxBodySize}, {headers, Headers}, {code, Code}]),
TransferEnc = Headers#http_response_h.'transfer-encoding',
case case_insensitive_header(TransferEnc) of
"chunked" ->
- ?hcrt("handle_http_body - chunked", []),
try http_chunk:decode(Body, State#state.max_body_size,
State#state.max_header_size) of
{Module, Function, Args} ->
- ?hcrt("handle_http_body - new mfa",
- [{module, Module},
- {function, Function},
- {args, Args}]),
NewState = next_body_chunk(State, Code),
{noreply, NewState#state{mfa =
{Module, Function, Args}}};
{ok, {ChunkedHeaders, NewBody}} ->
- ?hcrt("handle_http_body - new body",
- [{chunked_headers, ChunkedHeaders},
- {new_body, NewBody}]),
NewHeaders = http_chunk:handle_headers(Headers,
ChunkedHeaders),
case Body of
@@ -1107,7 +994,6 @@ handle_http_body(Body, #state{headers = Headers,
{stop, normal, NewState}
end;
Enc when Enc =:= "identity"; Enc =:= undefined ->
- ?hcrt("handle_http_body - identity", []),
Length =
list_to_integer(Headers#http_response_h.'content-length'),
case ((Length =< MaxBodySize) orelse (MaxBodySize =:= nolimit)) of
@@ -1131,7 +1017,6 @@ handle_http_body(Body, #state{headers = Headers,
{stop, normal, NewState}
end;
Encoding when is_list(Encoding) ->
- ?hcrt("handle_http_body - other", [{encoding, Encoding}]),
NewState = answer_request(Request,
httpc_response:error(Request,
unknown_encoding),
@@ -1152,18 +1037,10 @@ handle_response(#state{request = Request,
options = Options,
profile_name = ProfileName} = State)
when Status =/= new ->
-
- ?hcrd("handle response", [{profile, ProfileName},
- {status, Status},
- {request, Request},
- {session, Session},
- {status_line, StatusLine}]),
-
handle_cookies(Headers, Request, Options, ProfileName),
case httpc_response:result({StatusLine, Headers, Body}, Request) of
%% 100-continue
continue ->
- ?hcrd("handle response - continue", []),
%% Send request body
{_, RequestBody} = Request#request.content,
send_raw(Session, RequestBody),
@@ -1180,7 +1057,6 @@ handle_response(#state{request = Request,
%% Ignore unexpected 100-continue response and receive the
%% actual response that the server will send right away.
{ignore, Data} ->
- ?hcrd("handle response - ignore", [{data, Data}]),
Relaxed = (Request#request.settings)#http_options.relaxed,
MFA = {httpc_response, parse,
[State#state.max_header_size, Relaxed]},
@@ -1194,23 +1070,17 @@ handle_response(#state{request = Request,
%% obsolete and the manager will create a new request
%% with the same id as the current.
{redirect, NewRequest, Data} ->
- ?hcrt("handle response - redirect",
- [{new_request, NewRequest}, {data, Data}]),
ok = httpc_manager:redirect_request(NewRequest, ProfileName),
handle_queue(State#state{request = undefined}, Data);
{retry, TimeNewRequest, Data} ->
- ?hcrt("handle response - retry",
- [{time_new_request, TimeNewRequest}, {data, Data}]),
ok = httpc_manager:retry_request(TimeNewRequest, ProfileName),
handle_queue(State#state{request = undefined}, Data);
{ok, Msg, Data} ->
- ?hcrd("handle response - ok", []),
stream_remaining_body(Body, Request, StatusLine),
end_stream(StatusLine, Request),
NewState = maybe_send_answer(Request, Msg, State),
handle_queue(NewState, Data);
{stop, Msg} ->
- ?hcrd("handle response - stop", [{msg, Msg}]),
end_stream(StatusLine, Request),
NewState = maybe_send_answer(Request, Msg, State),
{stop, normal, NewState}
@@ -1245,28 +1115,19 @@ handle_pipeline(#state{status = pipeline,
profile_name = ProfileName,
options = #options{pipeline_timeout = TimeOut}} = State,
Data) ->
-
- ?hcrd("handle pipeline", [{profile, ProfileName},
- {session, Session},
- {timeout, TimeOut}]),
-
case queue:out(State#state.pipeline) of
{empty, _} ->
- ?hcrd("pipeline queue empty", []),
handle_empty_queue(Session, ProfileName, TimeOut, State);
{{value, NextRequest}, Pipeline} ->
- ?hcrd("pipeline queue non-empty", []),
case lists:member(NextRequest#request.id,
State#state.canceled) of
true ->
- ?hcrv("next request had been cancelled", []),
%% See comment for handle_cast({cancel, RequestId})
{stop, normal,
State#state{request =
NextRequest#request{from = answer_sent},
pipeline = Pipeline}};
false ->
- ?hcrv("next request", [{request, NextRequest}]),
NewSession =
Session#session{queue_length =
%% Queue + current
@@ -1283,25 +1144,16 @@ handle_keep_alive_queue(#state{status = keep_alive,
options = #options{keep_alive_timeout = TimeOut,
proxy = Proxy}} = State,
Data) ->
-
- ?hcrd("handle keep_alive", [{profile, ProfileName},
- {session, Session},
- {timeout, TimeOut}]),
-
case queue:out(State#state.keep_alive) of
{empty, _} ->
- ?hcrd("keep_alive queue empty", []),
handle_empty_queue(Session, ProfileName, TimeOut, State);
{{value, NextRequest}, KeepAlive} ->
- ?hcrd("keep_alive queue non-empty", []),
case lists:member(NextRequest#request.id,
State#state.canceled) of
true ->
- ?hcrv("next request has already been canceled", []),
handle_keep_alive_queue(
State#state{keep_alive = KeepAlive}, Data);
false ->
- ?hcrv("next request", [{request, NextRequest}]),
#request{address = Addr} = NextRequest,
Address = handle_proxy(Addr, Proxy),
case httpc_request:send(Address, Session, NextRequest) of
@@ -1314,7 +1166,6 @@ handle_keep_alive_queue(#state{status = keep_alive,
end
end
end.
-
handle_empty_queue(Session, ProfileName, TimeOut, State) ->
%% The server may choose to terminate an idle pipeline | keep_alive session;
%% in this case we want to receive the close message
@@ -1350,7 +1201,6 @@ init_wait_for_response_state(Request, State) ->
status_line = undefined,
headers = undefined,
body = undefined}.
-
gather_data(<<>>, Session, State) ->
activate_once(Session),
{noreply, State};
@@ -1381,10 +1231,6 @@ activate_request_timeout(
State;
_ ->
ReqId = Request#request.id,
- ?hcrt("activate request timer",
- [{request_id, ReqId},
- {time_consumed, t() - Request#request.started},
- {timeout, Timeout}]),
Msg = {timeout, ReqId},
Ref = erlang:send_after(Timeout, self(), Msg),
Request2 = Request#request{timer = Ref},
@@ -1427,10 +1273,6 @@ try_to_enable_pipeline_or_keep_alive(
status_line = {Version, _, _},
headers = Headers,
profile_name = ProfileName} = State) ->
- ?hcrd("try to enable pipeline or keep-alive",
- [{version, Version},
- {headers, Headers},
- {session, Session}]),
case is_keep_alive_enabled_server(Version, Headers) andalso
is_keep_alive_connection(Headers, Session) of
true ->
@@ -1455,7 +1297,6 @@ answer_request(#request{id = RequestId, from = From} = Request, Msg,
#state{session = Session,
timers = Timers,
profile_name = ProfileName} = State) ->
- ?hcrt("answer request", [{request, Request}, {msg, Msg}]),
httpc_response:send(From, Msg),
RequestTimers = Timers#timers.request_timers,
TimerRef =
@@ -1602,42 +1443,32 @@ socket_type(#request{scheme = http}) ->
ip_comm;
socket_type(#request{scheme = https, settings = Settings}) ->
Settings#http_options.ssl.
-%% socket_type(http) ->
-%% ip_comm;
-%% socket_type(https) ->
-%% {ssl1, []}. %% Dummy value ok for ex setopts that does not use this value
start_stream({_Version, _Code, _ReasonPhrase}, _Headers,
#request{stream = none} = Request) ->
- ?hcrt("start stream - none", []),
{ok, Request};
start_stream({_Version, Code, _ReasonPhrase}, Headers,
#request{stream = self} = Request)
when ?IS_STREAMED(Code) ->
- ?hcrt("start stream - self", [{code, Code}]),
Msg = httpc_response:stream_start(Headers, Request, ignore),
httpc_response:send(Request#request.from, Msg),
{ok, Request};
start_stream({_Version, Code, _ReasonPhrase}, Headers,
#request{stream = {self, once}} = Request)
when ?IS_STREAMED(Code) ->
- ?hcrt("start stream - self:once", [{code, Code}]),
Msg = httpc_response:stream_start(Headers, Request, self()),
httpc_response:send(Request#request.from, Msg),
{ok, Request};
start_stream({_Version, Code, _ReasonPhrase}, _Headers,
#request{stream = Filename} = Request)
when ?IS_STREAMED(Code) andalso is_list(Filename) ->
- ?hcrt("start stream", [{code, Code}, {filename, Filename}]),
case file:open(Filename, [write, raw, append, delayed_write]) of
{ok, Fd} ->
- ?hcri("start stream - file open ok", [{fd, Fd}]),
{ok, Request#request{stream = Fd}};
{error, Reason} ->
exit({stream_to_file_failed, Reason})
end;
start_stream(_StatusLine, _Headers, Request) ->
- ?hcrt("start stream - no op", []),
{ok, Request}.
stream_remaining_body(<<>>, _, _) ->
@@ -1648,16 +1479,12 @@ stream_remaining_body(Body, Request, {_, Code, _}) ->
%% Note the end stream message is handled by httpc_response and will
%% be sent by answer_request
end_stream(_, #request{stream = none}) ->
- ?hcrt("end stream - none", []),
ok;
end_stream(_, #request{stream = self}) ->
- ?hcrt("end stream - self", []),
ok;
end_stream(_, #request{stream = {self, once}}) ->
- ?hcrt("end stream - self:once", []),
ok;
end_stream({_,200,_}, #request{stream = Fd}) ->
- ?hcrt("end stream - 200", [{stream, Fd}]),
case file:close(Fd) of
ok ->
ok;
@@ -1665,15 +1492,13 @@ end_stream({_,200,_}, #request{stream = Fd}) ->
file:close(Fd)
end;
end_stream({_,206,_}, #request{stream = Fd}) ->
- ?hcrt("end stream - 206", [{stream, Fd}]),
case file:close(Fd) of
ok ->
ok;
{error, enospc} -> % Could be due to delayed_write
file:close(Fd)
end;
-end_stream(SL, R) ->
- ?hcrt("end stream", [{status_line, SL}, {request, R}]),
+end_stream(_, _) ->
ok.
@@ -1702,11 +1527,8 @@ handle_verbose(trace) ->
handle_verbose(_) ->
ok.
-
-
send_raw(#session{socket = Socket, socket_type = SocketType},
{ProcessBody, Acc}) when is_function(ProcessBody, 1) ->
- ?hcrt("send raw", [{acc, Acc}]),
send_raw(SocketType, Socket, ProcessBody, Acc);
send_raw(#session{socket = Socket, socket_type = SocketType}, Body) ->
http_transport:send(SocketType, Socket, Body).
@@ -1717,7 +1539,6 @@ send_raw(SocketType, Socket, ProcessBody, Acc) ->
ok;
{ok, Data, NewAcc} ->
DataBin = iolist_to_binary(Data),
- ?hcrd("send", [{data, DataBin}]),
case http_transport:send(SocketType, Socket, DataBin) of
ok ->
send_raw(SocketType, Socket, ProcessBody, NewAcc);
@@ -1883,16 +1704,3 @@ update_session(ProfileName, #session{id = SessionId} = Session, Pos, Value) ->
end.
-%% ---------------------------------------------------------------------
-
-call(Msg, Pid) ->
- call(Msg, Pid, infinity).
-
-call(Msg, Pid, Timeout) ->
- gen_server:call(Pid, Msg, Timeout).
-
-cast(Msg, Pid) ->
- gen_server:cast(Pid, Msg).
-
-t() ->
- http_util:timestamp().
diff --git a/lib/inets/src/http_client/httpc_response.erl b/lib/inets/src/http_client/httpc_response.erl
index d8bdac24e3..0fd5faa466 100644
--- a/lib/inets/src/http_client/httpc_response.erl
+++ b/lib/inets/src/http_client/httpc_response.erl
@@ -363,7 +363,7 @@ redirect(Response = {StatusLine, Headers, Body}, Request) ->
%% Automatic redirection
{ok, {Scheme, _, Host, Port, Path, Query}} ->
NewHeaders =
- (Request#request.headers)#http_request_h{host = Host},
+ (Request#request.headers)#http_request_h{host = Host++":"++integer_to_list(Port)},
NewRequest =
Request#request{redircount =
Request#request.redircount+1,
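The change above makes the client keep the port in the Host header when it follows a redirect. As a minimal illustration of the value being built (hypothetical helper, not part of the module):

    host_with_port(Host, Port) when is_list(Host), is_integer(Port) ->
        Host ++ ":" ++ integer_to_list(Port).
    %% host_with_port("example.com", 8080) -> "example.com:8080"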
diff --git a/lib/inets/test/httpc_SUITE.erl b/lib/inets/test/httpc_SUITE.erl
index a64ae2b87c..8aea38037d 100644
--- a/lib/inets/test/httpc_SUITE.erl
+++ b/lib/inets/test/httpc_SUITE.erl
@@ -88,7 +88,8 @@ real_requests()->
stream_through_mfa,
streaming_error,
inet_opts,
- invalid_headers
+ invalid_headers,
+ invalid_body
].
only_simulated() ->
@@ -125,7 +126,9 @@ only_simulated() ->
redirect_see_other,
redirect_temporary_redirect,
port_in_host_header,
- relaxed
+ redirect_port_in_host_header,
+ relaxed,
+ multipart_chunks
].
misc() ->
@@ -1000,10 +1003,25 @@ invalid_headers(Config) ->
Request = {url(group_name(Config), "/dummy.html", Config), [{"cookie", undefined}]},
{error, _} = httpc:request(get, Request, [], []).
+%%-------------------------------------------------------------------------
+
+invalid_body(Config) ->
+ URL = url(group_name(Config), "/dummy.html", Config),
+ try
+ httpc:request(post, {URL, [], <<"text/plain">>, "foobar"},
+ [], []),
+ ct:fail(accepted_invalid_input)
+ catch
+ error:function_clause ->
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
remote_socket_close(Config) when is_list(Config) ->
URL = url(group_name(Config), "/just_close.html", Config),
{error, socket_closed_remotely} = httpc:request(URL).
+
%%-------------------------------------------------------------------------
remote_socket_close_async(Config) when is_list(Config) ->
@@ -1102,7 +1120,20 @@ port_in_host_header(Config) when is_list(Config) ->
Request = {url(group_name(Config), "/ensure_host_header_with_port.html", Config), []},
{ok, {{_, 200, _}, _, Body}} = httpc:request(get, Request, [], []),
inets_test_lib:check_body(Body).
+%%-------------------------------------------------------------------------
+redirect_port_in_host_header(Config) when is_list(Config) ->
+ Request = {url(group_name(Config), "/redirect_ensure_host_header_with_port.html", Config), []},
+ {ok, {{_, 200, _}, _, Body}} = httpc:request(get, Request, [], []),
+ inets_test_lib:check_body(Body).
+
+%%-------------------------------------------------------------------------
+multipart_chunks(Config) when is_list(Config) ->
+ Request = {url(group_name(Config), "/multipart_chunks.html", Config), []},
+ {ok, Ref} = httpc:request(get, Request, [], [{sync, false}, {stream, self}]),
+ ok = receive_stream_n(Ref, 10),
+ httpc:cancel_request(Ref).
+
%%-------------------------------------------------------------------------
timeout_memory_leak() ->
[{doc, "Check OTP-8739"}].
@@ -1398,7 +1429,7 @@ dummy_server(Caller, SocketType, Inet, Extra) ->
end.
dummy_server_init(Caller, ip_comm, Inet, _) ->
- BaseOpts = [binary, {packet, 0}, {reuseaddr,true}, {active, false}],
+ BaseOpts = [binary, {packet, 0}, {reuseaddr,true}, {keepalive, true}, {active, false}],
{ok, ListenSocket} = gen_tcp:listen(0, [Inet | BaseOpts]),
{ok, Port} = inet:port(ListenSocket),
Caller ! {port, Port},
@@ -1680,6 +1711,12 @@ handle_uri(_,"/ensure_host_header_with_port.html",_,Headers,_,_) ->
"HTTP/1.1 500 Internal Server Error\r\n" ++
"Content-Length:" ++ Len ++ "\r\n\r\n" ++ B
end;
+handle_uri(_,"/redirect_ensure_host_header_with_port.html",Port,_,Socket,_) ->
+ NewUri = url_start(Socket) ++
+ integer_to_list(Port) ++ "/ensure_host_header_with_port.html",
+ "HTTP/1.1 302 Found \r\n" ++
+ "Location:" ++ NewUri ++ "\r\n" ++
+ "Content-Length:0\r\n\r\n";
handle_uri(_,"/300.html",Port,_,Socket,_) ->
NewUri = url_start(Socket) ++
@@ -1968,6 +2005,16 @@ handle_uri(_,"/missing_CR.html",_,_,_,_) ->
"Content-Length:32\r\n\n" ++
"<HTML><BODY>foobar</BODY></HTML>";
+handle_uri(_,"/multipart_chunks.html",_,_,Socket,_) ->
+ Head = "HTTP/1.1 200 ok\r\n" ++
+ "Transfer-Encoding:chunked\r\n" ++
+ "Date: " ++ httpd_util:rfc1123_date() ++ "\r\n"
+ "Connection: Keep-Alive\r\n" ++
+ "Content-Type: multipart/x-mixed-replace; boundary=chunk_boundary\r\n" ++
+ "\r\n",
+ send(Socket, Head),
+ send_multipart_chunks(Socket),
+ http_chunk:encode_last();
handle_uri("HEAD",_,_,_,_,_) ->
"HTTP/1.1 200 ok\r\n" ++
"Content-Length:0\r\n\r\n";
@@ -2264,3 +2311,21 @@ otp_8739_dummy_server_main(_Parent, ListenSocket) ->
Error ->
exit(Error)
end.
+
+send_multipart_chunks(Socket) ->
+ send(Socket, http_chunk:encode("--chunk_boundary\r\n")),
+ send(Socket, http_chunk:encode("Content-Type: text/plain\r\nContent-Length: 4\r\n\r\n")),
+ send(Socket, http_chunk:encode("test\r\n")),
+ ct:sleep(500),
+ send_multipart_chunks(Socket).
+
+receive_stream_n(_, 0) ->
+ ok;
+receive_stream_n(Ref, N) ->
+ receive
+ {http, {Ref, stream_start, _}} ->
+ receive_stream_n(Ref, N);
+ {http, {Ref,stream, Data}} ->
+ ct:pal("Data: ~p", [Data]),
+ receive_stream_n(Ref, N-1)
+ end.
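For reference, the asynchronous self-streamed request used by multipart_chunks/1 delivers messages of the shape below (summarised from the httpc documentation; stream_end is never reached here because the test cancels the request):

    %% {http, {RequestRef, stream_start, Headers}}
    %% {http, {RequestRef, stream, BinBodyPart}}    received repeatedly
    %% {http, {RequestRef, stream_end, Headers}}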
diff --git a/lib/kernel/doc/src/code.xml b/lib/kernel/doc/src/code.xml
index f881fd76fd..878a450f0f 100644
--- a/lib/kernel/doc/src/code.xml
+++ b/lib/kernel/doc/src/code.xml
@@ -258,7 +258,7 @@ zip:create("mnesia-4.4.7.ez",
both strings and atoms, but a future release will probably only allow
the arguments that are documented.</p>
- <p>As from Erlang/OTP R12B, functions in this module generally fail with an
+ <p>Functions in this module generally fail with an
exception if they are passed an incorrect type (for example, an integer or a tuple
where an atom is expected). An error tuple is returned if the argument type
is correct, but there are some other errors (for example, a non-existing directory
diff --git a/lib/kernel/doc/src/config.xml b/lib/kernel/doc/src/config.xml
index c5f37fd036..c10f11b187 100644
--- a/lib/kernel/doc/src/config.xml
+++ b/lib/kernel/doc/src/config.xml
@@ -77,8 +77,8 @@
to update the application configurations.</p>
<p>This means that specifying another <c>.config</c> file, or more
<c>.config</c> files, leads to inconsistent update of application
- configurations. Therefore, in Erlang 5.4/OTP R10B, the syntax of
- <c>sys.config</c> was extended to allow pointing out other
+ configurations. There is, however, a syntax for
+ <c>sys.config</c> that allows pointing out other
<c>.config</c> files:</p>
<code type="none">
[{Application, [{Par, Val}]} | File].</code>
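Concretely, a sys.config using this form could look like the following sketch (application, parameter, and path are only examples):

    [{kernel, [{inet_dist_listen_min, 9100}]},
     "/etc/my_app/local"].

Here "/etc/my_app/local" names another .config file; the .config extension may be omitted.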
diff --git a/lib/kernel/doc/src/heart.xml b/lib/kernel/doc/src/heart.xml
index 59a046bf4d..5b5b71e521 100644
--- a/lib/kernel/doc/src/heart.xml
+++ b/lib/kernel/doc/src/heart.xml
@@ -37,10 +37,7 @@
the <c>heart</c> port program is to check that the Erlang runtime system
it is supervising is still running. If the port program has not
received any heartbeats within <c>HEART_BEAT_TIMEOUT</c> seconds
- (defaults to 60 seconds), the system can be rebooted. Also, if
- the system is equipped with a hardware watchdog timer and is
- running Solaris, the watchdog can be used to supervise the entire
- system.</p>
+ (defaults to 60 seconds), the system can be rebooted.</p>
<p>An Erlang runtime system to be monitored by a heart program
is to be started with command-line flag <c>-heart</c> (see
also <seealso marker="erts:erl"><c>erl(1)</c></seealso>).
@@ -51,17 +48,13 @@
or a terminated Erlang runtime system, environment variable
<c>HEART_COMMAND</c> must be set before the system is started.
If this variable is not set, a warning text is printed but
- the system does not reboot. However, if the hardware watchdog is
- used, it still triggers a reboot <c>HEART_BEAT_BOOT_DELAY</c>
- seconds later (defaults to 60 seconds).</p>
+ the system does not reboot.</p>
<p>To reboot on Windows, <c>HEART_COMMAND</c> can be
set to <c>heart -shutdown</c> (included in the Erlang delivery)
or to any other suitable program that can activate a reboot.</p>
- <p>The hardware watchdog is not started under Solaris if
- environment variable <c>HW_WD_DISABLE</c> is set.</p>
- <p>The environment variables <c>HEART_BEAT_TIMEOUT</c> and
- <c>HEART_BEAT_BOOT_DELAY</c> can be used to configure the heart
- time-outs; they can be set in the operating system shell before Erlang
+ <p>The environment variable <c>HEART_BEAT_TIMEOUT</c>
+ can be used to configure the heart
+ time-out; it can be set in the operating system shell before Erlang
is started or be specified at the command line:</p>
<pre>
% <input>erl -heart -env HEART_BEAT_TIMEOUT 30 ...</input></pre>
diff --git a/lib/kernel/doc/src/seq_trace.xml b/lib/kernel/doc/src/seq_trace.xml
index ba7259219d..b80e87c118 100644
--- a/lib/kernel/doc/src/seq_trace.xml
+++ b/lib/kernel/doc/src/seq_trace.xml
@@ -427,12 +427,6 @@ prev_cnt := tcurr</code>
built with <c>Erl_Interface</c> only maintains one trace token, which
means that the C-node appears as one process from
the sequential tracing point of view.</p>
- <p>To be able to perform sequential tracing between
- distributed Erlang nodes, the distribution protocol has been
- extended (in a backward compatible way). An Erlang node
- supporting sequential tracing can communicate with an older
- (Erlang/OTP R3B) node but messages passed within that node can
- not be traced.</p>
</section>
<section>
diff --git a/lib/kernel/src/erts_debug.erl b/lib/kernel/src/erts_debug.erl
index 7b3f1e313a..ad92aafc2f 100644
--- a/lib/kernel/src/erts_debug.erl
+++ b/lib/kernel/src/erts_debug.erl
@@ -35,7 +35,8 @@
dump_monitors/1, dump_links/1, flat_size/1,
get_internal_state/1, instructions/0, lock_counters/1,
map_info/1, same/2, set_internal_state/2,
- size_shared/1, copy_shared/1]).
+ size_shared/1, copy_shared/1, dirty_cpu/2, dirty_io/2,
+ dirty/3]).
-spec breakpoint(MFA, Flag) -> non_neg_integer() when
MFA :: {Module :: module(),
@@ -182,6 +183,28 @@ same(_, _) ->
set_internal_state(_, _) ->
erlang:nif_error(undef).
+-spec dirty_cpu(Term1, Term2) -> term() when
+ Term1 :: term(),
+ Term2 :: term().
+
+dirty_cpu(_, _) ->
+ erlang:nif_error(undef).
+
+-spec dirty_io(Term1, Term2) -> term() when
+ Term1 :: term(),
+ Term2 :: term().
+
+dirty_io(_, _) ->
+ erlang:nif_error(undef).
+
+-spec dirty(Term1, Term2, Term3) -> term() when
+ Term1 :: term(),
+ Term2 :: term(),
+ Term3 :: term().
+
+dirty(_, _, _) ->
+ erlang:nif_error(undef).
+
%%% End of BIFs
%% size(Term)
diff --git a/lib/kernel/src/heart.erl b/lib/kernel/src/heart.erl
index eea78aabdf..8fa48d56fb 100644
--- a/lib/kernel/src/heart.erl
+++ b/lib/kernel/src/heart.erl
@@ -198,16 +198,11 @@ start_portprogram() ->
end.
get_heart_timeouts() ->
- HeartOpts = case os:getenv("HEART_BEAT_TIMEOUT") of
- false -> "";
- H when is_list(H) ->
- "-ht " ++ H
- end,
- HeartOpts ++ case os:getenv("HEART_BEAT_BOOT_DELAY") of
- false -> "";
- W when is_list(W) ->
- " -wt " ++ W
- end.
+ case os:getenv("HEART_BEAT_TIMEOUT") of
+ false -> "";
+ H when is_list(H) ->
+ "-ht " ++ H
+ end.
check_start_heart() ->
case init:get_argument(heart) of
diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl
index 2f9f81104a..13e73f027d 100644
--- a/lib/observer/src/crashdump_viewer.erl
+++ b/lib/observer/src/crashdump_viewer.erl
@@ -928,7 +928,10 @@ general_info(File) ->
WholeLine -> WholeLine
end,
- GI = get_general_info(Fd,#general_info{created=Created}),
+ {Slogan,SysVsn} = get_slogan_and_sysvsn(Fd,[]),
+ GI = get_general_info(Fd,#general_info{created=Created,
+ slogan=Slogan,
+ system_vsn=SysVsn}),
{MemTot,MemMax} =
case lookup_index(?memory) of
@@ -982,12 +985,20 @@ general_info(File) ->
mem_max=MemMax,
instr_info=InstrInfo}.
+get_slogan_and_sysvsn(Fd,Acc) ->
+ case val(Fd,eof) of
+ "Slogan: " ++ SloganPart when Acc==[] ->
+ get_slogan_and_sysvsn(Fd,[SloganPart]);
+ "System version: " ++ SystemVsn ->
+ {lists:append(lists:reverse(Acc)),SystemVsn};
+ eof ->
+ {lists:append(lists:reverse(Acc)),"-1"};
+ SloganPart ->
+ get_slogan_and_sysvsn(Fd,[[$\n|SloganPart]|Acc])
+ end.
+
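get_slogan_and_sysvsn/2 above accumulates slogan lines until the "System version:" line is reached, so multi-line slogans survive intact. For example (values illustrative), the dump lines "Slogan: foo", "bar" and "System version: Erlang/OTP 19 ..." yield {"foo\nbar", "Erlang/OTP 19 ..."}.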
get_general_info(Fd,GenInfo) ->
case line_head(Fd) of
- "Slogan" ->
- get_general_info(Fd,GenInfo#general_info{slogan=val(Fd)});
- "System version" ->
- get_general_info(Fd,GenInfo#general_info{system_vsn=val(Fd)});
"Compiled" ->
get_general_info(Fd,GenInfo#general_info{compile_time=val(Fd)});
"Taints" ->
diff --git a/lib/observer/src/etop.erl b/lib/observer/src/etop.erl
index fcb900960b..925f4456bb 100644
--- a/lib/observer/src/etop.erl
+++ b/lib/observer/src/etop.erl
@@ -23,7 +23,7 @@
-export([start/0, start/1, config/2, stop/0, dump/1, help/0]).
%% Internal
-export([update/1]).
--export([loadinfo/1, meminfo/2, getopt/2]).
+-export([loadinfo/2, meminfo/2, getopt/2]).
-include("etop.hrl").
-include("etop_defs.hrl").
@@ -319,18 +319,18 @@ output(graphical) -> exit({deprecated, "Use observer instead"});
output(text) -> etop_txt.
-loadinfo(SysI) ->
+loadinfo(SysI,Prev) ->
#etop_info{n_procs = Procs,
run_queue = RQ,
now = Now,
wall_clock = WC,
runtime = RT} = SysI,
- Cpu = calculate_cpu_utilization(WC,RT),
+ Cpu = calculate_cpu_utilization(WC,RT,Prev#etop_info.runtime),
Clock = io_lib:format("~2.2.0w:~2.2.0w:~2.2.0w",
tuple_to_list(element(2,calendar:now_to_datetime(Now)))),
{Cpu,Procs,RQ,Clock}.
-calculate_cpu_utilization({_,WC},{_,RT}) ->
+calculate_cpu_utilization({_,WC},{_,RT},_) ->
%% Old version of observer_backend, using statistics(wall_clock)
%% and statistics(runtime)
case {WC,RT} of
@@ -341,15 +341,23 @@ calculate_cpu_utilization({_,WC},{_,RT}) ->
_ ->
round(100*RT/WC)
end;
-calculate_cpu_utilization(_,undefined) ->
+calculate_cpu_utilization(_,undefined,_) ->
%% First time collecting - no cpu utilization has been measured
%% since scheduler_wall_time flag is not yet on
0;
-calculate_cpu_utilization(_,RTInfo) ->
+calculate_cpu_utilization(WC,RTInfo,undefined) ->
+ %% Second time collecting - RTInfo shows scheduler_wall_time since
+ %% flag was set to true. Faking previous values by setting
+ %% everything to zero.
+ ZeroRT = [{Id,0,0} || {Id,_,_} <- RTInfo],
+ calculate_cpu_utilization(WC,RTInfo,ZeroRT);
+calculate_cpu_utilization(_,RTInfo,PrevRTInfo) ->
%% New version of observer_backend, using statistics(scheduler_wall_time)
- Sum = lists:foldl(fun({_,A,T},{AAcc,TAcc}) -> {A+AAcc,T+TAcc} end,
+ Sum = lists:foldl(fun({{_, A0, T0}, {_, A1, T1}},{AAcc,TAcc}) ->
+ {(A1 - A0)+AAcc,(T1 - T0)+TAcc}
+ end,
{0,0},
- RTInfo),
+ lists:zip(PrevRTInfo,RTInfo)),
case Sum of
{0,0} ->
0;
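The new calculate_cpu_utilization/3 sums per-scheduler deltas between two scheduler_wall_time samples. A standalone sketch of the same computation, assuming the scheduler_wall_time flag is already enabled (function name illustrative):

    scheduler_utilization(PrevSample, CurrSample) ->
        %% Samples are lists of {SchedulerId, ActiveTime, TotalTime} as
        %% returned by erlang:statistics(scheduler_wall_time).
        Pairs = lists:zip(lists:sort(PrevSample), lists:sort(CurrSample)),
        {A, T} = lists:foldl(fun({{_, A0, T0}, {_, A1, T1}}, {AAcc, TAcc}) ->
                                     {AAcc + (A1 - A0), TAcc + (T1 - T0)}
                             end, {0, 0}, Pairs),
        case T of
            0 -> 0;
            _ -> round(100 * A / T)
        end.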
diff --git a/lib/observer/src/etop_txt.erl b/lib/observer/src/etop_txt.erl
index 3b4c176478..6b8f9df24f 100644
--- a/lib/observer/src/etop_txt.erl
+++ b/lib/observer/src/etop_txt.erl
@@ -22,35 +22,35 @@
%%-compile(export_all).
-export([init/1,stop/1]).
--export([do_update/3]).
+-export([do_update/4]).
-include("etop.hrl").
-include("etop_defs.hrl").
--import(etop,[loadinfo/1,meminfo/2]).
+-import(etop,[loadinfo/2,meminfo/2]).
-define(PROCFORM,"~-15w~-20s~8w~8w~8w~8w ~-20s~n").
stop(Pid) -> Pid ! stop.
init(Config) ->
- loop(Config).
+ loop(#etop_info{},Config).
-loop(Config) ->
- Info = do_update(Config),
+loop(Prev,Config) ->
+ Info = do_update(Prev,Config),
receive
stop -> stopped;
- {dump,Fd} -> do_update(Fd,Info,Config), loop(Config);
- {config,_,Config1} -> loop(Config1)
- after Config#opts.intv -> loop(Config)
+ {dump,Fd} -> do_update(Fd,Info,Prev,Config), loop(Info,Config);
+ {config,_,Config1} -> loop(Info,Config1)
+ after Config#opts.intv -> loop(Info,Config)
end.
-do_update(Config) ->
+do_update(Prev,Config) ->
Info = etop:update(Config),
- do_update(standard_io,Info,Config).
+ do_update(standard_io,Info,Prev,Config).
-do_update(Fd,Info,Config) ->
- {Cpu,NProcs,RQ,Clock} = loadinfo(Info),
+do_update(Fd,Info,Prev,Config) ->
+ {Cpu,NProcs,RQ,Clock} = loadinfo(Info,Prev),
io:nl(Fd),
writedoubleline(Fd),
case Info#etop_info.memi of
diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl
index 59a2f9f205..1eaba31a3a 100644
--- a/lib/observer/src/observer_lib.erl
+++ b/lib/observer/src/observer_lib.erl
@@ -461,14 +461,16 @@ create_box(Parent, Data) ->
link_entry(Panel,Value);
_ ->
Value = to_str(Value0),
- case length(Value) > 100 of
- true ->
- Shown = lists:sublist(Value, 80),
+ case string:sub_word(lists:sublist(Value, 80),1,$\n) of
+ Value ->
+ %% Short string, no newlines - show all
+ wxStaticText:new(Panel, ?wxID_ANY, Value);
+ Shown ->
+ %% Long or with newlines,
+ %% use tooltip to show all
TCtrl = wxStaticText:new(Panel, ?wxID_ANY, [Shown,"..."]),
wxWindow:setToolTip(TCtrl,wxToolTip:new(Value)),
- TCtrl;
- false ->
- wxStaticText:new(Panel, ?wxID_ANY, Value)
+ TCtrl
end
end,
wxSizer:add(Line, 10, 0), % space of size 10 horizontally
diff --git a/lib/observer/src/observer_pro_wx.erl b/lib/observer/src/observer_pro_wx.erl
index ee6829b847..f07b9e295a 100644
--- a/lib/observer/src/observer_pro_wx.erl
+++ b/lib/observer/src/observer_pro_wx.erl
@@ -511,7 +511,13 @@ table_holder(#holder{info=Info, attrs=Attrs,
table_holder(S0);
{dump, Fd} ->
EtopInfo = (S0#holder.etop)#etop_info{procinfo=array:to_list(Info)},
- etop_txt:do_update(Fd, EtopInfo, #opts{node=Node}),
+ %% The empty #etop_info{} below is a dummy previous info
+ %% value. It is used by etop to calculate the scheduler
+ %% utilization since last update. When dumping to file,
+ %% there is no previous measurement to use, so we just add
+ %% a dummy here, and the value shown will be since the
+ %% tool was started.
+ etop_txt:do_update(Fd, EtopInfo, #etop_info{}, #opts{node=Node}),
file:close(Fd),
table_holder(S0);
stop ->
diff --git a/lib/os_mon/src/memsup.erl b/lib/os_mon/src/memsup.erl
index 4729d090f8..0a9a883390 100644
--- a/lib/os_mon/src/memsup.erl
+++ b/lib/os_mon/src/memsup.erl
@@ -701,6 +701,7 @@ get_os_wordsize_with_uname() ->
"sparc64" -> 64;
"amd64" -> 64;
"ppc64" -> 64;
+ "s390x" -> 64;
_ -> 32
end.
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 05c09f8996..3d6238d998 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -1029,19 +1029,16 @@ do_pkix_crls_validate(OtpCert, [{DP, CRL, DeltaCRL} | Rest], All, Options, Revo
end.
sort_dp_crls(DpsAndCrls, FreshCB) ->
- Sorted = do_sort_dp_crls(DpsAndCrls, dict:new()),
- sort_crls(Sorted, FreshCB, []).
-
-do_sort_dp_crls([], Dict) ->
- dict:to_list(Dict);
-do_sort_dp_crls([{DP, CRL} | Rest], Dict0) ->
- Dict = try dict:fetch(DP, Dict0) of
- _ ->
- dict:append(DP, CRL, Dict0)
- catch _:_ ->
- dict:store(DP, [CRL], Dict0)
- end,
- do_sort_dp_crls(Rest, Dict).
+ sort_crls(maps:to_list(lists:foldl(fun group_dp_crls/2,
+ #{},
+ DpsAndCrls)),
+ FreshCB, []).
+
+group_dp_crls({DP,CRL}, M) ->
+ case M of
+ #{DP := CRLs} -> M#{DP := [CRL|CRLs]};
+ _ -> M#{DP => [CRL]}
+ end.
sort_crls([], _, Acc) ->
Acc;
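group_dp_crls/2 above is a plain group-by fold into a map, replacing the earlier dict-based code. The same idea written generically (sketch only; maps:update_with/4 is an equivalent alternative to the map-pattern update used in the patch):

    group_by_key(Pairs) ->
        lists:foldl(fun({K, V}, M) ->
                            maps:update_with(K, fun(Vs) -> [V | Vs] end, [V], M)
                    end, #{}, Pairs).
    %% group_by_key([{a,1},{b,2},{a,3}]) -> #{a => [3,1], b => [2]}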
diff --git a/lib/runtime_tools/doc/src/LTTng.xml b/lib/runtime_tools/doc/src/LTTng.xml
index 82a4c79379..7aae5e5c41 100644
--- a/lib/runtime_tools/doc/src/LTTng.xml
+++ b/lib/runtime_tools/doc/src/LTTng.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf8" ?>
+<?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
<header>
diff --git a/lib/runtime_tools/src/observer_backend.erl b/lib/runtime_tools/src/observer_backend.erl
index e943fb4a3e..b27bc63d15 100644
--- a/lib/runtime_tools/src/observer_backend.erl
+++ b/lib/runtime_tools/src/observer_backend.erl
@@ -314,13 +314,12 @@ etop_collect(Collector) ->
case SchedulerWallTime of
undefined ->
- spawn(fun() -> flag_holder_proc(Collector) end),
+ erlang:system_flag(scheduler_wall_time,true),
+ spawn(fun() -> flag_holder_proc(Collector) end),
ok;
_ ->
ok
- end,
-
- erlang:system_flag(scheduler_wall_time,true).
+ end.
flag_holder_proc(Collector) ->
Ref = erlang:monitor(process,Collector),
diff --git a/lib/sasl/test/release_handler_SUITE_data/start b/lib/sasl/test/release_handler_SUITE_data/start
index 87275045b1..eab2b77aed 100755
--- a/lib/sasl/test/release_handler_SUITE_data/start
+++ b/lib/sasl/test/release_handler_SUITE_data/start
@@ -21,8 +21,7 @@ then
fi
HEART_COMMAND=$ROOTDIR/bin/start
-HW_WD_DISABLE=true
-export HW_WD_DISABLE HEART_COMMAND
+export HEART_COMMAND
START_ERL_DATA=${1:-$RELDIR/start_erl.data}
diff --git a/lib/sasl/test/release_handler_SUITE_data/start_client b/lib/sasl/test/release_handler_SUITE_data/start_client
index 5ea94d6f7c..05d744f06e 100755
--- a/lib/sasl/test/release_handler_SUITE_data/start_client
+++ b/lib/sasl/test/release_handler_SUITE_data/start_client
@@ -24,8 +24,7 @@ RELDIR=$CLIENTDIR/releases
# Note that this script is modified and copied to $CLIENTDIR/bin/start
# in release_handler_SUITE:copy_client - therefore HEART_COMMAND is as follows:
HEART_COMMAND=$CLIENTDIR/bin/start
-HW_WD_DISABLE=true
-export HW_WD_DISABLE HEART_COMMAND
+export HEART_COMMAND
START_ERL_DATA=${1:-$RELDIR/start_erl.data}
diff --git a/lib/snmp/src/app/snmp.appup.src b/lib/snmp/src/app/snmp.appup.src
index ca61782639..db09ec3dc5 100644
--- a/lib/snmp/src/app/snmp.appup.src
+++ b/lib/snmp/src/app/snmp.appup.src
@@ -8,6 +8,10 @@
%% {update, snmpa_local_db, soft, soft_purge, soft_purge, []}
%% {add_module, snmpm_net_if_mt}
[
+ {<<"5\\.2\\.4">>,
+ [{load_module, snmp, soft_purge, soft_purge, []},
+ {load_module, snmpc_lib, soft_purge, soft_purge, []},
+ {load_module, snmpc_mib_gram, soft_purge, soft_purge, []}]},
{<<"5\\..*">>, [{restart_application, snmp}]},
{<<"4\\..*">>, [{restart_application, snmp}]}
],
@@ -17,6 +21,10 @@
%% {remove, {snmpm_net_if_mt, soft_purge, soft_purge}}
[
+ {<<"5\\.2\\.4">>,
+ [{load_module, snmp, soft_purge, soft_purge, []},
+ {load_module, snmpc_lib, soft_purge, soft_purge, []},
+ {load_module, snmpc_mib_gram, soft_purge, soft_purge, []}]},
{<<"5\\..*">>, [{restart_application, snmp}]},
{<<"4\\..*">>, [{restart_application, snmp}]}
]
diff --git a/lib/snmp/src/app/snmp.erl b/lib/snmp/src/app/snmp.erl
index df3933ea01..8a736f688b 100644
--- a/lib/snmp/src/app/snmp.erl
+++ b/lib/snmp/src/app/snmp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -573,9 +573,16 @@ print_mod_info(Prefix, {Module, Info}) ->
CompDate =
case key1search(compile_time, Info) of
{value, {Year, Month, Day, Hour, Min, Sec}} ->
- lists:flatten(
- io_lib:format("~w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w",
- [Year, Month, Day, Hour, Min, Sec]));
+ io_lib:format(
+ "~w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w",
+ [Year, Month, Day, Hour, Min, Sec]);
+ _ ->
+ "Not found"
+ end,
+ Digest =
+ case key1search(md5, Info) of
+ {value, MD5} when is_binary(MD5) ->
+ [io_lib:format("~2.16.0b", [Byte]) || <<Byte>> <= MD5];
_ ->
"Not found"
end,
@@ -583,12 +590,14 @@ print_mod_info(Prefix, {Module, Info}) ->
"~s Vsn: ~s~n"
"~s App vsn: ~s~n"
"~s Compiler ver: ~s~n"
- "~s Compile time: ~s~n",
+ "~s Compile time: ~s~n"
+ "~s MD5 digest: ~s~n",
[Prefix, Module,
Prefix, Vsn,
Prefix, AppVsn,
- Prefix, CompVer,
- Prefix, CompDate]),
+ Prefix, CompVer,
+ Prefix, CompDate,
+ Prefix, Digest]),
ok.
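The Digest field added above hex-encodes the module MD5 with a binary comprehension. For illustration (using the well-known MD5 test vector for "abc"):

    %% lists:flatten([io_lib:format("~2.16.0b", [B]) || <<B>> <= erlang:md5("abc")])
    %% -> "900150983cd24fb0d6963f7d28e17f72"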
key1search(Key, Vals) ->
@@ -617,7 +626,7 @@ versions1() ->
Error ->
Error
end.
-
+
versions2() ->
case ms2() of
{ok, Mods} ->
@@ -625,25 +634,56 @@ versions2() ->
Error ->
Error
end.
-
+
version_info(Mods) ->
SysInfo = sys_info(),
OsInfo = os_info(),
ModInfo = [mod_version_info(Mod) || Mod <- Mods],
[{sys_info, SysInfo}, {os_info, OsInfo}, {mod_info, ModInfo}].
-
+
mod_version_info(Mod) ->
Info = Mod:module_info(),
- {value, {attributes, Attr}} = lists:keysearch(attributes, 1, Info),
- {value, {vsn, [Vsn]}} = lists:keysearch(vsn, 1, Attr),
- {value, {app_vsn, AppVsn}} = lists:keysearch(app_vsn, 1, Attr),
- {value, {compile, Comp}} = lists:keysearch(compile, 1, Info),
- {value, {version, Ver}} = lists:keysearch(version, 1, Comp),
- {value, {time, Time}} = lists:keysearch(time, 1, Comp),
- {Mod, [{vsn, Vsn},
- {app_vsn, AppVsn},
- {compiler_version, Ver},
- {compile_time, Time}]}.
+ {Mod,
+ case key1search(attributes, Info) of
+ {value, Attr} ->
+ case key1search(vsn, Attr) of
+ {value, [Vsn]} ->
+ [{vsn, Vsn}];
+ not_found ->
+ []
+ end ++
+ case key1search(app_vsn, Attr) of
+ {value, AppVsn} ->
+ [{app_vsn, AppVsn}];
+ not_found ->
+ []
+ end;
+ not_found ->
+ []
+ end ++
+ case key1search(compile, Info) of
+ {value, Comp} ->
+ case key1search(version, Comp) of
+ {value, Ver} ->
+ [{compiler_version, Ver}];
+ not_found ->
+ []
+ end ++
+ case key1search(time, Comp) of
+ {value, Ver} ->
+ [{compile_time, Ver}];
+ not_found ->
+ []
+ end;
+ not_found ->
+ []
+ end ++
+ case key1search(md5, Info) of
+ {value, Bin} ->
+ [{md5, Bin}];
+ not_found ->
+ []
+ end}.
sys_info() ->
SysArch = string:strip(erlang:system_info(system_architecture),right,$\n),
diff --git a/lib/snmp/src/compile/snmpc_lib.erl b/lib/snmp/src/compile/snmpc_lib.erl
index 51690b6e7e..33ddd78308 100644
--- a/lib/snmp/src/compile/snmpc_lib.erl
+++ b/lib/snmp/src/compile/snmpc_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -99,7 +99,7 @@ make_ASN1type({{type_with_size,Type,{range,Lo,Hi}},Line}) ->
print_error("Undefined type '~w'",[Type],Line),
guess_string_type()
end;
-make_ASN1type({{integer_with_enum,Type,Enums},Line}) ->
+make_ASN1type({{type_with_enum,Type,Enums},Line}) ->
case lookup_vartype(Type) of
{value,ASN1type} -> ASN1type#asn1_type{assocList = [{enums, Enums}]};
false ->
diff --git a/lib/snmp/src/compile/snmpc_mib_gram.yrl b/lib/snmp/src/compile/snmpc_mib_gram.yrl
index 743c3a6550..14a668127e 100644
--- a/lib/snmp/src/compile/snmpc_mib_gram.yrl
+++ b/lib/snmp/src/compile/snmpc_mib_gram.yrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -387,10 +387,12 @@ syntax -> type : {{type, cat('$1')},line_of('$1')}.
syntax -> type size : {{type_with_size, cat('$1'), '$2'},line_of('$1')}.
syntax -> usertype size : {{type_with_size,val('$1'), '$2'},line_of('$1')}.
syntax -> 'INTEGER' '{' namedbits '}' :
- {{integer_with_enum, 'INTEGER', '$3'}, line_of('$1')}.
+ {{type_with_enum, 'INTEGER', '$3'}, line_of('$1')}.
syntax -> 'BITS' '{' namedbits '}' :
ensure_ver(2,'$1'),
{{bits, '$3'}, line_of('$1')}.
+syntax -> usertype '{' namedbits '}' :
+ {{type_with_enum, 'INTEGER', '$3'}, line_of('$1')}.
syntax -> 'SEQUENCE' 'OF' usertype :
{{sequence_of,val('$3')},line_of('$1')}.
diff --git a/lib/snmp/test/snmp_compiler_test.erl b/lib/snmp/test/snmp_compiler_test.erl
index 2c8851c2a7..9b3c2bfd2c 100644
--- a/lib/snmp/test/snmp_compiler_test.erl
+++ b/lib/snmp/test/snmp_compiler_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -56,7 +56,8 @@
otp_8574/1,
otp_8595/1,
otp_10799/1,
- otp_10808/1
+ otp_10808/1,
+ otp_14145/1
]).
@@ -135,7 +136,8 @@ all() ->
].
groups() ->
- [{tickets, [], [otp_6150, otp_8574, otp_8595, otp_10799, otp_10808]}].
+ [{tickets, [],
+ [otp_6150, otp_8574, otp_8595, otp_10799, otp_10808, otp_14145]}].
init_per_group(_GroupName, Config) ->
Config.
@@ -431,6 +433,30 @@ otp_10808(Config) when is_list(Config) ->
%%======================================================================
+otp_14145(suite) ->
+ [];
+otp_14145(Config) when is_list(Config) ->
+ put(tname, otp14145),
+ p("starting with Config: ~p~n", [Config]),
+
+ Dir = ?config(case_top_dir, Config),
+ MibDir = ?config(mib_dir, Config),
+ MibName = "OTP14145-MIB",
+ MibFile = join(MibDir, MibName++".mib"),
+ ?line {ok, MibBin} =
+ snmpc:compile(MibFile, [{outdir, Dir},
+ {verbosity, trace},
+ {group_check, false},
+ module_compliance]),
+ p("Mib: ~n~p~n", [MibBin]),
+ MIB = read_mib(MibBin),
+ Oid = [1,3,6,1,2,1,67,4],
+ check_mib(MIB#mib.mes, Oid, undefined),
+ ok.
+
+
+%%======================================================================
+
augments_extra_info(suite) ->
[];
augments_extra_info(Config) when is_list(Config) ->
diff --git a/lib/snmp/test/snmp_test_data/OTP14145-MIB.mib b/lib/snmp/test/snmp_test_data/OTP14145-MIB.mib
new file mode 100644
index 0000000000..f29c65c4c2
--- /dev/null
+++ b/lib/snmp/test/snmp_test_data/OTP14145-MIB.mib
@@ -0,0 +1,44 @@
+OTP14145-MIB DEFINITIONS ::= BEGIN
+
+IMPORTS
+ MODULE-IDENTITY, OBJECT-TYPE,
+ mib-2 FROM SNMPv2-SMI
+ InetAddressType, InetAddress FROM INET-ADDRESS-MIB
+ MODULE-COMPLIANCE, OBJECT-GROUP FROM SNMPv2-CONF;
+
+testMibId MODULE-IDENTITY
+ LAST-UPDATED "200608210000Z" -- 21 August 2006
+ ORGANIZATION "a"
+ CONTACT-INFO "a"
+ DESCRIPTION "a"
+ REVISION "200608210000Z" -- 21 August 2006
+ DESCRIPTION "a"
+ ::= { mib-2 67 }
+
+testObj OBJECT-TYPE
+ SYNTAX InetAddressType
+ -- SYNTAX InetAddress
+ MAX-ACCESS read-only
+ STATUS current
+ DESCRIPTION "a"
+ ::= { testMibId 2 }
+
+testObjId OBJECT IDENTIFIER ::= { testMibId 3 }
+
+testMibCompliance MODULE-COMPLIANCE
+ STATUS current
+ DESCRIPTION "a"
+ MODULE
+ OBJECT testObj
+ SYNTAX InetAddressType { ipv4(1), ipv6(2) }
+ -- SYNTAX InetAddress ( SIZE(4|16) )
+ DESCRIPTION "a"
+ ::= { testMibId 4 }
+
+testObjGroup OBJECT-GROUP
+ OBJECTS { testObj }
+ STATUS current
+ DESCRIPTION "a"
+ ::= { testObjId 1 }
+
+END
diff --git a/lib/snmp/vsn.mk b/lib/snmp/vsn.mk
index 28eba0d0d6..30b8ee1124 100644
--- a/lib/snmp/vsn.mk
+++ b/lib/snmp/vsn.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1997-2016. All Rights Reserved.
+# Copyright Ericsson AB 1997-2017. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,6 +19,6 @@
# %CopyrightEnd%
APPLICATION = snmp
-SNMP_VSN = 5.2.4
+SNMP_VSN = 5.2.5
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(SNMP_VSN)$(PRE_VSN)"
diff --git a/lib/ssh/src/ssh_auth.erl b/lib/ssh/src/ssh_auth.erl
index ac35b70209..9b54ecb2dd 100644
--- a/lib/ssh/src/ssh_auth.erl
+++ b/lib/ssh/src/ssh_auth.erl
@@ -406,7 +406,11 @@ handle_userauth_info_response(#ssh_msg_userauth_info_response{num_responses = 1,
kb_tries_left = KbTriesLeft,
user = User,
userauth_supported_methods = Methods} = Ssh) ->
- SendOneEmpty = proplists:get_value(tstflg, Opts) == one_empty,
+ SendOneEmpty =
+ (proplists:get_value(tstflg,Opts) == one_empty)
+ orelse
+ proplists:get_value(one_empty, proplists:get_value(tstflg,Opts,[]), false),
+
case check_password(User, unicode:characters_to_list(Password), Opts, Ssh) of
{true,Ssh1} when SendOneEmpty==true ->
Msg = #ssh_msg_userauth_info_request{name = "",
diff --git a/lib/ssh/src/ssh_bits.erl b/lib/ssh/src/ssh_bits.erl
index 8bedaaf0c5..3ce7758447 100644
--- a/lib/ssh/src/ssh_bits.erl
+++ b/lib/ssh/src/ssh_bits.erl
@@ -30,39 +30,31 @@
-export([random/1]).
%%%----------------------------------------------------------------
-name_list([Name]) -> to_bin(Name);
-name_list([Name|Ns]) -> <<(to_bin(Name))/binary, ",", (name_list(Ns))/binary>>;
-name_list([]) -> <<>>.
-
-to_bin(A) when is_atom(A) -> list_to_binary(atom_to_list(A));
-to_bin(S) when is_list(S) -> list_to_binary(S);
-to_bin(B) when is_binary(B) -> B.
+name_list(NamesList) -> list_to_binary(lists:join($,, NamesList)).
%%%----------------------------------------------------------------
%%% Multi Precision Integer encoding
mpint(-1) -> <<0,0,0,1,16#ff>>;
mpint(0) -> <<0,0,0,0>>;
-mpint(X) when X < 0 -> mpint_neg(X,0,[]);
-mpint(X) -> mpint_pos(X,0,[]).
-
-mpint_neg(-1,I,Ds=[MSB|_]) ->
- if MSB band 16#80 =/= 16#80 ->
- <<?UINT32((I+1)), (list_to_binary([255|Ds]))/binary>>;
- true ->
- <<?UINT32(I), (list_to_binary(Ds))/binary>>
- end;
-mpint_neg(X,I,Ds) ->
- mpint_neg(X bsr 8,I+1,[(X band 255)|Ds]).
-
-mpint_pos(0,I,Ds=[MSB|_]) ->
- if MSB band 16#80 == 16#80 ->
- <<?UINT32((I+1)), (list_to_binary([0|Ds]))/binary>>;
- true ->
- <<?UINT32(I), (list_to_binary(Ds))/binary>>
+mpint(I) when I>0 ->
+ <<B1,V/binary>> = binary:encode_unsigned(I),
+ case B1 band 16#80 of
+ 16#80 ->
+ <<(size(V)+2):32/unsigned-big-integer, 0,B1,V/binary >>;
+ _ ->
+ <<(size(V)+1):32/unsigned-big-integer, B1,V/binary >>
end;
-mpint_pos(X,I,Ds) ->
- mpint_pos(X bsr 8,I+1,[(X band 255)|Ds]).
-
+mpint(N) when N<0 ->
+ Sxn = 8*size(binary:encode_unsigned(-N)),
+ Sxn1 = Sxn+8,
+ <<W:Sxn1>> = <<1, 0:Sxn>>,
+ <<B1,V/binary>> = binary:encode_unsigned(W+N),
+ case B1 band 16#80 of
+ 16#80 ->
+ <<(size(V)+1):32/unsigned-big-integer, B1,V/binary >>;
+ _ ->
+ <<(size(V)+2):32/unsigned-big-integer, 255,B1,V/binary >>
+ end.
%%%----------------------------------------------------------------
%% random/1
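
For reference, the rewritten mpint/1 above still has to produce the multi-precision integer encoding defined in RFC 4251, section 5. A minimal sketch of a check against the RFC test vectors (not part of the patch; the helper name is made up and it assumes ssh_bits is compiled and loadable):

    %% RFC 4251, section 5, mpint test vectors checked against the new ssh_bits:mpint/1.
    check_mpint_vectors() ->
        <<0,0,0,0>>                               = ssh_bits:mpint(0),
        <<0,0,0,8,16#09,16#a3,16#78,16#f9,16#b2,16#e3,16#32,16#a7>>
                                                  = ssh_bits:mpint(16#9a378f9b2e332a7),
        <<0,0,0,2,16#00,16#80>>                   = ssh_bits:mpint(16#80),
        <<0,0,0,2,16#ed,16#cc>>                   = ssh_bits:mpint(-16#1234),
        <<0,0,0,5,16#ff,16#21,16#52,16#41,16#11>> = ssh_bits:mpint(-16#deadbeef),
        ok.
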
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index 7451c9e6d0..8718e92fa2 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -1206,7 +1206,7 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
catch
_C:_E ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Encountered unexpected input"},
+ description = "Bad packet"},
StateName, D)
end;
@@ -1221,13 +1221,12 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
{bad_mac, Ssh1} ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Bad mac"},
+ description = "Bad packet"},
StateName, D0#data{ssh_params=Ssh1});
- {error, {exceeds_max_size,PacketLen}} ->
+ {error, {exceeds_max_size,_PacketLen}} ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Bad packet length "
- ++ integer_to_list(PacketLen)},
+ description = "Bad packet"},
StateName, D0)
catch
_C:_E ->
diff --git a/lib/ssh/src/ssh_dbg.erl b/lib/ssh/src/ssh_dbg.erl
index dff2bae9f2..0345bbdea7 100644
--- a/lib/ssh/src/ssh_dbg.erl
+++ b/lib/ssh/src/ssh_dbg.erl
@@ -50,50 +50,61 @@ messages(Write, MangleArg) when is_function(Write,2),
is_function(MangleArg,1) ->
catch dbg:start(),
setup_tracer(Write, MangleArg),
- dbg:p(new,c),
+ dbg:p(new,[c,timestamp]),
dbg_ssh_messages().
dbg_ssh_messages() ->
dbg:tp(ssh_message,encode,1, x),
dbg:tp(ssh_message,decode,1, x),
- dbg:tpl(ssh_transport,select_algorithm,3, x).
-
+ dbg:tpl(ssh_transport,select_algorithm,3, x),
+ dbg:tp(ssh_transport,hello_version_msg,1, x),
+ dbg:tp(ssh_transport,handle_hello_version,1, x).
+
%%%----------------------------------------------------------------
stop() ->
dbg:stop().
%%%================================================================
-msg_formater({trace,Pid,call,{ssh_message,encode,[Msg]}}, D) ->
- fmt("~nSEND ~p ~s~n", [Pid,wr_record(shrink_bin(Msg))], D);
-msg_formater({trace,_Pid,return_from,{ssh_message,encode,1},_Res}, D) ->
+msg_formater({trace_ts,Pid,call,{ssh_message,encode,[Msg]},TS}, D) ->
+ fmt("~n~s SEND ~p ~s~n", [ts(TS),Pid,wr_record(shrink_bin(Msg))], D);
+msg_formater({trace_ts,_Pid,return_from,{ssh_message,encode,1},_Res,_TS}, D) ->
D;
-msg_formater({trace,_Pid,call,{ssh_message,decode,_}}, D) ->
+msg_formater({trace_ts,_Pid,call,{ssh_message,decode,_},_TS}, D) ->
D;
-msg_formater({trace,Pid,return_from,{ssh_message,decode,1},Msg}, D) ->
- fmt("~n~p RECV ~s~n", [Pid,wr_record(shrink_bin(Msg))], D);
+msg_formater({trace_ts,Pid,return_from,{ssh_message,decode,1},Msg,TS}, D) ->
+ fmt("~n~s ~p RECV ~s~n", [ts(TS),Pid,wr_record(shrink_bin(Msg))], D);
-msg_formater({trace,_Pid,call,{ssh_transport,select_algorithm,_}}, D) ->
+msg_formater({trace_ts,_Pid,call,{ssh_transport,select_algorithm,_},_TS}, D) ->
+ D;
+msg_formater({trace_ts,Pid,return_from,{ssh_transport,select_algorithm,3},{ok,Alg},TS}, D) ->
+ fmt("~n~s ~p ALGORITHMS~n~s~n", [ts(TS),Pid, wr_record(Alg)], D);
+
+msg_formater({trace_ts,_Pid,call,{ssh_transport,hello_version_msg,_},_TS}, D) ->
D;
-msg_formater({trace,Pid,return_from,{ssh_transport,select_algorithm,3},{ok,Alg}}, D) ->
- fmt("~n~p ALGORITHMS~n~s~n", [Pid, wr_record(Alg)], D);
+msg_formater({trace_ts,Pid,return_from,{ssh_transport,hello_version_msg,1},Hello,TS}, D) ->
+ fmt("~n~s ~p TCP SEND HELLO~n ~p~n", [ts(TS),Pid,lists:flatten(Hello)], D);
+msg_formater({trace_ts,Pid,call,{ssh_transport,handle_hello_version,[Hello]},TS}, D) ->
+ fmt("~n~s ~p RECV HELLO~n ~p~n", [ts(TS),Pid,lists:flatten(Hello)], D);
+msg_formater({trace_ts,_Pid,return_from,{ssh_transport,handle_hello_version,1},_,_TS}, D) ->
+ D;
-msg_formater({trace,Pid,send,{tcp,Sock,Bytes},Pid}, D) ->
- fmt("~n~p TCP SEND on ~p~n ~p~n", [Pid,Sock, shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,send,{tcp,Sock,Bytes},Pid,TS}, D) ->
+ fmt("~n~s ~p TCP SEND on ~p~n ~p~n", [ts(TS),Pid,Sock, shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,send,{tcp,Sock,Bytes},Dest}, D) ->
- fmt("~n~p TCP SEND from ~p TO ~p~n ~p~n", [Pid,Sock,Dest, shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,send,{tcp,Sock,Bytes},Dest,TS}, D) ->
+ fmt("~n~s ~p TCP SEND from ~p TO ~p~n ~p~n", [ts(TS),Pid,Sock,Dest, shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,send,ErlangMsg,Dest}, D) ->
- fmt("~n~p ERL MSG SEND TO ~p~n ~p~n", [Pid,Dest, shrink_bin(ErlangMsg)], D);
+msg_formater({trace_ts,Pid,send,ErlangMsg,Dest,TS}, D) ->
+ fmt("~n~s ~p ERL MSG SEND TO ~p~n ~p~n", [ts(TS),Pid,Dest, shrink_bin(ErlangMsg)], D);
-msg_formater({trace,Pid,'receive',{tcp,Sock,Bytes}}, D) ->
- fmt("~n~p TCP RECEIVE on ~p~n ~p~n", [Pid,Sock,shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,'receive',{tcp,Sock,Bytes},TS}, D) ->
+ fmt("~n~s ~p TCP RECEIVE on ~p~n ~p~n", [ts(TS),Pid,Sock,shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,'receive',ErlangMsg}, D) ->
- fmt("~n~p ERL MSG RECEIVE~n ~p~n", [Pid,shrink_bin(ErlangMsg)], D);
+msg_formater({trace_ts,Pid,'receive',ErlangMsg,TS}, D) ->
+ fmt("~n~s ~p ERL MSG RECEIVE~n ~p~n", [ts(TS),Pid,shrink_bin(ErlangMsg)], D);
msg_formater(M, D) ->
@@ -106,6 +117,11 @@ msg_formater(M, D) ->
fmt(Fmt, Args, D=#data{writer=Write,acc=Acc}) ->
D#data{acc = Write(io_lib:format(Fmt, Args), Acc)}.
+ts({_,_,Usec}=Now) ->
+ {_Date,{HH,MM,SS}} = calendar:now_to_local_time(Now),
+ io_lib:format("~.2.0w:~.2.0w:~.2.0w.~.6.0w",[HH,MM,SS,Usec]);
+ts(_) ->
+ "-".
%%%----------------------------------------------------------------
setup_tracer(Write, MangleArg) ->
Handler = fun(Arg, D) ->
@@ -116,11 +132,11 @@ setup_tracer(Write, MangleArg) ->
ok.
%%%----------------------------------------------------------------
-shrink_bin(B) when is_binary(B), size(B)>100 -> {'*** SHRINKED BIN',
+shrink_bin(B) when is_binary(B), size(B)>256 -> {'*** SHRINKED BIN',
size(B),
- element(1,split_binary(B,20)),
+ element(1,split_binary(B,64)),
'...',
- element(2,split_binary(B,size(B)-20))
+ element(2,split_binary(B,size(B)-64))
};
shrink_bin(L) when is_list(L) -> lists:map(fun shrink_bin/1, L);
shrink_bin(T) when is_tuple(T) -> list_to_tuple(shrink_bin(tuple_to_list(T)));
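
The timestamped tracing above is started through the same two-argument API as before: the first fun is called with each formatted entry and an accumulator (its return value becomes the new accumulator), and the second fun pre-processes every trace argument. A hedged usage sketch, with both funs purely illustrative:

    %% Print every formatted trace entry to stdout, keep the accumulator as-is,
    %% and pass trace arguments through unchanged.
    ssh_dbg:messages(fun(Str, Acc) -> io:format("~s", [Str]), Acc end,
                     fun(Arg) -> Arg end).
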
diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl
index afc2fb88ff..b937f0412d 100644
--- a/lib/ssh/src/ssh_sftp.erl
+++ b/lib/ssh/src/ssh_sftp.erl
@@ -37,7 +37,7 @@
-export([open/3, open_tar/3, opendir/2, close/2, readdir/2, pread/4, read/3,
open/4, open_tar/4, opendir/3, close/3, readdir/3, pread/5, read/4,
apread/4, aread/3, pwrite/4, write/3, apwrite/4, awrite/3,
- pwrite/5, write/4,
+ pwrite/5, write/4,
position/3, real_path/2, read_file_info/2, get_file_info/2,
position/4, real_path/3, read_file_info/3, get_file_info/3,
write_file_info/3, read_link_info/2, read_link/2, make_symlink/3,
@@ -52,7 +52,7 @@
%% TODO: Should be placed elsewhere ssh_sftpd should not call functions in ssh_sftp!
-export([info_to_attr/1, attr_to_info/1]).
--record(state,
+-record(state,
{
xf,
rep_buf = <<>>,
@@ -64,7 +64,7 @@
-record(fileinf,
{
- handle,
+ handle,
offset,
size,
mode
@@ -81,7 +81,7 @@
enc_text_buf = <<>>, % Encrypted text
plain_text_buf = <<>> % Decrypted text
}).
-
+
-define(FILEOP_TIMEOUT, infinity).
-define(NEXT_REQID(S),
@@ -98,7 +98,7 @@ start_channel(Cm) when is_pid(Cm) ->
start_channel(Socket) when is_port(Socket) ->
start_channel(Socket, []);
start_channel(Host) when is_list(Host) ->
- start_channel(Host, []).
+ start_channel(Host, []).
start_channel(Socket, Options) when is_port(Socket) ->
Timeout =
@@ -110,7 +110,7 @@ start_channel(Socket, Options) when is_port(Socket) ->
TO
end,
case ssh:connect(Socket, Options, Timeout) of
- {ok,Cm} ->
+ {ok,Cm} ->
case start_channel(Cm, Options) of
{ok, Pid} ->
{ok, Pid, Cm};
@@ -124,13 +124,13 @@ start_channel(Cm, Opts) when is_pid(Cm) ->
Timeout = proplists:get_value(timeout, Opts, infinity),
{_, ChanOpts, SftpOpts} = handle_options(Opts, [], [], []),
case ssh_xfer:attach(Cm, [], ChanOpts) of
- {ok, ChannelId, Cm} ->
- case ssh_channel:start(Cm, ChannelId,
+ {ok, ChannelId, Cm} ->
+ case ssh_channel:start(Cm, ChannelId,
?MODULE, [Cm, ChannelId, SftpOpts]) of
{ok, Pid} ->
case wait_for_version_negotiation(Pid, Timeout) of
ok ->
- {ok, Pid};
+ {ok, Pid};
TimeOut ->
TimeOut
end;
@@ -150,7 +150,7 @@ start_channel(Host, Port, Opts) ->
Timeout = proplists:get_value(timeout, SftpOpts, infinity),
case ssh_xfer:connect(Host, Port, SshOpts, ChanOpts, Timeout) of
{ok, ChannelId, Cm} ->
- case ssh_channel:start(Cm, ChannelId, ?MODULE, [Cm,
+ case ssh_channel:start(Cm, ChannelId, ?MODULE, [Cm,
ChannelId, SftpOpts]) of
{ok, Pid} ->
case wait_for_version_negotiation(Pid, Timeout) of
@@ -165,7 +165,7 @@ start_channel(Host, Port, Opts) ->
{error, ignore}
end;
Error ->
- Error
+ Error
end.
stop_channel(Pid) ->
@@ -174,12 +174,12 @@ stop_channel(Pid) ->
OldValue = process_flag(trap_exit, true),
link(Pid),
exit(Pid, ssh_sftp_stop_channel),
- receive
+ receive
{'EXIT', Pid, normal} ->
ok
after 5000 ->
exit(Pid, kill),
- receive
+ receive
{'EXIT', Pid, killed} ->
ok
end
@@ -209,9 +209,9 @@ open_tar(Pid, File, Mode, FileOpTimeout) ->
erl_tar:init(Pid, write,
fun(write, {_,Data}) ->
write_to_remote_tar(Pid, Handle, to_bin(Data), FileOpTimeout);
- (position, {_,Pos}) ->
+ (position, {_,Pos}) ->
position(Pid, Handle, Pos, FileOpTimeout);
- (close, _) ->
+ (close, _) ->
close(Pid, Handle, FileOpTimeout)
end);
{true,false,[{crypto,{CryptoInitFun,CryptoEncryptFun,CryptoEndFun}}]} ->
@@ -245,9 +245,9 @@ open_tar(Pid, File, Mode, FileOpTimeout) ->
erl_tar:init(Pid, read,
fun(read2, {_,Len}) ->
read_repeat(Pid, Handle, Len, FileOpTimeout);
- (position, {_,Pos}) ->
+ (position, {_,Pos}) ->
position(Pid, Handle, Pos, FileOpTimeout);
- (close, _) ->
+ (close, _) ->
close(Pid, Handle, FileOpTimeout)
end);
{false,true,[{crypto,{CryptoInitFun,CryptoDecryptFun}}]} ->
@@ -258,9 +258,9 @@ open_tar(Pid, File, Mode, FileOpTimeout) ->
erl_tar:init(Pid, read,
fun(read2, {_,Len}) ->
read_buf(Pid, SftpHandle, BufHandle, Len, FileOpTimeout);
- (position, {_,Pos}) ->
+ (position, {_,Pos}) ->
position_buf(Pid, SftpHandle, BufHandle, Pos, FileOpTimeout);
- (close, _) ->
+ (close, _) ->
call(Pid, {erase_bufinf,BufHandle}, FileOpTimeout),
close(Pid, SftpHandle, FileOpTimeout)
end);
@@ -292,7 +292,7 @@ pread(Pid, Handle, Offset, Len, FileOpTimeout) ->
read(Pid, Handle, Len) ->
read(Pid, Handle, Len, ?FILEOP_TIMEOUT).
read(Pid, Handle, Len, FileOpTimeout) ->
- call(Pid, {read,false,Handle, Len}, FileOpTimeout).
+ call(Pid, {read,false,Handle, Len}, FileOpTimeout).
%% TODO this ought to be a cast! Is so in all practial meaning
%% even if it is obscure!
@@ -301,7 +301,7 @@ apread(Pid, Handle, Offset, Len) ->
%% TODO this ought to be a cast!
aread(Pid, Handle, Len) ->
- call(Pid, {read,true,Handle, Len}, infinity).
+ call(Pid, {read,true,Handle, Len}, infinity).
pwrite(Pid, Handle, Offset, Data) ->
pwrite(Pid, Handle, Offset, Data, ?FILEOP_TIMEOUT).
@@ -367,7 +367,7 @@ make_symlink(Pid, Name, Target) ->
make_symlink(Pid, Name, Target, ?FILEOP_TIMEOUT).
make_symlink(Pid, Name, Target, FileOpTimeout) ->
call(Pid, {make_symlink,false, Name, Target}, FileOpTimeout).
-
+
rename(Pid, FromFile, ToFile) ->
rename(Pid, FromFile, ToFile, ?FILEOP_TIMEOUT).
rename(Pid, FromFile, ToFile, FileOpTimeout) ->
@@ -411,8 +411,8 @@ list_dir(Pid, Name, FileOpTimeout) ->
close(Pid, Handle, FileOpTimeout),
case Res of
{ok, List} ->
- NList = lists:foldl(fun({Nm, _Info},Acc) ->
- [Nm|Acc] end,
+ NList = lists:foldl(fun({Nm, _Info},Acc) ->
+ [Nm|Acc] end,
[], List),
{ok,NList};
Error -> Error
@@ -482,7 +482,7 @@ write_file_loop(Pid, Handle, Pos, Bin, Remain, PacketSz, FileOpTimeout) ->
<<_:Pos/binary, Data:PacketSz/binary, _/binary>> = Bin,
case write(Pid, Handle, Data, FileOpTimeout) of
ok ->
- write_file_loop(Pid, Handle,
+ write_file_loop(Pid, Handle,
Pos+PacketSz, Bin, Remain-PacketSz,
PacketSz, FileOpTimeout);
Error ->
@@ -510,7 +510,7 @@ init([Cm, ChannelId, Options]) ->
Xf = #ssh_xfer{cm = Cm,
channel = ChannelId},
{ok, #state{xf = Xf,
- req_id = 0,
+ req_id = 0,
rep_buf = <<>>,
inf = new_inf(),
opts = Options}};
@@ -519,7 +519,7 @@ init([Cm, ChannelId, Options]) ->
Error ->
{stop, {shutdown, Error}}
end.
-
+
%%--------------------------------------------------------------------
%% Function: handle_call/3
%% Description: Handling call messages
@@ -541,7 +541,7 @@ handle_call({{timeout, Timeout}, wait_for_version_negotiation}, From,
handle_call({_, wait_for_version_negotiation}, _, State) ->
{reply, ok, State};
-
+
handle_call({{timeout, infinity}, Msg}, From, State) ->
do_handle_call(Msg, From, State);
handle_call({{timeout, Timeout}, Msg}, From, #state{req_id = Id} = State) ->
@@ -555,13 +555,13 @@ code_change(_OldVsn, State, _Extra) ->
{ok, State}.
do_handle_call({get_bufinf,BufHandle}, _From, S=#state{inf=I0}) ->
- {reply, dict:find(BufHandle,I0), S};
+ {reply, maps:find(BufHandle,I0), S};
do_handle_call({put_bufinf,BufHandle,B}, _From, S=#state{inf=I0}) ->
- {reply, ok, S#state{inf=dict:store(BufHandle,B,I0)}};
+ {reply, ok, S#state{inf=maps:put(BufHandle,B,I0)}};
do_handle_call({erase_bufinf,BufHandle}, _From, S=#state{inf=I0}) ->
- {reply, ok, S#state{inf=dict:erase(BufHandle,I0)}};
+ {reply, ok, S#state{inf=maps:remove(BufHandle,I0)}};
do_handle_call({open, Async,FileName,Mode}, From, #state{xf = XF} = State) ->
{Access,Flags,Attrs} = open_mode(XF#ssh_xfer.vsn, Mode),
@@ -636,7 +636,7 @@ do_handle_call({pread,Async,Handle,At,Length}, From, State) ->
binary -> {{ok,Data}, State2};
text -> {{ok,binary_to_list(Data)}, State2}
end;
- (Rep, State2) ->
+ (Rep, State2) ->
{Rep, State2}
end);
Error ->
@@ -777,7 +777,7 @@ do_handle_call(recv_window, _From, State) ->
do_handle_call(stop, _From, State) ->
{stop, shutdown, ok, State};
-do_handle_call(Call, _From, State) ->
+do_handle_call(Call, _From, State) ->
{reply, {error, bad_call, Call, State}, State}.
%%--------------------------------------------------------------------
@@ -785,13 +785,13 @@ do_handle_call(Call, _From, State) ->
%%
%% Description: Handles channel messages
%%--------------------------------------------------------------------
-handle_ssh_msg({ssh_cm, _ConnectionManager,
- {data, _ChannelId, 0, Data}}, #state{rep_buf = Data0} =
+handle_ssh_msg({ssh_cm, _ConnectionManager,
+ {data, _ChannelId, 0, Data}}, #state{rep_buf = Data0} =
State0) ->
State = handle_reply(State0, <<Data0/binary,Data/binary>>),
{ok, State};
-handle_ssh_msg({ssh_cm, _ConnectionManager,
+handle_ssh_msg({ssh_cm, _ConnectionManager,
{data, _ChannelId, 1, Data}}, State) ->
error_logger:format("ssh: STDERR: ~s\n", [binary_to_list(Data)]),
{ok, State};
@@ -803,7 +803,7 @@ handle_ssh_msg({ssh_cm, _, {signal, _, _}}, State) ->
%% Ignore signals according to RFC 4254 section 6.9.
{ok, State};
-handle_ssh_msg({ssh_cm, _, {exit_signal, ChannelId, _, Error, _}},
+handle_ssh_msg({ssh_cm, _, {exit_signal, ChannelId, _, Error, _}},
State0) ->
State = reply_all(State0, {error, Error}),
{stop, ChannelId, State};
@@ -823,7 +823,7 @@ handle_msg({ssh_channel_up, _, _}, #state{opts = Options, xf = Xf} = State) ->
{ok, State};
%% Version negotiation timed out
-handle_msg({timeout, undefined, From},
+handle_msg({timeout, undefined, From},
#state{xf = #ssh_xfer{channel = ChannelId}} = State) ->
ssh_channel:reply(From, {error, timeout}),
{stop, ChannelId, State};
@@ -839,12 +839,12 @@ handle_msg({timeout, Id, From}, #state{req_list = ReqList0} = State) ->
end;
%% Connection manager goes down
-handle_msg({'DOWN', _Ref, _Type, _Process, _},
+handle_msg({'DOWN', _Ref, _Type, _Process, _},
#state{xf = #ssh_xfer{channel = ChannelId}} = State) ->
{stop, ChannelId, State};
-
+
%% Stopped by user
-handle_msg({'EXIT', _, ssh_sftp_stop_channel},
+handle_msg({'EXIT', _, ssh_sftp_stop_channel},
#state{xf = #ssh_xfer{channel = ChannelId}} = State) ->
{stop, ChannelId, State};
@@ -883,10 +883,10 @@ call(Pid, Msg, TimeOut) ->
handle_reply(State, <<?UINT32(Len),Reply:Len/binary,Rest/binary>>) ->
do_handle_reply(State, Reply, Rest);
-handle_reply(State, Data) ->
+handle_reply(State, Data) ->
State#state{rep_buf = Data}.
-do_handle_reply(#state{xf = Xf} = State,
+do_handle_reply(#state{xf = Xf} = State,
<<?SSH_FXP_VERSION, ?UINT32(Version), BinExt/binary>>, Rest) ->
Ext = ssh_xfer:decode_ext(BinExt),
case Xf#ssh_xfer.vsn of
@@ -899,7 +899,7 @@ do_handle_reply(#state{xf = Xf} = State,
ok
end,
ssh_channel:reply(From, ok)
- end,
+ end,
State#state{xf = Xf#ssh_xfer{vsn = Version, ext = Ext}, rep_buf = Rest};
do_handle_reply(State0, Data, Rest) ->
@@ -919,9 +919,9 @@ handle_req_reply(State0, {_, ReqID, _} = XfReply) ->
List = lists:keydelete(ReqID, 1, State0#state.req_list),
State1 = State0#state { req_list = List },
case catch Fun(xreply(XfReply),State1) of
- {'EXIT', _} ->
+ {'EXIT', _} ->
State1;
- State ->
+ State ->
State
end
end.
@@ -998,15 +998,15 @@ reply_all(State, Reply) ->
make_reply(ReqID, true, From, State) ->
{reply, {async, ReqID},
update_request_info(ReqID, State,
- fun(Reply,State1) ->
+ fun(Reply,State1) ->
async_reply(ReqID,Reply,From,State1)
end)};
make_reply(ReqID, false, From, State) ->
{noreply,
update_request_info(ReqID, State,
- fun(Reply,State1) ->
- sync_reply(Reply, From, State1)
+ fun(Reply,State1) ->
+ sync_reply(Reply, From, State1)
end)}.
make_reply_post(ReqID, true, From, State, PostFun) ->
@@ -1074,13 +1074,13 @@ attr_to_info(A) when is_record(A, ssh_xfer_attr) ->
unix_to_datetime(undefined) ->
undefined;
unix_to_datetime(UTCSecs) ->
- UTCDateTime =
+ UTCDateTime =
calendar:gregorian_seconds_to_datetime(UTCSecs + 62167219200),
erlang:universaltime_to_localtime(UTCDateTime).
datetime_to_unix(undefined) ->
undefined;
-datetime_to_unix(LocalDateTime) ->
+datetime_to_unix(LocalDateTime) ->
UTCDateTime = erlang:localtime_to_universaltime(LocalDateTime),
calendar:datetime_to_gregorian_seconds(UTCDateTime) - 62167219200.
@@ -1128,11 +1128,11 @@ open_mode3(Modes) ->
end,
{[], Fl, A}.
-%% accessors for inf dict
-new_inf() -> dict:new().
+%% accessors for inf map
+new_inf() -> #{}.
add_new_handle(Handle, FileMode, Inf) ->
- dict:store(Handle, #fileinf{offset=0, size=0, mode=FileMode}, Inf).
+ maps:put(Handle, #fileinf{offset=0, size=0, mode=FileMode}, Inf).
update_size(Handle, NewSize, State) ->
OldSize = get_size(Handle, State),
@@ -1152,27 +1152,24 @@ update_offset(Handle, NewOffset, State0) ->
%% access size and offset for handle
put_size(Handle, Size, State) ->
Inf0 = State#state.inf,
- case dict:find(Handle, Inf0) of
+ case maps:find(Handle, Inf0) of
{ok, FI} ->
- State#state{inf=dict:store(Handle, FI#fileinf{size=Size}, Inf0)};
+ State#state{inf=maps:put(Handle, FI#fileinf{size=Size}, Inf0)};
_ ->
- State#state{inf=dict:store(Handle, #fileinf{size=Size,offset=0},
- Inf0)}
+ State#state{inf=maps:put(Handle, #fileinf{size=Size,offset=0}, Inf0)}
end.
put_offset(Handle, Offset, State) ->
Inf0 = State#state.inf,
- case dict:find(Handle, Inf0) of
+ case maps:find(Handle, Inf0) of
{ok, FI} ->
- State#state{inf=dict:store(Handle, FI#fileinf{offset=Offset},
- Inf0)};
+ State#state{inf=maps:put(Handle, FI#fileinf{offset=Offset}, Inf0)};
_ ->
- State#state{inf=dict:store(Handle, #fileinf{size=Offset,
- offset=Offset}, Inf0)}
+ State#state{inf=maps:put(Handle, #fileinf{size=Offset, offset=Offset}, Inf0)}
end.
get_size(Handle, State) ->
- case dict:find(Handle, State#state.inf) of
+ case maps:find(Handle, State#state.inf) of
{ok, FI} ->
FI#fileinf.size;
_ ->
@@ -1180,11 +1177,11 @@ get_size(Handle, State) ->
end.
%% get_offset(Handle, State) ->
-%% {ok, FI} = dict:find(Handle, State#state.inf),
+%% {ok, FI} = maps:find(Handle, State#state.inf),
%% FI#fileinf.offset.
get_mode(Handle, State) ->
- case dict:find(Handle, State#state.inf) of
+ case maps:find(Handle, State#state.inf) of
{ok, FI} ->
FI#fileinf.mode;
_ ->
@@ -1192,14 +1189,14 @@ get_mode(Handle, State) ->
end.
erase_handle(Handle, State) ->
- FI = dict:erase(Handle, State#state.inf),
+ FI = maps:remove(Handle, State#state.inf),
State#state{inf = FI}.
%%
%% Caluclate a integer offset
%%
lseek_position(Handle, Pos, State) ->
- case dict:find(Handle, State#state.inf) of
+ case maps:find(Handle, State#state.inf) of
{ok, #fileinf{offset=O, size=S}} ->
lseek_pos(Pos, O, S);
_ ->
@@ -1229,7 +1226,7 @@ lseek_pos({cur, Offset}, CurOffset, _CurSize)
true ->
{ok, NewOffset}
end;
-lseek_pos({eof, Offset}, _CurOffset, CurSize)
+lseek_pos({eof, Offset}, _CurOffset, CurSize)
when is_integer(Offset) andalso -(?SSH_FILEXFER_LARGEFILESIZE) =< Offset andalso
Offset < ?SSH_FILEXFER_LARGEFILESIZE ->
NewOffset = CurSize + Offset,
@@ -1239,7 +1236,7 @@ lseek_pos({eof, Offset}, _CurOffset, CurSize)
{ok, NewOffset}
end;
lseek_pos(_, _, _) ->
- {error, einval}.
+ {error, einval}.
%%%================================================================
%%%
@@ -1277,13 +1274,13 @@ position_buf(Pid, SftpHandle, BufHandle, Pos, FileOpTimeout) ->
case Pos of
{cur,0} when Mode==write ->
{ok,Size+size(Buf0)};
-
+
{cur,0} when Mode==read ->
{ok,Size};
-
+
_ when Mode==read, is_integer(Pos) ->
Skip = Pos-Size,
- if
+ if
Skip < 0 ->
{error, cannot_rewind};
Skip == 0 ->
@@ -1318,7 +1315,7 @@ read_buf(Pid, SftpHandle, BufHandle, WantedLen, FileOpTimeout) ->
eof
end.
-do_the_read_buf(_Pid, _SftpHandle, WantedLen, _Packet, _FileOpTimeout,
+do_the_read_buf(_Pid, _SftpHandle, WantedLen, _Packet, _FileOpTimeout,
B=#bufinf{plain_text_buf=PlainBuf0,
size = Size})
when size(PlainBuf0) >= WantedLen ->
@@ -1327,7 +1324,7 @@ do_the_read_buf(_Pid, _SftpHandle, WantedLen, _Packet, _FileOpTimeout,
{ok,ResultBin,B#bufinf{plain_text_buf=PlainBuf,
size = Size + WantedLen}};
-do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
+do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
B0=#bufinf{plain_text_buf = PlainBuf0,
enc_text_buf = EncBuf0,
chunksize = undefined
@@ -1335,12 +1332,12 @@ do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
when size(EncBuf0) > 0 ->
%% We have (at least) one decodable byte waiting for decodeing.
{ok,DecodedBin,B} = apply_crypto(EncBuf0, B0),
- do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
+ do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
B#bufinf{plain_text_buf = <<PlainBuf0/binary, DecodedBin/binary>>,
enc_text_buf = <<>>
});
-
-do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
+
+do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
B0=#bufinf{plain_text_buf = PlainBuf0,
enc_text_buf = EncBuf0,
chunksize = ChunkSize0
@@ -1349,11 +1346,11 @@ do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
%% We have (at least) one chunk of decodable bytes waiting for decodeing.
<<ToDecode:ChunkSize0/binary, EncBuf/binary>> = EncBuf0,
{ok,DecodedBin,B} = apply_crypto(ToDecode, B0),
- do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
+ do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout,
B#bufinf{plain_text_buf = <<PlainBuf0/binary, DecodedBin/binary>>,
enc_text_buf = EncBuf
});
-
+
do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout, B=#bufinf{enc_text_buf = EncBuf0}) ->
%% We must read more bytes and append to the buffer of encoded bytes.
case read(Pid, SftpHandle, Packet, FileOpTimeout) of
@@ -1370,7 +1367,7 @@ do_the_read_buf(Pid, SftpHandle, WantedLen, Packet, FileOpTimeout, B=#bufinf{enc
write_buf(Pid, SftpHandle, BufHandle, PlainBin, FileOpTimeout) ->
{ok,{_Window,Packet}} = send_window(Pid, FileOpTimeout),
{ok,B0=#bufinf{plain_text_buf=PTB}} = call(Pid, {get_bufinf,BufHandle}, FileOpTimeout),
- case do_the_write_buf(Pid, SftpHandle, Packet, FileOpTimeout,
+ case do_the_write_buf(Pid, SftpHandle, Packet, FileOpTimeout,
B0#bufinf{plain_text_buf = <<PTB/binary,PlainBin/binary>>}) of
{ok, B} ->
call(Pid, {put_bufinf,BufHandle,B}, FileOpTimeout),
@@ -1379,7 +1376,7 @@ write_buf(Pid, SftpHandle, BufHandle, PlainBin, FileOpTimeout) ->
{error,Error}
end.
-do_the_write_buf(Pid, SftpHandle, Packet, FileOpTimeout,
+do_the_write_buf(Pid, SftpHandle, Packet, FileOpTimeout,
B=#bufinf{enc_text_buf = EncBuf0,
size = Size})
when size(EncBuf0) >= Packet ->
@@ -1421,9 +1418,9 @@ do_the_write_buf(_Pid, _SftpHandle, _Packet, _FileOpTimeout, B) ->
apply_crypto(In, B=#bufinf{crypto_state = CState0,
crypto_fun = F}) ->
case F(In,CState0) of
- {ok,EncodedBin,CState} ->
+ {ok,EncodedBin,CState} ->
{ok, EncodedBin, B#bufinf{crypto_state=CState}};
- {ok,EncodedBin,CState,ChunkSize} ->
+ {ok,EncodedBin,CState,ChunkSize} ->
{ok, EncodedBin, B#bufinf{crypto_state=CState,
chunksize=ChunkSize}}
end.
diff --git a/lib/ssh/src/ssh_sftpd_file_api.erl b/lib/ssh/src/ssh_sftpd_file_api.erl
index 78f452df67..e444e52ac0 100644
--- a/lib/ssh/src/ssh_sftpd_file_api.erl
+++ b/lib/ssh/src/ssh_sftpd_file_api.erl
@@ -36,7 +36,7 @@
-callback list_dir(file:name(), State::term()) ->
{{ok, Filenames::term()}, State::term()} | {{error, Reason::term()}, State::term()}.
-callback make_dir(Dir::term(), State::term()) ->
- {{ok, State::term()},State::term()} | {{error, Reason::term()}, State::term()}.
+ {ok, State::term()} | {{error, Reason::term()}, State::term()}.
-callback make_symlink(Path2::term(), Path::term(), State::term()) ->
{ok, State::term()} | {{error, Reason::term()}, State::term()}.
-callback open(Path::term(), Flags::term(), State::term()) ->
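
The corrected make_dir/2 return type above means a conforming callback returns the plain {ok, State} tuple on success rather than a nested one. A hedged sketch of such a callback clause (the module it would live in and the use of the local file system are assumptions, not part of the patch):

    %% Part of a hypothetical ssh_sftpd_file_api callback module backed by file(3).
    make_dir(Dir, State) ->
        case file:make_dir(Dir) of
            ok              -> {ok, State};
            {error, Reason} -> {{error, Reason}, State}
        end.
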
diff --git a/lib/ssh/src/ssh_transport.erl b/lib/ssh/src/ssh_transport.erl
index 21ba34506a..53e9ef485b 100644
--- a/lib/ssh/src/ssh_transport.erl
+++ b/lib/ssh/src/ssh_transport.erl
@@ -367,7 +367,7 @@ handle_kexdh_init(#ssh_msg_kexdh_init{e = E},
h_sig = H_SIG
}, Ssh0),
{ok, SshPacket, Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}},
- shared_secret = K,
+ shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh1, H)}};
@@ -393,7 +393,7 @@ handle_kexdh_reply(#ssh_msg_kexdh_reply{public_host_key = PeerPubHostKey,
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
Error ->
@@ -532,7 +532,7 @@ handle_kex_dh_gex_init(#ssh_msg_kex_dh_gex_init{e = E},
ssh_packet(#ssh_msg_kex_dh_gex_reply{public_host_key = MyPubHostKey,
f = Public,
h_sig = H_SIG}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)
}};
@@ -568,7 +568,7 @@ handle_kex_dh_gex_reply(#ssh_msg_kex_dh_gex_reply{public_host_key = PeerPubHostK
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
_Error ->
@@ -618,7 +618,7 @@ handle_kex_ecdh_init(#ssh_msg_kex_ecdh_init{q_c = PeerPublic},
h_sig = H_SIG},
Ssh0),
{ok, SshPacket, Ssh1#ssh{keyex_key = {{MyPublic,MyPrivate},Curve},
- shared_secret = K,
+ shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh1, H)}}
catch
@@ -644,7 +644,7 @@ handle_kex_ecdh_reply(#ssh_msg_kex_ecdh_reply{public_host_key = PeerPubHostKey,
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
Error ->
@@ -1577,7 +1577,7 @@ hash(SSH, Char, Bits) ->
hash(_SSH, _Char, 0, _HASH) ->
<<>>;
hash(SSH, Char, N, HASH) ->
- K = ssh_bits:mpint(SSH#ssh.shared_secret),
+ K = SSH#ssh.shared_secret, % K = ssh_bits:mpint(SSH#ssh.shared_secret),
H = SSH#ssh.exchanged_hash,
SessionID = SSH#ssh.session_id,
K1 = HASH([K, H, Char, SessionID]),
diff --git a/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl b/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
index dc3b7dc7e6..8ca29b9399 100644
--- a/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
+++ b/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
@@ -54,15 +54,18 @@
-endif.
-endif.
+%% Public key records:
+-include_lib("public_key/include/public_key.hrl").
%%% Properties:
prop_ssh_decode() ->
- ?FORALL(Msg, ssh_msg(),
- try ssh_message:decode(Msg)
+ ?FORALL({Msg,KexFam}, ?LET(KF, kex_family(), {ssh_msg(KF),KF} ),
+ try ssh_message:decode(decode_state(Msg,KexFam))
of
_ -> true
catch
+
C:E -> io:format('~p:~p~n',[C,E]),
false
end
@@ -71,122 +74,101 @@ prop_ssh_decode() ->
%%% This fails because ssh_message is not symmetric in encode and decode regarding data types
prop_ssh_decode_encode() ->
- ?FORALL(Msg, ssh_msg(),
- Msg == ssh_message:encode(ssh_message:decode(Msg))
+ ?FORALL({Msg,KexFam}, ?LET(KF, kex_family(), {ssh_msg(KF),KF} ),
+ Msg == ssh_message:encode(
+ fix_asym(
+ ssh_message:decode(decode_state(Msg,KexFam))))
).
%%%================================================================
%%%
-%%% Scripts to generate message generators
-%%%
-
-%% awk '/^( |\t)+byte( |\t)+SSH/,/^( |\t)*$/{print}' rfc425?.txt | sed 's/^\( \|\\t\)*//' > msgs.txt
-
-%% awk '/^byte( |\t)+SSH/{print $2","}' < msgs.txt
-
-%% awk 'BEGIN{print "%%%---- BEGIN GENERATED";prev=0} END{print " >>.\n%%%---- END GENERATED"} /^byte( |\t)+SSH/{if (prev==1) print " >>.\n"; prev=1; printf "%c%s%c",39,$2,39; print "()->\n <<?"$2;next} /^string( |\t)+\"/{print " ,"$2;next} /^string( |\t)+.*address/{print " ,(ssh_string_address())/binary %%",$2,$3,$4,$5,$6;next}/^string( |\t)+.*US-ASCII/{print " ,(ssh_string_US_ASCII())/binary %%",$2,$3,$4,$5,$6;next} /^string( |\t)+.*UTF-8/{print " ,(ssh_string_UTF_8())/binary %% ",$2,$3,$4,$5,$6;next} /^[a-z0-9]+( |\t)/{print " ,(ssh_"$1"())/binary %%",$2,$3,$4,$5,$6;next} /^byte\[16\]( |\t)+/{print" ,(ssh_byte_16())/binary %%",$2,$3,$4,$5,$6;next} /^name-list( |\t)+/{print" ,(ssh_name_list())/binary %%",$2,$3,$4,$5,$6;next} /./{print "?? %%",$0}' < msgs.txt > gen.txt
-
-%%%================================================================
-%%%
%%% Generators
%%%
-ssh_msg() -> ?LET(M,oneof(
-[[msg_code('SSH_MSG_CHANNEL_CLOSE'),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_DATA'),gen_uint32(),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_EOF'),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_EXTENDED_DATA'),gen_uint32(),gen_uint32(),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_FAILURE'),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("direct-tcpip"),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( ),gen_uint32(),gen_string( ),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("forwarded-tcpip"),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( ),gen_uint32(),gen_string( ),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("session"),gen_uint32(),gen_uint32(),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("x11"),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( ),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string( ),gen_uint32(),gen_uint32(),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN_CONFIRMATION'),gen_uint32(),gen_uint32(),gen_uint32(),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_OPEN_FAILURE'),gen_uint32(),gen_uint32(),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("env"),gen_boolean(),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("exec"),gen_boolean(),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("exit-signal"),0,gen_string( ),gen_boolean(),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("exit-status"),0,gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("pty-req"),gen_boolean(),gen_string( ),gen_uint32(),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("shell"),gen_boolean()],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("signal"),0,gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("subsystem"),gen_boolean(),gen_string( )],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("window-change"),0,gen_uint32(),gen_uint32(),gen_uint32(),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("x11-req"),gen_boolean(),gen_boolean(),gen_string( ),gen_string( ),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("xon-xoff"),0,gen_boolean()],
- [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string( ),gen_boolean()],
- [msg_code('SSH_MSG_CHANNEL_SUCCESS'),gen_uint32()],
- [msg_code('SSH_MSG_CHANNEL_WINDOW_ADJUST'),gen_uint32(),gen_uint32()],
-%%Assym [msg_code('SSH_MSG_DEBUG'),gen_boolean(),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_DISCONNECT'),gen_uint32(),gen_string( ),gen_string( )],
-%%Assym [msg_code('SSH_MSG_GLOBAL_REQUEST'),gen_string("cancel-tcpip-forward"),gen_boolean(),gen_string( ),gen_uint32()],
-%%Assym [msg_code('SSH_MSG_GLOBAL_REQUEST'),gen_string("tcpip-forward"),gen_boolean(),gen_string( ),gen_uint32()],
-%%Assym [msg_code('SSH_MSG_GLOBAL_REQUEST'),gen_string( ),gen_boolean()],
- [msg_code('SSH_MSG_IGNORE'),gen_string( )],
- %% [msg_code('SSH_MSG_KEXDH_INIT'),gen_mpint()],
- %% [msg_code('SSH_MSG_KEXDH_REPLY'),gen_string( ),gen_mpint(),gen_string( )],
- %% [msg_code('SSH_MSG_KEXINIT'),gen_byte(16),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_boolean(),gen_uint32()],
- [msg_code('SSH_MSG_KEX_DH_GEX_GROUP'),gen_mpint(),gen_mpint()],
- [msg_code('SSH_MSG_NEWKEYS')],
- [msg_code('SSH_MSG_REQUEST_FAILURE')],
- [msg_code('SSH_MSG_REQUEST_SUCCESS')],
- [msg_code('SSH_MSG_REQUEST_SUCCESS'),gen_uint32()],
- [msg_code('SSH_MSG_SERVICE_ACCEPT'),gen_string( )],
- [msg_code('SSH_MSG_SERVICE_REQUEST'),gen_string( )],
- [msg_code('SSH_MSG_UNIMPLEMENTED'),gen_uint32()],
- [msg_code('SSH_MSG_USERAUTH_BANNER'),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_USERAUTH_FAILURE'),gen_name_list(),gen_boolean()],
- [msg_code('SSH_MSG_USERAUTH_PASSWD_CHANGEREQ'),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_USERAUTH_PK_OK'),gen_string( ),gen_string( )],
- [msg_code('SSH_MSG_USERAUTH_SUCCESS')]
-]
-
-), list_to_binary(M)).
-
-
-%%%================================================================
-%%%
-%%% Generator
-%%%
-
-do() ->
- io_lib:format('[~s~n]',
- [write_gen(
- files(["rfc4254.txt",
- "rfc4253.txt",
- "rfc4419.txt",
- "rfc4252.txt",
- "rfc4256.txt"]))]).
-
-
-write_gen(L) when is_list(L) ->
- string:join(lists:map(fun write_gen/1, L), ",\n ");
-write_gen({MsgName,Args}) ->
- lists:flatten(["[",generate_args([MsgName|Args]),"]"]).
-
-generate_args(As) -> string:join([generate_arg(A) || A <- As], ",").
-
-generate_arg({<<"string">>, <<"\"",B/binary>>}) ->
- S = get_string($",B),
- ["gen_string(\"",S,"\")"];
-generate_arg({<<"string">>, _}) -> "gen_string( )";
-generate_arg({<<"byte[",B/binary>>, _}) ->
- io_lib:format("gen_byte(~p)",[list_to_integer(get_string($],B))]);
-generate_arg({<<"byte">> ,_}) -> "gen_byte()";
-generate_arg({<<"uint16">>,_}) -> "gen_uint16()";
-generate_arg({<<"uint32">>,_}) -> "gen_uint32()";
-generate_arg({<<"uint64">>,_}) -> "gen_uint64()";
-generate_arg({<<"mpint">>,_}) -> "gen_mpint()";
-generate_arg({<<"name-list">>,_}) -> "gen_name_list()";
-generate_arg({<<"boolean">>,<<"FALSE">>}) -> "0";
-generate_arg({<<"boolean">>,<<"TRUE">>}) -> "1";
-generate_arg({<<"boolean">>,_}) -> "gen_boolean()";
-generate_arg({<<"....">>,_}) -> ""; %% FIXME
-generate_arg(Name) when is_binary(Name) ->
- lists:flatten(["msg_code('",binary_to_list(Name),"')"]).
-
+ssh_msg(<<"dh">>) ->
+ ?LET(M,oneof(
+ [
+ [msg_code('SSH_MSG_KEXDH_INIT'),gen_mpint()], % 30
+ [msg_code('SSH_MSG_KEXDH_REPLY'),gen_pubkey_string(rsa),gen_mpint(),gen_signature_string(rsa)] % 31
+ | rest_ssh_msgs()
+ ]),
+ list_to_binary(M));
+
+ssh_msg(<<"dh_gex">>) ->
+ ?LET(M,oneof(
+ [
+ [msg_code('SSH_MSG_KEX_DH_GEX_REQUEST_OLD'),gen_uint32()], % 30
+ [msg_code('SSH_MSG_KEX_DH_GEX_GROUP'),gen_mpint(),gen_mpint()] % 31
+ | rest_ssh_msgs()
+ ]),
+ list_to_binary(M));
+
+ssh_msg(<<"ecdh">>) ->
+ ?LET(M,oneof(
+ [
+ [msg_code('SSH_MSG_KEX_ECDH_INIT'),gen_mpint()], % 30
+ [msg_code('SSH_MSG_KEX_ECDH_REPLY'),gen_pubkey_string(ecdsa),gen_mpint(),gen_signature_string(ecdsa)] % 31
+ | rest_ssh_msgs()
+ ]),
+ list_to_binary(M)).
+
+
+rest_ssh_msgs() ->
+ [%% SSH_MSG_USERAUTH_INFO_RESPONSE
+ %% hard args SSH_MSG_USERAUTH_INFO_REQUEST
+ %% rfc4252 p12 error SSH_MSG_USERAUTH_REQUEST
+ [msg_code('SSH_MSG_KEX_DH_GEX_REQUEST'),gen_uint32(),gen_uint32(),gen_uint32()],
+ [msg_code('SSH_MSG_KEX_DH_GEX_INIT'),gen_mpint()],
+ [msg_code('SSH_MSG_KEX_DH_GEX_REPLY'),gen_pubkey_string(rsa),gen_mpint(),gen_signature_string(rsa)],
+ [msg_code('SSH_MSG_CHANNEL_CLOSE'),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_DATA'),gen_uint32(),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_EOF'),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_EXTENDED_DATA'),gen_uint32(),gen_uint32(),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_FAILURE'),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("direct-tcpip"),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( ),gen_uint32(),gen_string( ),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("forwarded-tcpip"),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( ),gen_uint32(),gen_string( ),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("session"),gen_uint32(),gen_uint32(),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string("x11"),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( ),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN'),gen_string( ),gen_uint32(),gen_uint32(),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN_CONFIRMATION'),gen_uint32(),gen_uint32(),gen_uint32(),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_OPEN_FAILURE'),gen_uint32(),gen_uint32(),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("env"),gen_boolean(),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("exec"),gen_boolean(),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("exit-signal"),0,gen_string( ),gen_boolean(),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("exit-status"),0,gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("pty-req"),gen_boolean(),gen_string( ),gen_uint32(),gen_uint32(),gen_uint32(),gen_uint32(),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("shell"),gen_boolean()],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("signal"),0,gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("subsystem"),gen_boolean(),gen_string( )],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("window-change"),0,gen_uint32(),gen_uint32(),gen_uint32(),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("x11-req"),gen_boolean(),gen_boolean(),gen_string( ),gen_string( ),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string("xon-xoff"),0,gen_boolean()],
+ [msg_code('SSH_MSG_CHANNEL_REQUEST'),gen_uint32(),gen_string( ),gen_boolean()],
+ [msg_code('SSH_MSG_CHANNEL_SUCCESS'),gen_uint32()],
+ [msg_code('SSH_MSG_CHANNEL_WINDOW_ADJUST'),gen_uint32(),gen_uint32()],
+ [msg_code('SSH_MSG_DEBUG'),gen_boolean(),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_DISCONNECT'),gen_uint32(),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_GLOBAL_REQUEST'),gen_string("cancel-tcpip-forward"),gen_boolean(),gen_string( ),gen_uint32()],
+ [msg_code('SSH_MSG_GLOBAL_REQUEST'),gen_string("tcpip-forward"),gen_boolean(),gen_string( ),gen_uint32()],
+ [msg_code('SSH_MSG_GLOBAL_REQUEST'),gen_string( ),gen_boolean()],
+ [msg_code('SSH_MSG_IGNORE'),gen_string( )],
+ [msg_code('SSH_MSG_KEXINIT'),gen_byte(16),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_name_list(),gen_boolean(),gen_uint32()],
+ [msg_code('SSH_MSG_NEWKEYS')],
+ [msg_code('SSH_MSG_REQUEST_FAILURE')],
+ [msg_code('SSH_MSG_REQUEST_SUCCESS')],
+ [msg_code('SSH_MSG_REQUEST_SUCCESS'),gen_uint32()],
+ [msg_code('SSH_MSG_SERVICE_ACCEPT'),gen_string( )],
+ [msg_code('SSH_MSG_SERVICE_REQUEST'),gen_string( )],
+ [msg_code('SSH_MSG_UNIMPLEMENTED'),gen_uint32()],
+ [msg_code('SSH_MSG_USERAUTH_BANNER'),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_USERAUTH_FAILURE'),gen_name_list(),gen_boolean()],
+ [msg_code('SSH_MSG_USERAUTH_PASSWD_CHANGEREQ'),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_USERAUTH_PK_OK'),gen_string( ),gen_string( )],
+ [msg_code('SSH_MSG_USERAUTH_SUCCESS')]
+ ].
+
+kex_family() -> oneof([<<"dh">>, <<"dh_gex">>, <<"ecdh">>]).
gen_boolean() -> choose(0,1).
@@ -202,10 +184,7 @@ gen_byte(N) when N>0 -> [gen_byte() || _ <- lists:seq(1,N)].
gen_char() -> choose($a,$z).
-gen_mpint() -> ?LET(Size, choose(1,20),
- ?LET(Str, vector(Size, gen_byte()),
- gen_string( strip_0s(Str) )
- )).
+gen_mpint() -> ?LET(I, largeint(), ssh_bits:mpint(I)).
strip_0s([0|T]) -> strip_0s(T);
strip_0s(X) -> X.
@@ -230,13 +209,22 @@ gen_name() -> gen_string().
uint32_to_list(I) -> binary_to_list(<<I:32/unsigned-big-integer>>).
-%%%----
-get_string(Delim, B) ->
- binary_to_list( element(1, split_binary(B, count_string_chars(Delim,B,0))) ).
-
-count_string_chars(Delim, <<Delim,_/binary>>, Acc) -> Acc;
-count_string_chars(Delim, <<_,B/binary>>, Acc) -> count_string_chars(Delim, B, Acc+1).
+gen_pubkey_string(Type) ->
+ PubKey = case Type of
+ rsa -> #'RSAPublicKey'{modulus = 12345,publicExponent = 2};
+ ecdsa -> {#'ECPoint'{point=[1,2,3,4,5]},
+ {namedCurve,{1,2,840,10045,3,1,7}}} % 'secp256r1' nistp256
+ end,
+ gen_string(public_key:ssh_encode(PubKey, ssh2_pubkey)).
+
+gen_signature_string(Type) ->
+ Signature = <<"hejhopp">>,
+ Id = case Type of
+ rsa -> "ssh-rsa";
+ ecdsa -> "ecdsa-sha2-nistp256"
+ end,
+ gen_string(gen_string(Id) ++ gen_string(Signature)).
-define(MSG_CODE(Name,Num),
msg_code(Name) -> Num;
@@ -273,124 +261,34 @@ msg_code(Num) -> Name
?MSG_CODE('SSH_MSG_CHANNEL_FAILURE', 100);
?MSG_CODE('SSH_MSG_USERAUTH_INFO_REQUEST', 60);
?MSG_CODE('SSH_MSG_USERAUTH_INFO_RESPONSE', 61);
+?MSG_CODE('SSH_MSG_KEXDH_INIT', 30);
+?MSG_CODE('SSH_MSG_KEXDH_REPLY', 31);
?MSG_CODE('SSH_MSG_KEX_DH_GEX_REQUEST_OLD', 30);
?MSG_CODE('SSH_MSG_KEX_DH_GEX_REQUEST', 34);
?MSG_CODE('SSH_MSG_KEX_DH_GEX_GROUP', 31);
?MSG_CODE('SSH_MSG_KEX_DH_GEX_INIT', 32);
-?MSG_CODE('SSH_MSG_KEX_DH_GEX_REPLY', 33).
-
-%%%=============================================================================
-%%%=============================================================================
-%%%=============================================================================
-
-files(Fs) ->
- Defs = lists:usort(lists:flatten(lists:map(fun file/1, Fs))),
- DefinedIDs = lists:usort([binary_to_list(element(1,D)) || D <- Defs]),
- WantedIDs = lists:usort(wanted_messages()),
- Missing = WantedIDs -- DefinedIDs,
- case Missing of
- [] -> ok;
- _ -> io:format('%% Warning: missing ~p~n', [Missing])
- end,
- Defs.
-
-
-file(F) ->
- {ok,B} = file:read_file(F),
- hunt_msg_def(B).
-
-
-hunt_msg_def(<<"\n",B/binary>>) -> some_hope(skip_blanks(B));
-hunt_msg_def(<<_, B/binary>>) -> hunt_msg_def(B);
-hunt_msg_def(<<>>) -> [].
-
-some_hope(<<"byte ", B/binary>>) -> try_message(skip_blanks(B));
-some_hope(B) -> hunt_msg_def(B).
-
-try_message(B = <<"SSH_MSG_",_/binary>>) ->
- {ID,Rest} = get_id(B),
- case lists:member(binary_to_list(ID), wanted_messages()) of
- true ->
- {Lines,More} = get_def_lines(skip_blanks(Rest), []),
- [{ID,lists:reverse(Lines)} | hunt_msg_def(More)];
- false ->
- hunt_msg_def(Rest)
- end;
-try_message(B) -> hunt_msg_def(B).
-
-
-skip_blanks(<<32, B/binary>>) -> skip_blanks(B);
-skip_blanks(<< 9, B/binary>>) -> skip_blanks(B);
-skip_blanks(B) -> B.
-
-get_def_lines(B0 = <<"\n",B/binary>>, Acc) ->
- {ID,Rest} = get_id(skip_blanks(B)),
- case {size(ID), skip_blanks(Rest)} of
- {0,<<"....",More/binary>>} ->
- {Text,LineEnd} = get_to_eol(skip_blanks(More)),
- get_def_lines(LineEnd, [{<<"....">>,Text}|Acc]);
- {0,_} ->
- {Acc,B0};
- {_,Rest1} ->
- {Text,LineEnd} = get_to_eol(Rest1),
- get_def_lines(LineEnd, [{ID,Text}|Acc])
- end;
-get_def_lines(B, Acc) ->
- {Acc,B}.
-
-
-get_to_eol(B) -> split_binary(B, count_to_eol(B,0)).
-
-count_to_eol(<<"\n",_/binary>>, Acc) -> Acc;
-count_to_eol(<<>>, Acc) -> Acc;
-count_to_eol(<<_,B/binary>>, Acc) -> count_to_eol(B,Acc+1).
-
-
-get_id(B) -> split_binary(B, count_id_chars(B,0)).
-
-count_id_chars(<<C,B/binary>>, Acc) when $A=<C,C=<$Z -> count_id_chars(B,Acc+1);
-count_id_chars(<<C,B/binary>>, Acc) when $a=<C,C=<$z -> count_id_chars(B,Acc+1);
-count_id_chars(<<C,B/binary>>, Acc) when $0=<C,C=<$9 -> count_id_chars(B,Acc+1);
-count_id_chars(<<"_",B/binary>>, Acc) -> count_id_chars(B,Acc+1);
-count_id_chars(<<"-",B/binary>>, Acc) -> count_id_chars(B,Acc+1); %% e.g name-list
-count_id_chars(<<"[",B/binary>>, Acc) -> count_id_chars(B,Acc+1); %% e.g byte[16]
-count_id_chars(<<"]",B/binary>>, Acc) -> count_id_chars(B,Acc+1); %% e.g byte[16]
-count_id_chars(_, Acc) -> Acc.
-
-wanted_messages() ->
- ["SSH_MSG_CHANNEL_CLOSE",
- "SSH_MSG_CHANNEL_DATA",
- "SSH_MSG_CHANNEL_EOF",
- "SSH_MSG_CHANNEL_EXTENDED_DATA",
- "SSH_MSG_CHANNEL_FAILURE",
- "SSH_MSG_CHANNEL_OPEN",
- "SSH_MSG_CHANNEL_OPEN_CONFIRMATION",
- "SSH_MSG_CHANNEL_OPEN_FAILURE",
- "SSH_MSG_CHANNEL_REQUEST",
- "SSH_MSG_CHANNEL_SUCCESS",
- "SSH_MSG_CHANNEL_WINDOW_ADJUST",
- "SSH_MSG_DEBUG",
- "SSH_MSG_DISCONNECT",
- "SSH_MSG_GLOBAL_REQUEST",
- "SSH_MSG_IGNORE",
- "SSH_MSG_KEXDH_INIT",
- "SSH_MSG_KEXDH_REPLY",
- "SSH_MSG_KEXINIT",
- "SSH_MSG_KEX_DH_GEX_GROUP",
- "SSH_MSG_KEX_DH_GEX_REQUEST",
- "SSH_MSG_KEX_DH_GEX_REQUEST_OLD",
- "SSH_MSG_NEWKEYS",
- "SSH_MSG_REQUEST_FAILURE",
- "SSH_MSG_REQUEST_SUCCESS",
- "SSH_MSG_SERVICE_ACCEPT",
- "SSH_MSG_SERVICE_REQUEST",
- "SSH_MSG_UNIMPLEMENTED",
- "SSH_MSG_USERAUTH_BANNER",
- "SSH_MSG_USERAUTH_FAILURE",
-%% hard args "SSH_MSG_USERAUTH_INFO_REQUEST",
-%% "SSH_MSG_USERAUTH_INFO_RESPONSE",
- "SSH_MSG_USERAUTH_PASSWD_CHANGEREQ",
- "SSH_MSG_USERAUTH_PK_OK",
-%%rfc4252 p12 error "SSH_MSG_USERAUTH_REQUEST",
- "SSH_MSG_USERAUTH_SUCCESS"].
+?MSG_CODE('SSH_MSG_KEX_DH_GEX_REPLY', 33);
+?MSG_CODE('SSH_MSG_KEX_ECDH_INIT', 30);
+?MSG_CODE('SSH_MSG_KEX_ECDH_REPLY', 31).
+
+%%%====================================================
+%%%=== WARNING: Knowledge of the test object ahead! ===
+%%%====================================================
+
+%% SSH message records:
+-include_lib("ssh/src/ssh_connect.hrl").
+-include_lib("ssh/src/ssh_transport.hrl").
+
+%%% Encoding and decoding are asymmetric, so out=binary, in=string. Sometimes. :(
+fix_asym(#ssh_msg_global_request{name=N} = M) -> M#ssh_msg_global_request{name = binary_to_list(N)};
+fix_asym(#ssh_msg_debug{message=D,language=L} = M) -> M#ssh_msg_debug{message = binary_to_list(D),
+ language = binary_to_list(L)};
+fix_asym(#ssh_msg_kexinit{cookie=C} = M) -> M#ssh_msg_kexinit{cookie = <<C:128>>};
+fix_asym(M) -> M.
+
+%%% Message codes 30 and 31 are overloaded depending on the kex family, so arrange the
+%%% decoder input as the test object does
+decode_state(<<30,_/binary>>=Msg, KexFam) -> <<KexFam/binary, Msg/binary>>;
+decode_state(<<31,_/binary>>=Msg, KexFam) -> <<KexFam/binary, Msg/binary>>;
+decode_state(Msg, _) -> Msg.
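
As a usage note (a sketch that assumes PropEr is available and that the properties are exported from the module), the rewritten properties can also be run directly from an Erlang shell instead of via the property test suite:

    proper:quickcheck(ssh_eqc_encode_decode:prop_ssh_decode(), 200).
    proper:quickcheck(ssh_eqc_encode_decode:prop_ssh_decode_encode(), 200).
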
diff --git a/lib/ssh/test/ssh_algorithms_SUITE.erl b/lib/ssh/test/ssh_algorithms_SUITE.erl
index 8b2db0e1a8..14605ee44f 100644
--- a/lib/ssh/test/ssh_algorithms_SUITE.erl
+++ b/lib/ssh/test/ssh_algorithms_SUITE.erl
@@ -198,7 +198,7 @@ try_exec_simple_group(Group, Config) ->
%%--------------------------------------------------------------------
%% Testing all default groups
-simple_exec_groups() -> [{timetrap,{minutes,5}}].
+simple_exec_groups() -> [{timetrap,{minutes,8}}].
simple_exec_groups(Config) ->
Sizes = interpolate( public_key:dh_gex_group_sizes() ),
@@ -206,10 +206,8 @@ simple_exec_groups(Config) ->
fun(Sz) ->
ct:log("Try size ~p",[Sz]),
ct:comment(Sz),
- case simple_exec_group(Sz, Config) of
- expected -> ct:log("Size ~p ok",[Sz]);
- _ -> ct:log("Size ~p not ok",[Sz])
- end
+ simple_exec_group(Sz, Config),
+ ct:log("Size ~p ok",[Sz])
end, Sizes),
ct:comment("~p",[lists:map(fun({_,I,_}) -> I;
(I) -> I
diff --git a/lib/ssh/test/ssh_benchmark_SUITE.erl b/lib/ssh/test/ssh_benchmark_SUITE.erl
index c2bfc48449..85750f8fbd 100644
--- a/lib/ssh/test/ssh_benchmark_SUITE.erl
+++ b/lib/ssh/test/ssh_benchmark_SUITE.erl
@@ -30,7 +30,7 @@
suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,2}]}]},
- {timetrap,{minutes,3}}
+ {timetrap,{minutes,6}}
].
%%suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -70,9 +70,12 @@ init_per_group(opensshc_erld, Config) ->
ssh_test_lib:setup_dsa(DataDir, UserDir),
ssh_test_lib:setup_rsa(DataDir, UserDir),
ssh_test_lib:setup_ecdsa("256", DataDir, UserDir),
+ AlgsD = ssh:default_algorithms(),
+ AlgsC = ssh_test_lib:default_algorithms(sshc),
Common = ssh_test_lib:intersect_bi_dir(
- ssh_test_lib:intersection(ssh:default_algorithms(),
- ssh_test_lib:default_algorithms(sshc))),
+ ssh_test_lib:intersection(AlgsD, AlgsC)),
+ ct:pal("~p~n~nErld:~n~p~n~nOpenSSHc:~n~p~n~nCommon:~n~p",
+ [inet:gethostname(), AlgsD, AlgsC, Common]),
[{c_kexs, ssh_test_lib:sshc(kex)},
{c_ciphers, ssh_test_lib:sshc(cipher)},
{common_algs, Common}
@@ -427,13 +430,20 @@ function_algs_times_sizes(EncDecs, L) ->
|| {Alg,Size,Time} <- lists:foldl(fun increment/2, [], Raw)].
function_ats_result({ssh_transport,encrypt,2}, #call{args=[S,Data]}) ->
- {{encrypt,S#ssh.encrypt}, size(Data)};
+ {{encrypt,S#ssh.encrypt}, binsize(Data)};
function_ats_result({ssh_transport,decrypt,2}, #call{args=[S,Data]}) ->
- {{decrypt,S#ssh.decrypt}, size(Data)};
+ {{decrypt,S#ssh.decrypt}, binsize(Data)};
function_ats_result({ssh_message,encode,1}, #call{result=Data}) ->
{encode, size(Data)};
function_ats_result({ssh_message,decode,1}, #call{args=[Data]}) ->
{decode, size(Data)}.
+
+binsize(B) when is_binary(B) -> size(B);
+binsize({B1,B2}) when is_binary(B1), is_binary(B2) -> size(B1) + size(B2);
+binsize({B1,B2,_}) when is_binary(B1), is_binary(B2) -> size(B1) + size(B2).
+
+
+
increment({Alg,Sz,T}, [{Alg,SumSz,SumT}|Acc]) ->
diff --git a/lib/ssh/test/ssh_options_SUITE.erl b/lib/ssh/test/ssh_options_SUITE.erl
index 8f060bebd8..86f5cb1746 100644
--- a/lib/ssh/test/ssh_options_SUITE.erl
+++ b/lib/ssh/test/ssh_options_SUITE.erl
@@ -831,10 +831,13 @@ supported_hash(HashAlg) ->
really_do_hostkey_fingerprint_check(Config, HashAlg) ->
PrivDir = proplists:get_value(priv_dir, Config),
- UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
- file:make_dir(UserDir),
+ UserDirServer = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDirServer),
SysDir = proplists:get_value(data_dir, Config),
+ UserDirClient =
+ ssh_test_lib:create_random_dir(Config), % Ensure no 'known_hosts' file disturbs the test
+
%% All host key fingerprints. Trust that public_key has checked the ssh_hostkey_fingerprint
%% function since that function is used by the ssh client...
FPs = [case HashAlg of
@@ -857,7 +860,7 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
%% Start daemon with the public keys that we got fingerprints from
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SysDir},
- {user_dir, UserDir},
+ {user_dir, UserDirServer},
{password, "morot"}]),
FP_check_fun = fun(PeerName, FP) ->
@@ -876,7 +879,7 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
end},
{user, "foo"},
{password, "morot"},
- {user_dir, UserDir},
+ {user_dir, UserDirClient},
{user_interaction, false}]),
ssh:stop_daemon(Pid).
diff --git a/lib/ssh/test/ssh_property_test_SUITE.erl b/lib/ssh/test/ssh_property_test_SUITE.erl
index 7ba2732a88..9b2a84d8e4 100644
--- a/lib/ssh/test/ssh_property_test_SUITE.erl
+++ b/lib/ssh/test/ssh_property_test_SUITE.erl
@@ -68,9 +68,6 @@ init_per_group(_, Config) ->
end_per_group(_, Config) ->
Config.
-%%% Always skip the testcase that is not quite in phase with the
-%%% ssh_message.erl code
-init_per_testcase(decode_encode, _) -> {skip, "Fails - testcase is not ok"};
init_per_testcase(_TestCase, Config) -> Config.
end_per_testcase(_TestCase, Config) -> Config.
diff --git a/lib/ssh/test/ssh_sftp_SUITE.erl b/lib/ssh/test/ssh_sftp_SUITE.erl
index 70662f5d93..acf76157a2 100644
--- a/lib/ssh/test/ssh_sftp_SUITE.erl
+++ b/lib/ssh/test/ssh_sftp_SUITE.erl
@@ -1038,7 +1038,7 @@ oldprep(Config) ->
prepare(Config0) ->
PrivDir = proplists:get_value(priv_dir, Config0),
- Dir = filename:join(PrivDir, random_chars(10)),
+ Dir = filename:join(PrivDir, ssh_test_lib:random_chars(10)),
file:make_dir(Dir),
Keys = [filename,
testfile,
@@ -1058,8 +1058,6 @@ prepare(Config0) ->
[{sftp_priv_dir,Dir} | Config2].
-random_chars(N) -> [crypto:rand_uniform($a,$z) || _<-lists:duplicate(N,x)].
-
foldl_keydelete(Keys, L) ->
lists:foldl(fun(K,E) -> lists:keydelete(K,1,E) end,
L,
diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl
index f93237f3e7..286ac6e882 100644
--- a/lib/ssh/test/ssh_test_lib.erl
+++ b/lib/ssh/test/ssh_test_lib.erl
@@ -113,19 +113,27 @@ std_simple_exec(Host, Port, Config) ->
std_simple_exec(Host, Port, Config, []).
std_simple_exec(Host, Port, Config, Opts) ->
+ ct:log("~p:~p std_simple_exec",[?MODULE,?LINE]),
ConnectionRef = ssh_test_lib:std_connect(Config, Host, Port, Opts),
+ ct:log("~p:~p connected! ~p",[?MODULE,?LINE,ConnectionRef]),
{ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity),
- success = ssh_connection:exec(ConnectionRef, ChannelId, "23+21-2.", infinity),
- Data = {ssh_cm, ConnectionRef, {data, ChannelId, 0, <<"42\n">>}},
- case ssh_test_lib:receive_exec_result(Data) of
- expected ->
- ok;
- Other ->
- ct:fail(Other)
- end,
- ssh_test_lib:receive_exec_end(ConnectionRef, ChannelId),
- ssh:close(ConnectionRef).
-
+ ct:log("~p:~p session_channel ok ~p",[?MODULE,?LINE,ChannelId]),
+ ExecResult = ssh_connection:exec(ConnectionRef, ChannelId, "23+21-2.", infinity),
+ ct:log("~p:~p exec ~p",[?MODULE,?LINE,ExecResult]),
+ case ExecResult of
+ success ->
+ Expected = {ssh_cm, ConnectionRef, {data,ChannelId,0,<<"42\n">>}},
+ case receive_exec_result(Expected) of
+ expected ->
+ ok;
+ Other ->
+ ct:fail(Other)
+ end,
+ receive_exec_end(ConnectionRef, ChannelId),
+ ssh:close(ConnectionRef);
+ _ ->
+ ct:fail(ExecResult)
+ end.
start_shell(Port, IOServer) ->
start_shell(Port, IOServer, []).
@@ -834,3 +842,20 @@ get_kex_init(Conn, Ref, TRef) ->
end
end.
+%%%----------------------------------------------------------------
+%%% Return a string with N random characters
+%%%
+random_chars(N) -> [crypto:rand_uniform($a,$z) || _<-lists:duplicate(N,x)].
+
+
+create_random_dir(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Name = filename:join(PrivDir, random_chars(15)),
+ case file:make_dir(Name) of
+ ok ->
+ Name;
+ {error,eexist} ->
+ %% The Name already denotes an existing file system object; try again.
+ %% The likelihood of repeatedly generating an existing file name is low.
+ create_random_dir(Config)
+ end.
diff --git a/lib/ssh/test/ssh_upgrade_SUITE.erl b/lib/ssh/test/ssh_upgrade_SUITE.erl
index b5b27c369a..7b9b109fa1 100644
--- a/lib/ssh/test/ssh_upgrade_SUITE.erl
+++ b/lib/ssh/test/ssh_upgrade_SUITE.erl
@@ -199,6 +199,4 @@ close(#state{server = Server,
connection = undefined}.
-random_contents() -> list_to_binary( random_chars(3) ).
-
-random_chars(N) -> [crypto:rand_uniform($a,$z) || _<-lists:duplicate(N,x)].
+random_contents() -> list_to_binary( ssh_test_lib:random_chars(3) ).
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index edc7e0d8b2..916b41742e 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -424,6 +424,14 @@ marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_valid
</taglist>
</item>
+
+ <tag><c>max_handshake_size</c></tag>
+ <item>
+ <p>Integer (24 bits unsigned). Used to limit the size of
+ valid TLS handshake packets, in order to avoid DoS attacks.
+ Defaults to 256*1024.</p>
+ </item>
+
</taglist>
</item>
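
The option documented above can be set per listen socket or per connection. A minimal sketch, assuming placeholder certificate files, host name and port number; 8388607 matches the ?MAX_UNIT24 bound introduced in this change:

    %% Raise the handshake limit on the server side to the maximum accepted value.
    {ok, ListenSocket} =
        ssl:listen(9999, [{certfile, "cert.pem"}, {keyfile, "key.pem"},
                          {max_handshake_size, 8388607}]),
    %% Lower it on a client that only expects small handshakes.
    {ok, Socket} =
        ssl:connect("server.example.com", 9999,
                    [{max_handshake_size, 64*1024}], 5000).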
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index c72ee44a95..0b7229b67e 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -765,7 +765,8 @@ handle_options(Opts0, Role) ->
client, Role),
crl_check = handle_option(crl_check, Opts, false),
crl_cache = handle_option(crl_cache, Opts, {ssl_crl_cache, {internal, []}}),
- v2_hello_compatible = handle_option(v2_hello_compatible, Opts, false)
+ v2_hello_compatible = handle_option(v2_hello_compatible, Opts, false),
+ max_handshake_size = handle_option(max_handshake_size, Opts, ?DEFAULT_MAX_HANDSHAKE_SIZE)
},
CbInfo = proplists:get_value(cb_info, Opts, default_cb_info(Protocol)),
@@ -780,7 +781,8 @@ handle_options(Opts0, Role) ->
alpn_preferred_protocols, next_protocols_advertised,
client_preferred_next_protocols, log_alert,
server_name_indication, honor_cipher_order, padding_check, crl_check, crl_cache,
- fallback, signature_algs, eccs, honor_ecc_order, beast_mitigation, v2_hello_compatible],
+ fallback, signature_algs, eccs, honor_ecc_order, beast_mitigation, v2_hello_compatible,
+ max_handshake_size],
SockOpts = lists:foldl(fun(Key, PropList) ->
proplists:delete(Key, PropList)
@@ -1028,6 +1030,8 @@ validate_option(beast_mitigation, Value) when Value == one_n_minus_one orelse
Value;
validate_option(v2_hello_compatible, Value) when is_boolean(Value) ->
Value;
+validate_option(max_handshake_size, Value) when is_integer(Value) andalso Value =< ?MAX_UNIT24 ->
+ Value;
validate_option(Opt, Value) ->
throw({error, {options, {Opt, Value}}}).
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index 6e7c8c5ddd..6ed2fc83da 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -864,11 +864,11 @@ handle_call({close, {Pid, Timeout}}, From, StateName, State0, Connection) when i
%% When downgrading a TLS connection to a transport connection
%% we must receive the close alert from the peer before releasing the
%% transport socket.
- {next_state, downgrade, State, [{timeout, Timeout, downgrade}]};
+ {next_state, downgrade, State#state{terminated = true}, [{timeout, Timeout, downgrade}]};
handle_call({close, _} = Close, From, StateName, State, Connection) ->
%% Run terminate before returning so that the reuseaddr
- %% inet-option
- Result = Connection:terminate(Close, StateName, State),
+ %% inet-option works properly
+ Result = Connection:terminate(Close, StateName, State#state{terminated = true}),
{stop_and_reply, {shutdown, normal},
{reply, From, Result}, State};
handle_call({shutdown, How0}, From, _,
@@ -1010,7 +1010,10 @@ handle_info(Msg, StateName, #state{socket = Socket, error_tag = Tag} = State) ->
terminate(_, _, #state{terminated = true}) ->
%% Happens when user closes the connection using ssl:close/1
%% we want to guarantee that Transport:close has been called
- %% when ssl:close/1 returns.
+ %% when ssl:close/1 returns, unless it is a downgrade where
+ %% we want to guarantee that the close alert is received before
+ %% returning. In both cases terminate has already been run manually
+ %% before being run by gen_statem, which ends up here.
ok;
terminate({shutdown, transport_closed} = Reason,
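
For reference, the downgrade case described in the comment above is triggered by closing with a new controlling process and a timeout, while a plain close waits for the transport socket to be closed. A rough sketch with placeholder variable names, assuming the downgrade call returns the underlying TCP socket:

    %% Downgrade: hand the transport socket over to NewOwnerPid once the
    %% peer's close alert has been received (or the timeout expires).
    {ok, _TcpSocket} = ssl:close(TlsSocket, {NewOwnerPid, 5000}),
    %% Ordinary close: Transport:close/1 is guaranteed to have run
    %% when this call returns.
    ok = ssl:close(OtherTlsSocket).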
diff --git a/lib/ssl/src/ssl_handshake.hrl b/lib/ssl/src/ssl_handshake.hrl
index fde92035a2..324b7dbde3 100644
--- a/lib/ssl/src/ssl_handshake.hrl
+++ b/lib/ssl/src/ssl_handshake.hrl
@@ -80,6 +80,9 @@
-define(CLIENT_KEY_EXCHANGE, 16).
-define(FINISHED, 20).
+-define(MAX_UNIT24, 8388607).
+-define(DEFAULT_MAX_HANDSHAKE_SIZE, (256*1024)).
+
-record(random, {
gmt_unix_time, % uint32
random_bytes % opaque random_bytes[28]
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 98b89bb811..c34af9f82c 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -142,7 +142,8 @@
signature_algs,
eccs,
honor_ecc_order :: boolean(),
- v2_hello_compatible :: boolean()
+ v2_hello_compatible :: boolean(),
+ max_handshake_size :: integer()
}).
-record(socket_options,
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 32991d3079..77606911be 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -424,18 +424,26 @@ handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE, fragment = Data},
ssl_options = Options} = State0) ->
try
{Packets, Buf} = tls_handshake:get_tls_handshake(Version,Data,Buf0, Options),
- State =
+ State1 =
State0#state{protocol_buffers =
Buffers#protocol_buffers{tls_handshake_buffer = Buf}},
- Events = tls_handshake_events(Packets),
- case StateName of
- connection ->
- ssl_connection:hibernate_after(StateName, State, Events);
- _ ->
- {next_state, StateName, State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
- end
+ case Packets of
+ [] ->
+ assert_buffer_sanity(Buf, Options),
+ {Record, State} = next_record(State1),
+ next_event(StateName, Record, State);
+ _ ->
+ Events = tls_handshake_events(Packets),
+ case StateName of
+ connection ->
+ ssl_connection:hibernate_after(StateName, State1, Events);
+ _ ->
+ {next_state, StateName,
+ State1#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
+ end
+ end
catch throw:#alert{} = Alert ->
- ssl_connection:handle_own_alert(Alert, Version, StateName, State0)
+ ssl_connection:handle_own_alert(Alert, Version, StateName, State0)
end;
%%% TLS record protocol level application data messages
handle_common_event(internal, #ssl_tls{type = ?APPLICATION_DATA, fragment = Data}, StateName, State) ->
@@ -615,8 +623,6 @@ next_event(StateName, Record, State, Actions) ->
{next_state, StateName, State, [{next_event, internal, Alert} | Actions]}
end.
-tls_handshake_events([]) ->
- throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE, malformed_handshake));
tls_handshake_events(Packets) ->
lists:map(fun(Packet) ->
{next_event, internal, {handshake, Packet}}
@@ -735,3 +741,25 @@ unprocessed_events(Events) ->
%% handshake events left to process before we should
%% process more TLS-records received on the socket.
erlang:length(Events)-1.
+
+
+assert_buffer_sanity(<<?BYTE(_Type), ?UINT24(Length), Rest/binary>>, #ssl_options{max_handshake_size = Max}) when
+ Length =< Max ->
+ case size(Rest) of
+ N when N < Length ->
+ true;
+ N when N > Length ->
+ throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE,
+ too_big_handshake_data));
+ _ ->
+ throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE,
+ malformed_handshake_data))
+ end;
+assert_buffer_sanity(Bin, _) ->
+ case size(Bin) of
+ N when N < 3 ->
+ true;
+ _ ->
+ throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE,
+ malformed_handshake_data))
+ end.
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index 52c1af5b4c..de5895d7ba 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -136,7 +136,8 @@ options_tests() ->
honor_server_cipher_order,
honor_client_cipher_order,
unordered_protocol_versions_server,
- unordered_protocol_versions_client
+ unordered_protocol_versions_client,
+ max_handshake_size
].
options_tests_tls() ->
@@ -3860,6 +3861,29 @@ unordered_protocol_versions_client(Config) when is_list(Config) ->
ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg).
%%--------------------------------------------------------------------
+max_handshake_size() ->
+ [{doc,"Test that max_handshake_size can be set to the maximum allowed value."}].
+
+max_handshake_size(Config) when is_list(Config) ->
+ ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, [{max_handshake_size, 8388607} |ServerOpts]}]),
+ Port = ssl_test_lib:inet_port(Server),
+
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, [{max_handshake_size, 8388607} | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, ok, Client, ok).
+
+%%--------------------------------------------------------------------
server_name_indication_option() ->
[{doc,"Test API server_name_indication option to connect."}].
diff --git a/lib/stdlib/doc/src/dict.xml b/lib/stdlib/doc/src/dict.xml
index c926ff1b5b..c229a18721 100644
--- a/lib/stdlib/doc/src/dict.xml
+++ b/lib/stdlib/doc/src/dict.xml
@@ -106,6 +106,16 @@
</func>
<func>
+ <name name="take" arity="2"/>
+ <fsummary>Return value and new dictionary without the element with this key.</fsummary>
+ <desc>
+ <p>This function returns the value associated with the key and a
+ new dictionary without this key.
+ Returns <c>error</c> if the key is not present in the dictionary.</p>
+ </desc>
+ </func>
+
+ <func>
<name name="filter" arity="2"/>
<fsummary>Select elements that satisfy a predicate.</fsummary>
<desc>
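
A small sketch of the take/2 semantics documented above, with arbitrary keys and values:

    D0 = dict:from_list([{a, 1}, {b, 2}]),
    {1, D1} = dict:take(a, D0),   %% value plus dictionary without the key
    error = dict:take(a, D1).     %% the key is no longer present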
diff --git a/lib/stdlib/doc/src/ets.xml b/lib/stdlib/doc/src/ets.xml
index 5f5d2b7f36..05401a2d40 100644
--- a/lib/stdlib/doc/src/ets.xml
+++ b/lib/stdlib/doc/src/ets.xml
@@ -541,10 +541,6 @@ Error: fun containing local Erlang function calls
<c><anno>Tab</anno></c> is
not of the correct type, or if <c><anno>Item</anno></c> is not
one of the allowed values, a <c>badarg</c> exception is raised.</p>
- <warning>
- <p>In Erlang/OTP R11B and earlier, this function would not fail but
- return <c>undefined</c> for invalid values for <c>Item</c>.</p>
- </warning>
<p>In addition to the <c>{<anno>Item</anno>,<anno>Value</anno>}</c>
pairs defined for <seealso marker="#info/1"><c>info/1</c></seealso>,
the following items are allowed:</p>
diff --git a/lib/stdlib/doc/src/gb_trees.xml b/lib/stdlib/doc/src/gb_trees.xml
index 790d4b8bf1..5cfff021c1 100644
--- a/lib/stdlib/doc/src/gb_trees.xml
+++ b/lib/stdlib/doc/src/gb_trees.xml
@@ -109,6 +109,28 @@
</func>
<func>
+ <name name="take" arity="2"/>
+ <fsummary>Returns the value and a new tree without the node with key <c>Key</c>.</fsummary>
+ <desc>
+ <p>Returns the value <c><anno>Value</anno></c> from the node with key <c><anno>Key</anno></c>
+ and a new tree <c><anno>Tree2</anno></c> without this node.
+ Assumes that the node with the key is present in the tree;
+ crashes otherwise.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="take_any" arity="2"/>
+ <fsummary>Returns the value and a new tree without the node with key <c>Key</c>, or <c>error</c>.</fsummary>
+ <desc>
+ <p>Returns the value <c><anno>Value</anno></c> from the node with key <c><anno>Key</anno></c>
+ and a new tree <c><anno>Tree2</anno></c> without this node.
+ Returns <c>error</c> if the node with the key is not present in
+ the tree.</p>
+ </desc>
+ </func>
+
+ <func>
<name name="empty" arity="0"/>
<fsummary>Return an empty tree.</fsummary>
<desc>
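
A small sketch of the difference between take/2 and take_any/2 documented above, with arbitrary keys and values:

    T0 = gb_trees:from_orddict([{a, 1}, {b, 2}]),
    {1, T1} = gb_trees:take(a, T0),       %% crashes if the key is absent
    error = gb_trees:take_any(a, T1),     %% take_any returns 'error' instead
    {2, _T2} = gb_trees:take_any(b, T1).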
diff --git a/lib/stdlib/doc/src/gen_event.xml b/lib/stdlib/doc/src/gen_event.xml
index c24542002a..42e952fd46 100644
--- a/lib/stdlib/doc/src/gen_event.xml
+++ b/lib/stdlib/doc/src/gen_event.xml
@@ -350,13 +350,18 @@ gen_event:stop -----> Module:terminate/2
<func>
<name>start() -> Result</name>
- <name>start(EventMgrName) -> Result</name>
+ <name>start(EventMgrName | Options) -> Result</name>
+ <name>start(EventMgrName, Options) -> Result</name>
<fsummary>Create a stand-alone event manager process.</fsummary>
<type>
- <v>EventMgrName = {local,Name} | {global,GlobalName}
- | {via,Module,ViaName}</v>
+ <v>EventMgrName = {local,Name} | {global,GlobalName} | {via,Module,ViaName}</v>
<v>&nbsp;Name = atom()</v>
<v>&nbsp;GlobalName = ViaName = term()</v>
+ <v>Options = [Option]</v>
+ <v>&nbsp;Option = {debug,Dbgs} | {timeout,Time} | {spawn_opt,SOpts}</v>
+ <v>&nbsp;&nbsp;Dbgs = [Dbg]</v>
+ <v>&nbsp;&nbsp;&nbsp;Dbg = trace | log | statistics | {log_to_file,FileName} | {install,{Func,FuncState}}</v>
+ <v>&nbsp;&nbsp;SOpts = [term()]</v>
<v>Result = {ok,Pid} | {error,{already_started,Pid}}</v>
<v>&nbsp;Pid = pid()</v>
</type>
@@ -371,14 +376,19 @@ gen_event:stop -----> Module:terminate/2
<func>
<name>start_link() -> Result</name>
- <name>start_link(EventMgrName) -> Result</name>
+ <name>start_link(EventMgrName | Options) -> Result</name>
+ <name>start_link(EventMgrName, Options) -> Result</name>
<fsummary>Create a generic event manager process in a supervision tree.
</fsummary>
<type>
- <v>EventMgrName = {local,Name} | {global,GlobalName}
- | {via,Module,ViaName}</v>
+ <v>EventMgrName = {local,Name} | {global,GlobalName} | {via,Module,ViaName}</v>
<v>&nbsp;Name = atom()</v>
<v>&nbsp;GlobalName = ViaName = term()</v>
+ <v>Options = [Option]</v>
+ <v>&nbsp;Option = {debug,Dbgs} | {timeout,Time} | {spawn_opt,SOpts}</v>
+ <v>&nbsp;&nbsp;Dbgs = [Dbg]</v>
+ <v>&nbsp;&nbsp;&nbsp;Dbg = trace | log | statistics | {log_to_file,FileName} | {install,{Func,FuncState}}</v>
+ <v>&nbsp;&nbsp;SOpts = [term()]</v>
<v>Result = {ok,Pid} | {error,{already_started,Pid}}</v>
<v>&nbsp;Pid = pid()</v>
</type>
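
A minimal sketch of the start options documented above; the manager name, debug flags and heap size are arbitrary:

    {ok, _Pid} = gen_event:start_link({local, my_manager},
                                      [{debug, [trace]},
                                       {spawn_opt, [{min_heap_size, 10000}]}]),
    ok = gen_event:stop(my_manager).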
diff --git a/lib/stdlib/doc/src/gen_fsm.xml b/lib/stdlib/doc/src/gen_fsm.xml
index de06987d38..719ab2b558 100644
--- a/lib/stdlib/doc/src/gen_fsm.xml
+++ b/lib/stdlib/doc/src/gen_fsm.xml
@@ -534,11 +534,6 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4
the function call fails.</p>
<p>Return value <c>Reply</c> is defined in the return value
of <c>Module:StateName/3</c>.</p>
- <note>
- <p>The ancient behavior of sometimes consuming the server
- exit message if the server died during the call while
- linked to the client was removed in Erlang 5.6/OTP R12B.</p>
- </note>
</desc>
</func>
</funcs>
diff --git a/lib/stdlib/doc/src/gen_server.xml b/lib/stdlib/doc/src/gen_server.xml
index 4a7dd60858..662076b5f0 100644
--- a/lib/stdlib/doc/src/gen_server.xml
+++ b/lib/stdlib/doc/src/gen_server.xml
@@ -162,11 +162,6 @@ gen_server:abcast -----> Module:handle_cast/2
of <c>Module:handle_call/3</c>.</p>
<p>The call can fail for many reasons, including time-out and the
called <c>gen_server</c> process dying before or during the call.</p>
- <note>
- <p>The ancient behavior of sometimes consuming the server
- exit message if the server died during the call while
- linked to the client was removed in Erlang 5.6/OTP R12B.</p>
- </note>
</desc>
</func>
diff --git a/lib/stdlib/doc/src/orddict.xml b/lib/stdlib/doc/src/orddict.xml
index 109b038cb5..26bbf499c6 100644
--- a/lib/stdlib/doc/src/orddict.xml
+++ b/lib/stdlib/doc/src/orddict.xml
@@ -113,6 +113,15 @@
</func>
<func>
+ <name name="take" arity="2"/>
+ <fsummary>Return value and new dictionary without the element with this key.</fsummary>
+ <desc>
+ <p>This function returns the value associated with the key and a new dictionary without this key.
+ Returns <c>error</c> if the key is not present in the dictionary.</p>
+ </desc>
+ </func>
+
+ <func>
<name name="filter" arity="2"/>
<fsummary>Select elements that satisfy a predicate.</fsummary>
<desc>
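
A small sketch of the take/2 semantics documented above, mirroring the dict example, with arbitrary keys and values:

    O0 = orddict:from_list([{a, 1}, {b, 2}]),
    {1, O1} = orddict:take(a, O0),
    error = orddict:take(a, O1).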
diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl
index f921e28ef6..9449ba3dc2 100644
--- a/lib/stdlib/src/dict.erl
+++ b/lib/stdlib/src/dict.erl
@@ -38,7 +38,7 @@
%% Standard interface.
-export([new/0,is_key/2,to_list/1,from_list/1,size/1,is_empty/1]).
--export([fetch/2,find/2,fetch_keys/1,erase/2]).
+-export([fetch/2,find/2,fetch_keys/1,erase/2,take/2]).
-export([store/3,append/3,append_list/3,update/3,update/4,update_counter/3]).
-export([fold/3,map/2,filter/2,merge/3]).
@@ -172,6 +172,27 @@ erase_key(Key, [E|Bkt0]) ->
{[E|Bkt1],Dc};
erase_key(_, []) -> {[],0}.
+-spec take(Key, Dict) -> {Value, Dict1} | error when
+ Dict :: dict(Key, Value),
+ Dict1 :: dict(Key, Value),
+ Key :: term(),
+ Value :: term().
+
+take(Key, D0) ->
+ Slot = get_slot(D0, Key),
+ case on_bucket(fun (B0) -> take_key(Key, B0) end, D0, Slot) of
+ {D1,{Value,Dc}} ->
+ {Value, maybe_contract(D1, Dc)};
+ {_,error} -> error
+ end.
+
+take_key(Key, [?kv(Key,Val)|Bkt]) ->
+ {Bkt,{Val,1}};
+take_key(Key, [E|Bkt0]) ->
+ {Bkt1,Res} = take_key(Key, Bkt0),
+ {[E|Bkt1],Res};
+take_key(_, []) -> {[],error}.
+
-spec store(Key, Value, Dict1) -> Dict2 when
Dict1 :: dict(Key, Value),
Dict2 :: dict(Key, Value).
diff --git a/lib/stdlib/src/erl_eval.erl b/lib/stdlib/src/erl_eval.erl
index 40a34aa30f..eafee346eb 100644
--- a/lib/stdlib/src/erl_eval.erl
+++ b/lib/stdlib/src/erl_eval.erl
@@ -1306,6 +1306,7 @@ partial_eval(Expr) ->
ev_expr({op,_,Op,L,R}) -> erlang:Op(ev_expr(L), ev_expr(R));
ev_expr({op,_,Op,A}) -> erlang:Op(ev_expr(A));
ev_expr({integer,_,X}) -> X;
+ev_expr({char,_,X}) -> X;
ev_expr({float,_,X}) -> X;
ev_expr({atom,_,X}) -> X;
ev_expr({tuple,_,Es}) ->
diff --git a/lib/stdlib/src/erl_parse.yrl b/lib/stdlib/src/erl_parse.yrl
index 9cd95705af..922455a6f2 100644
--- a/lib/stdlib/src/erl_parse.yrl
+++ b/lib/stdlib/src/erl_parse.yrl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -33,7 +33,6 @@ list tail
list_comprehension lc_expr lc_exprs
binary_comprehension
tuple
-%struct
record_expr record_tuple record_field record_fields
map_expr map_tuple map_field map_field_assoc map_field_exact map_fields map_key
if_expr if_clause if_clauses case_expr cr_clause cr_clauses receive_expr
@@ -108,9 +107,8 @@ type_sig -> fun_type 'when' type_guards : {type, ?anno('$1'), bounded_fun,
type_guards -> type_guard : ['$1'].
type_guards -> type_guard ',' type_guards : ['$1'|'$3'].
-type_guard -> atom '(' top_types ')' : {type, ?anno('$1'), constraint,
- ['$1', '$3']}.
-type_guard -> var '::' top_type : build_def('$1', '$3').
+type_guard -> atom '(' top_types ')' : build_compat_constraint('$1', '$3').
+type_guard -> var '::' top_type : build_constraint('$1', '$3').
top_types -> top_type : ['$1'].
top_types -> top_type ',' top_types : ['$1'|'$3'].
@@ -156,6 +154,7 @@ type -> '#' atom '{' field_types '}' : {type, ?anno('$1'),
record, ['$2'|'$4']}.
type -> binary_type : '$1'.
type -> integer : '$1'.
+type -> char : '$1'.
type -> 'fun' '(' ')' : {type, ?anno('$1'), 'fun', []}.
type -> 'fun' '(' fun_type_100 ')' : '$3'.
@@ -268,7 +267,6 @@ expr_max -> binary : '$1'.
expr_max -> list_comprehension : '$1'.
expr_max -> binary_comprehension : '$1'.
expr_max -> tuple : '$1'.
-%%expr_max -> struct : '$1'.
expr_max -> '(' expr ')' : '$2'.
expr_max -> 'begin' exprs 'end' : {block,?anno('$1'),'$2'}.
expr_max -> if_expr : '$1'.
@@ -327,10 +325,6 @@ lc_expr -> binary '<=' expr : {b_generate,?anno('$2'),'$1','$3'}.
tuple -> '{' '}' : {tuple,?anno('$1'),[]}.
tuple -> '{' exprs '}' : {tuple,?anno('$1'),'$2'}.
-
-%%struct -> atom tuple :
-%% {struct,?anno('$1'),element(3, '$1'),element(3, '$2')}.
-
map_expr -> '#' map_tuple :
{map, ?anno('$1'),'$2'}.
map_expr -> expr_max '#' map_tuple :
@@ -1056,13 +1050,13 @@ build_typed_attribute({atom,Aa,Attr},_) ->
end.
build_type_spec({Kind,Aa}, {SpecFun, TypeSpecs})
- when (Kind =:= spec) or (Kind =:= callback) ->
+ when Kind =:= spec ; Kind =:= callback ->
NewSpecFun =
case SpecFun of
{atom, _, Fun} ->
{Fun, find_arity_from_specs(TypeSpecs)};
- {{atom,_, Mod}, {atom,_, Fun}} ->
- {Mod,Fun,find_arity_from_specs(TypeSpecs)}
+ {{atom, _, Mod}, {atom, _, Fun}} ->
+ {Mod, Fun, find_arity_from_specs(TypeSpecs)}
end,
{attribute,Aa,Kind,{NewSpecFun, TypeSpecs}}.
@@ -1076,11 +1070,24 @@ find_arity_from_specs([Spec|_]) ->
{type, _, 'fun', [{type, _, product, Args},_]} = Fun,
length(Args).
-build_def({var, A, '_'}, _Types) ->
+%% The 'is_subtype(V, T)' syntax is not supported as of Erlang/OTP
+%% 19.0, but is kept for backward compatibility.
+build_compat_constraint({atom, _, is_subtype}, [{var, _, _}=LHS, Type]) ->
+ build_constraint(LHS, Type);
+build_compat_constraint({atom, _, is_subtype}, [LHS, _Type]) ->
+ ret_err(?anno(LHS), "bad type variable");
+build_compat_constraint({atom, A, Atom}, _Types) ->
+ ret_err(A, io_lib:format("unsupported constraint ~w", [Atom])).
+
+build_constraint({atom, _, is_subtype}, [{var, _, _}=LHS, Type]) ->
+ build_constraint(LHS, Type);
+build_constraint({atom, A, Atom}, _Foo) ->
+ ret_err(A, io_lib:format("unsupported constraint ~w", [Atom]));
+build_constraint({var, A, '_'}, _Types) ->
ret_err(A, "bad type variable");
-build_def(LHS, Types) ->
+build_constraint(LHS, Type) ->
IsSubType = {atom, ?anno(LHS), is_subtype},
- {type, ?anno(LHS), constraint, [IsSubType, [LHS, Types]]}.
+ {type, ?anno(LHS), constraint, [IsSubType, [LHS, Type]]}.
lift_unions(T1, {type, _Aa, union, List}) ->
{type, ?anno(T1), union, [T1|List]};
@@ -1573,13 +1580,17 @@ new_anno(Term) ->
Abstr :: erl_parse_tree().
anno_to_term(Abstract) ->
- map_anno(fun erl_anno:to_term/1, Abstract).
+ F = fun(Anno, Acc) -> {erl_anno:to_term(Anno), Acc} end,
+ {NewAbstract, []} = modify_anno1(Abstract, [], F),
+ NewAbstract.
-spec anno_from_term(Term) -> erl_parse_tree() when
Term :: term().
anno_from_term(Term) ->
- map_anno(fun erl_anno:from_term/1, Term).
+ F = fun(T, Acc) -> {erl_anno:from_term(T), Acc} end,
+ {NewTerm, []} = modify_anno1(Term, [], F),
+ NewTerm.
%% Forms.
modify_anno1({function,F,A}, Ac, _Mf) ->
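
The two constraint forms accepted by the grammar change above can be contrasted in a short sketch; the function names are arbitrary:

    %% Preferred form:
    -spec f(X) -> X when X :: integer().
    %% Legacy form, still accepted for backward compatibility:
    -spec g(X) -> X when is_subtype(X, integer()).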
diff --git a/lib/stdlib/src/escript.erl b/lib/stdlib/src/escript.erl
index 7f5ef4df42..c42ae981e7 100644
--- a/lib/stdlib/src/escript.erl
+++ b/lib/stdlib/src/escript.erl
@@ -481,46 +481,49 @@ find_first_body_line(Fd, HeaderSz0, LineNo, KeepFirst, Sections) ->
%% Look for special comment on second line
Line2 = get_line(Fd),
{ok, HeaderSz2} = file:position(Fd, cur),
- case classify_line(Line2) of
- emu_args ->
- %% Skip special comment on second line
- Line3 = get_line(Fd),
- {HeaderSz2, LineNo + 2, Fd,
- Sections#sections{type = guess_type(Line3),
- comment = undefined,
- emu_args = Line2}};
- Line2Type ->
- %% Look for special comment on third line
- Line3 = get_line(Fd),
- {ok, HeaderSz3} = file:position(Fd, cur),
- Line3Type = classify_line(Line3),
- if
- Line3Type =:= emu_args ->
- %% Skip special comment on third line
- Line4 = get_line(Fd),
- {HeaderSz3, LineNo + 3, Fd,
- Sections#sections{type = guess_type(Line4),
- comment = Line2,
- emu_args = Line3}};
- Sections#sections.shebang =:= undefined,
- KeepFirst =:= true ->
- %% No shebang. Use the entire file
- {HeaderSz0, LineNo, Fd,
- Sections#sections{type = guess_type(Line2)}};
- Sections#sections.shebang =:= undefined ->
- %% No shebang. Skip the first line
- {HeaderSz1, LineNo, Fd,
- Sections#sections{type = guess_type(Line2)}};
- Line2Type =:= comment ->
- %% Skip shebang on first line and comment on second
- {HeaderSz2, LineNo + 2, Fd,
- Sections#sections{type = guess_type(Line3),
- comment = Line2}};
- true ->
- %% Just skip shebang on first line
- {HeaderSz1, LineNo + 1, Fd,
- Sections#sections{type = guess_type(Line2)}}
- end
+ if
+ Sections#sections.shebang =:= undefined,
+ KeepFirst =:= true ->
+ %% No shebang. Use the entire file
+ {HeaderSz0, LineNo, Fd,
+ Sections#sections{type = guess_type(Line2)}};
+ Sections#sections.shebang =:= undefined ->
+ %% No shebang. Skip the first line
+ {HeaderSz1, LineNo, Fd,
+ Sections#sections{type = guess_type(Line2)}};
+ true ->
+ case classify_line(Line2) of
+ emu_args ->
+ %% Skip special comment on second line
+ Line3 = get_line(Fd),
+ {HeaderSz2, LineNo + 2, Fd,
+ Sections#sections{type = guess_type(Line3),
+ comment = undefined,
+ emu_args = Line2}};
+ comment ->
+ %% Look for special comment on third line
+ Line3 = get_line(Fd),
+ {ok, HeaderSz3} = file:position(Fd, cur),
+ Line3Type = classify_line(Line3),
+ if
+ Line3Type =:= emu_args ->
+ %% Skip special comment on third line
+ Line4 = get_line(Fd),
+ {HeaderSz3, LineNo + 3, Fd,
+ Sections#sections{type = guess_type(Line4),
+ comment = Line2,
+ emu_args = Line3}};
+ true ->
+ %% Skip shebang on first line and comment on second
+ {HeaderSz2, LineNo + 2, Fd,
+ Sections#sections{type = guess_type(Line3),
+ comment = Line2}}
+ end;
+ _ ->
+ %% Just skip shebang on first line
+ {HeaderSz1, LineNo + 1, Fd,
+ Sections#sections{type = guess_type(Line2)}}
+ end
end.
classify_line(Line) ->
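
The header lines that the restructured code above classifies can be illustrated with a small escript; the emulator flag on the third line is only an example:

    #!/usr/bin/env escript
    %% An ordinary comment on the second line
    %%! -smp enable
    main(_Args) ->
        io:format("hello~n").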
diff --git a/lib/stdlib/src/gb_trees.erl b/lib/stdlib/src/gb_trees.erl
index 457287fa52..c0cdde012e 100644
--- a/lib/stdlib/src/gb_trees.erl
+++ b/lib/stdlib/src/gb_trees.erl
@@ -52,6 +52,13 @@
%% - delete_any(X, T): removes key X from tree T if the key is present
%% in the tree, otherwise does nothing; returns new tree.
%%
+%% - take(X, T): removes the node with key X from tree T; returns the value
+%% of the node and a new tree without it. Assumes that the key is present
+%% in the tree.
+%%
+%% - take_any(X, T): removes the node with key X from tree T; returns the
+%% value and a new tree if the key is present, otherwise returns 'error'.
+%%
%% - balance(T): rebalances tree T. Note that this is rarely necessary,
%% but may be motivated when a large number of entries have been
%% deleted from the tree without further insertions. Rebalancing could
@@ -114,7 +121,8 @@
-export([empty/0, is_empty/1, size/1, lookup/2, get/2, insert/3,
update/3, enter/3, delete/2, delete_any/2, balance/1,
is_defined/2, keys/1, values/1, to_list/1, from_orddict/1,
- smallest/1, largest/1, take_smallest/1, take_largest/1,
+ smallest/1, largest/1, take/2, take_any/2,
+ take_smallest/1, take_largest/1,
iterator/1, iterator_from/2, next/1, map/2]).
@@ -416,6 +424,41 @@ merge(Smaller, Larger) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-spec take_any(Key, Tree1) -> {Value, Tree2} | 'error' when
+ Tree1 :: tree(Key, _),
+ Tree2 :: tree(Key, _),
+ Key :: term(),
+ Value :: term().
+
+take_any(Key, Tree) ->
+ case is_defined(Key, Tree) of
+ true -> take(Key, Tree);
+ false -> error
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-spec take(Key, Tree1) -> {Value, Tree2} when
+ Tree1 :: tree(Key, _),
+ Tree2 :: tree(Key, _),
+ Key :: term(),
+ Value :: term().
+
+take(Key, {S, T}) when is_integer(S), S >= 0 ->
+ {Value, Res} = take_1(Key, T),
+ {Value, {S - 1, Res}}.
+
+take_1(Key, {Key1, Value, Smaller, Larger}) when Key < Key1 ->
+ {Value2, Smaller1} = take_1(Key, Smaller),
+ {Value2, {Key1, Value, Smaller1, Larger}};
+take_1(Key, {Key1, Value, Smaller, Bigger}) when Key > Key1 ->
+ {Value2, Bigger1} = take_1(Key, Bigger),
+ {Value2, {Key1, Value, Smaller, Bigger1}};
+take_1(_, {_Key, Value, Smaller, Larger}) ->
+ {Value, merge(Smaller, Larger)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
-spec take_smallest(Tree1) -> {Key, Value, Tree2} when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
diff --git a/lib/stdlib/src/gen_event.erl b/lib/stdlib/src/gen_event.erl
index ccacf658e9..4839fe4f2c 100644
--- a/lib/stdlib/src/gen_event.erl
+++ b/lib/stdlib/src/gen_event.erl
@@ -32,7 +32,9 @@
%%% Modified by Martin - uses proc_lib, sys and gen!
--export([start/0, start/1, start_link/0, start_link/1, stop/1, stop/3,
+-export([start/0, start/1, start/2,
+ start_link/0, start_link/1, start_link/2,
+ stop/1, stop/3,
notify/2, sync_notify/2,
add_handler/3, add_sup_handler/3, delete_handler/3, swap_handler/3,
swap_sup_handler/3, which_handlers/1, call/3, call/4, wake_hib/4]).
@@ -117,30 +119,64 @@
-type del_handler_ret() :: ok | term() | {'EXIT',term()}.
-type emgr_name() :: {'local', atom()} | {'global', atom()}
- | {'via', atom(), term()}.
+ | {'via', atom(), term()}.
+-type debug_flag() :: 'trace' | 'log' | 'statistics' | 'debug'
+ | {'logfile', string()}.
+-type option() :: {'timeout', timeout()}
+ | {'debug', [debug_flag()]}
+ | {'spawn_opt', [proc_lib:spawn_option()]}.
-type emgr_ref() :: atom() | {atom(), atom()} | {'global', atom()}
- | {'via', atom(), term()} | pid().
+ | {'via', atom(), term()} | pid().
-type start_ret() :: {'ok', pid()} | {'error', term()}.
%%---------------------------------------------------------------------------
-define(NO_CALLBACK, 'no callback module').
+%% -----------------------------------------------------------------
+%% Starts a generic event handler.
+%% start()
+%% start(MgrName | Options)
+%% start(MgrName, Options)
+%% start_link()
+%% start_link(MgrName | Options)
+%% start_link(MgrName, Options)
+%% MgrName ::= {local, atom()} | {global, atom()} | {via, atom(), term()}
+%% Options ::= [{timeout, Timeout} | {debug, [Flag]} | {spawn_opt,SOpts}]
+%% Flag ::= trace | log | {logfile, File} | statistics | debug
+%% (debug == log && statistics)
+%% Returns: {ok, Pid} |
+%% {error, {already_started, Pid}} |
+%% {error, Reason}
+%% -----------------------------------------------------------------
+
-spec start() -> start_ret().
start() ->
gen:start(?MODULE, nolink, ?NO_CALLBACK, [], []).
--spec start(emgr_name()) -> start_ret().
-start(Name) ->
- gen:start(?MODULE, nolink, Name, ?NO_CALLBACK, [], []).
+-spec start(emgr_name() | [option()]) -> start_ret().
+start(Name) when is_tuple(Name) ->
+ gen:start(?MODULE, nolink, Name, ?NO_CALLBACK, [], []);
+start(Options) when is_list(Options) ->
+ gen:start(?MODULE, nolink, ?NO_CALLBACK, [], Options).
+
+-spec start(emgr_name(), [option()]) -> start_ret().
+start(Name, Options) ->
+ gen:start(?MODULE, nolink, Name, ?NO_CALLBACK, [], Options).
-spec start_link() -> start_ret().
start_link() ->
gen:start(?MODULE, link, ?NO_CALLBACK, [], []).
--spec start_link(emgr_name()) -> start_ret().
-start_link(Name) ->
- gen:start(?MODULE, link, Name, ?NO_CALLBACK, [], []).
+-spec start_link(emgr_name() | [option()]) -> start_ret().
+start_link(Name) when is_tuple(Name) ->
+ gen:start(?MODULE, link, Name, ?NO_CALLBACK, [], []);
+start_link(Options) when is_list(Options) ->
+ gen:start(?MODULE, link, ?NO_CALLBACK, [], Options).
+
+-spec start_link(emgr_name(), [option()]) -> start_ret().
+start_link(Name, Options) ->
+ gen:start(?MODULE, link, Name, ?NO_CALLBACK, [], Options).
%% -spec init_it(pid(), 'self' | pid(), emgr_name(), module(), [term()], [_]) ->
init_it(Starter, self, Name, Mod, Args, Options) ->
@@ -160,7 +196,7 @@ add_sup_handler(M, Handler, Args) ->
rpc(M, {add_sup_handler, Handler, Args, self()}).
-spec notify(emgr_ref(), term()) -> 'ok'.
-notify(M, Event) -> send(M, {notify, Event}).
+notify(M, Event) -> send(M, {notify, Event}).
-spec sync_notify(emgr_ref(), term()) -> 'ok'.
sync_notify(M, Event) -> rpc(M, {sync_notify, Event}).
@@ -193,7 +229,7 @@ stop(M) ->
stop(M, Reason, Timeout) ->
gen:stop(M, Reason, Timeout).
-rpc(M, Cmd) ->
+rpc(M, Cmd) ->
{ok, Reply} = gen:call(M, self(), Cmd, infinity),
Reply.
@@ -421,7 +457,7 @@ server_add_handler({Mod,Id}, Args, MSL) ->
Handler = #handler{module = Mod,
id = Id},
server_add_handler(Mod, Handler, Args, MSL);
-server_add_handler(Mod, Args, MSL) ->
+server_add_handler(Mod, Args, MSL) ->
Handler = #handler{module = Mod},
server_add_handler(Mod, Handler, Args, MSL).
@@ -446,7 +482,7 @@ server_add_sup_handler({Mod,Id}, Args, MSL, Parent) ->
id = Id,
supervised = Parent},
server_add_handler(Mod, Handler, Args, MSL);
-server_add_sup_handler(Mod, Args, MSL, Parent) ->
+server_add_sup_handler(Mod, Args, MSL, Parent) ->
link(Parent),
Handler = #handler{module = Mod,
supervised = Parent},
@@ -454,7 +490,7 @@ server_add_sup_handler(Mod, Args, MSL, Parent) ->
%% server_delete_handler(HandlerId, Args, MSL) -> {Ret, MSL'}
-server_delete_handler(HandlerId, Args, MSL, SName) ->
+server_delete_handler(HandlerId, Args, MSL, SName) ->
case split(HandlerId, MSL) of
{Mod, Handler, MSL1} ->
{do_terminate(Mod, Handler, Args,
@@ -511,7 +547,7 @@ split_and_terminate(HandlerId, Args, MSL, SName, Handler2, Sup) ->
%% server_notify(Event, Func, MSL, SName) -> MSL'
-server_notify(Event, Func, [Handler|T], SName) ->
+server_notify(Event, Func, [Handler|T], SName) ->
case server_update(Handler, Func, Event, SName) of
{ok, Handler1} ->
{Hib, NewHandlers} = server_notify(Event, Func, T, SName),
@@ -531,9 +567,9 @@ server_update(Handler1, Func, Event, SName) ->
Mod1 = Handler1#handler.module,
State = Handler1#handler.state,
case catch Mod1:Func(Event, State) of
- {ok, State1} ->
+ {ok, State1} ->
{ok, Handler1#handler{state = State1}};
- {ok, State1, hibernate} ->
+ {ok, State1, hibernate} ->
{hibernate, Handler1#handler{state = State1}};
{swap_handler, Args1, State1, Handler2, Args2} ->
do_swap(Mod1, Handler1, Args1, State1, Handler2, Args2, SName);
@@ -644,14 +680,14 @@ server_call_update(Handler1, Query, SName) ->
Mod1 = Handler1#handler.module,
State = Handler1#handler.state,
case catch Mod1:handle_call(Query, State) of
- {ok, Reply, State1} ->
+ {ok, Reply, State1} ->
{{ok, Handler1#handler{state = State1}}, Reply};
- {ok, Reply, State1, hibernate} ->
- {{hibernate, Handler1#handler{state = State1}},
+ {ok, Reply, State1, hibernate} ->
+ {{hibernate, Handler1#handler{state = State1}},
Reply};
{swap_handler, Reply, Args1, State1, Handler2, Args2} ->
{do_swap(Mod1,Handler1,Args1,State1,Handler2,Args2,SName), Reply};
- {remove_handler, Reply} ->
+ {remove_handler, Reply} ->
do_terminate(Mod1, Handler1, remove_handler, State,
remove, SName, normal),
{no, Reply};
@@ -686,7 +722,7 @@ report_error(_Handler, normal, _, _, _) -> ok;
report_error(_Handler, shutdown, _, _, _) -> ok;
report_error(_Handler, {swapped,_,_}, _, _, _) -> ok;
report_error(Handler, Reason, State, LastIn, SName) ->
- Reason1 =
+ Reason1 =
case Reason of
{'EXIT',{undef,[{M,F,A,L}|MFAs]}} ->
case code:is_loaded(M) of
diff --git a/lib/stdlib/src/orddict.erl b/lib/stdlib/src/orddict.erl
index 37cf0084f0..caa59099af 100644
--- a/lib/stdlib/src/orddict.erl
+++ b/lib/stdlib/src/orddict.erl
@@ -22,7 +22,7 @@
%% Standard interface.
-export([new/0,is_key/2,to_list/1,from_list/1,size/1,is_empty/1]).
--export([fetch/2,find/2,fetch_keys/1,erase/2]).
+-export([fetch/2,find/2,fetch_keys/1,erase/2,take/2]).
-export([store/3,append/3,append_list/3,update/3,update/4,update_counter/3]).
-export([fold/3,map/2,filter/2,merge/3]).
@@ -106,6 +106,23 @@ erase(Key, [{K,_}=E|Dict]) when Key > K ->
erase(_Key, [{_K,_Val}|Dict]) -> Dict; %Key == K
erase(_, []) -> [].
+-spec take(Key, Orddict) -> {Value, Orddict1} | error when
+ Orddict :: orddict(Key, Value),
+ Orddict1 :: orddict(Key, Value),
+ Key :: term(),
+ Value :: term().
+
+take(Key, Dict) ->
+ take_1(Key, Dict, []).
+
+take_1(Key, [{K,_}|_], _Acc) when Key < K ->
+ error;
+take_1(Key, [{K,_}=P|D], Acc) when Key > K ->
+ take_1(Key, D, [P|Acc]);
+take_1(_Key, [{_K,Value}|D], Acc) ->
+ {Value,lists:reverse(Acc, D)};
+take_1(_, [], _) -> error.
+
-spec store(Key, Value, Orddict1) -> Orddict2 when
Orddict1 :: orddict(Key, Value),
Orddict2 :: orddict(Key, Value).
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index 4161ced9ab..f4257fb571 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -408,7 +408,7 @@ obsolete_1(docb_xml_check, _, _) ->
%% Added in R15B
obsolete_1(asn1rt, F, _) when F == load_driver; F == unload_driver ->
- {deprecated,"deprecated (will be removed in OTP 18); has no effect as drivers are no longer used"};
+ {removed,"removed; has no effect as drivers are no longer used"};
obsolete_1(ssl, pid, 1) ->
{removed,"was removed in R16; is no longer needed"};
obsolete_1(inviso, _, _) ->
@@ -463,21 +463,23 @@ obsolete_1(wxCursor, new, 4) ->
%% Added in OTP 17.
obsolete_1(asn1ct, decode,3) ->
- {deprecated,"deprecated; use Mod:decode/2 instead"};
+ {removed,"removed; use Mod:decode/2 instead"};
+obsolete_1(asn1ct, encode, 2) ->
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1ct, encode, 3) ->
- {deprecated,"deprecated; use Mod:encode/2 instead"};
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1rt, decode,3) ->
- {deprecated,"deprecated; use Mod:decode/2 instead"};
+ {removed,"removed; use Mod:decode/2 instead"};
obsolete_1(asn1rt, encode, 2) ->
- {deprecated,"deprecated; use Mod:encode/2 instead"};
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1rt, encode, 3) ->
- {deprecated,"deprecated; use Mod:encode/2 instead"};
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1rt, info, 1) ->
- {deprecated,"deprecated; use Mod:info/0 instead"};
+ {removed,"removed; use Mod:info/0 instead"};
obsolete_1(asn1rt, utf8_binary_to_list, 1) ->
- {deprecated,{unicode,characters_to_list,1}};
+ {removed,{unicode,characters_to_list,1},"OTP 20"};
obsolete_1(asn1rt, utf8_list_to_binary, 1) ->
- {deprecated,{unicode,characters_to_binary,1}};
+ {removed,{unicode,characters_to_binary,1},"OTP 20"};
%% Added in OTP 18.
obsolete_1(core_lib, get_anno, 1) ->
diff --git a/lib/stdlib/test/dict_SUITE.erl b/lib/stdlib/test/dict_SUITE.erl
index 47358d729f..e99af9ad42 100644
--- a/lib/stdlib/test/dict_SUITE.erl
+++ b/lib/stdlib/test/dict_SUITE.erl
@@ -23,10 +23,10 @@
-module(dict_SUITE).
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
- create/1,store/1,iterate/1]).
+ create/1,store/1,iterate/1,remove/1]).
-include_lib("common_test/include/ct.hrl").
@@ -37,7 +37,7 @@ suite() ->
{timetrap,{minutes,5}}].
all() ->
- [create, store, iterate].
+ [create, store, remove, iterate].
groups() ->
[].
@@ -92,6 +92,27 @@ store_1(List, M) ->
end,
D0.
+remove(_Config) ->
+ test_all([{0,87}], fun remove_1/2).
+
+remove_1(List0, M) ->
+ %% Make sure that keys are unique. Randomize key order.
+ List1 = orddict:from_list(List0),
+ List2 = lists:sort([{rand:uniform(),E} || E <- List1]),
+ List = [E || {_,E} <- List2],
+ D0 = M(from_list, List),
+ remove_2(List, D0, M).
+
+remove_2([{Key,Val}|T], D0, M) ->
+ {Val,D1} = M(take, {Key,D0}),
+ error = M(take, {Key,D1}),
+ D2 = M(erase, {Key,D0}),
+ true = M(equal, {D1,D2}),
+ remove_2(T, D1, M);
+remove_2([], D, M) ->
+ true = M(is_empty, D),
+ D.
+
%%%
%%% Test specifics for gb_trees.
%%%
diff --git a/lib/stdlib/test/dict_test_lib.erl b/lib/stdlib/test/dict_test_lib.erl
index 7c4c3572ae..f6fef7bdf4 100644
--- a/lib/stdlib/test/dict_test_lib.erl
+++ b/lib/stdlib/test/dict_test_lib.erl
@@ -33,7 +33,9 @@ new(Mod, Eq) ->
(iterator, S) -> Mod:iterator(S);
(iterator_from, {Start, S}) -> Mod:iterator_from(Start, S);
(next, I) -> Mod:next(I);
- (to_list, D) -> to_list(Mod, D)
+ (to_list, D) -> to_list(Mod, D);
+ (erase, {K,D}) -> erase(Mod, K, D);
+ (take, {K,D}) -> take(Mod, K, D)
end.
empty(Mod) ->
@@ -67,3 +69,19 @@ enter(Mod, Key, Val, Dict) ->
true ->
Mod:store(Key, Val, Dict)
end.
+
+erase(Mod, Key, Val) when Mod =:= dict; Mod =:= orddict ->
+ Mod:erase(Key, Val);
+erase(gb_trees, Key, Val) ->
+ gb_trees:delete_any(Key, Val).
+
+take(gb_trees, Key, Val) ->
+ Res = try
+ gb_trees:take(Key, Val)
+ catch
+ error:_ ->
+ error
+ end,
+ Res = gb_trees:take_any(Key, Val);
+take(Mod, Key, Val) ->
+ Mod:take(Key, Val).
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index c86e17f70c..c90f855b3b 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -64,7 +64,7 @@
predef/1,
maps/1,maps_type/1,maps_parallel_match/1,
otp_11851/1,otp_11879/1,otp_13230/1,
- record_errors/1]).
+ record_errors/1, otp_xxxxx/1]).
suite() ->
[{ct_hooks,[ts_install_cth]},
@@ -84,7 +84,7 @@ all() ->
too_many_arguments, basic_errors, bin_syntax_errors, predef,
maps, maps_type, maps_parallel_match,
otp_11851, otp_11879, otp_13230,
- record_errors].
+ record_errors, otp_xxxxx].
groups() ->
[{unused_vars_warn, [],
@@ -3869,6 +3869,55 @@ record_errors(Config) when is_list(Config) ->
{3,erl_lint,{redefine_field,r,a}}],[]}}],
run(Config, Ts).
+otp_xxxxx(Config) ->
+ Ts = [{constraint1,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,
+ [{2,erl_parse,"unsupported constraint " ++ ["is_subtype"]}],
+ []}},
+ {constraint2,
+ <<"-export([t/1]).
+ -spec t(X) -> X when bad_atom(X, integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,
+ [{2,erl_parse,"unsupported constraint " ++ ["bad_atom"]}],
+ []}},
+ {constraint3,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(bad_variable, integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,[{2,erl_parse,"bad type variable"}],[]}},
+ {constraint4,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(atom(), integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,[{2,erl_parse,"bad type variable"}],[]}},
+ {constraint5,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(X, integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ []},
+ {constraint6,
+ <<"-export([t/1]).
+ -spec t(X) -> X when X :: integer().
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ []}],
+ run(Config, Ts).
+
run(Config, Tests) ->
F = fun({N,P,Ws,E}, BadL) ->
case catch run_test(Config, P, Ws) of
diff --git a/lib/stdlib/test/erl_pp_SUITE.erl b/lib/stdlib/test/erl_pp_SUITE.erl
index 13c5662741..31ea3210a8 100644
--- a/lib/stdlib/test/erl_pp_SUITE.erl
+++ b/lib/stdlib/test/erl_pp_SUITE.erl
@@ -825,12 +825,13 @@ type_examples() ->
%% is_subtype(V, T) syntax, we need a few examples of the syntax.
{ex31,<<"-spec t1(FooBar :: t99()) -> t99();"
"(t2()) -> t2();"
- "('\\'t::4'()) -> '\\'t::4'() when is_subtype('\\'t::4'(), t24);"
- "(t23()) -> t23() when is_subtype(t23(), atom()),"
- " is_subtype(t23(), t14());"
- "(t24()) -> t24() when is_subtype(t24(), atom()),"
- " is_subtype(t24(), t14()),"
- " is_subtype(t24(), '\\'t::4'()).">>},
+ "('\\'t::4'()) -> {'\\'t::4'(), B}"
+ " when is_subtype(B, '\\'t::4'());"
+ "(t23()) -> C when is_subtype(C, atom()),"
+ " is_subtype(C, t14());"
+ "(t24()) -> D when is_subtype(D, atom()),"
+ " is_subtype(D, t14()),"
+ " is_subtype(D, '\\'t::4'()).">>},
{ex32,<<"-spec mod:t2() -> any(). ">>},
{ex33,<<"-opaque attributes_data() :: "
"[{'column', column()} | {'line', info_line()} |"
diff --git a/lib/stdlib/test/escript_SUITE.erl b/lib/stdlib/test/escript_SUITE.erl
index 28d69232a0..0b9106a99c 100644
--- a/lib/stdlib/test/escript_SUITE.erl
+++ b/lib/stdlib/test/escript_SUITE.erl
@@ -28,6 +28,7 @@
strange_name/1,
emulator_flags/1,
emulator_flags_no_shebang/1,
+ two_lines/1,
module_script/1,
beam_script/1,
archive_script/1,
@@ -49,7 +50,7 @@ suite() ->
all() ->
[basic, errors, strange_name, emulator_flags,
- emulator_flags_no_shebang,
+ emulator_flags_no_shebang, two_lines,
module_script, beam_script, archive_script, epp,
create_and_extract, foldl, overflow,
archive_script_file_access, unicode].
@@ -153,6 +154,18 @@ emulator_flags(Config) when is_list(Config) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+two_lines(Config) when is_list(Config) ->
+ Data = proplists:get_value(data_dir, Config),
+ Dir = filename:absname(Data), %Get rid of trailing slash.
+ run(Dir, "two_lines -arg1 arg2 arg3",
+ [<<"main:[\"-arg1\",\"arg2\",\"arg3\"]\n"
+ "ERL_FLAGS=false\n"
+ "unknown:[]\n"
+ "ExitCode:0">>]),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
emulator_flags_no_shebang(Config) when is_list(Config) ->
Data = proplists:get_value(data_dir, Config),
Dir = filename:absname(Data), %Get rid of trailing slash.
diff --git a/lib/stdlib/test/escript_SUITE_data/two_lines b/lib/stdlib/test/escript_SUITE_data/two_lines
new file mode 100755
index 0000000000..cf4e99639c
--- /dev/null
+++ b/lib/stdlib/test/escript_SUITE_data/two_lines
@@ -0,0 +1,2 @@
+#! /usr/bin/env escript
+main(MainArgs) -> io:format("main:~p\n", [MainArgs]), ErlArgs = init:get_arguments(), io:format("ERL_FLAGS=~p\n", [os:getenv("ERL_FLAGS")]), io:format("unknown:~p\n",[[E || E <- ErlArgs, element(1, E) =:= unknown]]).
diff --git a/lib/stdlib/test/gen_event_SUITE.erl b/lib/stdlib/test/gen_event_SUITE.erl
index 4415c2d09d..9a7400c84e 100644
--- a/lib/stdlib/test/gen_event_SUITE.erl
+++ b/lib/stdlib/test/gen_event_SUITE.erl
@@ -21,22 +21,24 @@
-include_lib("common_test/include/ct.hrl").
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
-export([start/1, add_handler/1, add_sup_handler/1,
delete_handler/1, swap_handler/1, swap_sup_handler/1,
notify/1, sync_notify/1, call/1, info/1, hibernate/1,
call_format_status/1, call_format_status_anon/1,
- error_format_status/1, get_state/1, replace_state/1]).
+ error_format_status/1, get_state/1, replace_state/1,
+ start_opt/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
-all() ->
+all() ->
[start, {group, test_all}, hibernate,
call_format_status, call_format_status_anon, error_format_status,
- get_state, replace_state].
+ get_state, replace_state,
+ start_opt].
-groups() ->
+groups() ->
[{test_all, [],
[add_handler, add_sup_handler, delete_handler,
swap_handler, swap_sup_handler, notify, sync_notify,
@@ -59,6 +61,9 @@ end_per_group(_GroupName, Config) ->
%% Start an event manager.
%% --------------------------------------
+-define(LMGR, {local, my_dummy_name}).
+-define(GMGR, {global, my_dummy_name}).
+
start(Config) when is_list(Config) ->
OldFl = process_flag(trap_exit, true),
@@ -72,40 +77,36 @@ start(Config) when is_list(Config) ->
[] = gen_event:which_handlers(Pid1),
ok = gen_event:stop(Pid1),
- {ok, Pid2} = gen_event:start({local, my_dummy_name}),
+ {ok, Pid2} = gen_event:start(?LMGR),
[] = gen_event:which_handlers(my_dummy_name),
[] = gen_event:which_handlers(Pid2),
ok = gen_event:stop(my_dummy_name),
- {ok, Pid3} = gen_event:start_link({local, my_dummy_name}),
+ {ok, Pid3} = gen_event:start_link(?LMGR),
[] = gen_event:which_handlers(my_dummy_name),
[] = gen_event:which_handlers(Pid3),
ok = gen_event:stop(my_dummy_name),
- {ok, Pid4} = gen_event:start_link({global, my_dummy_name}),
- [] = gen_event:which_handlers({global, my_dummy_name}),
+ {ok, Pid4} = gen_event:start_link(?GMGR),
+ [] = gen_event:which_handlers(?GMGR),
[] = gen_event:which_handlers(Pid4),
- ok = gen_event:stop({global, my_dummy_name}),
+ ok = gen_event:stop(?GMGR),
{ok, Pid5} = gen_event:start_link({via, dummy_via, my_dummy_name}),
[] = gen_event:which_handlers({via, dummy_via, my_dummy_name}),
[] = gen_event:which_handlers(Pid5),
ok = gen_event:stop({via, dummy_via, my_dummy_name}),
- {ok, _} = gen_event:start_link({local, my_dummy_name}),
- {error, {already_started, _}} =
- gen_event:start_link({local, my_dummy_name}),
- {error, {already_started, _}} =
- gen_event:start({local, my_dummy_name}),
+ {ok, _} = gen_event:start_link(?LMGR),
+ {error, {already_started, _}} = gen_event:start_link(?LMGR),
+ {error, {already_started, _}} = gen_event:start(?LMGR),
ok = gen_event:stop(my_dummy_name),
- {ok, Pid6} = gen_event:start_link({global, my_dummy_name}),
- {error, {already_started, _}} =
- gen_event:start_link({global, my_dummy_name}),
- {error, {already_started, _}} =
- gen_event:start({global, my_dummy_name}),
+ {ok, Pid6} = gen_event:start_link(?GMGR),
+ {error, {already_started, _}} = gen_event:start_link(?GMGR),
+ {error, {already_started, _}} = gen_event:start(?GMGR),
- ok = gen_event:stop({global, my_dummy_name}, shutdown, 10000),
+ ok = gen_event:stop(?GMGR, shutdown, 10000),
receive
{'EXIT', Pid6, shutdown} -> ok
after 10000 ->
@@ -113,10 +114,8 @@ start(Config) when is_list(Config) ->
end,
{ok, Pid7} = gen_event:start_link({via, dummy_via, my_dummy_name}),
- {error, {already_started, _}} =
- gen_event:start_link({via, dummy_via, my_dummy_name}),
- {error, {already_started, _}} =
- gen_event:start({via, dummy_via, my_dummy_name}),
+ {error, {already_started, _}} = gen_event:start_link({via, dummy_via, my_dummy_name}),
+ {error, {already_started, _}} = gen_event:start({via, dummy_via, my_dummy_name}),
exit(Pid7, shutdown),
receive
@@ -128,6 +127,83 @@ start(Config) when is_list(Config) ->
process_flag(trap_exit, OldFl),
ok.
+start_opt(Config) when is_list(Config) ->
+ OldFl = process_flag(trap_exit, true),
+
+ dummy_via:reset(),
+
+ {ok, Pid0} = gen_event:start([]), %anonymous
+ [] = gen_event:which_handlers(Pid0),
+ ok = gen_event:stop(Pid0),
+
+ {ok, Pid1} = gen_event:start_link([]), %anonymous
+ [] = gen_event:which_handlers(Pid1),
+ ok = gen_event:stop(Pid1),
+
+ {ok, Pid2} = gen_event:start(?LMGR, []),
+ [] = gen_event:which_handlers(my_dummy_name),
+ [] = gen_event:which_handlers(Pid2),
+ ok = gen_event:stop(my_dummy_name),
+
+ {ok, Pid3} = gen_event:start_link(?LMGR, []),
+ [] = gen_event:which_handlers(my_dummy_name),
+ [] = gen_event:which_handlers(Pid3),
+ ok = gen_event:stop(my_dummy_name),
+
+ {ok, Pid4} = gen_event:start_link(?GMGR, []),
+ [] = gen_event:which_handlers(?GMGR),
+ [] = gen_event:which_handlers(Pid4),
+ ok = gen_event:stop(?GMGR),
+
+ {ok, Pid5} = gen_event:start_link({via, dummy_via, my_dummy_name}, []),
+ [] = gen_event:which_handlers({via, dummy_via, my_dummy_name}),
+ [] = gen_event:which_handlers(Pid5),
+ ok = gen_event:stop({via, dummy_via, my_dummy_name}),
+
+ {ok, _} = gen_event:start_link(?LMGR, []),
+ {error, {already_started, _}} = gen_event:start_link(?LMGR, []),
+ {error, {already_started, _}} = gen_event:start(?LMGR, []),
+ ok = gen_event:stop(my_dummy_name),
+
+ {ok, Pid7} = gen_event:start_link(?GMGR),
+ {error, {already_started, _}} = gen_event:start_link(?GMGR, []),
+ {error, {already_started, _}} = gen_event:start(?GMGR, []),
+
+ ok = gen_event:stop(?GMGR, shutdown, 10000),
+ receive
+ {'EXIT', Pid7, shutdown} -> ok
+ after 10000 ->
+ ct:fail(exit_gen_event)
+ end,
+
+ {ok, Pid8} = gen_event:start_link({via, dummy_via, my_dummy_name}),
+ {error, {already_started, _}} = gen_event:start_link({via, dummy_via, my_dummy_name}, []),
+ {error, {already_started, _}} = gen_event:start({via, dummy_via, my_dummy_name}, []),
+
+ exit(Pid8, shutdown),
+ receive
+ {'EXIT', Pid8, shutdown} -> ok
+ after 10000 ->
+ ct:fail(exit_gen_event)
+ end,
+
+ %% test spawn_opt
+ MinHeapSz = 10000,
+ {ok, Pid9} = gen_event:start_link(?LMGR, [{spawn_opt, [{min_heap_size, MinHeapSz}]}]),
+ {error, {already_started, _}} = gen_event:start_link(?LMGR, []),
+ {error, {already_started, _}} = gen_event:start(?LMGR, []),
+ {heap_size, HeapSz} = erlang:process_info(Pid9, heap_size),
+ true = HeapSz > MinHeapSz,
+ ok = gen_event:stop(my_dummy_name),
+
+ %% test debug opt
+ {ok, _} = gen_event:start_link(?LMGR, [{debug,[debug]}]),
+ {error, {already_started, _}} = gen_event:start_link(?LMGR, []),
+ {error, {already_started, _}} = gen_event:start(?LMGR, []),
+ ok = gen_event:stop(my_dummy_name),
+
+ process_flag(trap_exit, OldFl),
+ ok.
hibernate(Config) when is_list(Config) ->
{ok,Pid} = gen_event:start({local, my_dummy_handler}),
diff --git a/lib/stdlib/test/rand_SUITE.erl b/lib/stdlib/test/rand_SUITE.erl
index 8e7ac223a7..fe5eaccda5 100644
--- a/lib/stdlib/test/rand_SUITE.erl
+++ b/lib/stdlib/test/rand_SUITE.erl
@@ -283,13 +283,13 @@ gen(_, _, Acc) -> lists:reverse(Acc).
%% Check that the algorithms generate sound values.
basic_stats_uniform_1(Config) when is_list(Config) ->
- ct:timetrap({minutes,6}), %% valgrind needs a lot of time
+ ct:timetrap({minutes,15}), %% valgrind needs a lot of time
[basic_uniform_1(?LOOP, rand:seed_s(Alg), 0.0, array:new([{default, 0}]))
|| Alg <- algs()],
ok.
basic_stats_uniform_2(Config) when is_list(Config) ->
- ct:timetrap({minutes,6}), %% valgrind needs a lot of time
+ ct:timetrap({minutes,15}), %% valgrind needs a lot of time
[basic_uniform_2(?LOOP, rand:seed_s(Alg), 0, array:new([{default, 0}]))
|| Alg <- algs()],
ok.
@@ -396,7 +396,7 @@ crypto_uniform_n(N, State0) ->
%% Not a test but measures the time characteristics of the different algorithms
measure(Suite) when is_atom(Suite) -> [];
measure(_Config) ->
- ct:timetrap({minutes,6}), %% valgrind needs a lot of time
+ ct:timetrap({minutes,15}), %% valgrind needs a lot of time
Algos = [crypto64|algs()],
io:format("RNG uniform integer performance~n",[]),
_ = measure_1(random, fun(State) -> {int, random:uniform_s(10000, State)} end),
diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl
index 56d81eb1f2..15ccdea284 100644
--- a/lib/stdlib/test/shell_SUITE.erl
+++ b/lib/stdlib/test/shell_SUITE.erl
@@ -2326,7 +2326,7 @@ otp_6554(Config) when is_list(Config) ->
"[unproper | list]).">>),
%% Cheating:
"exception error: no function clause matching "
- "erl_eval:do_apply(4)" ++ _ =
+ "shell:apply_fun(4)" ++ _ =
comm_err(<<"erlang:error(function_clause, [4]).">>),
"exception error: no function clause matching "
"lists:reverse(" ++ _ =
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl
index 6aee749741..3bff546243 100644
--- a/lib/typer/src/typer.erl
+++ b/lib/typer/src/typer.erl
@@ -136,8 +136,9 @@ extract(#analysis{macros = Macros,
MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
CodeServer2 = dialyzer_codeserver:set_temp_records(MergedRecords, CodeServer1),
CodeServer3 = dialyzer_codeserver:finalize_exported_types(NewExpTypes, CodeServer2),
- CodeServer4 = dialyzer_utils:process_record_remote_types(CodeServer3),
- dialyzer_contracts:process_contract_remote_types(CodeServer4)
+ {CodeServer4, RecordDict} =
+ dialyzer_utils:process_record_remote_types(CodeServer3),
+ dialyzer_contracts:process_contract_remote_types(CodeServer4, RecordDict)
catch
throw:{error, ErrorMsg} ->
compile_error(ErrorMsg)
@@ -149,7 +150,7 @@ extract(#analysis{macros = Macros,
fun(Module, TmpPlt) ->
{ok, ModuleContracts} = dict:find(Module, Contracts),
SpecList = [{MFA, Contract}
- || {MFA, {_FileLine, Contract}} <- dict:to_list(ModuleContracts)],
+ || {MFA, {_FileLine, Contract}} <- maps:to_list(ModuleContracts)],
dialyzer_plt:insert_contract_list(TmpPlt, SpecList)
end,
NewTrustPLT = lists:foldl(FoldFun, TrustPLT, Modules),
@@ -165,8 +166,10 @@ get_type_info(#analysis{callgraph = CallGraph,
StrippedCallGraph = remove_external(CallGraph, TrustPLT),
%% io:format("--- Analyzing callgraph... "),
try
- NewPlt = dialyzer_succ_typings:analyze_callgraph(StrippedCallGraph,
- TrustPLT, CodeServer),
+ NewMiniPlt = dialyzer_succ_typings:analyze_callgraph(StrippedCallGraph,
+ TrustPLT,
+ CodeServer),
+ NewPlt = dialyzer_plt:restore_full_plt(NewMiniPlt),
Analysis#analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt}
catch
error:What ->
@@ -217,7 +220,7 @@ get_external(Exts, Plt) ->
-type fa() :: {atom(), arity()}.
-type func_info() :: {line(), atom(), arity()}.
--record(info, {records = map__new() :: map_dict(),
+-record(info, {records = maps:new() :: erl_types:type_table(),
functions = [] :: [func_info()],
types = map__new() :: map_dict(),
edoc = false :: boolean()}).
@@ -260,7 +263,7 @@ write_inc_files(Inc) ->
Functions = [Key || {Key, _} <- Val],
Val1 = [{{F,A},Type} || {{_Line,F,A},Type} <- Val],
Info = #info{types = map__from_list(Val1),
- records = map__new(),
+ records = maps:new(),
%% Note we need to sort functions here!
functions = lists:keysort(1, Functions)},
%% io:format("Types ~p\n", [Info#info.types]),
@@ -842,8 +845,9 @@ collect_info(Analysis) ->
TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
- TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
- dialyzer_contracts:process_contract_remote_types(TmpCServer3)
+ {TmpCServer3, RecordDict} =
+ dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3, RecordDict)
catch
throw:{error, ErrorMsg} ->
fatal_error(ErrorMsg)
diff --git a/lib/xmerl/src/xmerl_scan.erl b/lib/xmerl/src/xmerl_scan.erl
index 5e0459ec21..9f6b27113e 100644
--- a/lib/xmerl/src/xmerl_scan.erl
+++ b/lib/xmerl/src/xmerl_scan.erl
@@ -2225,16 +2225,18 @@ processed_whole_element(S=#xmerl_scanner{hook_fun = _Hook,
AllAttrs =
case S#xmerl_scanner.default_attrs of
true ->
- [ #xmlAttribute{name = AttName,
- parents = [{Name, Pos} | Parents],
- language = Lang,
- nsinfo = NSI,
- namespace = Namespace,
- value = AttValue,
- normalized = true} ||
- {AttName, AttValue} <- get_default_attrs(S, Name),
- AttValue =/= no_value,
- not lists:keymember(AttName, #xmlAttribute.name, Attrs) ];
+ DefaultAttrs =
+ [ #xmlAttribute{name = AttName,
+ parents = [{Name, Pos} | Parents],
+ language = Lang,
+ nsinfo = NSI,
+ namespace = Namespace,
+ value = AttValue,
+ normalized = true} ||
+ {AttName, AttValue} <- get_default_attrs(S, Name),
+ AttValue =/= no_value,
+ not lists:keymember(AttName, #xmlAttribute.name, Attrs) ],
+ lists:append(Attrs, DefaultAttrs);
false ->
Attrs
end,
diff --git a/lib/xmerl/test/xmerl_SUITE.erl b/lib/xmerl/test/xmerl_SUITE.erl
index e97b8c6a4b..cf7c0b7548 100644
--- a/lib/xmerl/test/xmerl_SUITE.erl
+++ b/lib/xmerl/test/xmerl_SUITE.erl
@@ -54,7 +54,8 @@ groups() ->
cpd_expl_provided_DTD]},
{misc, [],
[latin1_alias, syntax_bug1, syntax_bug2, syntax_bug3,
- pe_ref1, copyright, testXSEIF, export_simple1, export]},
+ pe_ref1, copyright, testXSEIF, export_simple1, export,
+ default_attrs_bug]},
{eventp_tests, [], [sax_parse_and_export]},
{ticket_tests, [],
[ticket_5998, ticket_7211, ticket_7214, ticket_7430,
@@ -223,6 +224,21 @@ syntax_bug3(Config) ->
Err -> Err
end.
+default_attrs_bug(Config) ->
+ file:set_cwd(datadir(Config)),
+ Doc = "<!DOCTYPE doc [<!ATTLIST doc b CDATA \"default\">]>\n"
+ "<doc a=\"explicit\"/>",
+ {#xmlElement{attributes = [#xmlAttribute{name = a, value = "explicit"},
+ #xmlAttribute{name = b, value = "default"}]},
+ []
+ } = xmerl_scan:string(Doc, [{default_attrs, true}]),
+ Doc2 = "<!DOCTYPE doc [<!ATTLIST doc b CDATA \"default\">]>\n"
+ "<doc b=\"also explicit\" a=\"explicit\"/>",
+ {#xmlElement{attributes = [#xmlAttribute{name = b, value = "also explicit"},
+ #xmlAttribute{name = a, value = "explicit"}]},
+ []
+ } = xmerl_scan:string(Doc2, [{default_attrs, true}]).
+
pe_ref1(Config) ->
file:set_cwd(datadir(Config)),
{#xmlElement{},[]} = xmerl_scan:file(datadir_join(Config,[misc,"PE_ref1.xml"]),[{validation,true}]).