Diffstat (limited to 'lib')
-rw-r--r--  lib/common_test/doc/src/ct.xml | 10
-rw-r--r--  lib/common_test/src/ct.erl | 17
-rw-r--r--  lib/common_test/src/test_server_ctrl.erl | 2
-rw-r--r--  lib/common_test/src/test_server_node.erl | 8
-rw-r--r--  lib/common_test/test_server/ts_erl_config.erl | 10
-rw-r--r--  lib/common_test/test_server/ts_run.erl | 2
-rw-r--r--  lib/compiler/src/compile.erl | 2
-rw-r--r--  lib/compiler/test/bs_match_SUITE.erl | 10
-rw-r--r--  lib/crypto/c_src/crypto.c | 14
-rw-r--r--  lib/debugger/src/dbg_icmd.erl | 2
-rw-r--r--  lib/debugger/src/dbg_wx_win.erl | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl | 44
-rw-r--r--  lib/dialyzer/test/options1_SUITE_data/results/compiler | 2
-rw-r--r--  lib/dialyzer/test/options1_SUITE_data/src/compiler/compile.erl | 6
-rw-r--r--  lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl | 44
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/unused_funs | 5
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/unused_funs.erl | 21
-rw-r--r--  lib/erl_docgen/priv/xsl/Makefile | 5
-rw-r--r--  lib/erl_docgen/vsn.mk | 2
-rw-r--r--  lib/hipe/main/hipe.erl | 6
-rw-r--r--  lib/inets/src/http_server/mod_esi.erl | 44
-rw-r--r--  lib/kernel/doc/src/Makefile | 1
-rw-r--r--  lib/kernel/doc/src/config.xml | 4
-rw-r--r--  lib/kernel/doc/src/erl_epmd.xml | 104
-rw-r--r--  lib/kernel/doc/src/error_logger.xml | 18
-rw-r--r--  lib/kernel/doc/src/kernel_app.xml | 155
-rw-r--r--  lib/kernel/doc/src/logger.xml | 159
-rw-r--r--  lib/kernel/doc/src/logger_chapter.xml | 127
-rw-r--r--  lib/kernel/doc/src/logger_disk_log_h.xml | 8
-rw-r--r--  lib/kernel/doc/src/logger_filters.xml | 4
-rw-r--r--  lib/kernel/doc/src/logger_formatter.xml | 55
-rw-r--r--  lib/kernel/doc/src/logger_std_h.xml | 11
-rw-r--r--  lib/kernel/doc/src/ref_man.xml | 5
-rw-r--r--  lib/kernel/doc/src/specs.xml | 1
-rw-r--r--  lib/kernel/src/application_controller.erl | 12
-rw-r--r--  lib/kernel/src/erl_epmd.erl | 64
-rw-r--r--  lib/kernel/src/erl_signal_handler.erl | 11
-rw-r--r--  lib/kernel/src/error_logger.erl | 43
-rw-r--r--  lib/kernel/src/file.erl | 2
-rw-r--r--  lib/kernel/src/inet_tcp_dist.erl | 148
-rw-r--r--  lib/kernel/src/kernel.app.src | 5
-rw-r--r--  lib/kernel/src/kernel.erl | 22
-rw-r--r--  lib/kernel/src/kernel_config.erl | 7
-rw-r--r--  lib/kernel/src/logger.erl | 360
-rw-r--r--  lib/kernel/src/logger_config.erl | 2
-rw-r--r--  lib/kernel/src/logger_disk_log_h.erl | 64
-rw-r--r--  lib/kernel/src/logger_filters.erl | 6
-rw-r--r--  lib/kernel/src/logger_formatter.erl | 175
-rw-r--r--  lib/kernel/src/logger_h_common.erl | 16
-rw-r--r--  lib/kernel/src/logger_h_common.hrl | 2
-rw-r--r--  lib/kernel/src/logger_internal.hrl | 2
-rw-r--r--  lib/kernel/src/logger_server.erl | 338
-rw-r--r--  lib/kernel/src/logger_simple.erl | 69
-rw-r--r--  lib/kernel/src/logger_std_h.erl | 22
-rw-r--r--  lib/kernel/test/Makefile | 1
-rw-r--r--  lib/kernel/test/application_SUITE.erl | 12
-rw-r--r--  lib/kernel/test/erl_distribution_SUITE.erl | 2
-rw-r--r--  lib/kernel/test/error_logger_warn_SUITE.erl | 14
-rw-r--r--  lib/kernel/test/heart_SUITE.erl | 8
-rw-r--r--  lib/kernel/test/kernel_config_SUITE.erl | 2
-rw-r--r--  lib/kernel/test/logger_SUITE.erl | 219
-rw-r--r--  lib/kernel/test/logger_disk_log_h_SUITE.erl | 219
-rw-r--r--  lib/kernel/test/logger_env_var_SUITE.erl | 815
-rw-r--r--  lib/kernel/test/logger_filters_SUITE.erl | 25
-rw-r--r--  lib/kernel/test/logger_formatter_SUITE.erl | 142
-rw-r--r--  lib/kernel/test/logger_simple_SUITE.erl | 203
-rw-r--r--  lib/kernel/test/logger_std_h_SUITE.erl | 327
-rw-r--r--  lib/kernel/test/logger_test_lib.erl | 82
-rw-r--r--  lib/kernel/test/os_SUITE.erl | 6
-rw-r--r--  lib/observer/src/observer_lib.erl | 2
-rw-r--r--  lib/parsetools/src/yecc.erl | 8
-rw-r--r--  lib/public_key/src/pubkey_pem.erl | 2
-rw-r--r--  lib/public_key/src/public_key.erl | 37
-rw-r--r--  lib/public_key/test/pbe_SUITE.erl | 10
-rw-r--r--  lib/public_key/test/public_key_SUITE.erl | 39
-rw-r--r--  lib/public_key/test/public_key_SUITE_data/dsa_key_pkcs8.pem | 9
-rw-r--r--  lib/public_key/test/public_key_SUITE_data/ec_key_pkcs8.pem | 5
-rw-r--r--  lib/public_key/test/public_key_SUITE_data/rsa_key_pkcs8.pem | 10
-rw-r--r--  lib/sasl/doc/src/sasl_app.xml | 24
-rw-r--r--  lib/sasl/src/sasl.erl | 4
-rw-r--r--  lib/sasl/test/sasl_report_SUITE.erl | 6
-rw-r--r--  lib/ssh/doc/src/ssh.xml | 6
-rw-r--r--  lib/ssh/src/ssh.hrl | 7
-rw-r--r--  lib/ssh/src/ssh_client_channel.erl | 4
-rw-r--r--  lib/ssh/src/ssh_connection_handler.erl | 425
-rw-r--r--  lib/ssh/src/ssh_options.erl | 21
-rw-r--r--  lib/ssh/src/ssh_sftp.erl | 25
-rw-r--r--  lib/ssh/test/ssh_basic_SUITE.erl | 135
-rw-r--r--  lib/ssh/test/ssh_test_lib.erl | 2
-rw-r--r--  lib/ssl/src/Makefile | 4
-rw-r--r--  lib/ssl/src/dtls_connection.erl | 13
-rw-r--r--  lib/ssl/src/dtls_listener_sup.erl (renamed from lib/ssl/src/dtls_udp_sup.erl) | 6
-rw-r--r--  lib/ssl/src/dtls_packet_demux.erl (renamed from lib/ssl/src/dtls_udp_listener.erl) | 56
-rw-r--r--  lib/ssl/src/dtls_socket.erl | 34
-rw-r--r--  lib/ssl/src/inet_tls_dist.erl | 100
-rw-r--r--  lib/ssl/src/ssl.app.src | 4
-rw-r--r--  lib/ssl/src/ssl.erl | 69
-rw-r--r--  lib/ssl/src/ssl_cipher.erl | 84
-rw-r--r--  lib/ssl/src/ssl_connection_sup.erl | 10
-rw-r--r--  lib/ssl/src/ssl_handshake.erl | 5
-rw-r--r--  lib/ssl/src/ssl_internal.hrl | 2
-rw-r--r--  lib/ssl/test/ssl_ECC.erl | 44
-rw-r--r--  lib/ssl/test/ssl_ECC_openssl_SUITE.erl | 2
-rw-r--r--  lib/ssl/test/ssl_basic_SUITE.erl | 27
-rw-r--r--  lib/ssl/test/ssl_test_lib.erl | 12
-rw-r--r--  lib/ssl/test/ssl_to_openssl_SUITE.erl | 28
-rw-r--r--  lib/stdlib/doc/src/Makefile | 1
-rw-r--r--  lib/stdlib/doc/src/io_lib.xml | 34
-rw-r--r--  lib/stdlib/doc/src/lib.xml | 103
-rw-r--r--  lib/stdlib/doc/src/ref_man.xml | 1
-rw-r--r--  lib/stdlib/doc/src/specs.xml | 1
-rw-r--r--  lib/stdlib/src/Makefile | 3
-rw-r--r--  lib/stdlib/src/epp.erl | 127
-rw-r--r--  lib/stdlib/src/erl_error.erl (renamed from lib/stdlib/src/lib.erl) | 327
-rw-r--r--  lib/stdlib/src/erl_eval.erl | 221
-rw-r--r--  lib/stdlib/src/escript.erl | 2
-rw-r--r--  lib/stdlib/src/ets.erl | 26
-rw-r--r--  lib/stdlib/src/gen_fsm.erl | 38
-rw-r--r--  lib/stdlib/src/gen_server.erl | 4
-rw-r--r--  lib/stdlib/src/gen_statem.erl | 2
-rw-r--r--  lib/stdlib/src/otp_internal.erl | 9
-rw-r--r--  lib/stdlib/src/proc_lib.erl | 12
-rw-r--r--  lib/stdlib/src/qlc.erl | 8
-rw-r--r--  lib/stdlib/src/shell.erl | 6
-rw-r--r--  lib/stdlib/src/slave.erl | 14
-rw-r--r--  lib/stdlib/src/stdlib.app.src | 4
-rw-r--r--  lib/stdlib/src/string.erl | 180
-rw-r--r--  lib/stdlib/test/epp_SUITE.erl | 171
-rw-r--r--  lib/stdlib/test/ets_SUITE.erl | 58
-rw-r--r--  lib/stdlib/test/io_SUITE.erl | 2
-rw-r--r--  lib/stdlib/test/qlc_SUITE.erl | 6
-rw-r--r--  lib/stdlib/test/shell_SUITE.erl | 6
-rw-r--r--  lib/stdlib/test/string_SUITE.erl | 12
-rw-r--r--  lib/syntax_tools/src/epp_dodger.erl | 36
-rw-r--r--  lib/syntax_tools/src/erl_prettypr.erl | 7
-rw-r--r--  lib/syntax_tools/src/erl_syntax_lib.erl | 2
-rw-r--r--  lib/tools/doc/src/fprof.xml | 10
-rw-r--r--  lib/tools/src/xref.erl | 6
-rw-r--r--  lib/tools/test/eprof_SUITE_data/eed.erl | 6
139 files changed, 4414 insertions, 2669 deletions
diff --git a/lib/common_test/doc/src/ct.xml b/lib/common_test/doc/src/ct.xml
index afd8741cd1..3d35ae4f54 100644
--- a/lib/common_test/doc/src/ct.xml
+++ b/lib/common_test/doc/src/ct.xml
@@ -572,6 +572,16 @@
</func>
<func>
+ <name>get_progname() -&gt; string()</name>
+ <fsummary>Returns the command used to start this Erlang instance.</fsummary>
+ <desc><marker id="get_progname-0"/>
+ <p>Returns the command used to start this Erlang instance.
+ If this information could not be found, the string
+ <c>"no_prog_name"</c> is returned.</p>
+ </desc>
+ </func>
+
+ <func>
<name>get_status() -&gt; TestStatus | {error, Reason} | no_tests_running</name>
<fsummary>Returns status of ongoing test.</fsummary>
<type>
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index fd7fa07b81..14a9ec07cf 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -87,6 +87,7 @@
decrypt_config_file/2, decrypt_config_file/3]).
-export([get_target_name/1]).
+-export([get_progname/0]).
-export([parse_table/1, listenv/1]).
-export([remaining_test_procs/0]).
@@ -975,7 +976,20 @@ make_priv_dir() ->
%%% belongs to.
get_target_name(Handle) ->
ct_util:get_target_name(Handle).
-
+
+%%%-----------------------------------------------------------------
+%%% @doc Return the command used to start (this) erlang
+
+-spec get_progname() -> string().
+
+get_progname() ->
+ case init:get_argument(progname) of
+ {ok, [[Prog]]} ->
+ Prog;
+ _Other ->
+ "no_prog_name"
+ end.
+
%%%-----------------------------------------------------------------
%%% @spec parse_table(Data) -> {Heading,Table}
%%% Data = [string()]
@@ -1006,7 +1020,6 @@ parse_table(Data) ->
listenv(Telnet) ->
ct_util:listenv(Telnet).
-
%%%-----------------------------------------------------------------
%%% @spec testcases(TestDir, Suite) -> Testcases | {error,Reason}
%%% TestDir = string()
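
The new ct:get_progname/0 is a thin wrapper around init:get_argument(progname), falling back to "no_prog_name" when the argument is missing. A rough usage sketch (the exact string depends on how the node was started):

    %% On a node started with the standard "erl" script:
    1> init:get_argument(progname).
    {ok,[["erl"]]}
    2> ct:get_progname().
    "erl"
    %% If the -progname argument is absent, the documented fallback
    %% "no_prog_name" is returned instead.
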
diff --git a/lib/common_test/src/test_server_ctrl.erl b/lib/common_test/src/test_server_ctrl.erl
index 1ae6c8c7c7..67645cac08 100644
--- a/lib/common_test/src/test_server_ctrl.erl
+++ b/lib/common_test/src/test_server_ctrl.erl
@@ -4382,7 +4382,7 @@ do_format_exception(Reason={Error,Stack}) ->
PF = fun(Term, I) ->
io_lib:format("~." ++ integer_to_list(I) ++ "tp", [Term])
end,
- case catch lib:format_exception(1, error, Error, Stack, StackFun, PF, utf8) of
+ case catch erl_error:format_exception(1, error, Error, Stack, StackFun, PF, utf8) of
{'EXIT',_R} ->
{"~tp",Reason};
Formatted ->
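
The call to lib:format_exception/7 is replaced by erl_error:format_exception/7 with an unchanged argument list. A minimal, self-contained sketch of the call as used above (the stacktrace is made up for illustration):

    StackFun = fun(_Mod, _Fun, _Arity) -> false end,
    PF = fun(Term, I) -> io_lib:format("~." ++ integer_to_list(I) ++ "tp", [Term]) end,
    Stack = [{lists,reverse,[1],[]}],
    Formatted = erl_error:format_exception(1, error, badarg, Stack, StackFun, PF, utf8),
    io:format("~ts~n", [Formatted]).
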
diff --git a/lib/common_test/src/test_server_node.erl b/lib/common_test/src/test_server_node.erl
index b2d4f199c3..76588e6887 100644
--- a/lib/common_test/src/test_server_node.erl
+++ b/lib/common_test/src/test_server_node.erl
@@ -591,7 +591,7 @@ cast_to_list(X) -> lists:flatten(io_lib:format("~tw", [X])).
%%% this
%%%
pick_erl_program(default) ->
- cast_to_list(lib:progname());
+ ct:get_progname();
pick_erl_program(L) ->
P = random_element(L),
case P of
@@ -600,7 +600,7 @@ pick_erl_program(L) ->
{release, S} ->
find_release(S);
this ->
- cast_to_list(lib:progname())
+ ct:get_progname()
end.
%% This is an attempt to distinguish between spaces in the program
@@ -611,8 +611,8 @@ pick_erl_program(L) ->
%% ({prog,String}) or if the -program switch to beam is used and
%% includes arguments (typically done by cerl in OTP test environment
%% in order to ensure that slave/peer nodes are started with the same
-%% emulator and flags as the test node. The return from lib:progname()
-%% could then typically be '/<full_path_to>/cerl -gcov').
+%% emulator and flags as the test node. The return from ct:get_progname()
+%% could then typically be "/<full_path_to>/cerl -gcov").
quote_progname(Progname) ->
do_quote_progname(string:lexemes(Progname," ")).
diff --git a/lib/common_test/test_server/ts_erl_config.erl b/lib/common_test/test_server/ts_erl_config.erl
index c7fe4ccf83..e37fa844bb 100644
--- a/lib/common_test/test_server/ts_erl_config.erl
+++ b/lib/common_test/test_server/ts_erl_config.erl
@@ -358,7 +358,15 @@ link_library(_LibName,_Other) ->
%% Returns emulator specific variables.
emu_vars(Vars) ->
[{is_source_build, is_source_build()},
- {erl_name, atom_to_list(lib:progname())}|Vars].
+ {erl_name, get_progname()}|Vars].
+
+get_progname() ->
+ case init:get_argument(progname) of
+ {ok, [[Prog]]} ->
+ Prog;
+ _Other ->
+ "no_prog_name"
+ end.
is_source_build() ->
string:find(erlang:system_info(system_version), "source") =/= nomatch.
diff --git a/lib/common_test/test_server/ts_run.erl b/lib/common_test/test_server/ts_run.erl
index 3f594236bc..5dbbaca916 100644
--- a/lib/common_test/test_server/ts_run.erl
+++ b/lib/common_test/test_server/ts_run.erl
@@ -199,7 +199,7 @@ make_command(Vars, Spec, State) ->
TestPath = filename:nativename(TestDir),
Erl = case os:getenv("TS_RUN_VALGRIND") of
false ->
- atom_to_list(lib:progname());
+ ct:get_progname();
_ ->
case State#state.file of
Dir when is_list(Dir) ->
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index c6a0056a70..a37b2064b2 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -295,7 +295,7 @@ format_error_reason({Reason, Stack}) when is_list(Stack) ->
end,
FormatFun = fun (Term, _) -> io_lib:format("~tp", [Term]) end,
[io_lib:format("~tp", [Reason]),"\n\n",
- lib:format_stacktrace(1, Stack, StackFun, FormatFun)];
+ erl_error:format_stacktrace(1, Stack, StackFun, FormatFun)];
format_error_reason(Reason) ->
io_lib:format("~tp", [Reason]).
diff --git a/lib/compiler/test/bs_match_SUITE.erl b/lib/compiler/test/bs_match_SUITE.erl
index 235956a714..3b6ffa8d68 100644
--- a/lib/compiler/test/bs_match_SUITE.erl
+++ b/lib/compiler/test/bs_match_SUITE.erl
@@ -330,6 +330,11 @@ save_restore(Config) when is_list(Config) ->
{"-",<<"x">>} = nnn(C),
{"-",<<"x">>} = ooo(C),
+ a = multiple_matches(<<777:16>>, <<777:16>>),
+ b = multiple_matches(<<777:16>>, <<999:16>>),
+ c = multiple_matches(<<777:16>>, <<57:8>>),
+ d = multiple_matches(<<17:8>>, <<1111:16>>),
+
Bin = <<-1:64>>,
case bad_float_unpack_match(Bin) of
-1 -> ok;
@@ -357,6 +362,11 @@ nnn(<<Char, Tail/binary>>) -> {[Char],Tail}. %% Buggy Tail!
ooo(<<" - ", Tail/binary>>) -> Tail;
ooo(<<Char, Tail/binary>>) -> {[Char],Tail}.
+multiple_matches(<<Y:16>>, <<Y:16>>) -> a;
+multiple_matches(<<_:16>>, <<_:16>>) -> b;
+multiple_matches(<<_:16>>, <<_:8>>) -> c;
+multiple_matches(<<_:8>>, <<_:16>>) -> d.
+
bad_float_unpack_match(<<F:64/float>>) -> F;
bad_float_unpack_match(<<I:64/integer-signed>>) -> I.
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index df4e2245f4..6e113ef39e 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -544,6 +544,7 @@ static int zero_terminate(ErlNifBinary bin, char **buf);
#endif
static int library_refc = 0; /* number of users of this dynamic library */
+static int library_initialized = 0;
static ErlNifFunc nif_funcs[] = {
{"info_lib", 0, info_lib},
@@ -1005,14 +1006,14 @@ static int initialize(ErlNifEnv* env, ERL_NIF_TERM load_info)
PRINTF_ERR0("CRYPTO: Could not open resource type 'ENGINE_CTX'");
return __LINE__;
}
+#endif
- if (library_refc > 0) {
+ if (library_initialized) {
/* Repeated loading of this library (module upgrade).
* Atoms and callbacks are already set, we are done.
*/
return 0;
}
-#endif
atom_true = enif_make_atom(env,"true");
atom_false = enif_make_atom(env,"false");
@@ -1119,10 +1120,6 @@ static int initialize(ErlNifEnv* env, ERL_NIF_TERM load_info)
atom_password = enif_make_atom(env,"password");
#endif
- init_digest_types(env);
- init_cipher_types(env);
- init_algorithms_types(env);
-
#ifdef HAVE_DYNAMIC_CRYPTO_LIB
{
void* handle;
@@ -1168,6 +1165,11 @@ static int initialize(ErlNifEnv* env, ERL_NIF_TERM load_info)
}
#endif /* OPENSSL_THREADS */
+ init_digest_types(env);
+ init_cipher_types(env);
+ init_algorithms_types(env);
+
+ library_initialized = 1;
return 0;
}
diff --git a/lib/debugger/src/dbg_icmd.erl b/lib/debugger/src/dbg_icmd.erl
index 4cd3dce670..55cbada53b 100644
--- a/lib/debugger/src/dbg_icmd.erl
+++ b/lib/debugger/src/dbg_icmd.erl
@@ -467,7 +467,7 @@ mark_break(Cm, LineNo, Le) ->
parse_cmd(Cmd, LineNo) ->
{ok,Tokens,_} = erl_scan:string(Cmd, LineNo, [text]),
- {ok,Forms,Bs} = lib:extended_parse_exprs(Tokens),
+ {ok,Forms,Bs} = erl_eval:extended_parse_exprs(Tokens),
{Forms, Bs}.
%%====================================================================
diff --git a/lib/debugger/src/dbg_wx_win.erl b/lib/debugger/src/dbg_wx_win.erl
index f1298154ab..fea94156c1 100644
--- a/lib/debugger/src/dbg_wx_win.erl
+++ b/lib/debugger/src/dbg_wx_win.erl
@@ -275,7 +275,7 @@ entry(Parent, Title, Prompt, {Type, Value}) ->
verify(Type, Str) ->
case erl_scan:string(Str, 1, [text]) of
{ok, Tokens, _EndLine} when Type==term ->
- case lib:extended_parse_term(Tokens++[{dot, erl_anno:new(1)}]) of
+ case erl_eval:extended_parse_term(Tokens++[{dot, erl_anno:new(1)}]) of
{ok, Value} -> {edit, Value};
_Error ->
ignore
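
Both debugger call sites switch from the removed lib module to the corresponding erl_eval functions. A small sketch of the token-to-term path used above (the input string is illustrative):

    {ok, Tokens, _End} = erl_scan:string("{some_tag, [1,2,3]}", 1, [text]),
    {ok, Term} = erl_eval:extended_parse_term(Tokens ++ [{dot, erl_anno:new(1)}]),
    %% Term =:= {some_tag,[1,2,3]}
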
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index c5f93a3392..45b4abb253 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -102,6 +102,8 @@
| 'undefined', % race
fun_homes :: dict:dict(label(), mfa())
| 'undefined', % race
+ reachable_funs :: sets:set(label())
+ | 'undefined', % race
plt :: dialyzer_plt:plt()
| 'undefined', % race
opaques :: [type()]
@@ -269,9 +271,11 @@ traverse(Tree, Map, State) ->
case state__warning_mode(State) of
true -> {State, Map, Type};
false ->
- State2 = state__add_work(get_label(Tree), State),
+ FunLbl = get_label(Tree),
+ State2 = state__add_work(FunLbl, State),
State3 = state__update_fun_env(Tree, Map, State2),
- {State3, Map, Type}
+ State4 = state__add_reachable(FunLbl, State3),
+ {State4, Map, Type}
end;
'let' ->
handle_let(Tree, Map, State);
@@ -3039,25 +3043,35 @@ state__new(Callgraph, Codeserver, Tree, Plt, Module, Records) ->
{TreeMap, FunHomes} = build_tree_map(Tree, Callgraph),
Funs = dict:fetch_keys(TreeMap),
FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
- ExportedFuns =
- [Fun || Fun <- Funs--[top], dialyzer_callgraph:is_escaping(Fun, Callgraph)],
- Work = init_work(ExportedFuns),
+ ExportedFunctions =
+ [Fun ||
+ Fun <- Funs--[top],
+ dialyzer_callgraph:is_escaping(Fun, Callgraph),
+ dialyzer_callgraph:lookup_name(Fun, Callgraph) =/= error
+ ],
+ Work = init_work(ExportedFunctions),
Env = lists:foldl(fun(Fun, Env) -> dict:store(Fun, map__new(), Env) end,
dict:new(), Funs),
#state{callgraph = Callgraph, codeserver = Codeserver,
envs = Env, fun_tab = FunTab, fun_homes = FunHomes, opaques = Opaques,
plt = Plt, races = dialyzer_races:new(), records = Records,
warning_mode = false, warnings = [], work = Work, tree_map = TreeMap,
- module = Module}.
+ module = Module, reachable_funs = sets:new()}.
state__warning_mode(#state{warning_mode = WM}) ->
WM.
state__set_warning_mode(#state{tree_map = TreeMap, fun_tab = FunTab,
- races = Races} = State) ->
+ races = Races, callgraph = Callgraph,
+ reachable_funs = ReachableFuns} = State) ->
?debug("==========\nStarting warning pass\n==========\n", []),
Funs = dict:fetch_keys(TreeMap),
- State#state{work = init_work([top|Funs--[top]]),
+ Work =
+ [Fun ||
+ Fun <- Funs--[top],
+ dialyzer_callgraph:lookup_name(Fun, Callgraph) =/= error orelse
+ sets:is_element(Fun, ReachableFuns)],
+ State#state{work = init_work(Work),
fun_tab = FunTab, warning_mode = true,
races = dialyzer_races:put_race_analysis(true, Races)}.
@@ -3149,7 +3163,8 @@ state__get_race_warnings(#state{races = Races} = State) ->
State1#state{races = Races1}.
state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
- callgraph = Callgraph, plt = Plt} = State) ->
+ callgraph = Callgraph, plt = Plt,
+ reachable_funs = ReachableFuns} = State) ->
FoldFun =
fun({top, _}, AccState) -> AccState;
({FunLbl, Fun}, AccState) ->
@@ -3184,7 +3199,12 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
GenRet = dialyzer_contracts:get_contract_return(C),
not t_is_unit(GenRet)
end,
- case Warn of
+ %% Do not output warnings for unreachable funs.
+ case
+ Warn andalso
+ (dialyzer_callgraph:lookup_name(FunLbl, Callgraph) =/= error
+ orelse sets:is_element(FunLbl, ReachableFuns))
+ of
true ->
case classify_returns(Fun) of
no_match ->
@@ -3255,6 +3275,10 @@ state__get_args_and_status(Tree, #state{fun_tab = FunTab}) ->
{ok, {ArgTypes, _}} -> {ArgTypes, true}
end.
+state__add_reachable(FunLbl, #state{reachable_funs = ReachableFuns}=State) ->
+ NewReachableFuns = sets:add_element(FunLbl, ReachableFuns),
+ State#state{reachable_funs = NewReachableFuns}.
+
build_tree_map(Tree, Callgraph) ->
Fun =
fun(T, {Dict, Homes, FunLbls} = Acc) ->
diff --git a/lib/dialyzer/test/options1_SUITE_data/results/compiler b/lib/dialyzer/test/options1_SUITE_data/results/compiler
index cbb5115c91..e1dc038800 100644
--- a/lib/dialyzer/test/options1_SUITE_data/results/compiler
+++ b/lib/dialyzer/test/options1_SUITE_data/results/compiler
@@ -28,7 +28,7 @@ cerl_inline.erl:2750: The pattern <{[], L, D}, Vs> can never match the type <[1.
cerl_inline.erl:2752: The pattern <{[], _L, D}, Vs> can never match the type <[1..255,...],[any()]>
cerl_inline.erl:2754: The pattern <{F, L, D}, Vs> can never match the type <[1..255,...],[any()]>
cerl_inline.erl:2756: The pattern <{F, _L, D}, Vs> can never match the type <[1..255,...],[any()]>
-compile.erl:788: The pattern {'error', Es} can never match the type {'ok',<<_:64,_:_*8>>}
+compile.erl:792: The pattern {'error', Es} can never match the type {'ok',<<_:64,_:_*8>>}
core_lint.erl:473: The pattern <{'c_atom', _, 'all'}, 'binary', _Def, St> can never match the type <_,#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}},tl::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}}},[any()],_>
core_lint.erl:505: The pattern <_Req, 'unknown', St> can never match the type <non_neg_integer(),non_neg_integer(),_>
sys_pre_expand.erl:625: Call to missing or unexported function erlang:hash/2
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/compile.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/compile.erl
index 7e5ccde2fd..6838cf6734 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/compile.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/compile.erl
@@ -228,11 +228,15 @@ os_process_size() ->
case os:type() of
{unix, sunos} ->
Size = os:cmd("ps -o vsz -p " ++ os:getpid() ++ " | tail -1"),
- list_to_integer(lib:nonl(Size));
+ list_to_integer(nonl(Size));
_ ->
0
end.
+nonl([$\n]) -> [];
+nonl([]) -> [];
+nonl([H|T]) -> [H|nonl(T)].
+
run_tc({Name,Fun}, St) ->
Before0 = statistics(runtime),
Val = (catch Fun(St)),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
index a48f73274b..ce144e061f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
@@ -285,7 +285,7 @@ eval(Info,"GET",CGIBody,Modules) ->
"~n Modules: ~p",[Modules]),
case auth(CGIBody,Modules) of
true ->
- case lib:eval_str(string:concat(CGIBody,". ")) of
+ case eval_str(string:concat(CGIBody,". ")) of
{error,Reason} ->
?vlog("eval -> error:"
"~n Reason: ~p",[Reason]),
@@ -318,6 +318,48 @@ auth(CGIBody,Modules) ->
false
end.
+%% eval_str(InStr) -> {ok, OutStr} | {error, ErrStr'}
+%% InStr must represent a body
+%% Note: If InStr is a binary it has to be a Latin-1 string.
+%% If you have a UTF-8 encoded binary you have to call
+%% unicode:characters_to_list/1 before the call to eval_str().
+
+-define(result(F,D), lists:flatten(io_lib:format(F, D))).
+
+-spec eval_str(string() | unicode:latin1_binary()) ->
+ {'ok', string()} | {'error', string()}.
+
+eval_str(Str) when is_list(Str) ->
+ case erl_scan:tokens([], Str, 0) of
+ {more, _} ->
+ {error, "Incomplete form (missing .<cr>)??"};
+ {done, {ok, Toks, _}, Rest} ->
+ case all_white(Rest) of
+ true ->
+ case erl_parse:parse_exprs(Toks) of
+ {ok, Exprs} ->
+ case catch erl_eval:exprs(Exprs, erl_eval:new_bindings()) of
+ {value, Val, _} ->
+ {ok, Val};
+ Other ->
+ {error, ?result("*** eval: ~p", [Other])}
+ end;
+ {error, {_Line, Mod, Args}} ->
+ Msg = ?result("*** ~ts",[Mod:format_error(Args)]),
+ {error, Msg}
+ end;
+ false ->
+ {error, ?result("Non-white space found after "
+ "end-of-form :~ts", [Rest])}
+ end
+ end.
+
+all_white([$\s|T]) -> all_white(T);
+all_white([$\n|T]) -> all_white(T);
+all_white([$\t|T]) -> all_white(T);
+all_white([]) -> true;
+all_white(_) -> false.
+
%%----------------------------------------------------------------------
%%Creates the environment list that will be the first arg to the
%%Functions that is called through the ErlScript Schema
diff --git a/lib/dialyzer/test/small_SUITE_data/results/unused_funs b/lib/dialyzer/test/small_SUITE_data/results/unused_funs
new file mode 100644
index 0000000000..c468457ead
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/unused_funs
@@ -0,0 +1,5 @@
+
+unused_funs.erl:10: The pattern 'error' can never match the type 'other_error'
+unused_funs.erl:15: Function not_used/0 will never be called
+unused_funs.erl:19: Function foo/1 will never be called
+unused_funs.erl:7: Function test/0 has no local return
diff --git a/lib/dialyzer/test/small_SUITE_data/src/unused_funs.erl b/lib/dialyzer/test/small_SUITE_data/src/unused_funs.erl
new file mode 100644
index 0000000000..c24cf3ea81
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/unused_funs.erl
@@ -0,0 +1,21 @@
+%% See also ERL-593.
+
+-module(unused_funs).
+
+-export([test/0]).
+
+test() -> % "has no local return"
+ Var = outer_scope,
+ case other_error of
+ error -> % "can never match"
+ %% No warnings "no local return" and "_ = 1 can never match 0" (!)
+ foo(fun() -> {Var, 1 = 0} end)
+ end.
+
+not_used() -> % "will never be called"
+ %% No warnings "no local return" and "1 can never match 0".
+ foo(fun() -> 1 = 0 end).
+
+foo(Fun) -> % "will never be called"
+ 1 = 0, % No pattern match warning (foo/1 is not traversed at all).
+ Fun().
diff --git a/lib/erl_docgen/priv/xsl/Makefile b/lib/erl_docgen/priv/xsl/Makefile
index d0dd227169..d381bd4cf7 100644
--- a/lib/erl_docgen/priv/xsl/Makefile
+++ b/lib/erl_docgen/priv/xsl/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2009-2016. All Rights Reserved.
+# Copyright Ericsson AB 2009-2018. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -44,7 +44,8 @@ XSL_FILES = \
db_html.xsl \
db_html_params.xsl \
db_man.xsl \
- db_eix.xsl
+ db_eix.xsl \
+ db_funcs.xsl
# ----------------------------------------------------
diff --git a/lib/erl_docgen/vsn.mk b/lib/erl_docgen/vsn.mk
index 95b2329ac5..a556b73103 100644
--- a/lib/erl_docgen/vsn.mk
+++ b/lib/erl_docgen/vsn.mk
@@ -1 +1 @@
-ERL_DOCGEN_VSN = 0.7.2
+ERL_DOCGEN_VSN = 0.7.3
diff --git a/lib/hipe/main/hipe.erl b/lib/hipe/main/hipe.erl
index 97814fe217..ac2e6c1e3b 100644
--- a/lib/hipe/main/hipe.erl
+++ b/lib/hipe/main/hipe.erl
@@ -852,8 +852,8 @@ finalize_fun_sequential({MFA, Icode}, Opts, Servers) ->
print_crash_message(What, Error, StackTrace) ->
StackFun = fun(_,_,_) -> false end,
FormatFun = fun (Term, _) -> io_lib:format("~p", [Term]) end,
- StackTrace = lib:format_stacktrace(1, StackTrace,
- StackFun, FormatFun),
+ StackTraceS = erl_error:format_stacktrace(1, StackTrace,
+ StackFun, FormatFun),
WhatS = case What of
{M,F,A} -> io_lib:format("~w:~w/~w", [M,F,A]);
Mod -> io_lib:format("~w", [Mod])
@@ -862,7 +862,7 @@ print_crash_message(What, Error, StackTrace) ->
"while compiling ~s~n"
"crash reason: ~p~n"
"~s~n",
- [WhatS, Error, StackTrace]).
+ [WhatS, Error, StackTraceS]).
pp_server_start(Opts) ->
set_architecture(Opts),
diff --git a/lib/inets/src/http_server/mod_esi.erl b/lib/inets/src/http_server/mod_esi.erl
index 3206d957d9..b49b3a7093 100644
--- a/lib/inets/src/http_server/mod_esi.erl
+++ b/lib/inets/src/http_server/mod_esi.erl
@@ -561,7 +561,7 @@ eval(#mod{method = Method} = ModData, ESIBody, Modules)
end.
generate_webpage(ESIBody) ->
- (catch lib:eval_str(string:concat(ESIBody,". "))).
+ (catch eval_str(string:concat(ESIBody,". "))).
is_authorized(_ESIBody, [all]) ->
true;
@@ -573,3 +573,45 @@ is_authorized(ESIBody, Modules) ->
nomatch ->
false
end.
+
+%% eval_str(InStr) -> {ok, OutStr} | {error, ErrStr'}
+%% InStr must represent a body
+%% Note: If InStr is a binary it has to be a Latin-1 string.
+%% If you have a UTF-8 encoded binary you have to call
+%% unicode:characters_to_list/1 before the call to eval_str().
+
+-define(result(F,D), lists:flatten(io_lib:format(F, D))).
+
+-spec eval_str(string()) ->
+ {'ok', string()} | {'error', string()}.
+
+eval_str(Str) when is_list(Str) ->
+ case erl_scan:tokens([], Str, 0) of
+ {more, _} ->
+ {error, "Incomplete form (missing .<cr>)??"};
+ {done, {ok, Toks, _}, Rest} ->
+ case all_white(Rest) of
+ true ->
+ case erl_parse:parse_exprs(Toks) of
+ {ok, Exprs} ->
+ case catch erl_eval:exprs(Exprs, erl_eval:new_bindings()) of
+ {value, Val, _} ->
+ {ok, Val};
+ Other ->
+ {error, ?result("*** eval: ~p", [Other])}
+ end;
+ {error, {_Line, Mod, Args}} ->
+ Msg = ?result("*** ~ts",[Mod:format_error(Args)]),
+ {error, Msg}
+ end;
+ false ->
+ {error, ?result("Non-white space found after "
+ "end-of-form :~ts", [Rest])}
+ end
+ end.
+
+all_white([$\s|T]) -> all_white(T);
+all_white([$\n|T]) -> all_white(T);
+all_white([$\t|T]) -> all_white(T);
+all_white([]) -> true;
+all_white(_) -> false.
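
The local eval_str/1 replaces lib:eval_str/1 for evaluating the ESI body. As written, the success value is whatever the expression evaluates to (the -spec above assumes a string result), and the input must end in a dot plus whitespace, which mod_esi ensures by appending ". ". A usage sketch:

    1> eval_str("1 + 2. ").
    {ok,3}
    2> eval_str("1 + 2").
    {error,"Incomplete form (missing .<cr>)??"}
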
diff --git a/lib/kernel/doc/src/Makefile b/lib/kernel/doc/src/Makefile
index 82869d7b15..29dc73a523 100644
--- a/lib/kernel/doc/src/Makefile
+++ b/lib/kernel/doc/src/Makefile
@@ -42,6 +42,7 @@ XML_REF3_FILES = application.xml \
disk_log.xml \
erl_boot_server.xml \
erl_ddll.xml \
+ erl_epmd.xml \
erl_prim_loader_stub.xml \
erlang_stub.xml \
error_handler.xml \
diff --git a/lib/kernel/doc/src/config.xml b/lib/kernel/doc/src/config.xml
index fdb2d29f63..8850c1736b 100644
--- a/lib/kernel/doc/src/config.xml
+++ b/lib/kernel/doc/src/config.xml
@@ -37,10 +37,10 @@
data in the system configuration file <c>Name.config</c>.</p>
<p>Configuration parameter values in the configuration file
override the values in the application resource files (see
- <seealso marker="app"><c>app(4)</c></seealso>.
+ <seealso marker="app"><c>app(4)</c></seealso>).
The values in the configuration file can be
overridden by command-line flags (see
- <seealso marker="erts:erl"><c>erts:erl(1)</c></seealso>.</p>
+ <seealso marker="erts:erl"><c>erts:erl(1)</c></seealso>).</p>
<p>The value of a configuration parameter is retrieved by calling
<c>application:get_env/1,2</c>.</p>
</description>
diff --git a/lib/kernel/doc/src/erl_epmd.xml b/lib/kernel/doc/src/erl_epmd.xml
new file mode 100644
index 0000000000..8b076cd2d7
--- /dev/null
+++ b/lib/kernel/doc/src/erl_epmd.xml
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2018</year><year>2018</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ </legalnotice>
+
+ <title>erl_epmd</title>
+ <prepared>Timmo Verlaan</prepared>
+ <docno>1</docno>
+ <date>2018-02-19</date>
+ <rev>A</rev>
+ </header>
+ <module>erl_epmd</module>
+ <modulesummary>
+ Erlang interface towards epmd
+ </modulesummary>
+ <description>
+ <p>This module communicates with the EPMD daemon, see <seealso
+ marker="erts:epmd">epmd</seealso>. To implement your own epmd module please
+ see <seealso marker="erts:alt_disco">ERTS User's Guide: How to Implement an
+ Alternative Service Discovery for Erlang Distribution</seealso></p>
+ </description>
+
+ <funcs>
+ <func>
+ <name name="start_link" arity="0"/>
+ <fsummary>Callback for erl_distribution supervisor.</fsummary>
+ <desc>
+ <p>This function is invoked as this module is added as a child of the
+ <c>erl_distribution</c> supervisor.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="register_node" arity="2"/>
+ <name name="register_node" arity="3"/>
+ <fsummary>Registers the node with <c>epmd</c>.</fsummary>
+ <desc>
+ <p>Registers the node with <c>epmd</c> and tells epmd what port will be
+ used for the current node. It returns a creation number. This number is
+ incremented on each register to help with identifying if a node is
+ reconnecting to epmd.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="port_please" arity="2"/>
+ <name name="port_please" arity="3"/>
+ <fsummary>Returns the port number for a given node.</fsummary>
+ <desc>
+ <p>Requests the distribution port for the given node of an EPMD
+ instance. Together with the port it returns a distribution protocol
+ version which has been 5 since Erlang/OTP R6.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="address_please" arity="3"/>
+ <fsummary>Returns address and port.</fsummary>
+ <desc>
+ <p>Called by the distribution module. Resolves the <c>Host</c> to an IP
+ address.</p>
+ <p>Another epmd module may return port and distribution protocol version
+ as well.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="names" arity="1"/>
+ <fsummary>Names of Erlang nodes at a host.</fsummary>
+ <desc>
+ <p>Called by <seealso marker="net_adm"><c>net_adm:names/0</c></seealso>.
+ <c>Host</c> defaults to the localhost. Returns the names and associated
+ port numbers of the Erlang nodes that <c>epmd</c> registered at the
+ specified host. Returns <c>{error, address}</c> if <c>epmd</c> is not
+ operational.</p>
+ <p><em>Example:</em></p>
+ <pre>
+(arne@dunn)1> <input>erl_epmd:names(localhost).</input>
+{ok,[{"arne",40262}]}</pre>
+ </desc>
+ </func>
+ </funcs>
+
+</erlref>
+
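A brief interactive sketch of the documented functions against a local epmd (the node name and port are taken from the example above; port_please/2 is assumed to return {port, Port, Version} on success and noport otherwise):

    1> erl_epmd:names("localhost").
    {ok,[{"arne",40262}]}
    2> erl_epmd:port_please("arne", "localhost").
    {port,40262,5}
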
diff --git a/lib/kernel/doc/src/error_logger.xml b/lib/kernel/doc/src/error_logger.xml
index cb6165c73e..c9fe9484e4 100644
--- a/lib/kernel/doc/src/error_logger.xml
+++ b/lib/kernel/doc/src/error_logger.xml
@@ -181,17 +181,21 @@ ok</pre>
<func>
<name name="get_format_depth" arity="0"/>
<fsummary>Get the value of the Kernel application variable
- <c>logger_format_depth</c>.</fsummary>
+ <c>error_logger_format_depth</c>.</fsummary>
<desc>
<p>Returns <c>max(10, Depth)</c>, where <c>Depth</c> is the
- value of
- <seealso marker="kernel_app#logger_format_depth">
- logger_format_depth</seealso>
+ value of <c>error_logger_format_depth</c>
in the Kernel application, if Depth is an integer. Otherwise,
<c>unlimited</c> is returned.</p>
- <p>For backwards compatibility, the value
- of <c>error_logger_format_depth</c> is used
- if <c>logger_format_depth</c> is not set.</p>
+ <note>
+ <p>The <c>error_logger_format_depth</c> variable
+ is <seealso marker="kernel_app#deprecated-configuration-parameters">
+ deprecated</seealso> since
+ the <seealso marker="logger">Logger API</seealso> was
+ introduced in OTP-21. The variable, and this function, are
+ kept for backwards compatibility since they still might be
+ used by legacy report handlers.</p>
+ </note>
</desc>
</func>
<func>
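
In other words, get_format_depth/0 still reads error_logger_format_depth and clamps small values. A hypothetical illustration of the max(10, Depth) rule described above:

    %% erl -kernel error_logger_format_depth 5   -> error_logger:get_format_depth() returns 10
    %% erl -kernel error_logger_format_depth 30  -> returns 30
    %% parameter unset                           -> returns unlimited
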
diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml
index f96d946a5d..7cd05dab14 100644
--- a/lib/kernel/doc/src/kernel_app.xml
+++ b/lib/kernel/doc/src/kernel_app.xml
@@ -122,21 +122,6 @@
application. For more information about configuration parameters,
see file <seealso marker="app"><c>app(4)</c></seealso>.</p>
<taglist>
- <tag><c>browser_cmd = string() | {M,F,A}</c></tag>
- <item>
- <p>When pressing the <em>Help</em> button in a tool such as Debugger,
- the help text (an HTML file <c>File</c>) is by default
- displayed in a Netscape browser, which is required to be
- operational. This parameter can be used to change the command for
- how to display the help text if another browser than Netscape
- is preferred, or if another platform than Unix or Windows is
- used.</p>
- <p>If set to a string <c>Command</c>, the command
- <c>"Command File"</c> is evaluated using
- <seealso marker="os#cmd/1"><c>os:cmd/1</c></seealso>.</p>
- <p>If set to a module-function-args tuple, <c>{M,F,A}</c>,
- the call <c>apply(M,F,[File|A])</c> is evaluated.</p>
- </item>
<tag><c>distributed = [Distrib]</c></tag>
<item>
<p>Specifies which applications that are distributed and on which
@@ -180,78 +165,33 @@
<p>Permissions are described in
<seealso marker="application#permit/2"><c>application:permit/2</c></seealso>.</p>
</item>
- <tag><c>logger_dest = Value</c></tag>
+ <tag><marker id="logger"/><c>logger = [Config]</c></tag>
<item>
- <p><c>Value</c> is one of:</p>
- <taglist>
- <tag><c>tty</c></tag>
- <item><p>Installs the standard handler, <seealso marker="logger_std_h">
- <c>logger_std_h(3)</c></seealso>, with <c>type</c> set
- to <c>standard_io</c>. This is the default
- option.</p></item>
- <tag><c>{file, FileName}</c></tag>
- <item><p>Installs the standard handler, <seealso marker="logger_std_h">
- <c>logger_std_h(3)</c></seealso>, with <c>type</c> set
- to <c>{file, FileName}</c>, where <c>FileName</c>
- is a string. The file is opened with encoding UTF-8.</p></item>
- <tag><c>{disk_log, FileName}</c></tag>
- <item><p>Installs the disk_log handler, <seealso marker="logger_disk_log_h">
- <c>logger_disk_log_h(3)</c></seealso>, with <c>file</c> set
- to <c>FileName</c> (a string), and possibly other disk_log
- parameters set by the environment variables
- <c>logger_disk_log_type</c>, <c>logger_disk_log_maxfiles</c> and
- <c>logger_disk_log_maxbytes</c>,
- see <seealso marker="#disk_log_vars">below</seealso>. The
- file is opened with encoding UTF-8.</p></item>
- <tag><c>false</c></tag>
- <item>
- <p>No standard handler is installed, but
- the initial, primitive handler is kept, printing
- raw event messages to <c>tty</c>.</p>
- </item>
- <tag><c>silent</c></tag>
- <item>
- <p>No standard handler is started, and the initial,
- primitive handler is removed.</p>
- </item>
- </taglist>
+ <p>Specifies how <seealso marker="logger"><c>logger</c></seealso> should be
+ configured.</p>
+ <p>For more details and examples, see the <seealso marker="logger_chapter#logger">
+ Configuration</seealso> section in the <seealso marker="logger_chapter">
+ Logger User's Guide</seealso>.
+ </p>
</item>
- <tag><c>logger_level = Level</c></tag>
+ <tag><marker id="logger_level"/><c>logger_level = Level</c></tag>
<item>
- <p><c>Value = emergency | alert | critical | error | warning |
+ <p><c>Level = emergency | alert | critical | error | warning |
notice | info | debug</c></p>
<p>This parameter specifies which log levels to log. The
specified level, and all levels that are more severe, will
be logged.</p>
- <p>This configuration parameter is used both for the global
- logger level, and for the standard handler started by
- the Kernel application (see <c>logger_dest</c> variable above).</p>
<p>The default value is <c>info</c>.</p>
- </item>
- <tag><marker id="disk_log_vars"/>
- <c>logger_disk_log_type = halt | wrap</c></tag>
- <item/>
- <tag><c>logger_disk_log_maxfiles = integer()</c></tag>
- <item/>
- <tag><c>logger_disk_log_maxbytes = integer()</c></tag>
- <item>
- <p>If <c>logger_dest</c> is set to {disk_log,File}, then these
- parameters specify the configuration to use when opening the
- disk log file. They specify the type of disk log, the
- maximum number of files (if the type is wrap) and the
- maximum size of each file, respectively.</p>
- <p>The default values are:</p>
- <code>
-logger_disk_log_type = wrap
-logger_disk_log_maxfiles = 10
-logger_disk_log_maxbytes = 1048576</code>
+ <p>To change the global log level at run-time, use
+ <seealso marker="logger#set_logger_config/2">
+ <c>logger:set_logger_config(level, error)</c></seealso>.</p>
</item>
<tag><marker id="logger_sasl_compatible"/>
<c>logger_sasl_compatible = boolean()</c></tag>
<item>
- <p>If this parameter is set to true, then the logger handler
- started by kernel will not log any progress-, crash-, or
- supervisor reports. If the SASL application is started,
+ <p>If this parameter is set to true, then the <c>default</c> logger handler
+ will not log any progress-, crash-, or supervisor reports.
+ If the SASL application is started,
these log events will be sent to a second handler instance
named <c>sasl_h</c>, according to values of the SASL
environment variables <c>sasl_error_logger</c>
@@ -262,6 +202,8 @@ logger_disk_log_maxbytes = 1048576</code>
<p>See chapter <seealso marker="logger_chapter#compatibility">Backwards
compatibility with error_logger</seealso> for more
information about handling of the so called SASL reports.</p>
+ <note><p>This configuration option only affects the <c>default</c>
+ and <c>sasl</c> handlers. Any other handlers are unaffected.</p></note>
</item>
<tag><marker id="logger_log_progress"/>
<c>logger_log_progress = boolean()</c></tag>
@@ -269,51 +211,13 @@ logger_disk_log_maxbytes = 1048576</code>
<p>If <c>logger_sasl_compatible = false</c>,
then <c>logger_log_progress</c> specifies if progress
reports from <c>supervisor</c>
- and <c>application_controller</c> shall be logged or
- not.</p>
+ and <c>application_controller</c> shall be logged by the
+ default logger.</p>
<p>If <c>logger_sasl_compatible = true</c>,
then <c>logger_log_progress</c> is ignored.</p>
- </item>
- <tag><marker id="logger_format_depth"/>
- <c>logger_format_depth = Depth</c></tag>
- <item>
- <p>Can be used to limit the size of the
- formatted output from the logger handlers.</p>
-
- <p><c>Depth</c> is a positive integer representing the maximum
- depth to which terms are printed by the logger
- handlers included in OTP. This
- configuration parameter is used by the default formatter,
- <seealso marker="logger_formatter"><c>logger_formatter(3)</c></seealso>,
- unless the formatter's <c>depth</c> parameter is explicitly set.
- (If you have implemented your own formatter, this configuration
- parameter has no effect on that.)</p>
-
- <p><c>Depth</c> is used as follows: Format strings
- received by the formatter are rewritten.
- The format controls <c>~p</c> and <c>~w</c> are replaced with
- <c>~P</c> and <c>~W</c>, respectively, and <c>Depth</c> is
- used as the depth parameter. For details, see
- <seealso marker="stdlib:io#format/2"><c>io:format/2</c></seealso>
- in STDLIB.</p>
-
- <note><p>A reasonable starting value for <c>Depth</c> is
- <c>30</c>. We recommend to test crashing various processes in your
- application, examine the logs from the crashes, and then
- increase or decrease the value.</p></note>
- </item>
- <tag><c>logger_max_size = integer() | unlimited</c></tag>
- <item>
- <p>This parameter specifies a hard maximum size limit (number
- of characters) each log event can have when printed by the
- default logger formatter. If the resulting string after
- formatting an event is bigger than this, it will be
- truncated before printed to the handler's destination.</p>
- </item>
- <tag><c>logger_utc = boolean()</c></tag>
- <item>
- <p>If set to <c>true</c>, the default formatter will display
- all dates in Universal Coordinated Time.</p>
+ <p>The default value is <c>false</c>.</p>
+ <note><p>This configuration option only affects the <c>default</c>
+ and <c>sasl</c> handlers. Any other handlers are unaffected.</p></note>
</item>
<tag><c>global_groups = [GroupTuple]</c></tag>
<item>
@@ -587,9 +491,20 @@ MaxT = TickTime + TickTime / 4</code>
variables are not set.</p>
<taglist>
<tag><c>error_logger</c></tag>
- <item>Replaced by <c>logger_dest</c></item>
+ <item>Replaced by setting the type of the default
+ <seealso marker="logger_std_h#type"><c>logger_std_h</c></seealso>
+ to the same value. Example:
+ <code type="none">
+erl -kernel logger '[{handler,default,logger_std_h,#{logger_std_h=>#{type=>{file,"/tmp/erlang.log"}}}}]'
+ </code>
+ </item>
<tag><c>error_logger_format_depth</c></tag>
- <item>Replaced by <c>logger_format_depth</c></item>
+ <item>Replaced by setting the <seealso marker="logger_formatter#depth"><c>depth</c></seealso>
+ parameter of the default handler's formatter. Example:
+ <code type="none">
+erl -kernel logger '[{handler,default,logger_std_h,#{formatter=>{logger_formatter,#{legacy_header=>true,template=>[{logger_formatter,header},"\n",msg,"\n"],depth=>10}}}}]'
+ </code>
+ </item>
</taglist>
<p>See <seealso marker="logger_chapter#compatibility">Backwards
compatibility with error_logger</seealso> for more
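
Pulling the Kernel parameters described in this section together, a minimal sys.config sketch (the values are illustrative only):

    [{kernel,
      [{logger_level, warning},
       {logger_sasl_compatible, false},
       {logger_log_progress, false},
       {logger,
        [{handler, default, logger_std_h,
          #{logger_std_h => #{type => {file, "log/erlang.log"}}}}]}]}].
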
diff --git a/lib/kernel/doc/src/logger.xml b/lib/kernel/doc/src/logger.xml
index d901454e62..2ee1059df8 100644
--- a/lib/kernel/doc/src/logger.xml
+++ b/lib/kernel/doc/src/logger.xml
@@ -33,10 +33,49 @@
<file>logger.xml</file>
</header>
<module>logger</module>
- <modulesummary>API module for the logger application.</modulesummary>
+ <modulesummary>API module for the logger.</modulesummary>
<description>
-
+ <p>
+ This module is the main logger API. It contains functions that allow
+ applications to use a single log API and the system to manage those log
+ events independently. To log events the logger
+ <seealso marker="#macros">macros</seealso> should be used. For instance,
+ to log a new error log event:</p>
+ <code>
+?LOG_ERROR("error happened because: ~p",[Reason]). %% With macro
+logger:error("error happened because: ~p",[Reason]). %% Without macro
+ </code>
+ <p>This log event will then be sent to the configured log handlers which
+ by default means that it will be printed to the console. If you want
+ your system's logs to be printed to a file instead of the console, you
+ have to configure the default handler to do so. The simplest way is
+ to include the following in your <seealso marker="config"><c>sys.config</c></seealso>.</p>
+ <code>
+[{kernel,
+ [{logger,
+ [{handler,default,logger_std_h,
+ #{logger_std_h=>#{type=>{file,"path/to/file.log"}}}}]}]}].
+ </code>
+ <p>
+ For more information about:
+ </p>
+ <list type="bulleted">
+ <item>how to use the API,
+ see <seealso marker="logger_chapter">the User's Guide</seealso>.</item>
+ <item>how to configure logger,
+ see the <seealso marker="logger_chapter#configuration">Configuration</seealso>
+ section in the User's Guide.</item>
+ <item>the convenience macros in logger.hrl,
+ see <seealso marker="#macros">the macro section</seealso>.</item>
+ <item>what the builtin formatter can do,
+ see <seealso marker="logger_formatter">logger_formatter</seealso>.</item>
+ <item>what the builtin handlers can do,
+ see <seealso marker="logger_std_h">logger_std_h</seealso> and
+ <seealso marker="logger_disk_log_h">logger_disk_log_h</seealso>.</item>
+ <item>what builtin filters are available,
+ see <seealso marker="logger_filters">logger_filters</seealso>.</item>
+ </list>
</description>
<datatypes>
@@ -348,16 +387,16 @@
<code><![CDATA[1> logger:i(print).
Current logger configuration:
Level: info
- FilterDefault: log
+ Filter Default: log
Filters:
Handlers:
- Id: logger_std_h
+ Id: default
Module: logger_std_h
Level: info
Formatter:
Module: logger_formatter
- Config: #{template => [{logger_formatter,header},"\n",msg,"\n"],
- legacy_header => true}
+ Config: #{legacy_header => true,single_line => false,
+ template => [{logger_formatter,header},"\n",msg,"\n"]}
Filter Default: stop
Filters:
Id: stop_progress
@@ -489,7 +528,7 @@ Current logger configuration:
<desc>
<p>Add a handler with the given configuration.</p>
<p><c><anno>HandlerId</anno></c> is a unique identifier which
- must be used in all subsequent calls reffering to this
+ must be used in all subsequent calls referring to this
handler.</p>
</desc>
</func>
@@ -550,17 +589,68 @@ Current logger configuration:
</func>
<func>
+ <name name="add_handlers" arity="1" clause_i="1"/>
+ <fsummary>Set up logger handlers from the application's configuration parameters.</fsummary>
+ <desc>
+ <p>Reads the application configuration parameter <c>logger</c> and
+ calls <c>logger:add_handlers/1</c> with its contents.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="add_handlers" arity="1" clause_i="2"/>
+ <fsummary>Set up logger handlers.</fsummary>
+ <type name="config_handler"/>
+ <desc>
+ <p>This function should be used by custom logger handlers to make
+ configuration consistent no matter which handler the system uses.
+ Normal usage is to add a call to <c>logger:add_handlers/1</c>
+ just after the processes that the handler needs are started
+ and pass the application's logger config as an argument. E.g.:</p>
+ <code>
+-behaviour(application).
+start(_, []) ->
+ case supervisor:start_link({local, my_sup}, my_sup, []) of
+ {ok, Pid} ->
+ ok = logger:add_handlers(my_app),
+ {ok, Pid, []};
+ Error -> Error
+ end.</code>
+ <p>This will read the <c>logger</c> configuration parameter from
+ the handler application and start the configured handlers. The contents
+ of the configuration use the same rules as the
+ <seealso marker="logger_chapter#handler-configuration">logger handler configuration</seealso>.
+ </p>
+ <p>If the handler is meant to replace the default handler, the Kernel's
+ default handler has to be disabled before the new handler is added.
+ A <c>sys.config</c> file that disables the kernel handler and adds
+ a custom handler could look like this:</p>
+ <code>
+[{kernel,
+ [{logger,
+ %% Disable the default kernel handler
+ [{handler,default,undefined}]}]},
+ {my_app,
+ [{logger,
+ %% Enable this handler as the default
+ [{handler,default,my_handler,#{}}]}]}].
+ </code>
+ </desc>
+ </func>
+
+ <func>
<name name="set_logger_config" arity="1"/>
<fsummary>Set configuration data for the logger.</fsummary>
<desc>
<p>Set configuration data for the logger. This overwrites the
current logger configuration.</p>
<p>To modify the existing configuration,
- use <seealso marker="#set_logger_config-2"><c>set_logger_config/2</c>
- </seealso>, or read the current configuration
+ use <seealso marker="#update_logger_config-1">
+ <c>update_logger_config/1</c></seealso>, or, if a more
+ complex merge is needed, read the current configuration
with <seealso marker="#get_logger_config-0"><c>get_logger_config/0</c>
- </seealso>, then merge in your added or updated
- associations before writing it back.</p>
+ </seealso>, then do the merge before writing the new
+ configuration back with this function.</p>
<p>If a key is removed compared to the current configuration,
the default value is used.</p>
</desc>
@@ -573,7 +663,23 @@ Current logger configuration:
<p>Add or update configuration data for the logger. If the
given <c><anno>Key</anno></c> already exists, its associated
value will be changed to <c><anno>Value</anno></c>. If it
- doesn't exist, it will be added.</p>
+ does not exist, it will be added.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="update_logger_config" arity="1"/>
+ <fsummary>Update configuration data for the logger.</fsummary>
+ <desc>
+ <p>Update configuration data for the logger. This function
+ behaves as if it was implemented as follows:</p>
+ <code type="erl">
+{ok,Old} = logger:get_logger_config(),
+logger:set_logger_config(maps:merge(Old,Config)).
+ </code>
+ <p>To overwrite the existing configuration without any merge,
+ use <seealso marker="#set_logger_config-1"><c>set_logger_config/1</c>
+ </seealso>.</p>
</desc>
</func>
@@ -584,11 +690,12 @@ Current logger configuration:
<p>Set configuration data for the specified handler. This
overwrites the current handler configuration.</p>
<p>To modify the existing configuration,
- use <seealso marker="#set_handler_config-3"><c>set_handler_config/3</c>
- </seealso>, or read the current configuration
+ use <seealso marker="#update_handler_config-2">
+ <c>update_handler_config/2</c></seealso>, or, if a more
+ complex merge is needed, read the current configuration
with <seealso marker="#get_handler_config-1"><c>get_handler_config/1</c>
- </seealso>, then merge in your added or updated
- associations before writing it back.</p>
+ </seealso>, then do the merge before writing the new
+ configuration back with this function.</p>
<p>If a key is removed compared to the current configuration,
and the key is know by Logger, the default value is used. If
it is a custom key, then it is up to the handler
@@ -605,12 +712,28 @@ Current logger configuration:
<p>Add or update configuration data for the specified
handler. If the given <c><anno>Key</anno></c> already
exists, its associated value will be changed
- to <c><anno>Value</anno></c>. If it doesn't exist, it will
+ to <c><anno>Value</anno></c>. If it does not exist, it will
be added.</p>
</desc>
</func>
<func>
+ <name name="update_handler_config" arity="2"/>
+ <fsummary>Update configuration data for the specified handler.</fsummary>
+ <desc>
+ <p>Update configuration data for the specified handler. This function
+ behaves as if it was implemented as follows:</p>
+ <code type="erl">
+{ok,{_,Old}} = logger:get_handler_config(HandlerId),
+logger:set_handler_config(HandlerId,maps:merge(Old,Config)).
+ </code>
+ <p>To overwrite the existing configuration without any merge,
+ use <seealso marker="#set_handler_config-2"><c>set_handler_config/2</c>
+ </seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
<name name="compare_levels" arity="2"/>
<fsummary>Compare the severity of two log levels.</fsummary>
<desc>
@@ -650,7 +773,7 @@ Current logger configuration:
<p>If process metadata exists for the current process, this
function behaves as if it was implemented as follows:</p>
<code type="erl">
-logger:set_process_metadata(maps:merge(logger:get_process_metadata(),Meta))
+logger:set_process_metadata(maps:merge(logger:get_process_metadata(),Meta)).
</code>
<p>If no process metadata exists, the function behaves as
<seealso marker="#set_process_metadata-1">
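
As a usage sketch of the update functions added above (the keys and values are illustrative):

    %% merge a new level into the default handler's existing configuration
    ok = logger:update_handler_config(default, #{level => error}),
    %% merge a new global level into the logger configuration
    ok = logger:update_logger_config(#{level => warning}).
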
diff --git a/lib/kernel/doc/src/logger_chapter.xml b/lib/kernel/doc/src/logger_chapter.xml
index 519df2ba48..4232429589 100644
--- a/lib/kernel/doc/src/logger_chapter.xml
+++ b/lib/kernel/doc/src/logger_chapter.xml
@@ -136,7 +136,7 @@
<item>
<p>Filters can be set on the logger or on a handler. Logger
filters are applied first, and if passed, the handler filters
- for each handler are applied. The handler plugin is only
+ for each handler are applied. The handler callback is only
called if all handler filters for the handler in question also
pass.</p>
@@ -159,7 +159,7 @@
<code>format(Log,Extra) -> unicode:chardata()</code>
- <p>The formatter plugin is called by each handler, and the
+ <p>The formatter callback is called by each handler, and the
returned string can be printed to the handler's destination
(stdout, file, ...).</p>
</item>
@@ -214,8 +214,8 @@
<tag><c>logger_filters:level/2</c></tag>
<item>
<p>This filter provides a way of filtering log events based
- on the log level. See <seealso marker="logger_filters#domain-2">
- <c>logger_filters:domain/2</c></seealso></p>
+ on the log level. See <seealso marker="logger_filters#level-2">
+ <c>logger_filters:level/2</c></seealso></p>
</item>
<tag><c>logger_filters:progress/2</c></tag>
@@ -248,11 +248,97 @@
<section>
<title>Configuration</title>
+ <p>Logger can be configured either when the system starts through
+ <seealso marker="config">configuration parameters</seealso>,
+ or at run-time by using the <seealso marker="logger">logger</seealso>
+ API. The recommended approach is to do the initial configuration in
+ the <c>sys.config</c> file and then use the API when some configuration
+ has to be changed at run-time, such as the logging level.</p>
+
<section>
- <title>Application environment variables</title>
- <p>See <seealso marker="kernel_app#configuration">Kernel(6)</seealso> for
- information about the application environment variables that can
- be used for configuring logger.</p>
+ <title>Application configuration parameters</title>
+ <p>Logger is best configured by using the configuration parameters
+ of kernel. There are four possible configuration parameters:
+ <seealso marker="#logger"><c>logger</c></seealso>,
+ <seealso marker="kernel_app#logger_level"><c>logger_level</c></seealso>,
+ <seealso marker="kernel_app#logger_sasl_compatible"><c>logger_sasl_compatible</c></seealso> and
+ <seealso marker="kernel_app#logger_log_progress"><c>logger_log_progress</c></seealso>.
+ logger_level, logger_sasl_compatible and logger_log_progress are described in the
+ <seealso marker="kernel_app#configuration">Kernel Configuration</seealso>,
+ while <c>logger</c> is described below.</p>
+ <section>
+ <marker id="logger"/>
+ <title>logger</title>
+ <p>The <c>logger</c> application configuration parameter is used to configure
+ three different logger aspects; handlers, logger filters and module levels.
+ The configuration is a list containing tagged tuples that look like this:</p>
+ <taglist>
+ <tag><c>DisableHandler = {handler,default,undefined}</c></tag>
+ <item>Disable the default handler. This will allow another application
+ to add its own default handler. See <seealso marker="logger#add_handlers/1">
+ <c>logger:add_handlers/1</c></seealso> for more details.</item>
+ <tag><c>AddHandler = {handler,HandlerId,Module,HandlerConfig}</c></tag>
+ <item>Add a handler as if <seealso marker="logger:add_handler/3">
+ <c>logger:add_handler(HandlerId,Module,HandlerConfig)</c></seealso> had been
+ called.</item>
+ <tag><c>Filters = {filters, FilterDefault, [Filter]}</c><br/>
+ <c>FilterDefault = log | stop</c><br/>
+ <c>Filter = {FilterId, {FilterFun, FilterConfig}}</c></tag>
+ <item>Add the specified <seealso marker="logger#add_logger_filter/2">
+ logger filters</seealso>. Only one entry of this option is allowed.</item>
+ <tag><c>ModuleLevel</c></tag>
+ <item><c>{module_level, Level, [Module]}</c>,
+ this option configures the <seealso marker="logger#set_module_level/2">
+ module log level</seealso> to be used. It is possible to have multiple
+ <c>module_level</c> entries.</item>
+ </taglist>
+ <p>Examples:</p>
+ <list>
+ <item>
+          <p>Output logs into the file &quot;log/erlang.log&quot;</p>
+ <code>
+[{kernel,
+ [{logger,
+ [{handler, default, logger_std_h,
+ #{ logger_std_h => #{ type => {file,"log/erlang.log"}}}}]}]}].
+ </code>
+ </item>
+ <item>
+ <p>Output logs in single line format</p>
+ <code>
+[{kernel,
+ [{logger,
+ [{handler, default, logger_std_h,
+ #{ formatter => { logger_formatter,#{ single_line => true}}}}]}]}].
+ </code>
+ </item>
+ <item>
+ <p>Add the pid to each log event</p>
+ <code>
+[{kernel,
+ [{logger,
+ [{handler, default, logger_std_h,
+ #{ formatter => { logger_formatter,
+ #{ template => [time," ",pid," ",msg,"\n"]}}
+ }}]}]}].
+ </code>
+ </item>
+ <item>
+ <p>Use a different file for debug logging</p>
+ <code>
+[{kernel,
+ [{logger,
+ [{handler, default, logger_std_h,
+ #{ level => error,
+ logger_std_h => #{ type => {file, "log/erlang.log"}}}},
+ {handler, info, logger_std_h,
+ #{ level => debug,
+ logger_std_h => #{ type => {file, "log/debug.log"}}}}
+ ]}]}].
+ </code>
+ </item>
+ </list>
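A further sketch, combining a handler entry with the module_level entry described above (the module name my_module is hypothetical):

[{kernel,
  [{logger,
    [{handler, default, logger_std_h,
      #{ logger_std_h => #{ type => standard_io }}},
     {module_level, debug, [my_module]}]}]}].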
+ </section>
</section>
<section>
@@ -330,6 +416,13 @@
<c>logger_formatter</c></seealso>, and <c>Extra</c> is
it's configuration map.</p>
</item>
+ <tag>HandlerConfig, <c>term() = term()</c></tag>
+ <item>
+        Any keys not listed above are considered to be handler-specific
+ configuration. The configuration of the Kernel handlers can be found in
+ <seealso marker="logger_std_h"><c>logger_std_h</c></seealso> and
+ <seealso marker="logger_disk_log_h"><c>logger_disk_log_h</c></seealso>.
+ </item>
</taglist>
<p>Note that <c>level</c> and <c>filters</c> are obeyed by
@@ -425,7 +518,7 @@ error_logger:add_report_handler/1,2.
handler named <c>sasl_h</c>.</p>
<p>All SASL reports have a metadata
field <c>domain=>[beam,erlang,otp,sasl]</c>, which can be
- used, for example, by filters to to stop or allow the
+ used, for example, by filters to stop or allow the
events.</p>
</item>
</taglist>
@@ -661,10 +754,20 @@ do_log(Fd,Log,#{formatter:={FModule,FConfig}}) ->
</item>
</taglist>
- <p>For the overload protection algorithm to work properly, it is a
- requirement that:</p>
+ <p>For the overload protection algorithm to work properly, it is
+ required that:</p>
+
+ <p><c>toggle_sync_qlen =&lt; drop_new_reqs_qlen =&lt; flush_reqs_qlen</c></p>
+
+ <p>and that:</p>
+
+ <p><c>drop_new_reqs_qlen &gt; 1</c></p>
- <p><c>toggle_sync_qlen &lt; drop_new_reqs_qlen &lt; flush_reqs_qlen</c></p>
+ <p>If <c>toggle_sync_qlen</c> is set to <c>0</c>, the handler will handle all
+      requests synchronously. Setting <c>toggle_sync_qlen</c> to the same value
+      as <c>drop_new_reqs_qlen</c> disables synchronous mode. Likewise, setting
+      <c>drop_new_reqs_qlen</c> to the same value as <c>flush_reqs_qlen</c>
+      disables drop mode.</p>
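A sketch with invented limits that satisfy the constraints above, assuming (as described for logger_std_h) that these keys live in the handler's logger_std_h sub-map:

%% toggle_sync_qlen =< drop_new_reqs_qlen =< flush_reqs_qlen, and drop_new_reqs_qlen > 1;
%% the handler id my_overload_h is hypothetical.
logger:add_handler(my_overload_h, logger_std_h,
                   #{logger_std_h => #{type => standard_io,
                                       toggle_sync_qlen => 10,
                                       drop_new_reqs_qlen => 200,
                                       flush_reqs_qlen => 1000}}).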
<p>During high load scenarios, the length of the handler message queue
rarely grows in a linear and predictable way. Instead, whenever the
diff --git a/lib/kernel/doc/src/logger_disk_log_h.xml b/lib/kernel/doc/src/logger_disk_log_h.xml
index 90cc4fec30..440ae28e5d 100644
--- a/lib/kernel/doc/src/logger_disk_log_h.xml
+++ b/lib/kernel/doc/src/logger_disk_log_h.xml
@@ -121,11 +121,11 @@ logger:add_handler(my_disk_log_h, logger_disk_log_h,
#{filesync_repeat_interval => 1000}}).
</code>
<p>In order to use the disk_log handler instead of the default standard
- handler when starting en Erlang node, use the kernel configuration parameter
- <seealso marker="kernel_app#configuration"><c>logger_dest</c></seealso> with
- value <c>{disk_log,FileName}</c>. Example:</p>
+      handler when starting an Erlang node, change the Kernel default logger to
+ use disk_log. Example:</p>
<code type="none">
-erl -kernel logger_dest '{disk_log,"./system_disk_log"}'
+erl -kernel logger '[{handler,default,logger_disk_log_h,
+ #{ disk_log_opts => #{ file => "./system_disk_log"}}}]'
</code>
</description>
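The same replacement can be expressed in sys.config using the handler tuple format from the Logger chapter; a sketch reusing the file name from the erl example above:

[{kernel,
  [{logger,
    [{handler, default, logger_disk_log_h,
      #{ disk_log_opts => #{ file => "./system_disk_log" }}}]}]}].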
diff --git a/lib/kernel/doc/src/logger_filters.xml b/lib/kernel/doc/src/logger_filters.xml
index c34ec7d14c..1bbae8be21 100644
--- a/lib/kernel/doc/src/logger_filters.xml
+++ b/lib/kernel/doc/src/logger_filters.xml
@@ -78,6 +78,10 @@
<tag><c><anno>Compare</anno> = equals</c></tag>
<item><p>The filter matches if <c>Domain</c> is equal
to <c>MatchDomain</c>.</p></item>
+ <tag><c><anno>Compare</anno> = differs</c></tag>
+ <item><p>The filter matches if <c>Domain</c> differs
+ from <c>MatchDomain</c>, or if there is no domain field
+ in metadata.</p></item>
<tag><c><anno>Compare</anno> = no_domain</c></tag>
<item><p>The filter matches if there is no domain field in
metadata. In this case <c><anno>MatchDomain</anno></c> shall
diff --git a/lib/kernel/doc/src/logger_formatter.xml b/lib/kernel/doc/src/logger_formatter.xml
index 7df4c88f40..370d61d338 100644
--- a/lib/kernel/doc/src/logger_formatter.xml
+++ b/lib/kernel/doc/src/logger_formatter.xml
@@ -66,7 +66,7 @@
be truncated by the <c>max_size</c> parameter.</p>
</note>
</item>
- <tag><c>depth = pos_integer() | unlimited</c></tag>
+ <tag><marker id="depth"/><c>depth = pos_integer() | unlimited</c></tag>
<item>
<p>A positive integer representing the maximum depth to
which terms shall be printed by this formatter. Format
@@ -155,11 +155,40 @@
and <c>single_line</c>. See <seealso marker="#default_templates">Default
Templates</seealso> for more information</p>
</item>
- <tag><c>utc = boolean()</c></tag>
+ <tag><c>time_designator = byte()</c></tag>
<item>
- <p>If set to <c>true</c>, all dates are displayed in Universal
- Coordinated Time.</p>
- <p>Default is <c>false</c>.</p>
+ <p>Timestamps are formatted according to RFC3339, and the time
+ designator is the character used as date and time
+ separator.</p>
+ <p>Default is <c>$T</c>.</p>
+ <p>The value of this parameter is used as
+ the <c>time_designator</c> option
+ to <seealso marker="stdlib:calendar#system_time_to_rfc3339-2">
+          <c>calendar:system_time_to_rfc3339/2</c></seealso>.</p>
+ </item>
+ <tag><c>time_offset = integer() | [byte()]</c></tag>
+ <item>
+ <p>The time offset, either a string or an integer, to be
+ used when formatting the timestamp.</p>
+ <p>An empty string is interpreted as local time. The
+ values <c>"Z"</c>, <c>"z"</c> or <c>0</c> are interpreted as
+ Universal Coordinated Time (UTC).</p>
+ <p>Strings, other than <c>"Z"</c>, <c>"z"</c>, or <c>""</c>,
+          must be of the form <c>±[hh]:[mm]</c>, for
+ example <c>"-02:00"</c> or <c>"+00:00"</c>.</p>
+ <p>Integers must be in microseconds, meaning that the
+ offset <c>7200000000</c> is equivalent
+ to <c>"+02:00"</c>.</p>
+ <p>The default value is an empty string, meaning that
+ timestamps are displayed in local time. However, for
+ backwards compatibility, if the SASL environment
+ variable <seealso marker="sasl:sasl_app#utc_log">
+ <c>utc_log</c></seealso><c>=true</c>, the default is
+ changed to <c>"Z"</c>, meaning that timestamps are displayed
+ in UTC.</p>
+ <p>The value of this parameter is used as the <c>offset</c>
+ option to <seealso marker="stdlib:calendar#system_time_to_rfc3339-2">
+          <c>calendar:system_time_to_rfc3339/2</c></seealso>.</p>
</item>
</taglist>
</section>
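A sketch combining the two new parameters at run-time (the default handler id and the chosen values are only examples):

%% Show timestamps in UTC and separate date and time with a space.
logger:set_handler_config(default, formatter,
    {logger_formatter, #{time_offset => "Z", time_designator => $\s}}).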
@@ -174,7 +203,7 @@
<p>The log event used in the examples is:</p>
<code>
-?LOG_ERROR("name: ~p~nexit_reason: ~p",[my_reg_name,"It crashed"])</code>
+?LOG_ERROR("name: ~p~nexit_reason: ~p",[my_name,"It crashed"])</code>
<taglist>
<tag><c>legacy_header=true</c></tag>
@@ -182,9 +211,9 @@
<p>Default template: <c>[{logger_formatter,header},"\n",msg,"\n"]</c></p>
<p>Example log entry:</p>
- <code>
-=ERROR REPORT==== 29-Dec-2017::13:30:51.245123 ===
-name: my_reg_name
+ <code type="none">
+2018-05-16T11:55:50.448382+02:00 error:
+name: my_name
exit_reason: "It crashed"</code>
<p>Notice that all eight levels might occur in the heading,
@@ -198,7 +227,7 @@ exit_reason: "It crashed"</code>
<p>Default template: <c>[time," ",level,": ",msg,"\n"]</c></p>
<p>Example log entry:</p>
- <code>2017-12-29 13:31:49.640317 error: name: my_reg_name, exit_reason: "It crashed"</code>
+ <code type="none">2018-05-16T11:55:50.448382+02:00 error: name: my_name, exit_reason: "It crashed"</code>
</item>
<tag><c>legacy_header=false, single_line=false</c></tag>
@@ -206,9 +235,9 @@ exit_reason: "It crashed"</code>
<p>Default template: <c>[time," ",level,":\n",msg,"\n"]</c></p>
<p>Example log entry:</p>
- <code>
-2017-12-29 13:32:25.191925 error:
-name: my_reg_name
+ <code type="none">
+2018-05-16T11:55:50.448382+02:00 error:
+name: my_name
exit_reason: "It crashed"</code>
</item>
</taglist>
diff --git a/lib/kernel/doc/src/logger_std_h.xml b/lib/kernel/doc/src/logger_std_h.xml
index fe9b9ca5a9..bf23d874c8 100644
--- a/lib/kernel/doc/src/logger_std_h.xml
+++ b/lib/kernel/doc/src/logger_std_h.xml
@@ -40,7 +40,7 @@
application. Multiple instances of this handler can be added to
logger, and each instance will print logs to <c>standard_io</c>,
<c>standard_error</c> or to file. The default instance that starts
- with kernel is named <c>logger_std_h</c> - which is the name to be used
+ with kernel is named <c>default</c> - which is the name to be used
for reconfiguration.</p>
<p>The handler has an overload protection mechanism that will keep the handler
process and the kernel application alive during a high load of log
@@ -57,7 +57,7 @@
are stored in a sub map with the key <c>logger_std_h</c>. The following
keys and values may be specified:</p>
<taglist>
- <tag><c>type</c></tag>
+ <tag><marker id="type"/><c>type</c></tag>
<item>
<p>This will have the value <c>standard_io</c>, <c>standard_error</c>,
<c>{file,LogFileName}</c>, or <c>{file,LogFileName,LogFileOpts}</c>,
@@ -105,11 +105,10 @@ logger:add_handler(my_standard_h, logger_std_h,
</code>
<p>In order to configure the default handler (that starts initially with
the kernel application) to log to file instead of <c>standard_io</c>,
- use the kernel configuration parameter
- <seealso marker="kernel_app#configuration"><c>logger_dest</c></seealso> with
- value <c>{file,FileName}</c>. Example:</p>
+ change the Kernel default logger to use a file. Example:</p>
<code type="none">
-erl -kernel logger_dest '{file,"./erl.log"}'
+erl -kernel logger '[{handler,default,logger_std_h,
+ #{ logger_std_h => #{ type => {file,"./log.log"}}}}]'
</code>
<p>An example of how to replace the standard handler with a disk_log handler
at startup can be found in the manual of
diff --git a/lib/kernel/doc/src/ref_man.xml b/lib/kernel/doc/src/ref_man.xml
index c06914d23d..b6c2714664 100644
--- a/lib/kernel/doc/src/ref_man.xml
+++ b/lib/kernel/doc/src/ref_man.xml
@@ -32,12 +32,15 @@
</description>
<xi:include href="kernel_app.xml"/>
+ <xi:include href="app.xml"/>
<xi:include href="application.xml"/>
<xi:include href="auth.xml"/>
<xi:include href="code.xml"/>
+ <xi:include href="config.xml"/>
<xi:include href="disk_log.xml"/>
<xi:include href="erl_boot_server.xml"/>
<xi:include href="erl_ddll.xml"/>
+ <xi:include href="erl_epmd.xml"/>
<xi:include href="erl_prim_loader_stub.xml"/>
<xi:include href="erlang_stub.xml"/>
<xi:include href="error_handler.xml"/>
@@ -66,6 +69,4 @@
<xi:include href="user.xml"/>
<xi:include href="wrap_log_reader.xml"/>
<xi:include href="zlib_stub.xml"/>
- <xi:include href="app.xml"/>
- <xi:include href="config.xml"/>
</application>
diff --git a/lib/kernel/doc/src/specs.xml b/lib/kernel/doc/src/specs.xml
index bcc422930e..b8c25ca53b 100644
--- a/lib/kernel/doc/src/specs.xml
+++ b/lib/kernel/doc/src/specs.xml
@@ -6,6 +6,7 @@
<xi:include href="../specs/specs_disk_log.xml"/>
<xi:include href="../specs/specs_erl_boot_server.xml"/>
<xi:include href="../specs/specs_erl_ddll.xml"/>
+ <xi:include href="../specs/specs_erl_epmd.xml"/>
<xi:include href="../specs/specs_erl_prim_loader_stub.xml"/>
<xi:include href="../specs/specs_erlang_stub.xml"/>
<xi:include href="../specs/specs_error_handler.xml"/>
diff --git a/lib/kernel/src/application_controller.erl b/lib/kernel/src/application_controller.erl
index b9cb722575..ff5df667b5 100644
--- a/lib/kernel/src/application_controller.erl
+++ b/lib/kernel/src/application_controller.erl
@@ -1272,9 +1272,7 @@ load(S, {ApplData, ApplEnv, IncApps, Descr, Id, Vsn, Apps}) ->
NewEnv = merge_app_env(ApplEnv, ConfEnv),
CmdLineEnv = get_cmd_env(Name),
NewEnv2 = merge_app_env(NewEnv, CmdLineEnv),
- NewEnv3 = keyreplaceadd(included_applications, 1, NewEnv2,
- {included_applications, IncApps}),
- add_env(Name, NewEnv3),
+ add_env(Name, NewEnv2),
Appl = #appl{name = Name, descr = Descr, id = Id, vsn = Vsn,
appl_data = ApplData, inc_apps = IncApps, apps = Apps},
ets:insert(ac_tab, {{loaded, Name}, Appl}),
@@ -1292,7 +1290,7 @@ load(S, {ApplData, ApplEnv, IncApps, Descr, Id, Vsn, Apps}) ->
{ok, NewS}.
unload(AppName, S) ->
- {ok, IncApps} = get_env(AppName, included_applications),
+ {ok, IncApps} = get_key(AppName, included_applications),
del_env(AppName),
ets:delete(ac_tab, {loaded, AppName}),
foldl(fun(App, S1) ->
@@ -1583,13 +1581,9 @@ do_change_appl({ok, {ApplData, Env, IncApps, Descr, Id, Vsn, Apps}},
CmdLineEnv = get_cmd_env(AppName),
NewEnv2 = merge_app_env(NewEnv1, CmdLineEnv),
- %% included_apps is made into an env parameter as well
- NewEnv3 = keyreplaceadd(included_applications, 1, NewEnv2,
- {included_applications, IncApps}),
-
%% Update ets table with new application env
del_env(AppName),
- add_env(AppName, NewEnv3),
+ add_env(AppName, NewEnv2),
OldAppl#appl{appl_data=ApplData,
descr=Descr,
diff --git a/lib/kernel/src/erl_epmd.erl b/lib/kernel/src/erl_epmd.erl
index f96bc88913..9a0939972d 100644
--- a/lib/kernel/src/erl_epmd.erl
+++ b/lib/kernel/src/erl_epmd.erl
@@ -29,10 +29,20 @@
-define(port_please_failure2(Term), noop).
-endif.
+-ifndef(erlang_daemon_port).
+-define(erlang_daemon_port, 4369).
+-endif.
+-ifndef(epmd_dist_high).
+-define(epmd_dist_high, 4370).
+-endif.
+-ifndef(epmd_dist_low).
+-define(epmd_dist_low, 4370).
+-endif.
+
%% External exports
-export([start/0, start_link/0, stop/0, port_please/2,
port_please/3, names/0, names/1,
- register_node/2, register_node/3, open/0, open/1, open/2]).
+ register_node/2, register_node/3, address_please/3, open/0, open/1, open/2]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -53,7 +63,7 @@
start() ->
gen_server:start({local, erl_epmd}, ?MODULE, [], []).
-
+-spec start_link() -> {ok, pid()} | ignore | {error,term()}.
start_link() ->
gen_server:start_link({local, erl_epmd}, ?MODULE, [], []).
@@ -66,9 +76,22 @@ stop() ->
%% return {port, P, Version} | noport
%%
+-spec port_please(Name, Host) -> {port, Port, Version} | noport when
+	  Name :: string(),
+	  Host :: atom() | string() | inet:ip_address(),
+	  Port :: non_neg_integer(),
+	  Version :: non_neg_integer().
+
port_please(Node, Host) ->
port_please(Node, Host, infinity).
+-spec port_please(Name, Host, Timeout) -> {port, Port, Version} | noport when
+	  Name :: string(),
+	  Host :: atom() | string() | inet:ip_address(),
+	  Timeout :: non_neg_integer() | infinity,
+	  Port :: non_neg_integer(),
+	  Version :: non_neg_integer().
+
port_please(Node,HostName, Timeout) when is_atom(HostName) ->
port_please1(Node,atom_to_list(HostName), Timeout);
port_please(Node,HostName, Timeout) when is_list(HostName) ->
@@ -92,10 +115,21 @@ port_please1(Node,HostName, Timeout) ->
Else
end.
+-spec names() -> {ok, [{Name, Port}]} | {error, Reason} when
+ Name :: string(),
+ Port :: non_neg_integer(),
+ Reason :: address | file:posix().
+
names() ->
{ok, H} = inet:gethostname(),
names(H).
+-spec names(Host) -> {ok, [{Name, Port}]} | {error, Reason} when
+ Host :: atom() | string() | inet:ip_address(),
+ Name :: string(),
+ Port :: non_neg_integer(),
+ Reason :: address | file:posix().
+
names(HostName) when is_atom(HostName); is_list(HostName) ->
case inet:gethostbyname(HostName) of
{ok,{hostent, _Name, _ , _Af, _Size, [EpmdAddr | _]}} ->
@@ -106,9 +140,22 @@ names(HostName) when is_atom(HostName); is_list(HostName) ->
names(EpmdAddr) ->
get_names(EpmdAddr).
+-spec register_node(Name, Port) -> Result when
+ Name :: string(),
+ Port :: non_neg_integer(),
+ Creation :: non_neg_integer(),
+ Result :: {ok, Creation} | {error, already_registered} | term().
register_node(Name, PortNo) ->
- register_node(Name, PortNo, inet).
+ register_node(Name, PortNo, inet).
+
+-spec register_node(Name, Port, Driver) -> Result when
+ Name :: string(),
+ Port :: non_neg_integer(),
+ Driver :: inet_tcp | inet6_tcp | inet | inet6,
+ Creation :: non_neg_integer(),
+ Result :: {ok, Creation} | {error, already_registered} | term().
+
register_node(Name, PortNo, inet_tcp) ->
register_node(Name, PortNo, inet);
register_node(Name, PortNo, inet6_tcp) ->
@@ -116,6 +163,17 @@ register_node(Name, PortNo, inet6_tcp) ->
register_node(Name, PortNo, Family) ->
gen_server:call(erl_epmd, {register, Name, PortNo, Family}, infinity).
+-spec address_please(Name, Host, AddressFamily) -> Success | {error, term()} when
+ Name :: string(),
+ Host :: string() | inet:ip_address(),
+ AddressFamily :: inet | inet6,
+ Port :: non_neg_integer(),
+ Version :: non_neg_integer(),
+ Success :: {ok, inet:ip_address()} | {ok, inet:ip_address(), Port, Version}.
+
+address_please(_Name, Host, AddressFamily) ->
+ inet:getaddr(Host, AddressFamily).
+
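A sketch of an alternative discovery module exploiting the new address_please/3 contract (module name, port, and version are invented; a complete replacement must also provide the other erl_epmd callbacks). Returning {ok, Ip, Port, Version} lets inet_tcp_dist further down in this patch skip the separate port_please lookup:

-module(my_epmd).
-export([address_please/3]).

%% Selected with something like 'erl -epmd_module my_epmd' (hypothetical setup).
address_please(_Name, Host, AddressFamily) ->
    {ok, Ip} = inet:getaddr(Host, AddressFamily),
    %% Fixed distribution port and protocol version: no EPMD round-trip needed.
    {ok, Ip, 12345, 5}.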
%%%----------------------------------------------------------------------
%%% Callback functions from gen_server
%%%----------------------------------------------------------------------
diff --git a/lib/kernel/src/erl_signal_handler.erl b/lib/kernel/src/erl_signal_handler.erl
index 22f235d4e4..b76c2a217a 100644
--- a/lib/kernel/src/erl_signal_handler.erl
+++ b/lib/kernel/src/erl_signal_handler.erl
@@ -19,12 +19,21 @@
-module(erl_signal_handler).
-behaviour(gen_event).
--export([init/1, format_status/2,
+-export([start/0, init/1, format_status/2,
handle_event/2, handle_call/2, handle_info/2,
terminate/2, code_change/3]).
-record(state,{}).
+start() ->
+ %% add signal handler
+ case whereis(erl_signal_server) of
+ %% in case of minimal mode
+ undefined -> ok;
+ _ ->
+ gen_event:add_handler(erl_signal_server, erl_signal_handler, [])
+ end.
+
init(_Args) ->
{ok, #state{}}.
diff --git a/lib/kernel/src/error_logger.erl b/lib/kernel/src/error_logger.erl
index 47d0ca5ea3..6c3b308308 100644
--- a/lib/kernel/src/error_logger.erl
+++ b/lib/kernel/src/error_logger.erl
@@ -529,18 +529,38 @@ logfile(filename) ->
Flag :: boolean().
tty(true) ->
- case lists:member(error_logger_tty_h, which_report_handlers()) of
- false ->
- add_report_handler(error_logger_tty_h, []);
- true ->
- ignore
- end,
+ _ = case lists:member(error_logger_tty_h, which_report_handlers()) of
+ false ->
+ case logger:get_handler_config(default) of
+ {ok,{logger_std_h,#{logger_std_h:=#{type:=standard_io}}}} ->
+ logger:remove_handler_filter(default,
+ error_logger_tty_false);
+ _ ->
+ logger:add_handler(error_logger_tty_true,logger_std_h,
+ #{filter_default=>stop,
+ filters=>?DEFAULT_HANDLER_FILTERS(
+ [beam,erlang,otp]),
+ formatter=>{?DEFAULT_FORMATTER,
+ ?DEFAULT_FORMAT_CONFIG},
+ logger_std_h=>#{type=>standard_io}})
+ end;
+ true ->
+ ok
+ end,
ok;
tty(false) ->
- delete_report_handler(error_logger_tty_h).
+ delete_report_handler(error_logger_tty_h),
+ _ = logger:remove_handler(error_logger_tty_true),
+ _ = case logger:get_handler_config(default) of
+ {ok,{logger_std_h,#{logger_std_h:=#{type:=standard_io}}}} ->
+ logger:add_handler_filter(default,error_logger_tty_false,
+ {fun(_,_) -> stop end, ok});
+ _ ->
+ ok
+ end,
+ ok.
%%%-----------------------------------------------------------------
-
-spec limit_term(term()) -> term().
limit_term(Term) ->
@@ -552,4 +572,9 @@ limit_term(Term) ->
-spec get_format_depth() -> 'unlimited' | pos_integer().
get_format_depth() ->
- logger:get_format_depth().
+ case application:get_env(kernel, error_logger_format_depth) of
+ {ok, Depth} when is_integer(Depth) ->
+ max(10, Depth);
+ undefined ->
+ unlimited
+ end.
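A sketch of the clamping performed by max(10, Depth) above (the configured value 5 is arbitrary):

ok = application:set_env(kernel, error_logger_format_depth, 5),
10 = error_logger:get_format_depth().  %% clamped up to the minimum depth of 10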
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl
index 57d8fc7a15..1d4e37196c 100644
--- a/lib/kernel/src/file.erl
+++ b/lib/kernel/src/file.erl
@@ -69,7 +69,7 @@
%% Types that can be used from other modules -- alphabetically ordered.
-export_type([date_time/0, fd/0, file_info/0, filename/0, filename_all/0,
- io_device/0, name/0, name_all/0, posix/0]).
+ io_device/0, mode/0, name/0, name_all/0, posix/0]).
%%% Includes and defines
-include("file_int.hrl").
diff --git a/lib/kernel/src/inet_tcp_dist.erl b/lib/kernel/src/inet_tcp_dist.erl
index e3fdb1bb22..b4b50899f7 100644
--- a/lib/kernel/src/inet_tcp_dist.erl
+++ b/lib/kernel/src/inet_tcp_dist.erl
@@ -283,73 +283,22 @@ do_setup(Driver, Kernel, Node, Type, MyNode, LongOrShortNames, SetupTime) ->
?trace("~p~n",[{inet_tcp_dist,self(),setup,Node}]),
[Name, Address] = splitnode(Driver, Node, LongOrShortNames),
AddressFamily = Driver:family(),
- case inet:getaddr(Address, AddressFamily) of
+ ErlEpmd = net_kernel:epmd_module(),
+ {ARMod, ARFun} = get_address_resolver(ErlEpmd),
+ Timer = dist_util:start_timer(SetupTime),
+ case ARMod:ARFun(Name, Address, AddressFamily) of
+ {ok, Ip, TcpPort, Version} ->
+ ?trace("address_please(~p) -> version ~p~n",
+ [Node,Version]),
+ do_setup_connect(Driver, Kernel, Node, Address, AddressFamily,
+ Ip, TcpPort, Version, Type, MyNode, Timer);
{ok, Ip} ->
- Timer = dist_util:start_timer(SetupTime),
- ErlEpmd = net_kernel:epmd_module(),
case ErlEpmd:port_please(Name, Ip) of
{port, TcpPort, Version} ->
?trace("port_please(~p) -> version ~p~n",
[Node,Version]),
- dist_util:reset_timer(Timer),
- case
- Driver:connect(
- Ip, TcpPort,
- connect_options([{active, false}, {packet, 2}]))
- of
- {ok, Socket} ->
- HSData = #hs_data{
- kernel_pid = Kernel,
- other_node = Node,
- this_node = MyNode,
- socket = Socket,
- timer = Timer,
- this_flags = 0,
- other_version = Version,
- f_send = fun Driver:send/2,
- f_recv = fun Driver:recv/3,
- f_setopts_pre_nodeup =
- fun(S) ->
- inet:setopts
- (S,
- [{active, false},
- {packet, 4},
- nodelay()])
- end,
- f_setopts_post_nodeup =
- fun(S) ->
- inet:setopts
- (S,
- [{active, true},
- {deliver, port},
- {packet, 4},
- nodelay()])
- end,
-
- f_getll = fun inet:getll/1,
- f_address =
- fun(_,_) ->
- #net_address{
- address = {Ip,TcpPort},
- host = Address,
- protocol = tcp,
- family = AddressFamily}
- end,
- mf_tick = fun(S) -> ?MODULE:tick(Driver, S) end,
- mf_getstat = fun ?MODULE:getstat/1,
- request_type = Type,
- mf_setopts = fun ?MODULE:setopts/2,
- mf_getopts = fun ?MODULE:getopts/2
- },
- dist_util:handshake_we_started(HSData);
- _ ->
- %% Other Node may have closed since
- %% port_please !
- ?trace("other node (~p) "
- "closed since port_please.~n",
- [Node]),
- ?shutdown(Node)
- end;
+ do_setup_connect(Driver, Kernel, Node, Address, AddressFamily,
+ Ip, TcpPort, Version, Type, MyNode, Timer);
_ ->
?trace("port_please (~p) "
"failed.~n", [Node]),
@@ -361,6 +310,71 @@ do_setup(Driver, Kernel, Node, Type, MyNode, LongOrShortNames, SetupTime) ->
?shutdown(Node)
end.
+%%
+%% Actual setup of connection
+%%
+do_setup_connect(Driver, Kernel, Node, Address, AddressFamily,
+ Ip, TcpPort, Version, Type, MyNode, Timer) ->
+ dist_util:reset_timer(Timer),
+ case
+ Driver:connect(
+ Ip, TcpPort,
+ connect_options([{active, false}, {packet, 2}]))
+ of
+ {ok, Socket} ->
+ HSData = #hs_data{
+ kernel_pid = Kernel,
+ other_node = Node,
+ this_node = MyNode,
+ socket = Socket,
+ timer = Timer,
+ this_flags = 0,
+ other_version = Version,
+ f_send = fun Driver:send/2,
+ f_recv = fun Driver:recv/3,
+ f_setopts_pre_nodeup =
+ fun(S) ->
+ inet:setopts
+ (S,
+ [{active, false},
+ {packet, 4},
+ nodelay()])
+ end,
+ f_setopts_post_nodeup =
+ fun(S) ->
+ inet:setopts
+ (S,
+ [{active, true},
+ {deliver, port},
+ {packet, 4},
+ nodelay()])
+ end,
+
+ f_getll = fun inet:getll/1,
+ f_address =
+ fun(_,_) ->
+ #net_address{
+ address = {Ip,TcpPort},
+ host = Address,
+ protocol = tcp,
+ family = AddressFamily}
+ end,
+ mf_tick = fun(S) -> ?MODULE:tick(Driver, S) end,
+ mf_getstat = fun ?MODULE:getstat/1,
+ request_type = Type,
+ mf_setopts = fun ?MODULE:setopts/2,
+ mf_getopts = fun ?MODULE:getopts/2
+ },
+ dist_util:handshake_we_started(HSData);
+ _ ->
+ %% Other Node may have closed since
+ %% discovery !
+ ?trace("other node (~p) "
+ "closed since discovery (port_please).~n",
+ [Node]),
+ ?shutdown(Node)
+ end.
+
connect_options(Opts) ->
case application:get_env(kernel, inet_dist_connect_options) of
{ok,ConnectOpts} ->
@@ -430,6 +444,16 @@ get_tcp_address(Driver, Socket) ->
}.
%% ------------------------------------------------------------
+%% Determine if EPMD module supports address resolving. Default
+%% is to use inet:getaddr/2.
+%% ------------------------------------------------------------
+get_address_resolver(EpmdModule) ->
+ case erlang:function_exported(EpmdModule, address_please, 3) of
+ true -> {EpmdModule, address_please};
+ _ -> {inet, getaddr}
+ end.
+
+%% ------------------------------------------------------------
%% Do only accept new connection attempts from nodes at our
%% own LAN, if the check_ip environment parameter is true.
%% ------------------------------------------------------------
diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src
index afffcd156e..23ac5b3444 100644
--- a/lib/kernel/src/kernel.app.src
+++ b/lib/kernel/src/kernel.app.src
@@ -140,7 +140,10 @@
inet_db,
pg2]},
{applications, []},
- {env, []},
+ {env, [{logger_level, info},
+ {logger_sasl_compatible, false},
+ {logger_log_progress, false}
+ ]},
{mod, {kernel, []}},
{runtime_dependencies, ["erts-10.0", "stdlib-3.5", "sasl-3.0"]}
]
diff --git a/lib/kernel/src/kernel.erl b/lib/kernel/src/kernel.erl
index 20aa47f602..b0e8c00bbf 100644
--- a/lib/kernel/src/kernel.erl
+++ b/lib/kernel/src/kernel.erl
@@ -30,23 +30,13 @@
%%% Callback functions for the kernel application.
%%%-----------------------------------------------------------------
start(_, []) ->
+ %% Setup the logger and configure the kernel logger environment
+ ok = logger:internal_init_logger(),
case supervisor:start_link({local, kernel_sup}, kernel, []) of
{ok, Pid} ->
- %% add signal handler
- case whereis(erl_signal_server) of
- %% in case of minimal mode
- undefined -> ok;
- _ ->
- ok = gen_event:add_handler(erl_signal_server, erl_signal_handler, [])
- end,
- %% add error handler
- case logger:setup_standard_handler() of
- ok -> {ok, Pid, []};
- Error ->
- %% Not necessary since the node will crash anyway:
- exit(Pid, shutdown),
- Error
- end;
+ ok = erl_signal_handler:start(),
+ ok = logger:add_handlers(kernel),
+ {ok, Pid, []};
Error -> Error
end.
@@ -153,7 +143,7 @@ init([]) ->
case init:get_argument(mode) of
{ok, [["minimal"]]} ->
{ok, {SupFlags,
- [Code, File, StdError, User, Config, RefC, SafeSup, LoggerSup]}};
+ [Code, File, StdError, User, LoggerSup, Config, RefC, SafeSup]}};
_ ->
Rpc = #{id => rex,
start => {rpc, start_link, []},
diff --git a/lib/kernel/src/kernel_config.erl b/lib/kernel/src/kernel_config.erl
index 535083ef27..c5ff1887c2 100644
--- a/lib/kernel/src/kernel_config.erl
+++ b/lib/kernel/src/kernel_config.erl
@@ -30,11 +30,8 @@
%%%-----------------------------------------------------------------
%%% This module implements a process that configures the kernel
%%% application.
-%%% Its purpose is that in the init phase add an error_logger
-%%% and when it dies (when the kernel application dies) deleting the
-%%% previously installed error_logger.
-%%% Also, this process waits for other nodes at startup, if
-%%% specified.
+%%% Its purpose is to wait, during the init phase, for other nodes at
+%%% startup, if specified.
%%%-----------------------------------------------------------------
start_link() -> gen_server:start_link(kernel_config, [], []).
diff --git a/lib/kernel/src/logger.erl b/lib/kernel/src/logger.erl
index 98a9937111..a839f97e62 100644
--- a/lib/kernel/src/logger.erl
+++ b/lib/kernel/src/logger.erl
@@ -40,15 +40,18 @@
set_module_level/2, reset_module_level/1,
set_logger_config/1, set_logger_config/2,
set_handler_config/2, set_handler_config/3,
- get_logger_config/0, get_handler_config/1]).
+ update_logger_config/1, update_handler_config/2,
+ get_logger_config/0, get_handler_config/1,
+ add_handlers/1]).
+
+%% Private configuration
+-export([internal_init_logger/0]).
%% Misc
-export([compare_levels/2]).
-export([set_process_metadata/1, update_process_metadata/1,
unset_process_metadata/0, get_process_metadata/0]).
-export([i/0, i/1]).
--export([setup_standard_handler/0, replace_simple_handler/3]).
--export([limit_term/1, get_format_depth/0, get_max_size/0, get_utc_config/0]).
%% Basic report formatting
-export([format_report/1, format_otp_report/1]).
@@ -93,8 +96,10 @@
term() => term()}.
-type timestamp() :: integer().
+-type config_handler() :: {handler, handler_id(), module(), config()}.
+
-export_type([log/0,level/0,report/0,msg_fun/0,metadata/0,config/0,handler_id/0,
- filter_id/0,filter/0,filter_arg/0,filter_return/0]).
+ filter_id/0,filter/0,filter_arg/0,filter_return/0, config_handler/0]).
%%%-----------------------------------------------------------------
%%% API
@@ -357,10 +362,22 @@ set_handler_config(HandlerId,Key,Value) ->
set_handler_config(HandlerId,Config) ->
logger_server:set_config(HandlerId,Config).
+-spec update_logger_config(Config) -> ok | {error,term()} when
+ Config :: config().
+update_logger_config(Config) ->
+ logger_server:update_config(logger,Config).
+
+-spec update_handler_config(HandlerId,Config) -> ok | {error,term()} when
+ HandlerId :: handler_id(),
+ Config :: config().
+update_handler_config(HandlerId,Config) ->
+ logger_server:update_config(HandlerId,Config).
+
-spec get_logger_config() -> {ok,Config} when
Config :: config().
get_logger_config() ->
- logger_config:get(?LOGGER_TABLE,logger).
+ {ok,Config} = logger_config:get(?LOGGER_TABLE,logger),
+ {ok,maps:remove(handlers,Config)}.
-spec get_handler_config(HandlerId) -> {ok,{Module,Config}} | {error,term()} when
HandlerId :: handler_id(),
@@ -441,8 +458,9 @@ i() ->
i(_Action = print) ->
io:put_chars(i(string));
i(_Action = string) ->
- #{logger := #{level := Level, handlers := Handlers,
- filters := Filters, filter_default := FilterDefault},
+ #{logger := #{level := Level,
+ filters := Filters,
+ filter_default := FilterDefault},
handlers := HandlerConfigs,
module_levels := Modules} = i(term),
[io_lib:format("Current logger configuration:~n", []),
@@ -451,16 +469,15 @@ i(_Action = string) ->
io_lib:format(" Filters: ~n", []),
print_filters(4, Filters),
io_lib:format(" Handlers: ~n", []),
- print_handlers([C || {Id, _, _} = C <- HandlerConfigs,
- lists:member(Id, Handlers)]),
+ print_handlers(HandlerConfigs),
io_lib:format(" Level set per module: ~n", []),
print_module_levels(Modules)
];
i(_Action = term) ->
{Logger, Handlers, Modules} = logger_config:get(tid()),
- #{logger=>Logger,
- handlers=>Handlers,
- module_levels=>Modules}.
+ #{logger=>maps:remove(handlers,Logger),
+ handlers=>lists:keysort(1,Handlers),
+ module_levels=>lists:keysort(1,Modules)}.
print_filters(Indent, {Id, {Fun, Config}}) ->
io_lib:format("~sId: ~p~n"
@@ -504,118 +521,184 @@ print_module_levels({Module,Level}) ->
print_module_levels(ModuleLevels) ->
lists:map(fun print_module_levels/1, ModuleLevels).
--spec setup_standard_handler() -> ok | {error,term()}.
-setup_standard_handler() ->
- case get_logger_type() of
- {ok,silent} ->
- Level = get_logger_level(),
- ok = set_logger_config(level,Level),
- remove_handler(logger_simple);
- {ok,Type} ->
- Level = get_logger_level(),
- ok = set_logger_config(level,Level),
- Filters = get_logger_filters(),
- setup_standard_handler(Type,#{level=>Level,
- filter_default=>stop,
- filters=>Filters});
- Error ->
- Error
+-spec internal_init_logger() -> ok | {error,term()}.
+%% This function is responsible for config of the logger
+%% This is done before add_handlers because we want the
+%% logger settings to take effect before the kernel supervisor
+%% tree is started.
+internal_init_logger() ->
+ try
+ ok = logger:set_logger_config(level, get_logger_level()),
+ ok = logger:set_logger_config(filter_default, get_logger_filter_default()),
+
+ [case logger:add_logger_filter(Id, Filter) of
+ ok -> ok;
+ {error, Reason} -> throw(Reason)
+ end || {Id, Filter} <- get_logger_filters()],
+
+ _ = [[case logger:set_module_level(Module, Level) of
+ ok -> ok;
+ {error, Reason} -> throw(Reason)
+ end || Module <- Modules]
+ || {module_level, Level, Modules} <- get_logger_env()],
+
+ case logger:set_handler_config(logger_simple,filters,
+ get_default_handler_filters()) of
+ ok -> ok;
+ {error,{not_found,logger_simple}} -> ok
+ end,
+
+ init_kernel_handlers()
+ catch throw:Reason ->
+ ?LOG_ERROR("Invalid logger config: ~p", [Reason]),
+ {error, {bad_config, {kernel, Reason}}}
end.
--spec setup_standard_handler(Type,Config) -> ok | {error,term()} when
- Type :: tty | standard_io | standard_error | {file,File} |
- {file,File,Modes} | {disk_log,LogOpts} | false,
- File :: file:filename(),
- Modes :: [term()], % [file:mode()], or more specific?
- Config :: config(),
- LogOpts :: map().
-setup_standard_handler(false,#{level:=Level,filters:=Filters}) ->
- case set_handler_config(logger_simple,level,Level) of
- ok ->
- set_handler_config(logger_simple,filters,Filters);
- Error ->
- Error
- end;
-setup_standard_handler(Type,Config) ->
- {Module,TypeConfig} = get_type_config(Type),
- replace_simple_handler(?STANDARD_HANDLER,
- Module,
- maps:merge(Config,TypeConfig)).
-
--spec replace_simple_handler(Id,Module,Config) -> ok | {error,term()} when
- Id :: handler_id(),
- Module :: module(),
- Config :: config().
-replace_simple_handler(Id,Module,Config) ->
- _ = code:ensure_loaded(Module),
- DoBuffer = erlang:function_exported(Module,swap_buffer,2),
- case add_handler(Id,Module,Config#{wait_for_buffer=>DoBuffer}) of
- ok ->
- if DoBuffer ->
- {ok,Buffered} = logger_simple:get_buffer(),
- _ = remove_handler(logger_simple),
- Module:swap_buffer(?STANDARD_HANDLER,Buffered);
- true ->
- _ = remove_handler(logger_simple),
- ok
- end,
- ok;
- Error ->
- Error
+-spec init_kernel_handlers() -> ok | {error,term()}.
+%% Setup the kernel environment variables to be correct
+%% The actual handlers are started by a call to add_handlers.
+init_kernel_handlers() ->
+ try
+ case get_logger_type() of
+ {ok,silent} ->
+ ok = logger:remove_handler(logger_simple);
+ {ok,false} ->
+ ok;
+ {ok,Type} ->
+ init_default_config(Type)
+ end
+ catch throw:Reason ->
+ ?LOG_ERROR("Invalid default handler config: ~p", [Reason]),
+ {error, {bad_config, {kernel, Reason}}}
+ end.
+
+-spec add_handlers(Application) -> ok | {error,term()} when
+ Application :: atom();
+ (HandlerConfig) -> ok | {error,term()} when
+ HandlerConfig :: [config_handler()].
+%% This function is responsible for resolving the handler config
+%% and then starting the correct handlers. This is done after the
+%% kernel supervisor tree has been started as it needs the logger_sup.
+add_handlers(App) when is_atom(App) ->
+ add_handlers(application:get_env(App, logger, []));
+add_handlers(HandlerConfig) ->
+ try
+ check_logger_config(HandlerConfig),
+ DefaultAdded =
+ lists:foldl(
+ fun({handler, default = Id, Module, Config}, _)
+ when not is_map_key(filters, Config) ->
+ %% The default handler should have a couple of extra filters
+ %% set on it by default.
+ DefConfig = #{ filter_default => stop,
+ filters => get_default_handler_filters()},
+ setup_handler(Id, Module, maps:merge(DefConfig,Config)),
+ true;
+ ({handler, Id, Module, Config}, Default) ->
+ setup_handler(Id, Module, Config),
+ Default orelse Id == default;
+ (_, Default) -> Default
+ end, false, HandlerConfig),
+ %% If a default handler was added we try to remove the simple_logger
+ %% If the simple logger exists it will replay its log events
+ %% to the handler(s) added in the fold above.
+ _ = [case logger:remove_handler(logger_simple) of
+ ok -> ok;
+ {error,{not_found,logger_simple}} -> ok
+ end || DefaultAdded],
+ ok
+ catch throw:Reason ->
+ ?LOG_ERROR("Invalid logger handler config: ~p", [Reason]),
+ {error, {bad_config, {handler, Reason}}}
end.
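A sketch of how another application is expected to call add_handlers/1 from its application callback (my_app and my_app_sup are invented); together with the {handler,default,undefined} tuple in the Kernel environment this lets that application provide the default handler:

-module(my_app).
-behaviour(application).
-export([start/2, stop/1]).

start(_Type, _Args) ->
    %% Reads the handler tuples from my_app's own 'logger' environment key.
    ok = logger:add_handlers(my_app),
    my_app_sup:start_link().

stop(_State) ->
    ok.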
+setup_handler(Id, Module, Config) ->
+ case logger:add_handler(Id, Module, Config) of
+ ok -> ok;
+ {error, Reason} -> throw(Reason)
+ end.
+
+check_logger_config(_) ->
+ ok.
+
+-spec get_logger_type() -> {ok, standard_io | false | silent |
+ {file, file:name_all()} |
+ {file, file:name_all(), [file:mode()]}}.
get_logger_type() ->
- Type0 =
- case application:get_env(kernel, logger_dest) of
- undefined ->
- application:get_env(kernel, error_logger);
- T ->
- T
- end,
- case Type0 of
+ case application:get_env(kernel, error_logger) of
{ok, tty} ->
- {ok, tty};
+ {ok, standard_io};
{ok, {file, File}} when is_list(File) ->
{ok, {file, File}};
{ok, {file, File, Modes}} when is_list(File), is_list(Modes) ->
{ok, {file, File, Modes}};
- {ok, {disk_log, File}} when is_list(File) ->
- {ok, {disk_log, get_disk_log_config(File)}};
{ok, false} ->
{ok, false};
{ok, silent} ->
{ok, silent};
undefined ->
- {ok, tty}; % default value
+ case lists:member({handler,default,undefined}, get_logger_env()) of
+ true ->
+ {ok, false};
+ false ->
+ {ok, standard_io} % default value
+ end;
{ok, Bad} ->
- {error,{bad_config, {kernel, {logger_dest, Bad}}}}
+ throw({error_logger, Bad})
end.
-get_disk_log_config(File) ->
- Config1 =
- case application:get_env(kernel,logger_disk_log_maxfiles) of
- undefined -> #{};
- {ok,MF} -> #{max_no_files=>MF}
- end,
- Config2 =
- case application:get_env(kernel,logger_disk_log_maxbytes) of
- undefined -> Config1;
- {ok,MB} -> Config1#{max_no_bytes=>MB}
- end,
- Config3 =
- case application:get_env(kernel,logger_disk_log_type) of
- undefined -> Config2;
- {ok,T} -> Config1#{type=>T}
- end,
- Config3#{file=>File}.
-
get_logger_level() ->
- case application:get_env(kernel,logger_level) of
- undefined -> info;
- {ok,Level} when ?IS_LEVEL(Level) -> Level
+ case application:get_env(kernel,logger_level,info) of
+ Level when ?IS_LEVEL(Level) ->
+ Level;
+ Level ->
+ throw({logger_level, Level})
+ end.
+
+get_logger_filter_default() ->
+ case lists:keyfind(filters,1,get_logger_env()) of
+ {filters,Default,_} ->
+ Default;
+ false ->
+ log
end.
get_logger_filters() ->
+ lists:foldl(
+ fun({filters, _, Filters}, _Acc) ->
+ Filters;
+ (_, Acc) ->
+ Acc
+ end, [], get_logger_env()).
+
+%% This function looks at the kernel logger environment
+%% and updates it so that the correct logger is configured
+init_default_config(Type) when Type==standard_io;
+ Type==standard_error;
+ element(1,Type)==file ->
+ Env = get_logger_env(),
+ DefaultConfig = #{logger_std_h=>#{type=>Type}},
+ NewLoggerEnv =
+ case lists:keyfind(default, 2, Env) of
+ {handler, default, Module, Config} ->
+ lists:map(
+ fun({handler, default, logger_std_h, _}) ->
+ %% Only want to add the logger_std_h config
+ %% if not configured by user AND the default
+ %% handler is still the logger_std_h.
+ {handler, default, Module, maps:merge(DefaultConfig,Config)};
+ (Other) ->
+ Other
+ end, Env);
+ _ ->
+ %% Nothing has been configured, use default
+ [{handler, default, logger_std_h, DefaultConfig} | Env]
+ end,
+ application:set_env(kernel, logger, NewLoggerEnv, [{timeout,infinity}]);
+init_default_config(Type) ->
+ throw({illegal_logger_type,Type}).
+
+get_default_handler_filters() ->
case application:get_env(kernel, logger_sasl_compatible, false) of
true ->
?DEFAULT_HANDLER_FILTERS([beam,erlang,otp]);
@@ -631,77 +714,8 @@ get_logger_filters() ->
Extra ++ ?DEFAULT_HANDLER_FILTERS([beam,erlang,otp,sasl])
end.
-get_type_config({disk_log,LogOpts}) ->
- {logger_disk_log_h,#{disk_log_opts=>LogOpts}};
-get_type_config(tty) ->
- %% This is only for backwards compatibility with error_logger and
- %% old kernel and sasl environment variables
- get_type_config(standard_io);
-get_type_config(Type) when Type==standard_io;
- Type==standard_error;
- element(1,Type)==file ->
- {logger_std_h,#{logger_std_h=>#{type=>Type}}};
-get_type_config(Type) ->
- {error,{illegal_logger_type,Type}}.
-
-%%%-----------------------------------------------------------------
--spec limit_term(term()) -> term().
-
-limit_term(Term) ->
- try get_format_depth() of
- unlimited -> Term;
- D -> io_lib:limit_term(Term, D)
- catch error:badarg ->
- %% This could happen during system termination, after
- %% application_controller process is dead.
- unlimited
- end.
-
--spec get_format_depth() -> 'unlimited' | pos_integer().
-
-get_format_depth() ->
- Depth =
- case application:get_env(kernel, logger_format_depth) of
- {ok, D} when is_integer(D) ->
- D;
- undefined ->
- case application:get_env(kernel, error_logger_format_depth) of
- {ok, D} when is_integer(D) ->
- D;
- undefined ->
- unlimited
- end
- end,
- max(10, Depth).
-
--spec get_max_size() -> 'unlimited' | pos_integer().
-
-get_max_size() ->
- case application:get_env(kernel, logger_max_size) of
- {ok, Size} when is_integer(Size) ->
- max(50, Size);
- undefined ->
- unlimited
- end.
-
--spec get_utc_config() -> boolean().
-
-get_utc_config() ->
- %% Kernel's logger_utc configuration overrides SASL utc_log, which
- %% in turn overrides stdlib config - in order to have uniform
- %% timestamps in log messages
- case application:get_env(kernel, logger_utc) of
- {ok, Val} -> Val;
- undefined ->
- case application:get_env(sasl, utc_log) of
- {ok, Val} -> Val;
- undefined ->
- case application:get_env(stdlib, utc_log) of
- {ok, Val} -> Val;
- undefined -> false
- end
- end
- end.
+get_logger_env() ->
+ application:get_env(kernel, logger, []).
%%%-----------------------------------------------------------------
%%% Internal
diff --git a/lib/kernel/src/logger_config.erl b/lib/kernel/src/logger_config.erl
index 799aea9617..40dc1b1e1b 100644
--- a/lib/kernel/src/logger_config.erl
+++ b/lib/kernel/src/logger_config.erl
@@ -31,7 +31,7 @@
-include("logger_internal.hrl").
new(Name) ->
- _ = ets:new(Name,[set,protected,named_table]),
+ _ = ets:new(Name,[set,protected,named_table,{write_concurrency,true}]),
ets:whereis(Name).
delete(Tid,Id) ->
diff --git a/lib/kernel/src/logger_disk_log_h.erl b/lib/kernel/src/logger_disk_log_h.erl
index 0150fa781a..57c54ce27e 100644
--- a/lib/kernel/src/logger_disk_log_h.erl
+++ b/lib/kernel/src/logger_disk_log_h.erl
@@ -278,10 +278,11 @@ init([Name, Config = #{disk_log_opts := LogOpts},
last_log_ts => T0,
burst_win_ts => T0,
burst_msg_count => 0,
+ last_op => sync,
prev_log_result => ok,
prev_sync_result => ok,
prev_disk_log_info => undefined}),
- gen_server:cast(self(), {repeated_disk_log_sync,T0}),
+ gen_server:cast(self(), repeated_disk_log_sync),
enter_loop(Config, State1);
Error ->
logger_h_common:error_notify({open_disk_log,Name,Error}),
@@ -316,8 +317,7 @@ handle_call(disk_log_sync, _From, State = #{id := Name}) ->
{reply, Result, State1};
handle_call({change_config,_OldConfig,NewConfig}, _From,
- State = #{filesync_repeat_interval := FSyncInt0,
- last_log_ts := LastLogTS}) ->
+ State = #{filesync_repeat_interval := FSyncInt0}) ->
HConfig = maps:get(?MODULE, NewConfig, #{}),
State1 = #{toggle_sync_qlen := TSQL,
drop_new_reqs_qlen := DNRQL,
@@ -338,9 +338,8 @@ handle_call({change_config,_OldConfig,NewConfig}, _From,
_ = logger_h_common:cancel_timer(maps:get(rep_sync_tref,
State,
undefined)),
- _ = gen_server:cast(self(), {repeated_disk_log_sync,
- LastLogTS})
- end,
+ _ = gen_server:cast(self(), repeated_disk_log_sync)
+ end,
{reply, ok, State1};
false ->
{reply, {error,{invalid_levels,{TSQL,DNRQL,FRQL}}}, State}
@@ -370,24 +369,23 @@ handle_cast({log, Bin}, State) ->
%% clause gets called repeatedly by the handler. In order to
%% guarantee that a filesync *always* happens after the last log
%% request, the repeat operation must be active!
-handle_cast({repeated_disk_log_sync,LastLogTS0},
+handle_cast(repeated_disk_log_sync,
State = #{id := Name,
filesync_repeat_interval := FSyncInt,
- last_log_ts := LastLogTS1}) ->
+ last_op := LastOp}) ->
State1 =
if is_integer(FSyncInt) ->
%% only do filesync if something has been
%% written since last time we checked
- NewState = if LastLogTS1 == LastLogTS0 ->
+ NewState = if LastOp == sync ->
State;
true ->
disk_log_sync(Name, State)
end,
{ok,TRef} =
timer:apply_after(FSyncInt, gen_server,cast,
- [self(),
- {repeated_disk_log_sync,LastLogTS1}]),
- NewState#{rep_sync_tref => TRef};
+ [self(),repeated_disk_log_sync]),
+ NewState#{rep_sync_tref => TRef, last_op => sync};
true ->
State
end,
@@ -649,10 +647,9 @@ close_disk_log(Name, _) ->
ok.
disk_log_write(Name, Bin, State) ->
- Result =
case ?disk_log_blog(Name, Bin) of
ok ->
- ok;
+ State#{prev_log_result => ok, last_op => write};
LogError ->
_ = case maps:get(prev_log_result, State) of
LogError ->
@@ -664,29 +661,26 @@ disk_log_write(Name, Bin, State) ->
LogOpts,
LogError})
end,
- LogError
- end,
- State#{prev_log_result => Result}.
+ State#{prev_log_result => LogError}
+ end.
disk_log_sync(Name, State) ->
- Result =
- case ?disk_log_sync(Name) of
- ok ->
- ok;
- SyncError ->
- _ = case maps:get(prev_sync_result, State) of
- SyncError ->
- %% don't report same error twice
- ok;
- _ ->
- LogOpts = maps:get(log_opts, State),
- logger_h_common:error_notify({Name,sync,
- LogOpts,
- SyncError})
- end,
- SyncError
- end,
- State#{prev_sync_result => Result}.
+ case ?disk_log_sync(Name) of
+ ok ->
+ State#{prev_sync_result => ok, last_op => sync};
+ SyncError ->
+ _ = case maps:get(prev_sync_result, State) of
+ SyncError ->
+ %% don't report same error twice
+ ok;
+ _ ->
+ LogOpts = maps:get(log_opts, State),
+ logger_h_common:error_notify({Name,sync,
+ LogOpts,
+ SyncError})
+ end,
+ State#{prev_sync_result => SyncError}
+ end.
error_notify_new(Info,Info, _Term) ->
ok;
diff --git a/lib/kernel/src/logger_filters.erl b/lib/kernel/src/logger_filters.erl
index 85928f0fd6..592ff28cc2 100644
--- a/lib/kernel/src/logger_filters.erl
+++ b/lib/kernel/src/logger_filters.erl
@@ -38,6 +38,7 @@ domain(#{meta:=Meta}=Log,{Action,Compare,MatchDomain})
(Compare==prefix_of orelse
Compare==starts_with orelse
Compare==equals orelse
+ Compare==differs orelse
Compare==no_domain) andalso
is_list(MatchDomain) ->
filter_domain(Compare,Meta,MatchDomain,on_match(Action,Log));
@@ -87,9 +88,12 @@ filter_domain(starts_with,#{domain:=Domain},MatchDomain,OnMatch) ->
is_prefix(MatchDomain,Domain,OnMatch);
filter_domain(equals,#{domain:=Domain},Domain,OnMatch) ->
OnMatch;
+filter_domain(differs,#{domain:=Domain},MatchDomain,OnMatch)
+ when Domain=/=MatchDomain ->
+ OnMatch;
filter_domain(Action,Meta,_,OnMatch) ->
case maps:is_key(domain,Meta) of
- false when Action==no_domain -> OnMatch;
+ false when Action==no_domain; Action==differs -> OnMatch;
_ -> ignore
end.
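A sketch of the new differs comparison (handler and filter ids are invented; stop as the action follows the log | stop convention used elsewhere in this patch):

%% On handler sasl_only, stop every event whose domain differs from the SASL
%% domain; events without a domain field also match 'differs' and are stopped.
logger:add_handler_filter(sasl_only, only_sasl_domain,
    {fun logger_filters:domain/2, {stop, differs, [beam,erlang,otp,sasl]}}).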
diff --git a/lib/kernel/src/logger_formatter.erl b/lib/kernel/src/logger_formatter.erl
index 8e954f8d98..602c666cc7 100644
--- a/lib/kernel/src/logger_formatter.erl
+++ b/lib/kernel/src/logger_formatter.erl
@@ -20,6 +20,7 @@
-module(logger_formatter).
-export([format/2]).
+-export([check_config/1]).
-include("logger_internal.hrl").
@@ -38,7 +39,8 @@
max_size=>pos_integer() | unlimited,
depth=>pos_integer() | unlimited,
template=>template(),
- utc=>boolean()}.
+ time_designator=>byte(),
+ time_offset=>integer()|[byte()]}.
format(#{level:=Level,msg:=Msg0,meta:=Meta},Config0)
when is_map(Config0) ->
Config = add_default_config(Config0),
@@ -195,16 +197,12 @@ truncate(String,Size) ->
String
end.
-format_time(Timestamp,Config) when is_integer(Timestamp) ->
- {Date,Time,Micro} = timestamp_to_datetimemicro(Timestamp,Config),
- format_time(Date,Time,Micro);
-format_time(Other,_Config) ->
- %% E.g. a string
- to_string(Other).
-
-format_time({Y,M,D},{H,Min,S},Micro) ->
- io_lib:format("~4w-~2..0w-~2..0w ~2w:~2..0w:~2..0w.~6..0w",
- [Y,M,D,H,Min,S,Micro]).
+format_time(Timestamp,#{time_offset:=Offset,time_designator:=Des})
+ when is_integer(Timestamp) ->
+ SysTime = Timestamp + erlang:time_offset(microsecond),
+ calendar:system_time_to_rfc3339(SysTime,[{unit,microsecond},
+ {offset,Offset},
+ {time_designator,Des}]).
%% Assuming this is monotonic time in microseconds
timestamp_to_datetimemicro(Timestamp,Config) when is_integer(Timestamp) ->
@@ -212,12 +210,12 @@ timestamp_to_datetimemicro(Timestamp,Config) when is_integer(Timestamp) ->
Micro = SysTime rem 1000000,
Sec = SysTime div 1000000,
UniversalTime = erlang:posixtime_to_universaltime(Sec),
- {Date,Time} =
- case Config of
- #{utc:=true} -> UniversalTime;
- _ -> erlang:universaltime_to_localtime(UniversalTime)
+ {{Date,Time},UtcStr} =
+ case offset_to_utc(maps:get(time_offset,Config)) of
+ true -> {UniversalTime,"UTC "};
+ _ -> {erlang:universaltime_to_localtime(UniversalTime),""}
end,
- {Date,Time,Micro}.
+ {Date,Time,Micro,UtcStr}.
format_mfa({M,F,A}) when is_atom(M), is_atom(F), is_integer(A) ->
atom_to_list(M)++":"++atom_to_list(F)++"/"++integer_to_list(A);
@@ -230,9 +228,11 @@ maybe_add_legacy_header(Level,
#{time:=Timestamp}=Meta,
#{legacy_header:=true}=Config) ->
#{title:=Title}=MyMeta = add_legacy_title(Level,maps:get(?MODULE,Meta,#{})),
- {{Y,Mo,D},{H,Mi,S},Micro} = timestamp_to_datetimemicro(Timestamp,Config),
- Header = io_lib:format("=~ts==== ~w-~s-~4w::~2..0w:~2..0w:~2..0w.~6..0w ~s===",
- [Title,D,month(Mo),Y,H,Mi,S,Micro,utcstr(Config)]),
+ {{Y,Mo,D},{H,Mi,S},Micro,UtcStr} =
+ timestamp_to_datetimemicro(Timestamp,Config),
+ Header =
+ io_lib:format("=~ts==== ~w-~s-~4w::~2..0w:~2..0w:~2..0w.~6..0w ~s===",
+ [Title,D,month(Mo),Y,H,Mi,S,Micro,UtcStr]),
Meta#{?MODULE=>MyMeta#{header=>Header}};
maybe_add_legacy_header(_,Meta,_) ->
Meta.
@@ -256,20 +256,20 @@ month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
-utcstr(#{utc:=true}) -> "UTC ";
-utcstr(_) -> "".
-
-add_default_config(#{utc:=_}=Config0) ->
+%% Ensure that all valid configuration parameters exist in the final
+%% configuration map
+add_default_config(Config0) ->
Default =
#{legacy_header=>false,
single_line=>true,
- chars_limit=>unlimited},
- MaxSize = get_max_size(maps:get(max_size,Config0,false)),
- Depth = get_depth(maps:get(depth,Config0,false)),
+ chars_limit=>unlimited,
+ time_designator=>$T},
+ MaxSize = get_max_size(maps:get(max_size,Config0,undefined)),
+ Depth = get_depth(maps:get(depth,Config0,undefined)),
+ Offset = get_offset(maps:get(time_offset,Config0,undefined)),
add_default_template(maps:merge(Default,Config0#{max_size=>MaxSize,
- depth=>Depth}));
-add_default_config(Config) ->
- add_default_config(Config#{utc=>logger:get_utc_config()}).
+ depth=>Depth,
+ time_offset=>Offset})).
add_default_template(#{template:=_}=Config) ->
Config;
@@ -283,12 +283,121 @@ default_template(#{single_line:=true}) ->
default_template(_) ->
?DEFAULT_FORMAT_TEMPLATE.
-get_max_size(false) ->
- logger:get_max_size();
+get_max_size(undefined) ->
+ unlimited;
get_max_size(S) ->
max(10,S).
-get_depth(false) ->
- logger:get_format_depth();
+get_depth(undefined) ->
+ error_logger:get_format_depth();
get_depth(S) ->
max(5,S).
+
+get_offset(undefined) ->
+ utc_to_offset(get_utc_config());
+get_offset(Offset) ->
+ Offset.
+
+utc_to_offset(true) ->
+ "Z";
+utc_to_offset(false) ->
+ "".
+
+get_utc_config() ->
+ %% SASL utc_log overrides stdlib config - in order to have uniform
+ %% timestamps in log messages
+ case application:get_env(sasl, utc_log) of
+ {ok, Val} when is_boolean(Val) -> Val;
+ _ ->
+ case application:get_env(stdlib, utc_log) of
+ {ok, Val} when is_boolean(Val) -> Val;
+ _ -> false
+ end
+ end.
+
+offset_to_utc(Z) when Z=:=0; Z=:="z"; Z=:="Z" ->
+ true;
+offset_to_utc([$+|Tz]) ->
+ case io_lib:fread("~d:~d", Tz) of
+ {ok, [0, 0], []} ->
+ true;
+ _ ->
+ false
+ end;
+offset_to_utc(_) ->
+ false.
+
+check_config(Config) when is_map(Config) ->
+ do_check_config(maps:to_list(Config));
+check_config(Config) ->
+ {error,{invalid_formatter_config,?MODULE,Config}}.
+
+do_check_config([{Type,L}|Config]) when Type == chars_limit;
+ Type == depth;
+ Type == max_size ->
+ case check_limit(L) of
+ ok -> do_check_config(Config);
+ error -> {error,{invalid_formatter_config,?MODULE,{Type,L}}}
+ end;
+do_check_config([{single_line,SL}|Config]) when is_boolean(SL) ->
+ do_check_config(Config);
+do_check_config([{legacy_header,LH}|Config]) when is_boolean(LH) ->
+ do_check_config(Config);
+do_check_config([{report_cb,RCB}|Config]) when is_function(RCB,1) ->
+ do_check_config(Config);
+do_check_config([{template,T}|Config]) when is_list(T) ->
+ case lists:all(fun(X) when is_atom(X) -> true;
+ (X) when is_tuple(X), is_atom(element(1,X)) -> true;
+ (X) when is_list(X) -> io_lib:printable_unicode_list(X);
+ (_) -> false
+ end,
+ T) of
+ true ->
+ do_check_config(Config);
+ false ->
+ {error,{invalid_formatter_template,?MODULE,T}}
+ end;
+do_check_config([{time_offset,Offset}|Config]) ->
+ case check_offset(Offset) of
+ ok ->
+ do_check_config(Config);
+ error ->
+ {error,{invalid_formatter_config,?MODULE,{time_offset,Offset}}}
+ end;
+do_check_config([{time_designator,Char}|Config]) when Char>=0, Char=<255 ->
+ case io_lib:printable_latin1_list([Char]) of
+ true ->
+ do_check_config(Config);
+ false ->
+ {error,{invalid_formatter_config,?MODULE,{time_designator,Char}}}
+ end;
+do_check_config([C|_]) ->
+ {error,{invalid_formatter_config,?MODULE,C}};
+do_check_config([]) ->
+ ok.
+
+check_limit(L) when is_integer(L), L>0 ->
+ ok;
+check_limit(unlimited) ->
+ ok;
+check_limit(_) ->
+ error.
+
+check_offset(I) when is_integer(I) ->
+ ok;
+check_offset(Tz) when Tz=:=""; Tz=:="Z"; Tz=:="z" ->
+ ok;
+check_offset([Sign|Tz]) when Sign=:=$+; Sign=:=$- ->
+ check_timezone(Tz);
+check_offset(_) ->
+ error.
+
+check_timezone(Tz) ->
+ try io_lib:fread("~d:~d", Tz) of
+ {ok, [_, _], []} ->
+ ok;
+ _ ->
+ error
+ catch _:_ ->
+ error
+ end.
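A sketch exercising the new check_config/1 (the values are arbitrary):

ok = logger_formatter:check_config(#{time_offset => "+02:00",
                                     time_designator => $\s,
                                     single_line => true}),
{error,{invalid_formatter_config,logger_formatter,{time_offset,"bad"}}} =
    logger_formatter:check_config(#{time_offset => "bad"}).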
diff --git a/lib/kernel/src/logger_h_common.erl b/lib/kernel/src/logger_h_common.erl
index 7caad366ae..901c4c0dad 100644
--- a/lib/kernel/src/logger_h_common.erl
+++ b/lib/kernel/src/logger_h_common.erl
@@ -135,7 +135,8 @@ call_cast_or_drop(Name, Bin) ->
_:{timeout,_} ->
?observe(Name,{dropped,1})
end;
- drop -> ?observe(Name,{dropped,1})
+ drop ->
+ ?observe(Name,{dropped,1})
catch
%% if the ETS table doesn't exist (maybe because of a
%% handler restart), we can only drop the request
@@ -152,12 +153,15 @@ check_load(State = #{id:=Name, mode := Mode,
flush_reqs_qlen := FlushQLen}) ->
{_,Mem} = process_info(self(), memory),
?observe(Name,{max_mem,Mem}),
- %% make sure the handler process doesn't get scheduled
- %% out between the message_queue_len check below and the
- %% action that follows (flush or write).
{_,QLen} = process_info(self(), message_queue_len),
?observe(Name,{max_qlen,QLen}),
-
+ %% When the handler process gets scheduled in, it's impossible
+ %% to predict the QLen. We could jump "up" arbitrarily from say
+ %% async to sync, async to drop, sync to flush, etc. However, when
+ %% the handler process manages the log requests (without flushing),
+ %% one after the other, we will move "down" from drop to sync and
+ %% from sync to async. This way we don't risk getting stuck in
+ %% drop or sync mode with an empty mailbox.
{Mode1,_NewDrops,_NewFlushes} =
if
QLen >= FlushQLen ->
@@ -292,7 +296,7 @@ overload_levels_ok(HandlerConfig) ->
TSQL = maps:get(toggle_sync_qlen, HandlerConfig, ?TOGGLE_SYNC_QLEN),
DNRQL = maps:get(drop_new_reqs_qlen, HandlerConfig, ?DROP_NEW_REQS_QLEN),
FRQL = maps:get(flush_reqs_qlen, HandlerConfig, ?FLUSH_REQS_QLEN),
- (TSQL < DNRQL) andalso (DNRQL < FRQL).
+ (DNRQL > 1) andalso (TSQL =< DNRQL) andalso (DNRQL =< FRQL).
error_notify(Term) ->
?internal_log(error, Term).
diff --git a/lib/kernel/src/logger_h_common.hrl b/lib/kernel/src/logger_h_common.hrl
index 89378dbb10..ed365ce6eb 100644
--- a/lib/kernel/src/logger_h_common.hrl
+++ b/lib/kernel/src/logger_h_common.hrl
@@ -124,7 +124,7 @@
%%% slow down execution and therefore should not be include in code
%%% to be officially released.
-%% -define(TEST_HOOKS, true).
+-define(TEST_HOOKS, true).
-ifdef(TEST_HOOKS).
-define(TEST_HOOKS_TAB, logger_h_test_hooks).
diff --git a/lib/kernel/src/logger_internal.hrl b/lib/kernel/src/logger_internal.hrl
index 8c0fc2725d..f9377259f3 100644
--- a/lib/kernel/src/logger_internal.hrl
+++ b/lib/kernel/src/logger_internal.hrl
@@ -22,7 +22,7 @@
-define(LOGGER_KEY,'$logger_config$').
-define(HANDLER_KEY,'$handler_config$').
-define(LOGGER_META_KEY,'$logger_metadata$').
--define(STANDARD_HANDLER, logger_std_h).
+-define(STANDARD_HANDLER, default).
-define(DEFAULT_HANDLER_FILTERS,
?DEFAULT_HANDLER_FILTERS([beam,erlang,otp])).
-define(DEFAULT_HANDLER_FILTERS(Domain),
diff --git a/lib/kernel/src/logger_server.erl b/lib/kernel/src/logger_server.erl
index a7f302ac8f..275b9c476f 100644
--- a/lib/kernel/src/logger_server.erl
+++ b/lib/kernel/src/logger_server.erl
@@ -27,7 +27,7 @@
add_filter/2, remove_filter/2,
set_module_level/2, reset_module_level/1,
cache_module_level/1,
- set_config/2, set_config/3]).
+ set_config/2, set_config/3, update_config/2]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -36,8 +36,9 @@
-include("logger_internal.hrl").
-define(SERVER, logger).
+-define(LOGGER_SERVER_TAG, '$logger_cb_process').
--record(state, {tid}).
+-record(state, {tid, async_req, async_req_queue}).
%%%===================================================================
%%% API
@@ -47,23 +48,18 @@ start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
add_handler(Id,Module,Config0) ->
- case sanity_check(logger,handlers,[Id]) of
- ok ->
- try check_mod(Module) of
+ try {check_id(Id),check_mod(Module)} of
+ {ok,ok} ->
+ case sanity_check(Id,Config0) of
ok ->
- case sanity_check(Id,Config0) of
- ok ->
- Default = default_config(Id),
- Config = maps:merge(Default,Config0),
- call({add_handler,Id,Module,Config});
- Error ->
- Error
- end
- catch throw:Error ->
- {error,Error}
- end;
- Error ->
- Error
+ Default = default_config(Id),
+ Config = maps:merge(Default,Config0),
+ call({add_handler,Id,Module,Config});
+ Error ->
+ Error
+ end
+ catch throw:Error ->
+ {error,Error}
end.
remove_handler(HandlerId) ->
@@ -96,10 +92,7 @@ cache_module_level(Module) ->
set_config(Owner,Key,Value) ->
- case sanity_check(Owner,Key,Value) of
- ok -> call({update_config,Owner,#{Key=>Value}});
- Error -> Error
- end.
+ update_config(Owner,#{Key=>Value}).
set_config(Owner,Config0) ->
case sanity_check(Owner,Config0) of
@@ -110,6 +103,14 @@ set_config(Owner,Config0) ->
Error
end.
+update_config(Owner, Config) ->
+ case sanity_check(Owner,Config) of
+ ok ->
+ call({update_config,Owner,Config});
+ Error ->
+ Error
+ end.
+
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
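The new update_config/2 merges the given map into the existing configuration, while set_config replaces it and re-adds the defaults; the corresponding API wrappers exercised by logger_SUITE below are logger:update_handler_config/2 and logger:update_logger_config/1. A short sketch (h1 is the handler id used by that suite):

    %% Merge: only the 'custom' key changes, everything else in h1's config is kept.
    ok = logger:update_handler_config(h1, #{custom => new_custom}),
    %% Replace: the logger config is overwritten and the defaults are filled in again.
    ok = logger:set_logger_config(#{filter_default => stop}).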
@@ -122,93 +123,99 @@ init([]) ->
logger_config:create(Tid,logger,LoggerConfig),
SimpleConfig0 = maps:merge(default_config(logger_simple),
#{filter_default=>stop,
- filters=>?DEFAULT_HANDLER_FILTERS,
- logger_simple=>#{buffer=>true}}),
+ filters=>?DEFAULT_HANDLER_FILTERS}),
%% If this fails, then the node should crash
{ok,SimpleConfig} =
logger_simple:adding_handler(logger_simple,SimpleConfig0),
logger_config:create(Tid,logger_simple,logger_simple,SimpleConfig),
- {ok, #state{tid=Tid}}.
-
-handle_call({add_handler,Id,Module,HConfig}, _From, #state{tid=Tid}=State) ->
- Reply =
- case logger_config:exist(Tid,Id) of
- true ->
- {error,{already_exist,Id}};
- false ->
- %% inform the handler
- case call_h(Module,adding_handler,[Id,HConfig],{ok,HConfig}) of
- {ok,HConfig1} ->
- logger_config:create(Tid,Id,Module,HConfig1),
- {ok,Config} = do_get_config(Tid,logger),
- Handlers = maps:get(handlers,Config,[]),
- do_set_config(Tid,logger,
- Config#{handlers=>[Id|Handlers]}),
- ok;
- {error,HReason} ->
- {error,{handler_not_added,HReason}}
- end
- end,
- {reply,Reply,State};
-handle_call({remove_handler,HandlerId}, _From, #state{tid=Tid}=State) ->
- Reply =
- case logger_config:get(Tid,HandlerId) of
- {ok,{Module,_}} ->
- {ok,Config} = do_get_config(Tid,logger),
- Handlers0 = maps:get(handlers,Config,[]),
- Handlers = lists:delete(HandlerId,Handlers0),
- %% inform the handler
- _ = call_h(Module,removing_handler,[HandlerId,Config],ok),
- do_set_config(Tid,logger,Config#{handlers=>Handlers}),
- logger_config:delete(Tid,HandlerId),
- ok;
- _ ->
- {error,{not_found,HandlerId}}
- end,
- {reply,Reply,State};
+ {ok, #state{tid=Tid, async_req_queue = queue:new()}}.
+
+handle_call({add_handler,Id,Module,HConfig}, From, #state{tid=Tid}=State) ->
+ case logger_config:exist(Tid,Id) of
+ true ->
+ {reply,{error,{already_exist,Id}},State};
+ false ->
+ call_h_async(
+ fun() ->
+ %% inform the handler
+ call_h(Module,adding_handler,[Id,HConfig],{ok,HConfig})
+ end,
+ fun({ok,HConfig1}) ->
+ %% We know that call_h would have loaded the module
+ %% if it existed, so it is safe here to call function_exported
+ %% to find out if this is a valid handler.
+ case erlang:function_exported(Module, log, 2) of
+ true ->
+ logger_config:create(Tid,Id,Module,HConfig1),
+ {ok,Config} = do_get_config(Tid,logger),
+ Handlers = maps:get(handlers,Config,[]),
+ do_set_config(Tid,logger,
+ Config#{handlers=>[Id|Handlers]});
+ false ->
+ {error,{invalid_handler,
+ {function_not_exported,
+ {Module,log,2}}}}
+ end;
+ ({error,HReason}) ->
+ {error,{handler_not_added,HReason}}
+ end,From,State)
+ end;
+handle_call({remove_handler,HandlerId}, From, #state{tid=Tid}=State) ->
+ case logger_config:get(Tid,HandlerId) of
+ {ok,{Module,HConfig}} ->
+ {ok,Config} = do_get_config(Tid,logger),
+ Handlers0 = maps:get(handlers,Config,[]),
+ Handlers = lists:delete(HandlerId,Handlers0),
+ call_h_async(
+ fun() ->
+ %% inform the handler
+ call_h(Module,removing_handler,[HandlerId,HConfig],ok)
+ end,
+ fun(_Res) ->
+ do_set_config(Tid,logger,Config#{handlers=>Handlers}),
+ logger_config:delete(Tid,HandlerId),
+ ok
+ end,From,State);
+ _ ->
+ {reply,{error,{not_found,HandlerId}},State}
+ end;
handle_call({add_filter,Id,Filter}, _From,#state{tid=Tid}=State) ->
Reply = do_add_filter(Tid,Id,Filter),
{reply,Reply,State};
handle_call({remove_filter,Id,FilterId}, _From, #state{tid=Tid}=State) ->
Reply = do_remove_filter(Tid,Id,FilterId),
{reply,Reply,State};
-handle_call({update_config,Id,NewConfig}, _From, #state{tid=Tid}=State) ->
- Reply =
- case logger_config:get(Tid,Id) of
- {ok,{Module,OldConfig}} ->
- Config = maps:merge(OldConfig,NewConfig),
- case call_h(Module,changing_config,[Id,OldConfig,Config],
- {ok,Config}) of
- {ok,Config1} ->
- do_set_config(Tid,Id,Config1);
- Error ->
- Error
- end;
- {ok,OldConfig} ->
- Config = maps:merge(OldConfig,NewConfig),
- do_set_config(Tid,Id,Config);
- Error ->
- Error
- end,
- {reply,Reply,State};
+handle_call({update_config,Id,NewConfig}, From, #state{tid=Tid}=State) ->
+ case logger_config:get(Tid,Id) of
+ {ok,{_Module,OldConfig}} ->
+ Config = maps:merge(OldConfig,NewConfig),
+ handle_call({set_config,Id,Config}, From, State);
+ {ok,OldConfig} ->
+ Config = maps:merge(OldConfig,NewConfig),
+ {reply,do_set_config(Tid,Id,Config),State};
+ Error ->
+ {reply,Error,State}
+ end;
handle_call({set_config,logger,Config}, _From, #state{tid=Tid}=State) ->
- Reply = do_set_config(Tid,logger,Config),
- {reply,Reply,State};
-handle_call({set_config,HandlerId,Config}, _From, #state{tid=Tid}=State) ->
- Reply =
- case logger_config:get(Tid,HandlerId) of
- {ok,{Module,OldConfig}} ->
- case call_h(Module,changing_config,[HandlerId,OldConfig,Config],
- {ok,Config}) of
- {ok,Config1} ->
- do_set_config(Tid,HandlerId,Config1);
- Error ->
- Error
- end;
- _ ->
- {error,{not_found,HandlerId}}
- end,
+ {ok,#{handlers:=Handlers}} = logger_config:get(Tid,logger),
+ Reply = do_set_config(Tid,logger,Config#{handlers=>Handlers}),
{reply,Reply,State};
+handle_call({set_config,HandlerId,Config}, From, #state{tid=Tid}=State) ->
+ case logger_config:get(Tid,HandlerId) of
+ {ok,{Module,OldConfig}} ->
+ call_h_async(
+ fun() ->
+ call_h(Module,changing_config,[HandlerId,OldConfig,Config],
+ {ok,Config})
+ end,
+ fun({ok,Config1}) ->
+ do_set_config(Tid,HandlerId,Config1);
+ (Error) ->
+ Error
+ end,From,State);
+ _ ->
+ {reply,{error,{not_found,HandlerId}},State}
+ end;
handle_call({set_module_level,Module,Level}, _From, #state{tid=Tid}=State) ->
Reply = logger_config:set_module_level(Tid,Module,Level),
{reply,Reply,State};
@@ -216,6 +223,8 @@ handle_call({reset_module_level,Module}, _From, #state{tid=Tid}=State) ->
Reply = logger_config:reset_module_level(Tid,Module),
{reply,Reply,State}.
+handle_cast({async_req_reply,_Ref,_Reply} = Reply,State) ->
+ call_h_reply(Reply,State);
handle_cast({cache_module_level,Module}, #state{tid=Tid}=State) ->
logger_config:cache_module_level(Tid,Module),
{noreply, State}.
@@ -235,11 +244,21 @@ handle_info({log,Level,Report,Meta}, State) ->
handle_info({Ref,_Reply},State) when is_reference(Ref) ->
%% Assuming this is a timed-out gen_server reply - ignoring
{noreply, State};
-handle_info(Unexpected,State) ->
+handle_info({'DOWN',_Ref,_Proc,_Pid,_Reason} = Down,State) ->
+ call_h_reply(Down,State);
+handle_info(Unexpected,State) when element(1,Unexpected) == 'EXIT' ->
+ %% The simple logger will send an 'EXIT' message when it is replaced.
+ %% We may as well ignore all 'EXIT' messages that we get.
?LOG_INTERNAL(debug,
[{logger,got_unexpected_message},
{process,?SERVER},
{message,Unexpected}]),
+ {noreply,State};
+handle_info(Unexpected,State) ->
+ ?LOG_INTERNAL(info,
+ [{logger,got_unexpected_message},
+ {process,?SERVER},
+ {message,Unexpected}]),
{noreply,State}.
terminate(_Reason, _State) ->
@@ -249,8 +268,11 @@ terminate(_Reason, _State) ->
%%% Internal functions
%%%===================================================================
call(Request) ->
- case whereis(?SERVER) of
- Pid when Pid==self() ->
+ Action = element(1,Request),
+ case get(?LOGGER_SERVER_TAG) of
+ true when
+ Action == add_handler; Action == remove_handler;
+ Action == update_config; Action == set_config ->
{error,{attempting_syncronous_call_to_self,Request}};
_ ->
gen_server:call(?SERVER,Request,?DEFAULT_LOGGER_CALL_TIMEOUT)
@@ -301,8 +323,7 @@ do_set_config(Tid,Id,Config) ->
default_config(logger) ->
#{level=>info,
filters=>[],
- filter_default=>log,
- handlers=>[]};
+ filter_default=>log};
default_config(_) ->
#{level=>info,
filters=>[],
@@ -333,9 +354,6 @@ get_type(Id) ->
check_config(Owner,[{level,Level}|Config]) ->
check_level(Level),
check_config(Owner,Config);
-check_config(logger,[{handlers,Handlers}|Config]) ->
- check_handlers(Handlers),
- check_config(logger,Config);
check_config(Owner,[{filters,Filters}|Config]) ->
check_filters(Filters),
check_config(Owner,Config);
@@ -373,14 +391,6 @@ check_level(Level) ->
throw({invalid_level,Level})
end.
-check_handlers([Id|Handlers]) ->
- check_id(Id),
- check_handlers(Handlers);
-check_handlers([]) ->
- ok;
-check_handlers(Handlers) ->
- throw({invalid_handlers,Handlers}).
-
check_filters([{Id,{Fun,_Args}}|Filters]) when is_atom(Id), is_function(Fun,2) ->
check_filters(Filters);
check_filters([Filter|_]) ->
@@ -395,40 +405,24 @@ check_filter_default(FD) when FD==stop; FD==log ->
check_filter_default(FD) ->
throw({invalid_filter_default,FD}).
-check_formatter({logger_formatter,Config}) when is_map(Config) ->
- check_logger_formatter_config(maps:to_list(Config));
-check_formatter({logger_formatter,Config}) ->
- throw({invalid_formatter_config,Config});
-check_formatter({Mod,_}) ->
- %% no knowledge of other formatters
- check_mod(Mod);
+check_formatter({Mod,Config}) ->
+ check_mod(Mod),
+ try Mod:check_config(Config) of
+ ok -> ok;
+ {error,Error} -> throw(Error)
+ catch
+ C:R:S ->
+ case {C,R,S} of
+ {error,undef,[{Mod,check_config,[Config],_}|_]} ->
+ ok;
+ _ ->
+ throw({callback_crashed,
+ {C,R,logger:filter_stacktrace(?MODULE,S)}})
+ end
+ end;
check_formatter(Formatter) ->
throw({invalid_formatter,Formatter}).
-
-check_logger_formatter_config([{template,T}|Config]) when is_list(T) ->
- case lists:all(fun(X) when is_atom(X) -> true;
- (X) when is_tuple(X), is_atom(element(1,X)) -> true;
- (X) when is_list(X) -> io_lib:printable_unicode_list(X);
- (_) -> false
- end,
- T) of
- true ->
- check_logger_formatter_config(Config);
- false ->
- throw({invalid_formatter_template,T})
- end;
-check_logger_formatter_config([{legacy_header,LH}|Config]) when is_boolean(LH) ->
- check_logger_formatter_config(Config);
-check_logger_formatter_config([{single_line,SL}|Config]) when is_boolean(SL) ->
- check_logger_formatter_config(Config);
-check_logger_formatter_config([{utc,Utc}|Config]) when is_boolean(Utc) ->
- check_logger_formatter_config(Config);
-check_logger_formatter_config([C|_]) ->
- throw({invalid_formatter_config,C});
-check_logger_formatter_config([]) ->
- ok.
-
call_h(Module, Function, Args, DefRet) ->
%% Not calling code:ensure_loaded + erlang:function_exported here,
%% since in some rare terminal cases, the code_server might not
@@ -440,7 +434,59 @@ call_h(Module, Function, Args, DefRet) ->
{error,undef,[{Module,Function,Args,_}|_]} ->
DefRet;
_ ->
- {error,{callback_crashed,
- {C,R,logger:filter_stacktrace(?MODULE,S)}}}
+ ST = logger:filter_stacktrace(?MODULE,S),
+ ?LOG_INTERNAL(error,
+ [{logger,callback_crashed},
+ {process,?SERVER},
+ {reason,{C,R,ST}}]),
+ {error,{callback_crashed,{C,R,ST}}}
end
end.
+
+%% There are all sorts of API functions that can cause deadlocks if called
+%% from the handler callbacks. So we spawn a process that does the request
+%% for the logger_server. There are still APIs that will cause problems,
+%% namely logger:add_handler.
+call_h_async(AsyncFun,PostFun,From,#state{ async_req = undefined } = State) ->
+ Parent = self(),
+ {Pid, Ref} = spawn_monitor(
+ fun() ->
+ put(?LOGGER_SERVER_TAG,true),
+ receive Ref -> Ref end,
+ gen_server:cast(Parent, {async_req_reply, Ref, AsyncFun()})
+ end),
+ Pid ! Ref,
+ {noreply,State#state{ async_req = {Ref,PostFun,From} }};
+call_h_async(AsyncFun,PostFun,From,#state{ async_req_queue = Q } = State) ->
+ {noreply,State#state{ async_req_queue = queue:in({AsyncFun,PostFun,From},Q) }}.
+
+call_h_reply({async_req_reply,Ref,Reply},
+ #state{ async_req = {Ref,PostFun,From}, async_req_queue = Q} = State) ->
+ erlang:demonitor(Ref,[flush]),
+ _ = gen_server:reply(From, PostFun(Reply)),
+ {Value,NewQ} = queue:out(Q),
+ NewState = State#state{ async_req = undefined,
+ async_req_queue = NewQ },
+ case Value of
+ {value,{AsyncFun,NPostFun,NFrom}} ->
+ call_h_async(AsyncFun,NPostFun,NFrom,NewState);
+ empty ->
+ {noreply,NewState}
+ end;
+call_h_reply({'DOWN',Ref,_Proc,Pid,Reason}, #state{ async_req = {Ref,_PostFun,_From}} = State) ->
+ %% This clause should only be triggered if someone explicitly sends an exit signal
+ %% to the spawned process. It is only here to make sure that the logger_server does
+ %% not deadlock if that happens.
+ ?LOG_INTERNAL(error,
+ [{logger,process_exited},
+ {process,Pid},
+ {reason,Reason}]),
+ call_h_reply(
+ {async_req_reply,Ref,{error,{logger_process_exited,Pid,Reason}}},
+ State);
+call_h_reply(Unexpected,State) ->
+ ?LOG_INTERNAL(info,
+ [{logger,got_unexpected_message},
+ {process,?SERVER},
+ {message,Unexpected}]),
+ {noreply,State}.
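call_h_async/4 runs each handler callback in a spawned worker and collects the result via call_h_reply/2, queueing further requests in async_req_queue, so a callback that re-enters the logger API no longer deadlocks the logger server. A sketch of the kind of callback this protects against (the handler module is hypothetical):

    %% With the old synchronous call_h, the logger API call inside
    %% changing_config/3 would block on the logger server that is
    %% already busy executing this very callback.
    -module(reentrant_h).
    -export([log/2, changing_config/3]).

    log(_LogEvent, _Config) ->
        ok.

    changing_config(_Id, _OldConfig, NewConfig) ->
        ok = logger:set_module_level(?MODULE, debug),   %% re-enters the logger API
        {ok, NewConfig}.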
diff --git a/lib/kernel/src/logger_simple.erl b/lib/kernel/src/logger_simple.erl
index a1b427b96c..5272455a2d 100644
--- a/lib/kernel/src/logger_simple.erl
+++ b/lib/kernel/src/logger_simple.erl
@@ -20,37 +20,18 @@
-module(logger_simple).
-export([adding_handler/2, removing_handler/2, log/2]).
--export([get_buffer/0]).
%% This module implements a simple handler for logger. It is the
%% default used during system start.
%%%-----------------------------------------------------------------
-%%% API
-get_buffer() ->
- case whereis(?MODULE) of
- undefined ->
- {error,noproc};
- Pid ->
- Ref = erlang:monitor(process,Pid),
- Pid ! {get_buffer,self()},
- receive
- {buffer,Buffer} ->
- erlang:demonitor(Ref,[flush]),
- {ok,Buffer};
- {'DOWN',Ref,process,Pid,Reason} ->
- {error,Reason}
- end
- end.
-
-%%%-----------------------------------------------------------------
%%% Logger callback
adding_handler(?MODULE,Config) ->
Me = self(),
case whereis(?MODULE) of
undefined ->
- {Pid,Ref} = spawn_opt(fun() -> init(Me,Config) end,
+ {Pid,Ref} = spawn_opt(fun() -> init(Me) end,
[link,monitor,{message_queue_data,off_heap}]),
receive
{'DOWN',Ref,process,Pid,Reason} ->
@@ -102,48 +83,44 @@ log(_,_) ->
%%%-----------------------------------------------------------------
%%% Process
-init(Starter,Config) ->
+init(Starter) ->
register(?MODULE,self()),
Starter ! {self(),started},
- BufferSize =
- case Config of
- #{?MODULE:=#{buffer:=true}} ->
- 10;
- _ ->
- infinity
- end,
- loop(#{buffer_size=>BufferSize,dropped=>0,buffer=>[]},infinity).
+ loop(#{buffer_size=>10,dropped=>0,buffer=>[]}).
-loop(Buffer,Timeout) ->
+loop(Buffer) ->
receive
stop ->
- ok;
- {get_buffer,From} ->
- loop(Buffer#{send_to=>From},0);
+ %% We replay the logger messages if there is
+ %% a default handler when the simple handler
+ %% is removed.
+ case logger:get_handler_config(default) of
+ {ok, _} ->
+ replay_buffer(Buffer);
+ _ ->
+ ok
+ end;
{log,#{msg:=_,meta:=#{time:=_}}=Log} ->
do_log(Log),
- loop(update_buffer(Buffer,Log),Timeout);
+ loop(update_buffer(Buffer,Log));
_ ->
%% Unexpected message - flush it!
- loop(Buffer,Timeout)
- after Timeout ->
- #{dropped:=D,buffer:=B,send_to:=Pid} = Buffer,
- LogList = lists:reverse(B) ++ drop_msg(D),
- Pid ! {buffer,LogList},
- loop(Buffer#{buffer_size=>infinity,
- dropped=>0,
- buffer=>[],
- send_to=>false},
- infinity)
+ loop(Buffer)
end.
-update_buffer(#{buffer_size:=infinity}=Buffer,_Log) ->
- Buffer;
update_buffer(#{buffer_size:=0,dropped:=D}=Buffer,_Log) ->
Buffer#{dropped=>D+1};
update_buffer(#{buffer_size:=S,buffer:=B}=Buffer,Log) ->
Buffer#{buffer_size=>S-1,buffer=>[Log|B]}.
+replay_buffer(#{ dropped := D, buffer := Buffer }) ->
+ lists:foreach(
+ fun F(#{msg := {Tag, Msg}} = L) when Tag =:= string; Tag =:= report ->
+ F(L#{ msg := Msg });
+ F(#{ level := Level, msg := Msg, meta := MD}) ->
+ logger:log(Level, Msg, MD)
+ end, lists:reverse(Buffer, drop_msg(D))).
+
drop_msg(0) ->
[];
drop_msg(N) ->
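The simple handler now unconditionally keeps the last ten events plus a drop counter and, when stopped, replays them through logger:log/3 only if a handler named default exists, appending whatever drop_msg/1 returns for the dropped count. A standalone sketch of the bounded-buffer bookkeeping shown above (module name is hypothetical):

    %% Keep at most Size events, count everything beyond that as dropped.
    -module(simple_buffer_sketch).
    -export([new/1, add/2]).

    new(Size) ->
        #{buffer_size => Size, dropped => 0, buffer => []}.

    add(#{buffer_size := 0, dropped := D} = Buffer, _Event) ->
        Buffer#{dropped => D + 1};
    add(#{buffer_size := S, buffer := B} = Buffer, Event) ->
        Buffer#{buffer_size => S - 1, buffer => [Event | B]}.

Feeding twelve events into simple_buffer_sketch:new(10) leaves ten in the buffer and dropped set to 2.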
diff --git a/lib/kernel/src/logger_std_h.erl b/lib/kernel/src/logger_std_h.erl
index 31edcfea8b..e5e0febc88 100644
--- a/lib/kernel/src/logger_std_h.erl
+++ b/lib/kernel/src/logger_std_h.erl
@@ -257,10 +257,11 @@ init([Name, Config,
file_ctrl_sync => FileCtrlSyncInt,
last_qlen => 0,
last_log_ts => T0,
+ last_op => sync,
burst_win_ts => T0,
burst_msg_count => 0}),
proc_lib:init_ack({ok,self()}),
- gen_server:cast(self(), {repeated_filesync,T0}),
+ gen_server:cast(self(), repeated_filesync),
enter_loop(Config, State1);
Error ->
logger_h_common:error_notify({init_handler,Name,Error}),
@@ -310,12 +311,11 @@ handle_call(filesync, _From, State = #{type := Type,
if is_atom(Type) ->
{reply, ok, State};
true ->
- {reply, file_ctrl_filesync_sync(FileCtrlPid), State}
+ {reply, file_ctrl_filesync_sync(FileCtrlPid), State#{last_op=>sync}}
end;
handle_call({change_config,_OldConfig,NewConfig}, _From,
- State = #{filesync_repeat_interval := FSyncInt0,
- last_log_ts := LastLogTS}) ->
+ State = #{filesync_repeat_interval := FSyncInt0}) ->
HConfig = maps:get(?MODULE, NewConfig, #{}),
State1 = maps:merge(State, HConfig),
case logger_h_common:overload_levels_ok(State1) of
@@ -334,8 +334,7 @@ handle_call({change_config,_OldConfig,NewConfig}, _From,
_ = logger_h_common:cancel_timer(maps:get(rep_sync_tref,
State,
undefined)),
- gen_server:cast(self(), {repeated_filesync,
- LastLogTS})
+ gen_server:cast(self(), repeated_filesync)
end,
{reply, ok, State1};
false ->
@@ -365,24 +364,24 @@ handle_cast({log, Bin}, State) ->
%% clause gets called repeatedly by the handler. In order to
%% guarantee that a filesync *always* happens after the last log
%% request, the repeat operation must be active!
-handle_cast({repeated_filesync,LastLogTS0},
+handle_cast(repeated_filesync,
State = #{type := Type,
file_ctrl_pid := FileCtrlPid,
filesync_repeat_interval := FSyncInt,
- last_log_ts := LastLogTS1}) ->
+ last_op := LastOp}) ->
State1 =
if not is_atom(Type), is_integer(FSyncInt) ->
%% only do filesync if something has been
%% written since last time we checked
- if LastLogTS1 == LastLogTS0 ->
+ if LastOp == sync ->
ok;
true ->
file_ctrl_filesync_async(FileCtrlPid)
end,
{ok,TRef} =
timer:apply_after(FSyncInt, gen_server,cast,
- [self(),{repeated_filesync,LastLogTS1}]),
- State#{rep_sync_tref => TRef};
+ [self(),repeated_filesync]),
+ State#{rep_sync_tref => TRef, last_op => sync};
true ->
State
end,
@@ -600,6 +599,7 @@ write(Name, Mode, T1, Bin, _CallOrCast,
State1#{mode => Mode1,
last_qlen := LastQLen1,
last_log_ts => T1,
+ last_op => write,
burst_win_ts => BurstWinT,
burst_msg_count => BurstMsgCount1,
file_ctrl_sync =>
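The periodic filesync no longer compares log timestamps; the handler records last_op (write or sync) and the repeated_filesync cast only issues a sync if something was written since the previous one. A sketch of that decision, lifted out of the gen_server (module and function names are hypothetical):

    -module(filesync_sketch).
    -export([maybe_filesync/2]).

    maybe_filesync(#{last_op := sync} = State, _SyncFun) ->
        State;                                  %% nothing written since last sync
    maybe_filesync(State, SyncFun) ->
        ok = SyncFun(),                         %% e.g. an async filesync request
        State#{last_op => sync}.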
diff --git a/lib/kernel/test/Makefile b/lib/kernel/test/Makefile
index 8599a3d814..2f637ca9de 100644
--- a/lib/kernel/test/Makefile
+++ b/lib/kernel/test/Makefile
@@ -79,6 +79,7 @@ MODULES= \
logger_legacy_SUITE \
logger_simple_SUITE \
logger_std_h_SUITE \
+ logger_test_lib \
os_SUITE \
pg2_SUITE \
seq_trace_SUITE \
diff --git a/lib/kernel/test/application_SUITE.erl b/lib/kernel/test/application_SUITE.erl
index c00fb44c46..988f26280f 100644
--- a/lib/kernel/test/application_SUITE.erl
+++ b/lib/kernel/test/application_SUITE.erl
@@ -1603,8 +1603,7 @@ get_key(Conf) when is_list(Conf) ->
{ok, [{init, [kalle]}, {takeover, []}, {go, [sune]}]} =
rpc:call(Cp1, application, get_key, [appinc, start_phases]),
{ok, Env} = rpc:call(Cp1, application, get_key, [appinc ,env]),
- [{included_applications,[appinc1,appinc2]},
- {own2,val2},{own_env1,value1}] = lists:sort(Env),
+ [{own2,val2},{own_env1,value1}] = lists:sort(Env),
{ok, []} = rpc:call(Cp1, application, get_key, [appinc, modules]),
{ok, {application_starter, [ch_sup, {appinc, 41, 43}] }} =
rpc:call(Cp1, application, get_key, [appinc, mod]),
@@ -1625,8 +1624,7 @@ get_key(Conf) when is_list(Conf) ->
{mod, {application_starter, [ch_sup, {appinc, 41, 43}] }},
{start_phases, [{init, [kalle]}, {takeover, []}, {go, [sune]}]}]} =
rpc:call(Cp1, application, get_all_key, [appinc]),
- [{included_applications,[appinc1,appinc2]},
- {own2,val2},{own_env1,value1}] = lists:sort(Env),
+ [{own2,val2},{own_env1,value1}] = lists:sort(Env),
{ok, "Test of new app file, including appnew"} =
gen_server:call({global, {ch,41}}, {get_pid_key, description}),
@@ -1643,8 +1641,7 @@ get_key(Conf) when is_list(Conf) ->
{ok, [{init, [kalle]}, {takeover, []}, {go, [sune]}]} =
gen_server:call({global, {ch,41}}, {get_pid_key, start_phases}),
{ok, Env} = gen_server:call({global, {ch,41}}, {get_pid_key, env}),
- [{included_applications,[appinc1,appinc2]},
- {own2,val2},{own_env1,value1}] = lists:sort(Env),
+ [{own2,val2},{own_env1,value1}] = lists:sort(Env),
{ok, []} =
gen_server:call({global, {ch,41}}, {get_pid_key, modules}),
{ok, {application_starter, [ch_sup, {appinc, 41, 43}] }} =
@@ -1671,8 +1668,7 @@ get_key(Conf) when is_list(Conf) ->
{mod, {application_starter, [ch_sup, {appinc, 41, 43}] }},
{start_phases, [{init, [kalle]}, {takeover, []}, {go, [sune]}]}]} =
gen_server:call({global, {ch,41}}, get_pid_all_key),
- [{included_applications,[appinc1,appinc2]},
- {own2,val2},{own_env1,value1}] = lists:sort(Env),
+ [{own2,val2},{own_env1,value1}] = lists:sort(Env),
stop_node_nice(Cp1),
ok.
diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl
index 0470f09f29..9c6712ad74 100644
--- a/lib/kernel/test/erl_distribution_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_SUITE.erl
@@ -244,7 +244,7 @@ illegal(Name) ->
test_node(Name) ->
test_node(Name, false).
test_node(Name, Illigal) ->
- ProgName = atom_to_list(lib:progname()),
+ ProgName = ct:get_progname(),
Command = ProgName ++ " -noinput " ++ long_or_short() ++ Name ++
" -eval \"net_adm:ping('" ++ atom_to_list(node()) ++ "')\"" ++
case Illigal of
diff --git a/lib/kernel/test/error_logger_warn_SUITE.erl b/lib/kernel/test/error_logger_warn_SUITE.erl
index a8087e11f9..ef55a2d339 100644
--- a/lib/kernel/test/error_logger_warn_SUITE.erl
+++ b/lib/kernel/test/error_logger_warn_SUITE.erl
@@ -480,9 +480,12 @@ rb_utc() ->
UtcLog=case application:get_env(sasl,utc_log) of
{ok,true} ->
true;
- _AllOthers ->
+ {ok,false} ->
application:set_env(sasl,utc_log,true),
- false
+ false;
+ undefined ->
+ application:set_env(sasl,utc_log,true),
+ undefined
end,
application:start(sasl),
rb:start([{report_dir, rd()}]),
@@ -494,7 +497,12 @@ rb_utc() ->
Sum=one_rb_findstr([],"UTC"),
rb:stop(),
application:stop(sasl),
- application:set_env(sasl,utc_log,UtcLog),
+ case UtcLog of
+ undefined ->
+ application:unset_env(sasl,utc_log);
+ _ ->
+ application:set_env(sasl,utc_log,UtcLog)
+ end,
stop_node(Node),
ok.
diff --git a/lib/kernel/test/heart_SUITE.erl b/lib/kernel/test/heart_SUITE.erl
index 22db24de5f..e95635b800 100644
--- a/lib/kernel/test/heart_SUITE.erl
+++ b/lib/kernel/test/heart_SUITE.erl
@@ -168,7 +168,7 @@ reboot(Config) when is_list(Config) ->
{ok, Node} = start_check(slave, ?UNIQ_NODE_NAME),
ok = rpc:call(Node, heart, set_cmd,
- [atom_to_list(lib:progname()) ++
+ [ct:get_progname() ++
" -noshell -heart " ++ name(Node) ++ "&"]),
rpc:call(Node, init, reboot, []),
receive
@@ -203,7 +203,7 @@ node_start_immediately_after_crash_test(Config) when is_list(Config) ->
[{"ERL_CRASH_DUMP_SECONDS", "0"}]),
ok = rpc:call(Node, heart, set_cmd,
- [atom_to_list(lib:progname()) ++
+ [ct:get_progname() ++
" -noshell -heart " ++ name(Node) ++ "&"]),
Mod = exhaust_atoms,
@@ -254,7 +254,7 @@ node_start_soon_after_crash_test(Config) when is_list(Config) ->
[{"ERL_CRASH_DUMP_SECONDS", "10"}]),
ok = rpc:call(Node, heart, set_cmd,
- [atom_to_list(lib:progname()) ++
+ [ct:get_progname() ++
" -noshell -heart " ++ name(Node) ++ "&"]),
Mod = exhaust_atoms,
@@ -309,7 +309,7 @@ set_cmd(Config) when is_list(Config) ->
clear_cmd(Config) when is_list(Config) ->
{ok, Node} = start_check(slave, ?UNIQ_NODE_NAME),
ok = rpc:call(Node, heart, set_cmd,
- [atom_to_list(lib:progname()) ++
+ [ct:get_progname() ++
" -noshell -heart " ++ name(Node) ++ "&"]),
rpc:call(Node, init, reboot, []),
receive
diff --git a/lib/kernel/test/kernel_config_SUITE.erl b/lib/kernel/test/kernel_config_SUITE.erl
index 9a4578917d..a21020ff97 100644
--- a/lib/kernel/test/kernel_config_SUITE.erl
+++ b/lib/kernel/test/kernel_config_SUITE.erl
@@ -76,7 +76,7 @@ sync(Conf) when is_list(Conf) ->
%% Reset wall_clock
{T1,_} = erlang:statistics(wall_clock),
io:format("~p~n", [{t1, T1}]),
- Command = lists:concat([lib:progname(),
+ Command = lists:append([ct:get_progname(),
" -detached -sname cp1 ",
"-config ", Config,
" -env ERL_CRASH_DUMP erl_crash_dump.cp1"]),
diff --git a/lib/kernel/test/logger_SUITE.erl b/lib/kernel/test/logger_SUITE.erl
index f311a9c7ed..e602fa2576 100644
--- a/lib/kernel/test/logger_SUITE.erl
+++ b/lib/kernel/test/logger_SUITE.erl
@@ -40,18 +40,18 @@ suite() ->
[{timetrap,{seconds,30}}].
init_per_suite(Config) ->
- case logger:get_handler_config(logger_std_h) of
+ case logger:get_handler_config(?STANDARD_HANDLER) of
{ok,StdH} ->
- ok = logger:remove_handler(logger_std_h),
- [{logger_std_h,StdH}|Config];
+ ok = logger:remove_handler(?STANDARD_HANDLER),
+ [{default_handler,StdH}|Config];
_ ->
Config
end.
end_per_suite(Config) ->
- case ?config(logger_std_h,Config) of
+ case ?config(default_handler,Config) of
{HMod,HConfig} ->
- ok = logger:add_handler(logger_std_h,HMod,HConfig);
+ ok = logger:add_handler(?STANDARD_HANDLER,HMod,HConfig);
_ ->
ok
end.
@@ -105,12 +105,12 @@ start_stop(_Config) ->
add_remove_handler(_Config) ->
register(callback_receiver,self()),
- {ok,#{handlers:=Hs0}} = logger:get_logger_config(),
+ #{handlers:=Hs0} = logger:i(),
{error,{not_found,h1}} = logger:get_handler_config(h1),
ok = logger:add_handler(h1,?MODULE,#{}),
[add] = test_server:messages_get(),
- {ok,#{handlers:=Hs}} = logger:get_logger_config(),
- [h1|Hs0] = Hs,
+ #{handlers:=Hs} = logger:i(),
+ {value,_,Hs0} = lists:keytake(h1,1,Hs),
{ok,{?MODULE,#{level:=info,filters:=[],filter_default:=log}}} = % defaults
logger:get_handler_config(h1),
ok = logger:set_handler_config(h1,filter_default,stop),
@@ -124,7 +124,7 @@ add_remove_handler(_Config) ->
ok = check_logged(info,"hello",[],?MY_LOC(1)),
ok = logger:remove_handler(h1),
[remove] = test_server:messages_get(),
- {ok,#{handlers:=Hs0}} = logger:get_logger_config(),
+ #{handlers:=Hs0} = logger:i(),
{error,{not_found,h1}} = logger:get_handler_config(h1),
{error,{not_found,h1}} = logger:remove_handler(h1),
logger:info("hello",[]),
@@ -218,33 +218,52 @@ change_config(_Config) ->
{ok,{?MODULE,#{level:=info,filter_default:=stop}=C2}} =
logger:get_handler_config(h1),
false = maps:is_key(custom,C2),
- {error,fail} = logger:set_handler_config(h1,#{fail=>true}),
+ {error,fail} = logger:set_handler_config(h1,#{conf_call=>fun() -> {error,fail} end}),
{error,{attempting_syncronous_call_to_self,_}} =
logger:set_handler_config(
- h1,#{call=>fun() -> logger:set_module_level(?MODULE,debug) end}),
+ h1,#{conf_call=>fun() -> logger:set_handler_config(?MODULE,#{}) end}),
+ ok =
+ logger:set_handler_config(
+ h1,#{conf_call=>fun() -> logger:set_module_level(?MODULE,debug) end}),
{ok,{?MODULE,C2}} = logger:get_handler_config(h1),
- %% Change one key only
- {error,fail} = logger:set_handler_config(h1,fail,true),
+ %% Change handler config: Single key
+ {error,fail} = logger:set_handler_config(h1,conf_call,fun() -> {error,fail} end),
ok = logger:set_handler_config(h1,custom,custom),
[changing_config] = test_server:messages_get(),
{ok,{?MODULE,#{custom:=custom}=C3}} = logger:get_handler_config(h1),
C2 = maps:remove(custom,C3),
+ %% Change handler config: Map
+ ok = logger:update_handler_config(h1,#{custom=>new_custom}),
+ [changing_config] = test_server:messages_get(),
+ {ok,{_,C4}} = logger:get_handler_config(h1),
+ C4 = C3#{custom:=new_custom},
+
+ %% Change logger config: Single key
+ {ok,LConfig0} = logger:get_logger_config(),
+ ok = logger:set_logger_config(level,warning),
+ {ok,LConfig1} = logger:get_logger_config(),
+ LConfig1 = LConfig0#{level:=warning},
+
+ %% Change logger config: Map
+ ok = logger:update_logger_config(#{level=>error}),
+ {ok,LConfig2} = logger:get_logger_config(),
+ LConfig2 = LConfig1#{level:=error},
+
%% Overwrite logger config - check that defaults are added
- {ok,LConfig} = logger:get_logger_config(),
ok = logger:set_logger_config(#{filter_default=>stop}),
- {ok,#{level:=info,filters:=[],handlers:=[],filter_default:=stop}=LC1} =
- logger:get_logger_config(),
- 4 = maps:size(LC1),
-
- %% Change one key only
- ok = logger:set_logger_config(handlers,[h1]),
- {ok,#{level:=info,filters:=[],handlers:=[h1],filter_default:=stop}} =
+ {ok,#{level:=info,filters:=[],filter_default:=stop}=LC1} =
logger:get_logger_config(),
+ 3 = maps:size(LC1),
+ %% Check that internal 'handlers' field has not been changed
+ #{handlers:=HCs} = logger:i(),
+ HIds1 = [Id || {Id,_,_} <- HCs],
+ {ok,#{handlers:=HIds2}} = logger_config:get(?LOGGER_TABLE,logger),
+ HIds1 = lists:sort(HIds2),
%% Cleanup
- ok = logger:set_logger_config(LConfig),
+ ok = logger:set_logger_config(LConfig0),
[] = test_server:messages_get(),
ok.
@@ -425,6 +444,7 @@ filter_failed(cleanup,_Config) ->
ok.
handler_failed(_Config) ->
+ register(callback_receiver,self()),
{error,{invalid_id,1}} = logger:add_handler(1,?MODULE,#{}),
{error,{invalid_module,"nomodule"}} = logger:add_handler(h1,"nomodule",#{}),
{error,{invalid_handler_config,bad}} = logger:add_handler(h1,?MODULE,bad),
@@ -434,26 +454,62 @@ handler_failed(_Config) ->
logger:add_handler(h1,?MODULE,#{filter_default=>true}),
{error,{invalid_formatter,[]}} =
logger:add_handler(h1,?MODULE,#{formatter=>[]}),
- ok = logger:add_handler(h1,nomodule,#{filter_default=>log}),
+ {error,{invalid_handler,_}} = logger:add_handler(h1,nomodule,#{filter_default=>log}),
logger:info(?map_rep),
check_no_log(),
- #{logger:=#{handlers:=Ids1},
- handlers:=H1} = logger:i(),
- false = lists:member(h1,Ids1),
+ #{handlers:=H1} = logger:i(),
false = lists:keymember(h1,1,H1),
{error,{not_found,h1}} = logger:remove_handler(h1),
- ok = logger:add_handler(h2,?MODULE,#{filter_default=>log,crash=>true}),
+ ok = logger:add_handler(h2,?MODULE,#{filter_default=>log,log_call=>fun() -> a = b end}),
{error,{already_exist,h2}} = logger:add_handler(h2,othermodule,#{}),
+ [add] = test_server:messages_get(),
logger:info(?map_rep),
- check_no_log(),
- #{logger:=#{handlers:=Ids2},
- handlers:=H2} = logger:i(),
- false = lists:member(h2,Ids2),
+ [remove] = test_server:messages_get(),
+ #{handlers:=H2} = logger:i(),
false = lists:keymember(h2,1,H2),
{error,{not_found,h2}} = logger:remove_handler(h2),
+ CallAddHandler = fun() -> logger:add_handler(h2,?MODULE,#{}) end,
+ CrashHandler = fun() -> a = b end,
+ KillHandler = fun() -> exit(self(), die) end,
+
+ {error,{handler_not_added,{attempting_syncronous_call_to_self,_}}} =
+ logger:add_handler(h1,?MODULE,#{add_call=>CallAddHandler}),
+ {error,{handler_not_added,{callback_crashed,_}}} =
+ logger:add_handler(h1,?MODULE,#{add_call=>CrashHandler}),
+ {error,{handler_not_added,{logger_process_exited,_,die}}} =
+ logger:add_handler(h1,?MODULE,#{add_call=>KillHandler}),
+
+ check_no_log(),
+ ok = logger:add_handler(h1,?MODULE,#{}),
+ {error,{attempting_syncronous_call_to_self,_}} =
+ logger:set_handler_config(h1,#{conf_call=>CallAddHandler}),
+ {error,{callback_crashed,_}} =
+ logger:set_handler_config(h1,#{conf_call=>CrashHandler}),
+ {error,{logger_process_exited,_,die}} =
+ logger:set_handler_config(h1,#{conf_call=>KillHandler}),
+
+ {error,{attempting_syncronous_call_to_self,_}} =
+ logger:set_handler_config(h1,conf_call,CallAddHandler),
+ {error,{callback_crashed,_}} =
+ logger:set_handler_config(h1,conf_call,CrashHandler),
+ {error,{logger_process_exited,_,die}} =
+ logger:set_handler_config(h1,conf_call,KillHandler),
+
+ ok = logger:remove_handler(h1),
+ [add,remove] = test_server:messages_get(),
+
+ check_no_log(),
+ ok = logger:add_handler(h1,?MODULE,#{rem_call=>CallAddHandler}),
+ ok = logger:remove_handler(h1),
+ ok = logger:add_handler(h1,?MODULE,#{rem_call=>CrashHandler}),
+ ok = logger:remove_handler(h1),
+ ok = logger:add_handler(h1,?MODULE,#{rem_call=>KillHandler}),
+ ok = logger:remove_handler(h1),
+ [add,add,add] = test_server:messages_get(),
+
ok.
handler_failed(cleanup,_Config) ->
@@ -466,10 +522,6 @@ config_sanity_check(_Config) ->
{error,{invalid_filter_default,bad}} =
logger:set_logger_config(filter_default,bad),
{error,{invalid_level,bad}} = logger:set_logger_config(level,bad),
- {error,{invalid_handlers,bad}} = logger:set_logger_config(handlers,bad),
- {error,{invalid_id,{bad,bad}}} =
- logger:set_logger_config(handlers,[{bad,bad}]),
- {error,{invalid_id,"bad"}} = logger:set_logger_config(handlers,["bad"]),
{error,{invalid_filters,bad}} = logger:set_logger_config(filters,bad),
{error,{invalid_filter,bad}} = logger:set_logger_config(filters,[bad]),
{error,{invalid_filter,{_,_}}} =
@@ -499,29 +551,96 @@ config_sanity_check(_Config) ->
logger:set_handler_config(h1,formatter,bad),
{error,{invalid_module,{bad}}} =
logger:set_handler_config(h1,formatter,{{bad},cfg}),
- {error,{invalid_formatter_config,bad}} =
+ {error,{invalid_formatter_config,logger_formatter,bad}} =
logger:set_handler_config(h1,formatter,{logger_formatter,bad}),
- {error,{invalid_formatter_config,{bad,bad}}} =
+ {error,{invalid_formatter_config,logger_formatter,{bad,bad}}} =
logger:set_handler_config(h1,formatter,{logger_formatter,#{bad=>bad}}),
- {error,{invalid_formatter_config,{template,bad}}} =
+ {error,{invalid_formatter_config,logger_formatter,{template,bad}}} =
logger:set_handler_config(h1,formatter,{logger_formatter,
#{template=>bad}}),
- {error,{invalid_formatter_template,[1]}} =
+ {error,{invalid_formatter_template,logger_formatter,[1]}} =
logger:set_handler_config(h1,formatter,{logger_formatter,
#{template=>[1]}}),
ok = logger:set_handler_config(h1,formatter,{logger_formatter,
#{template=>[]}}),
- {error,{invalid_formatter_config,{single_line,bad}}} =
+ {error,{invalid_formatter_config,logger_formatter,{single_line,bad}}} =
logger:set_handler_config(h1,formatter,{logger_formatter,
#{single_line=>bad}}),
ok = logger:set_handler_config(h1,formatter,{logger_formatter,
#{single_line=>true}}),
- {error,{invalid_formatter_config,{legacy_header,bad}}} =
+ {error,{invalid_formatter_config,logger_formatter,{legacy_header,bad}}} =
logger:set_handler_config(h1,formatter,{logger_formatter,
#{legacy_header=>bad}}),
ok = logger:set_handler_config(h1,formatter,{logger_formatter,
#{legacy_header=>true}}),
+ {error,{invalid_formatter_config,logger_formatter,{report_cb,bad}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{report_cb=>bad}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{report_cb=>fun(R) ->
+ {"~p",[R]}
+ end}}),
+ {error,{invalid_formatter_config,logger_formatter,{chars_limit,bad}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{chars_limit=>bad}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{chars_limit=>unlimited}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{chars_limit=>4}}),
+ {error,{invalid_formatter_config,logger_formatter,{depth,bad}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{depth=>bad}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{depth=>unlimited}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{depth=>4}}),
+ {error,{invalid_formatter_config,logger_formatter,{max_size,bad}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{max_size=>bad}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{max_size=>unlimited}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{max_size=>4}}),
+ ok = logger:set_handler_config(h1,formatter,{module,config}),
+ {error,{callback_crashed,{error,{badmatch,3},[{?MODULE,check_config,1,_}]}}} =
+ logger:set_handler_config(h1,formatter,{?MODULE,crash}),
ok = logger:set_handler_config(h1,custom,custom),
+
+ %% Old utc parameter is no longer allowed (replaced by time_offset)
+ {error,{invalid_formatter_config,logger_formatter,{utc,true}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{utc=>true}}),
+ {error,{invalid_formatter_config,logger_formatter,{time_offset,bad}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>bad}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>0}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>""}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>"Z"}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>"z"}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>"-0:0"}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>"+10:13"}}),
+
+ {error,{invalid_formatter_config,logger_formatter,{time_offset,"+0"}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_offset=>"+0"}}),
+
+ {error,{invalid_formatter_config,logger_formatter,{time_designator,bad}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_designator=>bad}}),
+ {error,{invalid_formatter_config,logger_formatter,{time_designator,"s"}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_designator=>"s"}}),
+ {error,{invalid_formatter_config,logger_formatter,{time_designator,0}}} =
+ logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_designator=>0}}),
+ ok = logger:set_handler_config(h1,formatter,{logger_formatter,
+ #{time_designator=>$\s}}),
ok.
config_sanity_check(cleanup,_Config) ->
@@ -720,16 +839,19 @@ check_maps(Expected,Got,What) ->
end.
%% Handler
+adding_handler(_Id,#{add_call:=Fun}) ->
+ Fun();
adding_handler(_Id,Config) ->
maybe_send(add),
{ok,Config}.
+
+removing_handler(_Id,#{rem_call:=Fun}) ->
+ Fun();
removing_handler(_Id,_Config) ->
maybe_send(remove),
ok.
-changing_config(_Id,_Old,#{call:=Fun}) ->
+changing_config(_Id,_Old,#{conf_call:=Fun}) ->
Fun();
-changing_config(_Id,_Old,#{fail:=true}) ->
- {error,fail};
changing_config(_Id,_Old,Config) ->
maybe_send(changing_config),
{ok,Config}.
@@ -740,8 +862,8 @@ maybe_send(Msg) ->
Pid -> Pid ! Msg
end.
-log(_Log,#{crash:=true}) ->
- a=b;
+log(_Log,#{log_call:=Fun}) ->
+ Fun();
log(Log,Config) ->
TcProc = maps:get(tc_proc,Config,self()),
TcProc ! {Log,Config},
@@ -829,3 +951,8 @@ test_macros(emergency=Level) ->
%%% Called by macro ?TRY(X)
my_try(Fun) ->
try Fun() catch C:R -> {C,R} end.
+
+check_config(crash) ->
+ erlang:error({badmatch,3});
+check_config(_) ->
+ ok.
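With formatter validation delegated to the formatter module's check_config/1 (a module without that callback is accepted as-is), the assertions above pin down which logger_formatter options are now legal: report_cb, chars_limit, depth, max_size, time_offset (0, "", "Z"/"z" or "+HH:MM"/"-HH:MM") and a single-character time_designator, while the old utc flag is rejected. A hedged snippet combining values the suite accepts individually (h1 is the suite's handler id; the per-key checks should not care whether they are set together or one at a time):

    ok = logger:set_handler_config(
           h1, formatter,
           {logger_formatter,
            #{report_cb => fun(R) -> {"~p", [R]} end,
              chars_limit => 4,
              depth => 4,
              max_size => unlimited,
              time_offset => "+10:13",
              time_designator => $\s}}).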
diff --git a/lib/kernel/test/logger_disk_log_h_SUITE.erl b/lib/kernel/test/logger_disk_log_h_SUITE.erl
index 63e5b56021..3aa1c3557b 100644
--- a/lib/kernel/test/logger_disk_log_h_SUITE.erl
+++ b/lib/kernel/test/logger_disk_log_h_SUITE.erl
@@ -31,7 +31,8 @@
end).
suite() ->
- [{timetrap,{seconds,30}}].
+ [{timetrap,{seconds,30}},
+ {ct_hooks,[logger_test_lib]}].
init_per_suite(Config) ->
timer:start(), % to avoid progress report
@@ -327,7 +328,8 @@ formatter_fail(Config) ->
logger:add_handler(Name, logger_disk_log_h, HConfig),
Pid = whereis(Name),
true = is_pid(Pid),
- {ok,#{handlers:=H}} = logger:get_logger_config(),
+ #{handlers:=HC1} = logger:i(),
+ H = [Id || {Id,_,_} <- HC1],
true = lists:member(Name,H),
%% Formatter is added automatically
@@ -356,7 +358,8 @@ formatter_fail(Config) ->
%% Check that handler is still alive and was never dead
Pid = whereis(Name),
- {ok,#{handlers:=H}} = logger:get_logger_config(),
+ #{handlers:=HC2} = logger:i(),
+ H = [Id || {Id,_,_} <- HC2],
ok.
formatter_fail(cleanup,_Config) ->
@@ -369,10 +372,18 @@ config_fail(_Config) ->
#{logger_disk_log_h => #{bad => bad},
filter_default=>log,
formatter=>{?MODULE,self()}}),
- {error,{handler_not_added,{invalid_levels,{42,42,_}}}} =
+
+ {error,{handler_not_added,{invalid_levels,{_,1,_}}}} =
+ logger:add_handler(?MODULE,logger_disk_log_h,
+ #{logger_disk_log_h => #{drop_new_reqs_qlen=>1}}),
+ {error,{handler_not_added,{invalid_levels,{43,42,_}}}} =
logger:add_handler(?MODULE,logger_disk_log_h,
- #{logger_disk_log_h => #{toggle_sync_qlen=>42,
+ #{logger_disk_log_h => #{toggle_sync_qlen=>43,
drop_new_reqs_qlen=>42}}),
+ {error,{handler_not_added,{invalid_levels,{_,43,42}}}} =
+ logger:add_handler(?MODULE,logger_disk_log_h,
+ #{logger_disk_log_h => #{drop_new_reqs_qlen=>43,
+ flush_reqs_qlen=>42}}),
ok = logger:add_handler(?MODULE,logger_disk_log_h,
#{filter_default=>log,
@@ -717,7 +728,7 @@ write_failure(Config) ->
Log = lists:concat([File,".1"]),
ct:pal("Log = ~p", [Log]),
- Node = start_h_on_new_node(Config, ?FUNCTION_NAME, File),
+ Node = start_h_on_new_node(Config, File),
false = (undefined == rpc:call(Node, ets, whereis, [?TEST_HOOKS_TAB])),
rpc:call(Node, ets, insert, [?TEST_HOOKS_TAB,{tester,self()}]),
rpc:call(Node, ?MODULE, set_internal_log, [?MODULE,internal_log]),
@@ -761,7 +772,7 @@ sync_failure(Config) ->
File = filename:join(Dir, FileName),
- Node = start_h_on_new_node(Config, ?FUNCTION_NAME, File),
+ Node = start_h_on_new_node(Config, File),
false = (undefined == rpc:call(Node, ets, whereis, [?TEST_HOOKS_TAB])),
rpc:call(Node, ets, insert, [?TEST_HOOKS_TAB,{tester,self()}]),
rpc:call(Node, ?MODULE, set_internal_log, [?MODULE,internal_log]),
@@ -801,21 +812,12 @@ sync_failure(cleanup, _Config) ->
Nodes = nodes(),
[test_server:stop_node(Node) || Node <- Nodes].
-start_h_on_new_node(_Config, Func, File) ->
- Pa = filename:dirname(code:which(?MODULE)),
- Dest =
- case os:type() of
- {win32,_} ->
- lists:concat([" {disk_log,\\\"",File,"\\\"}"]);
- _ ->
- lists:concat([" \'{disk_log,\"",File,"\"}\'"])
- end,
- Args = lists:concat([" -kernel ",logger_dest,Dest," -pa ",Pa]),
- NodeName = lists:concat([?MODULE,"_",Func]),
- ct:pal("Starting ~s with ~tp", [NodeName,Args]),
- {ok,Node} = test_server:start_node(NodeName, peer, [{args, Args}]),
- Pid = rpc:call(Node,erlang,whereis,[?STANDARD_HANDLER]),
- true = is_pid(Pid),
+start_h_on_new_node(Config, File) ->
+ {ok,_,Node} =
+ logger_test_lib:setup(
+ Config,
+ [{logger,[{handler,default,logger_disk_log_h,
+ #{ disk_log_opts => #{ file => File }}}]}]),
ok = rpc:call(Node,logger,set_handler_config,[?STANDARD_HANDLER,formatter,
{?MODULE,nl}]),
Node.
@@ -848,62 +850,115 @@ internal_log(Type, Term) ->
op_switch_to_sync(Config) ->
{Log,HConfig,DLHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ NumOfReqs = 500,
NewHConfig =
- HConfig#{logger_disk_log_h => DLHConfig#{toggle_sync_qlen => 3,
- drop_new_reqs_qlen => 501,
- flush_reqs_qlen => 2000,
+ HConfig#{logger_disk_log_h => DLHConfig#{toggle_sync_qlen => 2,
+ drop_new_reqs_qlen => NumOfReqs+1,
+ flush_reqs_qlen => 2*NumOfReqs,
enable_burst_limit => false}},
ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 500,
send_burst({n,NumOfReqs}, seq, {chars,79}, info),
- NumOfReqs = count_lines(Log),
- ok = file:delete(Log).
+ Lines = count_lines(Log),
+ ok = file:delete(Log),
+ NumOfReqs = Lines,
+ ok.
op_switch_to_sync(cleanup, _Config) ->
ok = stop_handler(?MODULE).
+op_switch_to_drop() ->
+ [{timetrap,{seconds,180}}].
op_switch_to_drop(Config) ->
- {Log,HConfig,DLHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
-
- NewHConfig =
- HConfig#{logger_disk_log_h => DLHConfig#{toggle_sync_qlen => 2,
- drop_new_reqs_qlen => 3,
- flush_reqs_qlen => 600,
- enable_burst_limit => false}},
- ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 500,
- send_burst({n,NumOfReqs}, seq, {chars,79}, info),
- Logged = count_lines(Log),
- ct:pal("Number of messages dropped = ~w (~w)",
- [NumOfReqs-Logged,NumOfReqs]),
- true = (Logged < NumOfReqs),
- ok = file:delete(Log).
+ Test =
+ fun() ->
+ {Log,HConfig,DLHConfig} =
+ start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ NumOfReqs = 300,
+ Procs = 2,
+ Bursts = 10,
+ NewHConfig =
+ HConfig#{logger_disk_log_h =>
+ DLHConfig#{toggle_sync_qlen => 1,
+ drop_new_reqs_qlen => 2,
+ flush_reqs_qlen => Procs*NumOfReqs*Bursts,
+ enable_burst_limit => false}},
+ ok = logger:set_handler_config(?MODULE, NewHConfig),
+ %% It sometimes happens that the handler gets the
+ %% requests at a slow enough pace that dropping
+ %% never occurs. Therefore, let's generate a number of
+ %% bursts to increase the chance of message buildup.
+ [send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info) ||
+ _ <- lists:seq(1, Bursts)],
+ Logged = count_lines(Log),
+ ok= stop_handler(?MODULE),
+ _ = file:delete(Log),
+ ct:pal("Number of messages dropped = ~w (~w)",
+ [Procs*NumOfReqs*Bursts-Logged,Procs*NumOfReqs*Bursts]),
+ true = (Logged < (Procs*NumOfReqs*Bursts)),
+ true = (Logged > 0),
+ ok
+ end,
+ %% As it's tricky to get the timing right in only one go, we perform the
+ %% test repeatedly, hoping that will generate a successful result.
+ case repeat_until_ok(Test, 10) of
+ {ok,{Failures,_Result}} ->
+ ct:log("Failed ~w times before success!", [Failures]);
+ {fails,Reason} ->
+ ct:fail(Reason)
+ end.
op_switch_to_drop(cleanup, _Config) ->
- ok = stop_handler(?MODULE).
+ _ = stop_handler(?MODULE).
op_switch_to_flush() ->
[{timetrap,{minutes,3}}].
op_switch_to_flush(Config) ->
- {Log,HConfig,DLHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ Test =
+ fun() ->
+ {Log,HConfig,DLHConfig} =
+ start_handler(?MODULE, ?FUNCTION_NAME, Config),
+
+ %% NOTE: it's important that both async and sync
+ %% requests have been queued when the flush happens
+ %% (verify with coverage of flush_log_requests/2)
- %% it's important that both async and sync requests have been queued
- %% when the flush happens (verify with coverage of flush_log_requests/2)
-
- NewHConfig =
- HConfig#{logger_disk_log_h => DLHConfig#{toggle_sync_qlen => 2,
- drop_new_reqs_qlen => 99,
- flush_reqs_qlen => 100,
- enable_burst_limit => false}},
- ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 1000,
- Procs = 500,
- send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info),
- Logged = count_lines(Log),
- ct:pal("Number of messages flushed/dropped = ~w (~w)",
- [(NumOfReqs*Procs)-Logged,NumOfReqs*Procs]),
- true = (Logged < (NumOfReqs*Procs)),
- ok = file:delete(Log).
+ NewHConfig =
+ HConfig#{logger_disk_log_h =>
+ DLHConfig#{toggle_sync_qlen => 2,
+ %% disable drop mode
+ drop_new_reqs_qlen => 300,
+ flush_reqs_qlen => 300,
+ enable_burst_limit => false}},
+ ok = logger:set_handler_config(?MODULE, NewHConfig),
+ NumOfReqs = 1500,
+ Procs = 10,
+ Bursts = 10,
+ %% It sometimes happens that the handler either gets
+ %% the requests at a slow enough pace that flushing
+ %% never occurs, or it gets all messages at once,
+ %% causing all messages to get flushed (no dropping of
+ %% sync messages gets tested). Therefore, let's
+ %% generate a number of bursts to increase the chance
+ %% of message buildup in some random fashion.
+ [send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info) ||
+ _ <- lists:seq(1,Bursts)],
+ Logged = count_lines(Log),
+ ok= stop_handler(?MODULE),
+ _ = file:delete(Log),
+ ct:pal("Number of messages flushed/dropped = ~w (~w)",
+ [NumOfReqs*Procs*Bursts-Logged,NumOfReqs*Procs*Bursts]),
+ true = (Logged < (NumOfReqs*Procs*Bursts)),
+ true = (Logged > 0),
+ ok
+ end,
+ %% As it's tricky to get the timing right in only one go, we perform the
+ %% test repeatedly, hoping that will generate a successful result.
+ case repeat_until_ok(Test, 10) of
+ {ok,{Failures,_Result}} ->
+ ct:log("Failed ~w times before success!", [Failures]);
+ {fails,Reason} ->
+ ct:fail(Reason)
+ end.
op_switch_to_flush(cleanup, _Config) ->
- ok = stop_handler(?MODULE).
+ _ = stop_handler(?MODULE).
limit_burst_disabled(Config) ->
@@ -987,7 +1042,7 @@ qlen_kill_new(Config) ->
{_Log,HConfig,DLHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
Pid0 = whereis(?MODULE),
{_,Mem0} = process_info(Pid0, memory),
- RestartAfter = 2000,
+ RestartAfter = ?HANDLER_RESTART_AFTER,
NewHConfig =
HConfig#{logger_disk_log_h =>
DLHConfig#{enable_kill_overloaded=>true,
@@ -1008,7 +1063,7 @@ qlen_kill_new(Config) ->
killed ->
ct:pal("Slow shutdown, handler process was killed!", [])
end,
- timer:sleep(RestartAfter + 1000),
+ timer:sleep(RestartAfter + 2000),
true = is_pid(whereis(?MODULE)),
ok
after
@@ -1024,7 +1079,7 @@ mem_kill_new(Config) ->
{_Log,HConfig,DLHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
Pid0 = whereis(?MODULE),
{_,Mem0} = process_info(Pid0, memory),
- RestartAfter = 2000,
+ RestartAfter = ?HANDLER_RESTART_AFTER,
NewHConfig =
HConfig#{logger_disk_log_h =>
DLHConfig#{enable_kill_overloaded=>true,
@@ -1045,7 +1100,7 @@ mem_kill_new(Config) ->
killed ->
ct:pal("Slow shutdown, handler process was killed!", [])
end,
- timer:sleep(RestartAfter * 2),
+ timer:sleep(RestartAfter + 2000),
true = is_pid(whereis(?MODULE)),
ok
after
@@ -1078,7 +1133,7 @@ restart_after(Config) ->
end,
{Log,_,_} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
- RestartAfter = 2000,
+ RestartAfter = ?HANDLER_RESTART_AFTER,
NewHConfig2 =
HConfig#{logger_disk_log_h=>DLHConfig#{enable_kill_overloaded=>true,
handler_overloaded_qlen=>10,
@@ -1090,7 +1145,7 @@ restart_after(Config) ->
send_burst({n,100}, {spawn,2,0}, {chars,79}, info),
receive
{'DOWN', MRef2, _, _, _Info2} ->
- timer:sleep(RestartAfter + 1000),
+ timer:sleep(RestartAfter + 2000),
Pid1 = whereis(?MODULE),
true = is_pid(Pid1),
false = (Pid1 == Pid0),
@@ -1361,6 +1416,29 @@ count_lines1(File) ->
file:close(Dev),
Lines.
+repeat_until_ok(Fun, N) ->
+ repeat_until_ok(Fun, 0, N, undefined).
+
+repeat_until_ok(_Fun, Stop, Stop, Reason) ->
+ {fails,Reason};
+
+repeat_until_ok(Fun, C, Stop, FirstReason) ->
+ if C > 0 -> timer:sleep(5000);
+ true -> ok
+ end,
+ try Fun() of
+ Result ->
+ {ok,{C,Result}}
+ catch
+ _:Reason:Stack ->
+ ct:pal("Test fails: ~p (~p)~n", [Reason,hd(Stack)]),
+ if FirstReason == undefined ->
+ repeat_until_ok(Fun, C+1, Stop, {Reason,Stack});
+ true ->
+ repeat_until_ok(Fun, C+1, Stop, FirstReason)
+ end
+ end.
+
start_tracer(Trace,Expected) ->
Pid = self(),
dbg:tracer(process,{fun tracer/2,{Pid,Expected}}),
@@ -1382,7 +1460,8 @@ tpl([{M,F,A}|Trace]) ->
tpl([]) ->
ok.
-tracer({trace,_,call,{logger_disk_log_h,handle_cast,[{Op,_}|_]}}, {Pid,[{Mod,Func,Op}|Expected]}) ->
+tracer({trace,_,call,{logger_disk_log_h,handle_cast,[Op|_]}},
+ {Pid,[{Mod,Func,Op}|Expected]}) ->
maybe_tracer_done(Pid,Expected,{Mod,Func,Op});
tracer({trace,_,call,{Mod=disk_log,Func=blog,[_,Data]}}, {Pid,[{Mod,Func,Data}|Expected]}) ->
maybe_tracer_done(Pid,Expected,{Mod,Func,Data});
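The suites now boot peer nodes through logger_test_lib:setup/2, passing plain kernel environment values (as the env-var suite that follows shows) or the {handler,Id,Module,Config} entries seen in start_h_on_new_node/2 above. A hedged sys.config-style sketch of the same handler entry expressed as a kernel parameter (the file path is a placeholder):

    %% sys.config sketch: make logger_disk_log_h the default handler.
    [{kernel,
      [{logger,
        [{handler, default, logger_disk_log_h,
          #{disk_log_opts => #{file => "./default.log"}}}]}]}].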
diff --git a/lib/kernel/test/logger_env_var_SUITE.erl b/lib/kernel/test/logger_env_var_SUITE.erl
index c2d3364701..764f443634 100644
--- a/lib/kernel/test/logger_env_var_SUITE.erl
+++ b/lib/kernel/test/logger_env_var_SUITE.erl
@@ -1,4 +1,4 @@
-%
+%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2018. All Rights Reserved.
@@ -21,83 +21,65 @@
-compile(export_all).
--include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/logger.hrl").
-include_lib("kernel/src/logger_internal.hrl").
--define(all_vars,[{kernel,logger_dest},
- {kernel,logger_level},
- {kernel,logger_log_progress},
- {kernel,logger_sasl_compatible},
- {kernel,error_logger}]).
+-import(logger_test_lib,[setup/2,log/3,sync_and_read/3]).
suite() ->
- [{timetrap,{seconds,30}}].
+ [{timetrap,{seconds,60}},
+ {ct_hooks,[logger_test_lib]}].
init_per_suite(Config) ->
- Env = [{App,Key,application:get_env(App,Key)} || {App,Key} <- ?all_vars],
- Removed = cleanup(),
- [{env,Env},{logger,Removed}|Config].
-
-end_per_suite(Config) ->
- [application:set_env(App,Key,Val) ||
- {App,Key,Val} <- ?config(env,Config),
- Val =/= undefined],
- Hs = ?config(logger,Config),
- [ok = logger:add_handler(Id,Mod,C) || {Id,Mod,C} <- Hs],
- ok.
-
-init_per_group(_Group, Config) ->
- Config.
-
-end_per_group(_Group, _Config) ->
- ok.
-
-init_per_testcase(_TestCase, Config) ->
Config.
-end_per_testcase(Case, Config) ->
- try apply(?MODULE,Case,[cleanup,Config])
- catch error:undef -> ok
- end,
- cleanup(),
+end_per_suite(_Config) ->
ok.
groups() ->
- [].
-
-all() ->
+ [{error_logger,[],[error_logger_tty,
+ error_logger_tty_sasl_compatible,
+ error_logger_false,
+ error_logger_false_progress,
+ error_logger_false_sasl_compatible,
+ error_logger_silent,
+ error_logger_silent_sasl_compatible,
+ error_logger_file]},
+ {logger,[],[logger_file,
+ logger_file_sasl_compatible,
+ logger_file_log_progress,
+ logger_file_no_filter,
+ logger_file_no_filter_level,
+ logger_file_formatter,
+ logger_filters,
+ logger_filters_stop,
+ logger_module_level,
+ logger_disk_log,
+ logger_disk_log_formatter,
+ logger_undefined,
+ logger_many_handlers_default_first,
+ logger_many_handlers_default_last,
+ logger_many_handlers_default_last_broken_filter
+ ]},
+ {bad,[],[bad_error_logger,
+ bad_level,
+ bad_sasl_compatibility,
+ bad_progress]}].
+
+all() ->
[default,
default_sasl_compatible,
- dest_tty,
- dest_tty_sasl_compatible,
- dest_false,
- dest_false_progress,
- dest_false_sasl_compatible,
- dest_silent,
- dest_silent_sasl_compatible,
- dest_file_old,
- dest_file,
- dest_disk_log,
- %% disk_log_vars, % or test this in logger_disk_log_SUITE?
sasl_compatible_false,
sasl_compatible_false_no_progress,
sasl_compatible,
- bad_dest%% ,
- %% bad_level,
- %% bad_sasl_compatibility,
- %% bad_progress
+ {group,bad},
+ {group,error_logger},
+ {group,logger}
].
default(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- undefined,
- undefined, % dest
- undefined, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- {logger_std_h,logger_std_h,StdC} = lists:keyfind(logger_std_h,1,Hs),
- true = is_pid(whereis(logger_std_h)),
+ {ok,#{handlers:=Hs},_Node} = setup(Config,[]),
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
info = maps:get(level,StdC),
StdFilters = maps:get(filters,StdC),
{domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
@@ -105,18 +87,12 @@ default(Config) ->
true = lists:keymember(stop_progress,1,StdFilters),
false = lists:keymember(logger_simple,1,Hs),
false = lists:keymember(sasl_h,1,Hs),
- false = is_pid(whereis(sasl_h)),
ok.
default_sasl_compatible(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- undefined,
- undefined, % dest
- undefined, % level
- true, % sasl comp (default=false)
- undefined), % progress (default=false)
- {logger_std_h,logger_std_h,StdC} = lists:keyfind(logger_std_h,1,Hs),
- true = is_pid(whereis(logger_std_h)),
+ {ok,#{handlers:=Hs},_Node} = setup(Config,
+ [{logger_sasl_compatible,true}]),
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
info = maps:get(level,StdC),
StdFilters = maps:get(filters,StdC),
{domain,{_,{log,prefix_of,[beam,erlang,otp]}}} =
@@ -124,18 +100,11 @@ default_sasl_compatible(Config) ->
false = lists:keymember(stop_progress,1,StdFilters),
false = lists:keymember(logger_simple,1,Hs),
true = lists:keymember(sasl_h,1,Hs),
- true = is_pid(whereis(sasl_h)),
ok.
-dest_tty(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- tty, % dest
- undefined, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- {logger_std_h,logger_std_h,StdC} = lists:keyfind(logger_std_h,1,Hs),
- true = is_pid(whereis(logger_std_h)),
+error_logger_tty(Config) ->
+ {ok,#{handlers:=Hs},_Node} = setup(Config,[{error_logger,tty}]),
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
info = maps:get(level,StdC),
StdFilters = maps:get(filters,StdC),
{domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
@@ -143,18 +112,13 @@ dest_tty(Config) ->
true = lists:keymember(stop_progress,1,StdFilters),
false = lists:keymember(logger_simple,1,Hs),
false = lists:keymember(sasl_h,1,Hs),
- false = is_pid(whereis(sasl_h)),
ok.
-dest_tty_sasl_compatible(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- tty, % dest
- undefined, % level
- true, % sasl comp (default=false)
- undefined), % progress (default=false)
- {logger_std_h,logger_std_h,StdC} = lists:keyfind(logger_std_h,1,Hs),
- true = is_pid(whereis(logger_std_h)),
+error_logger_tty_sasl_compatible(Config) ->
+ {ok,#{handlers:=Hs},_Node} = setup(Config,
+ [{error_logger,tty},
+ {logger_sasl_compatible,true}]),
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
info = maps:get(level,StdC),
StdFilters = maps:get(filters,StdC),
{domain,{_,{log,prefix_of,[beam,erlang,otp]}}} =
@@ -162,19 +126,17 @@ dest_tty_sasl_compatible(Config) ->
false = lists:keymember(stop_progress,1,StdFilters),
false = lists:keymember(logger_simple,1,Hs),
true = lists:keymember(sasl_h,1,Hs),
- true = is_pid(whereis(sasl_h)),
ok.
-dest_false(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- false, % dest
- notice, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- false = lists:keymember(logger_std_h,1,Hs),
+error_logger_false(Config) ->
+ {ok,#{handlers:=Hs,logger:=L},_Node} =
+ setup(Config,
+ [{error_logger,false},
+ {logger_level,notice}]),
+ false = lists:keymember(?STANDARD_HANDLER,1,Hs),
{logger_simple,logger_simple,SimpleC} = lists:keyfind(logger_simple,1,Hs),
- notice = maps:get(level,SimpleC),
+ info = maps:get(level,SimpleC),
+ notice = maps:get(level,L),
SimpleFilters = maps:get(filters,SimpleC),
{domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
lists:keyfind(domain,1,SimpleFilters),
@@ -182,16 +144,16 @@ dest_false(Config) ->
false = lists:keymember(sasl_h,1,Hs),
ok.
-dest_false_progress(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- false, % dest
- notice, % level
- undefined, % sasl comp (default=false)
- true), % progress (default=false)
- false = lists:keymember(logger_std_h,1,Hs),
+error_logger_false_progress(Config) ->
+ {ok,#{handlers:=Hs,logger:=L},_Node} =
+ setup(Config,
+ [{error_logger,false},
+ {logger_level,notice},
+ {logger_log_progress,true}]),
+ false = lists:keymember(?STANDARD_HANDLER,1,Hs),
{logger_simple,logger_simple,SimpleC} = lists:keyfind(logger_simple,1,Hs),
- notice = maps:get(level,SimpleC),
+ info = maps:get(level,SimpleC),
+ notice = maps:get(level,L),
SimpleFilters = maps:get(filters,SimpleC),
{domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
lists:keyfind(domain,1,SimpleFilters),
@@ -199,253 +161,496 @@ dest_false_progress(Config) ->
false = lists:keymember(sasl_h,1,Hs),
ok.
-dest_false_sasl_compatible(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- false, % dest
- notice, % level
- true, % sasl comp (default=false)
- undefined), % progress (default=false)
- false = lists:keymember(logger_std_h,1,Hs),
+error_logger_false_sasl_compatible(Config) ->
+ {ok,#{handlers:=Hs,logger:=L},_Node} =
+ setup(Config,
+ [{error_logger,false},
+ {logger_level,notice},
+ {logger_sasl_compatible,true}]),
+ false = lists:keymember(?STANDARD_HANDLER,1,Hs),
{logger_simple,logger_simple,SimpleC} = lists:keyfind(logger_simple,1,Hs),
- notice = maps:get(level,SimpleC),
+ info = maps:get(level,SimpleC),
+ notice = maps:get(level,L),
SimpleFilters = maps:get(filters,SimpleC),
{domain,{_,{log,prefix_of,[beam,erlang,otp]}}} =
lists:keyfind(domain,1,SimpleFilters),
false = lists:keymember(stop_progress,1,SimpleFilters),
true = lists:keymember(sasl_h,1,Hs),
- true = is_pid(whereis(sasl_h)),
ok.
-dest_silent(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- silent, % dest
- undefined, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- false = lists:keymember(logger_std_h,1,Hs),
+error_logger_silent(Config) ->
+ {ok,#{handlers:=Hs},_Node} = setup(Config,
+ [{error_logger,silent}]),
+ false = lists:keymember(?STANDARD_HANDLER,1,Hs),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+ ok.
+
+error_logger_silent_sasl_compatible(Config) ->
+ {ok,#{handlers:=Hs},_Node} = setup(Config,
+ [{error_logger,silent},
+ {logger_sasl_compatible,true}]),
+ false = lists:keymember(?STANDARD_HANDLER,1,Hs),
+ false = lists:keymember(logger_simple,1,Hs),
+ true = lists:keymember(sasl_h,1,Hs),
+ ok.
+
+
+error_logger_file(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,_Hs,Node} = setup(Config,
+ [{error_logger,{file,Log}}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0),% progress in std logger
+ ok.
+
+
+logger_file(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{logger_std_h=>#{type=>{file,Log}}}}]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ StdFilters = maps:get(filters,StdC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
+ lists:keyfind(domain,1,StdFilters),
+ true = lists:keymember(stop_progress,1,StdFilters),
false = lists:keymember(logger_simple,1,Hs),
false = lists:keymember(sasl_h,1,Hs),
+
ok.
-dest_silent_sasl_compatible(Config) ->
- {ok,{_Log,Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- silent, % dest
- undefined, % level
- true, % sasl comp (default=false)
- undefined), % progress (default=false)
- false = lists:keymember(logger_std_h,1,Hs),
+logger_file_sasl_compatible(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger_sasl_compatible,true},
+ {logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{logger_std_h=>#{type=>{file,Log}}}}]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ StdFilters = maps:get(filters,StdC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp]}}} =
+ lists:keyfind(domain,1,StdFilters),
+ false = lists:keymember(stop_progress,1,StdFilters),
false = lists:keymember(logger_simple,1,Hs),
true = lists:keymember(sasl_h,1,Hs),
- true = is_pid(whereis(sasl_h)),
+
+ ok.
+
+logger_file_log_progress(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger_log_progress,true},
+ {logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{logger_std_h=>#{type=>{file,Log}}}}]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 6),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ StdFilters = maps:get(filters,StdC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
+ lists:keyfind(domain,1,StdFilters),
+ false = lists:keymember(stop_progress,1,StdFilters),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+
+ ok.
+
+logger_file_no_filter(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{filter_default=>log,filters=>[],
+ logger_std_h=>#{type=>{file,Log}}}}]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 6),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ [] = maps:get(filters,StdC),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+
+ ok.
+
+logger_file_no_filter_level(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{filters=>[],level=>error,
+ logger_std_h=>#{type=>{file,Log}}}}]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0,% progress in std logger
+ error),% level
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ error = maps:get(level,StdC),
+ [] = maps:get(filters,StdC),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+
+ ok.
+
+logger_file_formatter(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{filters=>[],
+ formatter=>{logger_formatter,#{}},
+ logger_std_h=>#{type=>{file,Log}}}}]}]),
+ check_single_log(Node,Log,
+ file,% dest
+ 6),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ [] = maps:get(filters,StdC),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+
+ ok.
+
+logger_filters(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs,logger:=Logger},Node}
+ = setup(Config,
+ [{logger_log_progress,true},
+ {logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{logger_std_h=>#{type=>{file,Log}}}},
+ {filters,log,[{stop_progress,{fun logger_filters:progress/2,stop}}]}
+ ]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ StdFilters = maps:get(filters,StdC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
+ lists:keyfind(domain,1,StdFilters),
+ false = lists:keymember(stop_progress,1,StdFilters),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+ LoggerFilters = maps:get(filters,Logger),
+ true = lists:keymember(stop_progress,1,LoggerFilters),
+
+ ok.
+
+logger_filters_stop(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs,logger:=Logger},Node}
+ = setup(Config,
+ [{logger_log_progress,true},
+ {logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{filters=>[],
+ logger_std_h=>#{type=>{file,Log}}}},
+ {filters,stop,[{log_error,{fun logger_filters:level/2,{log,gt,info}}}]}
+ ]}]),
+ check_default_log(Node,Log,
+ file,% dest
+                      0,% progress in std logger
+                      notice),% level
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ [] = maps:get(filters,StdC),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+ LoggerFilters = maps:get(filters,Logger),
+ true = lists:keymember(log_error,1,LoggerFilters),
+
+ ok.
+
+logger_module_level(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs,module_levels:=ModuleLevels},Node}
+ = setup(Config,
+ [{logger_log_progress,true},
+ {logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{logger_std_h=>#{type=>{file,Log}}}},
+ {module_level,error,[supervisor]}
+ ]}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 3),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_std_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ StdFilters = maps:get(filters,StdC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
+ lists:keyfind(domain,1,StdFilters),
+ false = lists:keymember(stop_progress,1,StdFilters),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+ [{supervisor,error}] = ModuleLevels,
+ ok.
+
+logger_disk_log(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger,
+ [{handler,?STANDARD_HANDLER,logger_disk_log_h,
+ #{disk_log_opts=>#{file=>Log}}}]}]),
+ check_default_log(Node,Log,
+ disk_log,% dest
+ 0),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_disk_log_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ StdFilters = maps:get(filters,StdC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
+ lists:keyfind(domain,1,StdFilters),
+ true = lists:keymember(stop_progress,1,StdFilters),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
+
ok.
+logger_disk_log_formatter(Config) ->
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,#{handlers:=Hs},Node}
+ = setup(Config,
+ [{logger,
+ [{handler,?STANDARD_HANDLER,logger_disk_log_h,
+ #{filters=>[],
+ formatter=>{logger_formatter,#{}},
+ disk_log_opts=>#{file=>Log}}}]}]),
+ check_single_log(Node,Log,
+ disk_log,% dest
+ 6),% progress in std logger
+
+ {?STANDARD_HANDLER,logger_disk_log_h,StdC} = lists:keyfind(?STANDARD_HANDLER,1,Hs),
+ info = maps:get(level,StdC),
+ [] = maps:get(filters,StdC),
+ false = lists:keymember(logger_simple,1,Hs),
+ false = lists:keymember(sasl_h,1,Hs),
-dest_file_old(Config) ->
- {ok,{Log,_Hs}} = setup(Config,?FUNCTION_NAME,
- error_logger,
- file, % dest
- undefined, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- check_log(Log,
- file, % dest
- 0), % progress in std logger
ok.
-
-
-dest_file(Config) ->
- {ok,{Log,_Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- file, % dest
- undefined, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- check_log(Log,
- file, % dest
- 0), % progress in std logger
+
+logger_undefined(Config) ->
+ {ok,#{handlers:=Hs,logger:=L},_Node} =
+ setup(Config,[{logger,[{handler,?STANDARD_HANDLER,undefined}]}]),
+ false = lists:keymember(?STANDARD_HANDLER,1,Hs),
+ {logger_simple,logger_simple,SimpleC} = lists:keyfind(logger_simple,1,Hs),
+ info = maps:get(level,SimpleC),
+ info = maps:get(level,L),
+ SimpleFilters = maps:get(filters,SimpleC),
+ {domain,{_,{log,prefix_of,[beam,erlang,otp,sasl]}}} =
+ lists:keyfind(domain,1,SimpleFilters),
+ true = lists:keymember(stop_progress,1,SimpleFilters),
+ false = lists:keymember(sasl_h,1,Hs),
ok.
-
-
-dest_disk_log(Config) ->
- {ok,{Log,_Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- disk_log, % dest
- undefined, % level
- undefined, % sasl comp (default=false)
- undefined), % progress (default=false)
- check_log(Log,
- disk_log, % dest
- 0), % progress in std logger
+
+
+%% Test that we can add multiple handlers with the default handler first
+logger_many_handlers_default_first(Config) ->
+ LogErr = file(Config,logger_many_handlers_default_first_error),
+ LogInfo = file(Config,logger_many_handlers_default_first_info),
+
+ logger_many_handlers(
+ Config,[{logger,
+ [{handler,?STANDARD_HANDLER,logger_std_h,
+ #{level=>error,
+ filters=>[],
+ formatter=>{logger_formatter,#{}},
+ logger_std_h=>#{type=>{file,LogErr}}}
+ },
+ {handler,info,logger_std_h,
+ #{level=>info,
+ filters=>[{level,{fun logger_filters:level/2,{stop,gteq,error}}}],
+ logger_std_h=>#{type=>{file,LogInfo}}}
+ }
+ ]}], LogErr, LogInfo, 6).
+
+%% Test that we can add multiple handlers with the default handler last
+logger_many_handlers_default_last(Config) ->
+ LogErr = file(Config,logger_many_handlers_default_last_error),
+ LogInfo = file(Config,logger_many_handlers_default_last_info),
+ logger_many_handlers(
+ Config,[{logger,
+ [{handler,info,logger_std_h,
+ #{level=>info,
+ filters=>[{level,{fun logger_filters:level/2,{stop,gteq,error}}}],
+ logger_std_h=>#{type=>{file,LogInfo}}}
+ },
+ {handler,?STANDARD_HANDLER,logger_std_h,
+ #{level=>error,
+ filters=>[],
+ formatter=>{logger_formatter,#{}},
+ logger_std_h=>#{type=>{file,LogErr}}}
+ }
+ ]}], LogErr, LogInfo, 7).
+
+%% Check that we can handle an added handler that has a broken filter.
+%% This used to cause a deadlock.
+logger_many_handlers_default_last_broken_filter(Config) ->
+    LogErr = file(Config,logger_many_handlers_default_last_broken_filter_error),
+    LogInfo = file(Config,logger_many_handlers_default_last_broken_filter_info),
+
+ logger_many_handlers(
+ Config,[{logger,
+ [{handler,info,logger_std_h,
+ #{level=>info,
+ filters=>[{broken,{fun logger_filters:level/2,broken_state}},
+ {level,{fun logger_filters:level/2,{stop,gteq,error}}}],
+ logger_std_h=>#{type=>{file,LogInfo}}}
+ },
+ {handler,?STANDARD_HANDLER,logger_std_h,
+ #{level=>error,
+ filters=>[],
+ formatter=>{logger_formatter,#{}},
+ logger_std_h=>#{type=>{file,LogErr}}}
+ }
+ ]}], LogErr, LogInfo, 7).
+
+logger_many_handlers(Config, Env, LogErr, LogInfo, NumProgress) ->
+ {ok,#{handlers:=Hs},Node} = setup(Config,Env),
+ check_single_log(Node,LogErr,
+ file,% dest
+ 0,% progress in std logger
+ error), % level
+ ok = rpc:call(Node,logger_std_h,filesync,[info]),
+ {ok, Bin} = file:read_file(LogInfo),
+ ct:log("Log content:~n~s",[Bin]),
+ match(Bin,<<"PROGRESS REPORT">>,NumProgress,info,info),
+ match(Bin,<<"ALERT REPORT">>,0,alert,info),
+
ok.
-
sasl_compatible_false(Config) ->
- {ok,{Log,_Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- file, % dest
- undefined, % level
- false, % sasl comp
- true), % progress
- check_log(Log,
- file, % dest
- 4), % progress in std logger
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,_Hs,Node} = setup(Config,
+ [{error_logger,{file,Log}},
+ {logger_sasl_compatible,false},
+ {logger_log_progress,true}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 6),% progress in std logger
ok.
sasl_compatible_false_no_progress(Config) ->
- {ok,{Log,_Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- file, % dest
- undefined, % level
- false, % sasl comp
- false), % progress
- check_log(Log,
- file, % dest
- 0), % progress in std logger
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,_Hs,Node} = setup(Config,
+ [{error_logger,{file,Log}},
+ {logger_sasl_compatible,false},
+ {logger_log_progress,false}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0),% progress in std logger
ok.
sasl_compatible(Config) ->
- {ok,{Log,_Hs}} = setup(Config,?FUNCTION_NAME,
- logger_dest,
- file, % dest
- undefined, % level
- true, % sasl comp
- undefined), % progress
- check_log(Log,
- file, % dest
- 0), % progress in std logger
+ Log = file(Config,?FUNCTION_NAME),
+ {ok,_Hs,Node} = setup(Config,
+ [{error_logger,{file,Log}},
+                           {logger_sasl_compatible,true}]),
+ check_default_log(Node,Log,
+ file,% dest
+ 0),% progress in std logger
ok.
-bad_dest(Config) ->
- {error,{bad_config,{kernel,{logger_dest,baddest}}}} =
- setup(Config,?FUNCTION_NAME,
- logger_dest,
- baddest,
- undefined,
- undefined,
- undefined).
+bad_error_logger(Config) ->
+ error = setup(Config,[{error_logger,baddest}]).
bad_level(Config) ->
- error =
- setup(Config,?FUNCTION_NAME,
- logger_dest,
- tty,
- badlevel,
- undefined,
- undefined).
+ error = setup(Config,[{logger_level,badlevel}]).
bad_sasl_compatibility(Config) ->
- error =
- setup(Config,?FUNCTION_NAME,
- logger_dest,
- tty,
- info,
- badcomp,
- undefined).
+ error = setup(Config,[{logger_sasl_compatible,badcomp}]).
bad_progress(Config) ->
- error =
- setup(Config,?FUNCTION_NAME,
- logger_dest,
- tty,
- info,
- undefined,
- badprogress).
+ error = setup(Config,[{logger_log_progress,badprogress}]).
%%%-----------------------------------------------------------------
%%% Internal
-setup(Config,Func,DestVar,Dest,Level,SaslComp,Progress) ->
- ok = logger:add_handler(logger_simple,logger_simple,
- #{filter_default=>log,
- logger_simple=>#{buffer=>true}}),
- Dir = ?config(priv_dir,Config),
- File = lists:concat([?MODULE,"_",Func,".log"]),
- Log = filename:join(Dir,File),
- case Dest of
- undefined ->
- ok;
- F when F==file; F==disk_log ->
- application:set_env(kernel,DestVar,{Dest,Log});
- _ ->
- application:set_env(kernel,DestVar,Dest)
- end,
- case Level of
- undefined ->
- ok;
- _ ->
- application:set_env(kernel,logger_level,Level)
- end,
- case SaslComp of
- undefined ->
- ok;
- _ ->
- application:set_env(kernel,logger_sasl_compatible,SaslComp)
- end,
- case Progress of
- undefined ->
- ok;
- _ ->
- application:set_env(kernel,logger_log_progress,Progress)
- end,
- case logger:setup_standard_handler() of
- ok ->
- application:start(sasl),
- StdH = case Dest of
- NoH when NoH==false; NoH==silent -> false;
- _ -> true
- end,
- StdH = is_pid(whereis(?STANDARD_HANDLER)),
- SaslH = if SaslComp -> true;
- true -> false
- end,
- SaslH = is_pid(whereis(sasl_h)),
- {ok,{Log,maps:get(handlers,logger:i())}};
- Error ->
- Error
- end.
+file(Config,Func) ->
+ filename:join(proplists:get_value(priv_dir,Config),
+ lists:concat([Func,".log"])).
+
+check_default_log(Node,Log,Dest,NumProgress) ->
+ check_default_log(Node,Log,Dest,NumProgress,info).
+check_default_log(Node,Log,Dest,NumProgress,Level) ->
+
+ {ok,Bin1,Bin2} = check_log(Node,Log,Dest),
+
+ match(Bin1,<<"PROGRESS REPORT">>,NumProgress,info,Level),
+ match(Bin1,<<"ALERT REPORT">>,1,alert,Level),
+ match(Bin1,<<"INFO REPORT">>,0,info,Level),
+ match(Bin1,<<"DEBUG REPORT">>,0,debug,Level),
+
+ match(Bin2,<<"INFO REPORT">>,1,info,Level),
+ match(Bin2,<<"DEBUG REPORT">>,0,debug,Level),
+ ok.
+
+check_single_log(Node,Log,Dest,NumProgress) ->
+ check_single_log(Node,Log,Dest,NumProgress,info).
+check_single_log(Node,Log,Dest,NumProgress,Level) ->
+
+ {ok,Bin1,Bin2} = check_log(Node,Log,Dest),
-check_log(Log,Dest,NumProgress) ->
- ok = logger:alert("dummy1"),
- ok = logger:debug("dummy1"),
+ match(Bin1,<<"info:">>,NumProgress,info,Level),
+ match(Bin1,<<"alert:">>,1,alert,Level),
+ match(Bin1,<<"debug:">>,0,debug,Level),
+
+ match(Bin2,<<"info:">>,NumProgress+1,info,Level),
+ match(Bin2,<<"debug:">>,0,debug,Level),
+
+ ok.
+
+check_log(Node,Log,Dest) ->
+
+ ok = log(Node,alert,["dummy1"]),
+ ok = log(Node,debug,["dummy1"]),
%% Check that there are progress reports (supervisor and
%% application_controller) and an error report (the call above) in
%% the log. There should not be any info reports yet.
- {ok,Bin1} = sync_and_read(Dest,Log),
+ {ok,Bin1} = sync_and_read(Node,Dest,Log),
ct:log("Log content:~n~s",[Bin1]),
- match(Bin1,<<"PROGRESS REPORT">>,NumProgress),
- match(Bin1,<<"ALERT REPORT">>,1),
- match(Bin1,<<"INFO REPORT">>,0),
- match(Bin1,<<"DEBUG REPORT">>,0),
%% Then stop sasl and see that the info report from
%% application_controller is there
- ok = application:stop(sasl),
- {ok,Bin2} = sync_and_read(Dest,Log),
+ ok = rpc:call(Node,application,stop,[sasl]),
+ {ok,Bin2} = sync_and_read(Node,Dest,Log),
ct:log("Log content:~n~s",[Bin2]),
- match(Bin2,<<"INFO REPORT">>,1),
- match(Bin1,<<"DEBUG REPORT">>,0),
- ok.
+ {ok,Bin1,Bin2}.
-match(Bin,Pattern,0) ->
+match(Bin,Pattern,0,_,_) ->
nomatch = re:run(Bin,Pattern,[{capture,none}]);
-match(Bin,Pattern,N) ->
- {match,M} = re:run(Bin,Pattern,[{capture,all},global]),
- N = length(M).
-
-sync_and_read(disk_log,Log) ->
- logger_disk_log_h:disk_log_sync(?STANDARD_HANDLER),
- file:read_file(Log ++ ".1");
-sync_and_read(file,Log) ->
- logger_std_h:filesync(?STANDARD_HANDLER),
- file:read_file(Log).
-
-cleanup() ->
- application:stop(sasl),
- [application:unset_env(App,Key) || {App,Key} <- ?all_vars],
- #{handlers:=Hs0} = logger:i(),
- Hs = lists:keydelete(cth_log_redirect,1,Hs0),
- [ok = logger:remove_handler(Id) || {Id,_,_} <- Hs],
- Hs.
+match(Bin,Pattern,N,LogLevel,ConfLevel) ->
+ case logger:compare_levels(LogLevel,ConfLevel) of
+ lt -> match(Bin,Pattern,0,LogLevel,ConfLevel);
+ _ ->
+ {match,M} = re:run(Bin,Pattern,[{capture,all},global]),
+ N = length(M)
+ end.
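
The test cases above configure Logger purely through kernel environment variables (error_logger, logger_level, logger_sasl_compatible, logger_log_progress and the new logger parameter). For reference, a minimal sys.config sketch exercising the same parameters is shown here; it is not part of the patch, the handler id and option shapes follow the test code, and the file path is only illustrative.

    %% sys.config sketch; the in-development option names used by the tests
    %% above may still change before release.
    [{kernel,
      [{logger_level, info},
       {logger_sasl_compatible, false},
       {logger_log_progress, false},
       {logger,
        [{handler, default, logger_std_h,
          #{level => info,
            logger_std_h => #{type => {file, "/tmp/kernel_default.log"}}}}]}]}].
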
diff --git a/lib/kernel/test/logger_filters_SUITE.erl b/lib/kernel/test/logger_filters_SUITE.erl
index 21f14bbc02..c4b31370ff 100644
--- a/lib/kernel/test/logger_filters_SUITE.erl
+++ b/lib/kernel/test/logger_filters_SUITE.erl
@@ -81,6 +81,8 @@ domain(_Config) ->
stop = logger_filters:domain(?dlog([]),{stop,starts_with,[]}),
L3 = logger_filters:domain(L3=?dlog([]),{log,equals,[]}),
stop = logger_filters:domain(?dlog([]),{stop,equals,[]}),
+ ignore = logger_filters:domain(?dlog([]),{log,differs,[]}),
+ ignore = logger_filters:domain(?dlog([]),{stop,differs,[]}),
ignore = logger_filters:domain(?dlog([]),{log,no_domain,[]}),
ignore = logger_filters:domain(?dlog([]),{stop,no_domain,[]}),
@@ -90,15 +92,19 @@ domain(_Config) ->
ignore = logger_filters:domain(?dlog([a]),{stop,starts_with,[a,b]}),
ignore = logger_filters:domain(?dlog([a]),{log,equals,[a,b]}),
ignore = logger_filters:domain(?dlog([a]),{stop,equals,[a,b]}),
+ L5 = logger_filters:domain(L5=?dlog([a]),{log,differs,[a,b]}),
+ stop = logger_filters:domain(?dlog([a]),{stop,differs,[a,b]}),
ignore = logger_filters:domain(?dlog([a]),{log,no_domain,[a,b]}),
ignore = logger_filters:domain(?dlog([a]),{stop,no_domain,[a,b]}),
ignore = logger_filters:domain(?dlog([a,b]),{log,prefix_of,[a]}),
ignore = logger_filters:domain(?dlog([a,b]),{stop,prefix_of,[a]}),
- L5 = logger_filters:domain(L5=?dlog([a,b]),{log,starts_with,[a]}),
+ L6 = logger_filters:domain(L6=?dlog([a,b]),{log,starts_with,[a]}),
stop = logger_filters:domain(?dlog([a,b]),{stop,starts_with,[a]}),
ignore = logger_filters:domain(?dlog([a,b]),{log,equals,[a]}),
ignore = logger_filters:domain(?dlog([a,b]),{stop,equals,[a]}),
+ L7 = logger_filters:domain(L7=?dlog([a,b]),{log,differs,[a]}),
+ stop = logger_filters:domain(?dlog([a,b]),{stop,differs,[a]}),
ignore = logger_filters:domain(?dlog([a,b]),{log,no_domain,[a]}),
ignore = logger_filters:domain(?dlog([a,b]),{stop,no_domain,[a]}),
@@ -108,26 +114,33 @@ domain(_Config) ->
ignore = logger_filters:domain(?ndlog,{stop,starts_with,[a]}),
ignore = logger_filters:domain(?ndlog,{log,equals,[a]}),
ignore = logger_filters:domain(?ndlog,{stop,equals,[a]}),
- L6 = logger_filters:domain(L6=?ndlog,{log,no_domain,[a]}),
+ L8 = logger_filters:domain(L8=?ndlog,{log,differs,[a]}),
+ stop = logger_filters:domain(?ndlog,{stop,differs,[a]}),
+ L9 = logger_filters:domain(L9=?ndlog,{log,no_domain,[a]}),
stop = logger_filters:domain(?ndlog,{stop,no_domain,[a]}),
- L7 = logger_filters:domain(L7=?dlog([a,b,c,d]),{log,prefix_of,[a,b,c,d]}),
+ L10 = logger_filters:domain(L10=?dlog([a,b,c,d]),{log,prefix_of,[a,b,c,d]}),
stop = logger_filters:domain(?dlog([a,b,c,d]),{stop,prefix_of,[a,b,c,d]}),
- L8 = logger_filters:domain(L8=?dlog([a,b,c,d]),{log,starts_with,[a,b,c,d]}),
+ L11 = logger_filters:domain(L11=?dlog([a,b,c,d]),{log,starts_with,[a,b,c,d]}),
stop = logger_filters:domain(?dlog([a,b,c,d]),{stop,starts_with,[a,b,c,d]}),
- L9 = logger_filters:domain(L9=?dlog([a,b,c,d]),{log,equals,[a,b,c,d]}),
+ L12 = logger_filters:domain(L12=?dlog([a,b,c,d]),{log,equals,[a,b,c,d]}),
stop = logger_filters:domain(?dlog([a,b,c,d]),{stop,equals,[a,b,c,d]}),
+ ignore = logger_filters:domain(?dlog([a,b,c,d]),{log,differs,[a,b,c,d]}),
+ ignore = logger_filters:domain(?dlog([a,b,c,d]),{stop,differs,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog([a,b,c,d]),{log,no_domain,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog([a,b,c,d]),{stop,no_domain,[a,b,c,d]}),
%% A domain field in meta which is not a list is allowed by the
- %% filter, but it will never match.
+    %% filter, but since MatchDomain is always a list of atoms, only the
+    %% 'differs' comparison can ever match.
ignore = logger_filters:domain(?dlog(dummy),{log,prefix_of,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{stop,prefix_of,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{log,starts_with,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{stop,starts_with,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{log,equals,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{stop,equals,[a,b,c,d]}),
+ L13 = logger_filters:domain(L13=?dlog(dummy),{log,differs,[a,b,c,d]}),
+ stop = logger_filters:domain(?dlog(dummy),{stop,differs,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{log,no_domain,[a,b,c,d]}),
ignore = logger_filters:domain(?dlog(dummy),{stop,no_domain,[a,b,c,d]}),
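
The added assertions above exercise the new 'differs' comparison in logger_filters:domain/2. A minimal sketch of its behaviour follows; it is not part of the patch, and it assumes the #{level, msg, meta} log-event shape used elsewhere in these suites.

    -module(domain_differs_example).
    -export([run/0]).

    run() ->
        Event = #{level => info,
                  msg => {string, "hello"},
                  meta => #{domain => [beam, erlang, otp]}},
        %% Equal domains: 'differs' does not apply, so the filter is neutral.
        ignore = logger_filters:domain(Event,
                                       {stop, differs, [beam, erlang, otp]}),
        %% Any other domain counts as differing, so the 'stop' action applies.
        stop = logger_filters:domain(Event#{meta := #{domain => [my_app]}},
                                     {stop, differs, [beam, erlang, otp]}),
        ok.
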
diff --git a/lib/kernel/test/logger_formatter_SUITE.erl b/lib/kernel/test/logger_formatter_SUITE.erl
index 7d1f33746d..9baadfd65a 100644
--- a/lib/kernel/test/logger_formatter_SUITE.erl
+++ b/lib/kernel/test/logger_formatter_SUITE.erl
@@ -73,7 +73,7 @@ all() ->
default(_Config) ->
String1 = format(info,{"~p",[term]},#{},#{}),
ct:log(String1),
- [_Date,_Time,"info:","term\n"] = string:lexemes(String1," "),
+ [_DateTime,"info:","term\n"] = string:lexemes(String1," "),
Time = timestamp(),
ExpectedTimestamp = default_time_format(Time),
@@ -297,22 +297,22 @@ max_size(_Config) ->
single_line=>false},
"12345678901234567890" =
format(info,{"12345678901234567890",[]},#{},Cfg),
- application:set_env(kernel,logger_max_size,11),
- "12345678901234567890" = % min value is 50, so this is not limited
- format(info,{"12345678901234567890",[]},#{},Cfg),
- "12345678901234567890123456789012345678901234567..." = % 50
- format(info,
- {"123456789012345678901234567890123456789012345678901234567890",
- []},
- #{},
- Cfg),
- application:set_env(kernel,logger_max_size,53),
- "12345678901234567890123456789012345678901234567890..." = %53
- format(info,
- {"123456789012345678901234567890123456789012345678901234567890",
- []},
- #{},
- Cfg),
+ %% application:set_env(kernel,logger_max_size,11),
+ %% "12345678901234567890" = % min value is 50, so this is not limited
+ %% format(info,{"12345678901234567890",[]},#{},Cfg),
+ %% "12345678901234567890123456789012345678901234567..." = % 50
+ %% format(info,
+ %% {"123456789012345678901234567890123456789012345678901234567890",
+ %% []},
+ %% #{},
+ %% Cfg),
+ %% application:set_env(kernel,logger_max_size,53),
+ %% "12345678901234567890123456789012345678901234567890..." = %53
+ %% format(info,
+ %% {"123456789012345678901234567890123456789012345678901234567890",
+ %% []},
+ %% #{},
+ %% Cfg),
"123456789012..." =
format(info,{"12345678901234567890",[]},#{},Cfg#{max_size=>15}),
"12345678901234567890" =
@@ -341,12 +341,6 @@ depth(_Config) ->
{"~p",[[1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0]]},
#{},
#{template=>Template}),
- application:set_env(kernel,logger_format_depth,12),
- "[1,2,3,4,5,6,7,8,9,0,1|...]" =
- format(info,
- {"~p",[[1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0]]},
- #{},
- #{template=>Template}),
"[1,2,3,4,5,6,7,8,9,0,1,2|...]" =
format(info,
{"~p",[[1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0]]},
@@ -361,7 +355,7 @@ depth(_Config) ->
depth=>unlimited}),
ok.
depth(cleanup,_Config) ->
- application:unset_env(kernel,logger_format_depth),
+ application:unset_env(kernel,error_logger_format_depth),
ok.
chars_limit(_Config) ->
@@ -370,7 +364,7 @@ chars_limit(_Config) ->
lists:seq(1,100),
maps:from_list(lists:zip(lists:seq(1,100),
lists:duplicate(100,value)))]},
- Meta = #{time=>"2018-04-26 9:15:40.449879"},
+ Meta = #{time=>timestamp()},
Template = [time," - ", msg, "\n"],
FC = #{template=>Template,
depth=>unlimited,
@@ -382,7 +376,7 @@ chars_limit(_Config) ->
L1 = string:length(String1),
ct:log("String1: ~p~nLength1: ~p~n",[lists:flatten(String1),L1]),
true = L1 > CL1,
- true = L1 < CL1 + 10,
+ true = L1 < CL1 + 15,
String2 = format(info,FA,Meta,FC#{chars_limit=>CL1,depth=>10}),
L2 = string:length(String2),
@@ -394,13 +388,13 @@ chars_limit(_Config) ->
L3 = string:length(String3),
ct:log("String3: ~p~nLength3: ~p~n",[lists:flatten(String3),L3]),
true = L3 > CL3,
- true = L3 < CL3 + 10,
+ true = L3 < CL3 + 15,
String4 = format(info,FA,Meta,FC#{chars_limit=>CL3,depth=>10}),
L4 = string:length(String4),
ct:log("String4: ~p~nLength4: ~p~n",[lists:flatten(String4),L4]),
true = L4 > CL3,
- true = L4 < CL3 + 10,
+ true = L4 < CL3 + 15,
%% Test that max_size truncates the string which is limited by
%% depth and chars_limit
@@ -439,29 +433,58 @@ format_mfa(_Config) ->
ok.
format_time(_Config) ->
- Time1 = timestamp(),
- ExpectedTimestamp1 = default_time_format(Time1),
- String1 = format(info,{"~p",[term]},#{time=>Time1},#{}),
- ct:log(String1),
- " info: term\n" = string:prefix(String1,ExpectedTimestamp1),
-
- Time2 = timestamp(),
- ExpectedTimestamp2 = default_time_format(Time2,true),
- String2 = format(info,{"~p",[term]},#{time=>Time2},#{utc=>true}),
- ct:log(String2),
- " info: term\n" = string:prefix(String2,ExpectedTimestamp2),
-
- application:set_env(kernel,logger_utc,true),
- Time3 = timestamp(),
- ExpectedTimestamp3 = default_time_format(Time3,true),
- String3 = format(info,{"~p",[term]},#{time=>Time3},#{}),
- ct:log(String3),
- " info: term\n" = string:prefix(String3,ExpectedTimestamp3),
+ Time = timestamp(),
+ Meta = #{time=>Time},
+ FC = #{template=>[time]},
+ Msg = {string,""},
+ ExpectedLocal = default_time_format(Time,false),
+ ExpectedUtc = default_time_format(Time,true),
+
+ %% default - local time
+ ExpectedLocal = format(info,Msg,Meta,FC),
+
+ %% time_offset config parameter to formatter
+ ExpectedLocal = format(info,Msg,Meta,FC#{time_offset=>""}),
+ ExpectedUtc = format(info,Msg,Meta,FC#{time_offset=>"Z"}),
+
+ %% stdlib utc_log works when time_offset parameter is not set
+ application:set_env(stdlib,utc_log,true),
+ ExpectedUtc = format(info,Msg,Meta,FC),
+
+    %% sasl utc_log overrides stdlib utc_log
+ application:set_env(sasl,utc_log,false),
+ ExpectedLocal = format(info,Msg,Meta,FC),
+
+    %% sasl utc_log overrides stdlib utc_log
+ application:set_env(sasl,utc_log,true),
+ application:set_env(stdlib,utc_log,false),
+ ExpectedUtc = format(info,Msg,Meta,FC),
+
+ %% time_offset config parameter to formatter
+    %% overrides sasl and stdlib utc_log
+ application:set_env(sasl,utc_log,false),
+ ExpectedUtc = format(info,Msg,Meta,FC#{time_offset=>"Z"}),
+
+ %% time_offset config parameter to formatter
+    %% overrides sasl and stdlib utc_log
+ application:set_env(sasl,utc_log,true),
+ application:set_env(stdlib,utc_log,true),
+ ExpectedLocal = format(info,Msg,Meta,FC#{time_offset=>""}),
+
+ %% time_designator config parameter to formatter
+ ExpectedLocalS = default_time_format(Time,false,$\s),
+ ExpectedUtcS = default_time_format(Time,true,$\s),
+
+ ExpectedLocalS = format(info,Msg,Meta,FC#{time_offset=>"",
+ time_designator=>$\s}),
+ ExpectedUtcS = format(info,Msg,Meta,FC#{time_offset=>"Z",
+ time_designator=>$\s}),
ok.
format_time(cleanup,_Config) ->
- application:unset_env(kernel,logger_utc),
+ application:unset_env(sasl,utc_log),
+ application:unset_env(stdlib,utc_log),
ok.
level_or_msg_in_meta(_Config) ->
@@ -520,22 +543,17 @@ format(Log,Config) ->
default_time_format(Timestamp) ->
default_time_format(Timestamp,false).
-default_time_format(Timestamp0,Utc) when is_integer(Timestamp0) ->
+default_time_format(Timestamp,Utc) ->
+ default_time_format(Timestamp,Utc,$T).
+
+default_time_format(Timestamp0,Utc,Sep) ->
Timestamp=Timestamp0+erlang:time_offset(microsecond),
- %% calendar:system_time_to_rfc3339(Time,[{unit,microsecond}]).
- Micro = Timestamp rem 1000000,
- Sec = Timestamp div 1000000,
- UniversalTime = erlang:posixtime_to_universaltime(Sec),
- {Date,Time} =
- if Utc -> UniversalTime;
- true -> erlang:universaltime_to_localtime(UniversalTime)
- end,
- default_time_format(Date,Time,Micro).
-
-default_time_format({Y,M,D},{H,Min,S},Micro) ->
- lists:flatten(
- io_lib:format("~4w-~2..0w-~2..0w ~2w:~2..0w:~2..0w.~6..0w",
- [Y,M,D,H,Min,S,Micro])).
+ Offset = if Utc -> "Z";
+ true -> ""
+ end,
+ calendar:system_time_to_rfc3339(Timestamp,[{unit,microsecond},
+ {time_designator,Sep},
+ {offset,Offset}]).
integer(Str) ->
is_integer(list_to_integer(Str)).
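
The rewritten default_time_format/3 above delegates to calendar:system_time_to_rfc3339/2. Below is a small stand-alone sketch of that call, assuming an OTP build where the function is available; the module and function names are illustrative only.

    -module(rfc3339_ts_example).
    -export([fmt/2]).

    %% Utc selects the "Z" offset, as default_time_format/3 does; Sep is the
    %% RFC 3339 time designator, e.g. $T or $\s.
    fmt(Utc, Sep) ->
        T = erlang:system_time(microsecond),
        Offset = case Utc of
                     true  -> "Z";
                     false -> ""
                 end,
        calendar:system_time_to_rfc3339(T, [{unit, microsecond},
                                            {time_designator, Sep},
                                            {offset, Offset}]).
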
diff --git a/lib/kernel/test/logger_simple_SUITE.erl b/lib/kernel/test/logger_simple_SUITE.erl
index 5d8d32492d..0d505b14f5 100644
--- a/lib/kernel/test/logger_simple_SUITE.erl
+++ b/lib/kernel/test/logger_simple_SUITE.erl
@@ -25,6 +25,8 @@
-include_lib("kernel/include/logger.hrl").
-include_lib("kernel/src/logger_internal.hrl").
+-import(logger_test_lib, [setup/2, log/3, sync_and_read/3]).
+
-define(check_no_log,[] = test_server:messages_get()).
-define(check(Expected),
receive {log,Expected} ->
@@ -42,15 +44,15 @@
-define(keyval_rep,[{function,?FUNCTION_NAME}, {line,?LINE}]).
suite() ->
- [{timetrap,{seconds,30}}].
+ [{timetrap,{seconds,30}},
+ {ct_hooks, [logger_test_lib]}].
init_per_suite(Config) ->
#{handlers:=Hs0} = logger:i(),
Hs = lists:keydelete(cth_log_redirect,1,Hs0),
[ok = logger:remove_handler(Id) || {Id,_,_} <- Hs],
Env = [{App,Key,application:get_env(App,Key)} ||
- {App,Key} <- [{kernel,logger_dest},
- {kernel,logger_level}]],
+ {App,Key} <- [{kernel,logger_level}]],
[{env,Env},{logger,Hs}|Config].
end_per_suite(Config) ->
@@ -79,7 +81,7 @@ groups() ->
all() ->
[start_stop,
- get_buffer,
+ replace_default,
replace_file,
replace_disk_log
].
@@ -100,99 +102,46 @@ start_stop(_Config) ->
start_stop(cleanup,_Config) ->
logger:remove_handler(logger_simple).
-get_buffer(_Config) ->
- %% Start simple without buffer
- ok = logger:add_handler(logger_simple,logger_simple,
- #{filter_default=>log}),
- logger:emergency(?str),
- logger:alert(?str,[]),
- logger:error(?map_rep),
- logger:info(?keyval_rep),
- {ok,[]} = logger_simple:get_buffer(), % no buffer
- ok = logger:remove_handler(logger_simple),
+%% This testcase only checks that nothing crashes; the default handler
+%% prints to stdout, which we cannot read from in a detached slave.
+replace_default(Config) ->
- %% Start with buffer
- ok = logger:add_handler(logger_simple,logger_simple,
- #{filter_default=>log,
- logger_simple=>#{buffer=>true}}),
- logger:emergency(M1=?str),
- logger:alert(M2=?str,[]),
- logger:error(M3=?map_rep),
- logger:info(M4=?keyval_rep),
- logger:info(M41=?keyval_rep++[not_key_val]),
- error_logger:error_report(some_type,M5=?map_rep),
- error_logger:warning_report("some_type",M6=?map_rep),
- logger:critical(M7=?str,[A7=?keyval_rep]),
- logger:notice(M8=["fake",string,"line:",?LINE]),
- {ok,Buffered1} = logger_simple:get_buffer(),
- [#{level:=emergency,msg:={string,M1}},
- #{level:=alert,msg:={M2,[]}},
- #{level:=error,msg:={report,M3}},
- #{level:=info,msg:={report,M4}},
- #{level:=info,msg:={report,M41}},
- #{level:=error,msg:={report,#{label:={error_logger,error_report},
- report:=M5}}},
- #{level:=warning,msg:={report,#{label:={error_logger,warning_report},
- report:=M6}}},
- #{level:=critical,msg:={M7,[A7]}},
- #{level:=notice,msg:={string,M8}}] = Buffered1,
-
- %% Keep logging - should not buffer any more
- logger:emergency(?str),
- logger:alert(?str,[]),
- logger:error(?map_rep),
- logger:info(?keyval_rep),
- {ok,[]} = logger_simple:get_buffer(),
- ok = logger:remove_handler(logger_simple),
+ {ok, _, Node} = logger_test_lib:setup(Config, [{logger, [{handler, default, undefined}]}]),
+ log(Node, emergency, [M1=?str]),
+ log(Node, alert, [M2=?str,[]]),
+ log(Node, error, [M3=?map_rep]),
+ log(Node, info, [M4=?keyval_rep]),
+ log(Node, info, [M41=?keyval_rep++[not_key_val]]),
+ rpc:call(Node, error_logger, error_report, [some_type,M5=?map_rep]),
+ rpc:call(Node, error_logger, warning_report, ["some_type",M6=?map_rep]),
+ log(Node, critical, [M7=?str,[A7=?keyval_rep]]),
+ log(Node, notice, [M8=["fake",string,"line:",?LINE]]),
+
+ Env = rpc:call(Node, application, get_env, [kernel, logger, []]),
+ ok = rpc:call(Node, logger, add_handlers, [Env]),
- %% Fill buffer and drop
- ok = logger:add_handler(logger_simple,logger_simple,
- #{filter_default=>log,
- logger_simple=>#{buffer=>true}}),
- logger:emergency(M9=?str),
- M10=?str,
- [logger:info(M10) || _ <- lists:seq(1,8)],
- logger:error(M11=?str),
- logger:error(?str),
- logger:error(?str),
- {ok,Buffered3} = logger_simple:get_buffer(),
- 11 = length(Buffered3),
- [#{level:=emergency,msg:={string,M9}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=info,msg:={string,M10}},
- #{level:=error,msg:={string,M11}},
- #{level:=info,msg:={"Simple handler buffer full, dropped ~w messages",[2]}}]
- = Buffered3,
ok.
-get_buffer(cleanup,_Config) ->
- logger:remove_handler(logger_simple).
replace_file(Config) ->
- ok = logger:add_handler(logger_simple,logger_simple,
- #{filter_default=>log,
- logger_simple=>#{buffer=>true}}),
- logger:emergency(M1=?str),
- logger:alert(M2=?str,[]),
- logger:error(?map_rep),
- logger:info(?keyval_rep),
- undefined = whereis(?STANDARD_HANDLER),
- PrivDir = ?config(priv_dir,Config),
- File = filename:join(PrivDir,atom_to_list(?FUNCTION_NAME)++".log"),
-
- application:set_env(kernel,logger_dest,{file,File}),
- application:set_env(kernel,logger_level,info),
-
- ok = logger:setup_standard_handler(),
- true = is_pid(whereis(?STANDARD_HANDLER)),
- ok = logger_std_h:filesync(?STANDARD_HANDLER),
- {ok,Bin} = file:read_file(File),
- Lines = [unicode:characters_to_list(L) ||
+
+ {ok, _, Node} = logger_test_lib:setup(Config, [{logger, [{handler, default, undefined}]}]),
+ log(Node, emergency, [M1=?str]),
+ log(Node, alert, [M2=?str,[]]),
+ log(Node, error, [M3=?map_rep]),
+ log(Node, info, [M4=?keyval_rep]),
+ log(Node, info, [M41=?keyval_rep++[not_key_val]]),
+ log(Node, critical, [M7=?str,[A7=?keyval_rep]]),
+ log(Node, notice, [M8=["fake",string,"line:",?LINE]]),
+
+ File = filename:join(proplists:get_value(priv_dir,Config),
+ atom_to_list(?FUNCTION_NAME)++".log"),
+
+ ok = rpc:call(Node, logger, add_handlers,
+ [[{handler, default, logger_std_h,
+ #{ logger_std_h => #{ type => {file, File} }}}]]),
+
+ {ok,Bin} = sync_and_read(Node, file, File),
+ Lines = [unicode:characters_to_list(L) ||
L <- binary:split(Bin,<<"\n">>,[global,trim])],
["=EMERGENCY REPORT===="++_,
M1,
@@ -203,32 +152,38 @@ replace_file(Config) ->
_,
"=INFO REPORT===="++_,
_,
- _] = Lines,
+ _,
+ "=INFO REPORT===="++_,
+ _,
+ _,
+ _,
+ "=CRITICAL REPORT===="++_,
+ _,
+ _,
+ "=NOTICE REPORT===="++_,
+ _
+ ] = Lines,
ok.
-replace_file(cleanup,_Config) ->
- logger:remove_handler(?STANDARD_HANDLER),
- logger:remove_handler(logger_simple).
-
+
replace_disk_log(Config) ->
- ok = logger:add_handler(logger_simple,logger_simple,
- #{filter_default=>log,
- logger_simple=>#{buffer=>true}}),
- logger:emergency(M1=?str),
- logger:alert(M2=?str,[]),
- logger:error(?map_rep),
- logger:info(?keyval_rep),
- undefined = whereis(?STANDARD_HANDLER),
- PrivDir = ?config(priv_dir,Config),
- File = filename:join(PrivDir,atom_to_list(?FUNCTION_NAME)),
-
- application:set_env(kernel,logger_dest,{disk_log,File}),
- application:set_env(kernel,logger_level,info),
-
- ok = logger:setup_standard_handler(),
- true = is_pid(whereis(?STANDARD_HANDLER)),
- ok = logger_disk_log_h:disk_log_sync(?STANDARD_HANDLER),
- {ok,Bin} = file:read_file(File++".1"),
- Lines = [unicode:characters_to_list(L) ||
+
+ {ok, _, Node} = logger_test_lib:setup(Config, [{logger, [{handler, default, undefined}]}]),
+ log(Node, emergency, [M1=?str]),
+ log(Node, alert, [M2=?str,[]]),
+ log(Node, error, [M3=?map_rep]),
+ log(Node, info, [M4=?keyval_rep]),
+ log(Node, info, [M41=?keyval_rep++[not_key_val]]),
+ log(Node, critical, [M7=?str,[A7=?keyval_rep]]),
+ log(Node, notice, [M8=["fake",string,"line:",?LINE]]),
+
+ File = filename:join(proplists:get_value(priv_dir,Config),
+ atom_to_list(?FUNCTION_NAME)++".log"),
+
+ ok = rpc:call(Node, logger, add_handlers,
+ [[{handler, default, logger_disk_log_h,
+ #{ disk_log_opts => #{ file => File }}}]]),
+ {ok,Bin} = sync_and_read(Node, disk_log, File),
+ Lines = [unicode:characters_to_list(L) ||
L <- binary:split(Bin,<<"\n">>,[global,trim])],
["=EMERGENCY REPORT===="++_,
M1,
@@ -239,9 +194,15 @@ replace_disk_log(Config) ->
_,
"=INFO REPORT===="++_,
_,
- _|_] = Lines, % the tail might be an info report about opening the disk log
+ _,
+ "=INFO REPORT===="++_,
+ _,
+ _,
+ _,
+ "=CRITICAL REPORT===="++_,
+ _,
+ _,
+ "=NOTICE REPORT===="++_,
+ _
+ ] = Lines,
ok.
-replace_disk_log(cleanup,_Config) ->
- logger:remove_handler(?STANDARD_HANDLER),
- logger:remove_handler(logger_simple).
-
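
The replace_* cases above install the real handler on the remote node via rpc once the buffered simple handler is in place. Below is a rough local equivalent of what replace_file/1 does; it is not part of the patch, the log path is illustrative, and the config shape mirrors the test code.

    -module(replace_file_example).
    -export([add_file_handler/0]).

    add_file_handler() ->
        Log = "/tmp/replace_file_example.log",
        ok = logger:add_handlers(
               [{handler, default, logger_std_h,
                 #{logger_std_h => #{type => {file, Log}}}}]),
        logger_std_h:filesync(default).
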
diff --git a/lib/kernel/test/logger_std_h_SUITE.erl b/lib/kernel/test/logger_std_h_SUITE.erl
index 7c8d63cbbd..fc59d393e0 100644
--- a/lib/kernel/test/logger_std_h_SUITE.erl
+++ b/lib/kernel/test/logger_std_h_SUITE.erl
@@ -50,11 +50,12 @@
end).
suite() ->
- [{timetrap,{seconds,30}}].
+ [{timetrap,{seconds,30}},
+ {ct_hooks,[logger_test_lib]}].
init_per_suite(Config) ->
timer:start(), % to avoid progress report
- {ok,{?STANDARD_HANDLER,#{formatter:=OrigFormatter}}} =
+ {ok,{logger_std_h,#{formatter:=OrigFormatter}}} =
logger:get_handler_config(?STANDARD_HANDLER),
[{formatter,OrigFormatter}|Config].
@@ -241,7 +242,8 @@ formatter_fail(Config) ->
filters=>?DEFAULT_HANDLER_FILTERS([?MODULE])}),
Pid = whereis(?MODULE),
true = is_pid(Pid),
- {ok,#{handlers:=H}} = logger:get_logger_config(),
+ #{handlers:=HC1} = logger:i(),
+ H = [Id || {Id,_,_} <- HC1],
true = lists:member(?MODULE,H),
%% Formatter is added automatically
@@ -270,7 +272,8 @@ formatter_fail(Config) ->
%% Check that handler is still alive and was never dead
Pid = whereis(?MODULE),
- {ok,#{handlers:=H}} = logger:get_logger_config(),
+ #{handlers:=HC2} = logger:i(),
+ H = [Id || {Id,_,_} <- HC2],
ok.
@@ -289,10 +292,17 @@ config_fail(_Config) ->
#{logger_std_h => #{restart_type => bad},
filter_default=>log,
formatter=>{?MODULE,self()}}),
- {error,{handler_not_added,{invalid_levels,{42,42,_}}}} =
+ {error,{handler_not_added,{invalid_levels,{_,1,_}}}} =
logger:add_handler(?MODULE,logger_std_h,
- #{logger_std_h => #{toggle_sync_qlen=>42,
+ #{logger_std_h => #{drop_new_reqs_qlen=>1}}),
+ {error,{handler_not_added,{invalid_levels,{43,42,_}}}} =
+ logger:add_handler(?MODULE,logger_std_h,
+ #{logger_std_h => #{toggle_sync_qlen=>43,
drop_new_reqs_qlen=>42}}),
+ {error,{handler_not_added,{invalid_levels,{_,43,42}}}} =
+ logger:add_handler(?MODULE,logger_std_h,
+ #{logger_std_h => #{drop_new_reqs_qlen=>43,
+ flush_reqs_qlen=>42}}),
ok = logger:add_handler(?MODULE,logger_std_h,
#{filter_default=>log,
@@ -315,29 +325,32 @@ config_fail(cleanup,_Config) ->
logger:remove_handler(?MODULE).
crash_std_h_to_file(Config) ->
- crash_std_h(Config,?FUNCTION_NAME,logger_dest,file).
+ Dir = ?config(priv_dir,Config),
+ Log = filename:join(Dir,lists:concat([?MODULE,"_",?FUNCTION_NAME,".log"])),
+ crash_std_h(Config,?FUNCTION_NAME,
+ [{handler,default,logger_std_h,
+ #{ logger_std_h => #{ type => {file, Log} }}}],
+ file, Log).
crash_std_h_to_file(cleanup,_Config) ->
crash_std_h(cleanup).
crash_std_h_to_disk_log(Config) ->
- crash_std_h(Config,?FUNCTION_NAME,logger_dest,disk_log).
+ Dir = ?config(priv_dir,Config),
+ Log = filename:join(Dir,lists:concat([?MODULE,"_",?FUNCTION_NAME,".log"])),
+ crash_std_h(Config,?FUNCTION_NAME,
+ [{handler,default,logger_disk_log_h,
+ #{ disk_log_opts => #{ file => Log }}}],
+ disk_log,Log).
crash_std_h_to_disk_log(cleanup,_Config) ->
crash_std_h(cleanup).
-crash_std_h(Config,Func,Var,Type) ->
+crash_std_h(Config,Func,Var,Type,Log) ->
Dir = ?config(priv_dir,Config),
- File = lists:concat([?MODULE,"_",Func,".log"]),
- Log = filename:join(Dir,File),
+ SysConfig = filename:join(Dir,lists:concat([?MODULE,"_",Func,".config"])),
+ ok = file:write_file(SysConfig, io_lib:format("[{kernel,[{logger,~p}]}].",[Var])),
Pa = filename:dirname(code:which(?MODULE)),
- TypeAndLog =
- case os:type() of
- {win32,_} ->
- lists:concat([" {",Type,",\\\"",Log,"\\\"}"]);
- _ ->
- lists:concat([" \'{",Type,",\"",Log,"\"}\'"])
- end,
- Args = lists:concat([" -kernel ",Var,TypeAndLog," -pa ",Pa]),
Name = lists:concat([?MODULE,"_",Func]),
+ Args = lists:concat([" -config ",filename:rootname(SysConfig)," -pa ",Pa]),
ct:pal("Starting ~p with ~tp", [Name,Args]),
%% Start a node which prints kernel logs to the destination specified by Type
{ok,Node} = test_server:start_node(Name, peer, [{args, Args}]),
@@ -578,7 +591,7 @@ write_failure(Config) ->
Dir = ?config(priv_dir, Config),
File = lists:concat([?MODULE,"_",?FUNCTION_NAME,".log"]),
Log = filename:join(Dir, File),
- Node = start_std_h_on_new_node(Config, ?FUNCTION_NAME, Log),
+ Node = start_std_h_on_new_node(Config, Log),
false = (undefined == rpc:call(Node, ets, whereis, [?TEST_HOOKS_TAB])),
rpc:call(Node, ets, insert, [?TEST_HOOKS_TAB,{tester,self()}]),
rpc:call(Node, ?MODULE, set_internal_log, [?MODULE,internal_log]),
@@ -615,7 +628,7 @@ sync_failure(Config) ->
Dir = ?config(priv_dir, Config),
File = lists:concat([?MODULE,"_",?FUNCTION_NAME,".log"]),
Log = filename:join(Dir, File),
- Node = start_std_h_on_new_node(Config, ?FUNCTION_NAME, Log),
+ Node = start_std_h_on_new_node(Config, Log),
false = (undefined == rpc:call(Node, ets, whereis, [?TEST_HOOKS_TAB])),
rpc:call(Node, ets, insert, [?TEST_HOOKS_TAB,{tester,self()}]),
rpc:call(Node, ?MODULE, set_internal_log, [?MODULE,internal_log]),
@@ -651,21 +664,12 @@ sync_failure(cleanup, _Config) ->
Nodes = nodes(),
[test_server:stop_node(Node) || Node <- Nodes].
-start_std_h_on_new_node(_Config, Func, Log) ->
- Pa = filename:dirname(code:which(?MODULE)),
- Dest =
- case os:type() of
- {win32,_} ->
- lists:concat([" {file,\\\"",Log,"\\\"}"]);
- _ ->
- lists:concat([" \'{file,\"",Log,"\"}\'"])
- end,
- Args = lists:concat([" -kernel ",logger_dest,Dest," -pa ",Pa]),
- Name = lists:concat([?MODULE,"_",Func]),
- ct:pal("Starting ~s with ~tp", [Name,Args]),
- {ok,Node} = test_server:start_node(Name, peer, [{args, Args}]),
- Pid = rpc:call(Node,erlang,whereis,[?STANDARD_HANDLER]),
- true = is_pid(Pid),
+start_std_h_on_new_node(Config, Log) ->
+ {ok,_,Node} =
+ logger_test_lib:setup(
+ Config,
+ [{logger,[{handler,default,logger_std_h,
+ #{ logger_std_h => #{ type => {file,Log}}}}]}]),
ok = rpc:call(Node,logger,set_handler_config,[?STANDARD_HANDLER,formatter,
{?MODULE,nl}]),
Node.
@@ -691,16 +695,17 @@ internal_log(Type, Term) ->
op_switch_to_sync_file(Config) ->
{Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ NumOfReqs = 500,
NewHConfig =
- HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 3,
- drop_new_reqs_qlen => 501,
- flush_reqs_qlen => 2000,
+ HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 2,
+ drop_new_reqs_qlen => NumOfReqs+1,
+ flush_reqs_qlen => 2*NumOfReqs,
enable_burst_limit => false}},
ok = logger:set_handler_config(?MODULE, NewHConfig),
%% TRecvPid = start_op_trace(),
- NumOfReqs = 500,
send_burst({n,NumOfReqs}, seq, {chars,79}, info),
- NumOfReqs = count_lines(Log),
+ Lines = count_lines(Log),
+ ok = file:delete(Log),
%% true = analyse_trace(TRecvPid,
%% fun(Events) -> find_mode(async,Events) end),
%% true = analyse_trace(TRecvPid,
@@ -711,68 +716,82 @@ op_switch_to_sync_file(Config) ->
%% fun(Events) -> find_mode(drop,Events) end),
%% false = analyse_trace(TRecvPid,
%% fun(Events) -> find_mode(flush,Events) end),
- ok = file:delete(Log),
%% stop_op_trace(TRecvPid),
+ NumOfReqs = Lines,
ok.
op_switch_to_sync_file(cleanup, _Config) ->
ok = stop_handler(?MODULE).
op_switch_to_sync_tty(Config) ->
{HConfig,StdHConfig} = start_handler(?MODULE, standard_io, Config),
+ NumOfReqs = 500,
NewHConfig =
HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 3,
- drop_new_reqs_qlen => 501,
- flush_reqs_qlen => 2000,
+ drop_new_reqs_qlen => NumOfReqs+1,
+ flush_reqs_qlen => 2*NumOfReqs,
enable_burst_limit => false}},
ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 500,
send_burst({n,NumOfReqs}, seq, {chars,79}, info),
ok.
op_switch_to_sync_tty(cleanup, _Config) ->
ok = stop_handler(?MODULE).
+op_switch_to_drop_file() ->
+ [{timetrap,{seconds,180}}].
op_switch_to_drop_file(Config) ->
- {Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
-
- NewHConfig =
- HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 2,
- drop_new_reqs_qlen => 3,
- flush_reqs_qlen => 600,
+ Test =
+ fun() ->
+ {Log,HConfig,StdHConfig} =
+ start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ NumOfReqs = 300,
+ Procs = 2,
+ Bursts = 10,
+ NewHConfig =
+ HConfig#{logger_std_h =>
+ StdHConfig#{toggle_sync_qlen => 1,
+ drop_new_reqs_qlen => 2,
+ flush_reqs_qlen =>
+ Procs*NumOfReqs*Bursts,
enable_burst_limit => false}},
- ok = logger:set_handler_config(?MODULE, NewHConfig),
- %% TRecvPid = start_op_trace(),
- NumOfReqs = 500,
- send_burst({n,NumOfReqs}, seq, {chars,79}, info),
- Logged = count_lines(Log),
- ct:pal("Number of messages dropped = ~w (~w)",
- [NumOfReqs-Logged,NumOfReqs]),
- true = (Logged < NumOfReqs),
- %% true = analyse_trace(TRecvPid,
- %% fun(Events) -> find_mode(async,Events) end),
- %% true = analyse_trace(TRecvPid,
- %% fun(Events) -> find_mode(drop,Events) end),
- %% false = analyse_trace(TRecvPid,
- %% fun(Events) -> find_mode(flush,Events) end),
- %% true = analyse_trace(TRecvPid,
- %% fun(Events) -> find_switch(async,drop,Events)
- %% orelse find_switch(sync,drop,Events)
- %% end),
- ok = file:delete(Log),
- %% stop_op_trace(TRecvPid),
- ok.
+ ok = logger:set_handler_config(?MODULE, NewHConfig),
+              %% It sometimes happens that the handler gets the requests
+              %% at a slow enough pace that dropping never occurs.
+              %% Let's therefore generate a number of bursts to increase
+              %% the chance of message buildup.
+ [send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info) ||
+ _ <- lists:seq(1, Bursts)],
+ Logged = count_lines(Log),
+ ok = stop_handler(?MODULE),
+ _ = file:delete(Log),
+ ct:pal("Number of messages dropped = ~w (~w)",
+ [Procs*NumOfReqs*Bursts-Logged,Procs*NumOfReqs*Bursts]),
+ true = (Logged < (Procs*NumOfReqs*Bursts)),
+ true = (Logged > 0),
+ ok
+ end,
+    %% As it's tricky to get the timing right in a single run, we repeat
+    %% the test a number of times, hoping that one attempt will succeed.
+ case repeat_until_ok(Test, 10) of
+ {ok,{Failures,_Result}} ->
+ ct:log("Failed ~w times before success!", [Failures]);
+ {fails,Reason} ->
+ ct:fail(Reason)
+ end.
op_switch_to_drop_file(cleanup, _Config) ->
- ok = stop_handler(?MODULE).
+ _ = stop_handler(?MODULE).
op_switch_to_drop_tty(Config) ->
{HConfig,StdHConfig} = start_handler(?MODULE, standard_io, Config),
+ NumOfReqs = 300,
+ Procs = 2,
NewHConfig =
- HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 2,
- drop_new_reqs_qlen => 3,
- flush_reqs_qlen => 600,
+ HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 1,
+ drop_new_reqs_qlen => 2,
+ flush_reqs_qlen =>
+ Procs*NumOfReqs+1,
enable_burst_limit => false}},
ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 500,
- send_burst({n,NumOfReqs}, seq, {chars,79}, info),
+ send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info),
ok.
op_switch_to_drop_tty(cleanup, _Config) ->
ok = stop_handler(?MODULE).
@@ -780,32 +799,54 @@ op_switch_to_drop_tty(cleanup, _Config) ->
op_switch_to_flush_file() ->
[{timetrap,{minutes,3}}].
op_switch_to_flush_file(Config) ->
- {Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
-
- %% it's important that both async and sync requests have been queued
- %% when the flush happens (verify with coverage of flush_log_requests/2)
-
- NewHConfig =
- HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 2,
- drop_new_reqs_qlen => 99,
- flush_reqs_qlen => 100,
+ Test =
+ fun() ->
+ {Log,HConfig,StdHConfig} =
+ start_handler(?MODULE, ?FUNCTION_NAME, Config),
+
+ %% NOTE: it's important that both async and sync
+ %% requests have been queued when the flush happens
+ %% (verify with coverage of flush_log_requests/2)
+
+ NewHConfig =
+ HConfig#{logger_std_h =>
+ StdHConfig#{toggle_sync_qlen => 2,
+ %% disable drop mode
+ drop_new_reqs_qlen => 300,
+ flush_reqs_qlen => 300,
enable_burst_limit => false}},
- ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 10000,
- Procs = 100,
- send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info),
- Logged = count_lines(Log),
- ct:pal("Number of messages flushed/dropped = ~w (~w)",
- [(NumOfReqs*Procs)-Logged,NumOfReqs*Procs]),
- true = (Logged < (NumOfReqs*Procs)),
-
- %%! --- Thu Apr 12 13:46:00 2018 --- peppe was here!
- %%! TODO: Verify that handler has switched to flush mode
-
- ok = file:delete(Log),
- ok.
+ ok = logger:set_handler_config(?MODULE, NewHConfig),
+ NumOfReqs = 1500,
+ Procs = 10,
+ Bursts = 10,
+              %% It sometimes happens that the handler either gets the
+              %% requests at a slow enough pace that flushing never
+              %% occurs, or gets all messages at once, causing all of
+              %% them to be flushed (so that dropping of sync messages
+              %% is never tested). Let's therefore generate a number of
+              %% bursts to increase the chance of message buildup in
+              %% some random fashion.
+ [send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info) ||
+ _ <- lists:seq(1,Bursts)],
+ Logged = count_lines(Log),
+ ok = stop_handler(?MODULE),
+ _ = file:delete(Log),
+ ct:pal("Number of messages flushed/dropped = ~w (~w)",
+ [NumOfReqs*Procs*Bursts-Logged,NumOfReqs*Procs*Bursts]),
+ true = (Logged < (NumOfReqs*Procs*Bursts)),
+ true = (Logged > 0),
+ ok
+ end,
+    %% As it's tricky to get the timing right in a single run, we repeat
+    %% the test a number of times, hoping that one attempt will succeed.
+ case repeat_until_ok(Test, 10) of
+ {ok,{Failures,_Result}} ->
+ ct:log("Failed ~w times before success!", [Failures]);
+ {fails,Reason} ->
+ ct:fail(Reason)
+ end.
op_switch_to_flush_file(cleanup, _Config) ->
- ok = stop_handler(?MODULE).
+ _ = stop_handler(?MODULE).
op_switch_to_flush_tty(Config) ->
{HConfig,StdHConfig} = start_handler(?MODULE, standard_io, Config),
@@ -815,12 +856,13 @@ op_switch_to_flush_tty(Config) ->
NewHConfig =
HConfig#{logger_std_h => StdHConfig#{toggle_sync_qlen => 2,
- drop_new_reqs_qlen => 99,
+ %% disable drop mode
+ drop_new_reqs_qlen => 100,
flush_reqs_qlen => 100,
enable_burst_limit => false}},
ok = logger:set_handler_config(?MODULE, NewHConfig),
- NumOfReqs = 10000,
- Procs = 10,
+ NumOfReqs = 1000,
+ Procs = 100,
send_burst({n,NumOfReqs}, {spawn,Procs,0}, {chars,79}, info),
ok.
op_switch_to_flush_tty(cleanup, _Config) ->
@@ -904,10 +946,10 @@ kill_disabled(cleanup, _Config) ->
ok = stop_handler(?MODULE).
qlen_kill_new(Config) ->
- {Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ {_Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
Pid0 = whereis(?MODULE),
{_,Mem0} = process_info(Pid0, memory),
- RestartAfter = 2000,
+ RestartAfter = ?HANDLER_RESTART_AFTER,
NewHConfig =
HConfig#{logger_std_h=>StdHConfig#{enable_kill_overloaded=>true,
handler_overloaded_qlen=>10,
@@ -927,7 +969,7 @@ qlen_kill_new(Config) ->
killed ->
ct:pal("Slow shutdown, handler process was killed!", [])
end,
- timer:sleep(RestartAfter + 1000),
+ timer:sleep(RestartAfter + 2000),
true = is_pid(whereis(?MODULE)),
ok
after
@@ -941,7 +983,7 @@ qlen_kill_new(cleanup, _Config) ->
%% choke the standard handler on remote node to verify the termination
%% works as expected
-qlen_kill_std(Config) ->
+qlen_kill_std(_Config) ->
%%! HERE
%% Dir = ?config(priv_dir, Config),
%% File = lists:concat([?MODULE,"_",?FUNCTION_NAME,".log"]),
@@ -955,10 +997,10 @@ qlen_kill_std(Config) ->
{skip,"Not done yet"}.
mem_kill_new(Config) ->
- {Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
+ {_Log,HConfig,StdHConfig} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
Pid0 = whereis(?MODULE),
{_,Mem0} = process_info(Pid0, memory),
- RestartAfter = 2000,
+ RestartAfter = ?HANDLER_RESTART_AFTER,
NewHConfig =
HConfig#{logger_std_h=>StdHConfig#{enable_kill_overloaded=>true,
handler_overloaded_qlen=>50000,
@@ -978,7 +1020,7 @@ mem_kill_new(Config) ->
killed ->
ct:pal("Slow shutdown, handler process was killed!", [])
end,
- timer:sleep(RestartAfter * 2),
+ timer:sleep(RestartAfter + 2000),
true = is_pid(whereis(?MODULE)),
ok
after
@@ -992,7 +1034,7 @@ mem_kill_new(cleanup, _Config) ->
%% choke the standard handler on remote node to verify the termination
%% works as expected
-mem_kill_std(Config) ->
+mem_kill_std(_Config) ->
{skip,"Not done yet"}.
restart_after(Config) ->
@@ -1016,7 +1058,7 @@ restart_after(Config) ->
end,
{Log,_,_} = start_handler(?MODULE, ?FUNCTION_NAME, Config),
- RestartAfter = 2000,
+ RestartAfter = ?HANDLER_RESTART_AFTER,
NewHConfig2 =
HConfig#{logger_std_h=>StdHConfig#{enable_kill_overloaded=>true,
handler_overloaded_qlen=>10,
@@ -1028,7 +1070,7 @@ restart_after(Config) ->
send_burst({n,100}, {spawn,2,0}, {chars,79}, info),
receive
{'DOWN', MRef2, _, _, _Info2} ->
- timer:sleep(RestartAfter + 1000),
+ timer:sleep(RestartAfter + 2000),
Pid1 = whereis(?MODULE),
true = is_pid(Pid1),
false = (Pid1 == Pid0),
@@ -1074,7 +1116,7 @@ handler_requests_under_load(Config) ->
NoOfReqs = lists:foldl(fun({_,Res}, N) -> N + length(Res) end, 0, ReqResult),
ct:pal("~w requests made. Errors: ~n~p", [NoOfReqs,Errors]),
ok = file:delete(Log).
-handler_requests_under_load(cleanup, Config) ->
+handler_requests_under_load(cleanup, _Config) ->
ok = stop_handler(?MODULE).
send_requests(HName, TO, Reqs = [{Req,Res}|Rs]) ->
@@ -1126,8 +1168,9 @@ start_handler(Name, FuncName, Config) ->
{Log,HConfig,StdHConfig}.
stop_handler(Name) ->
- ok = logger:remove_handler(Name),
- ct:pal("Handler ~p stopped!", [Name]).
+ R = logger:remove_handler(Name),
+ ct:pal("Handler ~p stopped! Result: ~p", [Name,R]),
+ R.
count_lines(File) ->
wait_until_written(File, -1),
@@ -1299,6 +1342,30 @@ try_match_file(_,Pattern,_,Incorrect) ->
[Pattern,Incorrect]),
erlang:error({error,not_matching_pattern,Pattern,Incorrect}).
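+%% repeat_until_ok(Fun, N) runs Fun at most N times, sleeping 5 seconds
+%% before each retry. It returns {ok,{NoOfFailedAttempts,Result}} for the
+%% first call that does not raise an exception, and {fails,FirstReason}
+%% if all N attempts fail.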
+repeat_until_ok(Fun, N) ->
+ repeat_until_ok(Fun, 0, N, undefined).
+
+repeat_until_ok(_Fun, Stop, Stop, Reason) ->
+ {fails,Reason};
+
+repeat_until_ok(Fun, C, Stop, FirstReason) ->
+ if C > 0 -> timer:sleep(5000);
+ true -> ok
+ end,
+ try Fun() of
+ Result ->
+ {ok,{C,Result}}
+ catch
+ _:Reason:Stack ->
+ ct:pal("Test fails: ~p (~p)~n", [Reason,hd(Stack)]),
+ if FirstReason == undefined ->
+ repeat_until_ok(Fun, C+1, Stop, {Reason,Stack});
+ true ->
+ repeat_until_ok(Fun, C+1, Stop, FirstReason)
+ end
+ end.
+
+
%%%-----------------------------------------------------------------
%%%
start_op_trace() ->
@@ -1339,17 +1406,17 @@ find_mode(flush, Events) ->
find_mode(Mode, Events) ->
lists:keymember([{mode,Mode}], 3, Events).
-find_switch(From, To, Events) ->
- try lists:foldl(fun({trace_return,check_load,{To,_,_,_}},
- {trace_call,check_load,[#{mode := From}]}) ->
- throw(match);
- (Event, _) ->
- Event
- end, undefined, Events) of
- _ -> false
- catch
- throw:match -> true
- end.
+%% find_switch(_From, To, Events) ->
+%% try lists:foldl(fun({trace_return,check_load,{To,_,_,_}},
+%% {trace_call,check_load,[#{mode := From}]}) ->
+%% throw(match);
+%% (Event, _) ->
+%% Event
+%% end, undefined, Events) of
+%% _ -> false
+%% catch
+%% throw:match -> true
+%% end.
analyse_trace(TRecvPid, TestFun) ->
TRecvPid ! {test,self(),TestFun},
@@ -1411,7 +1478,7 @@ tpl([{M,F,A}|Trace]) ->
tpl([]) ->
ok.
-tracer({trace,_,call,{logger_std_h,handle_cast,[{Op,_}|_]}},
+tracer({trace,_,call,{logger_std_h,handle_cast,[Op|_]}},
{Pid,[{Mod,Func,Op}|Expected]}) ->
maybe_tracer_done(Pid,Expected,{Mod,Func,Op});
tracer({trace,_,call,{Mod=logger_std_h,Func=write_to_dev,[_,Data,_,_,_]}},
diff --git a/lib/kernel/test/logger_test_lib.erl b/lib/kernel/test/logger_test_lib.erl
new file mode 100644
index 0000000000..4ac05e6480
--- /dev/null
+++ b/lib/kernel/test/logger_test_lib.erl
@@ -0,0 +1,82 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2018. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(logger_test_lib).
+
+-include_lib("kernel/src/logger_internal.hrl").
+
+-export([setup/2, log/3, sync_and_read/3]).
+
+-export([init/2,
+ pre_init_per_suite/3, pre_init_per_testcase/4,
+ post_end_per_testcase/5, post_end_per_suite/3]).
+
+setup(Config,Vars) ->
+ FuncStr = lists:concat([proplists:get_value(suite, Config), "_",
+ proplists:get_value(tc, Config)]),
+ ConfigFileName = filename:join(proplists:get_value(priv_dir, Config), FuncStr),
+ file:write_file(ConfigFileName ++ ".config", io_lib:format("[{kernel, ~p}].",[Vars])),
+ case test_server:start_node(proplists:get_value(tc, Config), slave,
+ [{args, ["-pa ",filename:dirname(code:which(?MODULE)),
+ " -boot start_sasl -kernel start_timer true "
+ "-config ",ConfigFileName]}]) of
+ {ok, Node} ->
+ L = rpc:call(Node, logger, i, []),
+ ct:log("~p",[L]),
+ {ok, L, Node};
+ {error, Reason} ->
+ ct:log("Failed to start node: ~p",[Reason]),
+ error
+ end.
+
+log(Node, F, A) ->
+ log(Node, logger, F, A).
+log(Node, M, F, A) ->
+ MD = #{ gl => rpc:call(Node, erlang, whereis, [logger]) },
+ rpc:call(Node, M, F, A ++ [MD]).
+
+sync_and_read(Node, disk_log, Log) ->
+ rpc:call(Node,logger_disk_log_h,disk_log_sync,[?STANDARD_HANDLER]),
+ file:read_file(Log ++ ".1");
+sync_and_read(Node, file, Log) ->
+ ok = rpc:call(Node,logger_std_h,filesync,[?STANDARD_HANDLER]),
+ file:read_file(Log).
+
+
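A minimal usage sketch for this helper module, assuming Vars holds whatever Kernel logger variables the test needs and that the peer node's default handler writes to LogFile (Vars and LogFile are placeholders):

    %% Config is the common_test Config list; the hook callbacks below add
    %% the suite/tc entries that setup/2 reads.
    {ok, _LoggerConfig, Node} = logger_test_lib:setup(Config, Vars),
    ok = logger_test_lib:log(Node, info, ["hello world"]),
    {ok, Bin} = logger_test_lib:sync_and_read(Node, file, LogFile).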
+init(_, _) ->
+ {ok, []}.
+
+pre_init_per_suite(_Suite, Config, State) ->
+ {[{nodes, nodes()} | Config], State}.
+
+pre_init_per_testcase(Suite, TC, Config, State) ->
+ cleanup(Config),
+ {[{suite, Suite}, {tc, TC} | Config], State}.
+
+post_end_per_testcase(_, _TC, Config, Res, State) ->
+ cleanup(Config),
+ {Res, State}.
+
+post_end_per_suite(_, Config, State) ->
+ cleanup(Config),
+ {Config, State}.
+
+cleanup(Config) ->
+ [test_server:stop_node(N) || N <- nodes(),
+ not lists:member(N, proplists:get_value(nodes, Config))].
diff --git a/lib/kernel/test/os_SUITE.erl b/lib/kernel/test/os_SUITE.erl
index 591fbb2125..abbc301360 100644
--- a/lib/kernel/test/os_SUITE.erl
+++ b/lib/kernel/test/os_SUITE.erl
@@ -227,8 +227,8 @@ find_executable(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
%% Smoke test.
- case lib:progname() of
- erl ->
+ case ct:get_progname() of
+ "erl" ->
ErlPath = os:find_executable("erl"),
true = is_list(ErlPath),
true = filelib:is_regular(ErlPath);
@@ -388,7 +388,7 @@ comp(Expected, Got) ->
ct:fail(failed)
end.
-%% Like lib:nonl/1, but strips \r as well as \n.
+%% Strips \n and \r\n from the end of the string.
strip_nl([$\r, $\n]) -> [];
strip_nl([$\n]) -> [];
diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl
index 0678b64134..718ef91942 100644
--- a/lib/observer/src/observer_lib.erl
+++ b/lib/observer/src/observer_lib.erl
@@ -682,7 +682,7 @@ parse_string(Str) ->
{error, {_SLine, SMod, SError}, _} ->
throw(io_lib:format("~ts", [SMod:format_error(SError)]))
end,
- case lib:extended_parse_term(Tokens) of
+ case erl_eval:extended_parse_term(Tokens) of
{error, {_PLine, PMod, PError}} ->
throw(io_lib:format("~ts", [PMod:format_error(PError)]));
Res -> Res
diff --git a/lib/parsetools/src/yecc.erl b/lib/parsetools/src/yecc.erl
index b4e1cfe5e3..ce1b9468fd 100644
--- a/lib/parsetools/src/yecc.erl
+++ b/lib/parsetools/src/yecc.erl
@@ -455,10 +455,14 @@ os_process_size() ->
case os:type() of
{unix, sunos} ->
Size = os:cmd("ps -o vsz -p " ++ os:getpid() ++ " | tail -1"),
- list_to_integer(lib:nonl(Size));
+ list_to_integer(nonl(Size));
_ ->
0
- end.
+ end.
+
+nonl([$\n]) -> [];
+nonl([]) -> [];
+nonl([H|T]) -> [H|nonl(T)].
timeit(Name, Fun, St0) ->
Time = runtime,
diff --git a/lib/public_key/src/pubkey_pem.erl b/lib/public_key/src/pubkey_pem.erl
index 06a4455b3f..bacc9ec600 100644
--- a/lib/public_key/src/pubkey_pem.erl
+++ b/lib/public_key/src/pubkey_pem.erl
@@ -209,6 +209,8 @@ pem_start('DSAPrivateKey') ->
<<"-----BEGIN DSA PRIVATE KEY-----">>;
pem_start('DHParameter') ->
<<"-----BEGIN DH PARAMETERS-----">>;
+pem_start('PrivateKeyInfo') ->
+ <<"-----BEGIN PRIVATE KEY-----">>;
pem_start('EncryptedPrivateKeyInfo') ->
<<"-----BEGIN ENCRYPTED PRIVATE KEY-----">>;
pem_start('CertificationRequest') ->
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 931901640a..1c4acc9e1a 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -237,7 +237,7 @@ der_decode(Asn1Type, Der) when (Asn1Type == 'PrivateKeyInfo') or
andalso is_binary(Der) ->
try
{ok, Decoded} = 'PKCS-FRAME':decode(Asn1Type, Der),
- Decoded
+ der_priv_key_decode(Decoded)
catch
error:{badmatch, {error, _}} = Error ->
erlang:error(Error)
@@ -252,12 +252,45 @@ der_decode(Asn1Type, Der) when is_atom(Asn1Type), is_binary(Der) ->
erlang:error(Error)
end.
+der_priv_key_decode({'PrivateKeyInfo', v1,
+ {'PrivateKeyInfo_privateKeyAlgorithm', ?'id-ecPublicKey', {asn1_OPENTYPE, Parameters}}, PrivKey, _}) ->
+ EcPrivKey = der_decode('ECPrivateKey', PrivKey),
+ EcPrivKey#'ECPrivateKey'{parameters = der_decode('EcpkParameters', Parameters)};
+der_priv_key_decode({'PrivateKeyInfo', v1,
+ {'PrivateKeyInfo_privateKeyAlgorithm', ?'rsaEncryption', _}, PrivKey, _}) ->
+ der_decode('RSAPrivateKey', PrivKey);
+der_priv_key_decode({'PrivateKeyInfo', v1,
+ {'PrivateKeyInfo_privateKeyAlgorithm', ?'id-dsa', {asn1_OPENTYPE, Parameters}}, PrivKey, _}) ->
+ {params, #'Dss-Parms'{p=P, q=Q, g=G}} = der_decode('DSAParams', Parameters),
+ X = der_decode('Prime-p', PrivKey),
+ #'DSAPrivateKey'{p=P, q=Q, g=G, x=X};
+der_priv_key_decode(PKCS8Key) ->
+ PKCS8Key.
+
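A minimal sketch of what the new decode path yields for an unencrypted PKCS#8 entry, mirroring the rsa_priv_pkcs8 test case further down (the file name is a placeholder):

    {ok, Pem} = file:read_file("rsa_key_pkcs8.pem"),
    [{'PrivateKeyInfo', Der, not_encrypted} = Entry] = public_key:pem_decode(Pem),
    #'RSAPrivateKey'{} = public_key:der_decode('PrivateKeyInfo', Der),
    #'RSAPrivateKey'{} = public_key:pem_entry_decode(Entry).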
%%--------------------------------------------------------------------
-spec der_encode(asn1_type(), term()) -> Der::binary().
%%
%% Description: Encodes a public key entity with asn1 DER encoding.
%%--------------------------------------------------------------------
-der_encode(Asn1Type, Entity) when (Asn1Type == 'PrivateKeyInfo') or
+
+der_encode('PrivateKeyInfo', #'DSAPrivateKey'{p=P, q=Q, g=G, x=X}) ->
+ der_encode('PrivateKeyInfo',
+ {'PrivateKeyInfo', v1,
+ {'PrivateKeyInfo_privateKeyAlgorithm', ?'id-dsa',
+ {asn1_OPENTYPE, der_encode('Dss-Parms', #'Dss-Parms'{p=P, q=Q, g=G})}},
+ der_encode('Prime-p', X), asn1_NOVALUE});
+der_encode('PrivateKeyInfo', #'RSAPrivateKey'{} = PrivKey) ->
+ der_encode('PrivateKeyInfo',
+ {'PrivateKeyInfo', v1,
+ {'PrivateKeyInfo_privateKeyAlgorithm', ?'rsaEncryption', {asn1_OPENTYPE, ?DER_NULL}},
+ der_encode('RSAPrivateKey', PrivKey), asn1_NOVALUE});
+der_encode('PrivateKeyInfo', #'ECPrivateKey'{parameters = Parameters} = PrivKey) ->
+ der_encode('PrivateKeyInfo',
+ {'PrivateKeyInfo', v1,
+ {'PrivateKeyInfo_privateKeyAlgorithm', ?'id-ecPublicKey',
+ {asn1_OPENTYPE, der_encode('EcpkParameters', Parameters)}},
+ der_encode('ECPrivateKey', PrivKey#'ECPrivateKey'{parameters = asn1_NOVALUE}), asn1_NOVALUE});
+der_encode(Asn1Type, Entity) when (Asn1Type == 'PrivateKeyInfo') or
(Asn1Type == 'EncryptedPrivateKeyInfo') ->
try
{ok, Encoded} = 'PKCS-FRAME':encode(Asn1Type, Entity),
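With these clauses, 'PrivateKeyInfo' encoding becomes symmetric with the decode path above, so a round trip along the following lines is expected to hold (Key is a placeholder for an #'RSAPrivateKey'{}, #'DSAPrivateKey'{} or #'ECPrivateKey'{} record):

    Der = public_key:der_encode('PrivateKeyInfo', Key),
    Key = public_key:der_decode('PrivateKeyInfo', Der).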
diff --git a/lib/public_key/test/pbe_SUITE.erl b/lib/public_key/test/pbe_SUITE.erl
index 44caf479e5..8a5db4efec 100644
--- a/lib/public_key/test/pbe_SUITE.erl
+++ b/lib/public_key/test/pbe_SUITE.erl
@@ -226,11 +226,6 @@ pbes2(Config) when is_list(Config) ->
ok
end.
-check_key_info(#'PrivateKeyInfo'{privateKeyAlgorithm =
- #'PrivateKeyInfo_privateKeyAlgorithm'{algorithm = ?rsaEncryption},
- privateKey = Key}) ->
- #'RSAPrivateKey'{} = public_key:der_decode('RSAPrivateKey', iolist_to_binary(Key)).
-
decode_encode_key_file(File, Password, Cipher, Config) ->
Datadir = proplists:get_value(data_dir, Config),
{ok, PemKey} = file:read_file(filename:join(Datadir, File)),
@@ -238,11 +233,10 @@ decode_encode_key_file(File, Password, Cipher, Config) ->
PemEntry = public_key:pem_decode(PemKey),
ct:print("Pem entry: ~p" , [PemEntry]),
[{Asn1Type, _, {Cipher,_} = CipherInfo} = PubEntry] = PemEntry,
- KeyInfo = public_key:pem_entry_decode(PubEntry, Password),
+ #'RSAPrivateKey'{} = KeyInfo = public_key:pem_entry_decode(PubEntry, Password),
PemKey1 = public_key:pem_encode([public_key:pem_entry_encode(Asn1Type, KeyInfo, {CipherInfo, Password})]),
Pem = strip_ending_newlines(PemKey),
- Pem = strip_ending_newlines(PemKey1),
- check_key_info(KeyInfo).
+ Pem = strip_ending_newlines(PemKey1).
strip_ending_newlines(Bin) ->
string:strip(binary_to_list(Bin), right, 10).
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index 449d1fc040..572748edc9 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -64,6 +64,7 @@ all() ->
groups() ->
[{pem_decode_encode, [], [dsa_pem, rsa_pem, ec_pem, encrypted_pem,
dh_pem, cert_pem, pkcs7_pem, pkcs10_pem, ec_pem2,
+ rsa_priv_pkcs8, dsa_priv_pkcs8, ec_priv_pkcs8,
ec_pem_encode_generated,
gen_ec_param_prime_field, gen_ec_param_char_2_field
]},
@@ -181,6 +182,19 @@ dsa_pem(Config) when is_list(Config) ->
DSAPubPemNoEndNewLines = strip_superfluous_newlines(DSAPubPem),
DSAPubPemNoEndNewLines = strip_superfluous_newlines(public_key:pem_encode([PubEntry0])).
+dsa_priv_pkcs8() ->
+ [{doc, "DSA PKCS8 private key decode/encode"}].
+dsa_priv_pkcs8(Config) when is_list(Config) ->
+ Datadir = proplists:get_value(data_dir, Config),
+ {ok, DsaPem} = file:read_file(filename:join(Datadir, "dsa_key_pkcs8.pem")),
+ [{'PrivateKeyInfo', DerDSAKey, not_encrypted} = Entry0 ] = public_key:pem_decode(DsaPem),
+ DSAKey = public_key:der_decode('PrivateKeyInfo', DerDSAKey),
+ DSAKey = public_key:pem_entry_decode(Entry0),
+ true = check_entry_type(DSAKey, 'DSAPrivateKey'),
+ PrivEntry0 = public_key:pem_entry_encode('PrivateKeyInfo', DSAKey),
+ DSAPemNoEndNewLines = strip_superfluous_newlines(DsaPem),
+ DSAPemNoEndNewLines = strip_superfluous_newlines(public_key:pem_encode([PrivEntry0])).
+
%%--------------------------------------------------------------------
rsa_pem() ->
@@ -216,6 +230,19 @@ rsa_pem(Config) when is_list(Config) ->
RSARawPemNoEndNewLines = strip_superfluous_newlines(RSARawPem),
RSARawPemNoEndNewLines = strip_superfluous_newlines(public_key:pem_encode([PubEntry1])).
+rsa_priv_pkcs8() ->
+ [{doc, "RSA PKCS8 private key decode/encode"}].
+rsa_priv_pkcs8(Config) when is_list(Config) ->
+ Datadir = proplists:get_value(data_dir, Config),
+ {ok, RsaPem} = file:read_file(filename:join(Datadir, "rsa_key_pkcs8.pem")),
+ [{'PrivateKeyInfo', DerRSAKey, not_encrypted} = Entry0 ] = public_key:pem_decode(RsaPem),
+ RSAKey = public_key:der_decode('PrivateKeyInfo', DerRSAKey),
+ RSAKey = public_key:pem_entry_decode(Entry0),
+ true = check_entry_type(RSAKey, 'RSAPrivateKey'),
+ PrivEntry0 = public_key:pem_entry_encode('PrivateKeyInfo', RSAKey),
+ RSAPemNoEndNewLines = strip_superfluous_newlines(RsaPem),
+ RSAPemNoEndNewLines = strip_superfluous_newlines(public_key:pem_encode([PrivEntry0])).
+
%%--------------------------------------------------------------------
ec_pem() ->
@@ -262,6 +289,18 @@ ec_pem2(Config) when is_list(Config) ->
ECPemNoEndNewLines = strip_superfluous_newlines(ECPrivPem),
ECPemNoEndNewLines = strip_superfluous_newlines(public_key:pem_encode([Entry1, Entry2])).
+ec_priv_pkcs8() ->
+ [{doc, "EC PKCS8 private key decode/encode"}].
+ec_priv_pkcs8(Config) when is_list(Config) ->
+ Datadir = proplists:get_value(data_dir, Config),
+ {ok, ECPrivPem} = file:read_file(filename:join(Datadir, "ec_key_pkcs8.pem")),
+ [{'PrivateKeyInfo', _, not_encrypted} = PKCS8Key] = public_key:pem_decode(ECPrivPem),
+ ECPrivKey = public_key:pem_entry_decode(PKCS8Key),
+ true = check_entry_type(ECPrivKey, 'ECPrivateKey'),
+ true = check_entry_type(ECPrivKey#'ECPrivateKey'.parameters, 'EcpkParameters'),
+ PrivEntry0 = public_key:pem_entry_encode('PrivateKeyInfo', ECPrivKey),
+ ECPemNoEndNewLines = strip_superfluous_newlines(ECPrivPem),
+ ECPemNoEndNewLines = strip_superfluous_newlines(public_key:pem_encode([PrivEntry0])).
init_ec_pem_encode_generated(Config) ->
case catch true = lists:member('secp384r1', crypto:ec_curves()) of
diff --git a/lib/public_key/test/public_key_SUITE_data/dsa_key_pkcs8.pem b/lib/public_key/test/public_key_SUITE_data/dsa_key_pkcs8.pem
new file mode 100644
index 0000000000..86e38e2c76
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/dsa_key_pkcs8.pem
@@ -0,0 +1,9 @@
+-----BEGIN PRIVATE KEY-----
+MIIBSwIBADCCASwGByqGSM44BAEwggEfAoGBALez5tklY5CdFeTMos899pA6i4u4
+uCtszgBzrdBk6cl5FVqzdzWMGTQiynnTpGsrOESinzP06Ip+pG15We2OORwgvCxD
+/W95aCiN0/+MdiXqlsmboBARMzsa+SmBENN3gF/+tuuEAFzOXU1q2cmEywRLyfbM
+2KIBVE/TChWYw2eRAhUA1R64VvcQ90XA8SOKVDmMA0dBzukCgYEAlLMYP0pbgBlg
+HQVO3/avAHlWNrIq52Lxk7SdPJWgMvPjTK9Z6sv88kxsCcydtjvO439j1yqcwk50
+GQc+86ktBWWz93/HkIdnFyqafef4mmWvm2Uq6ClQKS+A0Asfaj8Mys+HUMiI+qsf
+djRbyIpwb7MX1nsVdsKzALnZNMW27A0EFgIUWYCfDrv5tqwPWKJu00ez0R192SY=
+-----END PRIVATE KEY-----
diff --git a/lib/public_key/test/public_key_SUITE_data/ec_key_pkcs8.pem b/lib/public_key/test/public_key_SUITE_data/ec_key_pkcs8.pem
new file mode 100644
index 0000000000..8280a3671a
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/ec_key_pkcs8.pem
@@ -0,0 +1,5 @@
+-----BEGIN PRIVATE KEY-----
+MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgB349XXSmba5BbJT5UuCK
+OoyoPHsygy6n+WzP1J+8eYShRANCAATTJdDtiqV9Hs7q+Y/yak1z3uJpukFQGYmr
+lJ2iztxfv7bz10eJ5yM/GNqG8kK0w7SIzjedsIkfjRK7bX6mP7h4
+-----END PRIVATE KEY-----
diff --git a/lib/public_key/test/public_key_SUITE_data/rsa_key_pkcs8.pem b/lib/public_key/test/public_key_SUITE_data/rsa_key_pkcs8.pem
new file mode 100644
index 0000000000..9ef5b3353f
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/rsa_key_pkcs8.pem
@@ -0,0 +1,10 @@
+-----BEGIN PRIVATE KEY-----
+MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEA1GLJmDS5yLvg1zqa
+epnwCgOXzxpPvHokDQx+AcgfO14SPtCD6UTlDEwYBp+6tUTm+qgeQN/CTi7POwIA
+m7P3UwIDAQABAkALFiEJ1e7AwLXq5j88GR8Dls5s3CW/Y+zP1ZAaTbT7p0QUMxG+
+0ko7h8NoxcQJHZU27sZXCjog/IBqn577Xv4RAiEA8/aQ09kz0jxi4aNvlix4B+bW
+gX0sYtcCDkBzx8Y6iMkCIQDe3WCxV9PuiDjpuC8cAy3UMC5PBygZG4iK3arpgzxp
+OwIhAKxKJg+mpgVEJiTpsiVhNEeIS1bZWp5W75m3BM1B/haZAiBQOhEcxikcrR0P
+xaXvx5Uv1UhWWpUstKSqmLF17jBJEQIhAMx4HMLqwaGeYwOcxfzxz6Al8fnPmfAR
+hqFR28fVJrWX
+-----END PRIVATE KEY-----
diff --git a/lib/sasl/doc/src/sasl_app.xml b/lib/sasl/doc/src/sasl_app.xml
index 48b0b8eafb..26b9bece2a 100644
--- a/lib/sasl/doc/src/sasl_app.xml
+++ b/lib/sasl/doc/src/sasl_app.xml
@@ -86,12 +86,6 @@
<c>RELDIR</c> is used. By default, this is
<c>$OTP_ROOT/releases</c>.</p>
</item>
- <tag><c><![CDATA[utc_log = true | false ]]></c></tag>
- <item>
- <p>If set to <c>true</c>, all dates in textual log outputs are
- displayed in Universal Coordinated Time with the string
- <c>UTC</c> appended.</p>
- </item>
</taglist>
</section>
@@ -119,22 +113,18 @@
<p>Formats and writes <em>supervisor reports</em>, <em>crash
reports</em>, and <em>progress reports</em> to <c>stdio</c>.
This error logger event handler uses
- <seealso marker="kernel:kernel_app#logger_format_depth"><c>logger_format_depth</c></seealso>
+ <seealso marker="kernel:kernel_app#deprecated-configuration-parameters"><c>error_logger_format_depth</c></seealso>
in the Kernel application to limit how much detail is printed
- in crash and supervisor reports. If <c>logger_format_depth</c>
- is not set, it uses the old <c>error_logger_format_depth</c>
- instead.</p>
+ in crash and supervisor reports.</p>
</item>
<tag><c>sasl_report_file_h</c></tag>
<item>
<p>Formats and writes <em>supervisor reports</em>, <em>crash
report</em>, and <em>progress report</em> to a single file.
This error logger event handler uses
- <seealso marker="kernel:kernel_app#logger_format_depth"><c>logger_format_depth</c></seealso>
+ <seealso marker="kernel:kernel_app#deprecated-configuration-parameters"><c>error_logger_format_depth</c></seealso>
in the Kernel application to limit the details printed in
- crash and supervisor reports. If <c>logger_format_depth</c> is
- not set, it uses the old <c>error_logger_format_depth</c>
- instead.</p>
+ crash and supervisor reports.</p>
</item>
</taglist>
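For reference, the Kernel parameter referred to above is typically set in sys.config, for example (the depth value is illustrative):

    %% sys.config
    [{kernel, [{error_logger_format_depth, 30}]}].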
<p>A similar behaviour, but still using the new logger API, can be
@@ -179,6 +169,12 @@
<c>sasl_error_logger</c> to error reports or progress reports,
or both. Default is <c>all</c>.</p>
</item>
+ <tag><marker id="utc_log"/><c><![CDATA[utc_log = true | false ]]></c></tag>
+ <item>
+ <p>If set to <c>true</c>, all dates in textual log outputs are
+ displayed in Coordinated Universal Time (UTC), with the string
+ <c>UTC</c> appended.</p>
+ </item>
</taglist>
<p>The error logger event handler <c>log_mf_h</c> can also still
diff --git a/lib/sasl/src/sasl.erl b/lib/sasl/src/sasl.erl
index 2bf11bdcdf..9359bdb30e 100644
--- a/lib/sasl/src/sasl.erl
+++ b/lib/sasl/src/sasl.erl
@@ -142,7 +142,9 @@ add_sasl_logger(Dest, Level) ->
#{level=>Level,
filter_default=>stop,
filters=>
- [{sasl_domain,
+ [{remote_gl,
+ {fun logger_filters:remote_gl/2,stop}},
+ {sasl_domain,
{fun logger_filters:domain/2,
{log,equals,[beam,erlang,otp,sasl]}}}],
logger_std_h=>#{type=>Dest},
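logger_filters:remote_gl/2 with the action stop discards events whose group leader is on another node, so such events no longer reach the SASL handler. A minimal sketch of attaching the same filter to another handler (my_handler is a placeholder):

    logger:add_handler_filter(my_handler, remote_gl,
                              {fun logger_filters:remote_gl/2, stop}).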
diff --git a/lib/sasl/test/sasl_report_SUITE.erl b/lib/sasl/test/sasl_report_SUITE.erl
index 96975aaf69..72ee2f0a10 100644
--- a/lib/sasl/test/sasl_report_SUITE.erl
+++ b/lib/sasl/test/sasl_report_SUITE.erl
@@ -54,7 +54,7 @@ gen_server_crash_unicode(Config) ->
gen_server_crash(Config, Encoding) ->
StopFilter = {fun(_,_) -> stop end, ok},
- logger:add_handler_filter(logger_std_h,stop_all,StopFilter),
+ logger:add_handler_filter(default,stop_all,StopFilter),
logger:add_handler_filter(cth_log_redirect,stop_all,StopFilter),
try
do_gen_server_crash(Config, Encoding)
@@ -62,7 +62,7 @@ gen_server_crash(Config, Encoding) ->
ok = application:unset_env(kernel, logger_sasl_compatible),
ok = application:unset_env(sasl, sasl_error_logger),
ok = application:unset_env(kernel, error_logger_format_depth),
- logger:remove_handler_filter(logger_std_h,stop_all),
+ logger:remove_handler_filter(default,stop_all),
logger:remove_handler_filter(cth_log_redirect,stop_all)
end,
ok.
@@ -83,9 +83,11 @@ do_gen_server_crash(Config, Encoding) ->
error_logger:logfile({open,KernelLog}),
application:start(sasl),
logger:i(print),
+ ct:log("error_logger handlers: ~p",[error_logger:which_report_handlers()]),
crash_me(),
+
error_logger:logfile(close),
application:stop(sasl),
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index 6aed525e8b..407956cc6f 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -762,6 +762,8 @@
<datatype>
<name name="rekey_limit_common_option"/>
+ <name name="limit_bytes"/>
+ <name name="limit_time"/>
<desc>
<p>Sets the limit when rekeying is to be initiated. Both the max time and max amount of data
could be configured:
@@ -773,6 +775,10 @@
</list>
<p>When a rekeying is done, both the timer and the byte counter are restarted.
Defaults to one hour and one GByte.</p>
+ <p>If <c>Minutes</c> is set to <c>infinity</c>, no rekeying is ever initiated because of elapsed time.
+ Setting <c>Bytes</c> to <c>infinity</c> inhibits rekeying triggered by the amount of data transferred.
+ If the option value is set to <c>{infinity, infinity}</c>, no rekeying is initiated at all. Note that rekeying initiated
+ by the peer will still be performed.</p>
</desc>
</datatype>
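A few illustrative option values under the new scheme (Host, Port and the figures are placeholders):

    {ok, C1} = ssh:connect(Host, Port, [{rekey_limit, {30, 500000000}}]),        % 30 minutes or 500 MB
    {ok, C2} = ssh:connect(Host, Port, [{rekey_limit, {infinity, 1024000000}}]), % only data volume triggers rekeying
    {ok, C3} = ssh:connect(Host, Port, [{rekey_limit, {infinity, infinity}}]).   % never initiate rekeying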
diff --git a/lib/ssh/src/ssh.hrl b/lib/ssh/src/ssh.hrl
index fc0a3786ac..2efd239aae 100644
--- a/lib/ssh/src/ssh.hrl
+++ b/lib/ssh/src/ssh.hrl
@@ -191,10 +191,13 @@
-type user_dir_common_option() :: {user_dir, false | string()}.
-type profile_common_option() :: {profile, atom() }.
-type max_idle_time_common_option() :: {idle_time, timeout()}.
--type rekey_limit_common_option() :: {rekey_limit, Bytes::non_neg_integer() |
- {Minutes::non_neg_integer(), Bytes::non_neg_integer()}
+-type rekey_limit_common_option() :: {rekey_limit, Bytes::limit_bytes() |
+ {Minutes::limit_time(), Bytes::limit_bytes()}
}.
+-type limit_bytes() :: non_neg_integer() | infinity . % non_neg_integer due to compatibility
+-type limit_time() :: pos_integer() | infinity .
+
-type key_cb_common_option() :: {key_cb, Module::atom() | {Module::atom(),Opts::[term()]} } .
-type disconnectfun_common_option() ::
{disconnectfun, fun((Reason::term()) -> void | any()) }.
diff --git a/lib/ssh/src/ssh_client_channel.erl b/lib/ssh/src/ssh_client_channel.erl
index f20007baaf..134d3f08bd 100644
--- a/lib/ssh/src/ssh_client_channel.erl
+++ b/lib/ssh/src/ssh_client_channel.erl
@@ -305,8 +305,8 @@ terminate(Reason, #state{cm = ConnectionManager,
close_sent = false} = State) ->
catch ssh_connection:close(ConnectionManager, ChannelId),
terminate(Reason, State#state{close_sent = true});
-terminate(_, #state{channel_cb = Cb, channel_state = ChannelState}) ->
- catch Cb:terminate(Cb, ChannelState),
+terminate(Reason, #state{channel_cb = Cb, channel_state = ChannelState}) ->
+ catch Cb:terminate(Reason, ChannelState),
ok.
%%--------------------------------------------------------------------
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index dfdad769ed..f1ff3a70e2 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -71,7 +71,7 @@
-export([init_connection_handler/3, % proc_lib:spawn needs this
init_ssh_record/3, % Export of this internal function
% intended for low-level protocol test suites
- renegotiate/1, renegotiate_data/1, alg/1 % Export intended for test cases
+ renegotiate/1, alg/1 % Export intended for test cases
]).
-export([dbg_trace/3]).
@@ -325,14 +325,7 @@ close(ConnectionHandler, ChannelId) ->
) -> ok.
%% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
renegotiate(ConnectionHandler) ->
- cast(ConnectionHandler, renegotiate).
-
-%%--------------------------------------------------------------------
--spec renegotiate_data(connection_ref()
- ) -> ok.
-%% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
-renegotiate_data(ConnectionHandler) ->
- cast(ConnectionHandler, data_size).
+ cast(ConnectionHandler, force_renegotiate).
%%--------------------------------------------------------------------
alg(ConnectionHandler) ->
@@ -349,11 +342,6 @@ alg(ConnectionHandler) ->
connection_state :: #connection{},
latest_channel_id = 0 :: non_neg_integer()
| undefined,
- idle_timer_ref :: undefined
- | infinity
- | reference(),
- idle_timer_value = infinity :: infinity
- | pos_integer(),
transport_protocol :: atom()
| undefined, % ex: tcp
transport_cb :: atom()
@@ -429,22 +417,17 @@ init([Role,Socket,Opts]) ->
},
D = case Role of
client ->
- cache_init_idle_timer(D0);
+ D0;
server ->
Sups = ?GET_INTERNAL_OPT(supervisors, Opts),
- cache_init_idle_timer(
- D0#data{connection_state =
- C#connection{cli_spec = ?GET_OPT(ssh_cli, Opts, {ssh_cli,[?GET_OPT(shell, Opts)]}),
- exec = ?GET_OPT(exec, Opts),
- system_supervisor = proplists:get_value(system_sup, Sups),
- sub_system_supervisor = proplists:get_value(subsystem_sup, Sups),
- connection_supervisor = proplists:get_value(connection_sup, Sups)
- }})
+ D0#data{connection_state =
+ C#connection{cli_spec = ?GET_OPT(ssh_cli, Opts, {ssh_cli,[?GET_OPT(shell, Opts)]}),
+ exec = ?GET_OPT(exec, Opts),
+ system_supervisor = proplists:get_value(system_sup, Sups),
+ sub_system_supervisor = proplists:get_value(subsystem_sup, Sups),
+ connection_supervisor = proplists:get_value(connection_sup, Sups)
+ }}
end,
- %% Start the renegotiation timers
- {RekeyTimeout,_MaxSent} = ?GET_OPT(rekey_limit, (D#data.ssh_params)#ssh.opts),
- timer:apply_after(RekeyTimeout, gen_statem, cast, [self(), renegotiate]),
- timer:apply_after(?REKEY_DATA_TIMOUT, gen_statem, cast, [self(), data_size]),
{ok, {hello,Role}, D};
{error,Error} ->
@@ -559,10 +542,15 @@ renegotiation(_) -> false.
#data{}
) -> gen_statem:event_handler_result(state_name()) .
+-define(CONNECTION_MSG(Msg),
+ [{next_event, internal, prepare_next_packet},
+ {next_event,internal,{conn_msg,Msg}}]).
+
%% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
callback_mode() ->
- handle_event_function.
+ [handle_event_function,
+ state_enter].
handle_event(_, _Event, {init_error,Error}=StateName, D) ->
@@ -1017,97 +1005,92 @@ handle_event(_, #ssh_msg_debug{} = Msg, _, D) ->
debug_fun(Msg, D),
keep_state_and_data;
-handle_event(internal, Msg=#ssh_msg_global_request{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_request_success{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_request_failure{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_open{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_open_confirmation{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_open_failure{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_window_adjust{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_data{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_extended_data{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_eof{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
-
-handle_event(internal, Msg=#ssh_msg_channel_close{}, {connected,server} = StateName, D) ->
- handle_connection_msg(Msg, StateName, cache_request_idle_timer_check(D));
-
-handle_event(internal, Msg=#ssh_msg_channel_close{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
+handle_event(internal, {conn_msg,Msg}, StateName, #data{starter = User,
+ connection_state = Connection0,
+ event_queue = Qev0} = D0) ->
+ Role = role(StateName),
+ Renegotiation = renegotiation(StateName),
+ try ssh_connection:handle_msg(Msg, Connection0, Role) of
+ {disconnect, Reason0, RepliesConn} ->
+ {Repls, D} = send_replies(RepliesConn, D0),
+ case {Reason0,Role} of
+ {{_, Reason}, client} when ((StateName =/= {connected,client})
+ and (not Renegotiation)) ->
+ User ! {self(), not_connected, Reason};
+ _ ->
+ ok
+ end,
+ {stop_and_reply, {shutdown,normal}, Repls, D};
-handle_event(internal, Msg=#ssh_msg_channel_request{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
+ {Replies, Connection} when is_list(Replies) ->
+ {Repls, D} =
+ case StateName of
+ {connected,_} ->
+ send_replies(Replies, D0#data{connection_state=Connection});
+ _ ->
+ {ConnReplies, NonConnReplies} = lists:splitwith(fun not_connected_filter/1, Replies),
+ send_replies(NonConnReplies, D0#data{event_queue = Qev0 ++ ConnReplies})
+ end,
+ case {Msg, StateName} of
+ {#ssh_msg_channel_close{}, {connected,_}} ->
+ {keep_state, D, [cond_set_idle_timer(D)|Repls]};
+ {#ssh_msg_channel_success{}, _} ->
+ update_inet_buffers(D#data.socket),
+ {keep_state, D, Repls};
+ _ ->
+ {keep_state, D, Repls}
+ end
-handle_event(internal, Msg=#ssh_msg_channel_success{}, StateName, D) ->
- update_inet_buffers(D#data.socket),
- handle_connection_msg(Msg, StateName, D);
+ catch
+ Class:Error ->
+ {Repls, D1} = send_replies(ssh_connection:handle_stop(Connection0), D0),
+ {Shutdown, D} = ?send_disconnect(?SSH_DISCONNECT_BY_APPLICATION,
+ io_lib:format("Internal error: ~p:~p",[Class,Error]),
+ StateName, D1),
+ {stop_and_reply, Shutdown, Repls, D}
+ end;
-handle_event(internal, Msg=#ssh_msg_channel_failure{}, StateName, D) ->
- handle_connection_msg(Msg, StateName, D);
+handle_event(enter, _OldState, {connected,_}=State, D) ->
+ %% Entering the state where re-negotiation is possible
+ init_renegotiate_timers(State, D);
+
+handle_event(enter, _OldState, {ext_info,_,renegotiate}=State, D) ->
+ %% Could be hanging in ext_info state if nothing else arrives
+ init_renegotiate_timers(State, D);
+
+handle_event(enter, {connected,_}, State, D) ->
+ %% Exiting the state where re-negotiation is possible
+ pause_renegotiate_timers(State, D);
+
+handle_event(cast, force_renegotiate, StateName, D) ->
+ handle_event({timeout,renegotiate}, undefined, StateName, D);
+
+handle_event({timeout,renegotiate}, _, StateName, D0) ->
+ case StateName of
+ {connected,Role} ->
+ start_rekeying(Role, D0);
+ {ext_info,Role,renegotiate} ->
+ start_rekeying(Role, D0);
+ _ ->
+ %% Wrong state for starting a renegotiation, must be in re-negotiation
+ keep_state_and_data
+ end;
-handle_event(cast, renegotiate, {connected,Role}, D) ->
- {KeyInitMsg, SshPacket, Ssh} = ssh_transport:key_exchange_init_msg(D#data.ssh_params),
- send_bytes(SshPacket, D),
- {RekeyTimeout,_MaxSent} = ?GET_OPT(rekey_limit, Ssh#ssh.opts),
- timer:apply_after(RekeyTimeout, gen_statem, cast, [self(), renegotiate]),
- {next_state, {kexinit,Role,renegotiate}, D#data{ssh_params = Ssh,
- key_exchange_init_msg = KeyInitMsg}};
+handle_event({timeout,check_data_size}, _, StateName, D0) ->
+ %% Rekey due to sent data limit reached? (Can't be in {ext_info,...} if data is sent)
+ case StateName of
+ {connected,Role} ->
+ check_data_rekeying(Role, D0);
+ _ ->
+ %% Wrong state for starting a renegotiation, must be in re-negotiation
+ keep_state_and_data
+ end;
handle_event({call,From}, get_alg, _, D) ->
#ssh{algorithms=Algs} = D#data.ssh_params,
{keep_state_and_data, [{reply,From,Algs}]};
-handle_event(cast, renegotiate, _, D) ->
- %% Already in key-exchange so safe to ignore
- {RekeyTimeout,_MaxSent} = ?GET_OPT(rekey_limit, (D#data.ssh_params)#ssh.opts),
- timer:apply_after(RekeyTimeout, gen_statem, cast, [self(), renegotiate]),
- keep_state_and_data;
-
-
-%% Rekey due to sent data limit reached?
-handle_event(cast, data_size, {connected,Role}, D) ->
- {ok, [{send_oct,Sent0}]} = inet:getstat(D#data.socket, [send_oct]),
- Sent = Sent0 - D#data.last_size_rekey,
- {_RekeyTimeout,MaxSent} = ?GET_OPT(rekey_limit, (D#data.ssh_params)#ssh.opts),
- timer:apply_after(?REKEY_DATA_TIMOUT, gen_statem, cast, [self(), data_size]),
- case Sent >= MaxSent of
- true ->
- {KeyInitMsg, SshPacket, Ssh} =
- ssh_transport:key_exchange_init_msg(D#data.ssh_params),
- send_bytes(SshPacket, D),
- {next_state, {kexinit,Role,renegotiate}, D#data{ssh_params = Ssh,
- key_exchange_init_msg = KeyInitMsg,
- last_size_rekey = Sent0}};
- _ ->
- keep_state_and_data
- end;
-
-handle_event(cast, data_size, _, _) ->
- %% Already in key-exchange so safe to ignore
- timer:apply_after(?REKEY_DATA_TIMOUT, gen_statem, cast, [self(), data_size]), % FIXME: not here in original
- keep_state_and_data;
-
-
-
handle_event(cast, _, StateName, _) when not ?CONNECTED(StateName) ->
{keep_state_and_data, [postpone]};
@@ -1221,7 +1204,7 @@ handle_event({call,From}, {request, ChannelPid, ChannelId, Type, Data, Timeout},
D ->
%% Note reply to channel will happen later when reply is recived from peer on the socket
start_channel_request_timer(ChannelId, From, Timeout),
- {keep_state, cache_request_idle_timer_check(D)}
+ {keep_state, D, cond_set_idle_timer(D)}
end;
handle_event({call,From}, {request, ChannelId, Type, Data, Timeout}, StateName, D0)
@@ -1232,7 +1215,7 @@ handle_event({call,From}, {request, ChannelId, Type, Data, Timeout}, StateName,
D ->
%% Note reply to channel will happen later when reply is recived from peer on the socket
start_channel_request_timer(ChannelId, From, Timeout),
- {keep_state, cache_request_idle_timer_check(D)}
+ {keep_state, D, cond_set_idle_timer(D)}
end;
handle_event({call,From}, {data, ChannelId, Type, Data, Timeout}, StateName, D0)
@@ -1273,7 +1256,7 @@ handle_event({call,From},
}),
D = add_request(true, ChannelId, From, D2),
start_channel_request_timer(ChannelId, From, Timeout),
- {keep_state, cache_cancel_idle_timer(D)};
+ {keep_state, D, cond_set_idle_timer(D)};
handle_event({call,From}, {send_window, ChannelId}, StateName, D)
when ?CONNECTED(StateName) ->
@@ -1303,7 +1286,7 @@ handle_event({call,From}, {close, ChannelId}, StateName, D0)
#channel{remote_id = Id} = Channel ->
D1 = send_msg(ssh_connection:channel_close_msg(Id), D0),
ssh_client_channel:cache_update(cache(D1), Channel#channel{sent_close = true}),
- {keep_state, cache_request_idle_timer_check(D1), [{reply,From,ok}]};
+ {keep_state, D1, [cond_set_idle_timer(D1), {reply,From,ok}]};
undefined ->
{keep_state_and_data, [{reply,From,ok}]}
end;
@@ -1319,6 +1302,7 @@ handle_event(info, {Proto, Sock, Info}, {hello,_}, #data{socket = Sock,
{keep_state_and_data, [{next_event, internal, {info_line,Info}}]}
end;
+
handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
transport_protocol = Proto}) ->
try ssh_transport:handle_packet_part(
@@ -1336,13 +1320,29 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
try
ssh_message:decode(set_kex_overload_prefix(DecryptedBytes,D1))
of
- Msg = #ssh_msg_kexinit{} ->
+ #ssh_msg_kexinit{} = Msg ->
{keep_state, D1, [{next_event, internal, prepare_next_packet},
{next_event, internal, {Msg,DecryptedBytes}}
]};
+
+ #ssh_msg_global_request{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_request_success{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_request_failure{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_open{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_open_confirmation{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_open_failure{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_window_adjust{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_data{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_extended_data{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_eof{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_close{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_request{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_failure{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+ #ssh_msg_channel_success{} = Msg -> {keep_state, D1, ?CONNECTION_MSG(Msg)};
+
Msg ->
{keep_state, D1, [{next_event, internal, prepare_next_packet},
- {next_event, internal, Msg}
+ {next_event, internal, Msg}
]}
catch
C:E ->
@@ -1421,8 +1421,20 @@ handle_event(info, {timeout, {_, From} = Request}, _,
end;
%%% Handle that ssh channels user process goes down
-handle_event(info, {'DOWN', _Ref, process, ChannelPid, _Reason}, _, D0) ->
- {keep_state, handle_channel_down(ChannelPid, D0)};
+handle_event(info, {'DOWN', _Ref, process, ChannelPid, _Reason}, _, D) ->
+ Cache = cache(D),
+ ssh_client_channel:cache_foldl(
+ fun(#channel{user=U,
+ local_id=Id}, Acc) when U == ChannelPid ->
+ ssh_client_channel:cache_delete(Cache, Id),
+ Acc;
+ (_,Acc) ->
+ Acc
+ end, [], Cache),
+ {keep_state, D, cond_set_idle_timer(D)};
+
+handle_event({timeout,idle_time}, _Data, _StateName, _D) ->
+ {stop, {shutdown, "Timeout"}};
%%% So that terminate will be run when supervisor is shutdown
handle_event(info, {'EXIT', _Sup, Reason}, StateName, _) ->
@@ -1442,7 +1454,7 @@ handle_event(info, {'EXIT', _Sup, Reason}, StateName, _) ->
end;
handle_event(info, check_cache, _, D) ->
- {keep_state, cache_check_set_idle_timer(D)};
+ {keep_state, D, cond_set_idle_timer(D)};
handle_event(info, UnexpectedMessage, StateName, D = #data{ssh_params = Ssh}) ->
case unexpected_fun(UnexpectedMessage, D) of
@@ -1489,6 +1501,11 @@ handle_event(internal, {send_disconnect,Code,DetailedText,Module,Line}, StateNam
send_disconnect(Code, DetailedText, Module, Line, StateName, D0),
{stop, Shutdown, D};
+
+handle_event(enter, _OldState, State, D) ->
+ %% Just skip
+ {next_state, State, D};
+
handle_event(_Type, _Msg, {ext_info,Role,_ReNegFlag}, D) ->
%% If something else arrives, goto next state and handle the event in that one
{next_state, {connected,Role}, D, [postpone]};
@@ -1746,46 +1763,6 @@ call(FsmPid, Event, Timeout) ->
end.
-handle_connection_msg(Msg, StateName, D0 = #data{starter = User,
- connection_state = Connection0,
- event_queue = Qev0}) ->
- Renegotiation = renegotiation(StateName),
- Role = role(StateName),
- try ssh_connection:handle_msg(Msg, Connection0, Role) of
- {disconnect, Reason0, RepliesConn} ->
- {Repls, D} = send_replies(RepliesConn, D0),
- case {Reason0,Role} of
- {{_, Reason}, client} when ((StateName =/= {connected,client}) and (not Renegotiation)) ->
- User ! {self(), not_connected, Reason};
- _ ->
- ok
- end,
- {stop_and_reply, {shutdown,normal}, Repls, D};
-
- {[], Connection} ->
- {keep_state, D0#data{connection_state = Connection}};
-
- {Replies, Connection} when is_list(Replies) ->
- {Repls, D} =
- case StateName of
- {connected,_} ->
- send_replies(Replies, D0#data{connection_state=Connection});
- _ ->
- {ConnReplies, NonConnReplies} = lists:splitwith(fun not_connected_filter/1, Replies),
- send_replies(NonConnReplies, D0#data{event_queue = Qev0 ++ ConnReplies})
- end,
- {keep_state, D, Repls}
-
- catch
- Class:Error ->
- {Repls, D1} = send_replies(ssh_connection:handle_stop(Connection0), D0),
- {Shutdown, D} = ?send_disconnect(?SSH_DISCONNECT_BY_APPLICATION,
- io_lib:format("Internal error: ~p:~p",[Class,Error]),
- StateName, D1),
- {stop_and_reply, Shutdown, Repls, D}
- end.
-
-
set_kex_overload_prefix(Msg = <<?BYTE(Op),_/binary>>, #data{ssh_params=SshParams})
when Op == 30;
Op == 31
@@ -1891,19 +1868,6 @@ handle_request(ChannelId, Type, Data, WantReply, From, D) ->
end.
%%%----------------------------------------------------------------
-handle_channel_down(ChannelPid, D) ->
- Cache = cache(D),
- ssh_client_channel:cache_foldl(
- fun(#channel{user=U,
- local_id=Id}, Acc) when U == ChannelPid ->
- ssh_client_channel:cache_delete(Cache, Id),
- Acc;
- (_,Acc) ->
- Acc
- end, [], Cache),
- cache_check_set_idle_timer(D).
-
-
update_sys(Cache, Channel, Type, ChannelPid) ->
ssh_client_channel:cache_update(Cache,
Channel#channel{sys = Type, user = ChannelPid}).
@@ -1922,6 +1886,42 @@ new_channel_id(#data{connection_state = #connection{channel_id_seed = Id} =
{Id, State#data{connection_state =
Connection#connection{channel_id_seed = Id + 1}}}.
+
+%%%----------------------------------------------------------------
+start_rekeying(Role, D0) ->
+ {KeyInitMsg, SshPacket, Ssh} = ssh_transport:key_exchange_init_msg(D0#data.ssh_params),
+ send_bytes(SshPacket, D0),
+ D = D0#data{ssh_params = Ssh,
+ key_exchange_init_msg = KeyInitMsg},
+ {next_state, {kexinit,Role,renegotiate}, D}.
+
+
+init_renegotiate_timers(State, D) ->
+ {RekeyTimeout,_MaxSent} = ?GET_OPT(rekey_limit, (D#data.ssh_params)#ssh.opts),
+ {next_state, State, D, [{{timeout,renegotiate}, RekeyTimeout, none},
+ {{timeout,check_data_size}, ?REKEY_DATA_TIMOUT, none} ]}.
+
+
+pause_renegotiate_timers(State, D) ->
+ {next_state, State, D, [{{timeout,renegotiate}, infinity, none},
+ {{timeout,check_data_size}, infinity, none} ]}.
+
+check_data_rekeying(Role, D) ->
+ {ok, [{send_oct,SocketSentTotal}]} = inet:getstat(D#data.socket, [send_oct]),
+ SentSinceRekey = SocketSentTotal - D#data.last_size_rekey,
+ {_RekeyTimeout,MaxSent} = ?GET_OPT(rekey_limit, (D#data.ssh_params)#ssh.opts),
+ case check_data_rekeying_dbg(SentSinceRekey, MaxSent) of
+ true ->
+ start_rekeying(Role, D#data{last_size_rekey = SocketSentTotal});
+ _ ->
+ %% Not enough data sent for a re-negotiation. Restart timer.
+ {keep_state, D, {{timeout,check_data_size}, ?REKEY_DATA_TIMOUT, none}}
+ end.
+
+check_data_rekeying_dbg(SentSinceRekey, MaxSent) ->
+ %% This function is for the ssh_dbg to trace on. See dbg_trace/3 at the end.
+ SentSinceRekey >= MaxSent.
+
%%%----------------------------------------------------------------
%%% This server/client has decided to disconnect via the state machine:
%%% The unused arguments are for debugging.
@@ -2134,60 +2134,12 @@ retry_fun(User, Reason, #data{ssh_params = #ssh{opts = Opts,
%%% Cache idle timer that closes the connection if there are no
%%% channels open for a while.
-cache_init_idle_timer(D) ->
- case ?GET_OPT(idle_time, (D#data.ssh_params)#ssh.opts) of
- infinity ->
- D#data{idle_timer_value = infinity,
- idle_timer_ref = infinity % A flag used later...
- };
- IdleTime ->
- %% We dont want to set the timeout on first connect
- D#data{idle_timer_value = IdleTime}
- end.
-
-
-cache_check_set_idle_timer(D = #data{idle_timer_ref = undefined,
- idle_timer_value = IdleTime}) ->
- %% No timer set - shall we set one?
+cond_set_idle_timer(D) ->
case ssh_client_channel:cache_info(num_entries, cache(D)) of
- 0 when IdleTime == infinity ->
- %% No. Meaningless to set a timer that fires in an infinite time...
- D;
- 0 ->
- %% Yes, we'll set one since the cache is empty and it should not
- %% be that for a specified time
- D#data{idle_timer_ref =
- erlang:send_after(IdleTime, self(), {'EXIT',[],"Timeout"})};
- _ ->
- %% No - there are entries in the cache
- D
- end;
-cache_check_set_idle_timer(D) ->
- %% There is already a timer set or the timeout time is infinite
- D.
-
-
-cache_cancel_idle_timer(D) ->
- case D#data.idle_timer_ref of
- infinity ->
- %% The timer is not activated
- D;
- undefined ->
- %% The timer is already cancelled
- D;
- TimerRef ->
- %% The timer is active
- erlang:cancel_timer(TimerRef),
- D#data{idle_timer_ref = undefined}
+ 0 -> {{timeout,idle_time}, ?GET_OPT(idle_time, (D#data.ssh_params)#ssh.opts), none};
+ _ -> {{timeout,idle_time}, infinity, none}
end.
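The returned tuple is a gen_statem generic-timeout action, so callers include it in their action list, as in the handle_event clauses changed above, for example:

    {keep_state, D, [cond_set_idle_timer(D) | Replies]}

When the channel cache becomes empty, the {timeout,idle_time} event eventually fires and is handled by the new clause further up, stopping the connection with {shutdown, "Timeout"}.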
-
-cache_request_idle_timer_check(D = #data{idle_timer_value = infinity}) ->
- D;
-cache_request_idle_timer_check(D = #data{idle_timer_value = IdleTime}) ->
- erlang:send_after(IdleTime, self(), check_cache),
- D.
-
%%%----------------------------------------------------------------
start_channel_request_timer(_,_, infinity) ->
ok;
@@ -2248,7 +2200,7 @@ update_inet_buffers(Socket) ->
%%%# Tracing
%%%#
-dbg_trace(points, _, _) -> [terminate, disconnect, connections, connection_events];
+dbg_trace(points, _, _) -> [terminate, disconnect, connections, connection_events, renegotiation];
dbg_trace(flags, connections, A) -> [c] ++ dbg_trace(flags, terminate, A);
dbg_trace(on, connections, A) -> dbg:tp(?MODULE, init_connection_handler, 3, x),
@@ -2291,6 +2243,33 @@ dbg_trace(format, connection_events, {return_from, {?MODULE,handle_event,4}, Ret
io_lib:format("~p~n", [event_handler_result(Ret)])
];
+dbg_trace(flags, renegotiation, _) -> [c];
+dbg_trace(on, renegotiation, _) -> dbg:tpl(?MODULE, init_renegotiate_timers, 2, x),
+ dbg:tpl(?MODULE, pause_renegotiate_timers, 2, x),
+ dbg:tpl(?MODULE, check_data_rekeying_dbg, 2, x),
+ dbg:tpl(?MODULE, start_rekeying, 2, x);
+dbg_trace(off, renegotiation, _) -> dbg:ctpl(?MODULE, init_renegotiate_timers, 2),
+ dbg:ctpl(?MODULE, pause_renegotiate_timers, 2),
+ dbg:ctpl(?MODULE, check_data_rekeying_dbg, 2),
+ dbg:ctpl(?MODULE, start_rekeying, 2);
+dbg_trace(format, renegotiation, {call, {?MODULE,init_renegotiate_timers,[_State,D]}}) ->
+ ["Renegotiation init\n",
+ io_lib:format("rekey_limit: ~p ({ms,bytes})~ncheck_data_size: ~p (ms)~n",
+ [?GET_OPT(rekey_limit, (D#data.ssh_params)#ssh.opts),
+ ?REKEY_DATA_TIMOUT])
+ ];
+dbg_trace(format, renegotiation, {call, {?MODULE,pause_renegotiate_timers,[_State,_D]}}) ->
+ ["Renegotiation pause\n"];
+dbg_trace(format, renegotiation, {call, {?MODULE,start_rekeying,[_Role,_D]}}) ->
+ ["Renegotiation start rekeying\n"];
+dbg_trace(format, renegotiation, {call, {?MODULE,check_data_rekeying_dbg,[SentSinceRekey, MaxSent]}}) ->
+ ["Renegotiation check data sent\n",
+ io_lib:format("TotalSentSinceRekey: ~p~nMaxBeforeRekey: ~p~nStartRekey: ~p~n",
+ [SentSinceRekey, MaxSent, SentSinceRekey >= MaxSent])
+ ];
+
+
+
dbg_trace(flags, terminate, _) -> [c];
dbg_trace(on, terminate, _) -> dbg:tp(?MODULE, terminate, 3, x);
dbg_trace(off, terminate, _) -> dbg:ctpg(?MODULE, terminate, 3);
diff --git a/lib/ssh/src/ssh_options.erl b/lib/ssh/src/ssh_options.erl
index 73287e464a..fe95d2ac54 100644
--- a/lib/ssh/src/ssh_options.erl
+++ b/lib/ssh/src/ssh_options.erl
@@ -601,14 +601,19 @@ default(common) ->
{rekey_limit, def} =>
#{default => {3600000, 1024000000}, % {1 hour, 1 GB}
- chk => fun({TimeMins, SizBytes}) when is_integer(TimeMins) andalso TimeMins>=0,
- is_integer(SizBytes) andalso SizBytes>=0 ->
- %% New (>= 21) format
- {true, {TimeMins * 60*1000, % To ms
- SizBytes}};
- (SizBytes) when is_integer(SizBytes) andalso SizBytes>=0 ->
- %% Old (< 21) format
- {true, {3600000, SizBytes}};
+ chk => fun({infinity, infinity}) ->
+ true;
+ ({Mins, infinity}) when is_integer(Mins), Mins>0 ->
+ {true, {Mins*60*1000, infinity}};
+ ({infinity, Bytes}) when is_integer(Bytes), Bytes>=0 ->
+ true;
+ ({Mins, Bytes}) when is_integer(Mins), Mins>0,
+ is_integer(Bytes), Bytes>=0 ->
+ {true, {Mins*60*1000, Bytes}};
+ (infinity) ->
+ {true, {3600000, infinity}};
+ (Bytes) when is_integer(Bytes), Bytes>=0 ->
+ {true, {3600000, Bytes}};
(_) ->
false
end,
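A sketch of how the checker above normalizes the accepted forms into the internal {Milliseconds, Bytes} representation:

    %% {rekey_limit, 1024000000}           -> {3600000, 1024000000}   (default 1 hour kept)
    %% {rekey_limit, infinity}             -> {3600000, infinity}
    %% {rekey_limit, {30, infinity}}       -> {1800000, infinity}     (minutes converted to ms)
    %% {rekey_limit, {infinity, 1024000}}  -> {infinity, 1024000}     (kept as is)
    %% {rekey_limit, {infinity, infinity}} -> {infinity, infinity}    (no rekeying initiated)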
diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl
index 5984713ec9..9c391abc43 100644
--- a/lib/ssh/src/ssh_sftp.erl
+++ b/lib/ssh/src/ssh_sftp.erl
@@ -171,21 +171,16 @@ start_channel(Host, Port, UserOptions) ->
stop_channel(Pid) ->
case is_process_alive(Pid) of
true ->
- OldValue = process_flag(trap_exit, true),
- link(Pid),
- exit(Pid, ssh_sftp_stop_channel),
- receive
- {'EXIT', Pid, normal} ->
- ok
- after 5000 ->
- exit(Pid, kill),
- receive
- {'EXIT', Pid, killed} ->
- ok
- end
- end,
- process_flag(trap_exit, OldValue),
- ok;
+ MonRef = erlang:monitor(process, Pid),
+ unlink(Pid),
+ exit(Pid, ssh_sftp_stop_channel),
+ receive {'DOWN',MonRef,_,_,_} -> ok
+ after
+ 1000 ->
+ exit(Pid, kill),
+ erlang:demonitor(MonRef, [flush]),
+ ok
+ end;
false ->
ok
end.
diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl
index 603ac71d4b..807e23ff01 100644
--- a/lib/ssh/test/ssh_basic_SUITE.erl
+++ b/lib/ssh/test/ssh_basic_SUITE.erl
@@ -32,7 +32,7 @@
-define(NEWLINE, <<"\r\n">>).
--define(REKEY_DATA_TMO, 65000).
+-define(REKEY_DATA_TMO, 1 * 60000). % Should be a multiple of 60000
%%--------------------------------------------------------------------
%% Common Test interface functions -----------------------------------
@@ -45,7 +45,6 @@ suite() ->
all() ->
[{group, all_tests}].
-
groups() ->
[{all_tests, [parallel], [{group, ssh_renegotiate_SUITE},
{group, ssh_basic_SUITE}
@@ -76,7 +75,11 @@ groups() ->
shell_exit_status
]},
- {ssh_renegotiate_SUITE, [parallel], [rekey,
+ {ssh_renegotiate_SUITE, [parallel], [rekey0,
+ rekey1,
+ rekey2,
+ rekey3,
+ rekey4,
rekey_limit_client,
rekey_limit_daemon,
rekey_time_limit_client,
@@ -1330,28 +1333,36 @@ shell_exit_status(Config) when is_list(Config) ->
ssh:stop_daemon(Pid).
+%%----------------------------------------------------------------------------
%%% Idle timeout test
-rekey() -> [{timetrap,{seconds,90}}].
+rekey0() -> [{timetrap,{seconds,90}}].
+rekey1() -> [{timetrap,{seconds,90}}].
+rekey2() -> [{timetrap,{seconds,90}}].
+rekey3() -> [{timetrap,{seconds,90}}].
+rekey4() -> [{timetrap,{seconds,90}}].
-rekey(Config) ->
- {Pid, Host, Port} =
- ssh_test_lib:std_daemon(Config,
- [{rekey_limit, 0}]),
- ConnectionRef =
- ssh_test_lib:std_connect(Config, Host, Port,
- [{rekey_limit, 0}]),
+rekey0(Config) -> rekey_chk(Config, 0, 0).
+rekey1(Config) -> rekey_chk(Config, infinity, 0).
+rekey2(Config) -> rekey_chk(Config, {infinity,infinity}, 0).
+rekey3(Config) -> rekey_chk(Config, 0, infinity).
+rekey4(Config) -> rekey_chk(Config, 0, {infinity,infinity}).
+
+rekey_chk(Config, RLdaemon, RLclient) ->
+ {Pid, Host, Port} = ssh_test_lib:std_daemon(Config, [{rekey_limit, RLdaemon}]),
+ ConnectionRef = ssh_test_lib:std_connect(Config, Host, Port, [{rekey_limit, RLclient}]),
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
- receive
- after ?REKEY_DATA_TMO ->
- %%By this time rekeying would have been done
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
- false = (Kex2 == Kex1),
- ssh:close(ConnectionRef),
- ssh:stop_daemon(Pid)
- end.
-%%--------------------------------------------------------------------
+ %% Make both sides send something:
+ {ok, SftpPid} = ssh_sftp:start_channel(ConnectionRef),
+
+ %% Check rekeying
+ timer:sleep(?REKEY_DATA_TMO),
+ ?wait_match(false, Kex1==ssh_test_lib:get_kex_init(ConnectionRef), [], 2000, 10),
+ ssh:close(ConnectionRef),
+ ssh:stop_daemon(Pid).
+
+%%--------------------------------------------------------------------
%%% Test rekeying by data volume
rekey_limit_client() -> [{timetrap,{seconds,400}}].
@@ -1359,7 +1370,7 @@ rekey_limit_client(Config) ->
Limit = 6000,
UserDir = proplists:get_value(priv_dir, Config),
DataFile = filename:join(UserDir, "rekey.data"),
-
+ Data = lists:duplicate(Limit+10,1),
Algs = proplists:get_value(preferred_algorithms, Config),
{Pid, Host, Port} = ssh_test_lib:std_daemon(Config,[{max_random_length_padding,0},
{preferred_algorithms,Algs}]),
@@ -1368,31 +1379,33 @@ rekey_limit_client(Config) ->
{max_random_length_padding,0}]),
{ok, SftpPid} = ssh_sftp:start_channel(ConnectionRef),
+ %% Check that it doesn't rekey without data transfer
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
-
timer:sleep(?REKEY_DATA_TMO),
- Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex1 == ssh_test_lib:get_kex_init(ConnectionRef)),
- Data = lists:duplicate(Limit+10,1),
+ %% Check that data transfer triggers rekeying
ok = ssh_sftp:write_file(SftpPid, DataFile, Data),
-
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ ?wait_match(false, Kex1==(Kex2=ssh_test_lib:get_kex_init(ConnectionRef)), Kex2, 2000, 10),
- false = (Kex2 == Kex1),
+ %% Check that data transfer continues to trigger rekeying
+ ok = ssh_sftp:write_file(SftpPid, DataFile, Data),
+ timer:sleep(?REKEY_DATA_TMO),
+ ?wait_match(false, Kex2==(Kex3=ssh_test_lib:get_kex_init(ConnectionRef)), Kex3, 2000, 10),
+ %% Check that it doesn't rekey without data transfer
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex3 == ssh_test_lib:get_kex_init(ConnectionRef)),
+ %% Check that it doesn't rekey on a small data transfer
ok = ssh_sftp:write_file(SftpPid, DataFile, "hi\n"),
-
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
-
- false = (Kex2 == Kex1),
+ true = (Kex3 == ssh_test_lib:get_kex_init(ConnectionRef)),
+ %% Check that it doesn't rekey without data transfer
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex3 == ssh_test_lib:get_kex_init(ConnectionRef)),
ssh_sftp:stop_channel(SftpPid),
ssh:close(ConnectionRef),
@@ -1416,32 +1429,40 @@ rekey_limit_daemon(Config) ->
ConnectionRef = ssh_test_lib:std_connect(Config, Host, Port, [{max_random_length_padding,0}]),
{ok, SftpPid} = ssh_sftp:start_channel(ConnectionRef),
+ %% Check that it doesn't rekey without data transfer
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
timer:sleep(?REKEY_DATA_TMO),
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
+ %% Check that data transfer triggers rekeying
{ok,_} = ssh_sftp:read_file(SftpPid, DataFile1),
+ timer:sleep(?REKEY_DATA_TMO),
+ ?wait_match(false, Kex1==(Kex2=ssh_test_lib:get_kex_init(ConnectionRef)), Kex2, 2000, 10),
+ %% Check that data transfer continues to trigger rekeying
+ {ok,_} = ssh_sftp:read_file(SftpPid, DataFile1),
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
- false = (Kex2 == Kex1),
+ ?wait_match(false, Kex2==(Kex3=ssh_test_lib:get_kex_init(ConnectionRef)), Kex3, 2000, 10),
+ %% Check that it doesn't rekey without data transfer
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex3 == ssh_test_lib:get_kex_init(ConnectionRef)),
+ %% Check that it doesn't rekey on a small data transfer
{ok,_} = ssh_sftp:read_file(SftpPid, DataFile2),
-
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex3 == ssh_test_lib:get_kex_init(ConnectionRef)),
+ %% Check that it doesn't rekey without data transfer
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex3 == ssh_test_lib:get_kex_init(ConnectionRef)),
ssh_sftp:stop_channel(SftpPid),
ssh:close(ConnectionRef),
ssh:stop_daemon(Pid).
+%%--------------------------------------------------------------------
%% Check that datatransfer in the other direction does not trigger re-keying
norekey_limit_client() -> [{timetrap,{seconds,400}}].
norekey_limit_client(Config) ->
@@ -1460,13 +1481,12 @@ norekey_limit_client(Config) ->
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
timer:sleep(?REKEY_DATA_TMO),
- Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex1 == ssh_test_lib:get_kex_init(ConnectionRef)),
{ok,_} = ssh_sftp:read_file(SftpPid, DataFile),
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex1 == ssh_test_lib:get_kex_init(ConnectionRef)),
- Kex1 = Kex2,
ssh_sftp:stop_channel(SftpPid),
ssh:close(ConnectionRef),
ssh:stop_daemon(Pid).
@@ -1488,13 +1508,12 @@ norekey_limit_daemon(Config) ->
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
timer:sleep(?REKEY_DATA_TMO),
- Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex1 == ssh_test_lib:get_kex_init(ConnectionRef)),
ok = ssh_sftp:write_file(SftpPid, DataFile, lists:duplicate(Limit+10,1)),
timer:sleep(?REKEY_DATA_TMO),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex1 == ssh_test_lib:get_kex_init(ConnectionRef)),
- Kex1 = Kex2,
ssh_sftp:stop_channel(SftpPid),
ssh:close(ConnectionRef),
ssh:stop_daemon(Pid).
@@ -1504,39 +1523,41 @@ norekey_limit_daemon(Config) ->
rekey_time_limit_client() -> [{timetrap,{seconds,400}}].
rekey_time_limit_client(Config) ->
- Minutes = 1,
+ Minutes = ?REKEY_DATA_TMO div 60000,
GB = 1024*1000*1000,
Algs = proplists:get_value(preferred_algorithms, Config),
{Pid, Host, Port} = ssh_test_lib:std_daemon(Config,[{max_random_length_padding,0},
{preferred_algorithms,Algs}]),
ConnectionRef = ssh_test_lib:std_connect(Config, Host, Port, [{rekey_limit, {Minutes, GB}},
{max_random_length_padding,0}]),
- {ok, SftpPid} = ssh_sftp:start_channel(ConnectionRef),
- rekey_time_limit(Pid, Minutes, ConnectionRef, SftpPid).
+ rekey_time_limit(Pid, ConnectionRef).
rekey_time_limit_daemon() -> [{timetrap,{seconds,400}}].
rekey_time_limit_daemon(Config) ->
- Minutes = 1,
+ Minutes = ?REKEY_DATA_TMO div 60000,
GB = 1024*1000*1000,
Algs = proplists:get_value(preferred_algorithms, Config),
{Pid, Host, Port} = ssh_test_lib:std_daemon(Config,[{rekey_limit, {Minutes, GB}},
{max_random_length_padding,0},
{preferred_algorithms,Algs}]),
ConnectionRef = ssh_test_lib:std_connect(Config, Host, Port, [{max_random_length_padding,0}]),
- {ok, SftpPid} = ssh_sftp:start_channel(ConnectionRef),
- rekey_time_limit(Pid, Minutes, ConnectionRef, SftpPid).
+ rekey_time_limit(Pid, ConnectionRef).
-rekey_time_limit(Pid, Minutes, ConnectionRef, SftpPid) ->
+rekey_time_limit(Pid, ConnectionRef) ->
+ {ok, SftpPid} = ssh_sftp:start_channel(ConnectionRef),
Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
timer:sleep(5000),
- Kex1 = ssh_test_lib:get_kex_init(ConnectionRef),
+ true = (Kex1 == ssh_test_lib:get_kex_init(ConnectionRef)),
- timer:sleep((Minutes*60 + 30) * 1000),
- Kex2 = ssh_test_lib:get_kex_init(ConnectionRef),
+ %% Check that it rekeys when the max time + 30s has passed
+ timer:sleep(?REKEY_DATA_TMO + 30*1000),
+ ?wait_match(false, Kex1==(Kex2=ssh_test_lib:get_kex_init(ConnectionRef)), Kex2, 2000, 10),
- false = (Kex2 == Kex1),
+ %% Check that it rekeys again on time, even though nothing is transferred
+ timer:sleep(?REKEY_DATA_TMO + 30*1000),
+ ?wait_match(false, Kex2==ssh_test_lib:get_kex_init(ConnectionRef), [], 2000, 10),
ssh_sftp:stop_channel(SftpPid),
ssh:close(ConnectionRef),
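Both the volume and the time tests drive the same ssh rekey_limit option; the {Minutes, GB} tuple used above combines a time limit with a byte-volume limit. A hedged sketch of passing the option from a plain client (the integer-only byte form is assumed from the Limit value used in rekey_limit_client and is not shown in this hunk; host and port are placeholders):

    %% Byte-volume limit only (assumed form), and the combined {Minutes, Bytes}
    %% limit as used by rekey_time_limit_client above.
    {ok, C1} = ssh:connect("localhost", 22, [{rekey_limit, 6000}]),
    {ok, C2} = ssh:connect("localhost", 22, [{rekey_limit, {1, 1024*1000*1000}}]).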
@@ -1544,7 +1565,7 @@ rekey_time_limit(Pid, Minutes, ConnectionRef, SftpPid) ->
%%--------------------------------------------------------------------
-%%% Test rekeying with simulataneous send request
+%%% Test rekeying with simultaneous send request
renegotiate1(Config) ->
UserDir = proplists:get_value(priv_dir, Config),
diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl
index 57ae2dbac2..65970535f4 100644
--- a/lib/ssh/test/ssh_test_lib.erl
+++ b/lib/ssh/test/ssh_test_lib.erl
@@ -926,7 +926,7 @@ get_kex_init(Conn, Ref, TRef) ->
end;
false ->
- ct:log("Not in 'connected' state: ~p",[State]),
+ ct:log("~p:~p Not in 'connected' state: ~p",[?MODULE,?LINE,State]),
receive
{reneg_timeout,Ref} ->
ct:log("S = ~p", [S]),
diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile
index 11b3e65912..c389aa8cfe 100644
--- a/lib/ssl/src/Makefile
+++ b/lib/ssl/src/Makefile
@@ -54,8 +54,8 @@ MODULES= \
ssl_connection_sup \
ssl_listen_tracker_sup\
dtls_connection_sup \
- dtls_udp_listener\
- dtls_udp_sup \
+ dtls_packet_demux \
+ dtls_listener_sup \
ssl_dist_sup\
ssl_dist_admin_sup\
ssl_dist_connection_sup\
diff --git a/lib/ssl/src/dtls_connection.erl b/lib/ssl/src/dtls_connection.erl
index 0fe568759d..4e3f65d9c6 100644
--- a/lib/ssl/src/dtls_connection.erl
+++ b/lib/ssl/src/dtls_connection.erl
@@ -137,9 +137,8 @@ next_record(#state{protocol_buffers =
Buffers#protocol_buffers{dtls_cipher_texts = Rest},
connection_states = ConnectionStates});
next_record(#state{role = server,
- socket = {Listener, {Client, _}},
- transport_cb = gen_udp} = State) ->
- dtls_udp_listener:active_once(Listener, Client, self()),
+ socket = {Listener, {Client, _}}} = State) ->
+ dtls_packet_demux:active_once(Listener, Client, self()),
{no_record, State};
next_record(#state{role = client,
socket = {_Server, Socket} = DTLSSocket,
@@ -448,7 +447,7 @@ init({call, From}, {start, Timeout},
},
{Record, State} = next_record(State3),
next_event(hello, Record, State, Actions);
-init({call, _} = Type, Event, #state{role = server, transport_cb = gen_udp} = State) ->
+init({call, _} = Type, Event, #state{role = server, data_tag = udp} = State) ->
Result = gen_handshake(?FUNCTION_NAME, Type, Event,
State#state{flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT},
protocol_specific = #{current_cookie_secret => dtls_v1:cookie_secret(),
@@ -922,7 +921,7 @@ handle_alerts([Alert | Alerts], {next_state, StateName, State}) ->
handle_alerts([Alert | Alerts], {next_state, StateName, State, _Actions}) ->
handle_alerts(Alerts, ssl_connection:handle_alert(Alert, StateName, State)).
-handle_own_alert(Alert, Version, StateName, #state{transport_cb = gen_udp,
+handle_own_alert(Alert, Version, StateName, #state{data_tag = udp,
role = Role,
ssl_options = Options} = State0) ->
case ignore_alert(Alert, State0) of
@@ -1013,10 +1012,10 @@ next_flight(Flight) ->
change_cipher_spec => undefined,
handshakes_after_change_cipher_spec => []}.
-handle_flight_timer(#state{transport_cb = gen_udp,
+handle_flight_timer(#state{data_tag = udp,
flight_state = {retransmit, Timeout}} = State) ->
start_retransmision_timer(Timeout, State);
-handle_flight_timer(#state{transport_cb = gen_udp,
+handle_flight_timer(#state{data_tag = udp,
flight_state = connection} = State) ->
{State, []};
handle_flight_timer(State) ->
diff --git a/lib/ssl/src/dtls_udp_sup.erl b/lib/ssl/src/dtls_listener_sup.erl
index 197882e92f..6939f1ef3b 100644
--- a/lib/ssl/src/dtls_udp_sup.erl
+++ b/lib/ssl/src/dtls_listener_sup.erl
@@ -23,7 +23,7 @@
%% Purpose: Supervisor for a process dispatching UDP datagrams to
%% the correct DTLS handler
%%----------------------------------------------------------------------
--module(dtls_udp_sup).
+-module(dtls_listener_sup).
-behaviour(supervisor).
@@ -52,10 +52,10 @@ init(_O) ->
MaxT = 3600,
Name = undefined, % As simple_one_for_one is used.
- StartFunc = {dtls_udp_listener, start_link, []},
+ StartFunc = {dtls_packet_demux, start_link, []},
Restart = temporary, % E.g. should not be restarted
Shutdown = 4000,
- Modules = [dtls_udp_listener],
+ Modules = [dtls_packet_demux],
Type = worker,
ChildSpec = {Name, StartFunc, Restart, Shutdown, Type, Modules},
diff --git a/lib/ssl/src/dtls_udp_listener.erl b/lib/ssl/src/dtls_packet_demux.erl
index 0608c6bd2b..1672626165 100644
--- a/lib/ssl/src/dtls_udp_listener.erl
+++ b/lib/ssl/src/dtls_packet_demux.erl
@@ -19,15 +19,15 @@
%%
--module(dtls_udp_listener).
+-module(dtls_packet_demux).
-behaviour(gen_server).
-include("ssl_internal.hrl").
%% API
--export([start_link/4, active_once/3, accept/2, sockname/1, close/1,
- get_all_opts/1, get_sock_opts/2, set_sock_opts/2]).
+-export([start_link/5, active_once/3, accept/2, sockname/1, close/1,
+ get_all_opts/1, get_sock_opts/2, set_sock_opts/2]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -36,6 +36,7 @@
-record(state,
{port,
listener,
+ transport,
dtls_options,
emulated_options,
dtls_msq_queues = kv_new(),
@@ -50,35 +51,36 @@
%%% API
%%%===================================================================
-start_link(Port, EmOpts, InetOptions, DTLSOptions) ->
- gen_server:start_link(?MODULE, [Port, EmOpts, InetOptions, DTLSOptions], []).
+start_link(Port, TransportInfo, EmOpts, InetOptions, DTLSOptions) ->
+ gen_server:start_link(?MODULE, [Port, TransportInfo, EmOpts, InetOptions, DTLSOptions], []).
-active_once(UDPConnection, Client, Pid) ->
- gen_server:cast(UDPConnection, {active_once, Client, Pid}).
+active_once(PacketSocket, Client, Pid) ->
+ gen_server:cast(PacketSocket, {active_once, Client, Pid}).
-accept(UDPConnection, Accepter) ->
- call(UDPConnection, {accept, Accepter}).
+accept(PacketSocket, Accepter) ->
+ call(PacketSocket, {accept, Accepter}).
-sockname(UDPConnection) ->
- call(UDPConnection, sockname).
-close(UDPConnection) ->
- call(UDPConnection, close).
-get_sock_opts(UDPConnection, SplitSockOpts) ->
- call(UDPConnection, {get_sock_opts, SplitSockOpts}).
-get_all_opts(UDPConnection) ->
- call(UDPConnection, get_all_opts).
-set_sock_opts(UDPConnection, Opts) ->
- call(UDPConnection, {set_sock_opts, Opts}).
+sockname(PacketSocket) ->
+ call(PacketSocket, sockname).
+close(PacketSocket) ->
+ call(PacketSocket, close).
+get_sock_opts(PacketSocket, SplitSockOpts) ->
+ call(PacketSocket, {get_sock_opts, SplitSockOpts}).
+get_all_opts(PacketSocket) ->
+ call(PacketSocket, get_all_opts).
+set_sock_opts(PacketSocket, Opts) ->
+ call(PacketSocket, {set_sock_opts, Opts}).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
-init([Port, EmOpts, InetOptions, DTLSOptions]) ->
+init([Port, {TransportModule, _,_,_} = TransportInfo, EmOpts, InetOptions, DTLSOptions]) ->
try
- {ok, Socket} = gen_udp:open(Port, InetOptions),
+ {ok, Socket} = TransportModule:open(Port, InetOptions),
{ok, #state{port = Port,
first = true,
+ transport = TransportInfo,
dtls_options = DTLSOptions,
emulated_options = EmOpts,
listener = Socket,
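The new transport field stores the callback-info tuple that later clauses match on, pulling the data tag out of the second element and the error tag out of the fourth. Its exact contents come from the caller (ultimately dtls_socket:default_cb_info/0), but it presumably follows the usual {Module, DataTag, ClosedTag, ErrorTag} layout; a small sketch under that assumption:

    %% Assumed shape of TransportInfo for plain UDP transport.
    TransportInfo = {gen_udp, udp, udp_closed, udp_error},
    {TransportModule, _DataTag, _ClosedTag, _ErrTag} = TransportInfo,
    %% init/1 above opens the listen socket through the module element:
    {ok, Socket} = TransportModule:open(0, [binary]),
    ok = gen_udp:close(Socket).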
@@ -134,20 +136,20 @@ handle_cast({active_once, Client, Pid}, State0) ->
State = handle_active_once(Client, Pid, State0),
{noreply, State}.
-handle_info({udp, Socket, IP, InPortNo, _} = Msg, #state{listener = Socket} = State0) ->
+handle_info({Transport, Socket, IP, InPortNo, _} = Msg, #state{listener = Socket, transport = {_,Transport,_,_}} = State0) ->
State = handle_datagram({IP, InPortNo}, Msg, State0),
next_datagram(Socket),
{noreply, State};
%% UDP socket does not have a connection and should not receive an econnreset
-%% This does however happens on on some windows versions. Just ignoring it
+%% This does, however, happen on some Windows versions. Just ignoring it
%% appears to make things work as expected!
-handle_info({udp_error, Socket, econnreset = Error}, #state{listener = Socket} = State) ->
+handle_info({Error, Socket, econnreset = Error}, #state{listener = Socket, transport = {_,_,_, udp_error}} = State) ->
Report = io_lib:format("Ignore SSL UDP Listener: Socket error: ~p ~n", [Error]),
error_logger:info_report(Report),
{noreply, State};
-handle_info({udp_error, Socket, Error}, #state{listener = Socket} = State) ->
- Report = io_lib:format("SSL UDP Listener shutdown: Socket error: ~p ~n", [Error]),
+handle_info({Error, Socket, Error}, #state{listener = Socket, transport = {_,_,_, Error}} = State) ->
+ Report = io_lib:format("SSL packet multiplexer shutdown: Socket error: ~p ~n", [Error]),
error_logger:info_report(Report),
{noreply, State#state{close=true}};
@@ -231,7 +233,7 @@ setup_new_connection(User, From, Client, Msg, #state{dtls_processes = Processes,
listener = Socket,
emulated_options = EmOpts} = State) ->
ConnArgs = [server, "localhost", Port, {self(), {Client, Socket}},
- {DTLSOpts, EmOpts, udp_listener}, User, dtls_socket:default_cb_info()],
+ {DTLSOpts, EmOpts, dtls_listener}, User, dtls_socket:default_cb_info()],
case dtls_connection_sup:start_child(ConnArgs) of
{ok, Pid} ->
erlang:monitor(process, Pid),
diff --git a/lib/ssl/src/dtls_socket.erl b/lib/ssl/src/dtls_socket.erl
index 0e4ab089dc..8dd62bc352 100644
--- a/lib/ssl/src/dtls_socket.erl
+++ b/lib/ssl/src/dtls_socket.erl
@@ -22,31 +22,31 @@
-include("ssl_internal.hrl").
-include("ssl_api.hrl").
--export([send/3, listen/3, accept/3, connect/4, socket/4, setopts/3, getopts/3, getstat/3,
+-export([send/3, listen/2, accept/3, connect/4, socket/4, setopts/3, getopts/3, getstat/3,
peername/2, sockname/2, port/2, close/2]).
-export([emulated_options/0, emulated_options/1, internal_inet_values/0, default_inet_values/0, default_cb_info/0]).
send(Transport, {{IP,Port},Socket}, Data) ->
Transport:send(Socket, IP, Port, Data).
-listen(gen_udp = Transport, Port, #config{transport_info = {Transport, _, _, _},
- ssl = SslOpts,
- emulated = EmOpts,
- inet_user = Options} = Config) ->
+listen(Port, #config{transport_info = TransportInfo,
+ ssl = SslOpts,
+ emulated = EmOpts,
+ inet_user = Options} = Config) ->
- case dtls_udp_sup:start_child([Port, emulated_socket_options(EmOpts, #socket_options{}),
+ case dtls_listener_sup:start_child([Port, TransportInfo, emulated_socket_options(EmOpts, #socket_options{}),
Options ++ internal_inet_values(), SslOpts]) of
{ok, Pid} ->
- {ok, #sslsocket{pid = {udp, Config#config{udp_handler = {Pid, Port}}}}};
+ {ok, #sslsocket{pid = {dtls, Config#config{dtls_handler = {Pid, Port}}}}};
Err = {error, _} ->
Err
end.
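For orientation, this listen/2 clause is what the public API ends up calling once the protocol is DTLS (see the do_listen/3 change in ssl.erl further down). A hedged usage sketch from the application side; option values are placeholders:

    %% Create a DTLS listen socket through the public ssl API; the {protocol, dtls}
    %% option routes the call into dtls_socket:listen/2.
    {ok, ListenSocket} =
        ssl:listen(4433, [{protocol, dtls},
                          {certfile, "server.pem"},     % placeholder path
                          {keyfile,  "server_key.pem"}  % placeholder path
                         ]).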
-accept(udp, #config{transport_info = {Transport = gen_udp,_,_,_},
+accept(dtls, #config{transport_info = {Transport,_,_,_},
connection_cb = ConnectionCb,
- udp_handler = {Listner, _}}, _Timeout) ->
- case dtls_udp_listener:accept(Listner, self()) of
+ dtls_handler = {Listner, _}}, _Timeout) ->
+ case dtls_packet_demux:accept(Listner, self()) of
{ok, Pid, Socket} ->
{ok, socket(Pid, Transport, {Listner, Socket}, ConnectionCb)};
{error, Reason} ->
@@ -69,7 +69,9 @@ connect(Address, Port, #config{transport_info = {Transport, _, _, _} = CbInfo,
end.
close(gen_udp, {_Client, _Socket}) ->
- ok.
+ ok;
+close(Transport, {_Client, Socket}) ->
+ Transport:close(Socket).
socket(Pid, gen_udp = Transport, {{_, _}, Socket}, ConnectionCb) ->
#sslsocket{pid = Pid,
@@ -79,18 +81,18 @@ socket(Pid, Transport, Socket, ConnectionCb) ->
#sslsocket{pid = Pid,
%% "The name "fd" is keept for backwards compatibility
fd = {Transport, Socket, ConnectionCb}}.
-setopts(_, #sslsocket{pid = {udp, #config{udp_handler = {ListenPid, _}}}}, Options) ->
+setopts(_, #sslsocket{pid = {dtls, #config{dtls_handler = {ListenPid, _}}}}, Options) ->
SplitOpts = tls_socket:split_options(Options),
- dtls_udp_listener:set_sock_opts(ListenPid, SplitOpts);
+ dtls_packet_demux:set_sock_opts(ListenPid, SplitOpts);
%%% Following clauses will not be called for emulated options, they are handled in the connection process
setopts(gen_udp, Socket, Options) ->
inet:setopts(Socket, Options);
setopts(Transport, Socket, Options) ->
Transport:setopts(Socket, Options).
-getopts(_, #sslsocket{pid = {udp, #config{udp_handler = {ListenPid, _}}}}, Options) ->
+getopts(_, #sslsocket{pid = {dtls, #config{dtls_handler = {ListenPid, _}}}}, Options) ->
SplitOpts = tls_socket:split_options(Options),
- dtls_udp_listener:get_sock_opts(ListenPid, SplitOpts);
+ dtls_packet_demux:get_sock_opts(ListenPid, SplitOpts);
getopts(gen_udp, #sslsocket{pid = {Socket, #config{emulated = EmOpts}}}, Options) ->
{SockOptNames, EmulatedOptNames} = tls_socket:split_options(Options),
EmulatedOpts = get_emulated_opts(EmOpts, EmulatedOptNames),
@@ -112,7 +114,7 @@ getstat(gen_udp, {_,Socket}, Options) ->
inet:getstat(Socket, Options);
getstat(Transport, Socket, Options) ->
Transport:getstat(Socket, Options).
-peername(udp, _) ->
+peername(_, undefined) ->
{error, enotconn};
peername(gen_udp, {_, {Client, _Socket}}) ->
{ok, Client};
diff --git a/lib/ssl/src/inet_tls_dist.erl b/lib/ssl/src/inet_tls_dist.erl
index 3e9828a2fe..a6ceff25cb 100644
--- a/lib/ssl/src/inet_tls_dist.erl
+++ b/lib/ssl/src/inet_tls_dist.erl
@@ -518,51 +518,16 @@ gen_setup(Driver, Node, Type, MyNode, LongOrShortNames, SetupTime) ->
do_setup(Driver, Kernel, Node, Type, MyNode, LongOrShortNames, SetupTime) ->
{Name, Address} = split_node(Driver, Node, LongOrShortNames),
- case Driver:getaddr(Address) of
+ ErlEpmd = net_kernel:epmd_module(),
+ {ARMod, ARFun} = get_address_resolver(ErlEpmd, Driver),
+ Timer = trace(dist_util:start_timer(SetupTime)),
+ case ARMod:ARFun(Name,Address,Driver:family()) of
+ {ok, Ip, TcpPort, Version} ->
+ do_setup_connect(Driver, Kernel, Node, Address, Ip, TcpPort, Version, Type, MyNode, Timer);
{ok, Ip} ->
- Timer = trace(dist_util:start_timer(SetupTime)),
- ErlEpmd = net_kernel:epmd_module(),
case ErlEpmd:port_please(Name, Ip) of
{port, TcpPort, Version} ->
- Opts =
- trace(
- connect_options(
- %%
- %% Use verify_server/3 to verify that
- %% the server's certificate is for Node
- %%
- setup_verify_server(
- get_ssl_options(client), Node))),
- dist_util:reset_timer(Timer),
- case ssl:connect(
- Address, TcpPort,
- [binary, {active, false}, {packet, 4},
- Driver:family(), nodelay()] ++ Opts,
- net_kernel:connecttime()) of
- {ok, #sslsocket{pid = DistCtrl} = SslSocket} ->
- _ = monitor_pid(DistCtrl),
- ok = ssl:controlling_process(SslSocket, self()),
- HSData0 = hs_data_common(SslSocket),
- HSData =
- HSData0#hs_data{
- kernel_pid = Kernel,
- other_node = Node,
- this_node = MyNode,
- socket = DistCtrl,
- timer = Timer,
- this_flags = 0,
- other_version = Version,
- request_type = Type},
- link(DistCtrl),
- dist_util:handshake_we_started(trace(HSData));
- Other ->
- %% Other Node may have closed since
- %% port_please !
- ?shutdown2(
- Node,
- trace(
- {ssl_connect_failed, Ip, TcpPort, Other}))
- end;
+ do_setup_connect(Driver, Kernel, Node, Address, Ip, TcpPort, Version, Type, MyNode, Timer);
Other ->
?shutdown2(
Node,
@@ -575,6 +540,47 @@ do_setup(Driver, Kernel, Node, Type, MyNode, LongOrShortNames, SetupTime) ->
trace({getaddr_failed, Driver, Address, Other}))
end.
+do_setup_connect(Driver, Kernel, Node, Address, Ip, TcpPort, Version, Type, MyNode, Timer) ->
+ Opts =
+ trace(
+ connect_options(
+ %%
+ %% Use verify_server/3 to verify that
+ %% the server's certificate is for Node
+ %%
+ setup_verify_server(
+ get_ssl_options(client), Node))),
+ dist_util:reset_timer(Timer),
+ case ssl:connect(
+ Address, TcpPort,
+ [binary, {active, false}, {packet, 4},
+ Driver:family(), nodelay()] ++ Opts,
+ net_kernel:connecttime()) of
+ {ok, #sslsocket{pid = DistCtrl} = SslSocket} ->
+ _ = monitor_pid(DistCtrl),
+ ok = ssl:controlling_process(SslSocket, self()),
+ HSData0 = hs_data_common(SslSocket),
+ HSData =
+ HSData0#hs_data{
+ kernel_pid = Kernel,
+ other_node = Node,
+ this_node = MyNode,
+ socket = DistCtrl,
+ timer = Timer,
+ this_flags = 0,
+ other_version = Version,
+ request_type = Type},
+ link(DistCtrl),
+ dist_util:handshake_we_started(trace(HSData));
+ Other ->
+ %% Other Node may have closed since
+ %% port_please !
+ ?shutdown2(
+ Node,
+ trace(
+ {ssl_connect_failed, Ip, TcpPort, Other}))
+ end.
+
close(Socket) ->
gen_close(inet, Socket).
@@ -644,6 +650,16 @@ verify_server(PeerCert, valid_peer, {CertNodesFun,Node} = S) ->
%% ------------------------------------------------------------
+%% Determine if EPMD module supports address resolving. Default
+%% is to use inet_tcp:getaddr/2.
+%% ------------------------------------------------------------
+get_address_resolver(EpmdModule, Driver) ->
+ case erlang:function_exported(EpmdModule, address_please, 3) of
+ true -> {EpmdModule, address_please};
+ _ -> {Driver, getaddr}
+ end.
+
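The resolver returned here is invoked above as ARMod:ARFun(Name, Address, Driver:family()), so an EPMD replacement can resolve both the address and the distribution port in one call. A hedged sketch of such a callback module (module name, addresses, port, and version are made up for illustration):

    -module(my_epmd).                     % hypothetical module
    -export([address_please/3]).

    %% Return {ok, Ip, Port, Version} to skip port_please/2 entirely, or
    %% {ok, Ip} to fall back to the normal port lookup, as handled in do_setup/7.
    address_please(_Name, "node1.example.com", inet) ->
        {ok, {192,168,1,10}, 4370, 5};    % made-up address, port and version
    address_please(_Name, Host, Family) ->
        inet:getaddr(Host, Family).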
+%% ------------------------------------------------------------
%% Do only accept new connection attempts from nodes at our
%% own LAN, if the check_ip environment parameter is true.
%% ------------------------------------------------------------
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src
index 2aecb6836e..da281829cb 100644
--- a/lib/ssl/src/ssl.app.src
+++ b/lib/ssl/src/ssl.app.src
@@ -17,8 +17,8 @@
dtls_socket,
dtls_v1,
dtls_connection_sup,
- dtls_udp_listener,
- dtls_udp_sup,
+ dtls_packet_demux,
+ dtls_listener_sup,
%% API
ssl, %% Main API
tls, %% TLS specific
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index 5b6d92ebf4..a7b4ec2bf7 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -23,9 +23,17 @@
%%% Purpose : Main API module for SSL see also tls.erl and dtls.erl
-module(ssl).
--include("ssl_internal.hrl").
+
-include_lib("public_key/include/public_key.hrl").
+-include("ssl_internal.hrl").
+-include("ssl_api.hrl").
+-include("ssl_internal.hrl").
+-include("ssl_record.hrl").
+-include("ssl_cipher.hrl").
+-include("ssl_handshake.hrl").
+-include("ssl_srp.hrl").
+
%% Application handling
-export([start/0, start/1, stop/0, clear_pem_cache/0]).
@@ -39,8 +47,8 @@
close/1, close/2, shutdown/2, recv/2, recv/3, send/2,
getopts/2, setopts/2, getstat/1, getstat/2
]).
-%% SSL/TLS protocol handling
+%% SSL/TLS protocol handling
-export([cipher_suites/0, cipher_suites/1, cipher_suites/2, filter_cipher_suites/2,
prepend_cipher_suites/2, append_cipher_suites/2,
eccs/0, eccs/1, versions/0,
@@ -49,14 +57,9 @@
%% Misc
-export([handle_options/2, tls_version/1, new_ssl_options/3]).
--include("ssl_api.hrl").
--include("ssl_internal.hrl").
--include("ssl_record.hrl").
--include("ssl_cipher.hrl").
--include("ssl_handshake.hrl").
--include("ssl_srp.hrl").
-
--include_lib("public_key/include/public_key.hrl").
+-deprecated({ssl_accept, 1, eventually}).
+-deprecated({ssl_accept, 2, eventually}).
+-deprecated({ssl_accept, 3, eventually}).
%%--------------------------------------------------------------------
-spec start() -> ok | {error, reason()}.
@@ -231,7 +234,7 @@ handshake(#sslsocket{fd = {_, _, _, Tracker}} = Socket, SslOpts, Timeout) when
handshake(#sslsocket{pid = Pid, fd = {_, _, _}} = Socket, SslOpts, Timeout) when
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
try
- {ok, EmOpts, _} = dtls_udp_listener:get_all_opts(Pid),
+ {ok, EmOpts, _} = dtls_packet_demux:get_all_opts(Pid),
ssl_connection:handshake(Socket, {SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
catch
@@ -280,8 +283,8 @@ handshake_cancel(Socket) ->
%%--------------------------------------------------------------------
close(#sslsocket{pid = Pid}) when is_pid(Pid) ->
ssl_connection:close(Pid, {close, ?DEFAULT_TIMEOUT});
-close(#sslsocket{pid = {udp, #config{udp_handler = {Pid, _}}}}) ->
- dtls_udp_listener:close(Pid);
+close(#sslsocket{pid = {dtls, #config{dtls_handler = {Pid, _}}}}) ->
+ dtls_packet_demux:close(Pid);
close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_, _, _}}}}) ->
Transport:close(ListenSocket).
@@ -308,10 +311,10 @@ close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_, _, _}
%%--------------------------------------------------------------------
send(#sslsocket{pid = Pid}, Data) when is_pid(Pid) ->
ssl_connection:send(Pid, Data);
-send(#sslsocket{pid = {_, #config{transport_info={gen_udp, _, _, _}}}}, _) ->
+send(#sslsocket{pid = {_, #config{transport_info={_, udp, _, _}}}}, _) ->
{error,enotconn}; %% Emulate connection behaviour
-send(#sslsocket{pid = {udp,_}}, _) ->
- {error,enotconn};
+send(#sslsocket{pid = {dtls,_}}, _) ->
+ {error,enotconn}; %% Emulate connection behaviour
send(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport, _, _, _}}}}, Data) ->
Transport:send(ListenSocket, Data). %% {error,enotconn}
@@ -326,7 +329,7 @@ recv(Socket, Length) ->
recv(#sslsocket{pid = Pid}, Length, Timeout) when is_pid(Pid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
ssl_connection:recv(Pid, Length, Timeout);
-recv(#sslsocket{pid = {udp,_}}, _, _) ->
+recv(#sslsocket{pid = {dtls,_}}, _, _) ->
{error,enotconn};
recv(#sslsocket{pid = {Listen,
#config{transport_info = {Transport, _, _, _}}}}, _,_) when is_port(Listen)->
@@ -340,7 +343,7 @@ recv(#sslsocket{pid = {Listen,
%%--------------------------------------------------------------------
controlling_process(#sslsocket{pid = Pid}, NewOwner) when is_pid(Pid), is_pid(NewOwner) ->
ssl_connection:new_user(Pid, NewOwner);
-controlling_process(#sslsocket{pid = {udp, _}},
+controlling_process(#sslsocket{pid = {dtls, _}},
NewOwner) when is_pid(NewOwner) ->
ok; %% Meaningless but let it be allowed to conform with TLS
controlling_process(#sslsocket{pid = {Listen,
@@ -365,7 +368,7 @@ connection_information(#sslsocket{pid = Pid}) when is_pid(Pid) ->
end;
connection_information(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn};
-connection_information(#sslsocket{pid = {udp,_}}) ->
+connection_information(#sslsocket{pid = {dtls,_}}) ->
{error,enotconn}.
%%--------------------------------------------------------------------
@@ -391,13 +394,11 @@ peername(#sslsocket{pid = Pid, fd = {Transport, Socket, _}}) when is_pid(Pid)->
dtls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid)->
tls_socket:peername(Transport, Socket);
-peername(#sslsocket{pid = {udp = Transport, #config{udp_handler = {_Pid, _}}}}) ->
- dtls_socket:peername(Transport, undefined);
-peername(#sslsocket{pid = Pid, fd = {gen_udp= Transport, Socket, _, _}}) when is_pid(Pid) ->
- dtls_socket:peername(Transport, Socket);
+peername(#sslsocket{pid = {dtls, #config{dtls_handler = {_Pid, _}}}}) ->
+ dtls_socket:peername(dtls, undefined);
peername(#sslsocket{pid = {ListenSocket, #config{transport_info = {Transport,_,_,_}}}}) ->
tls_socket:peername(Transport, ListenSocket); %% Will return {error, enotconn}
-peername(#sslsocket{pid = {udp,_}}) ->
+peername(#sslsocket{pid = {dtls,_}}) ->
{error,enotconn}.
%%--------------------------------------------------------------------
@@ -412,7 +413,7 @@ peercert(#sslsocket{pid = Pid}) when is_pid(Pid) ->
Result ->
Result
end;
-peercert(#sslsocket{pid = {udp, _}}) ->
+peercert(#sslsocket{pid = {dtls, _}}) ->
{error, enotconn};
peercert(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn}.
@@ -562,7 +563,7 @@ eccs_filter_supported(Curves) ->
%%--------------------------------------------------------------------
getopts(#sslsocket{pid = Pid}, OptionTags) when is_pid(Pid), is_list(OptionTags) ->
ssl_connection:get_opts(Pid, OptionTags);
-getopts(#sslsocket{pid = {udp, #config{transport_info = {Transport,_,_,_}}}} = ListenSocket, OptionTags) when is_list(OptionTags) ->
+getopts(#sslsocket{pid = {dtls, #config{transport_info = {Transport,_,_,_}}}} = ListenSocket, OptionTags) when is_list(OptionTags) ->
try dtls_socket:getopts(Transport, ListenSocket, OptionTags) of
{ok, _} = Result ->
Result;
@@ -600,7 +601,7 @@ setopts(#sslsocket{pid = Pid}, Options0) when is_pid(Pid), is_list(Options0) ->
_:_ ->
{error, {options, {not_a_proplist, Options0}}}
end;
-setopts(#sslsocket{pid = {udp, #config{transport_info = {Transport,_,_,_}}}} = ListenSocket, Options) when is_list(Options) ->
+setopts(#sslsocket{pid = {dtls, #config{transport_info = {Transport,_,_,_}}}} = ListenSocket, Options) when is_list(Options) ->
try dtls_socket:setopts(Transport, ListenSocket, Options) of
ok ->
ok;
@@ -657,7 +658,7 @@ getstat(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}, Options) when is_
shutdown(#sslsocket{pid = {Listen, #config{transport_info = {Transport,_, _, _}}}},
How) when is_port(Listen) ->
Transport:shutdown(Listen, How);
-shutdown(#sslsocket{pid = {udp,_}},_) ->
+shutdown(#sslsocket{pid = {dtls,_}},_) ->
{error, enotconn};
shutdown(#sslsocket{pid = Pid}, How) ->
ssl_connection:shutdown(Pid, How).
@@ -669,8 +670,8 @@ shutdown(#sslsocket{pid = Pid}, How) ->
%%--------------------------------------------------------------------
sockname(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _}}}}) when is_port(Listen) ->
tls_socket:sockname(Transport, Listen);
-sockname(#sslsocket{pid = {udp, #config{udp_handler = {Pid, _}}}}) ->
- dtls_udp_listener:sockname(Pid);
+sockname(#sslsocket{pid = {dtls, #config{dtls_handler = {Pid, _}}}}) ->
+ dtls_packet_demux:sockname(Pid);
sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _}}) when is_pid(Pid) ->
dtls_socket:sockname(Transport, Socket);
sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid) ->
@@ -704,7 +705,7 @@ versions() ->
%%--------------------------------------------------------------------
renegotiate(#sslsocket{pid = Pid}) when is_pid(Pid) ->
ssl_connection:renegotiation(Pid);
-renegotiate(#sslsocket{pid = {udp,_}}) ->
+renegotiate(#sslsocket{pid = {dtls,_}}) ->
{error, enotconn};
renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
{error, enotconn}.
@@ -719,7 +720,7 @@ renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
prf(#sslsocket{pid = Pid},
Secret, Label, Seed, WantedLength) when is_pid(Pid) ->
ssl_connection:prf(Pid, Secret, Label, Seed, WantedLength);
-prf(#sslsocket{pid = {udp,_}}, _,_,_,_) ->
+prf(#sslsocket{pid = {dtls,_}}, _,_,_,_) ->
{error, enotconn};
prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) ->
{error, enotconn}.
@@ -792,8 +793,8 @@ supported_suites(anonymous, Version) ->
do_listen(Port, #config{transport_info = {Transport, _, _, _}} = Config, tls_connection) ->
tls_socket:listen(Transport, Port, Config);
-do_listen(Port, #config{transport_info = {Transport, _, _, _}} = Config, dtls_connection) ->
- dtls_socket:listen(Transport, Port, Config).
+do_listen(Port, Config, dtls_connection) ->
+ dtls_socket:listen(Port, Config).
%% Handle extra ssl options given to ssl_accept
-spec handle_options([any()], #ssl_options{}) -> #ssl_options{}
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index 0956d3501d..3f8b9a8a9b 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -2230,7 +2230,7 @@ filter(DerCert, Ciphers0, Version) ->
Ciphers0, Version, OtpCert),
{_, Sign} = public_key:pkix_sign_types(SigAlg#'SignatureAlgorithm'.algorithm),
filter_suites_signature(Sign, Ciphers, Version).
-
+
%%--------------------------------------------------------------------
-spec filter_suites([erl_cipher_suite()] | [cipher_suite()], map()) ->
[erl_cipher_suite()] | [cipher_suite()].
@@ -2662,29 +2662,33 @@ next_iv(Bin, IV) ->
<<_:FirstPart/binary, NextIV:IVSz/binary>> = Bin,
NextIV.
-
-filter_suites_pubkey(rsa, CiphersSuites0, Version, OtpCert) ->
+filter_suites_pubkey(rsa, CiphersSuites0, _Version, OtpCert) ->
KeyUses = key_uses(OtpCert),
+ NotECDSAKeyed = (CiphersSuites0 -- ec_keyed_suites(CiphersSuites0))
+ -- dss_keyed_suites(CiphersSuites0),
CiphersSuites = filter_keyuse_suites(keyEncipherment, KeyUses,
- (CiphersSuites0 -- ec_keyed_suites(CiphersSuites0))
- -- dss_keyed_suites(CiphersSuites0),
+ NotECDSAKeyed,
rsa_suites_encipher(CiphersSuites0)),
filter_keyuse_suites(digitalSignature, KeyUses, CiphersSuites,
- rsa_signed_suites(CiphersSuites, Version));
-filter_suites_pubkey(dsa, Ciphers, _, _OtpCert) ->
- (Ciphers -- rsa_keyed_suites(Ciphers)) -- ec_keyed_suites(Ciphers);
+ rsa_ecdhe_dhe_suites(CiphersSuites));
+filter_suites_pubkey(dsa, Ciphers, _, OtpCert) ->
+ KeyUses = key_uses(OtpCert),
+ NotECRSAKeyed = (Ciphers -- rsa_keyed_suites(Ciphers)) -- ec_keyed_suites(Ciphers),
+ filter_keyuse_suites(digitalSignature, KeyUses, NotECRSAKeyed,
+ dss_dhe_suites(Ciphers));
filter_suites_pubkey(ec, Ciphers, _, OtpCert) ->
- Uses = key_uses(OtpCert),
- filter_keyuse_suites(digitalSignature, Uses,
- (Ciphers -- rsa_keyed_suites(Ciphers)) -- dss_keyed_suites(Ciphers),
- ecdsa_sign_suites(Ciphers)).
+ Uses = key_uses(OtpCert),
+ NotRSADSAKeyed = (Ciphers -- rsa_keyed_suites(Ciphers)) -- dss_keyed_suites(Ciphers),
+ CiphersSuites = filter_keyuse_suites(digitalSignature, Uses, NotRSADSAKeyed,
+ ec_ecdhe_suites(Ciphers)),
+ filter_keyuse_suites(keyAgreement, Uses, CiphersSuites, ec_ecdh_suites(Ciphers)).
filter_suites_signature(rsa, Ciphers, Version) ->
- Ciphers -- ecdsa_signed_suites(Ciphers, Version) -- dsa_signed_suites(Ciphers, Version);
+ (Ciphers -- ecdsa_signed_suites(Ciphers, Version)) -- dsa_signed_suites(Ciphers, Version);
filter_suites_signature(dsa, Ciphers, Version) ->
- Ciphers -- ecdsa_signed_suites(Ciphers, Version) -- rsa_signed_suites(Ciphers, Version);
+ (Ciphers -- ecdsa_signed_suites(Ciphers, Version)) -- rsa_signed_suites(Ciphers, Version);
filter_suites_signature(ecdsa, Ciphers, Version) ->
- Ciphers -- rsa_signed_suites(Ciphers, Version) -- dsa_signed_suites(Ciphers, Version).
+ (Ciphers -- rsa_signed_suites(Ciphers, Version)) -- dsa_signed_suites(Ciphers, Version).
%% From RFC 5246 - Section 7.4.2. Server Certificate
@@ -2751,8 +2755,6 @@ rsa_keyed(rsa_psk) ->
true;
rsa_keyed(srp_rsa) ->
true;
-rsa_keyed(ecdhe_rsa) ->
- true;
rsa_keyed(_) ->
false.
@@ -2793,24 +2795,22 @@ dsa_signed_suites(Ciphers, Version) ->
cipher_filters => [],
mac_filters => [],
prf_filters => []}).
-
-dsa_signed({3,N}) when N >= 3 ->
- fun(dhe_dss) -> true;
- (ecdhe_dss) -> true;
- (_) -> false
- end;
dsa_signed(_) ->
fun(dhe_dss) -> true;
- (ecdh_dss) -> true;
- (ecdhe_dss) -> true;
(_) -> false
end.
+dss_dhe_suites(Ciphers) ->
+ filter_suites(Ciphers, #{key_exchange_filters => [fun(dhe_dss) -> true;
+ (_) -> false
+ end],
+ cipher_filters => [],
+ mac_filters => [],
+ prf_filters => []}).
+
ec_keyed(ecdh_ecdsa) ->
true;
-ec_keyed(ecdhe_ecdsa) ->
- true;
-ec_keyed(ecdh_rsa) ->
+ec_keyed(ecdh_rsa) ->
true;
ec_keyed(_) ->
false.
@@ -2822,9 +2822,28 @@ ec_keyed_suites(Ciphers) ->
mac_filters => [],
prf_filters => []}).
-%% EC Certs key can be used for signing
-ecdsa_sign_suites(Ciphers)->
+%% EC Certs key usage keyAgreement
+ec_ecdh_suites(Ciphers)->
+ filter_suites(Ciphers, #{key_exchange_filters => [fun(ecdh_ecdsa) -> true;
+ (_) -> false
+ end],
+ cipher_filters => [],
+ mac_filters => [],
+ prf_filters => []}).
+
+%% EC Certs key usage digitalSignature
+ec_ecdhe_suites(Ciphers) ->
filter_suites(Ciphers, #{key_exchange_filters => [fun(ecdhe_ecdsa) -> true;
+ (ecdhe_rsa) -> true;
+ (_) -> false
+ end],
+ cipher_filters => [],
+ mac_filters => [],
+ prf_filters => []}).
+%% RSA Certs key usage digitalSignature
+rsa_ecdhe_dhe_suites(Ciphers) ->
+ filter_suites(Ciphers, #{key_exchange_filters => [fun(dhe_rsa) -> true;
+ (ecdhe_rsa) -> true;
(_) -> false
end],
cipher_filters => [],
@@ -2837,11 +2856,14 @@ key_uses(OtpCert) ->
Extensions = ssl_certificate:extensions_list(TBSExtensions),
case ssl_certificate:select_extension(?'id-ce-keyUsage', Extensions) of
undefined ->
- undefined;
+ [];
#'Extension'{extnValue = KeyUses} ->
KeyUses
end.
+%% If no key-usage extension is defined all key-usages are allowed
+filter_keyuse_suites(_, [], CiphersSuites, _) ->
+ CiphersSuites;
filter_keyuse_suites(Use, KeyUse, CipherSuits, Suites) ->
case ssl_certificate:is_valid_key_usage(KeyUse, Use) of
true ->
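The filter maps built by these helpers have the same shape as the ones accepted by the public ssl:filter_cipher_suites/2 API (used the same way in ssl_test_lib further down). A hedged shell sketch, assuming the map-based suite format returned by ssl:cipher_suites(default, Version):

    %% Keep only the DHE-RSA and ECDHE-RSA suites, i.e. the same key_exchange
    %% filter as rsa_ecdhe_dhe_suites/1 above, but through the public API.
    1> Suites = ssl:cipher_suites(default, 'tlsv1.2').
    2> ssl:filter_cipher_suites(Suites,
           [{key_exchange, fun(dhe_rsa)   -> true;
                              (ecdhe_rsa) -> true;
                              (_)         -> false
                           end}]).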
diff --git a/lib/ssl/src/ssl_connection_sup.erl b/lib/ssl/src/ssl_connection_sup.erl
index 1a1f43e683..1aa7c5844f 100644
--- a/lib/ssl/src/ssl_connection_sup.erl
+++ b/lib/ssl/src/ssl_connection_sup.erl
@@ -51,12 +51,12 @@ init([]) ->
ListenOptionsTracker = listen_options_tracker_child_spec(),
DTLSConnetionManager = dtls_connection_manager_child_spec(),
- DTLSUdpListeners = dtls_udp_listeners_spec(),
+ DTLSListeners = dtls_listeners_spec(),
{ok, {{one_for_one, 10, 3600}, [TLSConnetionManager,
ListenOptionsTracker,
DTLSConnetionManager,
- DTLSUdpListeners
+ DTLSListeners
]}}.
@@ -91,9 +91,9 @@ listen_options_tracker_child_spec() ->
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-dtls_udp_listeners_spec() ->
- Name = dtls_udp_listener,
- StartFunc = {dtls_udp_sup, start_link, []},
+dtls_listeners_spec() ->
+ Name = dtls_listener,
+ StartFunc = {dtls_listener_sup, start_link, []},
Restart = permanent,
Shutdown = 4000,
Modules = [],
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 090e7b69b7..ebbb633b22 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -2233,13 +2233,12 @@ sign_algo(Alg) ->
is_acceptable_hash_sign(Algos, _, _, KeyExAlgo, SupportedHashSigns) when
KeyExAlgo == dh_dss;
KeyExAlgo == dh_rsa;
- KeyExAlgo == ecdh_ecdsa;
KeyExAlgo == ecdh_rsa;
KeyExAlgo == ecdh_ecdsa
->
%% *dh_* could be called only *dh in TLS-1.2
is_acceptable_hash_sign(Algos, SupportedHashSigns);
-is_acceptable_hash_sign(Algos, rsa, ecdsa, ecdh_rsa, SupportedHashSigns) ->
+is_acceptable_hash_sign(Algos, rsa, ecdsa, ecdhe_rsa, SupportedHashSigns) ->
is_acceptable_hash_sign(Algos, SupportedHashSigns);
is_acceptable_hash_sign({_, rsa} = Algos, rsa, _, dhe_rsa, SupportedHashSigns) ->
is_acceptable_hash_sign(Algos, SupportedHashSigns);
@@ -2270,7 +2269,7 @@ is_acceptable_hash_sign(_, _, _, KeyExAlgo, _) when
KeyExAlgo == ecdhe_anon
->
true;
-is_acceptable_hash_sign(_,_, _,_,_) ->
+is_acceptable_hash_sign(_,_,_,_,_) ->
false.
is_acceptable_hash_sign(Algos, SupportedHashSigns) ->
lists:member(Algos, SupportedHashSigns).
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 5df00de0e5..977d012fa7 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -160,7 +160,7 @@
-record(config, {ssl, %% SSL parameters
inet_user, %% User set inet options
emulated, %% Emulated option list or "inherit_tracker" pid
- udp_handler,
+ dtls_handler,
inet_ssl, %% inet options for internal ssl socket
transport_info, %% Callback info
connection_cb
diff --git a/lib/ssl/test/ssl_ECC.erl b/lib/ssl/test/ssl_ECC.erl
index 2096cf8166..36d949f74b 100644
--- a/lib/ssl/test/ssl_ECC.erl
+++ b/lib/ssl/test/ssl_ECC.erl
@@ -34,53 +34,65 @@
%% ECDH_RSA
client_ecdh_rsa_server_ecdh_rsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [keyAgreement]}]),
Suites = all_rsa_suites(Config),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdh_rsa, ecdh_rsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
ssl_test_lib:ssl_options(SOpts, Config),
[{check_keyex, ecdh_rsa}, {ciphers, Suites} | proplists:delete(check_keyex, Config)]).
client_ecdhe_rsa_server_ecdh_rsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [keyAgreement]}]),
Suites = all_rsa_suites(Config),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdhe_rsa, ecdh_rsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
ssl_test_lib:ssl_options(SOpts, Config),
[{check_keyex, ecdh_rsa}, {ciphers, Suites} | proplists:delete(check_keyex, Config)]).
client_ecdhe_ecdsa_server_ecdh_rsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [keyAgreement]}]),
Suites = all_rsa_suites(Config),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdhe_ecdsa, ecdh_rsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
- ssl_test_lib:ssl_options(SOpts, Config),
- [{check_keyex, ecdh_rsa}, {ciphers, Suites} | proplists:delete(check_keyex, Config)]).
+ ssl_test_lib:ssl_options(SOpts, Config),
+ [{check_keyex, ecdh_rsa}, {ciphers, Suites} | proplists:delete(check_keyex, Config)]).
%% ECDHE_RSA
client_ecdh_rsa_server_ecdhe_rsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdh_rsa, ecdhe_rsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
ssl_test_lib:ssl_options(SOpts, Config),
[{check_keyex, ecdhe_rsa} | proplists:delete(check_keyex, Config)]).
client_ecdhe_rsa_server_ecdhe_rsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdhe_rsa, ecdhe_rsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
- ssl_test_lib:ssl_options(SOpts, Config),
+ ssl_test_lib:ssl_options(SOpts, Config),
[{check_keyex, ecdhe_rsa} | proplists:delete(check_keyex, Config)]).
client_ecdhe_ecdsa_server_ecdhe_rsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdh_ecdsa, ecdhe_rsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
@@ -122,24 +134,30 @@ client_ecdhe_ecdsa_server_ecdh_ecdsa(Config) when is_list(Config) ->
%% ECDHE_ECDSA
client_ecdh_rsa_server_ecdhe_ecdsa(Config) when is_list(Config) ->
- Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
+ Default = ssl_test_lib:default_cert_chain_conf(),
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdh_rsa, ecdhe_ecdsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
ssl_test_lib:ssl_options(SOpts, Config),
[{check_keyex, ecdhe_ecdsa} | proplists:delete(check_keyex, Config)]).
client_ecdh_ecdsa_server_ecdhe_ecdsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdh_ecdsa, ecdhe_ecdsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
ssl_test_lib:ssl_options(SOpts, Config),
[{check_keyex, ecdhe_ecdsa} | proplists:delete(check_keyex, Config)]).
client_ecdhe_ecdsa_server_ecdhe_ecdsa(Config) when is_list(Config) ->
+ Ext = x509_test:extensions([{key_usage, [digitalSignature]}]),
Default = ssl_test_lib:default_cert_chain_conf(),
- {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain, Default},
+ {COpts, SOpts} = ssl_test_lib:make_ec_cert_chains([{server_chain,
+ [[], [], [{extensions, Ext}]]},
{client_chain, Default}],
ecdhe_ecdsa, ecdhe_ecdsa, Config),
ssl_test_lib:basic_test(ssl_test_lib:ssl_options(COpts, Config),
diff --git a/lib/ssl/test/ssl_ECC_openssl_SUITE.erl b/lib/ssl/test/ssl_ECC_openssl_SUITE.erl
index 280fa94ecb..5a08b152a6 100644
--- a/lib/ssl/test/ssl_ECC_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_ECC_openssl_SUITE.erl
@@ -157,7 +157,7 @@ init_per_testcase(TestCase, Config) ->
ct:log("Ciphers: ~p~n ", [ssl:cipher_suites(default, Version)]),
end_per_testcase(TestCase, Config),
ssl:start(),
- ct:timetrap({seconds, 15}),
+ ct:timetrap({seconds, 30}),
Config.
end_per_testcase(_TestCase, Config) ->
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index d3b13050e3..162c63850f 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -686,11 +686,16 @@ hello_client_cancel(Config) when is_list(Config) ->
{host, Hostname},
{from, self()},
{options, ssl_test_lib:ssl_options([{handshake, hello}], Config)},
- {continue_options, cancel}]),
-
- ssl_test_lib:check_result(Server, {error, {tls_alert, "user canceled"}}).
-%%--------------------------------------------------------------------
+ {continue_options, cancel}]),
+ receive
+ {Server, {error, {tls_alert, "user canceled"}}} ->
+ ok;
+ {Server, {error, closed}} ->
+ ct:pal("Did not receive the ALERT"),
+ ok
+ end.
+%%--------------------------------------------------------------------
hello_server_cancel() ->
[{doc, "Test API function ssl:handshake_cancel/1 on the server side"}].
hello_server_cancel(Config) when is_list(Config) ->
@@ -2539,7 +2544,7 @@ anonymous_cipher_suites()->
[{doc,"Test the anonymous ciphersuites"}].
anonymous_cipher_suites(Config) when is_list(Config) ->
NVersion = ssl_test_lib:protocol_version(Config, tuple),
- Ciphers = ssl_test_lib:anonymous_suites(NVersion),
+ Ciphers = ssl_test_lib:ecdh_dh_anonymous_suites(NVersion),
run_suites(Ciphers, Config, anonymous).
%%-------------------------------------------------------------------
psk_cipher_suites() ->
@@ -2635,7 +2640,7 @@ default_reject_anonymous(Config) when is_list(Config) ->
Version = ssl_test_lib:protocol_version(Config),
TLSVersion = ssl_test_lib:tls_version(Version),
- [CipherSuite | _] = ssl_test_lib:anonymous_suites(TLSVersion),
+ [CipherSuite | _] = ssl_test_lib:ecdh_dh_anonymous_suites(TLSVersion),
Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
{from, self()},
@@ -5046,8 +5051,14 @@ tls_downgrade_result(Socket) ->
tls_close(Socket) ->
ok = ssl_test_lib:send_recv_result(Socket),
- ok = ssl:close(Socket, 5000).
-
+ case ssl:close(Socket, 5000) of
+ ok ->
+ ok;
+ {error, closed} ->
+ ok;
+ Other ->
+ ct:fail(Other)
+ end.
%% First two clauses handles 1/n-1 splitting countermeasure Rizzo/Duong-Beast
treashold(N, {3,0}) ->
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index 8c27571d64..1e88ca15de 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -1264,8 +1264,16 @@ string_regex_filter(Str, Search) when is_list(Str) ->
string_regex_filter(_Str, _Search) ->
false.
-anonymous_suites(Version) ->
- ssl:filter_cipher_suites([ssl_cipher:suite_definition(S) || S <- ssl_cipher:anonymous_suites(Version)],[]).
+ecdh_dh_anonymous_suites(Version) ->
+ ssl:filter_cipher_suites([ssl_cipher:suite_definition(S) || S <- ssl_cipher:anonymous_suites(Version)],
+ [{key_exchange,
+ fun(dh_anon) ->
+ true;
+ (ecdh_anon) ->
+ true;
+ (_) ->
+ false
+ end}]).
psk_suites(Version) ->
ssl:filter_cipher_suites([ssl_cipher:suite_definition(S) || S <- ssl_cipher:psk_suites(Version)], []).
diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl
index a2e8ef8be0..4f02d8d15d 100644
--- a/lib/ssl/test/ssl_to_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl
@@ -412,8 +412,16 @@ basic_erlang_server_openssl_client(Config) when is_list(Config) ->
Port = ssl_test_lib:inet_port(Server),
Exe = "openssl",
- Args = ["s_client", "-connect", hostname_format(Hostname) ++
- ":" ++ integer_to_list(Port) ++ no_low_flag() | workaround_openssl_s_clinent()],
+ Args = case no_low_flag("-no_ssl2") of
+ [] ->
+ ["s_client", "-connect", hostname_format(Hostname) ++
+ ":" ++ integer_to_list(Port), no_low_flag("-no_ssl3")
+ | workaround_openssl_s_clinent()];
+ Flag ->
+ ["s_client", "-connect", hostname_format(Hostname) ++
+ ":" ++ integer_to_list(Port), no_low_flag("-no_ssl3"), Flag
+ | workaround_openssl_s_clinent()]
+ end,
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
true = port_command(OpenSslPort, Data),
@@ -588,7 +596,7 @@ erlang_client_openssl_server_anon(Config) when is_list(Config) ->
ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
ClientOpts = ssl_test_lib:ssl_options(client_anon_opts, Config),
VersionTuple = ssl_test_lib:protocol_version(Config, tuple),
- Ciphers = ssl_test_lib:anonymous_suites(VersionTuple),
+ Ciphers = ssl_test_lib:ecdh_dh_anonymous_suites(VersionTuple),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -631,7 +639,7 @@ erlang_server_openssl_client_anon(Config) when is_list(Config) ->
process_flag(trap_exit, true),
ServerOpts = ssl_test_lib:ssl_options(server_anon_opts, Config),
VersionTuple = ssl_test_lib:protocol_version(Config, tuple),
- Ciphers = ssl_test_lib:anonymous_suites(VersionTuple),
+ Ciphers = ssl_test_lib:ecdh_dh_anonymous_suites(VersionTuple),
{_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -665,7 +673,7 @@ erlang_server_openssl_client_anon_with_cert(Config) when is_list(Config) ->
process_flag(trap_exit, true),
ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
VersionTuple = ssl_test_lib:protocol_version(Config, tuple),
- Ciphers = ssl_test_lib:anonymous_suites(VersionTuple),
+ Ciphers = ssl_test_lib:ecdh_dh_anonymous_suites(VersionTuple),
{_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -1995,10 +2003,12 @@ hostname_format(Hostname) ->
"localhost"
end.
-no_low_flag() ->
+no_low_flag("-no_ssl2" = Flag) ->
case ssl_test_lib:supports_ssl_tls_version(sslv2) of
true ->
- " -no_ssl2 -no_ssl3";
+ Flag;
false ->
- " -no_ssl3"
- end.
+ ""
+ end;
+no_low_flag(Flag) ->
+ Flag.
diff --git a/lib/stdlib/doc/src/Makefile b/lib/stdlib/doc/src/Makefile
index 508a4fa2de..5c6b714f80 100644
--- a/lib/stdlib/doc/src/Makefile
+++ b/lib/stdlib/doc/src/Makefile
@@ -71,7 +71,6 @@ XML_REF3_FILES = \
gen_statem.xml \
io.xml \
io_lib.xml \
- lib.xml \
lists.xml \
log_mf_h.xml \
maps.xml \
diff --git a/lib/stdlib/doc/src/io_lib.xml b/lib/stdlib/doc/src/io_lib.xml
index 4a2b425e8e..a3df2897ac 100644
--- a/lib/stdlib/doc/src/io_lib.xml
+++ b/lib/stdlib/doc/src/io_lib.xml
@@ -163,16 +163,20 @@
<p>Returns a character list that represents <c><anno>Data</anno></c>
formatted in accordance with <c><anno>Format</anno></c> in
the same way as
- <seealso marker="#fwrite/2"><c>fwrite/2</c></seealso> and
- <seealso marker="#format/2"><c>format/2</c></seealso>,
- but takes an extra argument, a list of options.</p>
- <p>Available options:</p>
- <taglist>
- <tag><c><anno>CharsLimit</anno></c></tag>
- <item>
- <p>A soft limit on the number of characters returned.</p>
- </item>
- </taglist>
+ <seealso marker="#fwrite/2"><c>fwrite/2</c></seealso> and
+ <seealso marker="#format/2"><c>format/2</c></seealso>,
+ but takes an extra argument, a list of options.</p>
+ <p>Valid option:</p>
+ <taglist>
+ <tag><c>{chars_limit, <anno>CharsLimit</anno>}</c></tag>
+ <item>
+ <p>A soft limit on the number of characters returned.
+ When the number of characters is reached, remaining
+ structures are replaced by "<c>...</c>".
+ <c><anno>CharsLimit</anno></c> defaults to -1, which
+ means no limit on the number of characters returned.</p>
+ </item>
+ </taglist>
</desc>
</func>
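A short usage sketch of the new option (the output shown in the comment is illustrative; the exact truncation point is not guaranteed):

    %% Cap the printed size of a large term; once the limit is reached the
    %% remaining structure is replaced by "...".
    1> lists:flatten(io_lib:format("~p", [lists:seq(1, 1000)], [{chars_limit, 30}])).
    %% => something like "[1,2,3,4,5,6,7,8,9|...]"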
@@ -390,11 +394,11 @@
everything below this level is replaced by "<c>...</c>".
<c><anno>Depth</anno></c> defaults to -1, which means
no limitation. Option <c><anno>CharsLimit</anno></c> puts a
- soft limit on the number of characters returned. When the
- number of characters is reached, remaining structures are
- replaced by "<c>...</c>". <c><anno>CharsLimit</anno></c>
- defaults to -1, which means no limit on the number of
- characters returned.</p>
+ soft limit on the number of characters returned. When the
+ number of characters is reached, remaining structures are
+ replaced by "<c>...</c>". <c><anno>CharsLimit</anno></c>
+ defaults to -1, which means no limit on the number of
+ characters returned.</p>
<p><em>Example:</em></p>
<pre>
1> <input>lists:flatten(io_lib:write({1,[2],[3],[4,5],6,7,8,9})).</input>
diff --git a/lib/stdlib/doc/src/lib.xml b/lib/stdlib/doc/src/lib.xml
deleted file mode 100644
index 58dad7c9e0..0000000000
--- a/lib/stdlib/doc/src/lib.xml
+++ /dev/null
@@ -1,103 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE erlref SYSTEM "erlref.dtd">
-
-<erlref>
- <header>
- <copyright>
- <year>1996</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>lib</title>
- <prepared></prepared>
- <docno></docno>
- <date></date>
- <rev></rev>
- </header>
- <module>lib</module>
- <modulesummary>Useful library functions.</modulesummary>
- <description>
- <warning>
- <p>This module is retained for backward compatibility. It can disappear
- without warning in a future Erlang/OTP release.</p>
- </warning>
- </description>
-
- <funcs>
- <func>
- <name name="error_message" arity="2"/>
- <fsummary>Print error message.</fsummary>
- <desc>
- <p>Prints error message <c><anno>Args</anno></c> in accordance with
- <c><anno>Format</anno></c>. Similar to
- <seealso marker="io#format/1"><c>io:format/2</c></seealso>.</p>
- </desc>
- </func>
-
- <func>
- <name name="flush_receive" arity="0"/>
- <fsummary>Flush messages.</fsummary>
- <desc>
- <p>Flushes the message buffer of the current process.</p>
- </desc>
- </func>
-
- <func>
- <name name="nonl" arity="1"/>
- <fsummary>Remove last newline.</fsummary>
- <desc>
- <p>Removes the last newline character, if any, in
- <c><anno>String1</anno></c>.</p>
- </desc>
- </func>
-
- <func>
- <name name="progname" arity="0"/>
- <fsummary>Return name of Erlang start script.</fsummary>
- <desc>
- <p>Returns the name of the script that started the current
- Erlang session.</p>
- </desc>
- </func>
-
- <func>
- <name name="send" arity="2"/>
- <fsummary>Send a message.</fsummary>
- <desc>
- <p>Makes it possible to send a message using the <c>apply/3</c> BIF.</p>
- </desc>
- </func>
-
- <func>
- <name name="sendw" arity="2"/>
- <fsummary>Send a message and wait for an answer.</fsummary>
- <desc>
- <p>As <seealso marker="#send/2"><c>send/2</c></seealso>,
- but waits for an answer. It is implemented as follows:</p>
- <code type="none">
-sendw(To, Msg) ->
- To ! {self(),Msg},
- receive
- Reply -> Reply
- end.</code>
- <p>The returned message is not necessarily a reply to the sent
- message.</p>
- </desc>
- </func>
- </funcs>
-</erlref>
-
diff --git a/lib/stdlib/doc/src/ref_man.xml b/lib/stdlib/doc/src/ref_man.xml
index 68bfddbc71..c6f30d272d 100644
--- a/lib/stdlib/doc/src/ref_man.xml
+++ b/lib/stdlib/doc/src/ref_man.xml
@@ -66,7 +66,6 @@
<xi:include href="gen_statem.xml"/>
<xi:include href="io.xml"/>
<xi:include href="io_lib.xml"/>
- <xi:include href="lib.xml"/>
<xi:include href="lists.xml"/>
<xi:include href="log_mf_h.xml"/>
<xi:include href="maps.xml"/>
diff --git a/lib/stdlib/doc/src/specs.xml b/lib/stdlib/doc/src/specs.xml
index d559adf9b6..fd2d625685 100644
--- a/lib/stdlib/doc/src/specs.xml
+++ b/lib/stdlib/doc/src/specs.xml
@@ -33,7 +33,6 @@
<xi:include href="../specs/specs_gen_statem.xml"/>
<xi:include href="../specs/specs_io.xml"/>
<xi:include href="../specs/specs_io_lib.xml"/>
- <xi:include href="../specs/specs_lib.xml"/>
<xi:include href="../specs/specs_lists.xml"/>
<xi:include href="../specs/specs_log_mf_h.xml"/>
<xi:include href="../specs/specs_maps.xml"/>
diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile
index dc3735055a..dfe6bf3e68 100644
--- a/lib/stdlib/src/Makefile
+++ b/lib/stdlib/src/Makefile
@@ -62,6 +62,7 @@ MODULES= \
erl_anno \
erl_bits \
erl_compile \
+ erl_error \
erl_eval \
erl_expand_records \
erl_internal \
@@ -91,7 +92,6 @@ MODULES= \
io_lib_format \
io_lib_fread \
io_lib_pretty \
- lib \
lists \
log_mf_h \
maps \
@@ -176,6 +176,7 @@ docs:
primary_bootstrap_compiler: \
$(BOOTSTRAP_COMPILER)/ebin/epp.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_anno.beam \
+ $(BOOTSTRAP_COMPILER)/ebin/erl_error.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_scan.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_parse.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_lint.beam \
diff --git a/lib/stdlib/src/epp.erl b/lib/stdlib/src/epp.erl
index 77cc88eb08..cc34d4bdd3 100644
--- a/lib/stdlib/src/epp.erl
+++ b/lib/stdlib/src/epp.erl
@@ -38,7 +38,7 @@
-type epp_handle() :: pid().
-type source_encoding() :: latin1 | utf8.
--type ifdef() :: 'ifdef' | 'ifndef' | 'else'.
+-type ifdef() :: 'ifdef' | 'ifndef' | 'if' | 'else'.
-type name() :: atom().
-type argspec() :: 'none' %No arguments
@@ -221,6 +221,8 @@ format_error({illegal_function,Macro}) ->
io_lib:format("?~s can only be used within a function", [Macro]);
format_error({illegal_function_usage,Macro}) ->
io_lib:format("?~s must not begin a form", [Macro]);
+format_error(elif_after_else) ->
+ "'elif' following 'else'";
format_error({'NYI',What}) ->
io_lib:format("not yet implemented '~s'", [What]);
format_error({error,Term}) ->
@@ -571,6 +573,7 @@ init_server(Pid, Name, Options, St0) ->
predef_macros(File) ->
Machine = list_to_atom(erlang:system_info(machine)),
Anno = line1(),
+ OtpVersion = list_to_integer(erlang:system_info(otp_release)),
Defs = [{'FILE', {none,[{string,Anno,File}]}},
{'FUNCTION_NAME', undefined},
{'FUNCTION_ARITY', undefined},
@@ -580,7 +583,8 @@ predef_macros(File) ->
{'BASE_MODULE', undefined},
{'BASE_MODULE_STRING', undefined},
{'MACHINE', {none,[{atom,Anno,Machine}]}},
- {Machine, {none,[{atom,Anno,true}]}}
+ {Machine, {none,[{atom,Anno,true}]}},
+ {'OTP_RELEASE', {none,[{integer,Anno,OtpVersion}]}}
],
maps:from_list(Defs).
@@ -1085,21 +1089,118 @@ scan_else(_Toks, Else, From, St) ->
epp_reply(From, {error,{loc(Else),epp,{bad,'else'}}}),
wait_req_scan(St).
-%% scan_if(Tokens, EndifToken, From, EppState)
+%% scan_if(Tokens, IfToken, From, EppState)
%% Handle the conditional parsing of a file.
-%% Report a badly formed if test and then treat as false macro.
+scan_if([{'(',_}|_]=Toks, If, From, St) ->
+ try eval_if(Toks, St) of
+ true ->
+ scan_toks(From, St#epp{istk=['if'|St#epp.istk]});
+ _ ->
+ skip_toks(From, St, ['if'])
+ catch
+ throw:Error0 ->
+ Error = case Error0 of
+ {_,erl_parse,_} ->
+ {error,Error0};
+ _ ->
+ {error,{loc(If),epp,Error0}}
+ end,
+ epp_reply(From, Error),
+ wait_req_skip(St, ['if'])
+ end;
scan_if(_Toks, If, From, St) ->
- epp_reply(From, {error,{loc(If),epp,{'NYI','if'}}}),
+ epp_reply(From, {error,{loc(If),epp,{bad,'if'}}}),
wait_req_skip(St, ['if']).
+eval_if(Toks0, St) ->
+ Toks = expand_macros(Toks0, St),
+ Es1 = case erl_parse:parse_exprs(Toks) of
+ {ok,Es0} -> Es0;
+ {error,E} -> throw(E)
+ end,
+ Es = rewrite_expr(Es1, St),
+ assert_guard_expr(Es),
+ Bs = erl_eval:new_bindings(),
+ LocalFun = fun(_Name, _Args) ->
+ error(badarg)
+ end,
+ try erl_eval:exprs(Es, Bs, {value,LocalFun}) of
+ {value,Res,_} ->
+ Res
+ catch
+ _:_ ->
+ false
+ end.
+
+assert_guard_expr([E0]) ->
+ E = rewrite_expr(E0, none),
+ case erl_lint:is_guard_expr(E) of
+ false ->
+ throw({bad,'if'});
+ true ->
+ ok
+ end;
+assert_guard_expr(_) ->
+ throw({bad,'if'}).
+
+%% Dual-purpose rewriting function. When the second argument is
+%% an #epp{} record, calls to defined(Symbol) will be evaluated.
+%% When the second argument is 'none', legal calls to our built-in
+%% functions are eliminated in order to turn the expression into
+%% a legal guard expression.
+
+rewrite_expr({call,_,{atom,_,defined},[N0]}, #epp{macs=Macs}) ->
+ %% Evaluate defined(Symbol).
+ N = case N0 of
+ {var,_,N1} -> N1;
+ {atom,_,N1} -> N1;
+ _ -> throw({bad,'if'})
+ end,
+ {atom,0,maps:is_key(N, Macs)};
+rewrite_expr({call,_,{atom,_,Name},As0}, none) ->
+ As = rewrite_expr(As0, none),
+ Arity = length(As),
+ case erl_internal:bif(Name, Arity) andalso
+ not erl_internal:guard_bif(Name, Arity) of
+ false ->
+ %% A guard BIF, an -if built-in, or an unknown function.
+ %% Eliminate the call so that erl_lint will not complain.
+ %% The call might fail later at evaluation time.
+ to_conses(As);
+ true ->
+ %% An auto-imported BIF (not guard BIF). Not allowed.
+ throw({bad,'if'})
+ end;
+rewrite_expr([H|T], St) ->
+ [rewrite_expr(H, St)|rewrite_expr(T, St)];
+rewrite_expr(Tuple, St) when is_tuple(Tuple) ->
+ list_to_tuple(rewrite_expr(tuple_to_list(Tuple), St));
+rewrite_expr(Other, _) ->
+ Other.
+
+to_conses([H|T]) ->
+ {cons,0,H,to_conses(T)};
+to_conses([]) ->
+ {nil,0}.
+
%% scan_elif(Tokens, EndifToken, From, EppState)
%% Handle the conditional parsing of a file.
%% Report a badly formed if test and then treat as false macro.
scan_elif(_Toks, Elif, From, St) ->
- epp_reply(From, {error,{loc(Elif),epp,{'NYI','elif'}}}),
- wait_req_scan(St).
+ case St#epp.istk of
+ ['else'|Cis] ->
+ epp_reply(From, {error,{loc(Elif),
+ epp,{illegal,"unbalanced",'elif'}}}),
+ wait_req_skip(St#epp{istk=Cis}, ['else']);
+ [_I|Cis] ->
+ skip_toks(From, St#epp{istk=Cis}, ['elif']);
+ [] ->
+ epp_reply(From, {error,{loc(Elif),epp,
+ {illegal,"unbalanced",elif}}}),
+ wait_req_scan(St)
+ end.
%% scan_endif(Tokens, EndifToken, From, EppState)
%% If we are in an if body then exit it, else report an error.
@@ -1158,6 +1259,8 @@ skip_toks(From, St, [I|Sis]) ->
skip_toks(From, St#epp{location=Cl}, ['if',I|Sis]);
{ok,[{'-',_Lh},{atom,_Le,'else'}=Else|_Toks],Cl}->
skip_else(Else, From, St#epp{location=Cl}, [I|Sis]);
+ {ok,[{'-',_Lh},{atom,_Le,'elif'}=Elif|Toks],Cl}->
+ skip_elif(Toks, Elif, From, St#epp{location=Cl}, [I|Sis]);
{ok,[{'-',_Lh},{atom,_Le,endif}|_Toks],Cl} ->
skip_toks(From, St#epp{location=Cl}, Sis);
{ok,_Toks,Cl} ->
@@ -1188,11 +1291,21 @@ skip_toks(From, St, []) ->
skip_else(Else, From, St, ['else'|Sis]) ->
epp_reply(From, {error,{loc(Else),epp,{illegal,"repeated",'else'}}}),
wait_req_skip(St, ['else'|Sis]);
+skip_else(_Else, From, St, ['elif'|Sis]) ->
+ skip_toks(From, St, ['else'|Sis]);
skip_else(_Else, From, St, [_I]) ->
scan_toks(From, St#epp{istk=['else'|St#epp.istk]});
skip_else(_Else, From, St, Sis) ->
skip_toks(From, St, Sis).
+skip_elif(_Toks, Elif, From, St, ['else'|_]=Sis) ->
+ epp_reply(From, {error,{loc(Elif),epp,elif_after_else}}),
+ wait_req_skip(St, Sis);
+skip_elif(Toks, Elif, From, St, [_I]) ->
+ scan_if(Toks, Elif, From, St);
+skip_elif(_Toks, _Elif, From, St, Sis) ->
+ skip_toks(From, St, Sis).
+
%% macro_pars(Tokens, ArgStack)
%% macro_expansion(Tokens, Anno)
%% Extract the macro parameters and the expansion from a macro definition.
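The change above turns -if and -elif into real conditionals: the argument must be a guard expression, defined(Name) is evaluated by rewrite_expr/2, and OTP_RELEASE is now predefined. A minimal sketch of how the directives combine (the module name and the USE_FALLBACK macro are made up for illustration):

    -module(cond_example).
    -export([impl/0]).

    %% Exactly one of the three clauses below survives preprocessing.
    -if(?OTP_RELEASE >= 21).
    impl() -> new_implementation.
    -elif(defined(USE_FALLBACK)).
    impl() -> fallback_implementation.
    -else.
    impl() -> legacy_implementation.
    -endif.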
diff --git a/lib/stdlib/src/lib.erl b/lib/stdlib/src/erl_error.erl
index 51e0c3f77e..fdcb9e824c 100644
--- a/lib/stdlib/src/lib.erl
+++ b/lib/stdlib/src/erl_error.erl
@@ -17,337 +17,12 @@
%%
%% %CopyrightEnd%
%%
--module(lib).
-
--export([flush_receive/0, error_message/2, progname/0, nonl/1, send/2,
- sendw/2, eval_str/1]).
-
--export([extended_parse_exprs/1, extended_parse_term/1,
- subst_values_for_vars/2]).
+-module(erl_error).
-export([format_exception/6, format_exception/7,
format_stacktrace/4, format_stacktrace/5,
format_call/4, format_call/5, format_fun/1, format_fun/2]).
--spec flush_receive() -> 'ok'.
-
-flush_receive() ->
- receive
- _Any ->
- flush_receive()
- after
- 0 ->
- ok
- end.
-
-%%
-%% Functions for doing standard system format i/o.
-%%
--spec error_message(Format, Args) -> 'ok' when
- Format :: io:format(),
- Args :: [term()].
-
-error_message(Format, Args) ->
- io:format(<<"** ~ts **\n">>, [io_lib:format(Format, Args)]).
-
-%% Return the name of the script that starts (this) erlang
-%%
--spec progname() -> atom().
-
-progname() ->
- case init:get_argument(progname) of
- {ok, [[Prog]]} ->
- list_to_atom(Prog);
- _Other ->
- no_prog_name
- end.
-
--spec nonl(String1) -> String2 when
- String1 :: string(),
- String2 :: string().
-
-nonl([10]) -> [];
-nonl([]) -> [];
-nonl([H|T]) -> [H|nonl(T)].
-
--spec send(To, Msg) -> Msg when
- To :: pid() | atom() | {atom(), node()},
- Msg :: term().
-
-send(To, Msg) -> To ! Msg.
-
--spec sendw(To, Msg) -> term() when
- To :: pid() | atom() | {atom(), node()},
- Msg :: term().
-
-sendw(To, Msg) ->
- To ! {self(), Msg},
- receive
- Reply -> Reply
- end.
-
-%% eval_str(InStr) -> {ok, OutStr} | {error, ErrStr'}
-%% InStr must represent a body
-%% Note: If InStr is a binary it has to be a Latin-1 string.
-%% If you have a UTF-8 encoded binary you have to call
-%% unicode:characters_to_list/1 before the call to eval_str().
-
--define(result(F,D), lists:flatten(io_lib:format(F, D))).
-
--spec eval_str(string() | unicode:latin1_binary()) ->
- {'ok', string()} | {'error', string()}.
-
-eval_str(Str) when is_list(Str) ->
- case erl_scan:tokens([], Str, 0) of
- {more, _} ->
- {error, "Incomplete form (missing .<cr>)??"};
- {done, {ok, Toks, _}, Rest} ->
- case all_white(Rest) of
- true ->
- case erl_parse:parse_exprs(Toks) of
- {ok, Exprs} ->
- case catch erl_eval:exprs(Exprs, erl_eval:new_bindings()) of
- {value, Val, _} ->
- {ok, Val};
- Other ->
- {error, ?result("*** eval: ~p", [Other])}
- end;
- {error, {_Line, Mod, Args}} ->
- Msg = ?result("*** ~ts",[Mod:format_error(Args)]),
- {error, Msg}
- end;
- false ->
- {error, ?result("Non-white space found after "
- "end-of-form :~ts", [Rest])}
- end
- end;
-eval_str(Bin) when is_binary(Bin) ->
- eval_str(binary_to_list(Bin)).
-
-all_white([$\s|T]) -> all_white(T);
-all_white([$\n|T]) -> all_white(T);
-all_white([$\t|T]) -> all_white(T);
-all_white([]) -> true;
-all_white(_) -> false.
-
-%% `Tokens' is assumed to have been scanned with the 'text' option.
-%% The annotations of the returned expressions are locations.
-%%
-%% Can handle pids, ports, references, and external funs ("items").
-%% Known items are represented by variables in the erl_parse tree, and
-%% the items themselves are stored in the returned bindings.
-
--spec extended_parse_exprs(Tokens) ->
- {'ok', ExprList, Bindings} | {'error', ErrorInfo} when
- Tokens :: [erl_scan:token()],
- ExprList :: [erl_parse:abstract_expr()],
- Bindings :: erl_eval:binding_struct(),
- ErrorInfo :: erl_parse:error_info().
-
-extended_parse_exprs(Tokens) ->
- Ts = tokens_fixup(Tokens),
- case erl_parse:parse_exprs(Ts) of
- {ok, Exprs0} ->
- {Exprs, Bs} = expr_fixup(Exprs0),
- {ok, reset_expr_anno(Exprs), Bs};
- _ErrorInfo ->
- erl_parse:parse_exprs(reset_token_anno(Ts))
- end.
-
-tokens_fixup([]) -> [];
-tokens_fixup([T|Ts]=Ts0) ->
- try token_fixup(Ts0) of
- {NewT, NewTs} ->
- [NewT|tokens_fixup(NewTs)]
- catch
- _:_ ->
- [T|tokens_fixup(Ts)]
- end.
-
-token_fixup(Ts) ->
- {AnnoL, NewTs, FixupTag} = unscannable(Ts),
- String = lists:append([erl_anno:text(A) || A <- AnnoL]),
- _ = (fixup_fun(FixupTag))(String),
- NewAnno = erl_anno:set_text(fixup_text(FixupTag), hd(AnnoL)),
- {{string, NewAnno, String}, NewTs}.
-
-unscannable([{'#', A1}, {var, A2, 'Fun'}, {'<', A3}, {atom, A4, _},
- {'.', A5}, {float, A6, _}, {'>', A7}|Ts]) ->
- {[A1, A2, A3, A4, A5, A6, A7], Ts, function};
-unscannable([{'#', A1}, {var, A2, 'Fun'}, {'<', A3}, {atom, A4, _},
- {'.', A5}, {atom, A6, _}, {'.', A7}, {integer, A8, _},
- {'>', A9}|Ts]) ->
- {[A1, A2, A3, A4, A5, A6, A7, A8, A9], Ts, function};
-unscannable([{'<', A1}, {float, A2, _}, {'.', A3}, {integer, A4, _},
- {'>', A5}|Ts]) ->
- {[A1, A2, A3, A4, A5], Ts, pid};
-unscannable([{'#', A1}, {var, A2, 'Port'}, {'<', A3}, {float, A4, _},
- {'>', A5}|Ts]) ->
- {[A1, A2, A3, A4, A5], Ts, port};
-unscannable([{'#', A1}, {var, A2, 'Ref'}, {'<', A3}, {float, A4, _},
- {'.', A5}, {float, A6, _}, {'>', A7}|Ts]) ->
- {[A1, A2, A3, A4, A5, A6, A7], Ts, reference}.
-
-expr_fixup(Expr0) ->
- {Expr, Bs, _} = expr_fixup(Expr0, erl_eval:new_bindings(), 1),
- {Expr, Bs}.
-
-expr_fixup({string,A,S}=T, Bs0, I) ->
- try string_fixup(A, S) of
- Value ->
- Var = new_var(I),
- Bs = erl_eval:add_binding(Var, Value, Bs0),
- {{var, A, Var}, Bs, I+1}
- catch
- _:_ ->
- {T, Bs0, I}
- end;
-expr_fixup(Tuple, Bs0, I0) when is_tuple(Tuple) ->
- {L, Bs, I} = expr_fixup(tuple_to_list(Tuple), Bs0, I0),
- {list_to_tuple(L), Bs, I};
-expr_fixup([E0|Es0], Bs0, I0) ->
- {E, Bs1, I1} = expr_fixup(E0, Bs0, I0),
- {Es, Bs, I} = expr_fixup(Es0, Bs1, I1),
- {[E|Es], Bs, I};
-expr_fixup(T, Bs, I) ->
- {T, Bs, I}.
-
-string_fixup(A, S) ->
- Text = erl_anno:text(A),
- FixupTag = fixup_tag(Text, S),
- (fixup_fun(FixupTag))(S).
-
-new_var(I) ->
- list_to_atom(lists:concat(['__ExtendedParseExprs_', I, '__'])).
-
-reset_token_anno(Tokens) ->
- [setelement(2, T, (reset_anno())(element(2, T))) || T <- Tokens].
-
-reset_expr_anno(Exprs) ->
- [erl_parse:map_anno(reset_anno(), E) || E <- Exprs].
-
-reset_anno() ->
- fun(A) -> erl_anno:new(erl_anno:location(A)) end.
-
-fixup_fun(function) -> fun function/1;
-fixup_fun(pid) -> fun erlang:list_to_pid/1;
-fixup_fun(port) -> fun erlang:list_to_port/1;
-fixup_fun(reference) -> fun erlang:list_to_ref/1.
-
-function(S) ->
- %% External function.
- {ok, [_, _, _,
- {atom, _, Module}, _,
- {atom, _, Function}, _,
- {integer, _, Arity}|_], _} = erl_scan:string(S),
- erlang:make_fun(Module, Function, Arity).
-
-fixup_text(function) -> "function";
-fixup_text(pid) -> "pid";
-fixup_text(port) -> "port";
-fixup_text(reference) -> "reference".
-
-fixup_tag("function", "#"++_) -> function;
-fixup_tag("pid", "<"++_) -> pid;
-fixup_tag("port", "#"++_) -> port;
-fixup_tag("reference", "#"++_) -> reference.
-
-%%% End of extended_parse_exprs.
-
-%% `Tokens' is assumed to have been scanned with the 'text' option.
-%%
-%% Can handle pids, ports, references, and external funs.
-
--spec extended_parse_term(Tokens) ->
- {'ok', Term} | {'error', ErrorInfo} when
- Tokens :: [erl_scan:token()],
- Term :: term(),
- ErrorInfo :: erl_parse:error_info().
-
-extended_parse_term(Tokens) ->
- case extended_parse_exprs(Tokens) of
- {ok, [Expr], Bindings} ->
- try normalise(Expr, Bindings) of
- Term ->
- {ok, Term}
- catch
- _:_ ->
- Loc = erl_anno:location(element(2, Expr)),
- {error,{Loc,?MODULE,"bad term"}}
- end;
- {ok, [_,Expr|_], _Bindings} ->
- Loc = erl_anno:location(element(2, Expr)),
- {error,{Loc,?MODULE,"bad term"}};
- {error, _} = Error ->
- Error
- end.
-
-%% From erl_parse.
-normalise({var, _, V}, Bs) ->
- {value, Value} = erl_eval:binding(V, Bs),
- Value;
-normalise({char,_,C}, _Bs) -> C;
-normalise({integer,_,I}, _Bs) -> I;
-normalise({float,_,F}, _Bs) -> F;
-normalise({atom,_,A}, _Bs) -> A;
-normalise({string,_,S}, _Bs) -> S;
-normalise({nil,_}, _Bs) -> [];
-normalise({bin,_,Fs}, Bs) ->
- {value, B, _} =
- eval_bits:expr_grp(Fs, [],
- fun(E, _) ->
- {value, normalise(E, Bs), []}
- end, [], true),
- B;
-normalise({cons,_,Head,Tail}, Bs) ->
- [normalise(Head, Bs)|normalise(Tail, Bs)];
-normalise({tuple,_,Args}, Bs) ->
- list_to_tuple(normalise_list(Args, Bs));
-normalise({map,_,Pairs}, Bs) ->
- maps:from_list(lists:map(fun
- %% only allow '=>'
- ({map_field_assoc,_,K,V}) ->
- {normalise(K, Bs),normalise(V, Bs)}
- end, Pairs));
-%% Special case for unary +/-.
-normalise({op,_,'+',{char,_,I}}, _Bs) -> I;
-normalise({op,_,'+',{integer,_,I}}, _Bs) -> I;
-normalise({op,_,'+',{float,_,F}}, _Bs) -> F;
-normalise({op,_,'-',{char,_,I}}, _Bs) -> -I; %Weird, but compatible!
-normalise({op,_,'-',{integer,_,I}}, _Bs) -> -I;
-normalise({op,_,'-',{float,_,F}}, _Bs) -> -F;
-normalise({'fun',_,{function,{atom,_,M},{atom,_,F},{integer,_,A}}}, _Bs) ->
- %% Since "#Fun<M.F.A>" is recognized, "fun M:F/A" should be too.
- fun M:F/A.
-
-normalise_list([H|T], Bs) ->
- [normalise(H, Bs)|normalise_list(T, Bs)];
-normalise_list([], _Bs) ->
- [].
-
-%% To be used on ExprList and Bindings returned from extended_parse_exprs().
-%% Substitute {value, A, Item} for {var, A, ExtendedParseVar}.
-%% {value, A, Item} is a shell/erl_eval convention, and for example
-%% the linter cannot handle it.
-
--spec subst_values_for_vars(ExprList, Bindings) -> [term()] when
- ExprList :: [erl_parse:abstract_expr()],
- Bindings :: erl_eval:binding_struct().
-
-subst_values_for_vars({var, A, V}=Var, Bs) ->
- case erl_eval:binding(V, Bs) of
- {value, Value} ->
- {value, A, Value};
- unbound ->
- Var
- end;
-subst_values_for_vars(L, Bs) when is_list(L) ->
- [subst_values_for_vars(E, Bs) || E <- L];
-subst_values_for_vars(T, Bs) when is_tuple(T) ->
- list_to_tuple(subst_values_for_vars(tuple_to_list(T), Bs));
-subst_values_for_vars(T, _Bs) ->
- T.
-
%%% Formatting of exceptions, mfa:s and funs.
%% -> iolist() (no \n at end)
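Only the exception-formatting half of the old lib module survives the rename, so call sites change from lib:format_exception/6,7 to erl_error:format_exception/6,7. A hedged sketch of such a call site; the wrapper function and the printer/stack funs are illustrative, modelled on the existing callers in shell and proc_lib:

    render_failure(Fun) ->
        try
            Fun()
        catch
            Class:Reason:Stacktrace ->
                %% Printer fun: Term and current column -> iolist.
                PF = fun(Term, I) -> io_lib:print(Term, I, 80, 30) end,
                %% Stack fun: return true for frames that should be hidden.
                SF = fun(_M, _F, _A) -> false end,
                io_lib:format("~ts~n",
                              [erl_error:format_exception(1, Class, Reason,
                                                          Stacktrace, SF, PF)])
        end.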
diff --git a/lib/stdlib/src/erl_eval.erl b/lib/stdlib/src/erl_eval.erl
index 4ee11383da..0f6d48b9a3 100644
--- a/lib/stdlib/src/erl_eval.erl
+++ b/lib/stdlib/src/erl_eval.erl
@@ -27,7 +27,8 @@
-export([exprs/2,exprs/3,exprs/4,expr/2,expr/3,expr/4,expr/5,
expr_list/2,expr_list/3,expr_list/4]).
-export([new_bindings/0,bindings/1,binding/2,add_binding/3,del_binding/2]).
-
+-export([extended_parse_exprs/1, extended_parse_term/1,
+ subst_values_for_vars/2]).
-export([is_constant_expr/1, partial_eval/1]).
%% Is used by standalone Erlang (escript).
@@ -1286,6 +1287,224 @@ merge_bindings(Bs1, Bs2) ->
%% error -> Bs
%% end
%% end, Bs2, Bs1).
+
+%% Substitute {value, A, Item} for {var, A, Var}, preserving A.
+%% {value, A, Item} is a shell/erl_eval convention, and for example
+%% the linter cannot handle it.
+
+-spec subst_values_for_vars(ExprList, Bindings) -> [term()] when
+ ExprList :: [erl_parse:abstract_expr()],
+ Bindings :: binding_struct().
+
+subst_values_for_vars({var, A, V}=Var, Bs) ->
+ case erl_eval:binding(V, Bs) of
+ {value, Value} ->
+ {value, A, Value};
+ unbound ->
+ Var
+ end;
+subst_values_for_vars(L, Bs) when is_list(L) ->
+ [subst_values_for_vars(E, Bs) || E <- L];
+subst_values_for_vars(T, Bs) when is_tuple(T) ->
+ list_to_tuple(subst_values_for_vars(tuple_to_list(T), Bs));
+subst_values_for_vars(T, _Bs) ->
+ T.
+
+%% `Tokens' is assumed to have been scanned with the 'text' option.
+%% The annotations of the returned expressions are locations.
+%%
+%% Can handle pids, ports, references, and external funs ("items").
+%% Known items are represented by variables in the erl_parse tree, and
+%% the items themselves are stored in the returned bindings.
+
+-spec extended_parse_exprs(Tokens) ->
+ {'ok', ExprList, Bindings} | {'error', ErrorInfo} when
+ Tokens :: [erl_scan:token()],
+ ExprList :: [erl_parse:abstract_expr()],
+ Bindings :: erl_eval:binding_struct(),
+ ErrorInfo :: erl_parse:error_info().
+
+extended_parse_exprs(Tokens) ->
+ Ts = tokens_fixup(Tokens),
+ case erl_parse:parse_exprs(Ts) of
+ {ok, Exprs0} ->
+ {Exprs, Bs} = expr_fixup(Exprs0),
+ {ok, reset_expr_anno(Exprs), Bs};
+ _ErrorInfo ->
+ erl_parse:parse_exprs(reset_token_anno(Ts))
+ end.
+
+tokens_fixup([]) -> [];
+tokens_fixup([T|Ts]=Ts0) ->
+ try token_fixup(Ts0) of
+ {NewT, NewTs} ->
+ [NewT|tokens_fixup(NewTs)]
+ catch
+ _:_ ->
+ [T|tokens_fixup(Ts)]
+ end.
+
+token_fixup(Ts) ->
+ {AnnoL, NewTs, FixupTag} = unscannable(Ts),
+ String = lists:append([erl_anno:text(A) || A <- AnnoL]),
+ _ = (fixup_fun(FixupTag))(String),
+ NewAnno = erl_anno:set_text(fixup_text(FixupTag), hd(AnnoL)),
+ {{string, NewAnno, String}, NewTs}.
+
+unscannable([{'#', A1}, {var, A2, 'Fun'}, {'<', A3}, {atom, A4, _},
+ {'.', A5}, {float, A6, _}, {'>', A7}|Ts]) ->
+ {[A1, A2, A3, A4, A5, A6, A7], Ts, function};
+unscannable([{'#', A1}, {var, A2, 'Fun'}, {'<', A3}, {atom, A4, _},
+ {'.', A5}, {atom, A6, _}, {'.', A7}, {integer, A8, _},
+ {'>', A9}|Ts]) ->
+ {[A1, A2, A3, A4, A5, A6, A7, A8, A9], Ts, function};
+unscannable([{'<', A1}, {float, A2, _}, {'.', A3}, {integer, A4, _},
+ {'>', A5}|Ts]) ->
+ {[A1, A2, A3, A4, A5], Ts, pid};
+unscannable([{'#', A1}, {var, A2, 'Port'}, {'<', A3}, {float, A4, _},
+ {'>', A5}|Ts]) ->
+ {[A1, A2, A3, A4, A5], Ts, port};
+unscannable([{'#', A1}, {var, A2, 'Ref'}, {'<', A3}, {float, A4, _},
+ {'.', A5}, {float, A6, _}, {'>', A7}|Ts]) ->
+ {[A1, A2, A3, A4, A5, A6, A7], Ts, reference}.
+
+expr_fixup(Expr0) ->
+ {Expr, Bs, _} = expr_fixup(Expr0, erl_eval:new_bindings(), 1),
+ {Expr, Bs}.
+
+expr_fixup({string,A,S}=T, Bs0, I) ->
+ try string_fixup(A, S) of
+ Value ->
+ Var = new_var(I),
+ Bs = erl_eval:add_binding(Var, Value, Bs0),
+ {{var, A, Var}, Bs, I+1}
+ catch
+ _:_ ->
+ {T, Bs0, I}
+ end;
+expr_fixup(Tuple, Bs0, I0) when is_tuple(Tuple) ->
+ {L, Bs, I} = expr_fixup(tuple_to_list(Tuple), Bs0, I0),
+ {list_to_tuple(L), Bs, I};
+expr_fixup([E0|Es0], Bs0, I0) ->
+ {E, Bs1, I1} = expr_fixup(E0, Bs0, I0),
+ {Es, Bs, I} = expr_fixup(Es0, Bs1, I1),
+ {[E|Es], Bs, I};
+expr_fixup(T, Bs, I) ->
+ {T, Bs, I}.
+
+string_fixup(A, S) ->
+ Text = erl_anno:text(A),
+ FixupTag = fixup_tag(Text, S),
+ (fixup_fun(FixupTag))(S).
+
+new_var(I) ->
+ list_to_atom(lists:concat(['__ExtendedParseExprs_', I, '__'])).
+
+reset_token_anno(Tokens) ->
+ [setelement(2, T, (reset_anno())(element(2, T))) || T <- Tokens].
+
+reset_expr_anno(Exprs) ->
+ [erl_parse:map_anno(reset_anno(), E) || E <- Exprs].
+
+reset_anno() ->
+ fun(A) -> erl_anno:new(erl_anno:location(A)) end.
+
+fixup_fun(function) -> fun function/1;
+fixup_fun(pid) -> fun erlang:list_to_pid/1;
+fixup_fun(port) -> fun erlang:list_to_port/1;
+fixup_fun(reference) -> fun erlang:list_to_ref/1.
+
+function(S) ->
+ %% External function.
+ {ok, [_, _, _,
+ {atom, _, Module}, _,
+ {atom, _, Function}, _,
+ {integer, _, Arity}|_], _} = erl_scan:string(S),
+ erlang:make_fun(Module, Function, Arity).
+
+fixup_text(function) -> "function";
+fixup_text(pid) -> "pid";
+fixup_text(port) -> "port";
+fixup_text(reference) -> "reference".
+
+fixup_tag("function", "#"++_) -> function;
+fixup_tag("pid", "<"++_) -> pid;
+fixup_tag("port", "#"++_) -> port;
+fixup_tag("reference", "#"++_) -> reference.
+
+%%% End of extended_parse_exprs.
+
+%% `Tokens' is assumed to have been scanned with the 'text' option.
+%%
+%% Can handle pids, ports, references, and external funs.
+
+-spec extended_parse_term(Tokens) ->
+ {'ok', Term} | {'error', ErrorInfo} when
+ Tokens :: [erl_scan:token()],
+ Term :: term(),
+ ErrorInfo :: erl_parse:error_info().
+
+extended_parse_term(Tokens) ->
+ case extended_parse_exprs(Tokens) of
+ {ok, [Expr], Bindings} ->
+ try normalise(Expr, Bindings) of
+ Term ->
+ {ok, Term}
+ catch
+ _:_ ->
+ Loc = erl_anno:location(element(2, Expr)),
+ {error,{Loc,?MODULE,"bad term"}}
+ end;
+ {ok, [_,Expr|_], _Bindings} ->
+ Loc = erl_anno:location(element(2, Expr)),
+ {error,{Loc,?MODULE,"bad term"}};
+ {error, _} = Error ->
+ Error
+ end.
+
+%% From erl_parse.
+normalise({var, _, V}, Bs) ->
+ {value, Value} = erl_eval:binding(V, Bs),
+ Value;
+normalise({char,_,C}, _Bs) -> C;
+normalise({integer,_,I}, _Bs) -> I;
+normalise({float,_,F}, _Bs) -> F;
+normalise({atom,_,A}, _Bs) -> A;
+normalise({string,_,S}, _Bs) -> S;
+normalise({nil,_}, _Bs) -> [];
+normalise({bin,_,Fs}, Bs) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E, Bs), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}, Bs) ->
+ [normalise(Head, Bs)|normalise(Tail, Bs)];
+normalise({tuple,_,Args}, Bs) ->
+ list_to_tuple(normalise_list(Args, Bs));
+normalise({map,_,Pairs}, Bs) ->
+ maps:from_list(lists:map(fun
+ %% only allow '=>'
+ ({map_field_assoc,_,K,V}) ->
+ {normalise(K, Bs),normalise(V, Bs)}
+ end, Pairs));
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}, _Bs) -> I;
+normalise({op,_,'+',{integer,_,I}}, _Bs) -> I;
+normalise({op,_,'+',{float,_,F}}, _Bs) -> F;
+normalise({op,_,'-',{char,_,I}}, _Bs) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}, _Bs) -> -I;
+normalise({op,_,'-',{float,_,F}}, _Bs) -> -F;
+normalise({'fun',_,{function,{atom,_,M},{atom,_,F},{integer,_,A}}}, _Bs) ->
+ %% Since "#Fun<M.F.A>" is recognized, "fun M:F/A" should be too.
+ fun M:F/A.
+
+normalise_list([H|T], Bs) ->
+ [normalise(H, Bs)|normalise_list(T, Bs)];
+normalise_list([], _Bs) ->
+ [].
+
%%----------------------------------------------------------------------------
%%
%% Evaluate expressions:
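The helpers keep their behaviour after the move; they only gain a new home so that the shell and qlc no longer depend on the removed lib module. A small sketch of parsing a term that plain erl_parse cannot represent (the pid literal is illustrative and need not refer to a live process; note the required 'text' scan option):

    {ok, Toks, _} = erl_scan:string("{owner, <0.85.0>}.", 1, [text]),
    {ok, {owner, Pid}} = erl_eval:extended_parse_term(Toks),
    true = is_pid(Pid).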
diff --git a/lib/stdlib/src/escript.erl b/lib/stdlib/src/escript.erl
index beea9927d2..89a81684f5 100644
--- a/lib/stdlib/src/escript.erl
+++ b/lib/stdlib/src/escript.erl
@@ -882,7 +882,7 @@ format_exception(Class, Reason, StackTrace) ->
io_lib:format("~." ++ integer_to_list(I) ++ P, [Term, 50])
end,
StackFun = fun(M, _F, _A) -> (M =:= erl_eval) or (M =:= ?MODULE) end,
- lib:format_exception(1, Class, Reason, StackTrace, StackFun, PF, Enc).
+ erl_error:format_exception(1, Class, Reason, StackTrace, StackFun, PF, Enc).
encoding() ->
[{encoding, Encoding}] = enc(),
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 6a559f0be5..a35f79c0d9 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -77,7 +77,9 @@
whereis/1]).
%% internal exports
--export([internal_request_all/0]).
+-export([internal_request_all/0,
+ internal_delete_all/2,
+ internal_select_delete/2]).
-spec all() -> [Tab] when
Tab :: tab().
@@ -116,7 +118,15 @@ delete(_, _) ->
-spec delete_all_objects(Tab) -> true when
Tab :: tab().
-delete_all_objects(_) ->
+delete_all_objects(Tab) ->
+ _ = ets:internal_delete_all(Tab, undefined),
+ true.
+
+-spec internal_delete_all(Tab, undefined) -> NumDeleted when
+ Tab :: tab(),
+ NumDeleted :: non_neg_integer().
+
+internal_delete_all(_, _) ->
erlang:nif_error(undef).
-spec delete_object(Tab, Object) -> true when
@@ -378,7 +388,17 @@ select_count(_, _) ->
MatchSpec :: match_spec(),
NumDeleted :: non_neg_integer().
-select_delete(_, _) ->
+select_delete(Tab, [{'_',[],[true]}]) ->
+ ets:internal_delete_all(Tab, undefined);
+select_delete(Tab, MatchSpec) ->
+ ets:internal_select_delete(Tab, MatchSpec).
+
+-spec internal_select_delete(Tab, MatchSpec) -> NumDeleted when
+ Tab :: tab(),
+ MatchSpec :: match_spec(),
+ NumDeleted :: non_neg_integer().
+
+internal_select_delete(_, _) ->
erlang:nif_error(undef).
-spec select_replace(Tab, MatchSpec) -> NumReplaced when
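The clause added to select_delete/2 means that the common "delete everything" match specification takes the same internally trapping path as delete_all_objects/1. A small sketch (table name and contents are arbitrary):

    T = ets:new(example_tab, [set, public]),
    true = ets:insert(T, [{a, 1}, {b, 2}, {c, 3}]),
    %% [{'_',[],[true]}] matches every object, so this call is routed to
    %% internal_delete_all/2 rather than internal_select_delete/2.
    3 = ets:select_delete(T, [{'_', [], [true]}]),
    0 = ets:info(T, size),
    true = ets:delete(T).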
diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl
index 77826c3dc6..1646186761 100644
--- a/lib/stdlib/src/gen_fsm.erl
+++ b/lib/stdlib/src/gen_fsm.erl
@@ -129,25 +129,25 @@
%% logger callback
-export([format_log/1]).
--deprecated({start, 3, next_major_release}).
--deprecated({start, 4, next_major_release}).
--deprecated({start_link, 3, next_major_release}).
--deprecated({start_link, 4, next_major_release}).
--deprecated({stop, 1, next_major_release}).
--deprecated({stop, 3, next_major_release}).
--deprecated({send_event, 2, next_major_release}).
--deprecated({sync_send_event, 2, next_major_release}).
--deprecated({sync_send_event, 3, next_major_release}).
--deprecated({send_all_state_event, 2, next_major_release}).
--deprecated({sync_send_all_state_event, 2, next_major_release}).
--deprecated({sync_send_all_state_event, 3, next_major_release}).
--deprecated({reply, 2, next_major_release}).
--deprecated({start_timer, 2, next_major_release}).
--deprecated({send_event_after, 2, next_major_release}).
--deprecated({cancel_timer, 1, next_major_release}).
--deprecated({enter_loop, 4, next_major_release}).
--deprecated({enter_loop, 5, next_major_release}).
--deprecated({enter_loop, 6, next_major_release}).
+-deprecated({start, 3, eventually}).
+-deprecated({start, 4, eventually}).
+-deprecated({start_link, 3, eventually}).
+-deprecated({start_link, 4, eventually}).
+-deprecated({stop, 1, eventually}).
+-deprecated({stop, 3, eventually}).
+-deprecated({send_event, 2, eventually}).
+-deprecated({sync_send_event, 2, eventually}).
+-deprecated({sync_send_event, 3, eventually}).
+-deprecated({send_all_state_event, 2, eventually}).
+-deprecated({sync_send_all_state_event, 2, eventually}).
+-deprecated({sync_send_all_state_event, 3, eventually}).
+-deprecated({reply, 2, eventually}).
+-deprecated({start_timer, 2, eventually}).
+-deprecated({send_event_after, 2, eventually}).
+-deprecated({cancel_timer, 1, eventually}).
+-deprecated({enter_loop, 4, eventually}).
+-deprecated({enter_loop, 5, eventually}).
+-deprecated({enter_loop, 6, eventually}).
%%% ---------------------------------------------------
%%% Interface functions.
diff --git a/lib/stdlib/src/gen_server.erl b/lib/stdlib/src/gen_server.erl
index 035dd871ff..09f77c0810 100644
--- a/lib/stdlib/src/gen_server.erl
+++ b/lib/stdlib/src/gen_server.erl
@@ -934,14 +934,14 @@ format_log(#{label:={gen_server,terminate},
end
end;
_ ->
- logger:limit_term(Reason)
+ error_logger:limit_term(Reason)
end,
{ClientFmt,ClientArgs} = format_client_log(Client),
{"** Generic server ~tp terminating \n"
"** Last message in was ~tp~n"
"** When Server state == ~tp~n"
"** Reason for termination == ~n** ~tp~n" ++ ClientFmt,
- [Name, Msg, logger:limit_term(State), Reason1] ++ ClientArgs};
+ [Name, Msg, error_logger:limit_term(State), Reason1] ++ ClientArgs};
format_log(#{label:={gen_server,no_handle_info},
module:=Mod,
message:=Msg}) ->
diff --git a/lib/stdlib/src/gen_statem.erl b/lib/stdlib/src/gen_statem.erl
index f558f0d33e..b36b8cd5a5 100644
--- a/lib/stdlib/src/gen_statem.erl
+++ b/lib/stdlib/src/gen_statem.erl
@@ -1938,7 +1938,7 @@ format_log(#{label:={gen_statem,terminate},
_ -> {Reason,Stacktrace}
end,
[LimitedP, LimitedFmtData, LimitedFixedReason] =
- [logger:limit_term(D) || D <- [P, FmtData, FixedReason]],
+ [error_logger:limit_term(D) || D <- [P, FmtData, FixedReason]],
CBMode =
case StateEnter of
true ->
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index a17addcc42..ceec3079a1 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -612,6 +612,15 @@ obsolete_1(erlang, get_stacktrace, 0) ->
obsolete_1(erlang, hash, 2) ->
{removed, {erlang, phash2, 2}, "20.0"};
+%% Added in OTP 21.
+
+obsolete_1(ssl, ssl_accept, 1) ->
+ {deprecated, "deprecated; use ssl:handshake/1 instead"};
+obsolete_1(ssl, ssl_accept, 2) ->
+ {deprecated, "deprecated; use ssl:handshake/2 instead"};
+obsolete_1(ssl, ssl_accept, 3) ->
+ {deprecated, "deprecated; use ssl:handshake/3 instead"};
+
%% not obsolete
obsolete_1(_, _, _) ->
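A hedged sketch of the migration these new deprecation entries point to; the surrounding acceptor code is made up, only the ssl calls matter:

    accept_tls(ListenSocket, Timeout) ->
        {ok, Transport} = ssl:transport_accept(ListenSocket, Timeout),
        %% was: ok = ssl:ssl_accept(Transport, Timeout)
        {ok, TLSSocket} = ssl:handshake(Transport, Timeout),
        TLSSocket.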
diff --git a/lib/stdlib/src/proc_lib.erl b/lib/stdlib/src/proc_lib.erl
index 8d01840313..5f14e78f91 100644
--- a/lib/stdlib/src/proc_lib.erl
+++ b/lib/stdlib/src/proc_lib.erl
@@ -553,10 +553,10 @@ get_ancestors(Pid) ->
%% assumed that all report handlers call proc_lib:format().
get_messages(Pid) ->
Messages = get_process_messages(Pid),
- {messages, logger:limit_term(Messages)}.
+ {messages, error_logger:limit_term(Messages)}.
get_process_messages(Pid) ->
- Depth = logger:get_format_depth(),
+ Depth = error_logger:get_format_depth(),
case Pid =/= self() orelse Depth =:= unlimited of
true ->
{messages, Messages} = get_process_info(Pid, messages),
@@ -586,7 +586,7 @@ get_cleaned_dictionary(Pid) ->
cleaned_dict(Dict) ->
CleanDict = clean_dict(Dict),
- logger:limit_term(CleanDict).
+ error_logger:limit_term(CleanDict).
clean_dict([{'$ancestors',_}|Dict]) ->
clean_dict(Dict);
@@ -756,7 +756,7 @@ check(Res) -> Res.
Args :: [term()].
report_cb(#{label:={proc_lib,crash},
report:=CrashReport}) ->
- Depth = logger:get_format_depth(),
+ Depth = error_logger:get_format_depth(),
get_format_and_args(CrashReport, utf8, Depth).
-spec format(CrashReport) -> string() when
@@ -841,8 +841,8 @@ format_exception(Class, Reason, StackTrace, {Enc,_}=Extra) ->
StackFun = fun(M, _F, _A) -> (M =:= erl_eval) or (M =:= ?MODULE) end,
%% EI = " exception: ",
EI = " ",
- [EI, lib:format_exception(1+length(EI), Class, Reason,
- StackTrace, StackFun, PF, Enc), "\n"].
+ [EI, erl_error:format_exception(1+length(EI), Class, Reason,
+ StackTrace, StackFun, PF, Enc), "\n"].
to_string(A, latin1) ->
io_lib:write_atom_as_latin1(A);
diff --git a/lib/stdlib/src/qlc.erl b/lib/stdlib/src/qlc.erl
index 3a66f6930b..4a0e976ba4 100644
--- a/lib/stdlib/src/qlc.erl
+++ b/lib/stdlib/src/qlc.erl
@@ -638,7 +638,7 @@ string_to_handle(Str, Options, Bindings) when is_list(Str) ->
case erl_scan:string(Str, 1, [text]) of
{ok, Tokens, _} ->
ScanRes =
- case lib:extended_parse_exprs(Tokens) of
+ case erl_eval:extended_parse_exprs(Tokens) of
{ok, [Expr0], SBs} ->
{ok, Expr0, SBs};
{ok, _ExprList, _SBs} ->
@@ -1196,8 +1196,8 @@ abstract1({table, TableDesc}, _NElements, _Depth, _A) ->
{ok, Tokens, _} =
erl_scan:string(lists:flatten(TableDesc++"."), 1, [text]),
{ok, Es, Bs} =
- lib:extended_parse_exprs(Tokens),
- [Expr] = lib:subst_values_for_vars(Es, Bs),
+ erl_eval:extended_parse_exprs(Tokens),
+ [Expr] = erl_eval:subst_values_for_vars(Es, Bs),
special(Expr);
false -> % abstract expression
TableDesc
@@ -3749,7 +3749,7 @@ maybe_error_logger(Name, Why) ->
expand_stacktrace(),
Trimmer = fun(M, _F, _A) -> M =:= erl_eval end,
Formater = fun(Term, I) -> io_lib:print(Term, I, 80, -1) end,
- X = lib:format_stacktrace(1, Stacktrace, Trimmer, Formater),
+ X = erl_error:format_stacktrace(1, Stacktrace, Trimmer, Formater),
error_logger:Name("qlc: temporary file was needed for ~w\n~ts\n",
[Why, lists:flatten(X)]).
diff --git a/lib/stdlib/src/shell.erl b/lib/stdlib/src/shell.erl
index 1be37672e7..c73cf22943 100644
--- a/lib/stdlib/src/shell.erl
+++ b/lib/stdlib/src/shell.erl
@@ -230,7 +230,7 @@ server_loop(N0, Eval_0, Bs00, RT, Ds00, History0, Results0) ->
{Res,Eval0} = get_command(Prompt, Eval_1, Bs0, RT, Ds0),
case Res of
{ok,Es0,XBs} ->
- Es1 = lib:subst_values_for_vars(Es0, XBs),
+ Es1 = erl_eval:subst_values_for_vars(Es0, XBs),
case expand_hist(Es1, N) of
{ok,Es} ->
{V,Eval,Bs,Ds} = shell_cmd(Es, Eval0, Bs0, RT, Ds0, cmd),
@@ -280,7 +280,7 @@ get_command(Prompt, Eval, Bs, RT, Ds) ->
io:scan_erl_exprs(group_leader(), Prompt, 1, [text])
of
{ok,Toks,_EndPos} ->
- lib:extended_parse_exprs(Toks);
+ erl_eval:extended_parse_exprs(Toks);
{eof,_EndPos} ->
eof;
{error,ErrorInfo,_EndPos} ->
@@ -589,7 +589,7 @@ report_exception(Class, Severity, {Reason,Stacktrace}, RT) ->
PF = fun(Term, I1) -> pp(Term, I1, RT) end,
SF = fun(M, _F, _A) -> (M =:= erl_eval) or (M =:= ?MODULE) end,
Enc = encoding(),
- Str = lib:format_exception(I, Class, Reason, Stacktrace, SF, PF, Enc),
+ Str = erl_error:format_exception(I, Class, Reason, Stacktrace, SF, PF, Enc),
io:requests([{put_chars, latin1, Tag},
{put_chars, unicode, Str},
nl]).
diff --git a/lib/stdlib/src/slave.erl b/lib/stdlib/src/slave.erl
index b3f3206d67..37c1f6bfd9 100644
--- a/lib/stdlib/src/slave.erl
+++ b/lib/stdlib/src/slave.erl
@@ -187,7 +187,7 @@ start_link(Host, Name, Args) ->
start(Host, Name, Args, self()).
start(Host0, Name, Args, LinkTo) ->
- Prog = lib:progname(),
+ Prog = progname(),
start(Host0, Name, Args, LinkTo, Prog).
start(Host0, Name, Args, LinkTo, Prog) ->
@@ -296,7 +296,6 @@ mk_cmd(Host, Name, Args, Waiter, Prog0) ->
" -s slave slave_start ", node(),
" ", Waiter,
" ", Args]),
-
case after_char($@, atom_to_list(node())) of
Host ->
{ok, BasicCmd};
@@ -309,6 +308,15 @@ mk_cmd(Host, Name, Args, Waiter, Prog0) ->
end
end.
+%% Return the name of the script that starts (this) erlang
+progname() ->
+ case init:get_argument(progname) of
+ {ok, [[Prog]]} ->
+ Prog;
+ _Other ->
+ "no_prog_name"
+ end.
+
%% This is an attempt to distinguish between spaces in the program
%% path and spaces that separate arguments. The program is quoted to
%% allow spaces in the path.
@@ -317,7 +325,7 @@ mk_cmd(Host, Name, Args, Waiter, Prog0) ->
%% (through start/5) or if the -program switch to beam is used and
%% includes arguments (typically done by cerl in OTP test environment
%% in order to ensure that slave/peer nodes are started with the same
-%% emulator and flags as the test node. The return from lib:progname()
+%% emulator and flags as the test node. The result from progname()
%% could then typically be '/<full_path_to>/cerl -gcov').
quote_progname(Progname) ->
do_quote_progname(string:lexemes(to_list(Progname)," ")).
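The replacement progname/0 above returns a plain string built from init:get_argument/1, where lib:progname/0 returned an atom. In a node started with an ordinary erl script it evaluates roughly as follows (the exact value depends on how the emulator was started):

    {ok, [["erl"]]} = init:get_argument(progname),
    "erl" = progname().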
diff --git a/lib/stdlib/src/stdlib.app.src b/lib/stdlib/src/stdlib.app.src
index 5fb48acfab..cd09872b87 100644
--- a/lib/stdlib/src/stdlib.app.src
+++ b/lib/stdlib/src/stdlib.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -43,6 +43,7 @@
erl_anno,
erl_bits,
erl_compile,
+ erl_error,
erl_eval,
erl_expand_records,
erl_internal,
@@ -71,7 +72,6 @@
io_lib_format,
io_lib_fread,
io_lib_pretty,
- lib,
lists,
log_mf_h,
maps,
diff --git a/lib/stdlib/src/string.erl b/lib/stdlib/src/string.erl
index 0736374f21..f5d271c06d 100644
--- a/lib/stdlib/src/string.erl
+++ b/lib/stdlib/src/string.erl
@@ -323,16 +323,30 @@ take(Str, Sep0, true, trailing) ->
%% Uppercase all chars in Str
-spec uppercase(String::unicode:chardata()) -> unicode:chardata().
uppercase(CD) when is_list(CD) ->
- uppercase_list(CD);
-uppercase(CD) when is_binary(CD) ->
- uppercase_bin(CD,<<>>).
+ try uppercase_list(CD, false)
+ catch unchanged -> CD
+ end;
+uppercase(<<CP1/utf8, Rest/binary>>=Orig) ->
+ try uppercase_bin(CP1, Rest, false) of
+ List -> unicode:characters_to_binary(List)
+ catch unchanged -> Orig
+ end;
+uppercase(<<>>) ->
+ <<>>.
%% Lowercase all chars in Str
-spec lowercase(String::unicode:chardata()) -> unicode:chardata().
lowercase(CD) when is_list(CD) ->
- lowercase_list(CD);
-lowercase(CD) when is_binary(CD) ->
- lowercase_bin(CD,<<>>).
+ try lowercase_list(CD, false)
+ catch unchanged -> CD
+ end;
+lowercase(<<CP1/utf8, Rest/binary>>=Orig) ->
+ try lowercase_bin(CP1, Rest, false) of
+ List -> unicode:characters_to_binary(List)
+ catch unchanged -> Orig
+ end;
+lowercase(<<>>) ->
+ <<>>.
%% Make a titlecase of the first char in Str
-spec titlecase(String::unicode:chardata()) -> unicode:chardata().
@@ -352,9 +366,16 @@ titlecase(CD) when is_binary(CD) ->
%% Make a comparable string of Str; should be used for equality tests only
-spec casefold(String::unicode:chardata()) -> unicode:chardata().
casefold(CD) when is_list(CD) ->
- casefold_list(CD);
-casefold(CD) when is_binary(CD) ->
- casefold_bin(CD,<<>>).
+ try casefold_list(CD, false)
+ catch unchanged -> CD
+ end;
+casefold(<<CP1/utf8, Rest/binary>>=Orig) ->
+ try casefold_bin(CP1, Rest, false) of
+ List -> unicode:characters_to_binary(List)
+ catch unchanged -> Orig
+ end;
+casefold(<<>>) ->
+ <<>>.
-spec to_integer(String) -> {Int, Rest} | {'error', Reason} when
String :: unicode:chardata(),
@@ -652,52 +673,127 @@ slice_bin(CD, CP1, N) when N > 0 ->
slice_bin(CD, CP1, 0) ->
byte_size(CD)+byte_size(<<CP1/utf8>>).
-uppercase_list(CPs0) ->
+uppercase_list([CP1|[CP2|_]=Cont], _Changed) when $a =< CP1, CP1 =< $z, CP2 < 256 ->
+ [CP1-32|uppercase_list(Cont, true)];
+uppercase_list([CP1|[CP2|_]=Cont], Changed) when CP1 < 128, CP2 < 256 ->
+ [CP1|uppercase_list(Cont, Changed)];
+uppercase_list([], true) ->
+ [];
+uppercase_list([], false) ->
+ throw(unchanged);
+uppercase_list(CPs0, Changed) ->
case unicode_util:uppercase(CPs0) of
- [Char|CPs] -> append(Char,uppercase_list(CPs));
- [] -> []
+ [Char|CPs] when Char =:= hd(CPs0) -> [Char|uppercase_list(CPs, Changed)];
+ [Char|CPs] -> append(Char,uppercase_list(CPs, true));
+ [] -> uppercase_list([], Changed)
end.
-uppercase_bin(CPs0, Acc) ->
- case unicode_util:uppercase(CPs0) of
- [Char|CPs] when is_integer(Char) ->
- uppercase_bin(CPs, <<Acc/binary, Char/utf8>>);
- [Chars|CPs] ->
- uppercase_bin(CPs, <<Acc/binary,
- << <<CP/utf8>> || CP <- Chars>>/binary >>);
- [] -> Acc
+uppercase_bin(CP1, <<CP2/utf8, Bin/binary>>, _Changed)
+ when $a =< CP1, CP1 =< $z, CP2 < 256 ->
+ [CP1-32|uppercase_bin(CP2, Bin, true)];
+uppercase_bin(CP1, <<CP2/utf8, Bin/binary>>, Changed)
+ when CP1 < 128, CP2 < 256 ->
+ [CP1|uppercase_bin(CP2, Bin, Changed)]; % propagate Changed so earlier changes are not lost
+uppercase_bin(CP1, Bin, Changed) ->
+ case unicode_util:uppercase([CP1|Bin]) of
+ [CP1|CPs] ->
+ case unicode_util:cp(CPs) of
+ [Next|Rest] ->
+ [CP1|uppercase_bin(Next, Rest, Changed)];
+ [] when Changed ->
+ [CP1];
+ [] ->
+ throw(unchanged)
+ end;
+ [Char|CPs] ->
+ case unicode_util:cp(CPs) of
+ [Next|Rest] ->
+ [Char|uppercase_bin(Next, Rest, true)];
+ [] ->
+ [Char]
+ end
end.
-lowercase_list(CPs0) ->
+lowercase_list([CP1|[CP2|_]=Cont], _Changed) when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
+ [CP1+32|lowercase_list(Cont, true)];
+lowercase_list([CP1|[CP2|_]=Cont], Changed) when CP1 < 128, CP2 < 256 ->
+ [CP1|lowercase_list(Cont, Changed)];
+lowercase_list([], true) ->
+ [];
+lowercase_list([], false) ->
+ throw(unchanged);
+lowercase_list(CPs0, Changed) ->
case unicode_util:lowercase(CPs0) of
- [Char|CPs] -> append(Char,lowercase_list(CPs));
- [] -> []
+ [Char|CPs] when Char =:= hd(CPs0) -> [Char|lowercase_list(CPs, Changed)];
+ [Char|CPs] -> append(Char,lowercase_list(CPs, true));
+ [] -> lowercase_list([], Changed)
end.
-lowercase_bin(CPs0, Acc) ->
- case unicode_util:lowercase(CPs0) of
- [Char|CPs] when is_integer(Char) ->
- lowercase_bin(CPs, <<Acc/binary, Char/utf8>>);
- [Chars|CPs] ->
- lowercase_bin(CPs, <<Acc/binary,
- << <<CP/utf8>> || CP <- Chars>>/binary >>);
- [] -> Acc
+lowercase_bin(CP1, <<CP2/utf8, Bin/binary>>, _Changed)
+ when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
+ [CP1+32|lowercase_bin(CP2, Bin, true)];
+lowercase_bin(CP1, <<CP2/utf8, Bin/binary>>, Changed)
+ when CP1 < 128, CP2 < 256 ->
+ [CP1|lowercase_bin(CP2, Bin, Changed)]; % propagate Changed so earlier changes are not lost
+lowercase_bin(CP1, Bin, Changed) ->
+ case unicode_util:lowercase([CP1|Bin]) of
+ [CP1|CPs] ->
+ case unicode_util:cp(CPs) of
+ [Next|Rest] ->
+ [CP1|lowercase_bin(Next, Rest, Changed)];
+ [] when Changed ->
+ [CP1];
+ [] ->
+ throw(unchanged)
+ end;
+ [Char|CPs] ->
+ case unicode_util:cp(CPs) of
+ [Next|Rest] ->
+ [Char|lowercase_bin(Next, Rest, true)];
+ [] ->
+ [Char]
+ end
end.
-casefold_list(CPs0) ->
+casefold_list([CP1|[CP2|_]=Cont], _Changed) when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
+ [CP1+32|casefold_list(Cont, true)];
+casefold_list([CP1|[CP2|_]=Cont], Changed) when CP1 < 128, CP2 < 256 ->
+ [CP1|casefold_list(Cont, Changed)];
+casefold_list([], true) ->
+ [];
+casefold_list([], false) ->
+ throw(unchanged);
+casefold_list(CPs0, Changed) ->
case unicode_util:casefold(CPs0) of
- [Char|CPs] -> append(Char, casefold_list(CPs));
- [] -> []
+ [Char|CPs] when Char =:= hd(CPs0) -> [Char|casefold_list(CPs, Changed)];
+ [Char|CPs] -> append(Char,casefold_list(CPs, true));
+ [] -> casefold_list([], Changed)
end.
-casefold_bin(CPs0, Acc) ->
- case unicode_util:casefold(CPs0) of
- [Char|CPs] when is_integer(Char) ->
- casefold_bin(CPs, <<Acc/binary, Char/utf8>>);
- [Chars|CPs] ->
- casefold_bin(CPs, <<Acc/binary,
- << <<CP/utf8>> || CP <- Chars>>/binary >>);
- [] -> Acc
+casefold_bin(CP1, <<CP2/utf8, Bin/binary>>, _Changed)
+ when $A =< CP1, CP1 =< $Z, CP2 < 256 ->
+ [CP1+32|casefold_bin(CP2, Bin, true)];
+casefold_bin(CP1, <<CP2/utf8, Bin/binary>>, Changed)
+ when CP1 < 128, CP2 < 256 ->
+ [CP1|casefold_bin(CP2, Bin, Changed)]; % propagate Changed so earlier changes are not lost
+casefold_bin(CP1, Bin, Changed) ->
+ case unicode_util:casefold([CP1|Bin]) of
+ [CP1|CPs] ->
+ case unicode_util:cp(CPs) of
+ [Next|Rest] ->
+ [CP1|casefold_bin(Next, Rest, Changed)];
+ [] when Changed ->
+ [CP1];
+ [] ->
+ throw(unchanged)
+ end;
+ [Char|CPs] ->
+ case unicode_util:cp(CPs) of
+ [Next|Rest] ->
+ [Char|casefold_bin(Next, Rest, true)];
+ [] ->
+ [Char]
+ end
end.
%% Fast path for ascii searching for one character in lists
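The ASCII fast path and the 'unchanged' throw above are purely an optimization to avoid rebuilding input that needs no conversion; observable results for lists and binaries are meant to stay the same, e.g.:

    "ABC123"  = string:uppercase("abc123"),
    <<"ABC">> = string:uppercase(<<"abc">>),
    "otp"     = string:casefold("OTP"),
    <<>>      = string:lowercase(<<>>).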
diff --git a/lib/stdlib/test/epp_SUITE.erl b/lib/stdlib/test/epp_SUITE.erl
index 9123bf2f28..a3e294ffea 100644
--- a/lib/stdlib/test/epp_SUITE.erl
+++ b/lib/stdlib/test/epp_SUITE.erl
@@ -28,7 +28,8 @@
otp_8130/1, overload_mac/1, otp_8388/1, otp_8470/1,
otp_8562/1, otp_8665/1, otp_8911/1, otp_10302/1, otp_10820/1,
otp_11728/1, encoding/1, extends/1, function_macro/1,
- test_error/1, test_warning/1, otp_14285/1]).
+ test_error/1, test_warning/1, otp_14285/1,
+ test_if/1]).
-export([epp_parse_erl_form/2]).
@@ -69,7 +70,7 @@ all() ->
overload_mac, otp_8388, otp_8470, otp_8562,
otp_8665, otp_8911, otp_10302, otp_10820, otp_11728,
encoding, extends, function_macro, test_error, test_warning,
- otp_14285].
+ otp_14285, test_if].
groups() ->
[{upcase_mac, [], [upcase_mac_1, upcase_mac_2]},
@@ -799,7 +800,8 @@ otp_8130(Config) when is_list(Config) ->
PreDefMacs = macs(Epp),
['BASE_MODULE','BASE_MODULE_STRING','BEAM','FILE',
'FUNCTION_ARITY','FUNCTION_NAME',
- 'LINE','MACHINE','MODULE','MODULE_STRING'] = PreDefMacs,
+ 'LINE','MACHINE','MODULE','MODULE_STRING',
+ 'OTP_RELEASE'] = PreDefMacs,
{ok,[{'-',_},{atom,_,file}|_]} = epp:scan_erl_form(Epp),
{ok,[{'-',_},{atom,_,module}|_]} = epp:scan_erl_form(Epp),
{ok,[{atom,_,t}|_]} = epp:scan_erl_form(Epp),
@@ -952,27 +954,7 @@ ifdef(Config) ->
{define_c5,
<<"-\ndefine a.\n">>,
- {errors,[{{2,1},epp,{bad,define}}],[]}},
-
- {define_c6,
- <<"\n-if.\n"
- "-endif.\n">>,
- {errors,[{{2,2},epp,{'NYI','if'}}],[]}},
-
- {define_c7,
- <<"-ifndef(a).\n"
- "-elif.\n"
- "-endif.\n">>,
- {errors,[{{2,2},epp,{'NYI',elif}}],[]}},
-
- {define_c7,
- <<"-ifndef(a).\n"
- "-if.\n"
- "-elif.\n"
- "-endif.\n"
- "-endif.\n"
- "t() -> a.\n">>,
- {errors,[{{2,2},epp,{'NYI','if'}}],[]}}
+ {errors,[{{2,1},epp,{bad,define}}],[]}}
],
[] = compile(Config, Cs),
@@ -1117,6 +1099,147 @@ test_warning(Config) ->
[] = compile(Config, Cs),
ok.
+%% OTP-12847: Test the -if and -elif directives and the built-in
+%% function defined(Symbol).
+test_if(Config) ->
+ Cs = [{if_1c,
+ <<"-if.\n"
+ "-endif.\n"
+ "-if no_parentheses.\n"
+ "-endif.\n"
+ "-if(syntax error.\n"
+ "-endif.\n"
+ "-if(true).\n"
+ "-if(a+3).\n"
+ "syntax error not triggered here.\n"
+ "-endif.\n">>,
+ {errors,[{1,epp,{bad,'if'}},
+ {3,epp,{bad,'if'}},
+ {5,erl_parse,["syntax error before: ","error"]},
+ {11,epp,{illegal,"unterminated",'if'}}],
+ []}},
+
+ {if_2c, %Bad guard expressions.
+ <<"-if(is_list(integer_to_list(42))).\n" %Not guard BIF.
+ "-endif.\n"
+ "-if(begin true end).\n"
+ "-endif.\n">>,
+ {errors,[{1,epp,{bad,'if'}},
+ {3,epp,{bad,'if'}}],
+ []}},
+
+ {if_3c, %Invalid use of defined/1.
+ <<"-if defined(42).\n"
+ "-endif.\n">>,
+ {errors,[{1,epp,{bad,'if'}}],[]}},
+
+ {if_4c,
+ <<"-elif OTP_RELEASE > 18.\n">>,
+ {errors,[{1,epp,{illegal,"unbalanced",'elif'}}],[]}},
+
+ {if_5c,
+ <<"-ifdef(not_defined_today).\n"
+ "-else.\n"
+ "-elif OTP_RELEASE > 18.\n"
+ "-endif.\n">>,
+ {errors,[{3,epp,{illegal,"unbalanced",'elif'}}],[]}},
+
+ {if_6c,
+ <<"-if(defined(OTP_RELEASE)).\n"
+ "-else.\n"
+ "-elif(true).\n"
+ "-endif.\n">>,
+ {errors,[{3,epp,elif_after_else}],[]}},
+
+ {if_7c,
+ <<"-if(begin true end).\n" %Not a guard expression.
+ "-endif.\n">>,
+ {errors,[{1,epp,{bad,'if'}}],[]}}
+
+ ],
+ [] = compile(Config, Cs),
+
+ Ts = [{if_1,
+ <<"-if(?OTP_RELEASE > 18).\n"
+ "t() -> ok.\n"
+ "-else.\n"
+ "a bug.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_2,
+ <<"-if(false).\n"
+ "a bug.\n"
+ "-elif(?OTP_RELEASE > 18).\n"
+ "t() -> ok.\n"
+ "-else.\n"
+ "a bug.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_3,
+ <<"-if(true).\n"
+ "t() -> ok.\n"
+ "-elif(?OTP_RELEASE > 18).\n"
+ "a bug.\n"
+ "-else.\n"
+ "a bug.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_4,
+ <<"-define(a, 1).\n"
+ "-if(defined(a) andalso defined(OTP_RELEASE)).\n"
+ "t() -> ok.\n"
+ "-else.\n"
+ "a bug.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_5,
+ <<"-if(defined(a)).\n"
+ "a bug.\n"
+ "-else.\n"
+ "t() -> ok.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_6,
+ <<"-if(defined(not_defined_today)).\n"
+ " -if(true).\n"
+ " bug1.\n"
+ " -elif(true).\n"
+ " bug2.\n"
+ " -elif(true).\n"
+ " bug3.\n"
+ " -else.\n"
+ " bug4.\n"
+ " -endif.\n"
+ "-else.\n"
+ "t() -> ok.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_7,
+ <<"-if(not_builtin()).\n"
+ "a bug.\n"
+ "-else.\n"
+ "t() -> ok.\n"
+ "-endif.\n">>,
+ ok},
+
+ {if_8,
+ <<"-if(42).\n" %Not boolean.
+ "a bug.\n"
+ "-else.\n"
+ "t() -> ok.\n"
+ "-endif.\n">>,
+ ok}
+ ],
+ [] = run(Config, Ts),
+
+ ok.
+
%% Advanced test on overloading macros.
overload_mac(Config) when is_list(Config) ->
Cs = [
diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl
index 02211fa8df..574aac96c8 100644
--- a/lib/stdlib/test/ets_SUITE.erl
+++ b/lib/stdlib/test/ets_SUITE.erl
@@ -87,6 +87,7 @@
-export([t_select_reverse/1]).
+-include_lib("stdlib/include/ms_transform.hrl"). % ets:fun2ms
-include_lib("common_test/include/ct.hrl").
-define(m(A,B), assert_eq(A,B)).
@@ -173,10 +174,12 @@ groups() ->
init_per_suite(Config) ->
erts_debug:set_internal_state(available_internal_state, true),
+ erts_debug:set_internal_state(ets_force_trap, true),
Config.
end_per_suite(_Config) ->
stop_spawn_logger(),
+ erts_debug:set_internal_state(ets_force_trap, false),
catch erts_debug:set_internal_state(available_internal_state, false),
ok.
@@ -812,7 +815,60 @@ t_delete_all_objects_do(Opts) ->
4000 = ets:info(T,size),
true = ets:delete_all_objects(T),
0 = ets:info(T,size),
- ets:delete(T).
+ ets:delete(T),
+
+ %% Test delete_all_objects is atomic
+ T2 = ets:new(t_delete_all_objects, [public | Opts]),
+ Self = self(),
+ Inserters = [spawn_link(fun() -> inserter(T2, 100*1000, 1, Self) end) || _ <- [1,2,3,4]],
+ [receive {Ipid, running} -> ok end || Ipid <- Inserters],
+
+ ets:delete_all_objects(T2),
+ erlang:yield(),
+ [Ipid ! stop || Ipid <- Inserters],
+ Result = [receive {Ipid, stopped, Highest} -> {Ipid,Highest} end || Ipid <- Inserters],
+
+ %% Verify unbroken sequences of objects inserted _after_ ets:delete_all_objects.
+ Sum = lists:foldl(fun({Ipid, Highest}, AccSum) ->
+ %% ets:fun2ms(fun({{K,Ipid}}) when K =< Highest -> true end),
+ AliveMS = [{{{'$1',Ipid}},[{'=<','$1',{const,Highest}}],[true]}],
+ Alive = ets:select_count(T2, AliveMS),
+ Lowest = Highest - (Alive-1),
+
+ %% ets:fun2ms(fun({{K,Ipid}}) when K < Lowest -> true end)
+ DeletedMS = [{{{'$1',Ipid}},[{'<','$1',{const,Lowest}}],[true]}],
+ 0 = ets:select_count(T2, DeletedMS),
+ AccSum + Alive
+ end,
+ 0,
+ Result),
+ ok = case ets:info(T2, size) of
+ Sum -> ok;
+ Size ->
+ io:format("Sum = ~p\nSize = ~p\n", [Sum, Size]),
+ {Sum,Size}
+ end,
+
+ ets:delete(T2).
+
+inserter(_, 0, _, _) ->
+ ok;
+inserter(T, N, Next, Papa) ->
+ case Next of
+ 10*1000 ->
+ Papa ! {self(), running};
+ _ ->
+ ok
+ end,
+
+ ets:insert(T, {{Next, self()}}),
+ receive
+ stop ->
+ Papa ! {self(), stopped, Next},
+ ok
+ after 0 ->
+ inserter(T, N-1, Next+1, Papa)
+ end.
%% Test ets:delete_object/2.
diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl
index 9f48fbf5e3..13f2cbd27b 100644
--- a/lib/stdlib/test/io_SUITE.erl
+++ b/lib/stdlib/test/io_SUITE.erl
@@ -1808,7 +1808,7 @@ rpc_call_max(Node, M, F, Args) ->
%% Make sure that a bad specification for a printable range is rejected.
bad_printable_range(Config) when is_list(Config) ->
- Cmd = lists:concat([lib:progname()," +pcunnnnnicode -run erlang halt"]),
+ Cmd = ct:get_progname() ++ " +pcunnnnnicode -run erlang halt",
P = open_port({spawn, Cmd}, [stderr_to_stdout, {line, 200}]),
ok = receive
{P, {data, {eol , "bad range of printable characters" ++ _}}} ->
diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl
index 8f8a0f6e73..5c189a6c73 100644
--- a/lib/stdlib/test/qlc_SUITE.erl
+++ b/lib/stdlib/test/qlc_SUITE.erl
@@ -7468,7 +7468,7 @@ strip_qlc_call(H) ->
strip_qlc_call2(H) ->
S = qlc:info(H, {flat, false}),
{ok, Tokens, _EndLine} = erl_scan:string(S++".", 1, [text]),
- {ok, [Expr], Bs} = lib:extended_parse_exprs(Tokens),
+ {ok, [Expr], Bs} = erl_eval:extended_parse_exprs(Tokens),
{case Expr of
{call,_,{remote,_,{atom,_,qlc},{atom,_,q}},[LC]} ->
{qlc, lists:flatten([erl_pp:expr(LC), "."]), []};
@@ -7489,7 +7489,7 @@ strip_qlc_call2(H) ->
join_info_count(H) ->
S = qlc:info(H, {flat, false}),
{ok, Tokens, _EndLine} = erl_scan:string(S++".", 1, [text]),
- {ok, [Expr], _Bs} = lib:extended_parse_exprs(Tokens),
+ {ok, [Expr], _Bs} = erl_eval:extended_parse_exprs(Tokens),
#ji{nmerge = Nmerge, nlookup = Nlookup,
nkeysort = NKeysort, nnested_loop = Nnested_loop} =
ji(Expr, #ji{}),
@@ -7533,7 +7533,7 @@ lookup_keys({generate,_,Q}, L) ->
lookup_keys(Q, L);
lookup_keys({table,Chars}, L) when is_list(Chars) ->
{ok, Tokens, _} = erl_scan:string(lists:flatten(Chars++"."), 1, [text]),
- {ok, [Expr], _Bs} = lib:extended_parse_exprs(Tokens),
+ {ok, [Expr], _Bs} = erl_eval:extended_parse_exprs(Tokens),
case Expr of
{call,_,_,[_fun,AKs]} ->
case erl_parse:normalise(AKs) of
diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl
index ca85314775..22136d687c 100644
--- a/lib/stdlib/test/shell_SUITE.erl
+++ b/lib/stdlib/test/shell_SUITE.erl
@@ -2780,7 +2780,7 @@ otp_10302(Config) when is_list(Config) ->
rpc:call(Node,shell, prompt_func, [default]),
_ = shell:prompt_func(default),
- %% Test lib:format_exception() (cf. OTP-6554)
+ %% Test erl_error:format_exception() (cf. OTP-6554)
Test6 =
<<"begin
A = <<\"\\xaa\">>,
@@ -2967,10 +2967,10 @@ otp_14296(Config) when is_list(Config) ->
R = t(S)
end(),
- %% Test lib:extended_parse_term/1
+ %% Test erl_eval:extended_parse_term/1
TF = fun(S) ->
{ok, Ts, _} = erl_scan:string(S++".", 1, [text]),
- case lib:extended_parse_term(Ts) of
+ case erl_eval:extended_parse_term(Ts) of
{ok, Term} -> Term;
{error, _}=Error -> Error
end
diff --git a/lib/stdlib/test/string_SUITE.erl b/lib/stdlib/test/string_SUITE.erl
index fdff2d24b8..29fabb4583 100644
--- a/lib/stdlib/test/string_SUITE.erl
+++ b/lib/stdlib/test/string_SUITE.erl
@@ -810,6 +810,18 @@ do_measure(DataDir) ->
Do2(slice, repeat(fun() -> string:slice(S0, 20, 15) end), list),
Do2(slice, repeat(fun() -> string:slice(S0B, 20, 15) end), binary),
+ LCase = "areaa reare rerar earea reare reare",
+ LCaseB = unicode:characters_to_binary(LCase),
+ UCase = string:uppercase(LCase),
+ UCaseB = unicode:characters_to_binary(UCase),
+
+ Do2(to_upper_0, repeat(fun() -> string:to_upper(UCase) end), list),
+ Do2(uppercase_0, repeat(fun() -> string:uppercase(UCase) end), list),
+ Do2(uppercase_0, repeat(fun() -> string:uppercase(UCaseB) end), binary),
+ Do2(to_upper_a, repeat(fun() -> string:to_upper(LCase) end), list),
+ Do2(uppercase_a, repeat(fun() -> string:uppercase(LCase) end), list),
+ Do2(uppercase_a, repeat(fun() -> string:uppercase(LCaseB) end), binary),
+
io:format("--~n",[]),
NthTokens = {nth_lexemes, fun(Str) -> string:nth_lexeme(Str, 18000, [$\n,$\r]) end},
[Do(Name,Fun,Mode) || {Name,Fun} <- [NthTokens], Mode <- [list, binary]],
diff --git a/lib/syntax_tools/src/epp_dodger.erl b/lib/syntax_tools/src/epp_dodger.erl
index 0a12e8fd8b..7e741cc649 100644
--- a/lib/syntax_tools/src/epp_dodger.erl
+++ b/lib/syntax_tools/src/epp_dodger.erl
@@ -502,6 +502,10 @@ quickscan_form([{'-', _L}, {atom, La, ifdef} | _Ts]) ->
kill_form(La);
quickscan_form([{'-', _L}, {atom, La, ifndef} | _Ts]) ->
kill_form(La);
+quickscan_form([{'-', _L}, {'if', La} | _Ts]) ->
+ kill_form(La);
+quickscan_form([{'-', _L}, {atom, La, elif} | _Ts]) ->
+ kill_form(La);
quickscan_form([{'-', _L}, {atom, La, else} | _Ts]) ->
kill_form(La);
quickscan_form([{'-', _L}, {atom, La, endif} | _Ts]) ->
@@ -615,8 +619,13 @@ filter_form(T) ->
%% ---------------------------------------------------------------------
%% Normal parsing - try to preserve all information
-normal_parser(Ts, Opt) ->
- rewrite_form(parse_tokens(scan_form(Ts, Opt))).
+normal_parser(Ts0, Opt) ->
+ case scan_form(Ts0, Opt) of
+ Ts when is_list(Ts) ->
+ rewrite_form(parse_tokens(Ts));
+ Node ->
+ Node
+ end.
scan_form([{'-', _L}, {atom, La, define} | Ts], Opt) ->
[{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
@@ -636,12 +645,26 @@ scan_form([{'-', _L}, {atom, La, ifdef} | Ts], Opt) ->
scan_form([{'-', _L}, {atom, La, ifndef} | Ts], Opt) ->
[{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
{atom, La, ifndef} | scan_macros(Ts, Opt)];
+scan_form([{'-', _L}, {'if', La} | Ts], Opt) ->
+ [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
+ {atom, La, 'if'} | scan_macros(Ts, Opt)];
+scan_form([{'-', _L}, {atom, La, elif} | Ts], Opt) ->
+ [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
+ {atom, La, 'elif'} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, else} | Ts], Opt) ->
[{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
{atom, La, else} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, endif} | Ts], Opt) ->
[{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
{atom, La, endif} | scan_macros(Ts, Opt)];
+scan_form([{'-', _L}, {atom, La, error} | Ts], _Opt) ->
+ Desc = build_info_string("-error", Ts),
+ ErrorInfo = {La, ?MODULE, {error, Desc}},
+ erl_syntax:error_marker(ErrorInfo);
+scan_form([{'-', _L}, {atom, La, warning} | Ts], _Opt) ->
+ Desc = build_info_string("-warning", Ts),
+ ErrorInfo = {La, ?MODULE, {warning, Desc}},
+ erl_syntax:error_marker(ErrorInfo);
scan_form([{'-', L}, {'?', L1}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt)
when Type =:= atom; Type =:= var ->
%% minus, macro and open parenthesis at start of form - assume that
@@ -657,6 +680,11 @@ scan_form([{'?', L}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt)
scan_form(Ts, Opt) ->
scan_macros(Ts, Opt).
+build_info_string(Prefix, Ts0) ->
+ Ts = lists:droplast(Ts0),
+ String = lists:droplast(tokens_to_string(Ts)),
+ Prefix ++ " " ++ String ++ ".".
+
scan_macros(Ts, Opt) ->
scan_macros(Ts, [], Opt).
@@ -865,6 +893,10 @@ tokens_to_string([]) ->
format_error(macro_args) ->
errormsg("macro call missing end parenthesis");
+format_error({error, Error}) ->
+ Error;
+format_error({warning, Error}) ->
+ Error;
format_error({unknown, Reason}) ->
errormsg(io_lib:format("unknown error: ~tP", [Reason, 15])).
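
Taken together, the epp_dodger changes mean that -if/-elif are handled like the other conditional directives, and that -error/-warning forms come back as error markers instead of aborting the dodger. A hedged usage sketch; the file name and the directives shown in the comment are made-up examples:

    %% Hypothetical sketch; "demo.erl" and its contents are assumptions.
    %% demo.erl could contain, for instance:
    %%   -if(?OTP_RELEASE >= 21).
    %%   -warning("taking the new code path").
    %%   -endif.
    {ok, Forms} = epp_dodger:parse_file("demo.erl"),
    %% -error/-warning forms show up as error markers whose info carries
    %% the rendered "-error ..."/"-warning ..." string built above.
    [io:format("marker: ~p~n", [erl_syntax:error_marker_info(F)])
     || F <- Forms, erl_syntax:type(F) =:= error_marker].
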
diff --git a/lib/syntax_tools/src/erl_prettypr.erl b/lib/syntax_tools/src/erl_prettypr.erl
index 60a15c8e3f..6906ef1553 100644
--- a/lib/syntax_tools/src/erl_prettypr.erl
+++ b/lib/syntax_tools/src/erl_prettypr.erl
@@ -675,7 +675,12 @@ lay_2(Node, Ctxt) ->
%% attribute name, without following parentheses.
Ctxt1 = reset_prec(Ctxt),
Args = erl_syntax:attribute_arguments(Node),
- N = erl_syntax:attribute_name(Node),
+ N = case erl_syntax:attribute_name(Node) of
+ {atom, _, 'if'} ->
+ erl_syntax:variable('if');
+ N0 ->
+ N0
+ end,
D = case attribute_type(Node) of
spec ->
[SpecTuple] = Args,
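
The extra case is needed because 'if' is a reserved word: laid out as an ordinary atom node the attribute name would be quoted, so substituting a variable node makes it print bare. A hedged sketch of the effect, reusing a file parsed with epp_dodger as above ("demo.erl" is still an assumption):

    %% Hypothetical sketch; assumes "demo.erl" contains an -if(...) form
    %% parsed with epp_dodger (see the epp_dodger change above).
    {ok, Forms} = epp_dodger:parse_file("demo.erl"),
    io:format("~s~n", [erl_prettypr:format(erl_syntax:form_list(Forms))]).
    %% Without the clause above the -if attribute renders with a quoted
    %% name, -'if'(...); with it, the name prints bare as -if(...).
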
diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl
index c7f477c4d2..ced0dba3e2 100644
--- a/lib/syntax_tools/src/erl_syntax_lib.erl
+++ b/lib/syntax_tools/src/erl_syntax_lib.erl
@@ -1317,6 +1317,8 @@ analyze_attribute(Node) ->
include_lib -> preprocessor;
ifdef -> preprocessor;
ifndef -> preprocessor;
+ 'if' -> preprocessor;
+ elif -> preprocessor;
else -> preprocessor;
endif -> preprocessor;
A ->
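
With 'if' and elif in the list, erl_syntax_lib:analyze_attribute/1 classifies the new conditional directives just like ifdef/ifndef/else/endif. A minimal hedged check; the attribute is built by hand and its argument is a placeholder:

    %% Hypothetical sketch; only the attribute name matters here.
    Attr = erl_syntax:attribute(erl_syntax:atom(elif),
                                [erl_syntax:atom(true)]),
    preprocessor = erl_syntax_lib:analyze_attribute(Attr).
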
diff --git a/lib/tools/doc/src/fprof.xml b/lib/tools/doc/src/fprof.xml
index 4c9e48045e..72624bd33b 100644
--- a/lib/tools/doc/src/fprof.xml
+++ b/lib/tools/doc/src/fprof.xml
@@ -328,10 +328,16 @@
purposes. This option is only
allowed with the <c>start</c> option.</item>
<tag><c>cpu_time</c>| <c>{cpu_time, bool()}</c></tag>
- <item>The options <c>cpu_time</c> or <c>{cpu_time, true></c>
+ <item>The options <c>cpu_time</c> or <c>{cpu_time, true}</c>
makes the timestamps in the trace be in CPU time instead
of wallclock time, which is the default. This option is
- only allowed with the <c>start</c> option.</item>
+ only allowed with the <c>start</c> option.
+ <warning><p>Getting correct values out of <c>cpu_time</c> can be difficult.
+ The best way to get correct values is to run with a single
+ scheduler and bind that scheduler to a specific CPU,
+ for example <c>erl +S 1 +sbt db</c>.</p>
+ </warning>
+ </item>
<tag><c>{procs, PidSpec}</c>| <c>{procs, [PidSpec]}</c></tag>
<item>Specifies which processes shall be traced. If
this option is not given, the calling process is
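
In other words, the new warning says CPU-time timestamps are only trustworthy when the trace comes from a single scheduler pinned to one core. A hedged sketch of a run under those conditions; the profiled expression and the output file name are placeholders:

    %% Start the node as:  erl +S 1 +sbt db
    %% Hypothetical sketch; the profiled work below is a placeholder.
    fprof:trace([start, cpu_time]),
    lists:sort(lists:seq(10000, 1, -1)),      % code to be profiled
    fprof:trace(stop),
    fprof:profile(),
    fprof:analyse([{dest, "fprof.analysis"}]).
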
diff --git a/lib/tools/src/xref.erl b/lib/tools/src/xref.erl
index 32efa36fa2..466ec7d331 100644
--- a/lib/tools/src/xref.erl
+++ b/lib/tools/src/xref.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -182,7 +182,9 @@ split_args(Opts) ->
end.
stop(Name) ->
- gen_server:call(Name, stop, infinity).
+ try gen_server:call(Name, stop, infinity)
+ after catch unregister(Name) % ensure the name is gone
+ end.
add_release(Name, Dir) ->
gen_server:call(Name, {add_release, Dir}, infinity).
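
The try ... after wrapper releases the registered name even when the gen_server call exits (for example if the server is already dead), so a new xref server can be started under the same name right away. A hedged sketch; the server name s is arbitrary:

    %% Hypothetical sketch; the server name 's' is arbitrary.
    {ok, _Pid1} = xref:start(s),
    xref:stop(s),                 % unregisters the name even on failure
    {ok, _Pid2} = xref:start(s).  % restarting under the same name works
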
diff --git a/lib/tools/test/eprof_SUITE_data/eed.erl b/lib/tools/test/eprof_SUITE_data/eed.erl
index 5f2a21aa60..9fe49c6f5c 100644
--- a/lib/tools/test/eprof_SUITE_data/eed.erl
+++ b/lib/tools/test/eprof_SUITE_data/eed.erl
@@ -54,7 +54,7 @@ edit(Name) ->
loop(St0) ->
{ok, St1, Cmd} = get_line(St0),
- case catch command(lib:nonl(Cmd), St1) of
+ case catch command(nonl(Cmd), St1) of
{'EXIT', Reason} ->
%% XXX Should clear outstanding global command here.
loop(print_error({'EXIT', Reason}, St1));
@@ -66,6 +66,10 @@ loop(St0) ->
loop(St2)
end.
+nonl([$\n]) -> [];
+nonl([]) -> [];
+nonl([H|T]) -> [H|nonl(T)].
+
command(Cmd, St) ->
case parse_command(Cmd, St) of
quit ->
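
The new nonl/1 replaces the earlier call to lib:nonl/1 with a local helper: it strips a single trailing newline and leaves everything else untouched. A hedged illustration of the expected behaviour:

    %% Expected behaviour of the local helper above:
    "1,$p" = nonl("1,$p\n"),
    ""     = nonl("\n"),
    "abc"  = nonl("abc").
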